[PATCH 03/47] tag-cmd: accommodate python 3

Rob Browning

Feb 8, 2020, 2:26:19 PM
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
cmd/tag-cmd.py | 50 +++++++++++++++++++++++++++++---------------------
1 file changed, 29 insertions(+), 21 deletions(-)

diff --git a/cmd/tag-cmd.py b/cmd/tag-cmd.py
index 2fc068dd..44c5b33f 100755
--- a/cmd/tag-cmd.py
+++ b/cmd/tag-cmd.py
@@ -5,11 +5,14 @@ exec "$bup_python" "$0" ${1+"$@"}
"""
# end of bup preamble

-from __future__ import absolute_import, print_function
+from __future__ import absolute_import
+from binascii import hexlify
import os, sys

from bup import git, options
+from bup.compat import argv_bytes
from bup.helpers import debug1, handle_ctrl_c, log
+from bup.io import byte_stream, path_msg

# FIXME: review for safe writes.

@@ -36,31 +39,36 @@ if opt.delete:
# could implement this verification but we'd need to read in the
# contents of the tag file and pass the hash, and we already know
# about the tag's existance via "tags".
- if not opt.force and opt.delete not in tags:
- log("error: tag '%s' doesn't exist\n" % opt.delete)
+ tag_name = argv_bytes(opt.delete)
+ if not opt.force and tag_name not in tags:
+ log("error: tag '%s' doesn't exist\n" % path_msg(tag_name))
sys.exit(1)
- tag_file = 'refs/tags/%s' % opt.delete
+ tag_file = b'refs/tags/%s' % tag_name
git.delete_ref(tag_file)
sys.exit(0)

if not extra:
for t in tags:
- print(t)
+ sys.stdout.flush()
+ out = byte_stream(sys.stdout)
+ out.write(t)
+ out.write(b'\n')
sys.exit(0)
-elif len(extra) < 2:
- o.fatal('no commit ref or hash given.')
+elif len(extra) != 2:
+ o.fatal('expected commit ref and hash')

-(tag_name, commit) = extra[:2]
+tag_name, commit = map(argv_bytes, extra[:2])
if not tag_name:
o.fatal("tag name must not be empty.")
-debug1("args: tag name = %s; commit = %s\n" % (tag_name, commit))
+debug1("args: tag name = %s; commit = %s\n"
+ % (path_msg(tag_name), commit.decode('ascii')))

if tag_name in tags and not opt.force:
- log("bup: error: tag '%s' already exists\n" % tag_name)
+ log("bup: error: tag '%s' already exists\n" % path_msg(tag_name))
sys.exit(1)

-if tag_name.startswith('.'):
- o.fatal("'%s' is not a valid tag name." % tag_name)
+if tag_name.startswith(b'.'):
+ o.fatal("'%s' is not a valid tag name." % path_msg(tag_name))

try:
hash = git.rev_parse(commit)
@@ -69,20 +77,20 @@ except git.GitError as e:
sys.exit(2)

if not hash:
- log("bup: error: commit %s not found.\n" % commit)
+ log("bup: error: commit %s not found.\n" % commit.decode('ascii'))
sys.exit(2)

-pL = git.PackIdxList(git.repo('objects/pack'))
+pL = git.PackIdxList(git.repo(b'objects/pack'))
if not pL.exists(hash):
- log("bup: error: commit %s not found.\n" % commit)
+ log("bup: error: commit %s not found.\n" % commit.decode('ascii'))
sys.exit(2)

-tag_file = git.repo('refs/tags/%s' % tag_name)
+tag_file = git.repo(b'refs/tags/' + tag_name)
try:
- tag = open(tag_file, 'w')
+ tag = open(tag_file, 'wb')
except OSError as e:
- log("bup: error: could not create tag '%s': %s" % (tag_name, e))
+ log("bup: error: could not create tag '%s': %s" % (path_msg(tag_name), e))
sys.exit(3)
-
-tag.write(hash.encode('hex'))
-tag.close()
+with tag as tag:
+ tag.write(hexlify(hash))
+ tag.write(b'\n')
--
2.24.1

Rob Browning

Feb 8, 2020, 2:26:19 PM
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 4 +-
lib/bup/t/tvfs.py | 233 ++++++++++++++++++++++----------------------
lib/bup/test/vfs.py | 6 +-
3 files changed, 124 insertions(+), 119 deletions(-)

diff --git a/Makefile b/Makefile
index edf6f658..24d75faa 100644
--- a/Makefile
+++ b/Makefile
@@ -152,14 +152,14 @@ python_tests := \
lib/bup/t/tmetadata.py \
lib/bup/t/toptions.py \
lib/bup/t/tshquote.py \
+ lib/bup/t/tvfs.py \
lib/bup/t/tvint.py \
lib/bup/t/txstat.py

ifeq "2" "$(bup_python_majver)"
python_tests += \
lib/bup/t/tclient.py \
- lib/bup/t/tresolve.py \
- lib/bup/t/tvfs.py
+ lib/bup/t/tresolve.py
endif

# The "pwd -P" here may not be appropriate in the long run, but we
diff --git a/lib/bup/t/tvfs.py b/lib/bup/t/tvfs.py
index a35ad057..a2500f94 100644
--- a/lib/bup/t/tvfs.py
+++ b/lib/bup/t/tvfs.py
@@ -1,18 +1,20 @@

from __future__ import absolute_import, print_function
+from binascii import unhexlify
from collections import namedtuple
from errno import ELOOP, ENOTDIR
from io import BytesIO
-from os import environ, symlink
+from os import symlink
from random import Random, randint
from stat import S_IFDIR, S_IFLNK, S_IFREG, S_ISDIR, S_ISREG
from sys import stderr
-from time import localtime, strftime
+from time import localtime, strftime, tzset

from wvtest import *

from bup._helpers import write_random
from bup import git, metadata, vfs
+from bup.compat import environ, fsencode, items, range
from bup.git import BUP_CHUNKED
from bup.helpers import exc, shstr
from bup.metadata import Metadata
@@ -20,9 +22,9 @@ from bup.repo import LocalRepo
from bup.test.vfs import tree_dict
from buptest import ex, exo, no_lingering_errors, test_tempdir

-top_dir = '../../..'
-bup_tmp = os.path.realpath('../../../t/tmp')
-bup_path = top_dir + '/bup'
+top_dir = b'../../..'
+bup_tmp = os.path.realpath(b'../../../t/tmp')
+bup_path = top_dir + b'/bup'
start_dir = os.getcwd()

def ex(cmd, **kwargs):
@@ -44,22 +46,22 @@ def test_cache_behavior():
wvpasseq({}, vfs._cache)
wvpasseq([], vfs._cache_keys)
wvfail(vfs._cache_keys)
- wvexcept(Exception, vfs.cache_notice, 'x', 1)
- key_0 = 'itm:' + b'\0' * 20
- key_1 = 'itm:' + b'\1' * 20
- key_2 = 'itm:' + b'\2' * 20
- vfs.cache_notice(key_0, 'something')
- wvpasseq({key_0 : 'something'}, vfs._cache)
+ wvexcept(Exception, vfs.cache_notice, b'x', 1)
+ key_0 = b'itm:' + b'\0' * 20
+ key_1 = b'itm:' + b'\1' * 20
+ key_2 = b'itm:' + b'\2' * 20
+ vfs.cache_notice(key_0, b'something')
+ wvpasseq({key_0 : b'something'}, vfs._cache)
wvpasseq([key_0], vfs._cache_keys)
- vfs.cache_notice(key_1, 'something else')
- wvpasseq({key_0 : 'something', key_1 : 'something else'}, vfs._cache)
+ vfs.cache_notice(key_1, b'something else')
+ wvpasseq({key_0 : b'something', key_1 : b'something else'}, vfs._cache)
wvpasseq(frozenset([key_0, key_1]), frozenset(vfs._cache_keys))
- vfs.cache_notice(key_2, 'and also')
+ vfs.cache_notice(key_2, b'and also')
wvpasseq(2, len(vfs._cache))
- wvpass(frozenset(vfs._cache.iteritems())
- < frozenset({key_0 : 'something',
- key_1 : 'something else',
- key_2 : 'and also'}.iteritems()))
+ wvpass(frozenset(items(vfs._cache))
+ < frozenset(items({key_0 : b'something',
+ key_1 : b'something else',
+ key_2 : b'and also'})))
wvpasseq(2, len(vfs._cache_keys))
wvpass(frozenset(vfs._cache_keys) < frozenset([key_0, key_1, key_2]))
vfs.clear_cache()
@@ -135,75 +137,75 @@ def run_augment_item_meta_tests(repo,
def test_item_mode():
with no_lingering_errors():
mode = S_IFDIR | 0o755
- meta = metadata.from_path('.')
- oid = '\0' * 20
+ meta = metadata.from_path(b'.')
+ oid = b'\0' * 20
wvpasseq(mode, vfs.item_mode(vfs.Item(oid=oid, meta=mode)))
wvpasseq(meta.mode, vfs.item_mode(vfs.Item(oid=oid, meta=meta)))

@wvtest
def test_reverse_suffix_duplicates():
suffix = lambda x: tuple(vfs._reverse_suffix_duplicates(x))
- wvpasseq(('x',), suffix(('x',)))
- wvpasseq(('x', 'y'), suffix(('x', 'y')))
- wvpasseq(('x-1', 'x-0'), suffix(('x',) * 2))
- wvpasseq(['x-%02d' % n for n in reversed(range(11))],
- list(suffix(('x',) * 11)))
- wvpasseq(('x-1', 'x-0', 'y'), suffix(('x', 'x', 'y')))
- wvpasseq(('x', 'y-1', 'y-0'), suffix(('x', 'y', 'y')))
- wvpasseq(('x', 'y-1', 'y-0', 'z'), suffix(('x', 'y', 'y', 'z')))
+ wvpasseq((b'x',), suffix((b'x',)))
+ wvpasseq((b'x', b'y'), suffix((b'x', b'y')))
+ wvpasseq((b'x-1', b'x-0'), suffix((b'x',) * 2))
+ wvpasseq([b'x-%02d' % n for n in reversed(range(11))],
+ list(suffix((b'x',) * 11)))
+ wvpasseq((b'x-1', b'x-0', b'y'), suffix((b'x', b'x', b'y')))
+ wvpasseq((b'x', b'y-1', b'y-0'), suffix((b'x', b'y', b'y')))
+ wvpasseq((b'x', b'y-1', b'y-0', b'z'), suffix((b'x', b'y', b'y', b'z')))

@wvtest
def test_misc():
with no_lingering_errors():
- with test_tempdir('bup-tvfs-') as tmpdir:
- bup_dir = tmpdir + '/bup'
- environ['GIT_DIR'] = bup_dir
- environ['BUP_DIR'] = bup_dir
+ with test_tempdir(b'bup-tvfs-') as tmpdir:
+ bup_dir = tmpdir + b'/bup'
+ environ[b'GIT_DIR'] = bup_dir
+ environ[b'BUP_DIR'] = bup_dir
git.repodir = bup_dir
- data_path = tmpdir + '/src'
+ data_path = tmpdir + b'/src'
os.mkdir(data_path)
- with open(data_path + '/file', 'w+') as tmpfile:
+ with open(data_path + b'/file', 'wb+') as tmpfile:
tmpfile.write(b'canary\n')
- symlink('file', data_path + '/symlink')
- ex((bup_path, 'init'))
- ex((bup_path, 'index', '-v', data_path))
- ex((bup_path, 'save', '-d', '100000', '-tvvn', 'test', '--strip',
- data_path))
+ symlink(b'file', data_path + b'/symlink')
+ ex((bup_path, b'init'))
+ ex((bup_path, b'index', b'-v', data_path))
+ ex((bup_path, b'save', b'-d', b'100000', b'-tvvn', b'test',
+ b'--strip', data_path))
repo = LocalRepo()

wvstart('readlink')
- ls_tree = exo(('git', 'ls-tree', 'test', 'symlink')).out
+ ls_tree = exo((b'git', b'ls-tree', b'test', b'symlink')).out
mode, typ, oidx, name = ls_tree.strip().split(None, 3)
- assert name == 'symlink'
- link_item = vfs.Item(oid=oidx.decode('hex'), meta=int(mode, 8))
- wvpasseq('file', vfs.readlink(repo, link_item))
+ assert name == b'symlink'
+ link_item = vfs.Item(oid=unhexlify(oidx), meta=int(mode, 8))
+ wvpasseq(b'file', vfs.readlink(repo, link_item))

- ls_tree = exo(('git', 'ls-tree', 'test', 'file')).out
+ ls_tree = exo((b'git', b'ls-tree', b'test', b'file')).out
mode, typ, oidx, name = ls_tree.strip().split(None, 3)
- assert name == 'file'
- file_item = vfs.Item(oid=oidx.decode('hex'), meta=int(mode, 8))
+ assert name == b'file'
+ file_item = vfs.Item(oid=unhexlify(oidx), meta=int(mode, 8))
wvexcept(Exception, vfs.readlink, repo, file_item)

wvstart('item_size')
wvpasseq(4, vfs.item_size(repo, link_item))
wvpasseq(7, vfs.item_size(repo, file_item))
- meta = metadata.from_path(__file__)
+ meta = metadata.from_path(fsencode(__file__))
meta.size = 42
fake_item = file_item._replace(meta=meta)
wvpasseq(42, vfs.item_size(repo, fake_item))

- _, fakelink_item = vfs.resolve(repo, '/test/latest', follow=False)[-1]
+ _, fakelink_item = vfs.resolve(repo, b'/test/latest', follow=False)[-1]
wvpasseq(17, vfs.item_size(repo, fakelink_item))

wvstart('augment_item_meta')
run_augment_item_meta_tests(repo,
- '/test/latest/file', 7,
- '/test/latest/symlink', 'file')
+ b'/test/latest/file', 7,
+ b'/test/latest/symlink', b'file')

wvstart('copy_item')
# FIXME: this caused StopIteration
#_, file_item = vfs.resolve(repo, '/file')[-1]
- _, file_item = vfs.resolve(repo, '/test/latest/file')[-1]
+ _, file_item = vfs.resolve(repo, b'/test/latest/file')[-1]
file_copy = vfs.copy_item(file_item)
wvpass(file_copy is not file_item)
wvpass(file_copy.meta is not file_item.meta)
@@ -215,7 +217,7 @@ def test_misc():

def write_sized_random_content(parent_dir, size, seed):
verbose = 0
- with open('%s/%d' % (parent_dir, size), 'wb') as f:
+ with open(b'%s/%d' % (parent_dir, size), 'wb') as f:
write_random(f.fileno(), size, seed, verbose)

def validate_vfs_streaming_read(repo, item, expected_path, read_sizes):
@@ -231,8 +233,8 @@ def validate_vfs_streaming_read(repo, item, expected_path, read_sizes):
wvpass(ex_buf == act_buf)
ex_buf = expected.read(read_size)
act_buf = actual.read(read_size)
- wvpasseq('', ex_buf)
- wvpasseq('', act_buf)
+ wvpasseq(b'', ex_buf)
+ wvpasseq(b'', act_buf)

def validate_vfs_seeking_read(repo, item, expected_path, read_sizes):
def read_act(act_pos):
@@ -260,8 +262,8 @@ def validate_vfs_seeking_read(repo, item, expected_path, read_sizes):
ex_buf = expected.read(read_size)
else: # hit expected eof first
act_pos, act_buf = read_act(act_pos)
- wvpasseq('', ex_buf)
- wvpasseq('', act_buf)
+ wvpasseq(b'', ex_buf)
+ wvpasseq(b'', act_buf)

@wvtest
def test_read_and_seek():
@@ -270,120 +272,123 @@ def test_read_and_seek():
# from the vfs when seeking and reading with various block sizes
# matches the original content.
with no_lingering_errors():
- with test_tempdir('bup-tvfs-read-') as tmpdir:
+ with test_tempdir(b'bup-tvfs-read-') as tmpdir:
resolve = vfs.resolve
- bup_dir = tmpdir + '/bup'
- environ['GIT_DIR'] = bup_dir
- environ['BUP_DIR'] = bup_dir
+ bup_dir = tmpdir + b'/bup'
+ environ[b'GIT_DIR'] = bup_dir
+ environ[b'BUP_DIR'] = bup_dir
git.repodir = bup_dir
repo = LocalRepo()
- data_path = tmpdir + '/src'
+ data_path = tmpdir + b'/src'
os.mkdir(data_path)
seed = randint(-(1 << 31), (1 << 31) - 1)
rand = Random()
rand.seed(seed)
print('test_read seed:', seed, file=sys.stderr)
max_size = 2 * 1024 * 1024
- sizes = set((rand.randint(1, max_size) for _ in xrange(5)))
+ sizes = set((rand.randint(1, max_size) for _ in range(5)))
sizes.add(1)
sizes.add(max_size)
for size in sizes:
write_sized_random_content(data_path, size, seed)
- ex((bup_path, 'init'))
- ex((bup_path, 'index', '-v', data_path))
- ex((bup_path, 'save', '-d', '100000', '-tvvn', 'test', '--strip',
- data_path))
- read_sizes = set((rand.randint(1, max_size) for _ in xrange(10)))
+ ex((bup_path, b'init'))
+ ex((bup_path, b'index', b'-v', data_path))
+ ex((bup_path, b'save', b'-d', b'100000', b'-tvvn', b'test',
+ b'--strip', data_path))
+ read_sizes = set((rand.randint(1, max_size) for _ in range(10)))
sizes.add(1)
sizes.add(max_size)
print('test_read src sizes:', sizes, file=sys.stderr)
print('test_read read sizes:', read_sizes, file=sys.stderr)
for size in sizes:
- res = resolve(repo, '/test/latest/' + str(size))
+ res = resolve(repo, b'/test/latest/' + str(size).encode('ascii'))
_, item = res[-1]
wvpasseq(size, vfs.item_size(repo, res[-1][1]))
validate_vfs_streaming_read(repo, item,
- '%s/%d' % (data_path, size),
+ b'%s/%d' % (data_path, size),
read_sizes)
validate_vfs_seeking_read(repo, item,
- '%s/%d' % (data_path, size),
+ b'%s/%d' % (data_path, size),
read_sizes)

@wvtest
def test_contents_with_mismatched_bupm_git_ordering():
with no_lingering_errors():
- with test_tempdir('bup-tvfs-') as tmpdir:
- bup_dir = tmpdir + '/bup'
- environ['GIT_DIR'] = bup_dir
- environ['BUP_DIR'] = bup_dir
+ with test_tempdir(b'bup-tvfs-') as tmpdir:
+ bup_dir = tmpdir + b'/bup'
+ environ[b'GIT_DIR'] = bup_dir
+ environ[b'BUP_DIR'] = bup_dir
git.repodir = bup_dir
- data_path = tmpdir + '/src'
+ data_path = tmpdir + b'/src'
os.mkdir(data_path)
- os.mkdir(data_path + '/foo')
- with open(data_path + '/foo.', 'w+') as tmpfile:
+ os.mkdir(data_path + b'/foo')
+ with open(data_path + b'/foo.', 'wb+') as tmpfile:
tmpfile.write(b'canary\n')
- ex((bup_path, 'init'))
- ex((bup_path, 'index', '-v', data_path))
+ ex((bup_path, b'init'))
+ ex((bup_path, b'index', b'-v', data_path))
save_utc = 100000
- save_name = strftime('%Y-%m-%d-%H%M%S', localtime(save_utc))
- ex((bup_path, 'save', '-tvvn', 'test', '-d', str(save_utc),
- '--strip', data_path))
+ save_name = strftime('%Y-%m-%d-%H%M%S', localtime(save_utc)).encode('ascii')
+ ex((bup_path, b'save', b'-tvvn', b'test', b'-d', b'%d' % save_utc,
+ b'--strip', data_path))
repo = LocalRepo()
- tip_sref = exo(('git', 'show-ref', 'refs/heads/test')).out
+ tip_sref = exo((b'git', b'show-ref', b'refs/heads/test')).out
tip_oidx = tip_sref.strip().split()[0]
- tip_tree_oidx = exo(('git', 'log', '--pretty=%T', '-n1',
+ tip_tree_oidx = exo((b'git', b'log', b'--pretty=%T', b'-n1',
tip_oidx)).out.strip()
- tip_tree_oid = tip_tree_oidx.decode('hex')
+ tip_tree_oid = unhexlify(tip_tree_oidx)
tip_tree = tree_dict(repo, tip_tree_oid)

- name, item = vfs.resolve(repo, '/test/latest')[2]
+ name, item = vfs.resolve(repo, b'/test/latest')[2]
wvpasseq(save_name, name)
expected = frozenset((x.name, vfs.Item(oid=x.oid, meta=x.meta))
for x in (tip_tree[name]
- for name in ('.', 'foo', 'foo.')))
+ for name in (b'.', b'foo', b'foo.')))
contents = tuple(vfs.contents(repo, item))
wvpasseq(expected, frozenset(contents))
# Spot check, in case tree_dict shares too much code with the vfs
- name, item = next(((n, i) for n, i in contents if n == 'foo'))
+ name, item = next(((n, i) for n, i in contents if n == b'foo'))
wvpass(S_ISDIR(item.meta))
- name, item = next(((n, i) for n, i in contents if n == 'foo.'))
+ name, item = next(((n, i) for n, i in contents if n == b'foo.'))
wvpass(S_ISREG(item.meta.mode))

@wvtest
def test_duplicate_save_dates():
with no_lingering_errors():
- with test_tempdir('bup-tvfs-') as tmpdir:
- bup_dir = tmpdir + '/bup'
- environ['GIT_DIR'] = bup_dir
- environ['BUP_DIR'] = bup_dir
- environ['TZ'] = 'UTC'
+ with test_tempdir(b'bup-tvfs-') as tmpdir:
+ bup_dir = tmpdir + b'/bup'
+ environ[b'GIT_DIR'] = bup_dir
+ environ[b'BUP_DIR'] = bup_dir
+ environ[b'TZ'] = b'UTC'
+ tzset()
git.repodir = bup_dir
- data_path = tmpdir + '/src'
+ data_path = tmpdir + b'/src'
os.mkdir(data_path)
- with open(data_path + '/file', 'w+') as tmpfile:
+ with open(data_path + b'/file', 'wb+') as tmpfile:
tmpfile.write(b'canary\n')
- ex((bup_path, 'init'))
- ex((bup_path, 'index', '-v', data_path))
+ ex((b'env',))
+ ex((bup_path, b'init'))
+ ex((bup_path, b'index', b'-v', data_path))
for i in range(11):
- ex((bup_path, 'save', '-d', '100000', '-n', 'test', data_path))
+ ex((bup_path, b'save', b'-d', b'100000', b'-n', b'test',
+ data_path))
repo = LocalRepo()
- res = vfs.resolve(repo, '/test')
+ res = vfs.resolve(repo, b'/test')
wvpasseq(2, len(res))
name, revlist = res[-1]
- wvpasseq('test', name)
- wvpasseq(('.',
- '1970-01-02-034640-00',
- '1970-01-02-034640-01',
- '1970-01-02-034640-02',
- '1970-01-02-034640-03',
- '1970-01-02-034640-04',
- '1970-01-02-034640-05',
- '1970-01-02-034640-06',
- '1970-01-02-034640-07',
- '1970-01-02-034640-08',
- '1970-01-02-034640-09',
- '1970-01-02-034640-10',
- 'latest'),
+ wvpasseq(b'test', name)
+ wvpasseq((b'.',
+ b'1970-01-02-034640-00',
+ b'1970-01-02-034640-01',
+ b'1970-01-02-034640-02',
+ b'1970-01-02-034640-03',
+ b'1970-01-02-034640-04',
+ b'1970-01-02-034640-05',
+ b'1970-01-02-034640-06',
+ b'1970-01-02-034640-07',
+ b'1970-01-02-034640-08',
+ b'1970-01-02-034640-09',
+ b'1970-01-02-034640-10',
+ b'latest'),
tuple(sorted(x[0] for x in vfs.contents(repo, revlist))))

@wvtest
diff --git a/lib/bup/test/vfs.py b/lib/bup/test/vfs.py
index a4d7484b..db2e9f4f 100644
--- a/lib/bup/test/vfs.py
+++ b/lib/bup/test/vfs.py
@@ -24,12 +24,12 @@ def tree_items(repo, oid):
m = maybe_meta()
if m and m.size is None:
m.size = 0
- yield TreeDictValue(name='.', oid=oid, meta=m)
+ yield TreeDictValue(name=b'.', oid=oid, meta=m)
tree_ents = vfs.ordered_tree_entries(tree_data, bupm=True)
for name, mangled_name, kind, gitmode, sub_oid in tree_ents:
- if mangled_name == '.bupm':
+ if mangled_name == b'.bupm':
continue
- assert name != '.'
+ assert name != b'.'
if S_ISDIR(gitmode):
if kind == BUP_CHUNKED:
yield TreeDictValue(name=name, oid=sub_oid,
--
2.24.1

Rob Browning

Feb 8, 2020, 2:26:19 PM
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 2 +-
cmd/damage-cmd.py | 54 +++++++++++++++++++++++------------------------
2 files changed, 27 insertions(+), 29 deletions(-)

diff --git a/Makefile b/Makefile
index 74b41c1e..89bbffbf 100644
--- a/Makefile
+++ b/Makefile
@@ -170,6 +170,7 @@ runtests-python: all t/tmp
cmdline_tests := \
t/test-argv \
t/test-compression.sh \
+ t/test-fsck.sh \
t/test-index-clear.sh \
t/test-ls \
t/test-ls-remote \
@@ -191,7 +192,6 @@ ifeq "2" "$(bup_python_majver)"
t/test-fuse.sh \
t/test-drecurse.sh \
t/test-cat-file.sh \
- t/test-fsck.sh \
t/test-index-check-device.sh \
t/test-meta.sh \
t/test-on.sh \
diff --git a/cmd/damage-cmd.py b/cmd/damage-cmd.py
index ca2e98c7..07f0e03b 100755
--- a/cmd/damage-cmd.py
+++ b/cmd/damage-cmd.py
@@ -9,15 +9,13 @@ from __future__ import absolute_import
import sys, os, random

from bup import options
-from bup.compat import range
+from bup.compat import argv_bytes, bytes_from_uint, range
from bup.helpers import log
+from bup.io import path_msg


def randblock(n):
- l = []
- for i in range(n):
- l.append(chr(random.randrange(0,256)))
- return ''.join(l)
+ return b''.join(bytes_from_uint(random.randrange(0,256)) for i in range(n))


optspec = """
@@ -40,27 +38,27 @@ if opt.seed != None:
random.seed(opt.seed)

for name in extra:
- log('Damaging "%s"...\n' % name)
- f = open(name, 'r+b')
- st = os.fstat(f.fileno())
- size = st.st_size
- if opt.percent or opt.size:
- ms1 = int(float(opt.percent or 0)/100.0*size) or size
- ms2 = opt.size or size
- maxsize = min(ms1, ms2)
- else:
- maxsize = 1
- chunks = opt.num or 10
- chunksize = size/chunks
- for r in range(chunks):
- sz = random.randrange(1, maxsize+1)
- if sz > size:
- sz = size
- if opt.equal:
- ofs = r*chunksize
+ name = argv_bytes(name)
+ log('Damaging "%s"...\n' % path_msg(name))
+ with open(name, 'r+b') as f:
+ st = os.fstat(f.fileno())
+ size = st.st_size
+ if opt.percent or opt.size:
+ ms1 = int(float(opt.percent or 0)/100.0*size) or size
+ ms2 = opt.size or size
+ maxsize = min(ms1, ms2)
else:
- ofs = random.randrange(0, size - sz + 1)
- log(' %6d bytes at %d\n' % (sz, ofs))
- f.seek(ofs)
- f.write(randblock(sz))
- f.close()
+ maxsize = 1
+ chunks = opt.num or 10
+ chunksize = size // chunks
+ for r in range(chunks):
+ sz = random.randrange(1, maxsize+1)
+ if sz > size:
+ sz = size
+ if opt.equal:
+ ofs = r*chunksize
+ else:
+ ofs = random.randrange(0, size - sz + 1)
+ log(' %6d bytes at %d\n' % (sz, ofs))
+ f.seek(ofs)
+ f.write(randblock(sz))
--
2.24.1

Rob Browning

Feb 8, 2020, 2:26:19 PM
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
cmd/cat-file-cmd.py | 15 ++++++++++-----
1 file changed, 10 insertions(+), 5 deletions(-)

diff --git a/cmd/cat-file-cmd.py b/cmd/cat-file-cmd.py
index 9cb6961f..3f776a28 100755
--- a/cmd/cat-file-cmd.py
+++ b/cmd/cat-file-cmd.py
@@ -9,7 +9,9 @@ from __future__ import absolute_import
import re, stat, sys

from bup import options, git, vfs
+from bup.compat import argv_bytes
from bup.helpers import chunkyreader, handle_ctrl_c, log, saved_errors
+from bup.io import byte_stream
from bup.repo import LocalRepo

optspec = """
@@ -33,9 +35,9 @@ if len(extra) > 1:
if opt.bupm and opt.meta:
o.fatal('--meta and --bupm are incompatible')

-target = extra[0]
+target = argv_bytes(extra[0])

-if not re.match(r'/*[^/]+/[^/]+', target):
+if not re.match(br'/*[^/]+/[^/]+', target):
o.fatal("path %r doesn't include a branch and revision" % target)

repo = LocalRepo()
@@ -48,21 +50,24 @@ if not leaf_item:

mode = vfs.item_mode(leaf_item)

+sys.stdout.flush()
+out = byte_stream(sys.stdout)
+
if opt.bupm:
if not stat.S_ISDIR(mode):
o.fatal('%r is not a directory' % target)
_, bupm_oid = vfs.tree_data_and_bupm(repo, leaf_item.oid)
if bupm_oid:
with vfs.tree_data_reader(repo, bupm_oid) as meta_stream:
- sys.stdout.write(meta_stream.read())
+ out.write(meta_stream.read())
elif opt.meta:
augmented = vfs.augment_item_meta(repo, leaf_item, include_size=True)
- sys.stdout.write(augmented.meta.encode())
+ out.write(augmented.meta.encode())
else:
if stat.S_ISREG(mode):
with vfs.fopen(repo, leaf_item) as f:
for b in chunkyreader(f):
- sys.stdout.write(b)
+ out.write(b)
else:
o.fatal('%r is not a plain file' % target)

--
2.24.1

Rob Browning

Feb 8, 2020, 2:26:19 PM
to bup-...@googlegroups.com
This includes the remainder of the patches referred to by

Subject: Very prelimiary, potential python 3 support
Message-Id: <875zhq7...@trouble.defaultvalue.org>

incorporating all of the outstanding python 3 related changes.

Thanks in particular to Johannes Berg for all the help reviewing and
testing them.

Rob Browning (47):
tvfs: accommodate python 3 and test there
Adjust server and client to accommodate python 3
tag-cmd: accommodate python 3
tresolve: accommodate python 3 and test there
Test test-argv test-compression test-index-clear test-tz w/python 3
Adjust columnate for python 3; enable test-ls test-ls-remote
damage-cmd: adjust for python 3 and enable test-fsck
subtree-hash: accommodate python 3
ns-timestamp-resolutions: accommodate python 3
cat-file-cmd: accommodate python 3
Adjust --exclude* parsing and drecurse-cmd for python 3 and test
test-cat-file test-command-without-init-fails: test with python 3
list-idx-cmd: adjust for python 3 and enable test-list-idx
hardlink-sets: accommodate python 3
unknown-owner: accommodate python 3
Adjust split-cmd join-cmd margin-cmd for python 3; test-split-join
test-packsizelimit: test with python 3
test-meta: accommodate python 3
Adjust restore-cmd for python 3 and then enable test-meta
sparse-test-data: accommodate python 3
data-size: accommodate python 3
test-sparse-files: accommodate python 3 and test there
Adjust on-cmd on--server-cmd mux-cmd DemuxConn for python 3
Enable test-index test-restore-single-file for python 3
Test test-restore-single-file test-save-creates-no-unrefs w/python 3
Enable test-rm-between-index-and-save for python 3
Adjust resolve-parent for python 3 and enable test-save-*
Adjust rm-cmd and bup.rm for python 3 and enable test-rm
test-gc: enable for python 3
helpers.exo: only report non-empty stderr
helpers: add close_fds to exo
import-duplicity-cmd: adjust for python 3 and enable test
test-main: enable for python 3
test-rdiff-backup: enable for python 3
test-on: enable for python 3
prune-older: accommodate python 3 and test there
ftp: accommodate python 3 and test there
Adjust memtest and random_sha for python 3 and enable test.sh
fuse: adjust for python 3 and test there
get: adjust for python 3 and test there
hashsplit: replace join_bytes with cat_bytes
pwdgrp: pass strings to python for python 3
pwdgrp: check for bytes with isinstance everywhere
id-other-than: accommodate python 3
Enable test-index-check-device test-xdev for python 3
test-restore-map-owner: accommodate python 3 and test there
test-web: mark as unresolved with respect to python 3

Makefile | 110 ++---
cmd/cat-file-cmd.py | 15 +-
cmd/damage-cmd.py | 54 +-
cmd/drecurse-cmd.py | 10 +-
cmd/ftp-cmd.py | 81 +--
cmd/fuse-cmd.py | 50 +-
cmd/import-duplicity-cmd.py | 66 +--
cmd/join-cmd.py | 13 +-
cmd/list-idx-cmd.py | 25 +-
cmd/margin-cmd.py | 25 +-
cmd/memtest-cmd.py | 49 +-
cmd/mux-cmd.py | 10 +-
cmd/on--server-cmd.py | 18 +-
cmd/on-cmd.py | 23 +-
cmd/prune-older-cmd.py | 26 +-
cmd/restore-cmd.py | 67 +--
cmd/rm-cmd.py | 4 +-
cmd/server-cmd.py | 115 ++---
cmd/split-cmd.py | 41 +-
cmd/tag-cmd.py | 50 +-
lib/bup/_helpers.c | 47 +-
lib/bup/compat.py | 25 +-
lib/bup/hashsplit.py | 9 +-
lib/bup/helpers.py | 101 ++--
lib/bup/pwdgrp.py | 28 +-
lib/bup/rm.py | 42 +-
lib/bup/ssh.py | 33 +-
lib/bup/t/tclient.py | 51 +-
lib/bup/t/thelpers.py | 8 +-
lib/bup/t/tresolve.py | 279 +++++------
lib/bup/t/tvfs.py | 233 ++++-----
lib/bup/test/vfs.py | 6 +-
t/data-size | 6 +-
t/hardlink-sets | 20 +-
t/id-other-than | 14 +-
t/lib.sh | 2 +-
t/ns-timestamp-resolutions | 12 +-
t/sparse-test-data | 21 +-
t/subtree-hash | 25 +-
t/test-ftp | 170 +++----
t/test-fuse.sh | 4 +-
t/test-get | 957 ++++++++++++++++++------------------
t/test-ls | 8 +-
t/test-meta.sh | 11 +-
t/test-prune-older | 134 ++---
t/test-restore-map-owner.sh | 6 +-
t/test-sparse-files.sh | 2 +-
t/unknown-owner | 6 +-
48 files changed, 1675 insertions(+), 1437 deletions(-)

--
2.24.1

Rob Browning

Feb 8, 2020, 2:26:20 PM
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Makefile b/Makefile
index 3aea558d..99305dae 100644
--- a/Makefile
+++ b/Makefile
@@ -184,6 +184,7 @@ cmdline_tests := \
t/test-ls-remote \
t/test-main.sh \
t/test-meta.sh \
+ t/test-on.sh \
t/test-packsizelimit \
t/test-redundant-saves.sh \
t/test-restore-single-file.sh \
@@ -205,7 +206,6 @@ ifeq "2" "$(bup_python_majver)"
t/test-web.sh \
t/test-fuse.sh \
t/test-index-check-device.sh \
- t/test-on.sh \
t/test-restore-map-owner.sh \
t/test-xdev.sh \
t/test.sh
--
2.24.1

Rob Browning

Feb 8, 2020, 2:26:20 PM
to bup-...@googlegroups.com
Run test-split-join for python 3 after finishing the relevant
adjustments.

Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 2 +-
cmd/join-cmd.py | 13 ++++++++++---
cmd/margin-cmd.py | 25 +++++++++++++++----------
cmd/split-cmd.py | 41 ++++++++++++++++++++++++++---------------
4 files changed, 52 insertions(+), 29 deletions(-)

diff --git a/Makefile b/Makefile
index 46a4d33e..d2c9ab5c 100644
--- a/Makefile
+++ b/Makefile
@@ -178,6 +178,7 @@ cmdline_tests := \
t/test-list-idx.sh \
t/test-ls \
t/test-ls-remote \
+ t/test-split-join.sh \
t/test-tz.sh

ifeq "2" "$(bup_python_majver)"
@@ -191,7 +192,6 @@ ifeq "2" "$(bup_python_majver)"
t/test-gc.sh \
t/test-main.sh \
t/test-index.sh \
- t/test-split-join.sh \
t/test-fuse.sh \
t/test-index-check-device.sh \
t/test-meta.sh \
diff --git a/cmd/join-cmd.py b/cmd/join-cmd.py
index 68c31eed..48bebe88 100755
--- a/cmd/join-cmd.py
+++ b/cmd/join-cmd.py
@@ -9,7 +9,9 @@ from __future__ import absolute_import
import sys

from bup import git, options
+from bup.compat import argv_bytes
from bup.helpers import linereader, log
+from bup.io import byte_stream
from bup.repo import LocalRepo, RemoteRepo


@@ -21,11 +23,15 @@ o= output filename
"""
o = options.Options(optspec)
(opt, flags, extra) = o.parse(sys.argv[1:])
+if opt.remote:
+ opt.remote = argv_bytes(opt.remote)

git.check_repo_or_die()

+stdin = byte_stream(sys.stdin)
+
if not extra:
- extra = linereader(sys.stdin)
+ extra = linereader(stdin)

ret = 0
repo = RemoteRepo(opt.remote) if opt.remote else LocalRepo()
@@ -33,9 +39,10 @@ repo = RemoteRepo(opt.remote) if opt.remote else LocalRepo()
if opt.o:
outfile = open(opt.o, 'wb')
else:
- outfile = sys.stdout
+ sys.stdout.flush()
+ outfile = byte_stream(sys.stdout)

-for ref in extra:
+for ref in [argv_bytes(x) for x in extra]:
try:
for blob in repo.join(ref):
outfile.write(blob)
diff --git a/cmd/margin-cmd.py b/cmd/margin-cmd.py
index 7eba4484..14e7cd71 100755
--- a/cmd/margin-cmd.py
+++ b/cmd/margin-cmd.py
@@ -5,11 +5,12 @@ exec "$bup_python" "$0" ${1+"$@"}
"""
# end of bup preamble

-from __future__ import absolute_import, print_function
+from __future__ import absolute_import
import sys, struct, math

from bup import options, git, _helpers
from bup.helpers import log
+from bup.io import byte_stream

POPULATION_OF_EARTH=6.7e9 # as of September, 2010

@@ -27,29 +28,33 @@ if extra:

git.check_repo_or_die()

-mi = git.PackIdxList(git.repo('objects/pack'), ignore_midx=opt.ignore_midx)
+mi = git.PackIdxList(git.repo(b'objects/pack'), ignore_midx=opt.ignore_midx)

-def do_predict(ix):
+def do_predict(ix, out):
total = len(ix)
maxdiff = 0
for count,i in enumerate(ix):
prefix = struct.unpack('!Q', i[:8])[0]
- expected = prefix * total / (1<<64)
+ expected = prefix * total // (1 << 64)
diff = count - expected
maxdiff = max(maxdiff, abs(diff))
- print('%d of %d (%.3f%%) ' % (maxdiff, len(ix), maxdiff*100.0/len(ix)))
- sys.stdout.flush()
+ out.write(b'%d of %d (%.3f%%) '
+ % (maxdiff, len(ix), maxdiff * 100.0 / len(ix)))
+ out.flush()
assert(count+1 == len(ix))

+sys.stdout.flush()
+out = byte_stream(sys.stdout)
+
if opt.predict:
if opt.ignore_midx:
for pack in mi.packs:
- do_predict(pack)
+ do_predict(pack, out)
else:
- do_predict(mi)
+ do_predict(mi, out)
else:
# default mode: find longest matching prefix
- last = '\0'*20
+ last = b'\0'*20
longmatch = 0
for i in mi:
if i == last:
@@ -58,7 +63,7 @@ else:
pm = _helpers.bitmatch(last, i)
longmatch = max(longmatch, pm)
last = i
- print(longmatch)
+ out.write(b'%d\n' % longmatch)
log('%d matching prefix bits\n' % longmatch)
doublings = math.log(len(mi), 2)
bpd = longmatch / doublings
diff --git a/cmd/split-cmd.py b/cmd/split-cmd.py
index 31950ec2..bb4cf2e6 100755
--- a/cmd/split-cmd.py
+++ b/cmd/split-cmd.py
@@ -6,13 +6,16 @@ exec "$bup_python" "$0" ${1+"$@"}
# end of bup preamble

from __future__ import absolute_import, division, print_function
+from binascii import hexlify
import os, sys, time

from bup import hashsplit, git, options, client
+from bup.compat import argv_bytes, environ
from bup.helpers import (add_error, handle_ctrl_c, hostname, log, parse_num,
qprogress, reprogress, saved_errors,
valid_save_name,
parse_date_or_fatal)
+from bup.io import byte_stream
from bup.pwdgrp import userfullname, username


@@ -47,6 +50,9 @@ handle_ctrl_c()

o = options.Options(optspec)
(opt, flags, extra) = o.parse(sys.argv[1:])
+if opt.name: opt.name = argv_bytes(opt.name)
+if opt.remote: opt.remote = argv_bytes(opt.remote)
+if opt.verbose is None: opt.verbose = 0

if not (opt.blobs or opt.tree or opt.commit or opt.name or
opt.noop or opt.copy):
@@ -93,14 +99,14 @@ def prog(filenum, nbytes):
qprogress('Splitting: %d kbytes\r' % (total_bytes // 1024))


-is_reverse = os.environ.get('BUP_SERVER_REVERSE')
+is_reverse = environ.get(b'BUP_SERVER_REVERSE')
if is_reverse and opt.remote:
o.fatal("don't use -r in reverse mode; it's automatic")
start_time = time.time()

if opt.name and not valid_save_name(opt.name):
- o.fatal("'%s' is not a valid branch name." % opt.name)
-refname = opt.name and 'refs/heads/%s' % opt.name or None
+ o.fatal("'%r' is not a valid branch name." % opt.name)
+refname = opt.name and b'refs/heads/%s' % opt.name or None

if opt.noop or opt.copy:
cli = pack_writer = oldref = None
@@ -119,6 +125,8 @@ else:
max_pack_size=max_pack_size,
max_pack_objects=max_pack_objects)

+input = byte_stream(sys.stdin)
+
if opt.git_ids:
# the input is actually a series of git object ids that we should retrieve
# and split.
@@ -134,10 +142,10 @@ if opt.git_ids:
self.it = iter(it)
def read(self, size):
v = next(self.it, None)
- return v or ''
+ return v or b''
def read_ids():
while 1:
- line = sys.stdin.readline()
+ line = input.readline()
if not line:
break
if line:
@@ -152,22 +160,25 @@ if opt.git_ids:
files = read_ids()
else:
# the input either comes from a series of files or from stdin.
- files = extra and (open(fn) for fn in extra) or [sys.stdin]
+ files = extra and (open(argv_bytes(fn), 'rb') for fn in extra) or [input]

if pack_writer:
new_blob = pack_writer.new_blob
new_tree = pack_writer.new_tree
elif opt.blobs or opt.tree:
# --noop mode
- new_blob = lambda content: git.calc_hash('blob', content)
- new_tree = lambda shalist: git.calc_hash('tree', git.tree_encode(shalist))
+ new_blob = lambda content: git.calc_hash(b'blob', content)
+ new_tree = lambda shalist: git.calc_hash(b'tree', git.tree_encode(shalist))
+
+sys.stdout.flush()
+out = byte_stream(sys.stdout)

if opt.blobs:
shalist = hashsplit.split_to_blobs(new_blob, files,
keep_boundaries=opt.keep_boundaries,
progress=prog)
for (sha, size, level) in shalist:
- print(sha.encode('hex'))
+ out.write(hexlify(sha) + b'\n')
reprogress()
elif opt.tree or opt.commit or opt.name:
if opt.name: # insert dummy_name which may be used as a restore target
@@ -175,7 +186,7 @@ elif opt.tree or opt.commit or opt.name:
hashsplit.split_to_blob_or_tree(new_blob, new_tree, files,
keep_boundaries=opt.keep_boundaries,
progress=prog)
- splitfile_name = git.mangle_name('data', hashsplit.GIT_MODE_FILE, mode)
+ splitfile_name = git.mangle_name(b'data', hashsplit.GIT_MODE_FILE, mode)
shalist = [(mode, splitfile_name, sha)]
else:
shalist = hashsplit.split_to_shalist(
@@ -198,15 +209,15 @@ else:
if opt.verbose:
log('\n')
if opt.tree:
- print(tree.encode('hex'))
+ out.write(hexlify(tree) + b'\n')
if opt.commit or opt.name:
- msg = 'bup split\n\nGenerated by command:\n%r\n' % sys.argv
- ref = opt.name and ('refs/heads/%s' % opt.name) or None
- userline = '%s <%s@%s>' % (userfullname(), username(), hostname())
+ msg = b'bup split\n\nGenerated by command:\n%r\n' % sys.argv
+ ref = opt.name and (b'refs/heads/%s' % opt.name) or None
+ userline = b'%s <%s@%s>' % (userfullname(), username(), hostname())
commit = pack_writer.new_commit(tree, oldref, userline, date, None,
userline, date, None, msg)
if opt.commit:
- print(commit.encode('hex'))
+ out.write(hexlify(commit) + b'\n')

if pack_writer:
pack_writer.close() # must close before we can update the ref
--
2.24.1

Rob Browning

Feb 8, 2020, 2:26:20 PM
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
t/data-size | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/t/data-size b/t/data-size
index 0c7a4f9b..ce94f786 100755
--- a/t/data-size
+++ b/t/data-size
@@ -5,7 +5,7 @@ exec "$bup_python" "$0" ${1+"$@"}
"""
# end of bup preamble

-from __future__ import absolute_import
+from __future__ import absolute_import, print_function

from os.path import getsize, isdir
from sys import argv, stderr
@@ -15,7 +15,7 @@ def listdir_failure(ex):
raise ex

def usage():
- print >> stderr, "Usage: data-size PATH ..."
+ print('Usage: data-size PATH ...', file=sys.stderr)

total = 0
for path in argv[1:]:
@@ -25,4 +25,4 @@ for path in argv[1:]:
else:
total += getsize(path)

-print total
+print(total)
--
2.24.1

Rob Browning

Feb 8, 2020, 2:26:20 PM
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 2 +-
cmd/list-idx-cmd.py | 25 +++++++++++++++----------
2 files changed, 16 insertions(+), 11 deletions(-)

diff --git a/Makefile b/Makefile
index 385fddde..46a4d33e 100644
--- a/Makefile
+++ b/Makefile
@@ -175,6 +175,7 @@ cmdline_tests := \
t/test-drecurse.sh \
t/test-fsck.sh \
t/test-index-clear.sh \
+ t/test-list-idx.sh \
t/test-ls \
t/test-ls-remote \
t/test-tz.sh
@@ -189,7 +190,6 @@ ifeq "2" "$(bup_python_majver)"
t/test-rm.sh \
t/test-gc.sh \
t/test-main.sh \
- t/test-list-idx.sh \
t/test-index.sh \
t/test-split-join.sh \
t/test-fuse.sh \
diff --git a/cmd/list-idx-cmd.py b/cmd/list-idx-cmd.py
index 90753de4..78bb0a00 100755
--- a/cmd/list-idx-cmd.py
+++ b/cmd/list-idx-cmd.py
@@ -6,11 +6,13 @@ exec "$bup_python" "$0" ${1+"$@"}
# end of bup preamble

from __future__ import absolute_import, print_function
+from binascii import hexlify, unhexlify
import sys, os

from bup import git, options
+from bup.compat import argv_bytes
from bup.helpers import add_error, handle_ctrl_c, log, qprogress, saved_errors
-
+from bup.io import byte_stream

optspec = """
bup list-idx [--find=<prefix>] <idxfilenames...>
@@ -21,7 +23,8 @@ o = options.Options(optspec)
(opt, flags, extra) = o.parse(sys.argv[1:])

handle_ctrl_c()
-opt.find = opt.find or ''
+
+opt.find = argv_bytes(opt.find) if opt.find else b''

if not extra:
o.fatal('you must provide at least one filename')
@@ -30,32 +33,34 @@ if len(opt.find) > 40:
o.fatal('--find parameter must be <= 40 chars long')
else:
if len(opt.find) % 2:
- s = opt.find + '0'
+ s = opt.find + b'0'
else:
s = opt.find
try:
- bin = s.decode('hex')
+ bin = unhexlify(s)
except TypeError:
o.fatal('--find parameter is not a valid hex string')

+sys.stdout.flush()
+out = byte_stream(sys.stdout)
find = opt.find.lower()
-
count = 0
-for name in extra:
+idxfiles = [argv_bytes(x) for x in extra]
+for name in idxfiles:
try:
ix = git.open_idx(name)
except git.GitError as e:
- add_error('%s: %s' % (name, e))
+ add_error('%r: %s' % (name, e))
continue
if len(opt.find) == 40:
if ix.exists(bin):
- print(name, find)
+ out.write(b'%s %s\n' % (name, find))
else:
# slow, exhaustive search
for _i in ix:
- i = str(_i).encode('hex')
+ i = hexlify(_i)
if i.startswith(find):
- print(name, i)
+ out.write(b'%s %s\n' % (name, i))
qprogress('Searching: %d\r' % count)
count += 1

--
2.24.1

Rob Browning

Feb 8, 2020, 2:26:20 PM
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
t/subtree-hash | 25 +++++++++++++++----------
1 file changed, 15 insertions(+), 10 deletions(-)

diff --git a/t/subtree-hash b/t/subtree-hash
index 062f4858..1ca9e869 100755
--- a/t/subtree-hash
+++ b/t/subtree-hash
@@ -5,10 +5,12 @@ exec "$bup_python" "$0" ${1+"$@"}
"""
# end of bup preamble

-from __future__ import absolute_import
-import os, sys
+from __future__ import absolute_import, print_function
+import sys

+from bup.compat import argv_bytes
from bup.helpers import handle_ctrl_c, readpipe
+from bup.io import byte_stream
from bup import options


@@ -25,22 +27,23 @@ o = options.Options(optspec)
if len(extra) < 1:
o.fatal('must specify a root hash')

-tree_hash = extra[0]
-path = extra[1:]
+tree_hash = argv_bytes(extra[0])
+path = [argv_bytes(x) for x in extra[1:]]

while path:
target_name = path[0]
- subtree_items = readpipe(['git', 'ls-tree', '-z', tree_hash])
+ subtree_items = readpipe([b'git', b'ls-tree', b'-z', tree_hash])
target_hash = None
- for entry in subtree_items.split('\0'):
+ for entry in subtree_items.split(b'\0'):
if not entry:
break
- info, name = entry.split('\t', 1)
+ info, name = entry.split(b'\t', 1)
if name == target_name:
- _, _, target_hash = info.split(' ')
+ _, _, target_hash = info.split(b' ')
break
if not target_hash:
- print >> sys.stderr, "Can't find %r in %s" % (target_name, tree_hash)
+ print("Can't find %r in %s" % (target_name, tree_hash.decode('ascii')),
+ file=sys.stderr)
break
tree_hash = target_hash
path = path[1:]
@@ -48,4 +51,6 @@ while path:
if path:
sys.exit(1)

-print tree_hash
+sys.stdout.flush()
+out = byte_stream(sys.stdout)
+out.write(tree_hash + b'\n')
--
2.24.1

Rob Browning

Feb 8, 2020, 2:26:20 PM
to bup-...@googlegroups.com
Python 3's getpwnam and getgrnam functions only accept unicode
strings, so decode the bytes we have as iso-8859-1, which is what they
should be, given bup-python's LC_CTYPE override.

Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
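(Not part of the patch -- an illustrative sketch of the
decode-before-lookup pattern described above. The helper name
getpwnam_bytes is hypothetical; the final assertion just demonstrates
the lossless iso-8859-1 round trip the change relies on.)

    import pwd, sys

    def getpwnam_bytes(name):
        # bup passes names around as bytes; Python 3's pwd.getpwnam()
        # only accepts str, so decode as iso-8859-1 first.
        assert isinstance(name, bytes)
        if sys.version_info[0] > 2:
            # iso-8859-1 maps bytes 0x00-0xff one-to-one onto
            # U+0000-U+00FF, so any byte sequence survives the
            # decode/encode round trip unchanged.
            return pwd.getpwnam(name.decode('iso-8859-1'))
        return pwd.getpwnam(name)

    # The round-trip property the change depends on:
    raw = bytes(bytearray(range(256)))
    assert raw.decode('iso-8859-1').encode('iso-8859-1') == raw

    # Example use (assumes a 'root' account exists):
    print(getpwnam_bytes(b'root').pw_uid)
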
lib/bup/pwdgrp.py | 8 ++++++--
1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/lib/bup/pwdgrp.py b/lib/bup/pwdgrp.py
index cb8ccf36..485b4292 100644
--- a/lib/bup/pwdgrp.py
+++ b/lib/bup/pwdgrp.py
@@ -37,7 +37,9 @@ def getpwuid(uid):
return _passwd_from_py(pwd.getpwuid(uid))

def getpwnam(name):
- return _passwd_from_py(pwd.getpwnam(name))
+ assert isinstance(name, bytes)
+ return _passwd_from_py(pwd.getpwnam(name.decode('iso-8859-1') if py_maj > 2
+ else name))


class Group:
@@ -63,7 +65,9 @@ def getgrgid(uid):
return _group_from_py(grp.getgrgid(uid))

def getgrnam(name):
- return _group_from_py(grp.getgrnam(name))
+ assert isinstance(name, bytes)
+ return _group_from_py(grp.getgrnam(name.decode('iso-8859-1') if py_maj > 2
+ else name))


_uid_to_pwd_cache = {}
--
2.24.1

Rob Browning

Feb 8, 2020, 2:26:20 PM
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
lib/bup/helpers.py | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/lib/bup/helpers.py b/lib/bup/helpers.py
index 38fbc244..a5c5165b 100644
--- a/lib/bup/helpers.py
+++ b/lib/bup/helpers.py
@@ -327,8 +327,9 @@ def exo(cmd,
preexec_fn=preexec_fn)
out, err = p.communicate(input)
if check and p.returncode != 0:
- raise Exception('subprocess %r failed with status %d, stderr: %r'
- % (b' '.join(map(quote, cmd)), p.returncode, err))
+ raise Exception('subprocess %r failed with status %d%s'
+ % (b' '.join(map(quote, cmd)), p.returncode,
+ ', stderr: %r' % err if err else ''))
return out, err, p

def readpipe(argv, preexec_fn=None, shell=False):
--
2.24.1

Rob Browning

Feb 8, 2020, 2:26:20 PM
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 2 +-
cmd/restore-cmd.py | 67 ++++++++++++++++++++++++++--------------------
2 files changed, 39 insertions(+), 30 deletions(-)

diff --git a/Makefile b/Makefile
index 0bab1231..4a920497 100644
--- a/Makefile
+++ b/Makefile
@@ -178,6 +178,7 @@ cmdline_tests := \
t/test-list-idx.sh \
t/test-ls \
t/test-ls-remote \
+ t/test-meta.sh \
t/test-packsizelimit \
t/test-split-join.sh \
t/test-tz.sh
@@ -194,7 +195,6 @@ ifeq "2" "$(bup_python_majver)"
t/test-index.sh \
t/test-fuse.sh \
t/test-index-check-device.sh \
- t/test-meta.sh \
t/test-on.sh \
t/test-restore-map-owner.sh \
t/test-restore-single-file.sh \
diff --git a/cmd/restore-cmd.py b/cmd/restore-cmd.py
index 0edbdeff..a5993638 100755
--- a/cmd/restore-cmd.py
+++ b/cmd/restore-cmd.py
@@ -5,16 +5,17 @@ exec "$bup_python" "$0" ${1+"$@"}
"""
# end of bup preamble

-from __future__ import absolute_import, print_function
+from __future__ import absolute_import
from stat import S_ISDIR
import copy, errno, os, sys, stat, re

from bup import options, git, metadata, vfs
from bup._helpers import write_sparsely
-from bup.compat import wrap_main
+from bup.compat import argv_bytes, fsencode, wrap_main
from bup.helpers import (add_error, chunkyreader, die_if_errors, handle_ctrl_c,
log, mkdirp, parse_rx_excludes, progress, qprogress,
saved_errors, should_rx_exclude_path, unlink)
+from bup.io import byte_stream
from bup.repo import LocalRepo, RemoteRepo


@@ -38,30 +39,34 @@ q,quiet don't show progress meter
total_restored = 0

# stdout should be flushed after each line, even when not connected to a tty
+stdoutfd = sys.stdout.fileno()
sys.stdout.flush()
-sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 1)
+sys.stdout = os.fdopen(stdoutfd, 'w', 1)
+out = byte_stream(sys.stdout)

def valid_restore_path(path):
path = os.path.normpath(path)
- if path.startswith('/'):
+ if path.startswith(b'/'):
path = path[1:]
- if '/' in path:
+ if b'/' in path:
return True

def parse_owner_mappings(type, options, fatal):
"""Traverse the options and parse all --map-TYPEs, or call Option.fatal()."""
opt_name = '--map-' + type
- value_rx = r'^([^=]+)=([^=]*)$'
if type in ('uid', 'gid'):
- value_rx = r'^(-?[0-9]+)=(-?[0-9]+)$'
+ value_rx = re.compile(br'^(-?[0-9]+)=(-?[0-9]+)$')
+ else:
+ value_rx = re.compile(br'^([^=]+)=([^=]*)$')
owner_map = {}
for flag in options:
(option, parameter) = flag
if option != opt_name:
continue
- match = re.match(value_rx, parameter)
+ parameter = argv_bytes(parameter)
+ match = value_rx.match(parameter)
if not match:
- raise fatal("couldn't parse %s as %s mapping" % (parameter, type))
+ raise fatal("couldn't parse %r as %s mapping" % (parameter, type))
old_id, new_id = match.groups()
if type in ('uid', 'gid'):
old_id = int(old_id)
@@ -114,7 +119,7 @@ def hardlink_if_possible(fullname, item, top, hardlinks):

target = item.meta.hardlink_target
assert(target)
- assert(fullname.startswith('/'))
+ assert(fullname.startswith(b'/'))
target_versions = hardlinks.get(target)
if target_versions:
# Check every path in the set that we've written so far for a match.
@@ -155,9 +160,9 @@ def restore(repo, parent_path, name, item, top, sparse, numeric_ids, owner_map,
global total_restored
mode = vfs.item_mode(item)
treeish = S_ISDIR(mode)
- fullname = parent_path + '/' + name
+ fullname = parent_path + b'/' + name
# Match behavior of index --exclude-rx with respect to paths.
- if should_rx_exclude_path(fullname + ('/' if treeish else ''),
+ if should_rx_exclude_path(fullname + (b'/' if treeish else b''),
exclude_rxs):
return

@@ -169,14 +174,14 @@ def restore(repo, parent_path, name, item, top, sparse, numeric_ids, owner_map,

if stat.S_ISDIR(mode):
if verbosity >= 1:
- print('%s/' % fullname)
+ out.write(b'%s/\n' % fullname)
elif stat.S_ISLNK(mode):
assert(meta.symlink_target)
if verbosity >= 2:
- print('%s@ -> %s' % (fullname, meta.symlink_target))
+ out.write(b'%s@ -> %s\n' % (fullname, meta.symlink_target))
else:
if verbosity >= 2:
- print(fullname)
+ out.write(fullname + b'\n')

orig_cwd = os.getcwd()
try:
@@ -184,7 +189,7 @@ def restore(repo, parent_path, name, item, top, sparse, numeric_ids, owner_map,
# Assumes contents() returns '.' with the full metadata first
sub_items = vfs.contents(repo, item, want_meta=True)
dot, item = next(sub_items, None)
- assert(dot == '.')
+ assert(dot == b'.')
item = vfs.augment_item_meta(repo, item, include_size=True)
meta = item.meta
meta.create_path(name)
@@ -196,7 +201,7 @@ def restore(repo, parent_path, name, item, top, sparse, numeric_ids, owner_map,
restore(repo, fullname, sub_name, sub_item, top, sparse,
numeric_ids, owner_map, exclude_rxs, verbosity,
hardlinks)
- os.chdir('..')
+ os.chdir(b'..')
apply_metadata(meta, name, numeric_ids, owner_map)
else:
created_hardlink = False
@@ -221,7 +226,11 @@ def restore(repo, parent_path, name, item, top, sparse, numeric_ids, owner_map,
def main():
o = options.Options(optspec)
opt, flags, extra = o.parse(sys.argv[1:])
- verbosity = opt.verbose if not opt.quiet else -1
+ verbosity = (opt.verbose or 0) if not opt.quiet else -1
+ if opt.remote:
+ opt.remote = argv_bytes(opt.remote)
+ if opt.outdir:
+ opt.outdir = argv_bytes(opt.outdir)

git.check_repo_or_die()

@@ -239,9 +248,9 @@ def main():
os.chdir(opt.outdir)

repo = RemoteRepo(opt.remote) if opt.remote else LocalRepo()
- top = os.getcwd()
+ top = fsencode(os.getcwd())
hardlinks = {}
- for path in extra:
+ for path in [argv_bytes(x) for x in extra]:
if not valid_restore_path(path):
add_error("path %r doesn't include a branch and revision" % path)
continue
@@ -250,16 +259,16 @@ def main():
except vfs.IOError as e:
add_error(e)
continue
- if len(resolved) == 3 and resolved[2][0] == 'latest':
+ if len(resolved) == 3 and resolved[2][0] == b'latest':
# Follow latest symlink to the actual save
try:
- resolved = vfs.resolve(repo, 'latest', parent=resolved[:-1],
+ resolved = vfs.resolve(repo, b'latest', parent=resolved[:-1],
want_meta=True)
except vfs.IOError as e:
add_error(e)
continue
# Rename it back to 'latest'
- resolved = tuple(elt if i != 2 else ('latest',) + elt[1:]
+ resolved = tuple(elt if i != 2 else (b'latest',) + elt[1:]
for i, elt in enumerate(resolved))
path_parent, path_name = os.path.split(path)
leaf_name, leaf_item = resolved[-1]
@@ -268,7 +277,7 @@ def main():
% ('/'.join(name for name, item in resolved),
path))
continue
- if not path_name or path_name == '.':
+ if not path_name or path_name == b'.':
# Source is /foo/what/ever/ or /foo/what/ever/. -- extract
# what/ever/* to the current directory, and if name == '.'
# (i.e. /foo/what/ever/.), then also restore what/ever's
@@ -279,18 +288,18 @@ def main():
else:
items = vfs.contents(repo, leaf_item, want_meta=True)
dot, leaf_item = next(items, None)
- assert(dot == '.')
+ assert dot == b'.'
for sub_name, sub_item in items:
- restore(repo, '', sub_name, sub_item, top,
+ restore(repo, b'', sub_name, sub_item, top,
opt.sparse, opt.numeric_ids, owner_map,
exclude_rxs, verbosity, hardlinks)
- if path_name == '.':
+ if path_name == b'.':
leaf_item = vfs.augment_item_meta(repo, leaf_item,
include_size=True)
- apply_metadata(leaf_item.meta, '.',
+ apply_metadata(leaf_item.meta, b'.',
opt.numeric_ids, owner_map)
else:
- restore(repo, '', leaf_name, leaf_item, top,
+ restore(repo, b'', leaf_name, leaf_item, top,
opt.sparse, opt.numeric_ids, owner_map,
exclude_rxs, verbosity, hardlinks)

--
2.24.1

Rob Browning

Feb 8, 2020, 2:26:20 PM
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 2 +-
cmd/rm-cmd.py | 4 +++-
lib/bup/rm.py | 42 +++++++++++++++++++++---------------------
3 files changed, 25 insertions(+), 23 deletions(-)

diff --git a/Makefile b/Makefile
index 27885dcd..67cbca2b 100644
--- a/Makefile
+++ b/Makefile
@@ -183,6 +183,7 @@ cmdline_tests := \
t/test-packsizelimit \
t/test-redundant-saves.sh \
t/test-restore-single-file.sh \
+ t/test-rm.sh \
t/test-rm-between-index-and-save.sh \
t/test-save-creates-no-unrefs.sh \
t/test-save-restore \
@@ -198,7 +199,6 @@ ifeq "2" "$(bup_python_majver)"
t/test-ftp \
t/test-prune-older \
t/test-web.sh \
- t/test-rm.sh \
t/test-gc.sh \
t/test-main.sh \
t/test-fuse.sh \
diff --git a/cmd/rm-cmd.py b/cmd/rm-cmd.py
index 3464e3c3..c0f7e55b 100755
--- a/cmd/rm-cmd.py
+++ b/cmd/rm-cmd.py
@@ -8,6 +8,7 @@ exec "$bup_python" "$0" ${1+"$@"}
from __future__ import absolute_import
import sys

+from bup.compat import argv_bytes
from bup.git import check_repo_or_die
from bup.options import Options
from bup.helpers import die_if_errors, handle_ctrl_c, log
@@ -35,5 +36,6 @@ if len(extra) < 1:

check_repo_or_die()
repo = LocalRepo()
-bup_rm(repo, extra, compression=opt.compress, verbosity=opt.verbose)
+bup_rm(repo, [argv_bytes(x) for x in extra],
+ compression=opt.compress, verbosity=opt.verbose)
die_if_errors()
diff --git a/lib/bup/rm.py b/lib/bup/rm.py
index e43abc5b..3bc2d833 100644
--- a/lib/bup/rm.py
+++ b/lib/bup/rm.py
@@ -1,5 +1,6 @@

from __future__ import absolute_import
+from binascii import hexlify, unhexlify
import sys

from bup import compat, git, vfs
@@ -7,13 +8,13 @@ from bup.client import ClientError
from bup.compat import hexstr
from bup.git import get_commit_items
from bup.helpers import add_error, die_if_errors, log, saved_errors
-
+from bup.io import path_msg

def append_commit(hash, parent, cp, writer):
ci = get_commit_items(hash, cp)
- tree = ci.tree.decode('hex')
- author = '%s <%s>' % (ci.author_name, ci.author_mail)
- committer = '%s <%s>' % (ci.committer_name, ci.committer_mail)
+ tree = unhexlify(ci.tree)
+ author = b'%s <%s>' % (ci.author_name, ci.author_mail)
+ committer = b'%s <%s>' % (ci.committer_name, ci.committer_mail)
c = writer.new_commit(tree, parent,
author, ci.author_sec, ci.author_offset,
committer, ci.committer_sec, ci.committer_offset,
@@ -23,7 +24,7 @@ def append_commit(hash, parent, cp, writer):

def filter_branch(tip_commit_hex, exclude, writer):
# May return None if everything is excluded.
- commits = [x.decode('hex') for x in git.rev_list(tip_commit_hex)]
+ commits = [unhexlify(x) for x in git.rev_list(tip_commit_hex)]
commits.reverse()
last_c, tree = None, None
# Rather than assert that we always find an exclusion here, we'll
@@ -31,13 +32,12 @@ def filter_branch(tip_commit_hex, exclude, writer):
first_exclusion = next(i for i, c in enumerate(commits) if exclude(c))
if first_exclusion != 0:
last_c = commits[first_exclusion - 1]
- tree = get_commit_items(last_c.encode('hex'),
- git.cp()).tree.decode('hex')
+ tree = unhexlify(get_commit_items(hexlify(last_c), git.cp()).tree)
commits = commits[first_exclusion:]
for c in commits:
if exclude(c):
continue
- last_c, tree = append_commit(c.encode('hex'), last_c, git.cp(), writer)
+ last_c, tree = append_commit(hexlify(c), last_c, git.cp(), writer)
return last_c

def commit_oid(item):
@@ -53,7 +53,7 @@ def rm_saves(saves, writer):
assert(branch == first_branch_item)
rm_commits = frozenset([commit_oid(save) for save, branch in saves])
orig_tip = commit_oid(first_branch_item)
- new_tip = filter_branch(orig_tip.encode('hex'),
+ new_tip = filter_branch(hexlify(orig_tip),
lambda x: x in rm_commits,
writer)
assert(orig_tip)
@@ -76,16 +76,16 @@ def dead_items(repo, paths):
else:
leaf_name, leaf_item = resolved[-1]
if not leaf_item:
- add_error('error: cannot access %r in %r'
- % ('/'.join(name for name, item in resolved),
- path))
+ add_error('error: cannot access %s in %s'
+ % (path_msg(b'/'.join(name for name, item in resolved)),
+ path_msg(path)))
continue
if isinstance(leaf_item, vfs.RevList): # rm /foo
branchname = leaf_name
dead_branches[branchname] = leaf_item
dead_saves.pop(branchname, None) # rm /foo obviates rm /foo/bar
elif isinstance(leaf_item, vfs.Commit): # rm /foo/bar
- if leaf_name == 'latest':
+ if leaf_name == b'latest':
add_error("error: cannot delete 'latest' symlink")
else:
branchname, branchitem = resolved[-2]
@@ -93,7 +93,7 @@ def dead_items(repo, paths):
dead = leaf_item, branchitem
dead_saves.setdefault(branchname, []).append(dead)
else:
- add_error("don't know how to remove %r yet" % path)
+ add_error("don't know how to remove %s yet" % path_msg(path))
if saved_errors:
return None, None
return dead_branches, dead_saves
@@ -106,7 +106,7 @@ def bup_rm(repo, paths, compression=6, verbosity=None):
updated_refs = {} # ref_name -> (original_ref, tip_commit(bin))

for branchname, branchitem in compat.items(dead_branches):
- ref = 'refs/heads/' + branchname
+ ref = b'refs/heads/' + branchname
assert(not ref in updated_refs)
updated_refs[ref] = (branchitem.oid, None)

@@ -115,7 +115,7 @@ def bup_rm(repo, paths, compression=6, verbosity=None):
try:
for branch, saves in compat.items(dead_saves):
assert(saves)
- updated_refs['refs/heads/' + branch] = rm_saves(saves, writer)
+ updated_refs[b'refs/heads/' + branch] = rm_saves(saves, writer)
except:
if writer:
writer.abort()
@@ -132,18 +132,18 @@ def bup_rm(repo, paths, compression=6, verbosity=None):
orig_ref, new_ref = info
try:
if not new_ref:
- git.delete_ref(ref_name, orig_ref.encode('hex'))
+ git.delete_ref(ref_name, hexlify(orig_ref))
else:
git.update_ref(ref_name, new_ref, orig_ref)
if verbosity:
- log('updated %r (%s%s)\n'
- % (ref_name,
+ log('updated %s (%s%s)\n'
+ % (path_msg(ref_name),
hexstr(orig_ref) + ' -> ' if orig_ref else '',
hexstr(new_ref)))
except (git.GitError, ClientError) as ex:
if new_ref:
- add_error('while trying to update %r (%s%s): %s'
- % (ref_name,
+ add_error('while trying to update %s (%s%s): %s'
+ % (path_msg(ref_name),
hexstr(orig_ref) + ' -> ' if orig_ref else '',
hexstr(new_ref),
ex))
--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
t/test-meta.sh | 11 ++++++-----
1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/t/test-meta.sh b/t/test-meta.sh
index e3b79e39..f2510656 100755
--- a/t/test-meta.sh
+++ b/t/test-meta.sh
@@ -438,10 +438,11 @@ src/foo/3"

tmpdir="$(WVPASS wvmktempdir)" || exit $?

+ # FIXME: binary groups
first_group="$(WVPASS bup-python -c 'import os,grp; \
- print grp.getgrgid(os.getgroups()[0])[0]')" || exit $?
+ print(grp.getgrgid(os.getgroups()[0])[0])')" || exit $?
last_group="$(bup-python -c 'import os,grp; \
- print grp.getgrgid(os.getgroups()[-1])[0]')" || exit $?
+ print(grp.getgrgid(os.getgroups()[-1])[0])')" || exit $?
last_group_erx="$(escape-erx "$last_group")"

WVSTART 'metadata (restoration of ownership)'
@@ -679,8 +680,8 @@ if [ "$root_status" = root ]; then
WVPASS touch "$testfs"/src/bar
WVPASS bup-python -c "from bup import xstat; \
x = xstat.timespec_to_nsecs((42, 0));\
- xstat.utime('$testfs/src/foo', (x, x));\
- xstat.utime('$testfs/src/bar', (x, x));"
+ xstat.utime(b'$testfs/src/foo', (x, x));\
+ xstat.utime(b'$testfs/src/bar', (x, x));"
WVPASS cd "$testfs"
WVPASS bup meta -v --create --recurse --file src.meta src
WVPASS bup meta -tvf src.meta
@@ -741,7 +742,7 @@ if [ "$root_status" = root ]; then
WVPASS cd "$testfs_limited"/src-restore
WVFAIL bup meta --extract --file "$testfs"/src.meta
WVFAIL bup meta --extract --file "$testfs"/src.meta 2>&1 \
- | WVPASS grep -e "^xattr\.set '" \
+ | WVPASS grep -e "^xattr\.set u\?'" \
| WVPASS bup-python -c \
'import sys; exit(not len(sys.stdin.readlines()) == 2)'
) || exit $?
--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
t/sparse-test-data | 21 ++++++++++++---------
1 file changed, 12 insertions(+), 9 deletions(-)

diff --git a/t/sparse-test-data b/t/sparse-test-data
index a0ec17f1..8f982e00 100755
--- a/t/sparse-test-data
+++ b/t/sparse-test-data
@@ -4,10 +4,12 @@ bup_python="$(dirname "$0")/../cmd/bup-python" || exit $?
exec "$bup_python" "$0" ${1+"$@"}
"""

+from __future__ import absolute_import, print_function
from random import randint
from sys import stderr, stdout
import sys

+from bup.io import byte_stream

def smaller_region(max_offset):
start = randint(0, max_offset)
@@ -44,13 +46,14 @@ def random_region():
global generators
return generators[randint(0, len(generators) - 1)]()

-
-out = stdout
-
-if len(sys.argv) == 2:
+if len(sys.argv) == 1:
+    stdout.flush()
+    out = byte_stream(stdout)
+elif len(sys.argv) == 2:
out = open(sys.argv[1], 'wb')
-elif len(sys.argv):
- print >> stderr, "Usage: sparse-test-data [FILE]"
+else:
+ print('Usage: sparse-test-data [FILE]', file=stderr)
+ sys.exit(2)

bup_read_size = 2 ** 16
bup_min_sparse_len = 512
@@ -81,14 +84,14 @@ if sparse[1][1] != out_size:
sparse_offsets.append(out_size)

# Now sparse_offsets indicates where to start/stop zero runs
-data = 'x'
+data = b'x'
pos = 0
-print >> stderr, 'offsets:', sparse_offsets
+print('offsets:', sparse_offsets, file=stderr)
for offset in sparse_offsets:
count = offset - pos
- print >> stderr, 'write:', 'x' if data == 'x' else '0', count
+    print('write:', 'x' if data == b'x' else '0', count, file=stderr)
out.write(data * (offset - pos))
pos += count
- data = '\0' if data == 'x' else 'x'
+ data = b'\0' if data == b'x' else b'x'

out.close()
--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 2 +-
cmd/import-duplicity-cmd.py | 66 +++++++++++++++++++++----------------
2 files changed, 39 insertions(+), 29 deletions(-)

diff --git a/Makefile b/Makefile
index db033c9a..6ea5207b 100644
--- a/Makefile
+++ b/Makefile
@@ -175,6 +175,7 @@ cmdline_tests := \
t/test-drecurse.sh \
t/test-fsck.sh \
t/test-gc.sh \
+ t/test-import-duplicity.sh \
t/test-index.sh \
t/test-index-clear.sh \
t/test-list-idx.sh \
@@ -205,7 +206,6 @@ ifeq "2" "$(bup_python_majver)"
t/test-index-check-device.sh \
t/test-on.sh \
t/test-restore-map-owner.sh \
- t/test-import-duplicity.sh \
t/test-import-rdiff-backup.sh \
t/test-xdev.sh \
t/test.sh
diff --git a/cmd/import-duplicity-cmd.py b/cmd/import-duplicity-cmd.py
index 565aca62..45666efa 100755
--- a/cmd/import-duplicity-cmd.py
+++ b/cmd/import-duplicity-cmd.py
@@ -10,11 +10,18 @@ from calendar import timegm
from pipes import quote
from subprocess import check_call
from time import strftime, strptime
+import os
import sys
import tempfile

-from bup import git, options
-from bup.helpers import handle_ctrl_c, log, readpipe, saved_errors, unlink
+from bup import git, helpers, options
+from bup.compat import argv_bytes, str_type
+from bup.helpers import (handle_ctrl_c,
+ log,
+ readpipe,
+ shstr,
+ saved_errors,
+ unlink)
import bup.path

optspec = """
@@ -23,13 +30,8 @@ bup import-duplicity [-n] <duplicity-source-url> <bup-save-name>
n,dry-run don't do anything; just print what would be done
"""

-
def logcmd(cmd):
- if isinstance(cmd, basestring):
- log(cmd + '\n')
- else:
- log(' '.join(map(quote, cmd)) + '\n')
-
+ log(shstr(cmd).decode('iso-8859-1', errors='replace') + '\n')

def exc(cmd, shell=False):
global opt
@@ -37,11 +39,16 @@ def exc(cmd, shell=False):
if not opt.dry_run:
check_call(cmd, shell=shell)

-def exo(cmd, shell=False):
+def exo(cmd, shell=False, preexec_fn=None, close_fds=True):
global opt
logcmd(cmd)
if not opt.dry_run:
- return readpipe(cmd, shell=shell)
+ return helpers.exo(cmd, shell=shell, preexec_fn=preexec_fn,
+ close_fds=close_fds)[0]
+
+def redirect_dup_output():
+ os.dup2(1, 3)
+    os.dup2(2, 1)


handle_ctrl_c()
@@ -59,39 +66,42 @@ if len(extra) > 2:
o.fatal('too many arguments')

source_url, save_name = extra
+source_url = argv_bytes(source_url)
+save_name = argv_bytes(save_name)
bup = bup.path.exe()

git.check_repo_or_die()

-tmpdir = tempfile.mkdtemp(prefix='bup-import-dup-')
+tmpdir = tempfile.mkdtemp(prefix=b'bup-import-dup-')
try:
- dup = ['duplicity', '--archive-dir', tmpdir + '/dup-cache']
- restoredir = tmpdir + '/restore'
- tmpidx = tmpdir + '/index'
+ dup = [b'duplicity', b'--archive-dir', tmpdir + b'/dup-cache']
+ restoredir = tmpdir + b'/restore'
+ tmpidx = tmpdir + b'/index'
+
collection_status = \
- exo(' '.join(map(quote, dup))
- + ' collection-status --log-fd=3 %s 3>&1 1>&2' % quote(source_url),
- shell=True)
+ exo(dup + [b'collection-status', b'--log-fd=3', source_url],
+ close_fds=False, preexec_fn=redirect_dup_output) # i.e. 3>&1 1>&2
# Duplicity output lines of interest look like this (one leading space):
# full 20150222T073111Z 1 noenc
# inc 20150222T073233Z 1 noenc
dup_timestamps = []
for line in collection_status.splitlines():
- if line.startswith(' inc '):
- assert(len(line) >= len(' inc 20150222T073233Z'))
+ if line.startswith(b' inc '):
+ assert(len(line) >= len(b' inc 20150222T073233Z'))
dup_timestamps.append(line[5:21])
- elif line.startswith(' full '):
- assert(len(line) >= len(' full 20150222T073233Z'))
+ elif line.startswith(b' full '):
+ assert(len(line) >= len(b' full 20150222T073233Z'))
dup_timestamps.append(line[6:22])
for i, dup_ts in enumerate(dup_timestamps):
- tm = strptime(dup_ts, '%Y%m%dT%H%M%SZ')
- exc(['rm', '-rf', restoredir])
- exc(dup + ['restore', '-t', dup_ts, source_url, restoredir])
- exc([bup, 'index', '-uxf', tmpidx, restoredir])
- exc([bup, 'save', '--strip', '--date', str(timegm(tm)), '-f', tmpidx,
- '-n', save_name, restoredir])
+ tm = strptime(dup_ts.decode('ascii'), '%Y%m%dT%H%M%SZ')
+ exc([b'rm', b'-rf', restoredir])
+ exc(dup + [b'restore', b'-t', dup_ts, source_url, restoredir])
+ exc([bup, b'index', b'-uxf', tmpidx, restoredir])
+ exc([bup, b'save', b'--strip', b'--date', b'%d' % timegm(tm),
+ b'-f', tmpidx, b'-n', save_name, restoredir])
+ sys.stderr.flush()
finally:
- exc(['rm', '-rf', tmpdir])
+ exc([b'rm', b'-rf', tmpdir])

if saved_errors:
log('warning: %d errors encountered\n' % len(saved_errors))
--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Makefile b/Makefile
index 7fc522cc..1651d30d 100644
--- a/Makefile
+++ b/Makefile
@@ -183,6 +183,7 @@ cmdline_tests := \
t/test-packsizelimit \
t/test-redundant-saves.sh \
t/test-restore-single-file.sh \
+ t/test-rm-between-index-and-save.sh \
t/test-save-creates-no-unrefs.sh \
t/test-sparse-files.sh \
t/test-split-join.sh \
@@ -201,7 +202,6 @@ ifeq "2" "$(bup_python_majver)"
t/test-index-check-device.sh \
t/test-on.sh \
t/test-restore-map-owner.sh \
- t/test-rm-between-index-and-save.sh \
t/test-save-with-valid-parent.sh \
t/test-save-restore-excludes.sh \
t/test-save-strip-graft.sh \
--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
Make the typical bytes-related adjustments to bup-server, bup.client,
and bup.ssh and enable the tclient tests with python 3.

Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 2 +-
cmd/server-cmd.py | 115 ++++++++++++++++++++++---------------------
lib/bup/helpers.py | 14 +++---
lib/bup/ssh.py | 33 +++++++------
lib/bup/t/tclient.py | 51 +++++++++----------
5 files changed, 110 insertions(+), 105 deletions(-)

diff --git a/Makefile b/Makefile
index 24d75faa..1423e13f 100644
--- a/Makefile
+++ b/Makefile
@@ -145,6 +145,7 @@ runtests: runtests-python runtests-cmdline

python_tests := \
lib/bup/t/tbloom.py \
+ lib/bup/t/tclient.py \
lib/bup/t/tgit.py \
lib/bup/t/thashsplit.py \
lib/bup/t/thelpers.py \
@@ -158,7 +159,6 @@ python_tests := \

ifeq "2" "$(bup_python_majver)"
python_tests += \
- lib/bup/t/tclient.py \
lib/bup/t/tresolve.py
endif

diff --git a/cmd/server-cmd.py b/cmd/server-cmd.py
index b1b77297..acd70077 100755
--- a/cmd/server-cmd.py
+++ b/cmd/server-cmd.py
@@ -6,13 +6,15 @@ exec "$bup_python" "$0" ${1+"$@"}
# end of bup preamble

from __future__ import absolute_import
+from binascii import hexlify, unhexlify
import os, sys, struct, subprocess

from bup import options, git, vfs, vint
-from bup.compat import hexstr
+from bup.compat import environ, hexstr
from bup.git import MissingObject
from bup.helpers import (Conn, debug1, debug2, linereader, lines_until_sentinel,
log)
+from bup.io import byte_stream, path_msg
from bup.repo import LocalRepo


@@ -21,13 +23,13 @@ dumb_server_mode = False
repo = None

def do_help(conn, junk):
- conn.write('Commands:\n %s\n' % '\n '.join(sorted(commands)))
+ conn.write(b'Commands:\n %s\n' % b'\n '.join(sorted(commands)))
conn.ok()


def _set_mode():
global dumb_server_mode
- dumb_server_mode = os.path.exists(git.repo('bup-dumb-server'))
+ dumb_server_mode = os.path.exists(git.repo(b'bup-dumb-server'))
debug1('bup server: serving in %s mode\n'
% (dumb_server_mode and 'dumb' or 'smart'))

@@ -44,14 +46,14 @@ def _init_session(reinit_with_new_repopath=None):
repo = LocalRepo()
# OK. we now know the path is a proper repository. Record this path in the
# environment so that subprocesses inherit it and know where to operate.
- os.environ['BUP_DIR'] = git.repodir
- debug1('bup server: bupdir is %r\n' % git.repodir)
+ environ[b'BUP_DIR'] = git.repodir
+ debug1('bup server: bupdir is %s\n' % path_msg(git.repodir))
_set_mode()


def init_dir(conn, arg):
git.init_repo(arg)
- debug1('bup server: bupdir initialized: %r\n' % git.repodir)
+ debug1('bup server: bupdir initialized: %s\n' % path_msg(git.repodir))
_init_session(arg)
conn.ok()

@@ -63,20 +65,20 @@ def set_dir(conn, arg):

def list_indexes(conn, junk):
_init_session()
- suffix = ''
+ suffix = b''
if dumb_server_mode:
- suffix = ' load'
- for f in os.listdir(git.repo('objects/pack')):
- if f.endswith('.idx'):
- conn.write('%s%s\n' % (f, suffix))
+ suffix = b' load'
+ for f in os.listdir(git.repo(b'objects/pack')):
+ if f.endswith(b'.idx'):
+ conn.write(b'%s%s\n' % (f, suffix))
conn.ok()


def send_index(conn, name):
_init_session()
- assert(name.find('/') < 0)
- assert(name.endswith('.idx'))
- idx = git.open_idx(git.repo('objects/pack/%s' % name))
+ assert name.find(b'/') < 0
+ assert name.endswith(b'.idx')
+ idx = git.open_idx(git.repo(b'objects/pack/%s' % name))
conn.write(struct.pack('!I', len(idx.map)))
conn.write(idx.map)
conn.ok()
@@ -107,7 +109,7 @@ def receive_objects_v2(conn, junk):
fullpath = w.close(run_midx=not dumb_server_mode)
if fullpath:
(dir, name) = os.path.split(fullpath)
- conn.write('%s.idx\n' % name)
+ conn.write(b'%s.idx\n' % name)
conn.ok()
return
elif n == 0xffffffff:
@@ -126,14 +128,14 @@ def receive_objects_v2(conn, junk):
oldpack = w.exists(shar, want_source=True)
if oldpack:
assert(not oldpack == True)
- assert(oldpack.endswith('.idx'))
+ assert(oldpack.endswith(b'.idx'))
(dir,name) = os.path.split(oldpack)
if not (name in suggested):
debug1("bup server: suggesting index %s\n"
- % git.shorten_hash(name))
+ % git.shorten_hash(name).decode('ascii'))
debug1("bup server: because of object %s\n"
% hexstr(shar))
- conn.write('index %s\n' % name)
+ conn.write(b'index %s\n' % name)
suggested.add(name)
continue
nw, crc = w._raw_write((buf,), sha=shar)
@@ -150,7 +152,7 @@ def _check(w, expected, actual, msg):
def read_ref(conn, refname):
_init_session()
r = git.read_ref(refname)
- conn.write('%s\n' % (r or '').encode('hex'))
+    conn.write(b'%s\n' % (hexlify(r) if r else b''))
conn.ok()


@@ -158,7 +160,7 @@ def update_ref(conn, refname):
_init_session()
newval = conn.readline().strip()
oldval = conn.readline().strip()
- git.update_ref(refname, newval.decode('hex'), oldval.decode('hex'))
+ git.update_ref(refname, unhexlify(newval), unhexlify(oldval))
conn.ok()

def join(conn, id):
@@ -169,42 +171,42 @@ def join(conn, id):
conn.write(blob)
except KeyError as e:
log('server: error: %s\n' % e)
- conn.write('\0\0\0\0')
+ conn.write(b'\0\0\0\0')
conn.error(e)
else:
- conn.write('\0\0\0\0')
+ conn.write(b'\0\0\0\0')
conn.ok()

def cat_batch(conn, dummy):
_init_session()
cat_pipe = git.cp()
# For now, avoid potential deadlock by just reading them all
- for ref in tuple(lines_until_sentinel(conn, '\n', Exception)):
+ for ref in tuple(lines_until_sentinel(conn, b'\n', Exception)):
ref = ref[:-1]
it = cat_pipe.get(ref)
info = next(it)
if not info[0]:
- conn.write('missing\n')
+ conn.write(b'missing\n')
continue
- conn.write('%s %s %d\n' % info)
+ conn.write(b'%s %s %d\n' % info)
for buf in it:
conn.write(buf)
conn.ok()

def refs(conn, args):
limit_to_heads, limit_to_tags = args.split()
- assert limit_to_heads in ('0', '1')
- assert limit_to_tags in ('0', '1')
+ assert limit_to_heads in (b'0', b'1')
+ assert limit_to_tags in (b'0', b'1')
limit_to_heads = int(limit_to_heads)
limit_to_tags = int(limit_to_tags)
_init_session()
- patterns = tuple(x[:-1] for x in lines_until_sentinel(conn, '\n', Exception))
+ patterns = tuple(x[:-1] for x in lines_until_sentinel(conn, b'\n', Exception))
for name, oid in git.list_refs(patterns=patterns,
limit_to_heads=limit_to_heads,
limit_to_tags=limit_to_tags):
- assert '\n' not in name
- conn.write('%s %s\n' % (oid.encode('hex'), name))
- conn.write('\n')
+ assert b'\n' not in name
+ conn.write(b'%s %s\n' % (hexlify(oid), name))
+ conn.write(b'\n')
conn.ok()

def rev_list(conn, _):
@@ -212,12 +214,12 @@ def rev_list(conn, _):
count = conn.readline()
if not count:
raise Exception('Unexpected EOF while reading rev-list count')
- count = None if count == '\n' else int(count)
+ count = None if count == b'\n' else int(count)
fmt = conn.readline()
if not fmt:
raise Exception('Unexpected EOF while reading rev-list format')
- fmt = None if fmt == '\n' else fmt[:-1]
- refs = tuple(x[:-1] for x in lines_until_sentinel(conn, '\n', Exception))
+ fmt = None if fmt == b'\n' else fmt[:-1]
+ refs = tuple(x[:-1] for x in lines_until_sentinel(conn, b'\n', Exception))
args = git.rev_list_invocation(refs, count=count, format=fmt)
p = subprocess.Popen(git.rev_list_invocation(refs, count=count, format=fmt),
env=git._gitenv(git.repodir),
@@ -227,7 +229,7 @@ def rev_list(conn, _):
if not out:
break
conn.write(out)
- conn.write('\n')
+ conn.write(b'\n')
rv = p.wait() # not fatal
if rv:
msg = 'git rev-list returned error %d' % rv
@@ -252,10 +254,10 @@ def resolve(conn, args):
except vfs.IOError as ex:
res = ex
if isinstance(res, vfs.IOError):
- conn.write(b'\0') # error
+ conn.write(b'\x00') # error
vfs.write_ioerror(conn, res)
else:
- conn.write(b'\1') # success
+ conn.write(b'\x01') # success
vfs.write_resolution(conn, res)
conn.ok()

@@ -271,36 +273,37 @@ if extra:
debug2('bup server: reading from stdin.\n')

commands = {
- 'quit': None,
- 'help': do_help,
- 'init-dir': init_dir,
- 'set-dir': set_dir,
- 'list-indexes': list_indexes,
- 'send-index': send_index,
- 'receive-objects-v2': receive_objects_v2,
- 'read-ref': read_ref,
- 'update-ref': update_ref,
- 'join': join,
- 'cat': join, # apocryphal alias
- 'cat-batch' : cat_batch,
- 'refs': refs,
- 'rev-list': rev_list,
- 'resolve': resolve
+ b'quit': None,
+ b'help': do_help,
+ b'init-dir': init_dir,
+ b'set-dir': set_dir,
+ b'list-indexes': list_indexes,
+ b'send-index': send_index,
+ b'receive-objects-v2': receive_objects_v2,
+ b'read-ref': read_ref,
+ b'update-ref': update_ref,
+ b'join': join,
+ b'cat': join, # apocryphal alias
+ b'cat-batch' : cat_batch,
+ b'refs': refs,
+ b'rev-list': rev_list,
+ b'resolve': resolve
}

# FIXME: this protocol is totally lame and not at all future-proof.
# (Especially since we abort completely as soon as *anything* bad happens)
-conn = Conn(sys.stdin, sys.stdout)
+sys.stdout.flush()
+conn = Conn(byte_stream(sys.stdin), byte_stream(sys.stdout))
lr = linereader(conn)
for _line in lr:
line = _line.strip()
if not line:
continue
debug1('bup server: command: %r\n' % line)
- words = line.split(' ', 1)
+ words = line.split(b' ', 1)
cmd = words[0]
- rest = len(words)>1 and words[1] or ''
- if cmd == 'quit':
+ rest = len(words)>1 and words[1] or b''
+ if cmd == b'quit':
break
else:
cmd = commands.get(cmd)
diff --git a/lib/bup/helpers.py b/lib/bup/helpers.py
index 35c88cdb..0e907809 100644
--- a/lib/bup/helpers.py
+++ b/lib/bup/helpers.py
@@ -112,7 +112,7 @@ def lines_until_sentinel(f, sentinel, ex_type):
# sentinel must end with \n and must contain only one \n
while True:
line = f.readline()
- if not (line and line.endswith('\n')):
+ if not (line and line.endswith(b'\n')):
raise ex_type('Hit EOF while reading line')
if line == sentinel:
return
@@ -483,23 +483,23 @@ class BaseConn:

def ok(self):
"""Indicate end of output from last sent command."""
- self.write('\nok\n')
+ self.write(b'\nok\n')

def error(self, s):
"""Indicate server error to the client."""
- s = re.sub(r'\s+', ' ', str(s))
- self.write('\nerror %s\n' % s)
+ s = re.sub(br'\s+', b' ', s)
+ self.write(b'\nerror %s\n' % s)

def _check_ok(self, onempty):
self.outp.flush()
- rl = ''
+ rl = b''
for rl in linereader(self):
#log('%d got line: %r\n' % (os.getpid(), rl))
if not rl: # empty line
continue
- elif rl == 'ok':
+ elif rl == b'ok':
return None
- elif rl.startswith('error '):
+ elif rl.startswith(b'error '):
#log('client: error: %s\n' % rl[6:])
return NotOk(rl[6:])
else:
diff --git a/lib/bup/ssh.py b/lib/bup/ssh.py
index 5602921b..de0448d0 100644
--- a/lib/bup/ssh.py
+++ b/lib/bup/ssh.py
@@ -4,17 +4,18 @@ Connect to a remote host via SSH and execute a command on the host.

from __future__ import absolute_import, print_function
import sys, os, re, subprocess
-from bup import helpers, path

+from bup import helpers, path
+from bup.compat import environ

def connect(rhost, port, subcmd, stderr=None):
"""Connect to 'rhost' and execute the bup subcommand 'subcmd' on it."""
- assert(not re.search(r'[^\w-]', subcmd))
- nicedir = re.sub(r':', "_", path.exedir())
- if rhost == '-':
+ assert not re.search(br'[^\w-]', subcmd)
+ nicedir = re.sub(b':', b'_', path.exedir())
+ if rhost == b'-':
rhost = None
if not rhost:
- argv = ['bup', subcmd]
+ argv = [b'bup', subcmd]
else:
# WARNING: shell quoting security holes are possible here, so we
# have to be super careful. We have to use 'sh -c' because
@@ -23,23 +24,23 @@ def connect(rhost, port, subcmd, stderr=None):
# can't exec *safely* using argv, because *both* ssh and 'sh -c'
# allow shellquoting. So we end up having to double-shellquote
# stuff here.
- escapedir = re.sub(r'([^\w/])', r'\\\\\\\1', nicedir)
- buglvl = helpers.atoi(os.environ.get('BUP_DEBUG'))
- force_tty = helpers.atoi(os.environ.get('BUP_FORCE_TTY'))
- cmd = r"""
+ escapedir = re.sub(br'([^\w/])', br'\\\\\\\1', nicedir)
+ buglvl = helpers.atoi(environ.get(b'BUP_DEBUG'))
+ force_tty = helpers.atoi(environ.get(b'BUP_FORCE_TTY'))
+ cmd = b"""
sh -c PATH=%s:'$PATH BUP_DEBUG=%s BUP_FORCE_TTY=%s bup %s'
""" % (escapedir, buglvl, force_tty, subcmd)
- argv = ['ssh']
+ argv = [b'ssh']
if port:
- argv.extend(('-p', port))
- argv.extend((rhost, '--', cmd.strip()))
+ argv.extend((b'-p', port))
+ argv.extend((rhost, b'--', cmd.strip()))
#helpers.log('argv is: %r\n' % argv)
if rhost:
- env = os.environ
+ env = environ
else:
- envpath = os.environ.get('PATH')
- env = os.environ.copy()
- env['PATH'] = nicedir if not envpath else nicedir + ':' + envpath
+ envpath = environ.get(b'PATH')
+ env = environ.copy()
+ env[b'PATH'] = nicedir if not envpath else nicedir + b':' + envpath
if sys.version_info[0] < 3:
return subprocess.Popen(argv,
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
diff --git a/lib/bup/t/tclient.py b/lib/bup/t/tclient.py
index 284effcc..afbb09f0 100644
--- a/lib/bup/t/tclient.py
+++ b/lib/bup/t/tclient.py
@@ -5,14 +5,15 @@ import sys, os, stat, time, random, subprocess, glob
from wvtest import *

from bup import client, git, path
+from bup.compat import bytes_from_uint, environ, range
from bup.helpers import mkdirp
from buptest import no_lingering_errors, test_tempdir


def randbytes(sz):
- s = ''
- for i in xrange(sz):
- s += chr(random.randrange(0,256))
+ s = b''
+ for i in range(sz):
+ s += bytes_from_uint(random.randrange(0,256))
return s


@@ -20,14 +21,14 @@ s1 = randbytes(10000)
s2 = randbytes(10000)
s3 = randbytes(10000)

-IDX_PAT = '/*.idx'
+IDX_PAT = b'/*.idx'


@wvtest
def test_server_split_with_indexes():
with no_lingering_errors():
- with test_tempdir('bup-tclient-') as tmpdir:
- os.environ['BUP_DIR'] = bupdir = tmpdir
+ with test_tempdir(b'bup-tclient-') as tmpdir:
+ environ[b'BUP_DIR'] = bupdir = tmpdir
git.init_repo(bupdir)
lw = git.PackWriter()
c = client.Client(bupdir, create=True)
@@ -45,8 +46,8 @@ def test_server_split_with_indexes():
@wvtest
def test_multiple_suggestions():
with no_lingering_errors():
- with test_tempdir('bup-tclient-') as tmpdir:
- os.environ['BUP_DIR'] = bupdir = tmpdir
+ with test_tempdir(b'bup-tclient-') as tmpdir:
+ environ[b'BUP_DIR'] = bupdir = tmpdir
git.init_repo(bupdir)

lw = git.PackWriter()
@@ -55,7 +56,7 @@ def test_multiple_suggestions():
lw = git.PackWriter()
lw.new_blob(s2)
lw.close()
- WVPASSEQ(len(glob.glob(git.repo('objects/pack'+IDX_PAT))), 2)
+ WVPASSEQ(len(glob.glob(git.repo(b'objects/pack'+IDX_PAT))), 2)

c = client.Client(bupdir, create=True)
WVPASSEQ(len(glob.glob(c.cachedir+IDX_PAT)), 0)
@@ -80,10 +81,10 @@ def test_multiple_suggestions():
@wvtest
def test_dumb_client_server():
with no_lingering_errors():
- with test_tempdir('bup-tclient-') as tmpdir:
- os.environ['BUP_DIR'] = bupdir = tmpdir
+ with test_tempdir(b'bup-tclient-') as tmpdir:
+ environ[b'BUP_DIR'] = bupdir = tmpdir
git.init_repo(bupdir)
- open(git.repo('bup-dumb-server'), 'w').close()
+ open(git.repo(b'bup-dumb-server'), 'w').close()

lw = git.PackWriter()
lw.new_blob(s1)
@@ -102,8 +103,8 @@ def test_dumb_client_server():
@wvtest
def test_midx_refreshing():
with no_lingering_errors():
- with test_tempdir('bup-tclient-') as tmpdir:
- os.environ['BUP_DIR'] = bupdir = tmpdir
+ with test_tempdir(b'bup-tclient-') as tmpdir:
+ environ[b'BUP_DIR'] = bupdir = tmpdir
git.init_repo(bupdir)
c = client.Client(bupdir, create=True)
rw = c.new_packwriter()
@@ -116,7 +117,7 @@ def test_midx_refreshing():
p2name = os.path.join(c.cachedir, p2base)
del rw

- pi = git.PackIdxList(bupdir + '/objects/pack')
+ pi = git.PackIdxList(bupdir + b'/objects/pack')
WVPASSEQ(len(pi.packs), 2)
pi.refresh()
WVPASSEQ(len(pi.packs), 2)
@@ -129,7 +130,7 @@ def test_midx_refreshing():
WVFAIL(p2.exists(s1sha))
WVPASS(p2.exists(s2sha))

- subprocess.call([path.exe(), 'midx', '-f'])
+ subprocess.call([path.exe(), b'midx', b'-f'])
pi.refresh()
WVPASSEQ(len(pi.packs), 1)
pi.refresh(skip_midx=True)
@@ -142,18 +143,18 @@ def test_midx_refreshing():
def test_remote_parsing():
with no_lingering_errors():
tests = (
- (':/bup', ('file', None, None, '/bup')),
- ('file:///bup', ('file', None, None, '/bup')),
- ('192.168.1.1:/bup', ('ssh', '192.168.1.1', None, '/bup')),
- ('ssh://192.168.1.1:2222/bup', ('ssh', '192.168.1.1', '2222', '/bup')),
- ('ssh://[ff:fe::1]:2222/bup', ('ssh', 'ff:fe::1', '2222', '/bup')),
- ('bup://foo.com:1950', ('bup', 'foo.com', '1950', None)),
- ('bup://foo.com:1950/bup', ('bup', 'foo.com', '1950', '/bup')),
- ('bup://[ff:fe::1]/bup', ('bup', 'ff:fe::1', None, '/bup')),)
+ (b':/bup', (b'file', None, None, b'/bup')),
+ (b'file:///bup', (b'file', None, None, b'/bup')),
+ (b'192.168.1.1:/bup', (b'ssh', b'192.168.1.1', None, b'/bup')),
+ (b'ssh://192.168.1.1:2222/bup', (b'ssh', b'192.168.1.1', b'2222', b'/bup')),
+ (b'ssh://[ff:fe::1]:2222/bup', (b'ssh', b'ff:fe::1', b'2222', b'/bup')),
+ (b'bup://foo.com:1950', (b'bup', b'foo.com', b'1950', None)),
+ (b'bup://foo.com:1950/bup', (b'bup', b'foo.com', b'1950', b'/bup')),
+ (b'bup://[ff:fe::1]/bup', (b'bup', b'ff:fe::1', None, b'/bup')),)
for remote, values in tests:
WVPASSEQ(client.parse_remote(remote), values)
try:
- client.parse_remote('http://asdf.com/bup')
+ client.parse_remote(b'http://asdf.com/bup')
WVFAIL()
except client.ClientError:
WVPASS()
--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 4 ++--
t/test-restore-map-owner.sh | 6 +++---
2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/Makefile b/Makefile
index 32f0a51d..3f709a54 100644
--- a/Makefile
+++ b/Makefile
@@ -192,6 +192,7 @@ cmdline_tests := \
t/test-packsizelimit \
t/test-prune-older \
t/test-redundant-saves.sh \
+ t/test-restore-map-owner.sh \
t/test-restore-single-file.sh \
t/test-rm.sh \
t/test-rm-between-index-and-save.sh \
@@ -207,8 +208,7 @@ cmdline_tests := \

ifeq "2" "$(bup_python_majver)"
cmdline_tests += \
- t/test-web.sh \
- t/test-restore-map-owner.sh
+ t/test-web.sh
endif

tmp-target-run-test-get-%: all t/tmp
diff --git a/t/test-restore-map-owner.sh b/t/test-restore-map-owner.sh
index bf9362e4..84f3b08a 100755
--- a/t/test-restore-map-owner.sh
+++ b/t/test-restore-map-owner.sh
@@ -87,10 +87,10 @@ import grp, pwd
try:
pwd.getpwuid(0)
grp.getgrgid(0)
- print 'yes'
-except KeyError, ex:
+ print('yes')
+except KeyError as ex:
pass
-" 2>/dev/null) || exit $?
+") || exit $?
if [ "$has_uid_gid_0" == yes ]
then
WVSTART "restore --map-user/group/uid/gid (zero uid/gid trumps all)"
--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
Add a C cat_bytes that can concatenate two bytes objects with offsets
and extents. This allows us to have the same implementation for
python 2 and 3, to drop another use of buffer(), and may be handy in
the future, particularly given the expense of getting a buffer offset
in python 3 (i.e. memoryview() adds roughly 200 bytes).
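
For illustration only (not part of the patch), a minimal sketch of how
the new helper behaves, based on its docstring and the hashsplit.py
call site; cat_bytes(x, x_ofs, x_n, y, y_ofs, y_n) copies the two
requested slices into a single new bytes object:

    from bup._helpers import cat_bytes

    x = b'0123456789'
    y = b'abcdef'
    # x[2:2+3] followed by y[0:0+4], concatenated in one C-level copy
    assert cat_bytes(x, 2, 3, y, 0, 4) == b'234abcd'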

Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
lib/bup/_helpers.c | 45 ++++++++++++++++++++++++++++++++++++++++++++
lib/bup/compat.py | 15 ---------------
lib/bup/hashsplit.py | 9 +++++++--
3 files changed, 52 insertions(+), 17 deletions(-)

diff --git a/lib/bup/_helpers.c b/lib/bup/_helpers.c
index 312ecd45..5f13d6a1 100644
--- a/lib/bup/_helpers.c
+++ b/lib/bup/_helpers.c
@@ -273,6 +273,49 @@ static PyObject *bup_bytescmp(PyObject *self, PyObject *args)
}


+static PyObject *bup_cat_bytes(PyObject *self, PyObject *args)
+{
+ unsigned char *bufx = NULL, *bufy = NULL;
+ Py_ssize_t bufx_len, bufx_ofs, bufx_n;
+ Py_ssize_t bufy_len, bufy_ofs, bufy_n;
+ if (!PyArg_ParseTuple(args,
+ rbuf_argf "nn"
+ rbuf_argf "nn",
+ &bufx, &bufx_len, &bufx_ofs, &bufx_n,
+ &bufy, &bufy_len, &bufy_ofs, &bufy_n))
+ return NULL;
+ if (bufx_ofs < 0)
+ return PyErr_Format(PyExc_ValueError, "negative x offset");
+ if (bufx_n < 0)
+ return PyErr_Format(PyExc_ValueError, "negative x extent");
+ if (bufx_ofs > bufx_len)
+ return PyErr_Format(PyExc_ValueError, "x offset greater than length");
+ if (bufx_n > bufx_len - bufx_ofs)
+ return PyErr_Format(PyExc_ValueError, "x extent past end of buffer");
+
+ if (bufy_ofs < 0)
+ return PyErr_Format(PyExc_ValueError, "negative y offset");
+ if (bufy_n < 0)
+ return PyErr_Format(PyExc_ValueError, "negative y extent");
+ if (bufy_ofs > bufy_len)
+ return PyErr_Format(PyExc_ValueError, "y offset greater than length");
+ if (bufy_n > bufy_len - bufy_ofs)
+ return PyErr_Format(PyExc_ValueError, "y extent past end of buffer");
+
+ if (bufy_n > PY_SSIZE_T_MAX - bufx_n)
+ return PyErr_Format(PyExc_OverflowError, "result length too long");
+
+ PyObject *result = PyBytes_FromStringAndSize(NULL, bufx_n + bufy_n);
+ if (!result)
+ return PyErr_NoMemory();
+ char *buf = PyBytes_AS_STRING(result);
+ memcpy(buf, bufx + bufx_ofs, bufx_n);
+ memcpy(buf + bufx_n, bufy + bufy_ofs, bufy_n);
+ return result;
+}
+
+
+
// Probably we should use autoconf or something and set HAVE_PY_GETARGCARGV...
#if __WIN32__ || __CYGWIN__

@@ -1725,6 +1768,8 @@ static PyMethodDef helper_methods[] = {
#endif
{ "bytescmp", bup_bytescmp, METH_VARARGS,
"Return a negative value if x < y, zero if equal, positive otherwise."},
+ { "cat_bytes", bup_cat_bytes, METH_VARARGS,
+ "For (x_bytes, x_ofs, x_n, y_bytes, y_ofs, y_n) arguments, return their concatenation."},
#ifdef BUP_MINCORE_BUF_TYPE
{ "mincore", bup_mincore, METH_VARARGS,
"For mincore(src, src_n, src_off, dest, dest_off)"
diff --git a/lib/bup/compat.py b/lib/bup/compat.py
index 985d8ac4..859898d1 100644
--- a/lib/bup/compat.py
+++ b/lib/bup/compat.py
@@ -70,10 +70,6 @@ if py3:
return memoryview(object)[offset:]
return memoryview(object)

- def join_bytes(*items):
- """Return the concatenated bytes or memoryview arguments as bytes."""
- return b''.join(items)
-
def getcwd():
return fsencode(os.getcwd())

@@ -148,17 +144,6 @@ else: # Python 2

buffer = buffer

- def join_bytes(x, y):
- """Return the concatenated bytes or buffer arguments as bytes."""
- if type(x) == buffer:
- assert type(y) in (bytes, buffer)
- return x + y
- assert type(x) == bytes
- if type(y) == bytes:
- return b''.join((x, y))
- assert type(y) in (bytes, buffer)
- return buffer(x) + y
-

def restore_lc_env():
# Once we're up and running with iso-8859-1, undo the bup-python
diff --git a/lib/bup/hashsplit.py b/lib/bup/hashsplit.py
index dc3a538f..0ae6acdb 100644
--- a/lib/bup/hashsplit.py
+++ b/lib/bup/hashsplit.py
@@ -3,7 +3,8 @@ from __future__ import absolute_import
import io, math, os

from bup import _helpers, compat, helpers
-from bup.compat import buffer, join_bytes
+from bup._helpers import cat_bytes
+from bup.compat import buffer, py_maj
from bup.helpers import sc_page_size


@@ -29,10 +30,14 @@ class Buf:

def put(self, s):
if s:
- self.data = join_bytes(buffer(self.data, self.start), s)
+ remaining = len(self.data) - self.start
+ self.data = cat_bytes(self.data, self.start, remaining,
+ s, 0, len(s))
self.start = 0

def peek(self, count):
+ if count <= 256:
+ return self.data[self.start : self.start + count]
return buffer(self.data, self.start, count)

def eat(self, count):
--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
Tested-by: Rob Browning <r...@defaultvalue.org>
Signed-off-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Makefile b/Makefile
index d2c9ab5c..0bab1231 100644
--- a/Makefile
+++ b/Makefile
@@ -178,6 +178,7 @@ cmdline_tests := \
t/test-list-idx.sh \
t/test-ls \
t/test-ls-remote \
+ t/test-packsizelimit \
t/test-split-join.sh \
t/test-tz.sh

@@ -185,7 +186,6 @@ ifeq "2" "$(bup_python_majver)"
cmdline_tests += \
t/test-ftp \
t/test-save-restore \
- t/test-packsizelimit \
t/test-prune-older \
t/test-web.sh \
t/test-rm.sh \
--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
For now, completely rely on the bup-python LC_CTYPE=iso-8859-1 setting
since, at a minimum, bup.shquote can't handle bytes.
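
As an illustration of the property being relied on (not part of the
patch): ISO-8859-1 maps every byte value 0-255 to the code point with
the same number, so a decode/encode round-trip preserves arbitrary
bytes exactly, which is what lets the str-based ftp code stand in for
bytes handling:

    data = bytes(bytearray(range(256)))  # all 256 byte values
    assert data.decode('iso-8859-1').encode('iso-8859-1') == data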

Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 2 +-
cmd/ftp-cmd.py | 81 ++++++++++++----------
lib/bup/compat.py | 2 +
t/test-ftp | 170 ++++++++++++++++++++++++----------------------
4 files changed, 137 insertions(+), 118 deletions(-)

diff --git a/Makefile b/Makefile
index 67cc4b59..a8541b50 100644
--- a/Makefile
+++ b/Makefile
@@ -174,6 +174,7 @@ cmdline_tests := \
t/test-compression.sh \
t/test-drecurse.sh \
t/test-fsck.sh \
+ t/test-ftp \
t/test-gc.sh \
t/test-import-duplicity.sh \
t/test-import-rdiff-backup.sh \
@@ -202,7 +203,6 @@ cmdline_tests := \

ifeq "2" "$(bup_python_majver)"
cmdline_tests += \
- t/test-ftp \
t/test-web.sh \
t/test-fuse.sh \
t/test-index-check-device.sh \
diff --git a/cmd/ftp-cmd.py b/cmd/ftp-cmd.py
index f1e48bdb..53b8c222 100755
--- a/cmd/ftp-cmd.py
+++ b/cmd/ftp-cmd.py
@@ -5,12 +5,18 @@ exec "$bup_python" "$0" ${1+"$@"}
"""
# end of bup preamble

+# For now, this completely relies on the assumption that the current
+# encoding (LC_CTYPE, etc.) is ASCII compatible, and that it returns
+# the exact same bytes from a decode/encode round-trip, or the reverse
+# (e.g. ISO-8859-1).
+
from __future__ import absolute_import, print_function
import sys, os, stat, fnmatch

from bup import options, git, shquote, ls, vfs
-from bup.io import byte_stream
+from bup.compat import argv_bytes, input
from bup.helpers import chunkyreader, handle_ctrl_c, log
+from bup.io import byte_stream, path_msg
from bup.repo import LocalRepo

handle_ctrl_c()
@@ -20,6 +26,10 @@ class OptionError(Exception):
pass


+def input_bytes(s):
+ return s.encode('iso-8859-1')
+
+
def do_ls(repo, args, out):
try:
opt = ls.opts_from_cmdline(args, onabort=OptionError)
@@ -38,7 +48,7 @@ def inputiter():
if os.isatty(sys.stdin.fileno()):
while 1:
try:
- yield raw_input('bup> ')
+ yield input('bup> ')
except EOFError:
print() # Clear the line for the terminal's next prompt
break
@@ -49,16 +59,16 @@ def inputiter():

def _completer_get_subs(repo, line):
(qtype, lastword) = shquote.unfinished_word(line)
- (dir,name) = os.path.split(lastword)
- dir_path = vfs.resolve(repo, dir or '/')
+ dir, name = os.path.split(lastword.encode('iso-8859-1'))
+ dir_path = vfs.resolve(repo, dir or b'/')
_, dir_item = dir_path[-1]
if not dir_item:
subs = tuple()
else:
subs = tuple(dir_path + (entry,)
for entry in vfs.contents(repo, dir_item)
- if (entry[0] != '.' and entry[0].startswith(name)))
- return dir, name, qtype, lastword, subs
+ if (entry[0] != b'.' and entry[0].startswith(name)))
+ return qtype, lastword, subs


_last_line = None
@@ -72,7 +82,7 @@ def completer(text, iteration):
if _last_line != line:
_last_res = _completer_get_subs(repo, line)
_last_line = line
- (dir, name, qtype, lastword, subs) = _last_res
+ qtype, lastword, subs = _last_res
if iteration < len(subs):
path = subs[iteration]
leaf_name, leaf_item = path[-1]
@@ -80,11 +90,13 @@ def completer(text, iteration):
leaf_name, leaf_item = res[-1]
fullname = os.path.join(*(name for name, item in res))
if stat.S_ISDIR(vfs.item_mode(leaf_item)):
- ret = shquote.what_to_add(qtype, lastword, fullname+'/',
+ ret = shquote.what_to_add(qtype, lastword,
+ fullname.decode('iso-8859-1') + '/',
terminate=False)
else:
- ret = shquote.what_to_add(qtype, lastword, fullname,
- terminate=True) + ' '
+ ret = shquote.what_to_add(qtype, lastword,
+ fullname.decode('iso-8859-1'),
+                                  terminate=True) + ' '
return text + ret
except Exception as e:
log('\n')
@@ -107,7 +119,7 @@ git.check_repo_or_die()
sys.stdout.flush()
out = byte_stream(sys.stdout)
repo = LocalRepo()
-pwd = vfs.resolve(repo, '/')
+pwd = vfs.resolve(repo, b'/')
rv = 0

if extra:
@@ -137,72 +149,73 @@ for line in lines:
try:
if cmd == 'ls':
# FIXME: respect pwd (perhaps via ls accepting resolve path/parent)
- sys.stdout.flush() # FIXME: remove when we finish py3 support
do_ls(repo, words[1:], out)
elif cmd == 'cd':
np = pwd
for parm in words[1:]:
- res = vfs.resolve(repo, parm, parent=np)
+ res = vfs.resolve(repo, input_bytes(parm), parent=np)
_, leaf_item = res[-1]
if not leaf_item:
- raise Exception('%r does not exist'
- % '/'.join(name for name, item in res))
+ raise Exception('%s does not exist'
+ % path_msg(b'/'.join(name for name, item
+ in res)))
if not stat.S_ISDIR(vfs.item_mode(leaf_item)):
- raise Exception('%r is not a directory' % parm)
+ raise Exception('%s is not a directory' % path_msg(parm))
np = res
pwd = np
elif cmd == 'pwd':
if len(pwd) == 1:
- sys.stdout.write('/')
- print('/'.join(name for name, item in pwd))
+ out.write(b'/')
+ out.write(b'/'.join(name for name, item in pwd) + b'\n')
elif cmd == 'cat':
for parm in words[1:]:
- res = vfs.resolve(repo, parm, parent=pwd)
+ res = vfs.resolve(repo, input_bytes(parm), parent=pwd)
_, leaf_item = res[-1]
if not leaf_item:
- raise Exception('%r does not exist' %
- '/'.join(name for name, item in res))
+ raise Exception('%s does not exist' %
+ path_msg(b'/'.join(name for name, item
+ in res)))
with vfs.fopen(repo, leaf_item) as srcfile:
- write_to_file(srcfile, sys.stdout)
+ write_to_file(srcfile, out)
elif cmd == 'get':
if len(words) not in [2,3]:
rv = 1
raise Exception('Usage: get <filename> [localname]')
- rname = words[1]
+ rname = input_bytes(words[1])
(dir,base) = os.path.split(rname)
- lname = len(words)>2 and words[2] or base
+ lname = input_bytes(len(words) > 2 and words[2] or base)
res = vfs.resolve(repo, rname, parent=pwd)
_, leaf_item = res[-1]
if not leaf_item:
- raise Exception('%r does not exist' %
- '/'.join(name for name, item in res))
+ raise Exception('%s does not exist' %
+ path_msg(b'/'.join(name for name, item in res)))
with vfs.fopen(repo, leaf_item) as srcfile:
with open(lname, 'wb') as destfile:
- log('Saving %r\n' % lname)
+ log('Saving %s\n' % path_msg(lname))
write_to_file(srcfile, destfile)
elif cmd == 'mget':
for parm in words[1:]:
- (dir,base) = os.path.split(parm)
+ dir, base = os.path.split(input_bytes(parm))

res = vfs.resolve(repo, dir, parent=pwd)
_, dir_item = res[-1]
if not dir_item:
- raise Exception('%r does not exist' % dir)
+ raise Exception('%s does not exist' % path_msg(dir))
for name, item in vfs.contents(repo, dir_item):
- if name == '.':
+ if name == b'.':
continue
if fnmatch.fnmatch(name, base):
if stat.S_ISLNK(vfs.item_mode(item)):
deref = vfs.resolve(repo, name, parent=res)
deref_name, deref_item = deref[-1]
if not deref_item:
- raise Exception('%r does not exist' %
- '/'.join(name for name, item
- in deref))
+ raise Exception('%s does not exist' %
+                                                path_msg(b'/'.join(name for name, item
+ in deref)))
item = deref_item
with vfs.fopen(repo, item) as srcfile:
with open(name, 'wb') as destfile:
- log('Saving %r\n' % name)
+ log('Saving %s\n' % path_msg(name))
write_to_file(srcfile, destfile)
elif cmd == 'help' or cmd == '?':
# FIXME: move to stdout
diff --git a/lib/bup/compat.py b/lib/bup/compat.py
index 692bd9c1..03041f35 100644
--- a/lib/bup/compat.py
+++ b/lib/bup/compat.py
@@ -27,6 +27,7 @@ if py3:

from os import fsencode
from shlex import quote
+ input = input
range = range
str_type = str
int_types = (int,)
@@ -83,6 +84,7 @@ else: # Python 2

from bup.py2raise import reraise

+ input = raw_input
range = xrange
str_type = basestring
int_types = (int, long)
diff --git a/t/test-ftp b/t/test-ftp
index 3a8e5db3..e9f41664 100755
--- a/t/test-ftp
+++ b/t/test-ftp
@@ -6,21 +6,23 @@ exec "$bup_python" "$0" ${1+"$@"}
# end of bup preamble

from __future__ import absolute_import, print_function
-from os import environ, chdir, mkdir, symlink, unlink
+from os import chdir, mkdir, symlink, unlink
from os.path import abspath, dirname
from subprocess import PIPE
from time import localtime, strftime
import os, sys

-script_home = abspath(dirname(sys.argv[0] or '.'))
-sys.path[:0] = [abspath(script_home + '/../lib'), abspath(script_home + '/..')]
-top = os.getcwd()
-bup_cmd = top + '/bup'
+# For buptest, wvtest, ...
+sys.path[:0] = (abspath(os.path.dirname(__file__) + '/..'),)

from buptest import ex, exo, logcmd, test_tempdir
from wvtest import wvfail, wvpass, wvpasseq, wvpassne, wvstart

+from bup.compat import environ
from bup.helpers import unlink as unlink_if_exists
+import bup.path
+
+bup_cmd = bup.path.exe()

def bup(*args, **kwargs):
if 'stdout' not in kwargs:
@@ -28,106 +30,108 @@ def bup(*args, **kwargs):
return ex((bup_cmd,) + args, **kwargs)

def jl(*lines):
- return ''.join(line + '\n' for line in lines)
+ return b''.join(line + b'\n' for line in lines)

-environ['GIT_AUTHOR_NAME'] = 'bup test'
-environ['GIT_COMMITTER_NAME'] = 'bup test'
-environ['GIT_AUTHOR_EMAIL'] = 'bup@a425bc70a02811e49bdf73ee56450e6f'
-environ['GIT_COMMITTER_EMAIL'] = 'bup@a425bc70a02811e49bdf73ee56450e6f'
+environ[b'GIT_AUTHOR_NAME'] = b'bup test'
+environ[b'GIT_COMMITTER_NAME'] = b'bup test'
+environ[b'GIT_AUTHOR_EMAIL'] = b'bup@a425bc70a02811e49bdf73ee56450e6f'
+environ[b'GIT_COMMITTER_EMAIL'] = b'bup@a425bc70a02811e49bdf73ee56450e6f'

-with test_tempdir('ftp-') as tmpdir:
- environ['BUP_DIR'] = tmpdir + '/repo'
- environ['GIT_DIR'] = tmpdir + '/repo'
+with test_tempdir(b'ftp-') as tmpdir:
+ environ[b'BUP_DIR'] = tmpdir + b'/repo'
+ environ[b'GIT_DIR'] = tmpdir + b'/repo'

chdir(tmpdir)
- mkdir('src')
- chdir('src')
- mkdir('dir')
- with open('file-1', 'wb') as f:
- print('excitement!', file=f)
- with open('dir/file-2', 'wb') as f:
- print('more excitement!', file=f)
- symlink('file-1', 'file-symlink')
- symlink('dir', 'dir-symlink')
- symlink('not-there', 'bad-symlink')
+ mkdir(b'src')
+ chdir(b'src')
+ mkdir(b'dir')
+ with open(b'file-1', 'wb') as f:
+ f.write(b'excitement!\n')
+ with open(b'dir/file-2', 'wb') as f:
+ f.write(b'more excitement!\n')
+ symlink(b'file-1', b'file-symlink')
+ symlink(b'dir', b'dir-symlink')
+ symlink(b'not-there', b'bad-symlink')

chdir(tmpdir)
- bup('init')
- bup('index', 'src')
- bup('save', '-n', 'src', '--strip', 'src')
- save_utc = int(exo(('git', 'show', '-s', '--format=%at', 'src')).out.strip())
- save_name = strftime('%Y-%m-%d-%H%M%S', localtime(save_utc))
+ bup(b'init')
+ bup(b'index', b'src')
+ bup(b'save', b'-n', b'src', b'--strip', b'src')
+ save_utc = int(exo((b'git', b'show',
+ b'-s', b'--format=%at', b'src')).out.strip())
+ save_name = strftime('%Y-%m-%d-%H%M%S', localtime(save_utc)).encode('ascii')

wvstart('help')
- wvpasseq('Commands: ls cd pwd cat get mget help quit\n',
- exo((bup_cmd, 'ftp'), input='help\n', stderr=PIPE).err)
+ wvpasseq(b'Commands: ls cd pwd cat get mget help quit\n',
+ exo((bup_cmd, b'ftp'), input=b'help\n', stderr=PIPE).err)

wvstart('pwd/cd')
- wvpasseq('/\n', bup('ftp', input='pwd\n').out)
- wvpasseq('', bup('ftp', input='cd src\n').out)
- wvpasseq('/src\n', bup('ftp', input=jl('cd src', 'pwd')).out)
- wvpasseq('/src\n/\n', bup('ftp', input=jl('cd src', 'pwd',
- 'cd ..', 'pwd')).out)
- wvpasseq('/src\n/\n', bup('ftp', input=jl('cd src', 'pwd',
- 'cd ..', 'cd ..', 'pwd')).out)
- wvpasseq('/src/%s/dir\n' % save_name,
- bup('ftp', input=jl('cd src/latest/dir-symlink', 'pwd')).out)
- wvpasseq('/src/%s/dir\n' % save_name,
- bup('ftp', input=jl('cd src latest dir-symlink', 'pwd')).out)
- wvpassne(0, bup('ftp',
- input=jl('cd src/latest/bad-symlink', 'pwd'),
+ wvpasseq(b'/\n', bup(b'ftp', input=b'pwd\n').out)
+ wvpasseq(b'', bup(b'ftp', input=b'cd src\n').out)
+ wvpasseq(b'/src\n', bup(b'ftp', input=jl(b'cd src', b'pwd')).out)
+ wvpasseq(b'/src\n/\n', bup(b'ftp', input=jl(b'cd src', b'pwd',
+ b'cd ..', b'pwd')).out)
+ wvpasseq(b'/src\n/\n', bup(b'ftp', input=jl(b'cd src', b'pwd',
+ b'cd ..', b'cd ..',
+ b'pwd')).out)
+ wvpasseq(b'/src/%s/dir\n' % save_name,
+ bup(b'ftp', input=jl(b'cd src/latest/dir-symlink', b'pwd')).out)
+ wvpasseq(b'/src/%s/dir\n' % save_name,
+ bup(b'ftp', input=jl(b'cd src latest dir-symlink', b'pwd')).out)
+ wvpassne(0, bup(b'ftp',
+ input=jl(b'cd src/latest/bad-symlink', b'pwd'),
check=False, stdout=None).rc)
- wvpassne(0, bup('ftp',
- input=jl('cd src/latest/not-there', 'pwd'),
+ wvpassne(0, bup(b'ftp',
+ input=jl(b'cd src/latest/not-there', b'pwd'),
check=False, stdout=None).rc)

wvstart('ls')
# FIXME: elaborate
- wvpasseq('src\n', bup('ftp', input='ls\n').out)
- wvpasseq(save_name + '\nlatest\n',
- bup('ftp', input='ls src\n').out)
+ wvpasseq(b'src\n', bup(b'ftp', input=b'ls\n').out)
+ wvpasseq(save_name + b'\nlatest\n',
+ bup(b'ftp', input=b'ls src\n').out)

wvstart('cat')
- wvpasseq('excitement!\n',
- bup('ftp', input='cat src/latest/file-1\n').out)
- wvpasseq('excitement!\nmore excitement!\n',
- bup('ftp',
- input='cat src/latest/file-1 src/latest/dir/file-2\n').out)
+ wvpasseq(b'excitement!\n',
+ bup(b'ftp', input=b'cat src/latest/file-1\n').out)
+ wvpasseq(b'excitement!\nmore excitement!\n',
+ bup(b'ftp',
+ input=b'cat src/latest/file-1 src/latest/dir/file-2\n').out)

wvstart('get')
- bup('ftp', input=jl('get src/latest/file-1 dest'))
- with open('dest', 'rb') as f:
- wvpasseq('excitement!\n', f.read())
- unlink('dest')
- bup('ftp', input=jl('get src/latest/file-symlink dest'))
- with open('dest', 'rb') as f:
- wvpasseq('excitement!\n', f.read())
- unlink('dest')
- wvpassne(0, bup('ftp',
- input=jl('get src/latest/bad-symlink dest'),
+ bup(b'ftp', input=jl(b'get src/latest/file-1 dest'))
+ with open(b'dest', 'rb') as f:
+ wvpasseq(b'excitement!\n', f.read())
+ unlink(b'dest')
+ bup(b'ftp', input=jl(b'get src/latest/file-symlink dest'))
+ with open(b'dest', 'rb') as f:
+ wvpasseq(b'excitement!\n', f.read())
+ unlink(b'dest')
+ wvpassne(0, bup(b'ftp',
+ input=jl(b'get src/latest/bad-symlink dest'),
check=False, stdout=None).rc)
- wvpassne(0, bup('ftp',
- input=jl('get src/latest/not-there'),
+ wvpassne(0, bup(b'ftp',
+ input=jl(b'get src/latest/not-there'),
check=False, stdout=None).rc)

wvstart('mget')
- unlink_if_exists('file-1')
- bup('ftp', input=jl('mget src/latest/file-1'))
- with open('file-1', 'rb') as f:
- wvpasseq('excitement!\n', f.read())
- unlink_if_exists('file-1')
- unlink_if_exists('file-2')
- bup('ftp', input=jl('mget src/latest/file-1 src/latest/dir/file-2'))
- with open('file-1', 'rb') as f:
- wvpasseq('excitement!\n', f.read())
- with open('file-2', 'rb') as f:
- wvpasseq('more excitement!\n', f.read())
- unlink_if_exists('file-symlink')
- bup('ftp', input=jl('mget src/latest/file-symlink'))
- with open('file-symlink', 'rb') as f:
- wvpasseq('excitement!\n', f.read())
- wvpassne(0, bup('ftp',
- input=jl('mget src/latest/bad-symlink dest'),
+ unlink_if_exists(b'file-1')
+ bup(b'ftp', input=jl(b'mget src/latest/file-1'))
+ with open(b'file-1', 'rb') as f:
+ wvpasseq(b'excitement!\n', f.read())
+ unlink_if_exists(b'file-1')
+ unlink_if_exists(b'file-2')
+ bup(b'ftp', input=jl(b'mget src/latest/file-1 src/latest/dir/file-2'))
+ with open(b'file-1', 'rb') as f:
+ wvpasseq(b'excitement!\n', f.read())
+ with open(b'file-2', 'rb') as f:
+ wvpasseq(b'more excitement!\n', f.read())
+ unlink_if_exists(b'file-symlink')
+ bup(b'ftp', input=jl(b'mget src/latest/file-symlink'))
+ with open(b'file-symlink', 'rb') as f:
+ wvpasseq(b'excitement!\n', f.read())
+ wvpassne(0, bup(b'ftp',
+ input=jl(b'mget src/latest/bad-symlink dest'),
check=False, stdout=None).rc)
# bup mget currently always does pattern matching
- bup('ftp', input='mget src/latest/not-there\n')
+ bup(b'ftp', input=b'mget src/latest/not-there\n')
--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
t/unknown-owner | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/t/unknown-owner b/t/unknown-owner
index 9ec7e449..7e55191e 100755
--- a/t/unknown-owner
+++ b/t/unknown-owner
@@ -5,14 +5,14 @@ exec "$bup_python" "$0" ${1+"$@"}
"""
# end of bup preamble

-from __future__ import absolute_import
+from __future__ import absolute_import, print_function

import grp
import pwd
import sys

def usage():
- print >> sys.stderr, "Usage: unknown-owner (--user | --group)"
+ print("Usage: unknown-owner (--user | --group)", file=sys.stderr)

if len(sys.argv) != 2:
usage()
@@ -26,4 +26,4 @@ else:
usage()
sys.exit(1)

-print 'x' * (max_name_len + 1)
+print('x' * (max_name_len + 1))
--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/Makefile b/Makefile
index ed792848..1362d7f8 100644
--- a/Makefile
+++ b/Makefile
@@ -174,12 +174,14 @@ cmdline_tests := \
t/test-compression.sh \
t/test-drecurse.sh \
t/test-fsck.sh \
+ t/test-index.sh \
t/test-index-clear.sh \
t/test-list-idx.sh \
t/test-ls \
t/test-ls-remote \
t/test-meta.sh \
t/test-packsizelimit \
+ t/test-redundant-saves.sh \
t/test-sparse-files.sh \
t/test-split-join.sh \
t/test-tz.sh
@@ -193,7 +195,6 @@ ifeq "2" "$(bup_python_majver)"
t/test-rm.sh \
t/test-gc.sh \
t/test-main.sh \
- t/test-index.sh \
t/test-fuse.sh \
t/test-index-check-device.sh \
t/test-on.sh \
@@ -201,7 +202,6 @@ ifeq "2" "$(bup_python_majver)"
t/test-restore-single-file.sh \
t/test-rm-between-index-and-save.sh \
t/test-save-with-valid-parent.sh \
- t/test-redundant-saves.sh \
t/test-save-creates-no-unrefs.sh \
t/test-save-restore-excludes.sh \
t/test-save-strip-graft.sh \
--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 8 ++++----
t/lib.sh | 2 +-
2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/Makefile b/Makefile
index 1651d30d..27885dcd 100644
--- a/Makefile
+++ b/Makefile
@@ -185,6 +185,10 @@ cmdline_tests := \
t/test-restore-single-file.sh \
t/test-rm-between-index-and-save.sh \
t/test-save-creates-no-unrefs.sh \
+ t/test-save-restore \
+ t/test-save-restore-excludes.sh \
+ t/test-save-strip-graft.sh \
+ t/test-save-with-valid-parent.sh \
t/test-sparse-files.sh \
t/test-split-join.sh \
t/test-tz.sh
@@ -192,7 +196,6 @@ cmdline_tests := \
ifeq "2" "$(bup_python_majver)"
cmdline_tests += \
t/test-ftp \
- t/test-save-restore \
t/test-prune-older \
t/test-web.sh \
t/test-rm.sh \
@@ -202,9 +205,6 @@ ifeq "2" "$(bup_python_majver)"
t/test-index-check-device.sh \
t/test-on.sh \
t/test-restore-map-owner.sh \
- t/test-save-with-valid-parent.sh \
- t/test-save-restore-excludes.sh \
- t/test-save-strip-graft.sh \
t/test-import-duplicity.sh \
t/test-import-rdiff-backup.sh \
t/test-xdev.sh \
diff --git a/t/lib.sh b/t/lib.sh
index 5c5b01ee..4d67341e 100644
--- a/t/lib.sh
+++ b/t/lib.sh
@@ -17,7 +17,7 @@ resolve-parent()
test "$#" -eq 1 || return $?
echo "$1" | \
PYTHONPATH="$bup_t_lib_script_home/../lib" bup-python -c \
- "import sys, bup.helpers; print bup.helpers.resolve_parent(sys.stdin.readline())" \
+ "import sys, bup.helpers; print(bup.helpers.resolve_parent(sys.stdin.readline()))" \
|| return $?
}

--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
Handle --exclude and --exclude-from values as bytes, adjust
the drecurse command for python 3, and include test-drecurse.sh
in the python 3 test set.

Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 2 +-
cmd/drecurse-cmd.py | 10 +++++++---
lib/bup/helpers.py | 20 ++++++++++----------
3 files changed, 18 insertions(+), 14 deletions(-)

diff --git a/Makefile b/Makefile
index 89bbffbf..2158f49c 100644
--- a/Makefile
+++ b/Makefile
@@ -170,6 +170,7 @@ runtests-python: all t/tmp
cmdline_tests := \
t/test-argv \
t/test-compression.sh \
+ t/test-drecurse.sh \
t/test-fsck.sh \
t/test-index-clear.sh \
t/test-ls \
@@ -190,7 +191,6 @@ ifeq "2" "$(bup_python_majver)"
t/test-index.sh \
t/test-split-join.sh \
t/test-fuse.sh \
- t/test-drecurse.sh \
t/test-cat-file.sh \
t/test-index-check-device.sh \
t/test-meta.sh \
diff --git a/cmd/drecurse-cmd.py b/cmd/drecurse-cmd.py
index a3a7d30a..3fa155fd 100755
--- a/cmd/drecurse-cmd.py
+++ b/cmd/drecurse-cmd.py
@@ -10,7 +10,9 @@ from os.path import relpath
import sys

from bup import options, drecurse
+from bup.compat import argv_bytes
from bup.helpers import log, parse_excludes, parse_rx_excludes, saved_errors
+from bup.io import byte_stream


optspec = """
@@ -30,9 +32,9 @@ o = options.Options(optspec)
if len(extra) != 1:
o.fatal("exactly one filename expected")

-drecurse_top = extra[0]
+drecurse_top = argv_bytes(extra[0])
excluded_paths = parse_excludes(flags, o.fatal)
-if not drecurse_top.startswith('/'):
+if not drecurse_top.startswith(b'/'):
excluded_paths = [relpath(x) for x in excluded_paths]
exclude_rxs = parse_rx_excludes(flags, o.fatal)
it = drecurse.recursive_dirlist([drecurse_top], opt.xdev,
@@ -49,8 +51,10 @@ else:
for i in it:
pass
else:
+ sys.stdout.flush()
+ out = byte_stream(sys.stdout)
for (name,st) in it:
- print(name)
+ out.write(name + b'\n')

if saved_errors:
log('WARNING: %d errors encountered.\n' % len(saved_errors))
diff --git a/lib/bup/helpers.py b/lib/bup/helpers.py
index 8773cfc0..a85c3784 100644
--- a/lib/bup/helpers.py
+++ b/lib/bup/helpers.py
@@ -12,7 +12,7 @@ import hashlib, heapq, math, operator, time, grp, tempfile

from bup import _helpers
from bup import compat
-from bup.compat import byte_int
+from bup.compat import argv_bytes, byte_int
from bup.io import path_msg
# This function should really be in helpers, not in bup.options. But we
# want options.py to be standalone so people can include it in other projects.
@@ -964,12 +964,12 @@ def parse_excludes(options, fatal):
for flag in options:
(option, parameter) = flag
if option == '--exclude':
- excluded_paths.append(resolve_parent(parameter))
+ excluded_paths.append(resolve_parent(argv_bytes(parameter)))
elif option == '--exclude-from':
try:
- f = open(resolve_parent(parameter))
+ f = open(resolve_parent(argv_bytes(parameter)), 'rb')
except IOError as e:
- raise fatal("couldn't read %s" % parameter)
+ raise fatal("couldn't read %r" % parameter)
for exclude_path in f.readlines():
# FIXME: perhaps this should be rstrip('\n')
exclude_path = resolve_parent(exclude_path.strip())
@@ -987,22 +987,22 @@ def parse_rx_excludes(options, fatal):
(option, parameter) = flag
if option == '--exclude-rx':
try:
- excluded_patterns.append(re.compile(parameter))
+ excluded_patterns.append(re.compile(argv_bytes(parameter)))
except re.error as ex:
- fatal('invalid --exclude-rx pattern (%s): %s' % (parameter, ex))
+ fatal('invalid --exclude-rx pattern (%r): %s' % (parameter, ex))
elif option == '--exclude-rx-from':
try:
- f = open(resolve_parent(parameter))
+ f = open(resolve_parent(parameter), 'rb')
except IOError as e:
- raise fatal("couldn't read %s" % parameter)
+ raise fatal("couldn't read %r" % parameter)
for pattern in f.readlines():
- spattern = pattern.rstrip('\n')
+ spattern = pattern.rstrip(b'\n')
if not spattern:
continue
try:
excluded_patterns.append(re.compile(spattern))
except re.error as ex:
- fatal('invalid --exclude-rx pattern (%s): %s' % (spattern, ex))
+ fatal('invalid --exclude-rx pattern (%r): %s' % (spattern, ex))
return excluded_patterns


--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
t/ns-timestamp-resolutions | 12 +++++++++---
1 file changed, 9 insertions(+), 3 deletions(-)

diff --git a/t/ns-timestamp-resolutions b/t/ns-timestamp-resolutions
index f8ac60d8..e8be1ef9 100755
--- a/t/ns-timestamp-resolutions
+++ b/t/ns-timestamp-resolutions
@@ -8,9 +8,11 @@ exec "$bup_python" "$0" ${1+"$@"}
from __future__ import absolute_import
import os, sys

-import bup.xstat as xstat
+from bup.compat import argv_bytes
from bup.helpers import handle_ctrl_c, saved_errors
+from bup.io import byte_stream
from bup import metadata, options
+import bup.xstat as xstat


optspec = """
@@ -23,10 +25,13 @@ handle_ctrl_c()
o = options.Options(optspec)
(opt, flags, extra) = o.parse(sys.argv[1:])

+sys.stdout.flush()
+out = byte_stream(sys.stdout)
+
if len(extra) != 1:
o.fatal('must specify a test file name')

-target = extra[0]
+target = argv_bytes(extra[0])

open(target, 'w').close()
xstat.utime(target, (123456789, 123456789))
@@ -39,7 +44,8 @@ def ns_resolution(x):
n *= 10
return n

-print ns_resolution(meta.atime), ns_resolution(meta.mtime)
+out.write(b'%d %d\n' % (ns_resolution(meta.atime),
+ ns_resolution(meta.mtime)))

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Makefile b/Makefile
index 67cbca2b..db033c9a 100644
--- a/Makefile
+++ b/Makefile
@@ -174,6 +174,7 @@ cmdline_tests := \
t/test-compression.sh \
t/test-drecurse.sh \
t/test-fsck.sh \
+ t/test-gc.sh \
t/test-index.sh \
t/test-index-clear.sh \
t/test-list-idx.sh \
@@ -199,7 +200,6 @@ ifeq "2" "$(bup_python_majver)"
t/test-ftp \
t/test-prune-older \
t/test-web.sh \
- t/test-gc.sh \
t/test-main.sh \
t/test-fuse.sh \
t/test-index-check-device.sh \
--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
cmd/mux-cmd.py | 10 ++++++----
cmd/on--server-cmd.py | 18 +++++++++++-------
cmd/on-cmd.py | 23 ++++++++++++++---------
lib/bup/helpers.py | 18 +++++++++---------
4 files changed, 40 insertions(+), 29 deletions(-)

diff --git a/cmd/mux-cmd.py b/cmd/mux-cmd.py
index 1a500b65..f7be4c2f 100755
--- a/cmd/mux-cmd.py
+++ b/cmd/mux-cmd.py
@@ -10,11 +10,11 @@ import os, sys, subprocess, struct

from bup import options
from bup.helpers import debug1, debug2, mux
-
+from bup.io import byte_stream

# Give the subcommand exclusive access to stdin.
orig_stdin = os.dup(0)
-devnull = os.open('/dev/null', os.O_RDONLY)
+devnull = os.open(os.devnull, os.O_RDONLY)
os.dup2(devnull, 0)
os.close(devnull)

@@ -41,9 +41,11 @@ p = subprocess.Popen(subcmd, stdin=orig_stdin, stdout=outw, stderr=errw,
close_fds=False, preexec_fn=close_fds)
os.close(outw)
os.close(errw)
-sys.stdout.write('BUPMUX')
sys.stdout.flush()
-mux(p, sys.stdout.fileno(), outr, errr)
+out = byte_stream(sys.stdout)
+out.write(b'BUPMUX')
+out.flush()
+mux(p, out.fileno(), outr, errr)
os.close(outr)
os.close(errr)
prv = p.wait()
diff --git a/cmd/on--server-cmd.py b/cmd/on--server-cmd.py
index 64e79d29..e5b7b190 100755
--- a/cmd/on--server-cmd.py
+++ b/cmd/on--server-cmd.py
@@ -9,7 +9,8 @@ from __future__ import absolute_import
import sys, os, struct

from bup import options, helpers, path
-
+from bup.compat import environ, py_maj
+from bup.io import byte_stream

optspec = """
bup on--server
@@ -25,15 +26,18 @@ if extra:
# Normally we could just pass this on the command line, but since we'll often
# be getting called on the other end of an ssh pipe, which tends to mangle
# argv (by sending it via the shell), this way is much safer.
-buf = sys.stdin.read(4)
+
+stdin = byte_stream(sys.stdin)
+buf = stdin.read(4)
sz = struct.unpack('!I', buf)[0]
assert(sz > 0)
assert(sz < 1000000)
-buf = sys.stdin.read(sz)
+buf = stdin.read(sz)
assert(len(buf) == sz)
-argv = buf.split('\0')
+argv = buf.split(b'\0')
argv[0] = path.exe()
-argv = [argv[0], 'mux', '--'] + argv
+argv = [argv[0], b'mux', b'--'] + argv
+

# stdin/stdout are supposedly connected to 'bup server' that the caller
# started for us (often on the other end of an ssh tunnel), so we don't want
@@ -52,10 +56,10 @@ argv = [argv[0], 'mux', '--'] + argv
os.dup2(0, 3)
os.dup2(1, 4)
os.dup2(2, 1)
-fd = os.open('/dev/null', os.O_RDONLY)
+fd = os.open(os.devnull, os.O_RDONLY)
os.dup2(fd, 0)
os.close(fd)

-os.environ['BUP_SERVER_REVERSE'] = helpers.hostname()
+environ[b'BUP_SERVER_REVERSE'] = helpers.hostname()
os.execvp(argv[0], argv)
sys.exit(99)
diff --git a/cmd/on-cmd.py b/cmd/on-cmd.py
index 0643ef80..2c0e9fc8 100755
--- a/cmd/on-cmd.py
+++ b/cmd/on-cmd.py
@@ -6,11 +6,13 @@ exec "$bup_python" "$0" ${1+"$@"}
# end of bup preamble

from __future__ import absolute_import
+from subprocess import PIPE
import sys, os, struct, getopt, subprocess, signal

-from subprocess import PIPE
from bup import options, ssh, path
+from bup.compat import argv_bytes
from bup.helpers import DemuxConn, log
+from bup.io import byte_stream


optspec = """
@@ -34,31 +36,34 @@ def handler(signum, frame):
signal.signal(signal.SIGTERM, handler)
signal.signal(signal.SIGINT, handler)

+sys.stdout.flush()
+out = byte_stream(sys.stdout)
+
try:
sp = None
p = None
ret = 99

- hp = extra[0].split(':')
+ hp = argv_bytes(extra[0]).split(b':')
if len(hp) == 1:
(hostname, port) = (hp[0], None)
else:
(hostname, port) = hp
- argv = extra[1:]
- p = ssh.connect(hostname, port, 'on--server', stderr=PIPE)
+ argv = [argv_bytes(x) for x in extra[1:]]
+ p = ssh.connect(hostname, port, b'on--server', stderr=PIPE)

try:
- argvs = '\0'.join(['bup'] + argv)
+ argvs = b'\0'.join([b'bup'] + argv)
p.stdin.write(struct.pack('!I', len(argvs)) + argvs)
p.stdin.flush()
- sp = subprocess.Popen([path.exe(), 'server'],
+ sp = subprocess.Popen([path.exe(), b'server'],
stdin=p.stdout, stdout=p.stdin)
p.stdin.close()
p.stdout.close()
# Demultiplex remote client's stderr (back to stdout/stderr).
- dmc = DemuxConn(p.stderr.fileno(), open(os.devnull, "w"))
- for line in iter(dmc.readline, ""):
- sys.stdout.write(line)
+ dmc = DemuxConn(p.stderr.fileno(), open(os.devnull, "wb"))
+ for line in iter(dmc.readline, b''):
+ out.write(line)
finally:
while 1:
# if we get a signal while waiting, we have to keep waiting, just
diff --git a/lib/bup/helpers.py b/lib/bup/helpers.py
index a85c3784..38fbc244 100644
--- a/lib/bup/helpers.py
+++ b/lib/bup/helpers.py
@@ -13,7 +13,7 @@ import hashlib, heapq, math, operator, time, grp, tempfile
from bup import _helpers
from bup import compat
from bup.compat import argv_bytes, byte_int
-from bup.io import path_msg
+from bup.io import byte_stream, path_msg
# This function should really be in helpers, not in bup.options. But we
# want options.py to be standalone so people can include it in other projects.
from bup.options import _tty_width as tty_width
@@ -574,13 +574,13 @@ class DemuxConn(BaseConn):
BaseConn.__init__(self, outp)
# Anything that comes through before the sync string was not
# multiplexed and can be assumed to be debug/log before mux init.
- tail = ''
- while tail != 'BUPMUX':
+ tail = b''
+ while tail != b'BUPMUX':
b = os.read(infd, (len(tail) < 6) and (6-len(tail)) or 1)
if not b:
raise IOError('demux: unexpected EOF during initialization')
tail += b
- sys.stderr.write(tail[:-6]) # pre-mux log messages
+ byte_stream(sys.stderr).write(tail[:-6]) # pre-mux log messages
tail = tail[-6:]
self.infd = infd
self.reader = None
@@ -596,14 +596,14 @@ class DemuxConn(BaseConn):
rl, wl, xl = select.select([self.infd], [], [], timeout)
if not rl: return False
assert(rl[0] == self.infd)
- ns = ''.join(checked_reader(self.infd, 5))
+ ns = b''.join(checked_reader(self.infd, 5))
n, fdw = struct.unpack('!IB', ns)
assert(n <= MAX_PACKET)
if fdw == 1:
self.reader = checked_reader(self.infd, n)
elif fdw == 2:
for buf in checked_reader(self.infd, n):
- sys.stderr.write(buf)
+ byte_stream(sys.stderr).write(buf)
elif fdw == 3:
self.closed = True
debug2("DemuxConn: marked closed\n")
@@ -640,10 +640,10 @@ class DemuxConn(BaseConn):
def _readline(self):
def find_eol(buf):
try:
- return buf.index('\n')+1
+ return buf.index(b'\n')+1
except ValueError:
return None
- return ''.join(self._read_parts(find_eol))
+ return b''.join(self._read_parts(find_eol))

def _read(self, size):
csize = [size]
@@ -653,7 +653,7 @@ class DemuxConn(BaseConn):
return None
else:
return csize[0]
- return ''.join(self._read_parts(until_size))
+ return b''.join(self._read_parts(until_size))

def has_input(self):
return self._load_buf(0)
--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
lib/bup/pwdgrp.py | 20 ++++++++++----------
1 file changed, 10 insertions(+), 10 deletions(-)

diff --git a/lib/bup/pwdgrp.py b/lib/bup/pwdgrp.py
index 485b4292..4374bea8 100644
--- a/lib/bup/pwdgrp.py
+++ b/lib/bup/pwdgrp.py
@@ -14,11 +14,11 @@ class Passwd:
__slots__ = ('pw_name', 'pw_passwd', 'pw_uid', 'pw_gid', 'pw_gecos',
'pw_dir', 'pw_shell')
def __init__(self, name, passwd, uid, gid, gecos, dir, shell):
- assert type(name) == bytes
- assert type(passwd) == bytes
- assert type(gecos) == bytes
- assert type(dir) == bytes
- assert type(shell) == bytes
+ assert isinstance(name, bytes)
+ assert isinstance(passwd, bytes)
+ assert isinstance(gecos, bytes)
+ assert isinstance(dir, bytes)
+ assert isinstance(shell, bytes)
(self.pw_name, self.pw_passwd, self.pw_uid, self.pw_gid,
self.pw_gecos, self.pw_dir, self.pw_shell) = \
name, passwd, uid, gid, gecos, dir, shell
@@ -46,10 +46,10 @@ class Group:
"""Drop in replacement for grp's structure with bytes instead of strings."""
__slots__ = 'gr_name', 'gr_passwd', 'gr_gid', 'gr_mem'
def __init__(self, name, passwd, gid, mem):
- assert type(name) == bytes
- assert type(passwd) == bytes
+ assert isinstance(name, bytes)
+ assert isinstance(passwd, bytes)
for m in mem:
- assert type(m) == bytes
+ assert isinstance(m, bytes)
self.gr_name, self.gr_passwd, self.gr_gid, self.gr_mem = \
name, passwd, gid, mem

@@ -87,7 +87,7 @@ def pwd_from_name(name):
"""Return password database entry for name (may be a cached value).
Return None if no entry is found.
"""
- assert type(name) == bytes
+ assert isinstance(name, bytes)
global _uid_to_pwd_cache, _name_to_pwd_cache
entry, cached = cache_key_value(getpwnam, name, _name_to_pwd_cache)
if entry and not cached:
@@ -113,7 +113,7 @@ def grp_from_name(name):
"""Return password database entry for name (may be a cached value).
Return None if no entry is found.
"""
- assert type(name) == bytes
+ assert isinstance(name, bytes)
global _gid_to_grp_cache, _name_to_grp_cache
entry, cached = cache_key_value(getgrnam, name, _name_to_grp_cache)
if entry and not cached:
--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
Add a close_fds argument to exo() for import-duplicity, and while
we're there, define readpipe() in terms of exo(), since that's a
straightforward simplification.

Default close_fds to True, since it doesn't look like we need to
preserve the open fds in our other calls.
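
For reference, a minimal sketch of how the two helpers relate after this
change; the git command is only an illustrative stand-in, and exo()'s other
defaults are as in helpers.py:

    from bup.helpers import exo, readpipe

    cmd = [b'git', b'rev-parse', b'--is-inside-work-tree']  # example only

    # exo() captures stdout, closes inherited fds by default (close_fds=True),
    # and raises when check=True and the subprocess exits non-zero; its first
    # return value is the captured stdout.
    out = exo(cmd)[0]

    # readpipe() is now just the stdout half of exo().
    assert readpipe(cmd) == out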

Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
lib/bup/helpers.py | 14 +++++---------
lib/bup/t/thelpers.py | 8 +++-----
2 files changed, 8 insertions(+), 14 deletions(-)

diff --git a/lib/bup/helpers.py b/lib/bup/helpers.py
index a5c5165b..cfbdff49 100644
--- a/lib/bup/helpers.py
+++ b/lib/bup/helpers.py
@@ -317,14 +317,16 @@ def exo(cmd,
stderr=None,
shell=False,
check=True,
- preexec_fn=None):
+ preexec_fn=None,
+ close_fds=True):
if input:
assert stdin in (None, PIPE)
stdin = PIPE
p = Popen(cmd,
stdin=stdin, stdout=PIPE, stderr=stderr,
shell=shell,
- preexec_fn=preexec_fn)
+ preexec_fn=preexec_fn,
+ close_fds=close_fds)
out, err = p.communicate(input)
if check and p.returncode != 0:
raise Exception('subprocess %r failed with status %d%s'
@@ -334,13 +336,7 @@ def exo(cmd,

def readpipe(argv, preexec_fn=None, shell=False):
"""Run a subprocess and return its output."""
- p = subprocess.Popen(argv, stdout=subprocess.PIPE, preexec_fn=preexec_fn,
- shell=shell)
- out, err = p.communicate()
- if p.returncode != 0:
- raise Exception('subprocess %r failed with status %d'
- % (b' '.join(argv), p.returncode))
- return out
+ return exo(argv, preexec_fn=preexec_fn, shell=shell)[0]


def _argmax_base(command):
diff --git a/lib/bup/t/thelpers.py b/lib/bup/t/thelpers.py
index c71cbb71..17ee6357 100644
--- a/lib/bup/t/thelpers.py
+++ b/lib/bup/t/thelpers.py
@@ -133,11 +133,9 @@ def test_readpipe():
try:
readpipe([b'bash', b'-c', b'exit 42'])
except Exception as ex:
- if not re.match("^subprocess b?'bash -c exit 42' failed with status 42$",
- str(ex)):
- WVPASSEQ(str(ex),
- "^subprocess b?'bash -c exit 42' failed with status 42$")
-
+ rx = '^subprocess b?"bash -c \'exit 42\'" failed with status 42$'
+ if not re.match(rx, str(ex)):
+ WVPASSEQ(str(ex), rx)


@wvtest
--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Makefile b/Makefile
index 6ea5207b..257c2ce8 100644
--- a/Makefile
+++ b/Makefile
@@ -181,6 +181,7 @@ cmdline_tests := \
t/test-list-idx.sh \
t/test-ls \
t/test-ls-remote \
+ t/test-main.sh \
t/test-meta.sh \
t/test-packsizelimit \
t/test-redundant-saves.sh \
@@ -201,7 +202,6 @@ ifeq "2" "$(bup_python_majver)"
t/test-ftp \
t/test-prune-older \
t/test-web.sh \
- t/test-main.sh \
t/test-fuse.sh \
t/test-index-check-device.sh \
t/test-on.sh \
--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/Makefile b/Makefile
index 86fbd5bd..4bfd833e 100644
--- a/Makefile
+++ b/Makefile
@@ -167,11 +167,14 @@ runtests-python: all t/tmp
./wvtest.py $(python_tests) 2>&1 \
| tee -a t/tmp/test-log/$$$$.log

-cmdline_tests :=
+cmdline_tests := \
+ t/test-argv \
+ t/test-compression.sh \
+ t/test-index-clear.sh \
+ t/test-tz.sh

ifeq "2" "$(bup_python_majver)"
cmdline_tests += \
- t/test-argv \
t/test-ftp \
t/test-save-restore \
t/test-packsizelimit \
@@ -186,13 +189,10 @@ ifeq "2" "$(bup_python_majver)"
t/test-fuse.sh \
t/test-drecurse.sh \
t/test-cat-file.sh \
- t/test-compression.sh \
t/test-fsck.sh \
- t/test-index-clear.sh \
t/test-index-check-device.sh \
t/test-ls \
t/test-ls-remote \
- t/test-tz.sh \
t/test-meta.sh \
t/test-on.sh \
t/test-restore-map-owner.sh \
--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 4 ++--
lib/bup/helpers.py | 12 ++++++++----
t/test-ls | 8 ++++----
3 files changed, 14 insertions(+), 10 deletions(-)

diff --git a/Makefile b/Makefile
index 4bfd833e..74b41c1e 100644
--- a/Makefile
+++ b/Makefile
@@ -171,6 +171,8 @@ cmdline_tests := \
t/test-argv \
t/test-compression.sh \
t/test-index-clear.sh \
+ t/test-ls \
+ t/test-ls-remote \
t/test-tz.sh

ifeq "2" "$(bup_python_majver)"
@@ -191,8 +193,6 @@ ifeq "2" "$(bup_python_majver)"
t/test-cat-file.sh \
t/test-fsck.sh \
t/test-index-check-device.sh \
- t/test-ls \
- t/test-ls-remote \
t/test-meta.sh \
t/test-on.sh \
t/test-restore-map-owner.sh \
diff --git a/lib/bup/helpers.py b/lib/bup/helpers.py
index 0e907809..8773cfc0 100644
--- a/lib/bup/helpers.py
+++ b/lib/bup/helpers.py
@@ -922,8 +922,11 @@ def columnate(l, prefix):
The number of columns is determined automatically based on the string
lengths.
"""
+ binary = isinstance(prefix, bytes)
+ nothing = b'' if binary else ''
+ nl = b'\n' if binary else '\n'
if not l:
- return ""
+ return nothing
l = l[:]
clen = max(len(s) for s in l)
ncols = (tty_width() - len(prefix)) // (clen + 2)
@@ -932,13 +935,14 @@ def columnate(l, prefix):
clen = 0
cols = []
while len(l) % ncols:
- l.append('')
+ l.append(nothing)
rows = len(l) // ncols
for s in compat.range(0, len(l), rows):
cols.append(l[s:s+rows])
- out = ''
+ out = nothing
+ fmt = b'%-*s' if binary else '%-*s'
for row in zip(*cols):
- out += prefix + ''.join(('%-*s' % (clen+2, s)) for s in row) + '\n'
+ out += prefix + nothing.join((fmt % (clen+2, s)) for s in row) + nl
return out


diff --git a/t/test-ls b/t/test-ls
index 2b919a21..fcff652d 100755
--- a/t/test-ls
+++ b/t/test-ls
@@ -51,10 +51,10 @@ WVPASS bup save -n src -d 242312160 --strip src
WVPASS bup tag some-tag src

uid="$(WVPASS id -u)" || exit $?
-gid="$(WVPASS bup-python -c 'import os; print os.stat("src").st_gid')" || exit $?
+gid="$(WVPASS bup-python -c 'import os; print(os.stat("src").st_gid)')" || exit $?
user="$(WVPASS id -un)" || exit $?
group="$(WVPASS bup-python -c 'import grp, os;
-print grp.getgrgid(os.stat("src").st_gid)[0]')" || exit $?
+print(grp.getgrgid(os.stat("src").st_gid)[0])')" || exit $?
src_commit_hash=$(git log --format=%H -n1 src)
src_tree_hash=$(git log --format=%T -n1 src)

@@ -170,7 +170,7 @@ test "$bad_symlink_date" || exit 1

if test "$(uname -s)" != NetBSD; then
bad_symlink_size="$(WVPASS bup-python -c "import os
-print os.lstat('src/bad-symlink').st_size")" || exit $?
+print(os.lstat('src/bad-symlink').st_size)")" || exit $?
else
# NetBSD appears to return varying sizes, so for now, just ignore it.
bad_symlink_size="$(WVPASS echo "$bad_symlink_bup_info" \
@@ -191,7 +191,7 @@ test "$symlink_date" || exit 1

if test "$(uname -s)" != NetBSD; then
symlink_size="$(WVPASS bup-python -c "import os
-print os.lstat('src/symlink').st_size")" || exit $?
+print(os.lstat('src/symlink').st_size)")" || exit $?
else
# NetBSD appears to return varying sizes, so for now, just ignore it.
symlink_size="$(WVPASS echo "$symlink_bup_info" \
--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/Makefile b/Makefile
index ac18a583..32f0a51d 100644
--- a/Makefile
+++ b/Makefile
@@ -181,6 +181,7 @@ cmdline_tests := \
t/test-import-duplicity.sh \
t/test-import-rdiff-backup.sh \
t/test-index.sh \
+ t/test-index-check-device.sh \
t/test-index-clear.sh \
t/test-list-idx.sh \
t/test-ls \
@@ -201,14 +202,13 @@ cmdline_tests := \
t/test-save-with-valid-parent.sh \
t/test-sparse-files.sh \
t/test-split-join.sh \
- t/test-tz.sh
+ t/test-tz.sh \
+ t/test-xdev.sh

ifeq "2" "$(bup_python_majver)"
cmdline_tests += \
t/test-web.sh \
- t/test-index-check-device.sh \
- t/test-restore-map-owner.sh \
- t/test-xdev.sh
+ t/test-restore-map-owner.sh
endif

tmp-target-run-test-get-%: all t/tmp
--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 6 +-
lib/bup/t/tresolve.py | 279 +++++++++++++++++++++---------------------
2 files changed, 142 insertions(+), 143 deletions(-)

diff --git a/Makefile b/Makefile
index 1423e13f..86fbd5bd 100644
--- a/Makefile
+++ b/Makefile
@@ -152,16 +152,12 @@ python_tests := \
lib/bup/t/tindex.py \
lib/bup/t/tmetadata.py \
lib/bup/t/toptions.py \
+ lib/bup/t/tresolve.py \
lib/bup/t/tshquote.py \
lib/bup/t/tvfs.py \
lib/bup/t/tvint.py \
lib/bup/t/txstat.py

-ifeq "2" "$(bup_python_majver)"
- python_tests += \
- lib/bup/t/tresolve.py
-endif
-
# The "pwd -P" here may not be appropriate in the long run, but we
# need it until we settle the relevant drecurse/exclusion questions:
# https://groups.google.com/forum/#!topic/bup-list/9ke-Mbp10Q0
diff --git a/lib/bup/t/tresolve.py b/lib/bup/t/tresolve.py
index b66bee3d..f2e29d4b 100644
--- a/lib/bup/t/tresolve.py
+++ b/lib/bup/t/tresolve.py
@@ -1,7 +1,8 @@

from __future__ import absolute_import, print_function
+from binascii import unhexlify
from errno import ELOOP, ENOTDIR
-from os import environ, symlink
+from os import symlink
from stat import S_IFDIR
from sys import stderr
from time import localtime, strftime
@@ -9,6 +10,8 @@ from time import localtime, strftime
from wvtest import *

from bup import git, path, vfs
+from bup.compat import environ
+from bup.io import path_msg
from bup.metadata import Metadata
from bup.repo import LocalRepo, RemoteRepo
from bup.test.vfs import tree_dict
@@ -24,11 +27,11 @@ bup_path = path.exe()

def prep_and_test_repo(name, create_repo, test_repo):
with no_lingering_errors():
- with test_tempdir('bup-t' + name) as tmpdir:
- bup_dir = tmpdir + '/bup'
- environ['GIT_DIR'] = bup_dir
- environ['BUP_DIR'] = bup_dir
- ex((bup_path, 'init'))
+ with test_tempdir(b'bup-t' + name) as tmpdir:
+ bup_dir = tmpdir + b'/bup'
+ environ[b'GIT_DIR'] = bup_dir
+ environ[b'BUP_DIR'] = bup_dir
+ ex((bup_path, b'init'))
git.repodir = bup_dir
with create_repo(bup_dir) as repo:
test_repo(repo, tmpdir)
@@ -37,35 +40,35 @@ def prep_and_test_repo(name, create_repo, test_repo):
# just a straight redirection to vfs.resolve.

def test_resolve(repo, tmpdir):
- data_path = tmpdir + '/src'
+ data_path = tmpdir + b'/src'
resolve = repo.resolve
save_time = 100000
- save_time_str = strftime('%Y-%m-%d-%H%M%S', localtime(save_time))
+ save_time_str = strftime('%Y-%m-%d-%H%M%S', localtime(save_time)).encode('ascii')
os.mkdir(data_path)
- os.mkdir(data_path + '/dir')
- with open(data_path + '/file', 'w+') as tmpfile:
- print('canary', file=tmpfile)
- symlink('file', data_path + '/file-symlink')
- symlink('dir', data_path + '/dir-symlink')
- symlink('not-there', data_path + '/bad-symlink')
- ex((bup_path, 'index', '-v', data_path))
- ex((bup_path, 'save', '-d', str(save_time), '-tvvn', 'test',
- '--strip', data_path))
- ex((bup_path, 'tag', 'test-tag', 'test'))
+ os.mkdir(data_path + b'/dir')
+ with open(data_path + b'/file', 'wb+') as tmpfile:
+ tmpfile.write(b'canary\n')
+ symlink(b'file', data_path + b'/file-symlink')
+ symlink(b'dir', data_path + b'/dir-symlink')
+ symlink(b'not-there', data_path + b'/bad-symlink')
+ ex((bup_path, b'index', b'-v', data_path))
+ ex((bup_path, b'save', b'-d', b'%d' % save_time, b'-tvvn', b'test',
+ b'--strip', data_path))
+ ex((bup_path, b'tag', b'test-tag', b'test'))

- tip_hash = exo(('git', 'show-ref', 'refs/heads/test'))[0]
+ tip_hash = exo((b'git', b'show-ref', b'refs/heads/test'))[0]
tip_oidx = tip_hash.strip().split()[0]
- tip_oid = tip_oidx.decode('hex')
- tip_tree_oidx = exo(('git', 'log', '--pretty=%T', '-n1',
+ tip_oid = unhexlify(tip_oidx)
+ tip_tree_oidx = exo((b'git', b'log', b'--pretty=%T', b'-n1',
tip_oidx))[0].strip()
- tip_tree_oid = tip_tree_oidx.decode('hex')
+ tip_tree_oid = unhexlify(tip_tree_oidx)
tip_tree = tree_dict(repo, tip_tree_oid)
- test_revlist_w_meta = vfs.RevList(meta=tip_tree['.'].meta,
+ test_revlist_w_meta = vfs.RevList(meta=tip_tree[b'.'].meta,
oid=tip_oid)
expected_latest_item = vfs.Commit(meta=S_IFDIR | 0o755,
oid=tip_tree_oid,
coid=tip_oid)
- expected_latest_item_w_meta = vfs.Commit(meta=tip_tree['.'].meta,
+ expected_latest_item_w_meta = vfs.Commit(meta=tip_tree[b'.'].meta,
oid=tip_tree_oid,
coid=tip_oid)
expected_latest_link = vfs.FakeLink(meta=vfs.default_symlink_mode,
@@ -74,212 +77,212 @@ def test_resolve(repo, tmpdir):

wvstart('resolve: /')
vfs.clear_cache()
- res = resolve('/')
+ res = resolve(b'/')
wvpasseq(1, len(res))
- wvpasseq((('', vfs._root),), res)
+ wvpasseq(((b'', vfs._root),), res)
ignore, root_item = res[0]
root_content = frozenset(vfs.contents(repo, root_item))
- wvpasseq(frozenset([('.', root_item),
- ('.tag', vfs._tags),
- ('test', test_revlist_w_meta)]),
+ wvpasseq(frozenset([(b'.', root_item),
+ (b'.tag', vfs._tags),
+ (b'test', test_revlist_w_meta)]),
root_content)
- for path in ('//', '/.', '/./', '/..', '/../',
- '/test/latest/dir/../../..',
- '/test/latest/dir/../../../',
- '/test/latest/dir/../../../.',
- '/test/latest/dir/../../..//',
- '/test//latest/dir/../../..',
- '/test/./latest/dir/../../..',
- '/test/././latest/dir/../../..',
- '/test/.//./latest/dir/../../..',
- '/test//.//.//latest/dir/../../..'
- '/test//./latest/dir/../../..'):
- wvstart('resolve: ' + path)
+ for path in (b'//', b'/.', b'/./', b'/..', b'/../',
+ b'/test/latest/dir/../../..',
+ b'/test/latest/dir/../../../',
+ b'/test/latest/dir/../../../.',
+ b'/test/latest/dir/../../..//',
+ b'/test//latest/dir/../../..',
+ b'/test/./latest/dir/../../..',
+ b'/test/././latest/dir/../../..',
+ b'/test/.//./latest/dir/../../..',
+ b'/test//.//.//latest/dir/../../..'
+ b'/test//./latest/dir/../../..'):
+ wvstart('resolve: ' + path_msg(path))
vfs.clear_cache()
res = resolve(path)
- wvpasseq((('', vfs._root),), res)
+ wvpasseq(((b'', vfs._root),), res)

wvstart('resolve: /.tag')
vfs.clear_cache()
- res = resolve('/.tag')
+ res = resolve(b'/.tag')
wvpasseq(2, len(res))
- wvpasseq((('', vfs._root), ('.tag', vfs._tags)),
+ wvpasseq(((b'', vfs._root), (b'.tag', vfs._tags)),
res)
ignore, tag_item = res[1]
tag_content = frozenset(vfs.contents(repo, tag_item))
- wvpasseq(frozenset([('.', tag_item),
- ('test-tag', expected_test_tag_item)]),
+ wvpasseq(frozenset([(b'.', tag_item),
+ (b'test-tag', expected_test_tag_item)]),
tag_content)

wvstart('resolve: /test')
vfs.clear_cache()
- res = resolve('/test')
+ res = resolve(b'/test')
wvpasseq(2, len(res))
- wvpasseq((('', vfs._root), ('test', test_revlist_w_meta)), res)
+ wvpasseq(((b'', vfs._root), (b'test', test_revlist_w_meta)), res)
ignore, test_item = res[1]
test_content = frozenset(vfs.contents(repo, test_item))
# latest has metadata here due to caching
- wvpasseq(frozenset([('.', test_revlist_w_meta),
+ wvpasseq(frozenset([(b'.', test_revlist_w_meta),
(save_time_str, expected_latest_item_w_meta),
- ('latest', expected_latest_link)]),
+ (b'latest', expected_latest_link)]),
test_content)

wvstart('resolve: /test/latest')
vfs.clear_cache()
- res = resolve('/test/latest')
+ res = resolve(b'/test/latest')
wvpasseq(3, len(res))
- expected_latest_item_w_meta = vfs.Commit(meta=tip_tree['.'].meta,
+ expected_latest_item_w_meta = vfs.Commit(meta=tip_tree[b'.'].meta,
oid=tip_tree_oid,
coid=tip_oid)
- expected = (('', vfs._root),
- ('test', test_revlist_w_meta),
+ expected = ((b'', vfs._root),
+ (b'test', test_revlist_w_meta),
(save_time_str, expected_latest_item_w_meta))
wvpasseq(expected, res)
ignore, latest_item = res[2]
latest_content = frozenset(vfs.contents(repo, latest_item))
expected = frozenset((x.name, vfs.Item(oid=x.oid, meta=x.meta))
for x in (tip_tree[name]
- for name in ('.',
- 'bad-symlink',
- 'dir',
- 'dir-symlink',
- 'file',
- 'file-symlink')))
+ for name in (b'.',
+ b'bad-symlink',
+ b'dir',
+ b'dir-symlink',
+ b'file',
+ b'file-symlink')))
wvpasseq(expected, latest_content)

wvstart('resolve: /test/latest/file')
vfs.clear_cache()
- res = resolve('/test/latest/file')
+ res = resolve(b'/test/latest/file')
wvpasseq(4, len(res))
- expected_file_item_w_meta = vfs.Item(meta=tip_tree['file'].meta,
- oid=tip_tree['file'].oid)
- expected = (('', vfs._root),
- ('test', test_revlist_w_meta),
+ expected_file_item_w_meta = vfs.Item(meta=tip_tree[b'file'].meta,
+ oid=tip_tree[b'file'].oid)
+ expected = ((b'', vfs._root),
+ (b'test', test_revlist_w_meta),
(save_time_str, expected_latest_item_w_meta),
- ('file', expected_file_item_w_meta))
+ (b'file', expected_file_item_w_meta))
wvpasseq(expected, res)

wvstart('resolve: /test/latest/bad-symlink')
vfs.clear_cache()
- res = resolve('/test/latest/bad-symlink')
+ res = resolve(b'/test/latest/bad-symlink')
wvpasseq(4, len(res))
- expected = (('', vfs._root),
- ('test', test_revlist_w_meta),
+ expected = ((b'', vfs._root),
+ (b'test', test_revlist_w_meta),
(save_time_str, expected_latest_item_w_meta),
- ('not-there', None))
+ (b'not-there', None))
wvpasseq(expected, res)

wvstart('resolve nofollow: /test/latest/bad-symlink')
vfs.clear_cache()
- res = resolve('/test/latest/bad-symlink', follow=False)
+ res = resolve(b'/test/latest/bad-symlink', follow=False)
wvpasseq(4, len(res))
- bad_symlink_value = tip_tree['bad-symlink']
+ bad_symlink_value = tip_tree[b'bad-symlink']
expected_bad_symlink_item_w_meta = vfs.Item(meta=bad_symlink_value.meta,
oid=bad_symlink_value.oid)
- expected = (('', vfs._root),
- ('test', test_revlist_w_meta),
+ expected = ((b'', vfs._root),
+ (b'test', test_revlist_w_meta),
(save_time_str, expected_latest_item_w_meta),
- ('bad-symlink', expected_bad_symlink_item_w_meta))
+ (b'bad-symlink', expected_bad_symlink_item_w_meta))
wvpasseq(expected, res)

wvstart('resolve: /test/latest/file-symlink')
vfs.clear_cache()
- res = resolve('/test/latest/file-symlink')
+ res = resolve(b'/test/latest/file-symlink')
wvpasseq(4, len(res))
- expected = (('', vfs._root),
- ('test', test_revlist_w_meta),
+ expected = ((b'', vfs._root),
+ (b'test', test_revlist_w_meta),
(save_time_str, expected_latest_item_w_meta),
- ('file', expected_file_item_w_meta))
+ (b'file', expected_file_item_w_meta))
wvpasseq(expected, res)

wvstart('resolve nofollow: /test/latest/file-symlink')
vfs.clear_cache()
- res = resolve('/test/latest/file-symlink', follow=False)
+ res = resolve(b'/test/latest/file-symlink', follow=False)
wvpasseq(4, len(res))
- file_symlink_value = tip_tree['file-symlink']
+ file_symlink_value = tip_tree[b'file-symlink']
expected_file_symlink_item_w_meta = vfs.Item(meta=file_symlink_value.meta,
oid=file_symlink_value.oid)
- expected = (('', vfs._root),
- ('test', test_revlist_w_meta),
+ expected = ((b'', vfs._root),
+ (b'test', test_revlist_w_meta),
(save_time_str, expected_latest_item_w_meta),
- ('file-symlink', expected_file_symlink_item_w_meta))
+ (b'file-symlink', expected_file_symlink_item_w_meta))
wvpasseq(expected, res)

wvstart('resolve: /test/latest/missing')
vfs.clear_cache()
- res = resolve('/test/latest/missing')
+ res = resolve(b'/test/latest/missing')
wvpasseq(4, len(res))
name, item = res[-1]
- wvpasseq('missing', name)
+ wvpasseq(b'missing', name)
wvpass(item is None)

- for path in ('/test/latest/file/',
- '/test/latest/file/.',
- '/test/latest/file/..',
- '/test/latest/file/../',
- '/test/latest/file/../.',
- '/test/latest/file/../..',
- '/test/latest/file/foo'):
- wvstart('resolve: ' + path)
+ for path in (b'/test/latest/file/',
+ b'/test/latest/file/.',
+ b'/test/latest/file/..',
+ b'/test/latest/file/../',
+ b'/test/latest/file/../.',
+ b'/test/latest/file/../..',
+ b'/test/latest/file/foo'):
+ wvstart('resolve: ' + path_msg(path))
vfs.clear_cache()
try:
resolve(path)
except vfs.IOError as res_ex:
wvpasseq(ENOTDIR, res_ex.errno)
- wvpasseq(['', 'test', save_time_str, 'file'],
+ wvpasseq([b'', b'test', save_time_str, b'file'],
[name for name, item in res_ex.terminus])

- for path in ('/test/latest/file-symlink/',
- '/test/latest/file-symlink/.',
- '/test/latest/file-symlink/..',
- '/test/latest/file-symlink/../',
- '/test/latest/file-symlink/../.',
- '/test/latest/file-symlink/../..'):
- wvstart('resolve nofollow: ' + path)
+ for path in (b'/test/latest/file-symlink/',
+ b'/test/latest/file-symlink/.',
+ b'/test/latest/file-symlink/..',
+ b'/test/latest/file-symlink/../',
+ b'/test/latest/file-symlink/../.',
+ b'/test/latest/file-symlink/../..'):
+ wvstart('resolve nofollow: ' + path_msg(path))
vfs.clear_cache()
try:
resolve(path, follow=False)
except vfs.IOError as res_ex:
wvpasseq(ENOTDIR, res_ex.errno)
- wvpasseq(['', 'test', save_time_str, 'file'],
+ wvpasseq([b'', b'test', save_time_str, b'file'],
[name for name, item in res_ex.terminus])

wvstart('resolve: non-directory parent')
vfs.clear_cache()
- file_res = resolve('/test/latest/file')
+ file_res = resolve(b'/test/latest/file')
try:
- resolve('foo', parent=file_res)
+ resolve(b'foo', parent=file_res)
except vfs.IOError as res_ex:
wvpasseq(ENOTDIR, res_ex.errno)
wvpasseq(None, res_ex.terminus)

wvstart('resolve nofollow: /test/latest/dir-symlink')
vfs.clear_cache()
- res = resolve('/test/latest/dir-symlink', follow=False)
+ res = resolve(b'/test/latest/dir-symlink', follow=False)
wvpasseq(4, len(res))
- dir_symlink_value = tip_tree['dir-symlink']
+ dir_symlink_value = tip_tree[b'dir-symlink']
expected_dir_symlink_item_w_meta = vfs.Item(meta=dir_symlink_value.meta,
oid=dir_symlink_value.oid)
- expected = (('', vfs._root),
- ('test', test_revlist_w_meta),
+ expected = ((b'', vfs._root),
+ (b'test', test_revlist_w_meta),
(save_time_str, expected_latest_item_w_meta),
- ('dir-symlink', expected_dir_symlink_item_w_meta))
+ (b'dir-symlink', expected_dir_symlink_item_w_meta))
wvpasseq(expected, res)

- dir_value = tip_tree['dir']
+ dir_value = tip_tree[b'dir']
expected_dir_item = vfs.Item(oid=dir_value.oid,
- meta=tree_dict(repo, dir_value.oid)['.'].meta)
- expected = (('', vfs._root),
- ('test', test_revlist_w_meta),
+ meta=tree_dict(repo, dir_value.oid)[b'.'].meta)
+ expected = ((b'', vfs._root),
+ (b'test', test_revlist_w_meta),
(save_time_str, expected_latest_item_w_meta),
- ('dir', expected_dir_item))
+ (b'dir', expected_dir_item))
def lresolve(*args, **keys):
return resolve(*args, **dict(keys, follow=False))
for resname, resolver in (('resolve', resolve),
('resolve nofollow', lresolve)):
- for path in ('/test/latest/dir-symlink/',
- '/test/latest/dir-symlink/.'):
- wvstart(resname + ': ' + path)
+ for path in (b'/test/latest/dir-symlink/',
+ b'/test/latest/dir-symlink/.'):
+ wvstart(resname + ': ' + path_msg(path))
vfs.clear_cache()
res = resolver(path)
wvpasseq(4, len(res))
@@ -292,40 +295,40 @@ def test_resolve(repo, tmpdir):

@wvtest
def test_local_resolve():
- prep_and_test_repo('local-vfs-resolve',
+ prep_and_test_repo(b'local-vfs-resolve',
lambda x: LocalRepo(repo_dir=x), test_resolve)

@wvtest
def test_remote_resolve():
- prep_and_test_repo('remote-vfs-resolve',
+ prep_and_test_repo(b'remote-vfs-resolve',
lambda x: RemoteRepo(x), test_resolve)

def test_resolve_loop(repo, tmpdir):
- data_path = tmpdir + '/src'
- os.mkdir(data_path)
- symlink('loop', data_path + '/loop')
- ex((bup_path, 'init'))
- ex((bup_path, 'index', '-v', data_path))
- save_utc = 100000
- ex((bup_path, 'save', '-d', str(save_utc), '-tvvn', 'test', '--strip',
- data_path))
- save_name = strftime('%Y-%m-%d-%H%M%S', localtime(save_utc))
- try:
- wvpasseq('this call should never return',
- repo.resolve('/test/%s/loop' % save_name))
- except vfs.IOError as res_ex:
- wvpasseq(ELOOP, res_ex.errno)
- wvpasseq(['', 'test', save_name, 'loop'],
- [name for name, item in res_ex.terminus])
+ data_path = tmpdir + b'/src'
+ os.mkdir(data_path)
+ symlink(b'loop', data_path + b'/loop')
+ ex((bup_path, b'init'))
+ ex((bup_path, b'index', b'-v', data_path))
+ save_utc = 100000
+ ex((bup_path, b'save', b'-d', b'%d' % save_utc, b'-tvvn', b'test', b'--strip',
+ data_path))
+ save_name = strftime('%Y-%m-%d-%H%M%S', localtime(save_utc)).encode('ascii')
+ try:
+ wvpasseq('this call should never return',
+ repo.resolve(b'/test/%s/loop' % save_name))
+ except vfs.IOError as res_ex:
+ wvpasseq(ELOOP, res_ex.errno)
+ wvpasseq([b'', b'test', save_name, b'loop'],
+ [name for name, item in res_ex.terminus])

@wvtest
def test_local_resolve_loop():
- prep_and_test_repo('local-vfs-resolve-loop',
+ prep_and_test_repo(b'local-vfs-resolve-loop',
lambda x: LocalRepo(x), test_resolve_loop)

@wvtest
def test_remote_resolve_loop():
- prep_and_test_repo('remote-vfs-resolve-loop',
+ prep_and_test_repo(b'remote-vfs-resolve-loop',
lambda x: RemoteRepo(x), test_resolve_loop)

# FIXME: add tests for the want_meta=False cases.
--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 4 ++--
cmd/memtest-cmd.py | 49 +++++++++++++++++++++++++---------------------
lib/bup/_helpers.c | 2 +-
3 files changed, 30 insertions(+), 25 deletions(-)

diff --git a/Makefile b/Makefile
index a8541b50..4c232f40 100644
--- a/Makefile
+++ b/Makefile
@@ -168,6 +168,7 @@ runtests-python: all t/tmp
| tee -a t/tmp/test-log/$$$$.log

cmdline_tests := \
+ t/test.sh \
t/test-argv \
t/test-cat-file.sh \
t/test-command-without-init-fails.sh \
@@ -207,8 +208,7 @@ ifeq "2" "$(bup_python_majver)"
t/test-fuse.sh \
t/test-index-check-device.sh \
t/test-restore-map-owner.sh \
- t/test-xdev.sh \
- t/test.sh
+ t/test-xdev.sh
endif

tmp-target-run-test-get-%: all t/tmp
diff --git a/cmd/memtest-cmd.py b/cmd/memtest-cmd.py
index 3dc47dac..bf5f0d5a 100755
--- a/cmd/memtest-cmd.py
+++ b/cmd/memtest-cmd.py
@@ -11,6 +11,7 @@ import sys, re, struct, time, resource
from bup import git, bloom, midx, options, _helpers
from bup.compat import range
from bup.helpers import handle_ctrl_c
+from bup.io import byte_stream


handle_ctrl_c()
@@ -22,7 +23,7 @@ def linux_memstat():
#fields = ['VmSize', 'VmRSS', 'VmData', 'VmStk', 'ms']
d = {}
try:
- f = open('/proc/self/status')
+ f = open(b'/proc/self/status', 'rb')
except IOError as e:
if not _linux_warned:
log('Warning: %s\n' % e)
@@ -33,7 +34,7 @@ def linux_memstat():
# happens, this split() might not return two elements. We don't
# really need to care about the binary format since this output
# isn't used for much and report() can deal with missing entries.
- t = re.split(r':\s*', line.strip(), 1)
+ t = re.split(br':\s*', line.strip(), 1)
if len(t) == 2:
k,v = t
d[k] = v
@@ -41,14 +42,14 @@ def linux_memstat():


last = last_u = last_s = start = 0
-def report(count):
+def report(count, out):
global last, last_u, last_s, start
headers = ['RSS', 'MajFlt', 'user', 'sys', 'ms']
ru = resource.getrusage(resource.RUSAGE_SELF)
now = time.time()
- rss = int(ru.ru_maxrss/1024)
+ rss = int(ru.ru_maxrss // 1024)
if not rss:
- rss = linux_memstat().get('VmRSS', '??')
+ rss = linux_memstat().get(b'VmRSS', b'??')
fields = [rss,
ru.ru_majflt,
int((ru.ru_utime - last_u) * 1000),
@@ -56,11 +57,12 @@ def report(count):
int((now - last) * 1000)]
fmt = '%9s ' + ('%10s ' * len(fields))
if count >= 0:
- print(fmt % tuple([count] + fields))
+ line = fmt % tuple([count] + fields)
+ out.write(line.encode('ascii') + b'\n')
else:
start = now
- print(fmt % tuple([''] + headers))
- sys.stdout.flush()
+ out.write((fmt % tuple([''] + headers)).encode('ascii') + b'\n')
+ out.flush()

# don't include time to run report() in usage counts
ru = resource.getrusage(resource.RUSAGE_SELF)
@@ -84,11 +86,14 @@ if extra:
o.fatal('no arguments expected')

git.check_repo_or_die()
-m = git.PackIdxList(git.repo('objects/pack'), ignore_midx=opt.ignore_midx)
+m = git.PackIdxList(git.repo(b'objects/pack'), ignore_midx=opt.ignore_midx)

-report(-1)
+sys.stdout.flush()
+out = byte_stream(sys.stdout)
+
+report(-1, out)
_helpers.random_sha()
-report(0)
+report(0, out)

if opt.existing:
def foreverit(mi):
@@ -110,18 +115,18 @@ for c in range(opt.cycles):
# a collision in sha-1 by accident, which is so unlikely that
# we don't care.
assert(not m.exists(bin))
- report((c+1)*opt.number)
+ report((c+1)*opt.number, out)

if bloom._total_searches:
- print('bloom: %d objects searched in %d steps: avg %.3f steps/object'
- % (bloom._total_searches, bloom._total_steps,
- bloom._total_steps*1.0/bloom._total_searches))
+ out.write(b'bloom: %d objects searched in %d steps: avg %.3f steps/object\n'
+ % (bloom._total_searches, bloom._total_steps,
+ bloom._total_steps*1.0/bloom._total_searches))
if midx._total_searches:
- print('midx: %d objects searched in %d steps: avg %.3f steps/object'
- % (midx._total_searches, midx._total_steps,
- midx._total_steps*1.0/midx._total_searches))
+ out.write(b'midx: %d objects searched in %d steps: avg %.3f steps/object\n'
+ % (midx._total_searches, midx._total_steps,
+ midx._total_steps*1.0/midx._total_searches))
if git._total_searches:
- print('idx: %d objects searched in %d steps: avg %.3f steps/object'
- % (git._total_searches, git._total_steps,
- git._total_steps*1.0/git._total_searches))
-print('Total time: %.3fs' % (time.time() - start))
+ out.write(b'idx: %d objects searched in %d steps: avg %.3f steps/object\n'
+ % (git._total_searches, git._total_steps,
+ git._total_steps*1.0/git._total_searches))
+out.write(b'Total time: %.3fs\n' % (time.time() - start))
diff --git a/lib/bup/_helpers.c b/lib/bup/_helpers.c
index d325578f..312ecd45 100644
--- a/lib/bup/_helpers.c
+++ b/lib/bup/_helpers.c
@@ -1164,7 +1164,7 @@ static PyObject *random_sha(PyObject *self, PyObject *args)
memset(shabuf, 0, sizeof(shabuf));
for (i=0; i < 20/4; i++)
shabuf[i] = random();
- return Py_BuildValue("s#", shabuf, 20);
+ return Py_BuildValue(rbuf_argf, shabuf, 20);
}


--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/Makefile b/Makefile
index 1362d7f8..7fc522cc 100644
--- a/Makefile
+++ b/Makefile
@@ -182,6 +182,8 @@ cmdline_tests := \
t/test-meta.sh \
t/test-packsizelimit \
t/test-redundant-saves.sh \
+ t/test-restore-single-file.sh \
+ t/test-save-creates-no-unrefs.sh \
t/test-sparse-files.sh \
t/test-split-join.sh \
t/test-tz.sh
@@ -199,10 +201,8 @@ ifeq "2" "$(bup_python_majver)"
t/test-index-check-device.sh \
t/test-on.sh \
t/test-restore-map-owner.sh \
- t/test-restore-single-file.sh \
t/test-rm-between-index-and-save.sh \
t/test-save-with-valid-parent.sh \
- t/test-save-creates-no-unrefs.sh \
t/test-save-restore-excludes.sh \
t/test-save-strip-graft.sh \
t/test-import-duplicity.sh \
--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 2 +-
t/test-sparse-files.sh | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/Makefile b/Makefile
index 4a920497..ed792848 100644
--- a/Makefile
+++ b/Makefile
@@ -180,6 +180,7 @@ cmdline_tests := \
t/test-ls-remote \
t/test-meta.sh \
t/test-packsizelimit \
+ t/test-sparse-files.sh \
t/test-split-join.sh \
t/test-tz.sh

@@ -200,7 +201,6 @@ ifeq "2" "$(bup_python_majver)"
t/test-restore-single-file.sh \
t/test-rm-between-index-and-save.sh \
t/test-save-with-valid-parent.sh \
- t/test-sparse-files.sh \
t/test-redundant-saves.sh \
t/test-save-creates-no-unrefs.sh \
t/test-save-restore-excludes.sh \
diff --git a/t/test-sparse-files.sh b/t/test-sparse-files.sh
index 40d25d02..fdbfa4bc 100755
--- a/t/test-sparse-files.sh
+++ b/t/test-sparse-files.sh
@@ -19,7 +19,7 @@ WVPASS cd "$tmpdir"
# The 3MB guess is semi-arbitrary, but we've been informed that
# Lustre, for example, uses 1MB, so guess higher than that, at least.
block_size=$(bup-python -c \
- "import os; print getattr(os.stat('.'), 'st_blksize', 0) or $mb * 3") \
+ "import os; print(getattr(os.stat('.'), 'st_blksize', 0)) or $mb * 3") \
|| exit $?
data_size=$((block_size * 10))
readonly block_size data_size
--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:20 PM2/8/20
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
t/hardlink-sets | 20 +++++++++++++-------
1 file changed, 13 insertions(+), 7 deletions(-)

diff --git a/t/hardlink-sets b/t/hardlink-sets
index 99ab8838..bcb3cd0e 100755
--- a/t/hardlink-sets
+++ b/t/hardlink-sets
@@ -5,16 +5,19 @@ exec "$bup_python" "$0" ${1+"$@"}
"""
# end of bup preamble

-from __future__ import absolute_import
-
+from __future__ import absolute_import, print_function
import os, stat, sys

+from bup.compat import argv_bytes
+from bup.io import byte_stream
+
+
# Print the full paths of all the files in each hardlink set
# underneath one of the paths. Separate sets with a blank line, sort
# the paths within each set, and sort the sets by their first path.

def usage():
- print >> sys.stderr, "Usage: hardlink-sets <paths ...>"
+ print("Usage: hardlink-sets <paths ...>", file=sys.stderr)

if len(sys.argv) < 2:
usage()
@@ -23,15 +26,18 @@ if len(sys.argv) < 2:
def on_walk_error(e):
raise e

+sys.stdout.flush()
+out = byte_stream(sys.stdout)
+
hardlink_set = {}

-for p in sys.argv[1:]:
+for p in (argv_bytes(x) for x in sys.argv[1:]):
for root, dirs, files in os.walk(p, onerror = on_walk_error):
for filename in files:
full_path = os.path.join(root, filename)
st = os.lstat(full_path)
if not stat.S_ISDIR(st.st_mode):
- node = '%s:%s' % (st.st_dev, st.st_ino)
+ node = b'%d:%d' % (st.st_dev, st.st_ino)
link_paths = hardlink_set.get(node)
if link_paths:
link_paths.append(full_path)
@@ -48,8 +54,8 @@ for link_paths in sorted(hardlink_set.values(), key = lambda x : x[0]):
if first_set:
first_set = False
else:
- print
+ out.write(b'\n')
for p in sorted(link_paths):
- print p
+ out.write(p + b'\n')

sys.exit(0)
--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:21 PM2/8/20
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Makefile b/Makefile
index 257c2ce8..3aea558d 100644
--- a/Makefile
+++ b/Makefile
@@ -176,6 +176,7 @@ cmdline_tests := \
t/test-fsck.sh \
t/test-gc.sh \
t/test-import-duplicity.sh \
+ t/test-import-rdiff-backup.sh \
t/test-index.sh \
t/test-index-clear.sh \
t/test-list-idx.sh \
@@ -206,7 +207,6 @@ ifeq "2" "$(bup_python_majver)"
t/test-index-check-device.sh \
t/test-on.sh \
t/test-restore-map-owner.sh \
- t/test-import-rdiff-backup.sh \
t/test-xdev.sh \
t/test.sh
endif
--
2.24.1

Rob Browning

unread,
Feb 8, 2020, 2:26:21 PM2/8/20
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 2 +-
cmd/prune-older-cmd.py | 26 +++++---
lib/bup/helpers.py | 18 +++---
t/test-prune-older | 134 +++++++++++++++++++++--------------------
4 files changed, 96 insertions(+), 84 deletions(-)

diff --git a/Makefile b/Makefile
index 99305dae..67cc4b59 100644
--- a/Makefile
+++ b/Makefile
@@ -186,6 +186,7 @@ cmdline_tests := \
t/test-meta.sh \
t/test-on.sh \
t/test-packsizelimit \
+ t/test-prune-older \
t/test-redundant-saves.sh \
t/test-restore-single-file.sh \
t/test-rm.sh \
@@ -202,7 +203,6 @@ cmdline_tests := \
ifeq "2" "$(bup_python_majver)"
cmdline_tests += \
t/test-ftp \
- t/test-prune-older \
t/test-web.sh \
t/test-fuse.sh \
t/test-index-check-device.sh \
diff --git a/cmd/prune-older-cmd.py b/cmd/prune-older-cmd.py
index 96554233..fcc0fbd2 100755
--- a/cmd/prune-older-cmd.py
+++ b/cmd/prune-older-cmd.py
@@ -6,6 +6,7 @@ exec "$bup_python" "$0" ${1+"$@"}
# end of bup preamble

from __future__ import absolute_import, print_function
+from binascii import hexlify, unhexlify
from collections import defaultdict
from itertools import groupby
from sys import stderr
@@ -13,20 +14,22 @@ from time import localtime, strftime, time
import re, sys

from bup import git, options
-from bup.compat import int_types
+from bup.compat import argv_bytes, int_types
from bup.gc import bup_gc
from bup.helpers import die_if_errors, log, partition, period_as_secs
+from bup.io import byte_stream
from bup.repo import LocalRepo
from bup.rm import bup_rm


-def branches(refnames=()):
- return ((name[11:], sha.encode('hex')) for (name,sha)
- in git.list_refs(patterns=('refs/heads/' + n for n in refnames),
+def branches(refnames=tuple()):
+ return ((name[11:], hexlify(sha)) for (name,sha)
+ in git.list_refs(patterns=(b'refs/heads/' + n for n in refnames),
limit_to_heads=True))

def save_name(branch, utc):
- return branch + '/' + strftime('%Y-%m-%d-%H%M%S', localtime(utc))
+ return branch + b'/' \
+ + strftime('%Y-%m-%d-%H%M%S', localtime(utc)).encode('ascii')

def classify_saves(saves, period_start):
"""For each (utc, id) in saves, yield (True, (utc, id)) if the save
@@ -82,6 +85,7 @@ unsafe use the command even though it may be DANGEROUS

o = options.Options(optspec)
opt, flags, roots = o.parse(sys.argv[1:])
+roots = [argv_bytes(x) for x in roots]

if not opt.unsafe:
o.fatal('refusing to run dangerous, experimental command without --unsafe')
@@ -96,7 +100,7 @@ for period, extent in (('all', opt.keep_all_for),
('monthlies', opt.keep_monthlies_for),
('yearlies', opt.keep_yearlies_for)):
if extent:
- secs = period_as_secs(extent)
+ secs = period_as_secs(extent.encode('ascii'))
if not secs:
o.fatal('%r is not a valid period' % extent)
period_start[period] = now - secs
@@ -136,16 +140,20 @@ def parse_info(f):
author_secs = f.readline().strip()
return int(author_secs)

+sys.stdout.flush()
+out = byte_stream(sys.stdout)
+
removals = []
for branch, branch_id in branches(roots):
die_if_errors()
- saves = ((utc, oidx.decode('hex')) for (oidx, utc) in
- git.rev_list(branch_id, format='%at', parse=parse_info))
+ saves = ((utc, unhexlify(oidx)) for (oidx, utc) in
+ git.rev_list(branch_id, format=b'%at', parse=parse_info))
for keep_save, (utc, id) in classify_saves(saves, period_start):
assert(keep_save in (False, True))
# FIXME: base removals on hashes
if opt.pretend:
- print('+' if keep_save else '-', save_name(branch, utc))
+ out.write((b'+ ' if keep_save else b'- ')
+ + save_name(branch, utc) + b'\n')
elif not keep_save:
removals.append(save_name(branch, utc))

diff --git a/lib/bup/helpers.py b/lib/bup/helpers.py
index cfbdff49..c77f630f 100644
--- a/lib/bup/helpers.py
+++ b/lib/bup/helpers.py
@@ -1166,20 +1166,20 @@ def valid_save_name(name):
return True


-_period_rx = re.compile(r'^([0-9]+)(s|min|h|d|w|m|y)$')
+_period_rx = re.compile(br'^([0-9]+)(s|min|h|d|w|m|y)$')

def period_as_secs(s):
- if s == 'forever':
+ if s == b'forever':
return float('inf')
match = _period_rx.match(s)
if not match:
return None
mag = int(match.group(1))
scale = match.group(2)
- return mag * {'s': 1,
- 'min': 60,
- 'h': 60 * 60,
- 'd': 60 * 60 * 24,
- 'w': 60 * 60 * 24 * 7,
- 'm': 60 * 60 * 24 * 31,
- 'y': 60 * 60 * 24 * 366}[scale]
+ return mag * {b's': 1,
+ b'min': 60,
+ b'h': 60 * 60,
+ b'd': 60 * 60 * 24,
+ b'w': 60 * 60 * 24 * 7,
+ b'm': 60 * 60 * 24 * 31,
+ b'y': 60 * 60 * 24 * 366}[scale]
diff --git a/t/test-prune-older b/t/test-prune-older
index c460e821..a2ea4df0 100755
--- a/t/test-prune-older
+++ b/t/test-prune-older
@@ -9,7 +9,7 @@ from __future__ import absolute_import, print_function
from collections import defaultdict
from difflib import unified_diff
from itertools import chain, dropwhile, groupby, takewhile
-from os import environ, chdir
+from os import chdir
from os.path import abspath, dirname
from random import choice, randint
from shutil import copytree, rmtree
@@ -18,46 +18,46 @@ from sys import stderr
from time import localtime, strftime, time
import os, random, sys

-script_home = abspath(dirname(sys.argv[0] or '.'))
-sys.path[:0] = [abspath(script_home + '/../lib'), abspath(script_home + '/..')]
-top = os.getcwd()
-bup_cmd = top + '/bup'
+# For buptest, wvtest, ...
+sys.path[:0] = (abspath(os.path.dirname(__file__) + '/..'),)

from buptest import ex, exo, test_tempdir
from wvtest import wvfail, wvpass, wvpasseq, wvpassne, wvstart

from bup import compat
+from bup.compat import environ
from bup.helpers import partition, period_as_secs, readpipe
+import bup.path


def create_older_random_saves(n, start_utc, end_utc):
- with open('foo', 'w') as f:
+ with open(b'foo', 'wb') as f:
pass
- ex(['git', 'add', 'foo'])
+ ex([b'git', b'add', b'foo'])
utcs = set()
while len(utcs) != n:
utcs.add(randint(start_utc, end_utc))
utcs = sorted(utcs)
for utc in utcs:
- with open('foo', 'w') as f:
- f.write(str(utc) + '\n')
- ex(['git', 'commit', '--date', str(utc), '-qam', str(utc)])
- ex(['git', 'gc', '--aggressive'])
+ with open(b'foo', 'wb') as f:
+ f.write(b'%d\n' % utc)
+ ex([b'git', b'commit', b'--date', b'%d' % utc, b'-qam', b'%d' % utc])
+ ex([b'git', b'gc', b'--aggressive'])
return utcs

# There is corresponding code in bup for some of this, but the
# computation method is different here, in part so that the test can
# provide a more effective cross-check.

-period_kinds = ['all', 'dailies', 'monthlies', 'yearlies']
-period_scale = {'s': 1,
- 'min': 60,
- 'h': 60 * 60,
- 'd': 60 * 60 * 24,
- 'w': 60 * 60 * 24 * 7,
- 'm': 60 * 60 * 24 * 31,
- 'y': 60 * 60 * 24 * 366}
-period_scale_kinds = period_scale.keys()
+period_kinds = [b'all', b'dailies', b'monthlies', b'yearlies']
+period_scale = {b's': 1,
+ b'min': 60,
+ b'h': 60 * 60,
+ b'd': 60 * 60 * 24,
+ b'w': 60 * 60 * 24 * 7,
+ b'm': 60 * 60 * 24 * 31,
+ b'y': 60 * 60 * 24 * 366}
+period_scale_kinds = list(period_scale.keys())

def expected_retentions(utcs, utc_start, spec):
if not spec:
@@ -68,20 +68,20 @@ def expected_retentions(utcs, utc_start, spec):
period_start[kind] = utc_start - period_as_secs(duration)
period_start = defaultdict(lambda: float('inf'), period_start)

- all = list(takewhile(lambda x: x >= period_start['all'], utcs))
- utcs = list(dropwhile(lambda x: x >= period_start['all'], utcs))
+ all = list(takewhile(lambda x: x >= period_start[b'all'], utcs))
+ utcs = list(dropwhile(lambda x: x >= period_start[b'all'], utcs))

- matches = takewhile(lambda x: x >= period_start['dailies'], utcs)
+ matches = takewhile(lambda x: x >= period_start[b'dailies'], utcs)
dailies = [max(day_utcs) for yday, day_utcs
in groupby(matches, lambda x: localtime(x).tm_yday)]
- utcs = list(dropwhile(lambda x: x >= period_start['dailies'], utcs))
+ utcs = list(dropwhile(lambda x: x >= period_start[b'dailies'], utcs))

- matches = takewhile(lambda x: x >= period_start['monthlies'], utcs)
+ matches = takewhile(lambda x: x >= period_start[b'monthlies'], utcs)
monthlies = [max(month_utcs) for month, month_utcs
in groupby(matches, lambda x: localtime(x).tm_mon)]
- utcs = dropwhile(lambda x: x >= period_start['monthlies'], utcs)
+ utcs = dropwhile(lambda x: x >= period_start[b'monthlies'], utcs)

- matches = takewhile(lambda x: x >= period_start['yearlies'], utcs)
+ matches = takewhile(lambda x: x >= period_start[b'yearlies'], utcs)
yearlies = [max(year_utcs) for year, year_utcs
in groupby(matches, lambda x: localtime(x).tm_year)]

@@ -95,14 +95,14 @@ def period_spec(start_utc, end_utc):
while len(result) < desired_specs:
period = None
if randint(1, 100) <= 5:
- period = 'forever'
+ period = b'forever'
else:
assert(end_utc > start_utc)
period_secs = randint(1, end_utc - start_utc)
scale = choice(period_scale_kinds)
mag = int(float(period_secs) / period_scale[scale])
if mag != 0:
- period = str(mag) + scale
+ period = (b'%d' % mag) + scale
if period:
result += [(choice(period_kinds), period)]
return tuple(result)
@@ -114,16 +114,17 @@ def unique_period_specs(n, start_utc, end_utc):
return tuple(invocations)

def period_spec_to_period_args(spec):
- return tuple(chain(*(('--keep-' + kind + '-for', period)
+ return tuple(chain(*((b'--keep-' + kind + b'-for', period)
for kind, period in spec)))

def result_diffline(x):
- return str(x) + strftime(' %Y-%m-%d-%H%M%S', localtime(x)) + '\n'
+ return (b'%d %s\n'
+ % (x, strftime(' %Y-%m-%d-%H%M%S', localtime(x)).encode('ascii')))

def check_prune_result(expected):
actual = sorted([int(x)
- for x in exo(['git', 'log',
- '--pretty=format:%at']).out.splitlines()])
+ for x in exo([b'git', b'log',
+ b'--pretty=format:%at']).out.splitlines()])
if expected != actual:
for x in expected:
print('ex:', x, strftime('%Y-%m-%d-%H%M%S', localtime(x)),
@@ -135,45 +136,47 @@ def check_prune_result(expected):
wvpass(expected == actual)


-environ['GIT_AUTHOR_NAME'] = 'bup test'
-environ['GIT_COMMITTER_NAME'] = 'bup test'
-environ['GIT_AUTHOR_EMAIL'] = 'bup@a425bc70a02811e49bdf73ee56450e6f'
-environ['GIT_COMMITTER_EMAIL'] = 'bup@a425bc70a02811e49bdf73ee56450e6f'
+environ[b'GIT_AUTHOR_NAME'] = b'bup test'
+environ[b'GIT_COMMITTER_NAME'] = b'bup test'
+environ[b'GIT_AUTHOR_EMAIL'] = b'bup@a425bc70a02811e49bdf73ee56450e6f'
+environ[b'GIT_COMMITTER_EMAIL'] = b'bup@a425bc70a02811e49bdf73ee56450e6f'

-seed = int(environ.get('BUP_TEST_SEED', time()))
+seed = int(environ.get(b'BUP_TEST_SEED', time()))
random.seed(seed)
print('random seed:', seed, file=stderr)

-save_population = int(environ.get('BUP_TEST_PRUNE_OLDER_SAVES', 2000))
-prune_cycles = int(environ.get('BUP_TEST_PRUNE_OLDER_CYCLES', 20))
-prune_gc_cycles = int(environ.get('BUP_TEST_PRUNE_OLDER_GC_CYCLES', 10))
+save_population = int(environ.get(b'BUP_TEST_PRUNE_OLDER_SAVES', 2000))
+prune_cycles = int(environ.get(b'BUP_TEST_PRUNE_OLDER_CYCLES', 20))
+prune_gc_cycles = int(environ.get(b'BUP_TEST_PRUNE_OLDER_GC_CYCLES', 10))

-with test_tempdir('prune-older-') as tmpdir:
- environ['BUP_DIR'] = tmpdir + '/work/.git'
- environ['GIT_DIR'] = tmpdir + '/work/.git'
+bup_cmd = bup.path.exe()
+
+with test_tempdir(b'prune-older-') as tmpdir:
+ environ[b'BUP_DIR'] = tmpdir + b'/work/.git'
+ environ[b'GIT_DIR'] = tmpdir + b'/work/.git'
now = int(time())
three_years_ago = now - (60 * 60 * 24 * 366 * 3)
chdir(tmpdir)
- ex(['git', 'init', 'work'])
- ex(['git', 'config', 'gc.autoDetach', 'false'])
+ ex([b'git', b'init', b'work'])
+ ex([b'git', b'config', b'gc.autoDetach', b'false'])

wvstart('generating ' + str(save_population) + ' random saves')
- chdir(tmpdir + '/work')
+ chdir(tmpdir + b'/work')
save_utcs = create_older_random_saves(save_population, three_years_ago, now)
chdir(tmpdir)
- test_set_hash = exo(['git', 'show-ref', '-s', 'master']).out.rstrip()
- ls_saves = exo((bup_cmd, 'ls', 'master')).out.splitlines()
+ test_set_hash = exo([b'git', b'show-ref', b'-s', b'master']).out.rstrip()
+ ls_saves = exo((bup_cmd, b'ls', b'master')).out.splitlines()
wvpasseq(save_population + 1, len(ls_saves))

wvstart('ensure everything kept, if no keep arguments')
- ex(['git', 'reset', '--hard', test_set_hash])
+ ex([b'git', b'reset', b'--hard', test_set_hash])
proc = ex((bup_cmd,
- 'prune-older', '-v', '--unsafe', '--no-gc',
- '--wrt', str(now)) \
- + ('master',),
+ b'prune-older', b'-v', b'--unsafe', b'--no-gc',
+ b'--wrt', b'%d' % now) \
+ + (b'master',),
stdout=None, stderr=PIPE, check=False)
wvpassne(proc.rc, 0)
- wvpass('at least one keep argument is required' in proc.err)
+ wvpass(b'at least one keep argument is required' in proc.err)
check_prune_result(save_utcs)


@@ -182,32 +185,33 @@ with test_tempdir('prune-older-') as tmpdir:
for spec in unique_period_specs(prune_cycles,
# Make it more likely we'll have
# some outside the save range.
- three_years_ago - period_scale['m'],
+ three_years_ago - period_scale[b'm'],
now):
- ex(['git', 'reset', '--hard', test_set_hash])
+ ex([b'git', b'reset', b'--hard', test_set_hash])
expected = sorted(expected_retentions(save_utcs, now, spec))
ex((bup_cmd,
- 'prune-older', '-v', '--unsafe', '--no-gc', '--wrt', str(now)) \
+ b'prune-older', b'-v', b'--unsafe', b'--no-gc', b'--wrt',
+ b'%d' % now) \
+ period_spec_to_period_args(spec) \
- + ('master',))
+ + (b'master',))
check_prune_result(expected)


# More expensive because we have to recreate the repo each time
wvstart('running %d generative gc tests on %d saves' % (prune_gc_cycles,
save_population))
- ex(['git', 'reset', '--hard', test_set_hash])
- copytree('work/.git', 'clean-test-repo', symlinks=True)
+ ex([b'git', b'reset', b'--hard', test_set_hash])
+ copytree(b'work/.git', b'clean-test-repo', symlinks=True)
for spec in unique_period_specs(prune_gc_cycles,
# Make it more likely we'll have
# some outside the save range.
- three_years_ago - period_scale['m'],
+ three_years_ago - period_scale[b'm'],
now):
- rmtree('work/.git')
- copytree('clean-test-repo', 'work/.git')
+ rmtree(b'work/.git')
+ copytree(b'clean-test-repo', b'work/.git')
expected = sorted(expected_retentions(save_utcs, now, spec))
ex((bup_cmd,
- 'prune-older', '-v', '--unsafe', '--wrt', str(now)) \
+ b'prune-older', b'-v', b'--unsafe', b'--wrt', b'%d' % now) \
+ period_spec_to_period_args(spec) \
- + ('master',))
+ + (b'master',))
check_prune_result(expected)
--
2.24.1

Rob Browning

Feb 8, 2020, 2:26:21 PM
to bup-...@googlegroups.com
The python 3 version could have issues until the fuse module supports
binary data more completely (e.g. bytes paths), or until we switch to
some other foundation, but it may be OK even so (with some
inefficiency) given our bup-python iso-8859-1 hack.
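
For context, here is a minimal illustration (not bup code; it just assumes
the usual Python 3 os.fsdecode()/os.fsencode() surrogateescape behavior) of
the round-trip that the new fsdecode() calls and argv_bytes() re-encoding in
fuse-cmd.py depend on:

  import os

  raw = b'caf\xe9/\xff-odd-name'     # hypothetical non-UTF-8 path bytes
  as_str = os.fsdecode(raw)          # str form handed to the fuse module
  assert os.fsencode(as_str) == raw  # re-encoding recovers the same bytes
  print(repr(as_str))

Undecodable bytes come back as lone surrogates, so the decode/encode pair is
lossless even for filenames that aren't valid UTF-8, just potentially slower
than working with bytes throughout.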

Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 2 +-
cmd/fuse-cmd.py | 50 +++++++++++++++++++++++++++++++++++++++++------
lib/bup/compat.py | 5 ++++-
t/test-fuse.sh | 4 ++--
4 files changed, 51 insertions(+), 10 deletions(-)

diff --git a/Makefile b/Makefile
index 4c232f40..50a34a93 100644
--- a/Makefile
+++ b/Makefile
@@ -175,6 +175,7 @@ cmdline_tests := \
t/test-compression.sh \
t/test-drecurse.sh \
t/test-fsck.sh \
+ t/test-fuse.sh \
t/test-ftp \
t/test-gc.sh \
t/test-import-duplicity.sh \
@@ -205,7 +206,6 @@ cmdline_tests := \
ifeq "2" "$(bup_python_majver)"
cmdline_tests += \
t/test-web.sh \
- t/test-fuse.sh \
t/test-index-check-device.sh \
t/test-restore-map-owner.sh \
t/test-xdev.sh
diff --git a/cmd/fuse-cmd.py b/cmd/fuse-cmd.py
index 8e57ab6f..dbc0cd1f 100755
--- a/cmd/fuse-cmd.py
+++ b/cmd/fuse-cmd.py
@@ -5,24 +5,44 @@ exec "$bup_python" "$0" ${1+"$@"}
"""
# end of bup preamble

-from __future__ import absolute_import
+from __future__ import absolute_import, print_function
import sys, os, errno

try:
import fuse
except ImportError:
- log('error: cannot find the python "fuse" module; please install it\n')
- sys.exit(1)
+ print('error: cannot find the python "fuse" module; please install it',
+ file=sys.stderr)
+ sys.exit(2)
if not hasattr(fuse, '__version__'):
- raise RuntimeError('your fuse module is too old for fuse.__version__')
+ print('error: fuse module is too old for fuse.__version__', file=sys.stderr)
+ sys.exit(2)
fuse.fuse_python_api = (0, 2)

+if sys.version_info[0] > 2:
+ try:
+ fuse_ver = fuse.__version__.split('.')
+ fuse_ver_maj = int(fuse_ver[0])
+ except:
+        print('error: cannot determine the fuse major version; please report',
+              file=sys.stderr)
+ sys.exit(2)
+ if len(fuse_ver) < 3 or fuse_ver_maj < 1:
+ print("error: fuse module can't handle binary data; please upgrade to 1.0+\n",
+ file=sys.stderr)
+ sys.exit(2)
+
from bup import options, git, vfs, xstat
+from bup.compat import argv_bytes, fsdecode, py_maj
from bup.helpers import log
from bup.repo import LocalRepo

+
# FIXME: self.meta and want_meta?

+# The path handling is just wrong, but the current fuse module can't
+# handle bytes paths.
+
class BupFs(fuse.Fuse):
def __init__(self, repo, verbose=0, fake_metadata=False):
fuse.Fuse.__init__(self)
@@ -31,6 +51,7 @@ class BupFs(fuse.Fuse):
self.fake_metadata = fake_metadata

def getattr(self, path):
+ path = argv_bytes(path)
global opt
if self.verbose > 0:
log('--getattr(%r)\n' % path)
@@ -56,6 +77,7 @@ class BupFs(fuse.Fuse):
return st

def readdir(self, path, offset):
+ path = argv_bytes(path)
assert not offset # We don't return offsets, so offset should be unused
res = vfs.resolve(self.repo, path, follow=False)
dir_name, dir_item = res[-1]
@@ -64,18 +86,21 @@ class BupFs(fuse.Fuse):
yield fuse.Direntry('..')
# FIXME: make sure want_meta=False is being completely respected
for ent_name, ent_item in vfs.contents(repo, dir_item, want_meta=False):
- yield fuse.Direntry(ent_name.replace('/', '-'))
+ fusename = fsdecode(ent_name.replace(b'/', b'-'))
+ yield fuse.Direntry(fusename)

def readlink(self, path):
+ path = argv_bytes(path)
if self.verbose > 0:
log('--readlink(%r)\n' % path)
res = vfs.resolve(self.repo, path, follow=False)
name, item = res[-1]
if not item:
return -errno.ENOENT
- return vfs.readlink(repo, item)
+ return fsdecode(vfs.readlink(repo, item))

def open(self, path, flags):
+ path = argv_bytes(path)
if self.verbose > 0:
log('--open(%r)\n' % path)
res = vfs.resolve(self.repo, path, follow=False)
@@ -90,6 +115,7 @@ class BupFs(fuse.Fuse):
#return vfs.fopen(repo, item)

def read(self, path, size, offset):
+ path = argv_bytes(path)
if self.verbose > 0:
log('--read(%r)\n' % path)
res = vfs.resolve(self.repo, path, follow=False)
@@ -100,6 +126,7 @@ class BupFs(fuse.Fuse):
f.seek(offset)
return f.read(size)

+
optspec = """
bup fuse [-d] [-f] <mountpoint>
--
@@ -111,6 +138,14 @@ v,verbose increase log output (can be used more than once)
"""
o = options.Options(optspec)
opt, flags, extra = o.parse(sys.argv[1:])
+if not opt.verbose:
+ opt.verbose = 0
+
+# Set stderr to be line buffered, even if it's not connected to the console
+# so that we'll be able to see diagnostics in a timely fashion.
+errfd = sys.stderr.fileno()
+sys.stderr.flush()
+sys.stderr = os.fdopen(errfd, 'w', 1)

if len(extra) != 1:
o.fatal('only one mount point argument expected')
@@ -118,7 +153,10 @@ if len(extra) != 1:
git.check_repo_or_die()
repo = LocalRepo()
f = BupFs(repo=repo, verbose=opt.verbose, fake_metadata=(not opt.meta))
+
+# This is likely wrong, but the fuse module doesn't currently accept bytes
f.fuse_args.mountpoint = extra[0]
+
if opt.debug:
f.fuse_args.add('debug')
if opt.foreground:
diff --git a/lib/bup/compat.py b/lib/bup/compat.py
index 03041f35..e59f4d38 100644
--- a/lib/bup/compat.py
+++ b/lib/bup/compat.py
@@ -25,7 +25,7 @@ if py3:
file=sys.stderr)
sys.exit(2)

- from os import fsencode
+ from os import fsdecode, fsencode
from shlex import quote
input = input
range = range
@@ -76,6 +76,9 @@ if py3:

else: # Python 2

+ def fsdecode(x):
+ return x
+
def fsencode(x):
return x

diff --git a/t/test-fuse.sh b/t/test-fuse.sh
index 1732d66f..1ed35542 100755
--- a/t/test-fuse.sh
+++ b/t/test-fuse.sh
@@ -41,7 +41,7 @@ savename()
{
readonly secs="$1"
WVPASS bup-python -c "from time import strftime, localtime; \
- print strftime('%Y-%m-%d-%H%M%S', localtime($secs))"
+ print(strftime('%Y-%m-%d-%H%M%S', localtime($secs)))"
}

export TZ=UTC
@@ -49,7 +49,7 @@ export TZ=UTC
WVPASS bup init
WVPASS cd "$tmpdir"

-savestamp1=$(WVPASS bup-python -c 'import time; print int(time.time())') || exit $?
+savestamp1=$(WVPASS bup-python -c 'import time; print(int(time.time()))') || exit $?
savestamp2=$(($savestamp1 + 1))

savename1="$(savename "$savestamp1")" || exit $?
--
2.24.1

Rob Browning

Feb 8, 2020, 2:26:21 PM
to bup-...@googlegroups.com
Thanks to Johannes Berg for pointing out some overlooked conversions
in a previous version of the patch.

Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 20 +-
lib/bup/compat.py | 5 +-
t/test-get | 957 +++++++++++++++++++++++-----------------------
3 files changed, 497 insertions(+), 485 deletions(-)

diff --git a/Makefile b/Makefile
index 50a34a93..ac18a583 100644
--- a/Makefile
+++ b/Makefile
@@ -215,18 +215,14 @@ tmp-target-run-test-get-%: all t/tmp
$(pf); cd $$(pwd -P); TMPDIR="$(test_tmp)" \
t/test-get $* 2>&1 | tee -a t/tmp/test-log/$$$$.log

-test_get_targets :=
-
-ifeq "2" "$(bup_python_majver)"
- test_get_targets += \
- tmp-target-run-test-get-replace \
- tmp-target-run-test-get-universal \
- tmp-target-run-test-get-ff \
- tmp-target-run-test-get-append \
- tmp-target-run-test-get-pick \
- tmp-target-run-test-get-new-tag \
- tmp-target-run-test-get-unnamed
-endif
+test_get_targets += \
+ tmp-target-run-test-get-replace \
+ tmp-target-run-test-get-universal \
+ tmp-target-run-test-get-ff \
+ tmp-target-run-test-get-append \
+ tmp-target-run-test-get-pick \
+ tmp-target-run-test-get-new-tag \
+ tmp-target-run-test-get-unnamed

# For parallel runs.
# The "pwd -P" here may not be appropriate in the long run, but we
diff --git a/lib/bup/compat.py b/lib/bup/compat.py
index e59f4d38..985d8ac4 100644
--- a/lib/bup/compat.py
+++ b/lib/bup/compat.py
@@ -74,6 +74,9 @@ if py3:
"""Return the concatenated bytes or memoryview arguments as bytes."""
return b''.join(items)

+ def getcwd():
+ return fsencode(os.getcwd())
+
else: # Python 2

def fsdecode(x):
@@ -83,7 +86,7 @@ else: # Python 2
return x

from pipes import quote
- from os import environ
+ from os import environ, getcwd

from bup.py2raise import reraise

diff --git a/t/test-get b/t/test-get
index 7f5c7d0e..f6fe8cc8 100755
--- a/t/test-get
+++ b/t/test-get
@@ -7,32 +7,37 @@ exec "$bup_python" "$0" ${1+"$@"}

from __future__ import print_function
from errno import ENOENT
-from os import chdir, environ, getcwd, mkdir, rename
+from os import chdir, mkdir, rename
from os.path import abspath, dirname
-from pipes import quote
from shutil import rmtree
from subprocess import PIPE
-import re, sys
+import os, re, sys

-script_home = abspath(dirname(sys.argv[0] or '.'))
-sys.path[:0] = [abspath(script_home + '/../lib'), abspath(script_home + '/..')]
+# For buptest, wvtest, ...
+sys.path[:0] = (abspath(os.path.dirname(__file__) + '/..'),)

-from bup import compat
-from bup.helpers import merge_dict, unlink
+from bup import compat, path
+from bup.compat import environ, getcwd, items
+from bup.helpers import bquote, merge_dict, unlink
+from bup.io import byte_stream
from buptest import ex, exo, test_tempdir
from wvtest import wvcheck, wvfail, wvmsg, wvpass, wvpasseq, wvpassne, wvstart
+import bup.path
+
+sys.stdout.flush()
+stdout = byte_stream(sys.stdout)

# FIXME: per-test function
-environ['GIT_AUTHOR_NAME'] = 'bup test-get'
-environ['GIT_COMMITTER_NAME'] = 'bup test-get'
-environ['GIT_AUTHOR_EMAIL'] = 'bup@85430dcca2b611e4b2c3-8f5691723476'
-environ['GIT_COMMITTER_EMAIL'] = 'bup@85430dcca2b611e4b2c3-8f5691723476'
+environ[b'GIT_AUTHOR_NAME'] = b'bup test-get'
+environ[b'GIT_COMMITTER_NAME'] = b'bup test-get'
+environ[b'GIT_AUTHOR_EMAIL'] = b'bup@85430dcca2b611e4b2c3-8f5691723476'
+environ[b'GIT_COMMITTER_EMAIL'] = b'bup@85430dcca2b611e4b2c3-8f5691723476'

# The clean-repo test can probably be applied more broadly. It was
# initially just applied to test-pick to catch a bug.

top = getcwd()
-bup_cmd = top + '/bup'
+bup_cmd = bup.path.exe()

def rmrf(path):
err = [] # because python's scoping mess...
@@ -47,8 +52,8 @@ def rmrf(path):

def verify_trees_match(path1, path2):
global top
- exr = exo((top + '/t/compare-trees', '-c', path1, path2), check=False)
- print(exr.out)
+ exr = exo((top + b'/t/compare-trees', b'-c', path1, path2), check=False)
+ stdout.write(exr.out)
sys.stdout.flush()
wvcheck(exr.rc == 0, 'process exit %d == 0' % exr.rc)

@@ -56,7 +61,7 @@ def verify_rcz(cmd, **kwargs):
assert not kwargs.get('check')
kwargs['check'] = False
result = exo(cmd, **kwargs)
- print(result.out)
+ stdout.write(result.out)
rc = result.proc.returncode
wvcheck(rc == 0, 'process exit %d == 0' % rc)
return result
@@ -70,103 +75,104 @@ def verify_nrx(rx, string):
wvcheck(not re.search(rx, string), "rx %r doesn't match %r" % (rx, string))

def validate_clean_repo():
- out = verify_rcz(('git', '--git-dir', 'get-dest', 'fsck')).out
- verify_nrx(r'dangling|mismatch|missing|unreachable', out)
+ out = verify_rcz((b'git', b'--git-dir', b'get-dest', b'fsck')).out
+ verify_nrx(br'dangling|mismatch|missing|unreachable', out)

def validate_blob(src_id, dest_id):
global top
- rmrf('restore-src')
- rmrf('restore-dest')
- cat_tree = top + '/t/git-cat-tree'
- src_blob = verify_rcz((cat_tree, '--git-dir', 'get-src', src_id)).out
- dest_blob = verify_rcz((cat_tree, '--git-dir', 'get-src', src_id)).out
+ rmrf(b'restore-src')
+ rmrf(b'restore-dest')
+ cat_tree = top + b'/t/git-cat-tree'
+ src_blob = verify_rcz((cat_tree, b'--git-dir', b'get-src', src_id)).out
+ dest_blob = verify_rcz((cat_tree, b'--git-dir', b'get-src', src_id)).out
wvpasseq(src_blob, dest_blob)

def validate_tree(src_id, dest_id):

- rmrf('restore-src')
- rmrf('restore-dest')
- mkdir('restore-src')
- mkdir('restore-dest')
+ rmrf(b'restore-src')
+ rmrf(b'restore-dest')
+ mkdir(b'restore-src')
+ mkdir(b'restore-dest')

- commit_env = merge_dict(environ, {'GIT_COMMITTER_DATE': '2014-01-01 01:01'})
+ commit_env = merge_dict(environ, {b'GIT_COMMITTER_DATE': b'2014-01-01 01:01'})

# Create a commit so the archive contents will have matching timestamps.
- src_c = exo(('git', '--git-dir', 'get-src',
- 'commit-tree', '-m', 'foo', src_id),
+ src_c = exo((b'git', b'--git-dir', b'get-src',
+ b'commit-tree', b'-m', b'foo', src_id),
env=commit_env).out.strip()
- dest_c = exo(('git', '--git-dir', 'get-dest',
- 'commit-tree', '-m', 'foo', dest_id),
+ dest_c = exo((b'git', b'--git-dir', b'get-dest',
+ b'commit-tree', b'-m', b'foo', dest_id),
env=commit_env).out.strip()
- exr = verify_rcz('git --git-dir get-src archive %s | tar xvf - -C restore-src'
- % quote(src_c),
+ exr = verify_rcz(b'git --git-dir get-src archive %s | tar xvf - -C restore-src'
+ % bquote(src_c),
shell=True)
if exr.rc != 0: return False
- exr = verify_rcz('git --git-dir get-dest archive %s | tar xvf - -C restore-dest'
- % quote(dest_c),
+ exr = verify_rcz(b'git --git-dir get-dest archive %s | tar xvf - -C restore-dest'
+ % bquote(dest_c),
shell=True)
if exr.rc != 0: return False

# git archive doesn't include an entry for ./.
- unlink('restore-src/pax_global_header')
- unlink('restore-dest/pax_global_header')
- ex(('touch', '-r', 'restore-src', 'restore-dest'))
- verify_trees_match('restore-src/', 'restore-dest/')
- rmrf('restore-src')
- rmrf('restore-dest')
+ unlink(b'restore-src/pax_global_header')
+ unlink(b'restore-dest/pax_global_header')
+ ex((b'touch', b'-r', b'restore-src', b'restore-dest'))
+ verify_trees_match(b'restore-src/', b'restore-dest/')
+ rmrf(b'restore-src')
+ rmrf(b'restore-dest')

def validate_commit(src_id, dest_id):
- exr = verify_rcz(('git', '--git-dir', 'get-src', 'cat-file', 'commit', src_id))
+ exr = verify_rcz((b'git', b'--git-dir', b'get-src', b'cat-file', b'commit', src_id))
if exr.rc != 0: return False
src_cat = exr.out
- exr = verify_rcz(('git', '--git-dir', 'get-dest', 'cat-file', 'commit', dest_id))
+ exr = verify_rcz((b'git', b'--git-dir', b'get-dest', b'cat-file', b'commit', dest_id))
if exr.rc != 0: return False
dest_cat = exr.out
wvpasseq(src_cat, dest_cat)
if src_cat != dest_cat: return False

- rmrf('restore-src')
- rmrf('restore-dest')
- mkdir('restore-src')
- mkdir('restore-dest')
- qsrc = quote(src_id)
- qdest = quote(dest_id)
- exr = verify_rcz(('git --git-dir get-src archive ' + qsrc
- + ' | tar xf - -C restore-src'),
+ rmrf(b'restore-src')
+ rmrf(b'restore-dest')
+ mkdir(b'restore-src')
+ mkdir(b'restore-dest')
+ qsrc = bquote(src_id)
+ qdest = bquote(dest_id)
+ exr = verify_rcz((b'git --git-dir get-src archive ' + qsrc
+ + b' | tar xf - -C restore-src'),
shell=True)
if exr.rc != 0: return False
- exr = verify_rcz(('git --git-dir get-dest archive ' + qdest +
- ' | tar xf - -C restore-dest'),
+ exr = verify_rcz((b'git --git-dir get-dest archive ' + qdest +
+ b' | tar xf - -C restore-dest'),
shell=True)
if exr.rc != 0: return False

# git archive doesn't include an entry for ./.
- ex(('touch', '-r', 'restore-src', 'restore-dest'))
- verify_trees_match('restore-src/', 'restore-dest/')
- rmrf('restore-src')
- rmrf('restore-dest')
+ ex((b'touch', b'-r', b'restore-src', b'restore-dest'))
+ verify_trees_match(b'restore-src/', b'restore-dest/')
+ rmrf(b'restore-src')
+ rmrf(b'restore-dest')

def _validate_save(orig_dir, save_path, commit_id, tree_id):
global bup_cmd
- rmrf('restore')
- exr = verify_rcz((bup_cmd, '-d', 'get-dest',
- 'restore', '-C', 'restore', save_path + '/.'))
+ rmrf(b'restore')
+ exr = verify_rcz((bup_cmd, b'-d', b'get-dest',
+ b'restore', b'-C', b'restore', save_path + b'/.'))
if exr.rc: return False
- verify_trees_match(orig_dir + '/', 'restore/')
+ verify_trees_match(orig_dir + b'/', b'restore/')
if tree_id:
# FIXME: double check that get-dest is correct
- exr = verify_rcz(('git', '--git-dir', 'get-dest', 'ls-tree', tree_id))
+ exr = verify_rcz((b'git', b'--git-dir', b'get-dest', b'ls-tree', tree_id))
if exr.rc: return False
- cat = verify_rcz(('git', '--git-dir', 'get-dest',
- 'cat-file', 'commit', commit_id))
+ cat = verify_rcz((b'git', b'--git-dir', b'get-dest',
+ b'cat-file', b'commit', commit_id))
if cat.rc: return False
- wvpasseq('tree ' + tree_id, cat.out.splitlines()[0])
+ wvpasseq(b'tree ' + tree_id, cat.out.splitlines()[0])

# FIXME: re-merge save and new_save?

def validate_save(dest_name, restore_subpath, commit_id, tree_id, orig_value,
get_out):
out = get_out.splitlines()
+ print('blarg: out', repr(out), file=sys.stderr)
wvpasseq(2, len(out))
get_tree_id = out[0]
get_commit_id = out[1]
@@ -191,97 +197,100 @@ def validate_tagged_save(tag_name, restore_subpath,
get_tag_id = out[0]
wvpasseq(commit_id, get_tag_id)
# Make sure tmp doesn't already exist.
- exr = exo(('git', '--git-dir', 'get-dest', 'show-ref', 'tmp-branch-for-tag'),
+ exr = exo((b'git', b'--git-dir', b'get-dest', b'show-ref', b'tmp-branch-for-tag'),
check=False)
wvpasseq(1, exr.rc)

- ex(('git', '--git-dir', 'get-dest', 'branch', 'tmp-branch-for-tag',
- 'refs/tags/' + tag_name))
- _validate_save(orig_value, 'tmp-branch-for-tag/latest' + restore_subpath,
+ ex((b'git', b'--git-dir', b'get-dest', b'branch', b'tmp-branch-for-tag',
+ b'refs/tags/' + tag_name))
+ _validate_save(orig_value, b'tmp-branch-for-tag/latest' + restore_subpath,
commit_id, tree_id)
- ex(('git', '--git-dir', 'get-dest', 'branch', '-D', 'tmp-branch-for-tag'))
+ ex((b'git', b'--git-dir', b'get-dest', b'branch', b'-D', b'tmp-branch-for-tag'))

def validate_new_tagged_commit(tag_name, commit_id, tree_id, get_out):
out = get_out.splitlines()
wvpasseq(1, len(out))
get_tag_id = out[0]
wvpassne(commit_id, get_tag_id)
- validate_tree(tree_id, tag_name + ':')
+ validate_tree(tree_id, tag_name + b':')


get_cases_tested = 0
-

def _run_get(disposition, method, what):
+ print('run_get:', repr((disposition, method, what)), file=sys.stderr)
global bup_cmd

if disposition == 'get':
- get_cmd = (bup_cmd, '-d', 'get-dest',
- 'get', '-vvct', '--print-tags', '-s', 'get-src')
+ get_cmd = (bup_cmd, b'-d', b'get-dest',
+ b'get', b'-vvct', b'--print-tags', b'-s', b'get-src')
elif disposition == 'get-on':
- get_cmd = (bup_cmd, '-d', 'get-dest',
- 'on', '-', 'get', '-vvct', '--print-tags', '-s', 'get-src')
+ get_cmd = (bup_cmd, b'-d', b'get-dest',
+ b'on', b'-', b'get', b'-vvct', b'--print-tags', b'-s', b'get-src')
elif disposition == 'get-to':
- get_cmd = (bup_cmd, '-d', 'get-dest',
- 'get', '-vvct', '--print-tags', '-s', 'get-src',
- '-r', '-:' + getcwd() + '/get-dest')
+ get_cmd = (bup_cmd, b'-d', b'get-dest',
+ b'get', b'-vvct', b'--print-tags', b'-s', b'get-src',
+ b'-r', b'-:' + getcwd() + b'/get-dest')
else:
- raise Exception('error: unexpected get disposition ' + disposition)
+ raise Exception('error: unexpected get disposition ' + repr(disposition))

global get_cases_tested
- if isinstance(what, compat.str_type):
+ if isinstance(what, bytes):
cmd = get_cmd + (method, what)
else:
- if method in ('--ff', '--append', '--pick', '--force-pick', '--new-tag',
- '--replace'):
- method += ':'
+ assert not isinstance(what, str) # python 3 sanity check
+ if method in (b'--ff', b'--append', b'--pick', b'--force-pick', b'--new-tag',
+ b'--replace'):
+ method += b':'
src, dest = what
cmd = get_cmd + (method, src, dest)
result = exo(cmd, check=False, stderr=PIPE)
get_cases_tested += 1
- fsck = ex((bup_cmd, '-d', 'get-dest', 'fsck'), check=False)
+ fsck = ex((bup_cmd, b'-d', b'get-dest', b'fsck'), check=False)
wvpasseq(0, fsck.rc)
return result

def run_get(disposition, method, what=None, given=None):
global bup_cmd
- rmrf('get-dest')
- ex((bup_cmd, '-d', 'get-dest', 'init'))
+ rmrf(b'get-dest')
+ ex((bup_cmd, b'-d', b'get-dest', b'init'))

if given:
# FIXME: replace bup-get with independent commands as is feasible
- exr = _run_get(disposition, '--replace', given)
+ exr = _run_get(disposition, b'--replace', given)
assert not exr.rc
return _run_get(disposition, method, what)

def test_universal_behaviors(get_disposition):
- methods = ('--ff', '--append', '--pick', '--force-pick', '--new-tag',
- '--replace', '--unnamed')
+ methods = (b'--ff', b'--append', b'--pick', b'--force-pick', b'--new-tag',
+ b'--replace', b'--unnamed')
for method in methods:
- wvstart(get_disposition + ' ' + method + ', missing source, fails')
- exr = run_get(get_disposition, method, 'not-there')
+ mmsg = method.decode('ascii')
+ wvstart(get_disposition + ' ' + mmsg + ', missing source, fails')
+ exr = run_get(get_disposition, method, b'not-there')
wvpassne(0, exr.rc)
- verify_rx(r'cannot find source', exr.err)
+ verify_rx(br'cannot find source', exr.err)
for method in methods:
- wvstart(get_disposition + ' ' + method + ' / fails')
- exr = run_get(get_disposition, method, '/')
+ mmsg = method.decode('ascii')
+ wvstart(get_disposition + ' ' + mmsg + ' / fails')
+ exr = run_get(get_disposition, method, b'/')
wvpassne(0, exr.rc)
- verify_rx('cannot fetch entire repository', exr.err)
+ verify_rx(b'cannot fetch entire repository', exr.err)

def verify_only_refs(**kwargs):
- for kind, refs in kwargs.iteritems():
+ for kind, refs in items(kwargs):
if kind == 'heads':
- abs_refs = ['refs/heads/' + ref for ref in refs]
- karg = '--heads'
+ abs_refs = [b'refs/heads/' + ref for ref in refs]
+ karg = b'--heads'
elif kind == 'tags':
- abs_refs = ['refs/tags/' + ref for ref in refs]
- karg = '--tags'
+ abs_refs = [b'refs/tags/' + ref for ref in refs]
+ karg = b'--tags'
else:
raise TypeError('unexpected keyword argument %r' % kind)
if abs_refs:
- verify_rcz(['git', '--git-dir', 'get-dest',
- 'show-ref', '--verify', karg] + abs_refs)
- exr = exo(('git', '--git-dir', 'get-dest', 'show-ref', karg),
+ verify_rcz([b'git', b'--git-dir', b'get-dest',
+ b'show-ref', b'--verify', karg] + abs_refs)
+ exr = exo((b'git', b'--git-dir', b'get-dest', b'show-ref', karg),
check=False)
wvpasseq(0, exr.rc)
expected_refs = sorted(abs_refs)
@@ -289,24 +298,25 @@ def verify_only_refs(**kwargs):
wvpasseq(expected_refs, repo_refs)
else:
# FIXME: can we just check "git show-ref --heads == ''"?
- exr = exo(('git', '--git-dir', 'get-dest', 'show-ref', karg),
+ exr = exo((b'git', b'--git-dir', b'get-dest', b'show-ref', karg),
check=False)
wvpasseq(1, exr.rc)
- wvpasseq('', exr.out.strip())
-
+ wvpasseq(b'', exr.out.strip())
+
def test_replace(get_disposition, src_info):
+ print('blarg:', repr(src_info), file=sys.stderr)

wvstart(get_disposition + ' --replace to root fails')
- for item in ('.tag/tinyfile',
- 'src/latest' + src_info['tinyfile-path'],
- '.tag/subtree',
- 'src/latest' + src_info['subtree-vfs-path'],
- '.tag/commit-1',
- 'src/latest',
- 'src'):
- exr = run_get(get_disposition, '--replace', (item, '/'))
+ for item in (b'.tag/tinyfile',
+ b'src/latest' + src_info['tinyfile-path'],
+ b'.tag/subtree',
+ b'src/latest' + src_info['subtree-vfs-path'],
+ b'.tag/commit-1',
+ b'src/latest',
+ b'src'):
+ exr = run_get(get_disposition, b'--replace', (item, b'/'))
wvpassne(0, exr.rc)
- verify_rx(r'impossible; can only overwrite branch or tag', exr.err)
+ verify_rx(br'impossible; can only overwrite branch or tag', exr.err)

tinyfile_id = src_info['tinyfile-id']
tinyfile_path = src_info['tinyfile-path']
@@ -317,382 +327,385 @@ def test_replace(get_disposition, src_info):

# Anything to tag
existing_items = {'nothing' : None,
- 'blob' : ('.tag/tinyfile', '.tag/obj'),
- 'tree' : ('.tag/tree-1', '.tag/obj'),
- 'commit': ('.tag/commit-1', '.tag/obj')}
- for ex_type, ex_ref in existing_items.iteritems():
+ 'blob' : (b'.tag/tinyfile', b'.tag/obj'),
+ 'tree' : (b'.tag/tree-1', b'.tag/obj'),
+ 'commit': (b'.tag/commit-1', b'.tag/obj')}
+ for ex_type, ex_ref in items(existing_items):
wvstart(get_disposition + ' --replace ' + ex_type + ' with blob tag')
- for item in ('.tag/tinyfile', 'src/latest' + tinyfile_path):
- exr = run_get(get_disposition, '--replace', (item ,'.tag/obj'),
+ for item in (b'.tag/tinyfile', b'src/latest' + tinyfile_path):
+ exr = run_get(get_disposition, b'--replace', (item ,b'.tag/obj'),
given=ex_ref)
wvpasseq(0, exr.rc)
validate_blob(tinyfile_id, tinyfile_id)
- verify_only_refs(heads=[], tags=('obj',))
+ verify_only_refs(heads=[], tags=(b'obj',))
wvstart(get_disposition + ' --replace ' + ex_type + ' with tree tag')
- for item in ('.tag/subtree', 'src/latest' + subtree_vfs_path):
- exr = run_get(get_disposition, '--replace', (item, '.tag/obj'),
+ for item in (b'.tag/subtree', b'src/latest' + subtree_vfs_path):
+ exr = run_get(get_disposition, b'--replace', (item, b'.tag/obj'),
given=ex_ref)
validate_tree(subtree_id, subtree_id)
- verify_only_refs(heads=[], tags=('obj',))
+ verify_only_refs(heads=[], tags=(b'obj',))
wvstart(get_disposition + ' --replace ' + ex_type + ' with commitish tag')
- for item in ('.tag/commit-2', 'src/latest', 'src'):
- exr = run_get(get_disposition, '--replace', (item, '.tag/obj'),
+ for item in (b'.tag/commit-2', b'src/latest', b'src'):
+ exr = run_get(get_disposition, b'--replace', (item, b'.tag/obj'),
given=ex_ref)
- validate_tagged_save('obj', getcwd() + '/src',
- commit_2_id, tree_2_id, 'src-2', exr.out)
- verify_only_refs(heads=[], tags=('obj',))
+ validate_tagged_save(b'obj', getcwd() + b'/src',
+ commit_2_id, tree_2_id, b'src-2', exr.out)
+ verify_only_refs(heads=[], tags=(b'obj',))

# Committish to branch.
existing_items = (('nothing', None),
- ('branch', ('.tag/commit-1', 'obj')))
+ ('branch', (b'.tag/commit-1', b'obj')))
for ex_type, ex_ref in existing_items:
- for item_type, item in (('commit', '.tag/commit-2'),
- ('save', 'src/latest'),
- ('branch', 'src')):
+ for item_type, item in (('commit', b'.tag/commit-2'),
+ ('save', b'src/latest'),
+ ('branch', b'src')):
wvstart(get_disposition + ' --replace '
+ ex_type + ' with ' + item_type)
- exr = run_get(get_disposition, '--replace', (item, 'obj'),
+ exr = run_get(get_disposition, b'--replace', (item, b'obj'),
given=ex_ref)
- validate_save('obj/latest', getcwd() + '/src',
- commit_2_id, tree_2_id, 'src-2', exr.out)
- verify_only_refs(heads=('obj',), tags=[])
+ validate_save(b'obj/latest', getcwd() + b'/src',
+ commit_2_id, tree_2_id, b'src-2', exr.out)
+ verify_only_refs(heads=(b'obj',), tags=[])

# Not committish to branch
existing_items = (('nothing', None),
- ('branch', ('.tag/commit-1', 'obj')))
+ ('branch', (b'.tag/commit-1', b'obj')))
for ex_type, ex_ref in existing_items:
- for item_type, item in (('blob', '.tag/tinyfile'),
- ('blob', 'src/latest' + tinyfile_path),
- ('tree', '.tag/subtree'),
- ('tree', 'src/latest' + subtree_vfs_path)):
+ for item_type, item in (('blob', b'.tag/tinyfile'),
+ ('blob', b'src/latest' + tinyfile_path),
+ ('tree', b'.tag/subtree'),
+ ('tree', b'src/latest' + subtree_vfs_path)):
wvstart(get_disposition + ' --replace branch with '
+ item_type + ' given ' + ex_type + ' fails')

- exr = run_get(get_disposition, '--replace', (item, 'obj'),
+ exr = run_get(get_disposition, b'--replace', (item, b'obj'),
given=ex_ref)
wvpassne(0, exr.rc)
- verify_rx(r'cannot overwrite branch with .+ for', exr.err)
+ verify_rx(br'cannot overwrite branch with .+ for', exr.err)

wvstart(get_disposition + ' --replace, implicit destinations')

- exr = run_get(get_disposition, '--replace', 'src')
- validate_save('src/latest', getcwd() + '/src',
- commit_2_id, tree_2_id, 'src-2', exr.out)
- verify_only_refs(heads=('src',), tags=[])
+ exr = run_get(get_disposition, b'--replace', b'src')
+ validate_save(b'src/latest', getcwd() + b'/src',
+ commit_2_id, tree_2_id, b'src-2', exr.out)
+ verify_only_refs(heads=(b'src',), tags=[])

- exr = run_get(get_disposition, '--replace', '.tag/commit-2')
- validate_tagged_save('commit-2', getcwd() + '/src',
- commit_2_id, tree_2_id, 'src-2', exr.out)
- verify_only_refs(heads=[], tags=('commit-2',))
+ exr = run_get(get_disposition, b'--replace', b'.tag/commit-2')
+ validate_tagged_save(b'commit-2', getcwd() + b'/src',
+ commit_2_id, tree_2_id, b'src-2', exr.out)
+ verify_only_refs(heads=[], tags=(b'commit-2',))

def test_ff(get_disposition, src_info):

wvstart(get_disposition + ' --ff to root fails')
tinyfile_path = src_info['tinyfile-path']
- for item in ('.tag/tinyfile', 'src/latest' + tinyfile_path):
- exr = run_get(get_disposition, '--ff', (item, '/'))
+ for item in (b'.tag/tinyfile', b'src/latest' + tinyfile_path):
+ exr = run_get(get_disposition, b'--ff', (item, b'/'))
wvpassne(0, exr.rc)
- verify_rx(r'source for .+ must be a branch, save, or commit', exr.err)
+ verify_rx(br'source for .+ must be a branch, save, or commit', exr.err)
subtree_vfs_path = src_info['subtree-vfs-path']
- for item in ('.tag/subtree', 'src/latest' + subtree_vfs_path):
- exr = run_get(get_disposition, '--ff', (item, '/'))
+ for item in (b'.tag/subtree', b'src/latest' + subtree_vfs_path):
+ exr = run_get(get_disposition, b'--ff', (item, b'/'))
wvpassne(0, exr.rc)
- verify_rx(r'is impossible; can only --append a tree to a branch',
+ verify_rx(br'is impossible; can only --append a tree to a branch',
exr.err)
- for item in ('.tag/commit-1', 'src/latest', 'src'):
- exr = run_get(get_disposition, '--ff', (item, '/'))
+ for item in (b'.tag/commit-1', b'src/latest', b'src'):
+ exr = run_get(get_disposition, b'--ff', (item, b'/'))
wvpassne(0, exr.rc)
- verify_rx(r'destination for .+ is a root, not a branch', exr.err)
+ verify_rx(br'destination for .+ is a root, not a branch', exr.err)

wvstart(get_disposition + ' --ff of not-committish fails')
- for src in ('.tag/tinyfile', 'src/latest' + tinyfile_path):
+ for src in (b'.tag/tinyfile', b'src/latest' + tinyfile_path):
# FIXME: use get_item elsewhere?
- for given, get_item in ((None, (src, 'obj')),
- (None, (src, '.tag/obj')),
- (('.tag/tinyfile', '.tag/obj'), (src, '.tag/obj')),
- (('.tag/tree-1', '.tag/obj'), (src, '.tag/obj')),
- (('.tag/commit-1', '.tag/obj'), (src, '.tag/obj')),
- (('.tag/commit-1', 'obj'), (src, 'obj'))):
- exr = run_get(get_disposition, '--ff', get_item, given=given)
+ for given, get_item in ((None, (src, b'obj')),
+ (None, (src, b'.tag/obj')),
+ ((b'.tag/tinyfile', b'.tag/obj'), (src, b'.tag/obj')),
+ ((b'.tag/tree-1', b'.tag/obj'), (src, b'.tag/obj')),
+ ((b'.tag/commit-1', b'.tag/obj'), (src, b'.tag/obj')),
+ ((b'.tag/commit-1', b'obj'), (src, b'obj'))):
+ exr = run_get(get_disposition, b'--ff', get_item, given=given)
wvpassne(0, exr.rc)
- verify_rx(r'must be a branch, save, or commit', exr.err)
- for src in ('.tag/subtree', 'src/latest' + subtree_vfs_path):
- for given, get_item in ((None, (src, 'obj')),
- (None, (src, '.tag/obj')),
- (('.tag/tinyfile', '.tag/obj'), (src, '.tag/obj')),
- (('.tag/tree-1', '.tag/obj'), (src, '.tag/obj')),
- (('.tag/commit-1', '.tag/obj'), (src, '.tag/obj')),
- (('.tag/commit-1', 'obj'), (src, 'obj'))):
- exr = run_get(get_disposition, '--ff', get_item, given=given)
+ verify_rx(br'must be a branch, save, or commit', exr.err)
+ for src in (b'.tag/subtree', b'src/latest' + subtree_vfs_path):
+ for given, get_item in ((None, (src, b'obj')),
+ (None, (src, b'.tag/obj')),
+ ((b'.tag/tinyfile', b'.tag/obj'), (src, b'.tag/obj')),
+ ((b'.tag/tree-1', b'.tag/obj'), (src, b'.tag/obj')),
+ ((b'.tag/commit-1', b'.tag/obj'), (src, b'.tag/obj')),
+ ((b'.tag/commit-1', b'obj'), (src, b'obj'))):
+ exr = run_get(get_disposition, b'--ff', get_item, given=given)
wvpassne(0, exr.rc)
- verify_rx(r'can only --append a tree to a branch', exr.err)
+ verify_rx(br'can only --append a tree to a branch', exr.err)

wvstart(get_disposition + ' --ff committish, ff possible')
save_2 = src_info['save-2']
- for src in ('.tag/commit-2', 'src/' + save_2, 'src'):
+ for src in (b'.tag/commit-2', b'src/' + save_2, b'src'):
for given, get_item, complaint in \
- ((None, (src, '.tag/obj'),
- r'destination .+ must be a valid branch name'),
- (('.tag/tinyfile', '.tag/obj'), (src, '.tag/obj'),
- r'destination .+ is a blob, not a branch'),
- (('.tag/tree-1', '.tag/obj'), (src, '.tag/obj'),
- r'destination .+ is a tree, not a branch'),
- (('.tag/commit-1', '.tag/obj'), (src, '.tag/obj'),
- r'destination .+ is a tagged commit, not a branch'),
- (('.tag/commit-2', '.tag/obj'), (src, '.tag/obj'),
- r'destination .+ is a tagged commit, not a branch')):
- exr = run_get(get_disposition, '--ff', get_item, given=given)
+ ((None, (src, b'.tag/obj'),
+ br'destination .+ must be a valid branch name'),
+ ((b'.tag/tinyfile', b'.tag/obj'), (src, b'.tag/obj'),
+ br'destination .+ is a blob, not a branch'),
+ ((b'.tag/tree-1', b'.tag/obj'), (src, b'.tag/obj'),
+ br'destination .+ is a tree, not a branch'),
+ ((b'.tag/commit-1', b'.tag/obj'), (src, b'.tag/obj'),
+ br'destination .+ is a tagged commit, not a branch'),
+ ((b'.tag/commit-2', b'.tag/obj'), (src, b'.tag/obj'),
+ br'destination .+ is a tagged commit, not a branch')):
+ exr = run_get(get_disposition, b'--ff', get_item, given=given)
wvpassne(0, exr.rc)
verify_rx(complaint, exr.err)
# FIXME: use src or item and given or existing consistently in loops...
commit_2_id = src_info['commit-2-id']
tree_2_id = src_info['tree-2-id']
- for src in ('.tag/commit-2', 'src/' + save_2, 'src'):
- for given in (None, ('.tag/commit-1', 'obj'), ('.tag/commit-2', 'obj')):
- exr = run_get(get_disposition, '--ff', (src, 'obj'), given=given)
+ for src in (b'.tag/commit-2', b'src/' + save_2, b'src'):
+ for given in (None, (b'.tag/commit-1', b'obj'), (b'.tag/commit-2', b'obj')):
+ exr = run_get(get_disposition, b'--ff', (src, b'obj'), given=given)
wvpasseq(0, exr.rc)
- validate_save('obj/latest', getcwd() + '/src',
- commit_2_id, tree_2_id, 'src-2', exr.out)
- verify_only_refs(heads=('obj',), tags=[])
+ validate_save(b'obj/latest', getcwd() + b'/src',
+ commit_2_id, tree_2_id, b'src-2', exr.out)
+ verify_only_refs(heads=(b'obj',), tags=[])

wvstart(get_disposition + ' --ff, implicit destinations')
- for item in ('src', 'src/latest'):
- exr = run_get(get_disposition, '--ff', item)
+ for item in (b'src', b'src/latest'):
+ exr = run_get(get_disposition, b'--ff', item)
wvpasseq(0, exr.rc)

- ex(('find', 'get-dest/refs'))
- ex((bup_cmd, '-d', 'get-dest', 'ls'))
+ ex((b'find', b'get-dest/refs'))
+ ex((bup_cmd, b'-d', b'get-dest', b'ls'))

- validate_save('src/latest', getcwd() + '/src',
- commit_2_id, tree_2_id, 'src-2', exr.out)
+ validate_save(b'src/latest', getcwd() + b'/src',
+ commit_2_id, tree_2_id, b'src-2', exr.out)
#verify_only_refs(heads=('src',), tags=[])

wvstart(get_disposition + ' --ff, ff impossible')
- for given, get_item in ((('unrelated-branch', 'src'), 'src'),
- (('.tag/commit-2', 'src'), ('.tag/commit-1', 'src'))):
- exr = run_get(get_disposition, '--ff', get_item, given=given)
+ for given, get_item in (((b'unrelated-branch', b'src'), b'src'),
+ ((b'.tag/commit-2', b'src'), (b'.tag/commit-1', b'src'))):
+ exr = run_get(get_disposition, b'--ff', get_item, given=given)
wvpassne(0, exr.rc)
- verify_rx(r'destination is not an ancestor of source', exr.err)
+ verify_rx(br'destination is not an ancestor of source', exr.err)

def test_append(get_disposition, src_info):
tinyfile_path = src_info['tinyfile-path']
subtree_vfs_path = src_info['subtree-vfs-path']

wvstart(get_disposition + ' --append to root fails')
- for item in ('.tag/tinyfile', 'src/latest' + tinyfile_path):
- exr = run_get(get_disposition, '--append', (item, '/'))
+ for item in (b'.tag/tinyfile', b'src/latest' + tinyfile_path):
+ exr = run_get(get_disposition, b'--append', (item, b'/'))
wvpassne(0, exr.rc)
- verify_rx(r'source for .+ must be a branch, save, commit, or tree',
+ verify_rx(br'source for .+ must be a branch, save, commit, or tree',
exr.err)
- for item in ('.tag/subtree', 'src/latest' + subtree_vfs_path,
- '.tag/commit-1', 'src/latest', 'src'):
- exr = run_get(get_disposition, '--append', (item, '/'))
+ for item in (b'.tag/subtree', b'src/latest' + subtree_vfs_path,
+ b'.tag/commit-1', b'src/latest', b'src'):
+ exr = run_get(get_disposition, b'--append', (item, b'/'))
wvpassne(0, exr.rc)
- verify_rx(r'destination for .+ is a root, not a branch', exr.err)
+ verify_rx(br'destination for .+ is a root, not a branch', exr.err)

wvstart(get_disposition + ' --append of not-treeish fails')
- for src in ('.tag/tinyfile', 'src/latest' + tinyfile_path):
- for given, item in ((None, (src, 'obj')),
- (None, (src, '.tag/obj')),
- (('.tag/tinyfile', '.tag/obj'), (src, '.tag/obj')),
- (('.tag/tree-1', '.tag/obj'), (src, '.tag/obj')),
- (('.tag/commit-1', '.tag/obj'), (src, '.tag/obj')),
- (('.tag/commit-1', 'obj'), (src, 'obj'))):
- exr = run_get(get_disposition, '--append', item, given=given)
+ for src in (b'.tag/tinyfile', b'src/latest' + tinyfile_path):
+ for given, item in ((None, (src, b'obj')),
+ (None, (src, b'.tag/obj')),
+ ((b'.tag/tinyfile', b'.tag/obj'), (src, b'.tag/obj')),
+ ((b'.tag/tree-1', b'.tag/obj'), (src, b'.tag/obj')),
+ ((b'.tag/commit-1', b'.tag/obj'), (src, b'.tag/obj')),
+ ((b'.tag/commit-1', b'obj'), (src, b'obj'))):
+ exr = run_get(get_disposition, b'--append', item, given=given)
wvpassne(0, exr.rc)
- verify_rx(r'must be a branch, save, commit, or tree', exr.err)
+ verify_rx(br'must be a branch, save, commit, or tree', exr.err)

wvstart(get_disposition + ' --append committish failure cases')
save_2 = src_info['save-2']
- for src in ('.tag/subtree', 'src/latest' + subtree_vfs_path,
- '.tag/commit-2', 'src/' + save_2, 'src'):
+ for src in (b'.tag/subtree', b'src/latest' + subtree_vfs_path,
+ b'.tag/commit-2', b'src/' + save_2, b'src'):
for given, item, complaint in \
- ((None, (src, '.tag/obj'),
- r'destination .+ must be a valid branch name'),
- (('.tag/tinyfile', '.tag/obj'), (src, '.tag/obj'),
- r'destination .+ is a blob, not a branch'),
- (('.tag/tree-1', '.tag/obj'), (src, '.tag/obj'),
- r'destination .+ is a tree, not a branch'),
- (('.tag/commit-1', '.tag/obj'), (src, '.tag/obj'),
- r'destination .+ is a tagged commit, not a branch'),
- (('.tag/commit-2', '.tag/obj'), (src, '.tag/obj'),
- r'destination .+ is a tagged commit, not a branch')):
- exr = run_get(get_disposition, '--append', item, given=given)
+ ((None, (src, b'.tag/obj'),
+ br'destination .+ must be a valid branch name'),
+ ((b'.tag/tinyfile', b'.tag/obj'), (src, b'.tag/obj'),
+ br'destination .+ is a blob, not a branch'),
+ ((b'.tag/tree-1', b'.tag/obj'), (src, b'.tag/obj'),
+ br'destination .+ is a tree, not a branch'),
+ ((b'.tag/commit-1', b'.tag/obj'), (src, b'.tag/obj'),
+ br'destination .+ is a tagged commit, not a branch'),
+ ((b'.tag/commit-2', b'.tag/obj'), (src, b'.tag/obj'),
+ br'destination .+ is a tagged commit, not a branch')):
+ exr = run_get(get_disposition, b'--append', item, given=given)
wvpassne(0, exr.rc)
verify_rx(complaint, exr.err)
-
+
wvstart(get_disposition + ' --append committish')
commit_2_id = src_info['commit-2-id']
tree_2_id = src_info['tree-2-id']
- for item in ('.tag/commit-2', 'src/' + save_2, 'src'):
- for existing in (None, ('.tag/commit-1', 'obj'),
- ('.tag/commit-2', 'obj'),
- ('unrelated-branch', 'obj')):
- exr = run_get(get_disposition, '--append', (item, 'obj'),
+ for item in (b'.tag/commit-2', b'src/' + save_2, b'src'):
+ for existing in (None, (b'.tag/commit-1', b'obj'),
+ (b'.tag/commit-2', b'obj'),
+ (b'unrelated-branch', b'obj')):
+ exr = run_get(get_disposition, b'--append', (item, b'obj'),
given=existing)
wvpasseq(0, exr.rc)
- validate_new_save('obj/latest', getcwd() + '/src',
- commit_2_id, tree_2_id, 'src-2', exr.out)
- verify_only_refs(heads=('obj',), tags=[])
+ validate_new_save(b'obj/latest', getcwd() + b'/src',
+ commit_2_id, tree_2_id, b'src-2', exr.out)
+ verify_only_refs(heads=(b'obj',), tags=[])
# Append ancestor
save_1 = src_info['save-1']
commit_1_id = src_info['commit-1-id']
tree_1_id = src_info['tree-1-id']
- for item in ('.tag/commit-1', 'src/' + save_1, 'src-1'):
- exr = run_get(get_disposition, '--append', (item, 'obj'),
- given=('.tag/commit-2', 'obj'))
+ for item in (b'.tag/commit-1', b'src/' + save_1, b'src-1'):
+ exr = run_get(get_disposition, b'--append', (item, b'obj'),
+ given=(b'.tag/commit-2', b'obj'))
wvpasseq(0, exr.rc)
- validate_new_save('obj/latest', getcwd() + '/src',
- commit_1_id, tree_1_id, 'src-1', exr.out)
- verify_only_refs(heads=('obj',), tags=[])
+ validate_new_save(b'obj/latest', getcwd() + b'/src',
+ commit_1_id, tree_1_id, b'src-1', exr.out)
+ verify_only_refs(heads=(b'obj',), tags=[])

wvstart(get_disposition + ' --append tree')
subtree_path = src_info['subtree-path']
subtree_id = src_info['subtree-id']
- for item in ('.tag/subtree', 'src/latest' + subtree_vfs_path):
- for existing in (None, ('.tag/commit-1', 'obj'), ('.tag/commit-2','obj')):
- exr = run_get(get_disposition, '--append', (item, 'obj'),
+ for item in (b'.tag/subtree', b'src/latest' + subtree_vfs_path):
+ for existing in (None,
+ (b'.tag/commit-1', b'obj'),
+ (b'.tag/commit-2', b'obj')):
+ exr = run_get(get_disposition, b'--append', (item, b'obj'),
given=existing)
wvpasseq(0, exr.rc)
- validate_new_save('obj/latest', '/', None, subtree_id, subtree_path,
+ validate_new_save(b'obj/latest', b'/', None, subtree_id, subtree_path,
exr.out)
- verify_only_refs(heads=('obj',), tags=[])
+ verify_only_refs(heads=(b'obj',), tags=[])

wvstart(get_disposition + ' --append, implicit destinations')

- for item in ('src', 'src/latest'):
- exr = run_get(get_disposition, '--append', item)
+ for item in (b'src', b'src/latest'):
+ exr = run_get(get_disposition, b'--append', item)
wvpasseq(0, exr.rc)
- validate_new_save('src/latest', getcwd() + '/src', commit_2_id, tree_2_id,
- 'src-2', exr.out)
- verify_only_refs(heads=('src',), tags=[])
+ validate_new_save(b'src/latest', getcwd() + b'/src', commit_2_id, tree_2_id,
+ b'src-2', exr.out)
+ verify_only_refs(heads=(b'src',), tags=[])

def test_pick(get_disposition, src_info, force=False):
- flavor = '--force-pick' if force else '--pick'
+ flavor = b'--force-pick' if force else b'--pick'
+ flavormsg = flavor.decode('ascii')
tinyfile_path = src_info['tinyfile-path']
subtree_vfs_path = src_info['subtree-vfs-path']

- wvstart(get_disposition + ' ' + flavor + ' to root fails')
- for item in ('.tag/tinyfile', 'src/latest' + tinyfile_path, 'src'):
- exr = run_get(get_disposition, flavor, (item, '/'))
+ wvstart(get_disposition + ' ' + flavormsg + ' to root fails')
+ for item in (b'.tag/tinyfile', b'src/latest' + tinyfile_path, b'src'):
+ exr = run_get(get_disposition, flavor, (item, b'/'))
wvpassne(0, exr.rc)
- verify_rx(r'can only pick a commit or save', exr.err)
- for item in ('.tag/commit-1', 'src/latest'):
- exr = run_get(get_disposition, flavor, (item, '/'))
+ verify_rx(br'can only pick a commit or save', exr.err)
+ for item in (b'.tag/commit-1', b'src/latest'):
+ exr = run_get(get_disposition, flavor, (item, b'/'))
wvpassne(0, exr.rc)
- verify_rx(r'destination is not a tag or branch', exr.err)
- for item in ('.tag/subtree', 'src/latest' + subtree_vfs_path):
- exr = run_get(get_disposition, flavor, (item, '/'))
+ verify_rx(br'destination is not a tag or branch', exr.err)
+ for item in (b'.tag/subtree', b'src/latest' + subtree_vfs_path):
+ exr = run_get(get_disposition, flavor, (item, b'/'))
wvpassne(0, exr.rc)
- verify_rx(r'is impossible; can only --append a tree', exr.err)
-
- wvstart(get_disposition + ' ' + flavor + ' of blob or branch fails')
- for item in ('.tag/tinyfile', 'src/latest' + tinyfile_path, 'src'):
- for given, get_item in ((None, (item, 'obj')),
- (None, (item, '.tag/obj')),
- (('.tag/tinyfile', '.tag/obj'), (item, '.tag/obj')),
- (('.tag/tree-1', '.tag/obj'), (item, '.tag/obj')),
- (('.tag/commit-1', '.tag/obj'), (item, '.tag/obj')),
- (('.tag/commit-1', 'obj'), (item, 'obj'))):
+ verify_rx(br'is impossible; can only --append a tree', exr.err)
+
+ wvstart(get_disposition + ' ' + flavormsg + ' of blob or branch fails')
+ for item in (b'.tag/tinyfile', b'src/latest' + tinyfile_path, b'src'):
+ for given, get_item in ((None, (item, b'obj')),
+ (None, (item, b'.tag/obj')),
+ ((b'.tag/tinyfile', b'.tag/obj'), (item, b'.tag/obj')),
+ ((b'.tag/tree-1', b'.tag/obj'), (item, b'.tag/obj')),
+ ((b'.tag/commit-1', b'.tag/obj'), (item, b'.tag/obj')),
+ ((b'.tag/commit-1', b'obj'), (item, b'obj'))):
exr = run_get(get_disposition, flavor, get_item, given=given)
wvpassne(0, exr.rc)
- verify_rx(r'impossible; can only pick a commit or save', exr.err)
-
- wvstart(get_disposition + ' ' + flavor + ' of tree fails')
- for item in ('.tag/subtree', 'src/latest' + subtree_vfs_path):
- for given, get_item in ((None, (item, 'obj')),
- (None, (item, '.tag/obj')),
- (('.tag/tinyfile', '.tag/obj'), (item, '.tag/obj')),
- (('.tag/tree-1', '.tag/obj'), (item, '.tag/obj')),
- (('.tag/commit-1', '.tag/obj'), (item, '.tag/obj')),
- (('.tag/commit-1', 'obj'), (item, 'obj'))):
+ verify_rx(br'impossible; can only pick a commit or save', exr.err)
+
+ wvstart(get_disposition + ' ' + flavormsg + ' of tree fails')
+ for item in (b'.tag/subtree', b'src/latest' + subtree_vfs_path):
+ for given, get_item in ((None, (item, b'obj')),
+ (None, (item, b'.tag/obj')),
+ ((b'.tag/tinyfile', b'.tag/obj'), (item, b'.tag/obj')),
+ ((b'.tag/tree-1', b'.tag/obj'), (item, b'.tag/obj')),
+ ((b'.tag/commit-1', b'.tag/obj'), (item, b'.tag/obj')),
+ ((b'.tag/commit-1', b'obj'), (item, b'obj'))):
exr = run_get(get_disposition, flavor, get_item, given=given)
wvpassne(0, exr.rc)
- verify_rx(r'impossible; can only --append a tree', exr.err)
+ verify_rx(br'impossible; can only --append a tree', exr.err)

save_2 = src_info['save-2']
commit_2_id = src_info['commit-2-id']
tree_2_id = src_info['tree-2-id']
# FIXME: these two wvstart texts?
if force:
- wvstart(get_disposition + ' ' + flavor + ' commit/save to existing tag')
- for item in ('.tag/commit-2', 'src/' + save_2):
- for given in (('.tag/tinyfile', '.tag/obj'),
- ('.tag/tree-1', '.tag/obj'),
- ('.tag/commit-1', '.tag/obj')):
- exr = run_get(get_disposition, flavor, (item, '.tag/obj'),
+ wvstart(get_disposition + ' ' + flavormsg + ' commit/save to existing tag')
+ for item in (b'.tag/commit-2', b'src/' + save_2):
+ for given in ((b'.tag/tinyfile', b'.tag/obj'),
+ (b'.tag/tree-1', b'.tag/obj'),
+ (b'.tag/commit-1', b'.tag/obj')):
+ exr = run_get(get_disposition, flavor, (item, b'.tag/obj'),
given=given)
wvpasseq(0, exr.rc)
- validate_new_tagged_commit('obj', commit_2_id, tree_2_id,
+ validate_new_tagged_commit(b'obj', commit_2_id, tree_2_id,
exr.out)
- verify_only_refs(heads=[], tags=('obj',))
+ verify_only_refs(heads=[], tags=(b'obj',))
else: # --pick
- wvstart(get_disposition + ' ' + flavor
+ wvstart(get_disposition + ' ' + flavormsg
+ ' commit/save to existing tag fails')
- for item in ('.tag/commit-2', 'src/' + save_2):
- for given in (('.tag/tinyfile', '.tag/obj'),
- ('.tag/tree-1', '.tag/obj'),
- ('.tag/commit-1', '.tag/obj')):
- exr = run_get(get_disposition, flavor, (item, '.tag/obj'), given=given)
+ for item in (b'.tag/commit-2', b'src/' + save_2):
+ for given in ((b'.tag/tinyfile', b'.tag/obj'),
+ (b'.tag/tree-1', b'.tag/obj'),
+ (b'.tag/commit-1', b'.tag/obj')):
+ exr = run_get(get_disposition, flavor, (item, b'.tag/obj'), given=given)
wvpassne(0, exr.rc)
- verify_rx(r'cannot overwrite existing tag', exr.err)
+ verify_rx(br'cannot overwrite existing tag', exr.err)

- wvstart(get_disposition + ' ' + flavor + ' commit/save to tag')
- for item in ('.tag/commit-2', 'src/' + save_2):
- exr = run_get(get_disposition, flavor, (item, '.tag/obj'))
+ wvstart(get_disposition + ' ' + flavormsg + ' commit/save to tag')
+ for item in (b'.tag/commit-2', b'src/' + save_2):
+ exr = run_get(get_disposition, flavor, (item, b'.tag/obj'))
wvpasseq(0, exr.rc)
validate_clean_repo()
- validate_new_tagged_commit('obj', commit_2_id, tree_2_id, exr.out)
- verify_only_refs(heads=[], tags=('obj',))
+ validate_new_tagged_commit(b'obj', commit_2_id, tree_2_id, exr.out)
+ verify_only_refs(heads=[], tags=(b'obj',))

- wvstart(get_disposition + ' ' + flavor + ' commit/save to branch')
- for item in ('.tag/commit-2', 'src/' + save_2):
- for given in (None, ('.tag/commit-1', 'obj'), ('.tag/commit-2', 'obj')):
- exr = run_get(get_disposition, flavor, (item, 'obj'), given=given)
+ wvstart(get_disposition + ' ' + flavormsg + ' commit/save to branch')
+ for item in (b'.tag/commit-2', b'src/' + save_2):
+ for given in (None, (b'.tag/commit-1', b'obj'), (b'.tag/commit-2', b'obj')):
+ exr = run_get(get_disposition, flavor, (item, b'obj'), given=given)
wvpasseq(0, exr.rc)
validate_clean_repo()
- validate_new_save('obj/latest', getcwd() + '/src',
- commit_2_id, tree_2_id, 'src-2', exr.out)
- verify_only_refs(heads=('obj',), tags=[])
+ validate_new_save(b'obj/latest', getcwd() + b'/src',
+ commit_2_id, tree_2_id, b'src-2', exr.out)
+ verify_only_refs(heads=(b'obj',), tags=[])

- wvstart(get_disposition + ' ' + flavor
+ wvstart(get_disposition + ' ' + flavormsg
+ ' commit/save unrelated commit to branch')
- for item in('.tag/commit-2', 'src/' + save_2):
- exr = run_get(get_disposition, flavor, (item, 'obj'),
- given=('unrelated-branch', 'obj'))
+ for item in(b'.tag/commit-2', b'src/' + save_2):
+ exr = run_get(get_disposition, flavor, (item, b'obj'),
+ given=(b'unrelated-branch', b'obj'))
wvpasseq(0, exr.rc)
validate_clean_repo()
- validate_new_save('obj/latest', getcwd() + '/src',
- commit_2_id, tree_2_id, 'src-2', exr.out)
- verify_only_refs(heads=('obj',), tags=[])
+ validate_new_save(b'obj/latest', getcwd() + b'/src',
+ commit_2_id, tree_2_id, b'src-2', exr.out)
+ verify_only_refs(heads=(b'obj',), tags=[])

- wvstart(get_disposition + ' ' + flavor + ' commit/save ancestor to branch')
+ wvstart(get_disposition + ' ' + flavormsg + ' commit/save ancestor to branch')
save_1 = src_info['save-1']
commit_1_id = src_info['commit-1-id']
tree_1_id = src_info['tree-1-id']
- for item in ('.tag/commit-1', 'src/' + save_1):
- exr = run_get(get_disposition, flavor, (item, 'obj'),
- given=('.tag/commit-2', 'obj'))
+ for item in (b'.tag/commit-1', b'src/' + save_1):
+ exr = run_get(get_disposition, flavor, (item, b'obj'),
+ given=(b'.tag/commit-2', b'obj'))
wvpasseq(0, exr.rc)
validate_clean_repo()
- validate_new_save('obj/latest', getcwd() + '/src',
- commit_1_id, tree_1_id, 'src-1', exr.out)
- verify_only_refs(heads=('obj',), tags=[])
+ validate_new_save(b'obj/latest', getcwd() + b'/src',
+ commit_1_id, tree_1_id, b'src-1', exr.out)
+ verify_only_refs(heads=(b'obj',), tags=[])


- wvstart(get_disposition + ' ' + flavor + ', implicit destinations')
- exr = run_get(get_disposition, flavor, '.tag/commit-2')
+ wvstart(get_disposition + ' ' + flavormsg + ', implicit destinations')
+ exr = run_get(get_disposition, flavor, b'.tag/commit-2')
wvpasseq(0, exr.rc)
validate_clean_repo()
- validate_new_tagged_commit('commit-2', commit_2_id, tree_2_id, exr.out)
- verify_only_refs(heads=[], tags=('commit-2',))
+ validate_new_tagged_commit(b'commit-2', commit_2_id, tree_2_id, exr.out)
+ verify_only_refs(heads=[], tags=(b'commit-2',))

- exr = run_get(get_disposition, flavor, 'src/latest')
+ exr = run_get(get_disposition, flavor, b'src/latest')
wvpasseq(0, exr.rc)
validate_clean_repo()
- validate_new_save('src/latest', getcwd() + '/src',
- commit_2_id, tree_2_id, 'src-2', exr.out)
- verify_only_refs(heads=('src',), tags=[])
+ validate_new_save(b'src/latest', getcwd() + b'/src',
+ commit_2_id, tree_2_id, b'src-2', exr.out)
+ verify_only_refs(heads=(b'src',), tags=[])

def test_new_tag(get_disposition, src_info):
tinyfile_id = src_info['tinyfile-id']
@@ -703,229 +716,229 @@ def test_new_tag(get_disposition, src_info):
subtree_vfs_path = src_info['subtree-vfs-path']

wvstart(get_disposition + ' --new-tag to root fails')
- for item in ('.tag/tinyfile',
- 'src/latest' + tinyfile_path,
- '.tag/subtree',
- 'src/latest' + subtree_vfs_path,
- '.tag/commit-1',
- 'src/latest',
- 'src'):
- exr = run_get(get_disposition, '--new-tag', (item, '/'))
+ for item in (b'.tag/tinyfile',
+ b'src/latest' + tinyfile_path,
+ b'.tag/subtree',
+ b'src/latest' + subtree_vfs_path,
+ b'.tag/commit-1',
+ b'src/latest',
+ b'src'):
+ exr = run_get(get_disposition, b'--new-tag', (item, b'/'))
wvpassne(0, exr.rc)
- verify_rx(r'destination for .+ must be a VFS tag', exr.err)
+ verify_rx(br'destination for .+ must be a VFS tag', exr.err)

# Anything to new tag.
wvstart(get_disposition + ' --new-tag, blob tag')
- for item in ('.tag/tinyfile', 'src/latest' + tinyfile_path):
- exr = run_get(get_disposition, '--new-tag', (item, '.tag/obj'))
+ for item in (b'.tag/tinyfile', b'src/latest' + tinyfile_path):
+ exr = run_get(get_disposition, b'--new-tag', (item, b'.tag/obj'))
wvpasseq(0, exr.rc)
validate_blob(tinyfile_id, tinyfile_id)
- verify_only_refs(heads=[], tags=('obj',))
+ verify_only_refs(heads=[], tags=(b'obj',))

wvstart(get_disposition + ' --new-tag, tree tag')
- for item in ('.tag/subtree', 'src/latest' + subtree_vfs_path):
- exr = run_get(get_disposition, '--new-tag', (item, '.tag/obj'))
+ for item in (b'.tag/subtree', b'src/latest' + subtree_vfs_path):
+ exr = run_get(get_disposition, b'--new-tag', (item, b'.tag/obj'))
wvpasseq(0, exr.rc)
validate_tree(subtree_id, subtree_id)
- verify_only_refs(heads=[], tags=('obj',))
+ verify_only_refs(heads=[], tags=(b'obj',))

wvstart(get_disposition + ' --new-tag, committish tag')
- for item in ('.tag/commit-2', 'src/latest', 'src'):
- exr = run_get(get_disposition, '--new-tag', (item, '.tag/obj'))
+ for item in (b'.tag/commit-2', b'src/latest', b'src'):
+ exr = run_get(get_disposition, b'--new-tag', (item, b'.tag/obj'))
wvpasseq(0, exr.rc)
- validate_tagged_save('obj', getcwd() + '/src/', commit_2_id, tree_2_id,
- 'src-2', exr.out)
- verify_only_refs(heads=[], tags=('obj',))
-
+ validate_tagged_save(b'obj', getcwd() + b'/src/', commit_2_id, tree_2_id,
+ b'src-2', exr.out)
+ verify_only_refs(heads=[], tags=(b'obj',))
+
# Anything to existing tag (fails).
- for ex_type, ex_tag in (('blob', ('.tag/tinyfile', '.tag/obj')),
- ('tree', ('.tag/tree-1', '.tag/obj')),
- ('commit', ('.tag/commit-1', '.tag/obj'))):
- for item_type, item in (('blob tag', '.tag/tinyfile'),
- ('blob path', 'src/latest' + tinyfile_path),
- ('tree tag', '.tag/subtree'),
- ('tree path', 'src/latest' + subtree_vfs_path),
- ('commit tag', '.tag/commit-2'),
- ('save', 'src/latest'),
- ('branch', 'src')):
+ for ex_type, ex_tag in (('blob', (b'.tag/tinyfile', b'.tag/obj')),
+ ('tree', (b'.tag/tree-1', b'.tag/obj')),
+ ('commit', (b'.tag/commit-1', b'.tag/obj'))):
+ for item_type, item in (('blob tag', b'.tag/tinyfile'),
+ ('blob path', b'src/latest' + tinyfile_path),
+ ('tree tag', b'.tag/subtree'),
+ ('tree path', b'src/latest' + subtree_vfs_path),
+ ('commit tag', b'.tag/commit-2'),
+ ('save', b'src/latest'),
+ ('branch', b'src')):
wvstart(get_disposition + ' --new-tag of ' + item_type
+ ', given existing ' + ex_type + ' tag, fails')
- exr = run_get(get_disposition, '--new-tag', (item, '.tag/obj'),
+ exr = run_get(get_disposition, b'--new-tag', (item, b'.tag/obj'),
given=ex_tag)
wvpassne(0, exr.rc)
- verify_rx(r'cannot overwrite existing tag .* \(requires --replace\)',
+ verify_rx(br'cannot overwrite existing tag .* \(requires --replace\)',
exr.err)

# Anything to branch (fails).
for ex_type, ex_tag in (('nothing', None),
- ('blob', ('.tag/tinyfile', '.tag/obj')),
- ('tree', ('.tag/tree-1', '.tag/obj')),
- ('commit', ('.tag/commit-1', '.tag/obj'))):
- for item_type, item in (('blob tag', '.tag/tinyfile'),
- ('blob path', 'src/latest' + tinyfile_path),
- ('tree tag', '.tag/subtree'),
- ('tree path', 'src/latest' + subtree_vfs_path),
- ('commit tag', '.tag/commit-2'),
- ('save', 'src/latest'),
- ('branch', 'src')):
+ ('blob', (b'.tag/tinyfile', b'.tag/obj')),
+ ('tree', (b'.tag/tree-1', b'.tag/obj')),
+ ('commit', (b'.tag/commit-1', b'.tag/obj'))):
+ for item_type, item in (('blob tag', b'.tag/tinyfile'),
+ ('blob path', b'src/latest' + tinyfile_path),
+ ('tree tag', b'.tag/subtree'),
+ ('tree path', b'src/latest' + subtree_vfs_path),
+ ('commit tag', b'.tag/commit-2'),
+ ('save', b'src/latest'),
+ ('branch', b'src')):
wvstart(get_disposition + ' --new-tag to branch of ' + item_type
+ ', given existing ' + ex_type + ' tag, fails')
- exr = run_get(get_disposition, '--new-tag', (item, 'obj'),
+ exr = run_get(get_disposition, b'--new-tag', (item, b'obj'),
given=ex_tag)
wvpassne(0, exr.rc)
- verify_rx(r'destination for .+ must be a VFS tag', exr.err)
+ verify_rx(br'destination for .+ must be a VFS tag', exr.err)

wvstart(get_disposition + ' --new-tag, implicit destinations')
- exr = run_get(get_disposition, '--new-tag', '.tag/commit-2')
+ exr = run_get(get_disposition, b'--new-tag', b'.tag/commit-2')
wvpasseq(0, exr.rc)
- validate_tagged_save('commit-2', getcwd() + '/src/', commit_2_id, tree_2_id,
- 'src-2', exr.out)
- verify_only_refs(heads=[], tags=('commit-2',))
+ validate_tagged_save(b'commit-2', getcwd() + b'/src/', commit_2_id, tree_2_id,
+ b'src-2', exr.out)
+ verify_only_refs(heads=[], tags=(b'commit-2',))

def test_unnamed(get_disposition, src_info):
tinyfile_id = src_info['tinyfile-id']
tinyfile_path = src_info['tinyfile-path']
subtree_vfs_path = src_info['subtree-vfs-path']
wvstart(get_disposition + ' --unnamed to root fails')
- for item in ('.tag/tinyfile',
- 'src/latest' + tinyfile_path,
- '.tag/subtree',
- 'src/latest' + subtree_vfs_path,
- '.tag/commit-1',
- 'src/latest',
- 'src'):
- for ex_ref in (None, (item, '.tag/obj')):
- exr = run_get(get_disposition, '--unnamed', (item, '/'),
+ for item in (b'.tag/tinyfile',
+ b'src/latest' + tinyfile_path,
+ b'.tag/subtree',
+ b'src/latest' + subtree_vfs_path,
+ b'.tag/commit-1',
+ b'src/latest',
+ b'src'):
+ for ex_ref in (None, (item, b'.tag/obj')):
+ exr = run_get(get_disposition, b'--unnamed', (item, b'/'),
given=ex_ref)
wvpassne(0, exr.rc)
- verify_rx(r'usage: bup get ', exr.err)
+ verify_rx(br'usage: bup get ', exr.err)

wvstart(get_disposition + ' --unnamed file')
- for item in ('.tag/tinyfile', 'src/latest' + tinyfile_path):
- exr = run_get(get_disposition, '--unnamed', item)
+ for item in (b'.tag/tinyfile', b'src/latest' + tinyfile_path):
+ exr = run_get(get_disposition, b'--unnamed', item)
wvpasseq(0, exr.rc)
validate_blob(tinyfile_id, tinyfile_id)
verify_only_refs(heads=[], tags=[])

- exr = run_get(get_disposition, '--unnamed', item,
- given=(item, '.tag/obj'))
+ exr = run_get(get_disposition, b'--unnamed', item,
+ given=(item, b'.tag/obj'))
wvpasseq(0, exr.rc)
validate_blob(tinyfile_id, tinyfile_id)
- verify_only_refs(heads=[], tags=('obj',))
+ verify_only_refs(heads=[], tags=(b'obj',))

wvstart(get_disposition + ' --unnamed tree')
subtree_id = src_info['subtree-id']
- for item in ('.tag/subtree', 'src/latest' + subtree_vfs_path):
- exr = run_get(get_disposition, '--unnamed', item)
+ for item in (b'.tag/subtree', b'src/latest' + subtree_vfs_path):
+ exr = run_get(get_disposition, b'--unnamed', item)
wvpasseq(0, exr.rc)
validate_tree(subtree_id, subtree_id)
verify_only_refs(heads=[], tags=[])

- exr = run_get(get_disposition, '--unnamed', item,
- given=(item, '.tag/obj'))
+ exr = run_get(get_disposition, b'--unnamed', item,
+ given=(item, b'.tag/obj'))
wvpasseq(0, exr.rc)
validate_tree(subtree_id, subtree_id)
- verify_only_refs(heads=[], tags=('obj',))
+ verify_only_refs(heads=[], tags=(b'obj',))

wvstart(get_disposition + ' --unnamed committish')
save_2 = src_info['save-2']
commit_2_id = src_info['commit-2-id']
- for item in ('.tag/commit-2', 'src/' + save_2, 'src'):
- exr = run_get(get_disposition, '--unnamed', item)
+ for item in (b'.tag/commit-2', b'src/' + save_2, b'src'):
+ exr = run_get(get_disposition, b'--unnamed', item)
wvpasseq(0, exr.rc)
validate_commit(commit_2_id, commit_2_id)
verify_only_refs(heads=[], tags=[])

- exr = run_get(get_disposition, '--unnamed', item,
- given=(item, '.tag/obj'))
+ exr = run_get(get_disposition, b'--unnamed', item,
+ given=(item, b'.tag/obj'))
wvpasseq(0, exr.rc)
validate_commit(commit_2_id, commit_2_id)
- verify_only_refs(heads=[], tags=('obj',))
+ verify_only_refs(heads=[], tags=(b'obj',))

def create_get_src():
global bup_cmd, src_info
wvstart('preparing')
- ex((bup_cmd, '-d', 'get-src', 'init'))
-
- mkdir('src')
- open('src/unrelated', 'a').close()
- ex((bup_cmd, '-d', 'get-src', 'index', 'src'))
- ex((bup_cmd, '-d', 'get-src', 'save', '-tcn', 'unrelated-branch', 'src'))
-
- ex((bup_cmd, '-d', 'get-src', 'index', '--clear'))
- rmrf('src')
- mkdir('src')
- open('src/zero', 'a').close()
- ex((bup_cmd, '-d', 'get-src', 'index', 'src'))
- exr = exo((bup_cmd, '-d', 'get-src', 'save', '-tcn', 'src', 'src'))
+ ex((bup_cmd, b'-d', b'get-src', b'init'))
+
+ mkdir(b'src')
+ open(b'src/unrelated', 'a').close()
+ ex((bup_cmd, b'-d', b'get-src', b'index', b'src'))
+ ex((bup_cmd, b'-d', b'get-src', b'save', b'-tcn', b'unrelated-branch', b'src'))
+
+ ex((bup_cmd, b'-d', b'get-src', b'index', b'--clear'))
+ rmrf(b'src')
+ mkdir(b'src')
+ open(b'src/zero', 'a').close()
+ ex((bup_cmd, b'-d', b'get-src', b'index', b'src'))
+ exr = exo((bup_cmd, b'-d', b'get-src', b'save', b'-tcn', b'src', b'src'))
out = exr.out.splitlines()
tree_0_id = out[0]
commit_0_id = out[-1]
- exr = exo((bup_cmd, '-d', 'get-src', 'ls', 'src'))
+ exr = exo((bup_cmd, b'-d', b'get-src', b'ls', b'src'))
save_0 = exr.out.splitlines()[0]
- ex(('git', '--git-dir', 'get-src', 'branch', 'src-0', 'src'))
- ex(('cp', '-RPp', 'src', 'src-0'))
+ ex((b'git', b'--git-dir', b'get-src', b'branch', b'src-0', b'src'))
+ ex((b'cp', b'-RPp', b'src', b'src-0'))

- rmrf('src')
- mkdir('src')
- mkdir('src/x')
- mkdir('src/x/y')
- ex((bup_cmd + ' -d get-src random 1k > src/1'), shell=True)
- ex((bup_cmd + ' -d get-src random 1k > src/x/2'), shell=True)
- ex((bup_cmd, '-d', 'get-src', 'index', 'src'))
- exr = exo((bup_cmd, '-d', 'get-src', 'save', '-tcn', 'src', 'src'))
+ rmrf(b'src')
+ mkdir(b'src')
+ mkdir(b'src/x')
+ mkdir(b'src/x/y')
+ ex((bup_cmd + b' -d get-src random 1k > src/1'), shell=True)
+ ex((bup_cmd + b' -d get-src random 1k > src/x/2'), shell=True)
+ ex((bup_cmd, b'-d', b'get-src', b'index', b'src'))
+ exr = exo((bup_cmd, b'-d', b'get-src', b'save', b'-tcn', b'src', b'src'))
out = exr.out.splitlines()
tree_1_id = out[0]
commit_1_id = out[-1]
- exr = exo((bup_cmd, '-d', 'get-src', 'ls', 'src'))
+ exr = exo((bup_cmd, b'-d', b'get-src', b'ls', b'src'))
save_1 = exr.out.splitlines()[1]
- ex(('git', '--git-dir', 'get-src', 'branch', 'src-1', 'src'))
- ex(('cp', '-RPp', 'src', 'src-1'))
+ ex((b'git', b'--git-dir', b'get-src', b'branch', b'src-1', b'src'))
+ ex((b'cp', b'-RPp', b'src', b'src-1'))

# Make a copy the current state of src so we'll have an ancestor.
- ex(('cp', '-RPp',
- 'get-src/refs/heads/src', 'get-src/refs/heads/src-ancestor'))
+ ex((b'cp', b'-RPp',
+ b'get-src/refs/heads/src', b'get-src/refs/heads/src-ancestor'))

- with open('src/tiny-file', 'a') as f: f.write('xyzzy')
- ex((bup_cmd, '-d', 'get-src', 'index', 'src'))
- ex((bup_cmd, '-d', 'get-src', 'tick')) # Ensure the save names differ
- exr = exo((bup_cmd, '-d', 'get-src', 'save', '-tcn', 'src', 'src'))
+ with open(b'src/tiny-file', 'ab') as f: f.write(b'xyzzy')
+ ex((bup_cmd, b'-d', b'get-src', b'index', b'src'))
+ ex((bup_cmd, b'-d', b'get-src', b'tick')) # Ensure the save names differ
+ exr = exo((bup_cmd, b'-d', b'get-src', b'save', b'-tcn', b'src', b'src'))
out = exr.out.splitlines()
tree_2_id = out[0]
commit_2_id = out[-1]
- exr = exo((bup_cmd, '-d', 'get-src', 'ls', 'src'))
+ exr = exo((bup_cmd, b'-d', b'get-src', b'ls', b'src'))
save_2 = exr.out.splitlines()[2]
- rename('src', 'src-2')
+ rename(b'src', b'src-2')

- src_root = getcwd() + '/src'
+ src_root = getcwd() + b'/src'

- subtree_path = 'src-2/x'
- subtree_vfs_path = src_root + '/x'
+ subtree_path = b'src-2/x'
+ subtree_vfs_path = src_root + b'/x'

# No support for "ls -d", so grep...
- exr = exo((bup_cmd, '-d', 'get-src', 'ls', '-s', 'src/latest' + src_root))
+ exr = exo((bup_cmd, b'-d', b'get-src', b'ls', b'-s', b'src/latest' + src_root))
out = exr.out.splitlines()
subtree_id = None
for line in out:
- if 'x' in line:
+ if b'x' in line:
subtree_id = line.split()[0]
assert(subtree_id)

# With a tiny file, we'll get a single blob, not a chunked tree
- tinyfile_path = src_root + '/tiny-file'
- exr = exo((bup_cmd, '-d', 'get-src', 'ls', '-s', 'src/latest' + tinyfile_path))
+ tinyfile_path = src_root + b'/tiny-file'
+ exr = exo((bup_cmd, b'-d', b'get-src', b'ls', b'-s', b'src/latest' + tinyfile_path))
tinyfile_id = exr.out.splitlines()[0].split()[0]

- ex((bup_cmd, '-d', 'get-src', 'tag', 'tinyfile', tinyfile_id))
- ex((bup_cmd, '-d', 'get-src', 'tag', 'subtree', subtree_id))
- ex((bup_cmd, '-d', 'get-src', 'tag', 'tree-0', tree_0_id))
- ex((bup_cmd, '-d', 'get-src', 'tag', 'tree-1', tree_1_id))
- ex((bup_cmd, '-d', 'get-src', 'tag', 'tree-2', tree_2_id))
- ex((bup_cmd, '-d', 'get-src', 'tag', 'commit-0', commit_0_id))
- ex((bup_cmd, '-d', 'get-src', 'tag', 'commit-1', commit_1_id))
- ex((bup_cmd, '-d', 'get-src', 'tag', 'commit-2', commit_2_id))
- ex(('git', '--git-dir', 'get-src', 'branch', 'commit-1', commit_1_id))
- ex(('git', '--git-dir', 'get-src', 'branch', 'commit-2', commit_2_id))
+ ex((bup_cmd, b'-d', b'get-src', b'tag', b'tinyfile', tinyfile_id))
+ ex((bup_cmd, b'-d', b'get-src', b'tag', b'subtree', subtree_id))
+ ex((bup_cmd, b'-d', b'get-src', b'tag', b'tree-0', tree_0_id))
+ ex((bup_cmd, b'-d', b'get-src', b'tag', b'tree-1', tree_1_id))
+ ex((bup_cmd, b'-d', b'get-src', b'tag', b'tree-2', tree_2_id))
+ ex((bup_cmd, b'-d', b'get-src', b'tag', b'commit-0', commit_0_id))
+ ex((bup_cmd, b'-d', b'get-src', b'tag', b'commit-1', commit_1_id))
+ ex((bup_cmd, b'-d', b'get-src', b'tag', b'commit-2', commit_2_id))
+ ex((b'git', b'--git-dir', b'get-src', b'branch', b'commit-1', commit_1_id))
+ ex((b'git', b'--git-dir', b'get-src', b'branch', b'commit-2', commit_2_id))

return {'tinyfile-path' : tinyfile_path,
'tinyfile-id' : tinyfile_id,
@@ -946,7 +959,7 @@ def create_get_src():

dispositions_to_test = ('get',)

-if int(environ.get('BUP_TEST_LEVEL', '0')) >= 11:
+if int(environ.get(b'BUP_TEST_LEVEL', b'0')) >= 11:
dispositions_to_test += ('get-on', 'get-to')

if len(sys.argv) == 1:
@@ -955,7 +968,7 @@ if len(sys.argv) == 1:
else:
categories = sys.argv[1:]

-with test_tempdir('get-') as tmpdir:
+with test_tempdir(b'get-') as tmpdir:
chdir(tmpdir)
try:
src_info = create_get_src()
@@ -979,7 +992,7 @@ with test_tempdir('get-') as tmpdir:
test_unnamed(disposition, src_info)
else:
raise Exception('unrecognized get test category')
- except Exception, ex:
+ except Exception as ex:
chdir(top)
raise
chdir(top)
--
2.24.1
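
The reason essentially every literal in the test above becomes b'...' is that, under Python 3, the test presumably has to hand bup bytes rather than text for paths, refs, and command arguments. A minimal standalone sketch of the same bytes-everywhere pattern, assuming a POSIX platform (the file and directory names here are only for illustration, not bup's):

# Keep paths and subprocess arguments as bytes end to end, so nothing is
# ever decoded lossily.
import os
import subprocess

src_root = os.fsencode(os.getcwd()) + b'/src'      # like getcwd() + b'/src' above
os.makedirs(src_root, exist_ok=True)
with open(src_root + b'/tiny-file', 'ab') as f:    # open() accepts bytes paths
    f.write(b'xyzzy')

# On POSIX, subprocess accepts bytes argv elements too.
print(subprocess.check_output([b'ls', b'-1', src_root]).split())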

Rob Browning

Feb 8, 2020, 2:26:21 PM
to bup-...@googlegroups.com
Note in the Makefile that t/test-web.sh stays Python 2 only for now, since cmd-web hasn't been adjusted for Python 3 yet.

Signed-off-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 2 ++
1 file changed, 2 insertions(+)

diff --git a/Makefile b/Makefile
index 3f709a54..8b062a44 100644
--- a/Makefile
+++ b/Makefile
@@ -207,6 +207,8 @@ cmdline_tests := \
t/test-xdev.sh

ifeq "2" "$(bup_python_majver)"
+ # unresolved
+ # web: needs more careful attention, path bytes round-trips, reprs, etc.
cmdline_tests += \
t/test-web.sh
endif
--
2.24.1
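
The "path bytes round-trips" in the new comment presumably refers to conversions like the following; that reading is an assumption, not something spelled out in the patch. A sketch for Python 3 on POSIX:

# A non-UTF-8 path can be carried as str via the filesystem encoding's
# surrogateescape handler and converted back to the identical bytes.
import os

raw = b'caf\xe9.txt'              # Latin-1 name, not valid UTF-8
text = os.fsdecode(raw)           # undecodable bytes become lone surrogates
assert os.fsencode(text) == raw   # original bytes recovered exactly
print(repr(raw), repr(text))      # bytes and str reprs also differ, presumably the "reprs" the comment mentions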

Rob Browning

Feb 8, 2020, 2:26:21 PM
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
Makefile | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/Makefile b/Makefile
index 2158f49c..385fddde 100644
--- a/Makefile
+++ b/Makefile
@@ -169,6 +169,8 @@ runtests-python: all t/tmp

cmdline_tests := \
t/test-argv \
+ t/test-cat-file.sh \
+ t/test-command-without-init-fails.sh \
t/test-compression.sh \
t/test-drecurse.sh \
t/test-fsck.sh \
@@ -191,7 +193,6 @@ ifeq "2" "$(bup_python_majver)"
t/test-index.sh \
t/test-split-join.sh \
t/test-fuse.sh \
- t/test-cat-file.sh \
t/test-index-check-device.sh \
t/test-meta.sh \
t/test-on.sh \
@@ -200,7 +201,6 @@ ifeq "2" "$(bup_python_majver)"
t/test-rm-between-index-and-save.sh \
t/test-save-with-valid-parent.sh \
t/test-sparse-files.sh \
- t/test-command-without-init-fails.sh \
t/test-redundant-saves.sh \
t/test-save-creates-no-unrefs.sh \
t/test-save-restore-excludes.sh \
--
2.24.1

Rob Browning

Feb 8, 2020, 2:26:22 PM
to bup-...@googlegroups.com
Signed-off-by: Rob Browning <r...@defaultvalue.org>
Tested-by: Rob Browning <r...@defaultvalue.org>
---
t/id-other-than | 14 +++++++++-----
1 file changed, 9 insertions(+), 5 deletions(-)

diff --git a/t/id-other-than b/t/id-other-than
index eb6aead5..9c240fa9 100755
--- a/t/id-other-than
+++ b/t/id-other-than
@@ -5,14 +5,18 @@ exec "$bup_python" "$0" ${1+"$@"}
"""
# end of bup preamble

-from __future__ import absolute_import
+# Note: this currently relies on bup-python to handle arbitrary binary
+# user/group names.
+
+from __future__ import absolute_import, print_function

import grp
import pwd
import sys

def usage():
- print >> sys.stderr, "Usage: id-other-than <--user|--group> ID [ID ...]"
+ print('Usage: id-other-than <--user|--group> ID [ID ...]',
+ file=sys.stderr)

if len(sys.argv) < 2:
usage()
@@ -22,7 +26,7 @@ def is_integer(x):
try:
int(x)
return True
- except ValueError, e:
+ except ValueError as e:
return False

excluded_ids = set(int(x) for x in sys.argv[2:] if is_integer(x))
@@ -33,14 +37,14 @@ if sys.argv[1] == '--user':
excluded_ids.add(pwd.getpwnam(x).pw_uid)
for x in pwd.getpwall():
if x.pw_uid not in excluded_ids:
- print x.pw_name + ':' + str(x.pw_uid)
+ print(x.pw_name + ':' + str(x.pw_uid))
sys.exit(0)
elif sys.argv[1] == '--group':
for x in excluded_names:
excluded_ids.add(grp.getgrnam(x).gr_gid)
for x in grp.getgrall():
if x.gr_gid not in excluded_ids:
- print x.gr_name + ':' + str(x.gr_gid)
+ print(x.gr_name + ':' + str(x.gr_gid))
sys.exit(0)
else:
usage()
--
2.24.1
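
On the new note about arbitrary binary user/group names: under Python 2, pwd and grp return plain byte strings, so the script can print them verbatim; under Python 3 they come back as str, with undecodable bytes mapped to surrogates. A sketch of how a Python 3 port might still emit them losslessly (an illustration, not part of the patch):

# Recover the original name bytes and write them to stdout's raw buffer.
import os, pwd, sys

for x in pwd.getpwall():
    name = os.fsencode(x.pw_name)   # surrogateescape round-trip, even for non-UTF-8 names
    sys.stdout.buffer.write(name + b':' + str(x.pw_uid).encode('ascii') + b'\n')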
