path: root/src/s3ql/mkfs.py
author    Nikolaus Rath <Nikolaus@rath.org>  2016-03-09 10:09:11 -0800
committer Nikolaus Rath <Nikolaus@rath.org>  2016-03-09 10:09:11 -0800
commit    347f05d4ab3ac1d7ce1db9599278f6533477acf5 (patch)
tree      9f62d5769996d0cfa574fbcff6df20425cc2e531 /src/s3ql/mkfs.py
parent    8f568f6678cf1520b608cd587513399e026b881f (diff)
Import s3ql_2.5.orig.tar.bz2
Diffstat (limited to 'src/s3ql/mkfs.py')
-rw-r--r--  src/s3ql/mkfs.py  63
1 file changed, 39 insertions(+), 24 deletions(-)
diff --git a/src/s3ql/mkfs.py b/src/s3ql/mkfs.py
index c1385a4..1f29f75 100644
--- a/src/s3ql/mkfs.py
+++ b/src/s3ql/mkfs.py
@@ -6,25 +6,26 @@ Copyright (C) 2008-2009 Nikolaus Rath <Nikolaus@rath.org>
This program can be distributed under the terms of the GNU GPLv3.
'''
-from __future__ import division, print_function, absolute_import
+from .logging import logging, setup_logging, QuietError
from . import CURRENT_FS_REV
from .backends.common import get_backend, BetterBackend, DanglingStorageURLError
-from .common import get_backend_cachedir, setup_logging, QuietError, CTRL_INODE, stream_write_bz2
+from .backends import s3
+from .common import get_backend_cachedir, CTRL_INODE, stream_write_bz2, PICKLE_PROTOCOL
from .database import Connection
from .metadata import dump_metadata, create_tables
from .parse_args import ArgumentParser
from getpass import getpass
from llfuse import ROOT_INODE
-import cPickle as pickle
-import logging
import os
+import pickle
import shutil
import stat
import sys
import tempfile
import time
+import atexit
-log = logging.getLogger("mkfs")
+log = logging.getLogger(__name__)
def parse_args(args):
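
The import hunk above replaces the hard-coded logger name "mkfs" with __name__ and drops cPickle for Python 3's pickle. With __name__, the logger resolves to "s3ql.mkfs" when the module is imported as part of the package, so configuring the "s3ql" parent logger covers every submodule. A minimal sketch of the pattern (the handler setup is illustrative; the real configuration lives in s3ql's setup_logging):

import logging

# Per-module logger: named "s3ql.mkfs" on package import, so levels
# and handlers set on the "s3ql" parent apply here automatically.
log = logging.getLogger(__name__)

def demo_setup():
    # Illustrative stand-in for setup_logging(); the real helper also
    # honors command-line options and installs custom handlers.
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter('%(name)s: %(levelname)s: %(message)s'))
    parent = logging.getLogger('s3ql')
    parent.addHandler(handler)
    parent.setLevel(logging.INFO)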
@@ -38,6 +39,7 @@ def parse_args(args):
parser.add_ssl()
parser.add_version()
parser.add_storage_url()
+ parser.add_fatal_warnings()
parser.add_argument("-L", default='', help="Filesystem label",
dest="label", metavar='<name>',)
@@ -88,17 +90,25 @@ def main(args=None):
setup_logging(options)
if options.max_obj_size < 1024:
- log.warn('Warning: maximum object sizes less than 1 MiB will seriously degrade '
- 'performance.')
+ # This warning should never be converted to an exception
+ log.warning('Warning: maximum object sizes less than 1 MiB will seriously degrade '
+ 'performance.', extra={ 'force_log': True })
try:
plain_backend = get_backend(options, plain=True)
+ atexit.register(plain_backend.close)
except DanglingStorageURLError as exc:
- raise QuietError(str(exc))
+ raise QuietError(str(exc)) from None
log.info("Before using S3QL, make sure to read the user's guide, especially\n"
"the 'Important Rules to Avoid Loosing Data' section.")
-
+
+ if isinstance(plain_backend, s3.Backend) and '.' in plain_backend.bucket_name:
+ log.warning('***Warning*** S3 Buckets with names containing dots cannot be '
+ 'accessed using SSL!')
+ log.warning('(cf. https://forums.aws.amazon.com/thread.jspa?threadID=130560)')
+
+
if 's3ql_metadata' in plain_backend:
if not options.force:
raise QuietError("Found existing file system! Use --force to overwrite")
@@ -115,6 +125,7 @@ def main(args=None):
raise QuietError("Passwords don't match.")
else:
wrap_pw = sys.stdin.readline().rstrip()
+ wrap_pw = wrap_pw.encode('utf-8')
# Generate data encryption passphrase
log.info('Generating random encryption key...')
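
The added .encode('utf-8') is a Python 3 port detail: sys.stdin.readline() returns str, while the key-wrapping layer needs bytes. A minimal illustration:

# str as read from stdin -> bytes for the cryptographic layer
wrap_pw = 'correct horse battery staple\n'.rstrip()
wrap_pw = wrap_pw.encode('utf-8')
assert isinstance(wrap_pw, bytes)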
@@ -122,13 +133,15 @@ def main(args=None):
data_pw = fh.read(32)
fh.close()
- backend = BetterBackend(wrap_pw, 'bzip2', plain_backend)
+ backend = BetterBackend(wrap_pw, ('lzma', 2), plain_backend)
backend['s3ql_passphrase'] = data_pw
else:
data_pw = None
- backend = BetterBackend(data_pw, 'bzip2', plain_backend)
-
+ backend = BetterBackend(data_pw, ('lzma', 2), plain_backend)
+ atexit.unregister(plain_backend.close)
+ atexit.register(backend.close)
+
# Setup database
cachepath = get_backend_cachedir(options.storage_url, options.cachedir)
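
The default compression moves from the bare string 'bzip2' to the tuple ('lzma', 2), i.e. algorithm name plus level. BetterBackend's framing is internal to s3ql, but the codecs correspond to standard-library modules; a sketch of the two defaults side by side (the trade-off comment is an assumption, not something the patch states):

import bz2
import lzma

data = b'metadata payload ' * 4096

old_blob = bz2.compress(data)              # old default: bzip2
new_blob = lzma.compress(data, preset=2)   # new default: LZMA, preset 2

# Lower LZMA presets generally favor speed over ratio, which suits
# the small objects mkfs uploads.
print(len(old_blob), len(new_blob))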
@@ -160,19 +173,21 @@ def main(args=None):
param['backend_revision'] = 1
log.info('Dumping metadata...')
- fh = tempfile.TemporaryFile()
- dump_metadata(db, fh)
- def do_write(obj_fh):
- fh.seek(0)
- stream_write_bz2(fh, obj_fh)
- return obj_fh
-
- log.info("Compressing and uploading metadata...")
- backend.store('s3ql_seq_no_%d' % param['seq_no'], 'Empty')
- obj_fh = backend.perform_write(do_write, "s3ql_metadata", metadata=param,
- is_compressed=True)
+ with tempfile.TemporaryFile() as fh:
+ dump_metadata(db, fh)
+ def do_write(obj_fh):
+ fh.seek(0)
+ stream_write_bz2(fh, obj_fh)
+ return obj_fh
+
+ log.info("Compressing and uploading metadata...")
+ backend.store('s3ql_seq_no_%d' % param['seq_no'], b'Empty')
+ obj_fh = backend.perform_write(do_write, "s3ql_metadata", metadata=param,
+ is_compressed=True)
+
log.info('Wrote %.2f MiB of compressed metadata.', obj_fh.get_obj_size() / 1024 ** 2)
- pickle.dump(param, open(cachepath + '.params', 'wb'), 2)
+ with open(cachepath + '.params', 'wb') as fh:
+ pickle.dump(param, fh, PICKLE_PROTOCOL)
if __name__ == '__main__':
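
The final hunk converts both the metadata spool file and the .params dump to context managers, so the handles are closed on every exit path, and pins the pickle protocol to the shared PICKLE_PROTOCOL constant instead of a literal 2. A condensed sketch of the same flow; the stream_write_bz2 body and the PICKLE_PROTOCOL value are plausible guesses, only their names come from the patch:

import bz2
import pickle
import tempfile

PICKLE_PROTOCOL = 2   # assumed value; the patch only imports the constant

def stream_write_bz2(ifh, ofh, chunk=64 * 1024):
    # Plausible implementation of s3ql's helper: stream-compress the
    # already-dumped metadata from ifh into the object handle ofh.
    comp = bz2.BZ2Compressor(9)
    while True:
        buf = ifh.read(chunk)
        if not buf:
            break
        ofh.write(comp.compress(buf))
    ofh.write(comp.flush())

param = {'seq_no': 1, 'label': 'demo'}

with tempfile.TemporaryFile() as fh:
    fh.write(b'...stands in for dump_metadata(db, fh)...')
    fh.seek(0)
    with open('s3ql_metadata.bz2', 'wb') as obj_fh:
        stream_write_bz2(fh, obj_fh)

with open('demo.params', 'wb') as fh:
    pickle.dump(param, fh, PICKLE_PROTOCOL)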