summaryrefslogtreecommitdiff
path: root/src/s3ql/mkfs.py
diff options
context:
space:
mode:
author: Nikolaus Rath <Nikolaus@rath.org> 2016-03-09 10:10:20 -0800
committer: Nikolaus Rath <Nikolaus@rath.org> 2016-03-09 10:10:20 -0800
commit: 57ba7d4c658aa7c5d2e0ca2ae71e3915e6052b17 (patch)
tree: 192904d2eaa4f63ec239b644c75797c6024b2e2a /src/s3ql/mkfs.py
parent: 061b768a9d87d125df6edb494df519447fab21c6 (diff)
Import s3ql_2.14+dfsg.orig.tar.gz
Diffstat (limited to 'src/s3ql/mkfs.py')
-rw-r--r-- src/s3ql/mkfs.py | 31
1 file changed, 8 insertions(+), 23 deletions(-)
diff --git a/src/s3ql/mkfs.py b/src/s3ql/mkfs.py
index 27f48fe..ba46e35 100644
--- a/src/s3ql/mkfs.py
+++ b/src/s3ql/mkfs.py
@@ -7,22 +7,20 @@ This program can be distributed under the terms of the GNU GPLv3.
'''
from .logging import logging, setup_logging, QuietError
-from . import CURRENT_FS_REV, CTRL_INODE, PICKLE_PROTOCOL, ROOT_INODE
+from . import CURRENT_FS_REV, CTRL_INODE, ROOT_INODE
from .backends.comprenc import ComprencBackend
from .backends import s3
-from .common import (get_backend_cachedir, stream_write_bz2, get_backend,
- pretty_print_size, split_by_n)
+from .common import (get_backend_cachedir, get_backend, split_by_n,
+ freeze_basic_mapping)
from .database import Connection
-from .metadata import dump_metadata, create_tables
+from .metadata import dump_and_upload_metadata, create_tables
from .parse_args import ArgumentParser
from getpass import getpass
from base64 import b64encode
import os
-import pickle
import shutil
import stat
import sys
-import tempfile
import time
import atexit
@@ -95,7 +93,7 @@ def main(args=None):
log.warning('Warning: maximum object sizes less than 1 MiB will seriously degrade '
'performance.', extra={ 'force_log': True })
- plain_backend = get_backend(options, plain=True)
+ plain_backend = get_backend(options, raw=True)
atexit.register(plain_backend.close)
log.info("Before using S3QL, make sure to read the user's guide, especially\n"
@@ -170,23 +168,10 @@ def main(args=None):
param['last-modified'] = time.time()
log.info('Dumping metadata...')
- with tempfile.TemporaryFile() as fh:
- dump_metadata(db, fh)
- def do_write(obj_fh):
- fh.seek(0)
- stream_write_bz2(fh, obj_fh)
- return obj_fh
-
- # Store metadata first, and seq_no second so that if mkfs
- # is interrupted, fsck won't see a file system at all.
- log.info("Compressing and uploading metadata...")
- obj_fh = backend.perform_write(do_write, "s3ql_metadata", metadata=param,
- is_compressed=True)
- backend.store('s3ql_seq_no_%d' % param['seq_no'], b'Empty')
-
- log.info('Wrote %s of compressed metadata.', pretty_print_size(obj_fh.get_obj_size()))
+ dump_and_upload_metadata(backend, db, param)
+ backend.store('s3ql_seq_no_%d' % param['seq_no'], b'Empty')
with open(cachepath + '.params', 'wb') as fh:
- pickle.dump(param, fh, PICKLE_PROTOCOL)
+ fh.write(freeze_basic_mapping(param))
if data_pw is not None:
print('Please store the following master key in a safe location. It allows ',