author     Nikolaus Rath <Nikolaus@rath.org>  2016-03-09 10:08:31 -0800
committer  Nikolaus Rath <Nikolaus@rath.org>  2016-03-09 10:08:31 -0800
commit     3caad2930aec3a2af013eb98d9e231f3f6abb93f (patch)
tree       c4206545fef08c62473a7f7cdf122e6fc2b73c87 /src/s3ql/backends
parent     dcdc33cc51b7e4fd4d370df89d5e42377cb04e51 (diff)

    Import s3ql_1.7.orig.tar.bz2
Diffstat (limited to 'src/s3ql/backends')
 -rw-r--r--  src/s3ql/backends/common.py  14
 -rw-r--r--  src/s3ql/backends/local.py    5
 -rw-r--r--  src/s3ql/backends/s3.py      25
 3 files changed, 43 insertions(+), 1 deletion(-)
diff --git a/src/s3ql/backends/common.py b/src/s3ql/backends/common.py
index e7ed5fc..72a58a9 100644
--- a/src/s3ql/backends/common.py
+++ b/src/s3ql/backends/common.py
@@ -246,6 +246,11 @@ class AbstractBucket(object):
         pass
 
     @abstractmethod
+    def get_size(self, key):
+        '''Return size of object stored under *key*'''
+        pass
+
+    @abstractmethod
     def open_read(self, key):
         """Open object for reading
@@ -371,6 +376,15 @@ class BetterBucket(AbstractBucket):
         convert_legacy_metadata(metadata)
         return self._unwrap_meta(metadata)
 
+    def get_size(self, key):
+        '''Return size of object stored under *key*
+
+        This method returns the compressed size, i.e. the storage space
+        that's actually occupied by the object.
+        '''
+
+        return self.bucket.get_size(key)
+
     def is_temp_failure(self, exc):
         '''Return true if exc indicates a temporary error
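
With get_size() part of the abstract interface, every backend can report the
storage space an object actually occupies, and BetterBucket simply delegates
to the bucket it wraps. A minimal usage sketch (the total_storage() helper is
hypothetical, not part of this commit; it assumes buckets are iterable over
their object keys, as Bucket.clear() below suggests):

    def total_storage(bucket):
        '''Sum the storage occupied by all objects (hypothetical helper)'''
        total = 0
        for key in bucket:  # assumes iteration yields object keys
            total += bucket.get_size(key)
        return total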
diff --git a/src/s3ql/backends/local.py b/src/s3ql/backends/local.py
index 46aabad..443af9b 100644
--- a/src/s3ql/backends/local.py
+++ b/src/s3ql/backends/local.py
@@ -66,6 +66,11 @@ class Bucket(AbstractBucket):
                 raise ChecksumError('Invalid metadata')
             raise
 
+    def get_size(self, key):
+        '''Return size of object stored under *key*'''
+
+        return os.path.getsize(self._key_to_path(key))
+
     def open_read(self, key):
         """Open object for reading
diff --git a/src/s3ql/backends/s3.py b/src/s3ql/backends/s3.py
index ae1c306..70d417d 100644
--- a/src/s3ql/backends/s3.py
+++ b/src/s3ql/backends/s3.py
@@ -242,6 +242,27 @@ class Bucket(AbstractBucket):
         return extractmeta(resp)
 
     @retry
+    def get_size(self, key):
+        '''Return size of object stored under *key*'''
+
+        log.debug('get_size(%s)', key)
+
+        try:
+            resp = self._do_request('HEAD', '/%s%s' % (self.prefix, key))
+            assert resp.length == 0
+        except HTTPError as exc:
+            if exc.status == 404:
+                raise NoSuchObject(key)
+            else:
+                raise
+
+        for (name, val) in resp.getheaders():
+            if name.lower() == 'content-length':
+                return int(val)
+        raise RuntimeError('HEAD request did not return Content-Length')
+
+
+    @retry
     def open_read(self, key):
         """Open object for reading
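
The S3 implementation relies on the fact that a HEAD request returns the same
headers as a GET, including Content-Length, but no body (hence the assertion
that resp.length == 0). A standalone sketch of that HTTP exchange using only
the standard library (the host, path, and error handling are simplifications
for illustration, not the commit's code, which also adds retry and
authentication logic):

    import http.client

    def head_size(host, path):
        # Issue a HEAD request and read the object size from the
        # Content-Length response header; no body is transferred.
        conn = http.client.HTTPConnection(host)
        try:
            conn.request('HEAD', path)
            resp = conn.getresponse()
            if resp.status == 404:
                raise KeyError('no such object: %s' % path)
            size = resp.getheader('Content-Length')
            if size is None:
                raise RuntimeError('HEAD request did not return Content-Length')
            return int(size)
        finally:
            conn.close()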
@@ -403,7 +424,9 @@ class Bucket(AbstractBucket):
         False.
         """
 
-        for (no, s3key) in enumerate(self):
+        # We have to cache keys, because otherwise we can't use the
+        # http connection to delete keys.
+        for (no, s3key) in enumerate(list(self)):
             if no != 0 and no % 1000 == 0:
                 log.info('clear(): deleted %d objects so far..', no)
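
The final hunk fixes a subtle bug: iterating over the bucket lazily pages
through LIST responses on the same HTTP connection that delete() needs, so
deleting while iterating would clobber the in-flight response. Materializing
the key list first sidesteps this. A toy illustration of the general
iterate-while-mutating hazard (using a plain dict, not the s3ql Bucket):

    objects = {'a': 1, 'b': 2, 'c': 3}

    # Snapshot the keys first, analogous to enumerate(list(self)):
    # deleting during a live iteration over *objects* itself would
    # raise RuntimeError in Python 3.
    for key in list(objects):
        del objects[key]

    assert not objects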