[PATCH s3storage V2] [storages] workaround for boto bug to keep file open

Arthur Lutz arthur.lutz at logilab.fr
Wed Jan 29 17:06:07 CET 2020


# HG changeset patch
# User Arthur Lutz <arthur.lutz at logilab.fr>
# Date 1580311314 -3600
#      Wed Jan 29 16:21:54 2020 +0100
# Node ID 82577b1215044411d44578626892556ff80584fe
# Parent  adb7dd73f6c76f0cf6d900f821e88cf4a6aa8d07
# EXP-Topic boto-file-closed-hack
[storages] workaround for boto bug to keep file open

Hooks require the file to be accessible and still open after the upload.
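
For reference, here is roughly what the workaround amounts to outside of the
cube: upload_fileobj() (via s3transfer) closes the file object it is given
once the transfer finishes, so a BytesIO subclass whose close() only flushes
keeps the data readable afterwards. This is a minimal sketch; the boto3
client setup, bucket/key names and the helper function are illustrative and
not part of the patch.

from io import BytesIO

import boto3  # assumed available; the cube already depends on it


class NonCloseableBufferedReader(BytesIO):
    """BytesIO whose close() only flushes, so s3transfer cannot close it."""
    def close(self):
        self.flush()


def upload_keeping_data_readable(s3cnx, bucket, key, data):
    """Upload data (bytes) to s3://<bucket>/<key>, return a still-open buffer."""
    buffer = NonCloseableBufferedReader(data)
    # upload_fileobj handles multipart uploads automatically, but it also
    # closes the file object when done -- hence the subclass above
    # (https://github.com/boto/s3transfer/issues/80)
    s3cnx.upload_fileobj(buffer, bucket, key)
    buffer.seek(0)
    return buffer


# hypothetical usage:
# s3cnx = boto3.client('s3')
# buf = upload_keeping_data_readable(s3cnx, 'my-bucket', 'some/key', b'payload')
# assert buf.read() == b'payload'  # data still accessible after the upload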

diff --git a/cubicweb_s3storage/storages.py b/cubicweb_s3storage/storages.py
--- a/cubicweb_s3storage/storages.py
+++ b/cubicweb_s3storage/storages.py
@@ -29,6 +29,17 @@
 from cubicweb.server.sources.storages import Storage
 from cubicweb.server.edition import EditedEntity
 
+if PY3:
+    from io import BytesIO
+else:
+    # StringIO from cubicweb < 3.27 - remove when upgraded cubicweb / py3
+    from StringIO import StringIO as BytesIO
+
+
+class NonCloseableBufferedReader(BytesIO):
+    def close(self):
+        self.flush()
+
 
 class S3Storage(Storage):
     is_source_callback = True
@@ -66,10 +77,14 @@
             # bytes storage used to store S3's object key
             binary_obj = Binary(key.encode())
             entity.cw_edited.edited_attribute(attr, binary_obj)
+            # required workaround for boto bug
+            # https://github.com/boto/s3transfer/issues/80
+            buffer = NonCloseableBufferedReader(binary.read())
             self.debug('Upload object to S3')
             # upload_fileobj should make automagically a multipart upload if
             # needed
-            self.s3cnx.upload_fileobj(binary, self.bucket, key)
+            self.s3cnx.upload_fileobj(buffer, self.bucket, key)
+            buffer.close()
             self.info('Uploaded object %s.%s to S3', entity.eid, attr)
         return binary
 


