From 525731c4b595876b24ed7c578f7daf53d2b7f2ce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20BEAU?= Date: Wed, 12 Apr 2017 12:32:25 +0200 Subject: [PATCH 001/375] [IMP] add meta code --- storage_backend/__init__.py | 4 ++ storage_backend/__openerp__.py | 31 +++++++++ storage_backend/models/__init__.py | 3 + storage_backend/models/storage_backend.py | 19 +++++ .../views/backend_storage_view.xml | 69 +++++++++++++++++++ 5 files changed, 126 insertions(+) create mode 100644 storage_backend/__init__.py create mode 100644 storage_backend/__openerp__.py create mode 100644 storage_backend/models/__init__.py create mode 100644 storage_backend/models/storage_backend.py create mode 100644 storage_backend/views/backend_storage_view.xml diff --git a/storage_backend/__init__.py b/storage_backend/__init__.py new file mode 100644 index 0000000000..77bbdbd391 --- /dev/null +++ b/storage_backend/__init__.py @@ -0,0 +1,4 @@ +# -*- coding: utf-8 -*- + +from . import models + diff --git a/storage_backend/__openerp__.py b/storage_backend/__openerp__.py new file mode 100644 index 0000000000..08b3258c34 --- /dev/null +++ b/storage_backend/__openerp__.py @@ -0,0 +1,31 @@ +# -*- coding: utf-8 -*- +# Copyright 2017 Akretion (http://www.akretion.com). +# @author Sébastien BEAU +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). + +{ + "name": "Storage Bakend", + "summary": "Implement the concept of Storage with amazon S3, sftp...", + "version": "8.0.1.0.0", + "category": "Uncategorized", + "website": "www.akretion.com", + "author": " Akretion", + "license": "AGPL-3", + "application": False, + "installable": True, + "external_dependencies": { + "python": ["fs"], + "bin": [], + }, + "depends": [ + "base", + "keychain", + ], + "data": [ + "views/backend_storage_view.xml", + ], + "demo": [ + ], + "qweb": [ + ] +} diff --git a/storage_backend/models/__init__.py b/storage_backend/models/__init__.py new file mode 100644 index 0000000000..4731534aa1 --- /dev/null +++ b/storage_backend/models/__init__.py @@ -0,0 +1,3 @@ +# -*- coding: utf-8 -*- + +from . import storage_backend diff --git a/storage_backend/models/storage_backend.py b/storage_backend/models/storage_backend.py new file mode 100644 index 0000000000..7fed946f73 --- /dev/null +++ b/storage_backend/models/storage_backend.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# Copyright 2017 Akretion (http://www.akretion.com). +# @author Sébastien BEAU +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). + +from openerp import api, fields, models + + +class StorageBackend(models.Model): + _name = 'storage.backend' + _inherit = 'keychain.backend' + + name = fields.Char(required=True) + backend_type = fields.Selection([ + ('amazon-s3', 'Amazon-S3'), + ('filestore', 'Filestore'), + ('sftp', 'Sftp'), + ], required=True) + public_base_url = fields.Char() diff --git a/storage_backend/views/backend_storage_view.xml b/storage_backend/views/backend_storage_view.xml new file mode 100644 index 0000000000..c604afa3da --- /dev/null +++ b/storage_backend/views/backend_storage_view.xml @@ -0,0 +1,69 @@ + + + + + + storage.backend + + + + + + + + + storage.backend + +
+ + +
+
+ + + storage.backend + + + + + + + + + Storage Backend + ir.actions.act_window + storage.backend + form + tree,form + + [] + {} + + + + + + form + + + + + + + tree + + + + + + + + +
+
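At this point the series only declares the generic storage.backend model (a name, a backend_type selection and a public_base_url, layered on top of keychain.backend); the concrete per-type implementations arrive in the following patches. As a rough illustrative sketch of what creating such a record could look like from an Odoo shell (field names are taken from the model above, the values are placeholders, not part of the patches):

    # Illustrative only, not part of the patch series.
    backend = env['storage.backend'].create({
        'name': 'my-sftp-backend',                     # placeholder record name
        'backend_type': 'sftp',                        # one of the selection keys above
        'public_base_url': 'https://cdn.example.com',  # placeholder URL
    })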
From 132e86e6b1a3199df0229ea2b546762b7af2287e Mon Sep 17 00:00:00 2001 From: Raph Date: Fri, 14 Apr 2017 18:35:01 +0200 Subject: [PATCH 002/375] First work --- storage_backend/__openerp__.py | 5 ---- storage_backend/models/__init__.py | 1 + storage_backend/models/local_backend.py | 39 +++++++++++++++++++++++++ 3 files changed, 40 insertions(+), 5 deletions(-) create mode 100644 storage_backend/models/local_backend.py diff --git a/storage_backend/__openerp__.py b/storage_backend/__openerp__.py index 08b3258c34..3a5502db3b 100644 --- a/storage_backend/__openerp__.py +++ b/storage_backend/__openerp__.py @@ -15,7 +15,6 @@ "installable": True, "external_dependencies": { "python": ["fs"], - "bin": [], }, "depends": [ "base", @@ -24,8 +23,4 @@ "data": [ "views/backend_storage_view.xml", ], - "demo": [ - ], - "qweb": [ - ] } diff --git a/storage_backend/models/__init__.py b/storage_backend/models/__init__.py index 4731534aa1..2c85594c6f 100644 --- a/storage_backend/models/__init__.py +++ b/storage_backend/models/__init__.py @@ -1,3 +1,4 @@ # -*- coding: utf-8 -*- from . import storage_backend +from . import local_backend \ No newline at end of file diff --git a/storage_backend/models/local_backend.py b/storage_backend/models/local_backend.py new file mode 100644 index 0000000000..cf44aea8a4 --- /dev/null +++ b/storage_backend/models/local_backend.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# Copyright 2017 Akretion (http://www.akretion.com). +# @author Sébastien BEAU +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). + +from openerp import api, fields, models +import hashlib +from fs.osfs import OSFS +import logging +logger = logging.getLogger(__name__) + +class LocalStorageBackend(models.Model): + _inherit = 'storage.backend' + + public_base_url = fields.Char() + base_path = u'~/images' + + def store(self, binary, vals, object_type): + # enregistre le binary la où on lui dit + # renvois l'objet en question + file_hash = u'' + hashlib.sha1(binary).hexdigest() + path = file_hash + + with OSFS(self.base_path) as the_file: + the_file.settext(path, binary) + size = the_file.getsize(path) + + basic_vals = { + 'name': '', + 'path': path, + 'size': size, + 'sha1': file_hash, + 'backend_id': self.id, + } + obj = object_type.create(basic_vals) + return obj + + def get_public_url(self, obj): + return self.public_base_url + '/' + obj.path From 96faa9bb1e00c09ce174a0a37ca8d34abc2ddab4 Mon Sep 17 00:00:00 2001 From: Raph Date: Tue, 18 Apr 2017 16:32:39 +0200 Subject: [PATCH 003/375] Comment code --- storage_backend/models/local_backend.py | 1 + 1 file changed, 1 insertion(+) diff --git a/storage_backend/models/local_backend.py b/storage_backend/models/local_backend.py index cf44aea8a4..90bec32e9d 100644 --- a/storage_backend/models/local_backend.py +++ b/storage_backend/models/local_backend.py @@ -36,4 +36,5 @@ def store(self, binary, vals, object_type): return obj def get_public_url(self, obj): + logger.info('get_public_url') return self.public_base_url + '/' + obj.path From 6cfeb93d548f2d0fb93dd0757e811299a014c720 Mon Sep 17 00:00:00 2001 From: Raph Date: Wed, 19 Apr 2017 18:33:03 +0200 Subject: [PATCH 004/375] Work --- storage_backend/models/local_backend.py | 27 ++++++++++++++++--------- 1 file changed, 18 insertions(+), 9 deletions(-) diff --git a/storage_backend/models/local_backend.py b/storage_backend/models/local_backend.py index 90bec32e9d..28f2f4e3e6 100644 --- a/storage_backend/models/local_backend.py +++ b/storage_backend/models/local_backend.py @@ -15,26 +15,35 @@ class 
LocalStorageBackend(models.Model): public_base_url = fields.Char() base_path = u'~/images' + def store(self, binary, vals, object_type): + # TODO: refactorer, ça marche plus vraiment # enregistre le binary la où on lui dit # renvois l'objet en question - file_hash = u'' + hashlib.sha1(binary).hexdigest() - path = file_hash + checksum = u'' + hashlib.sha1(binary).hexdigest() + path = checksum - with OSFS(self.base_path) as the_file: - the_file.settext(path, binary) - size = the_file.getsize(path) + with OSFS(self.base_path) as the_dir: + the_dir.settext(path, binary) + size = the_dir.getsize(path) basic_vals = { 'name': '', - 'path': path, - 'size': size, - 'sha1': file_hash, + 'url': path, + 'file_size': size, + 'checksum': checksum, 'backend_id': self.id, } - obj = object_type.create(basic_vals) + basic_vals + vals.update(basic_vals) + obj = object_type.create(vals) return obj def get_public_url(self, obj): logger.info('get_public_url') return self.public_base_url + '/' + obj.path + + def get_data(self, path): + with OSFS(self.base_path) as the_dir: + return the_dir.getbytes(path) + From e3dd4e355887c2f957f56f36f073744f47e9d5f7 Mon Sep 17 00:00:00 2001 From: Raph Date: Thu, 20 Apr 2017 17:44:59 +0200 Subject: [PATCH 005/375] ajout de la factory --- storage_backend/models/local_backend.py | 27 ++++++++++---------- storage_backend/models/storage_backend.py | 31 ++++++++++++++++++++++- 2 files changed, 44 insertions(+), 14 deletions(-) diff --git a/storage_backend/models/local_backend.py b/storage_backend/models/local_backend.py index 28f2f4e3e6..1943d44d47 100644 --- a/storage_backend/models/local_backend.py +++ b/storage_backend/models/local_backend.py @@ -9,41 +9,42 @@ import logging logger = logging.getLogger(__name__) + class LocalStorageBackend(models.Model): _inherit = 'storage.backend' public_base_url = fields.Char() base_path = u'~/images' - - def store(self, binary, vals, object_type): + def store(self, blob, vals={}, object_type=None): # TODO: refactorer, ça marche plus vraiment # enregistre le binary la où on lui dit # renvois l'objet en question - checksum = u'' + hashlib.sha1(binary).hexdigest() + checksum = u'' + hashlib.sha1(blob).hexdigest() path = checksum with OSFS(self.base_path) as the_dir: - the_dir.settext(path, binary) + the_dir.settext(path, blob) size = the_dir.getsize(path) basic_vals = { - 'name': '', + # 'name': '', 'url': path, 'file_size': size, 'checksum': checksum, 'backend_id': self.id, } - basic_vals - vals.update(basic_vals) - obj = object_type.create(vals) - return obj + return basic_vals + # vals.update(basic_vals) + # obj = object_type.create(vals) # ou déléguer? + # return obj def get_public_url(self, obj): + # TODO faire mieux logger.info('get_public_url') - return self.public_base_url + '/' + obj.path + return self.public_base_url + '/' + obj.name - def get_data(self, path): + def get_base64(self, file_id): + logger.info('return base64 of a file') with OSFS(self.base_path) as the_dir: - return the_dir.getbytes(path) - + return the_dir.getbytes(file_id.url) diff --git a/storage_backend/models/storage_backend.py b/storage_backend/models/storage_backend.py index 7fed946f73..1927ecbbb1 100644 --- a/storage_backend/models/storage_backend.py +++ b/storage_backend/models/storage_backend.py @@ -2,9 +2,20 @@ # Copyright 2017 Akretion (http://www.akretion.com). # @author Sébastien BEAU # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). 
- +import logging from openerp import api, fields, models +_logger = logging.getLogger(__name__) +def implemented_by_factory(func): + """Call a prefixed function based on 'namespace'.""" + @wraps(func) + def wrapper(cls, *args, **kwargs): + fun_name = func.__name__ + fun = '_%s%s' % (cls.type, fun_name) + if not hasattr(cls, fun): + fun = '_default%s' % (fun_name) + return getattr(cls, fun)(*args, **kwargs) + return wrapper class StorageBackend(models.Model): _name = 'storage.backend' @@ -17,3 +28,21 @@ class StorageBackend(models.Model): ('sftp', 'Sftp'), ], required=True) public_base_url = fields.Char() + + def _get_account(self): + """Appelé par celui qui dépose le fichiers.""" + keychain = self.env['keychain.account'] + if self.env.user.has_group('storage.backend_access'): + retrieve = keychain.suspend_security().retrieve + else: + retrieve = keychain.retrieve + + accounts = retrieve( + [ + ['namespace', '=', 'storage_%s' % self.backend_type], + ['technical_name', '=', self.name] + ]) + if len(accounts) == 0: + _logger.debug('No account found for %s' % self.backend_type) + raise Warning("No account found based on the ") + return accounts From ace039a085fa382b63481ed4e86bf67c517f32fa Mon Sep 17 00:00:00 2001 From: Raph Date: Tue, 25 Apr 2017 17:42:04 +0200 Subject: [PATCH 006/375] Add implented_by wrapper for backend --- storage_backend/models/__init__.py | 4 +++- storage_backend/models/local_backend.py | 13 +++++++------ storage_backend/models/storage_backend.py | 19 +++++++++++++++++-- 3 files changed, 27 insertions(+), 9 deletions(-) diff --git a/storage_backend/models/__init__.py b/storage_backend/models/__init__.py index 2c85594c6f..a8d7af1e9c 100644 --- a/storage_backend/models/__init__.py +++ b/storage_backend/models/__init__.py @@ -1,4 +1,6 @@ # -*- coding: utf-8 -*- from . import storage_backend -from . import local_backend \ No newline at end of file +from . import local_backend +from . import sftp_backend +from . import odoo_backend \ No newline at end of file diff --git a/storage_backend/models/local_backend.py b/storage_backend/models/local_backend.py index 1943d44d47..305bb128a0 100644 --- a/storage_backend/models/local_backend.py +++ b/storage_backend/models/local_backend.py @@ -10,13 +10,13 @@ logger = logging.getLogger(__name__) -class LocalStorageBackend(models.Model): +class FileStoreStorageBackend(models.Model): _inherit = 'storage.backend' public_base_url = fields.Char() base_path = u'~/images' - def store(self, blob, vals={}, object_type=None): + def _filestorestore(self, blob, vals={}, object_type=None): # TODO: refactorer, ça marche plus vraiment # enregistre le binary la où on lui dit # renvois l'objet en question @@ -24,7 +24,7 @@ def store(self, blob, vals={}, object_type=None): path = checksum with OSFS(self.base_path) as the_dir: - the_dir.settext(path, blob) + the_dir.setcontents(path, blob) size = the_dir.getsize(path) basic_vals = { @@ -33,18 +33,19 @@ def store(self, blob, vals={}, object_type=None): 'file_size': size, 'checksum': checksum, 'backend_id': self.id, + 'private_path': path, } return basic_vals # vals.update(basic_vals) # obj = object_type.create(vals) # ou déléguer? 
# return obj - def get_public_url(self, obj): + def _filestoreget_public_url(self, obj): # TODO faire mieux logger.info('get_public_url') return self.public_base_url + '/' + obj.name - def get_base64(self, file_id): + def _filestoreget_base64(self, file_id): logger.info('return base64 of a file') with OSFS(self.base_path) as the_dir: - return the_dir.getbytes(file_id.url) + return the_dir.open(file_id.url).read() diff --git a/storage_backend/models/storage_backend.py b/storage_backend/models/storage_backend.py index 1927ecbbb1..0cc3f713d6 100644 --- a/storage_backend/models/storage_backend.py +++ b/storage_backend/models/storage_backend.py @@ -3,6 +3,7 @@ # @author Sébastien BEAU # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). import logging +from functools import wraps from openerp import api, fields, models _logger = logging.getLogger(__name__) @@ -11,7 +12,8 @@ def implemented_by_factory(func): @wraps(func) def wrapper(cls, *args, **kwargs): fun_name = func.__name__ - fun = '_%s%s' % (cls.type, fun_name) + fun = '_%s%s' % (cls.backend_type, fun_name) + _logger.info('try %s' % fun) if not hasattr(cls, fun): fun = '_default%s' % (fun_name) return getattr(cls, fun)(*args, **kwargs) @@ -26,9 +28,22 @@ class StorageBackend(models.Model): ('amazon-s3', 'Amazon-S3'), ('filestore', 'Filestore'), ('sftp', 'Sftp'), - ], required=True) + ], required=True) public_base_url = fields.Char() + @implemented_by_factory + def store(self, blob, vals={}, object_type=None): + pass + + @implemented_by_factory + def get_public_url(self, obj): + pass + + @implemented_by_factory + def get_base64(self, file_id): + pass + + @implemented_by_factory def _get_account(self): """Appelé par celui qui dépose le fichiers.""" keychain = self.env['keychain.account'] From acdb56defd4ce6554688e343bb60b345a3e3e555 Mon Sep 17 00:00:00 2001 From: Raph Date: Wed, 3 May 2017 12:02:47 +0200 Subject: [PATCH 007/375] Add odoo backend --- storage_backend/models/odoo_backend.py | 67 ++++++++++++++++++++++++++ 1 file changed, 67 insertions(+) create mode 100644 storage_backend/models/odoo_backend.py diff --git a/storage_backend/models/odoo_backend.py b/storage_backend/models/odoo_backend.py new file mode 100644 index 0000000000..8a9b253b8e --- /dev/null +++ b/storage_backend/models/odoo_backend.py @@ -0,0 +1,67 @@ +# -*- coding: utf-8 -*- +# Copyright 2017 Akretion (http://www.akretion.com). +# @author Sébastien BEAU +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). 
+ +from openerp import api, fields, models +import hashlib +from fs.osfs import OSFS +import logging +logger = logging.getLogger(__name__) + + +class OdooStorageBackend(models.Model): + _inherit = 'storage.backend' + + backend_type = fields.Selection( + selection_add=[('odoo', 'Odoo')]) + + def _odoostore(self, blob, vals={}, object_type=None): + checksum = u'' + hashlib.sha1(blob).hexdigest() + path = checksum + + ir_attach = { + 'name': checksum, # utiliser name a la place + 'type': 'binary', + 'datas': blob, + 'res_model': self._name, + 'res_id': self.id, + } + logger.info('on va crée le ir suivant:') + logger.info(ir_attach) + + pj = self.env['ir.attachment'].create(ir_attach) + size = pj.file_size + url = ( + '/web/binary/image?model=%(res_model)s' + '&id=%(res_id)s&field=image_medium' + ) % { + 'res_model': ir_attach['res_model'], + 'res_id': ir_attach['res_id'] + } + + basic_vals = { + # 'name': '', + 'url': url, + 'file_size': size, + 'checksum': checksum, + 'backend_id': self.id, + 'private_path': pj.id + } + return basic_vals + + def _odooget_public_url(self, obj): + # TODO faire mieux + logger.info('get_public_url') + return self._odoo_lookup(obj).url + + def _odooget_base64(self, file_id): + logger.info('return base64 of a file') + return self._odoo_lookup(file_id).datas + + def _odoo_lookup(self, obj): + return self.env['ir.attachment'].search([ + ('res_model', '=', self._name), + ('res_id', '=', self.id), + ('id', '=', obj.private_path) + ]) From 009468a9c141b5cc3be3e2ff3255fa13d5a20e66 Mon Sep 17 00:00:00 2001 From: Raph Date: Wed, 3 May 2017 12:10:49 +0200 Subject: [PATCH 008/375] Add some comments --- storage_backend/models/odoo_backend.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/storage_backend/models/odoo_backend.py b/storage_backend/models/odoo_backend.py index 8a9b253b8e..a346ede779 100644 --- a/storage_backend/models/odoo_backend.py +++ b/storage_backend/models/odoo_backend.py @@ -20,6 +20,10 @@ def _odoostore(self, blob, vals={}, object_type=None): checksum = u'' + hashlib.sha1(blob).hexdigest() path = checksum + # res_model = OdooStrogageBackend + # car il faut faire savoir sur quel + # backend on est lié + ir_attach = { 'name': checksum, # utiliser name a la place 'type': 'binary', @@ -33,10 +37,10 @@ def _odoostore(self, blob, vals={}, object_type=None): pj = self.env['ir.attachment'].create(ir_attach) size = pj.file_size url = ( - '/web/binary/image?model=%(res_model)s' - '&id=%(res_id)s&field=image_medium' + '/web/binary/image?model=%(res_model)s' #res_model doit être storage.image + '&id=%(res_id)s&field=image_medium' # comment on sait que c'est une image? 
a mettre ailleurs ) % { - 'res_model': ir_attach['res_model'], + 'res_model': ir_attach['res_model'], # devrait être storage.image ou storage.thumbnail 'res_id': ir_attach['res_id'] } From d8c39c7287cead2535aa0d1079962c0352d73568 Mon Sep 17 00:00:00 2001 From: Raph Date: Wed, 3 May 2017 17:11:52 +0200 Subject: [PATCH 009/375] Fix thumbnail url for odoo backend --- storage_backend/models/odoo_backend.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/storage_backend/models/odoo_backend.py b/storage_backend/models/odoo_backend.py index a346ede779..66d7029268 100644 --- a/storage_backend/models/odoo_backend.py +++ b/storage_backend/models/odoo_backend.py @@ -37,11 +37,12 @@ def _odoostore(self, blob, vals={}, object_type=None): pj = self.env['ir.attachment'].create(ir_attach) size = pj.file_size url = ( - '/web/binary/image?model=%(res_model)s' #res_model doit être storage.image - '&id=%(res_id)s&field=image_medium' # comment on sait que c'est une image? a mettre ailleurs + '/web/binary/image?model=%(res_model)s' + '&id=%(res_id)s&field=datas' + # comment on sait que c'est une image? a mettre ailleurs ) % { - 'res_model': ir_attach['res_model'], # devrait être storage.image ou storage.thumbnail - 'res_id': ir_attach['res_id'] + 'res_model': pj._name, + 'res_id': pj.id } basic_vals = { From 3fdeaa2d73f6543e9785af234b6e962f71e619fa Mon Sep 17 00:00:00 2001 From: Raph Date: Thu, 4 May 2017 15:12:28 +0200 Subject: [PATCH 010/375] Gestion du nom --- storage_backend/models/odoo_backend.py | 7 ++++--- storage_backend/models/storage_backend.py | 2 +- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/storage_backend/models/odoo_backend.py b/storage_backend/models/odoo_backend.py index 66d7029268..8ced5eb412 100644 --- a/storage_backend/models/odoo_backend.py +++ b/storage_backend/models/odoo_backend.py @@ -16,16 +16,16 @@ class OdooStorageBackend(models.Model): backend_type = fields.Selection( selection_add=[('odoo', 'Odoo')]) - def _odoostore(self, blob, vals={}, object_type=None): + def _odoostore(self, blob, vals): checksum = u'' + hashlib.sha1(blob).hexdigest() - path = checksum + name = vals.get('name', checksum) # res_model = OdooStrogageBackend # car il faut faire savoir sur quel # backend on est lié ir_attach = { - 'name': checksum, # utiliser name a la place + 'name': name, # utiliser name a la place 'type': 'binary', 'datas': blob, 'res_model': self._name, @@ -47,6 +47,7 @@ def _odoostore(self, blob, vals={}, object_type=None): basic_vals = { # 'name': '', + 'name': name, 'url': url, 'file_size': size, 'checksum': checksum, diff --git a/storage_backend/models/storage_backend.py b/storage_backend/models/storage_backend.py index 0cc3f713d6..14202c140b 100644 --- a/storage_backend/models/storage_backend.py +++ b/storage_backend/models/storage_backend.py @@ -32,7 +32,7 @@ class StorageBackend(models.Model): public_base_url = fields.Char() @implemented_by_factory - def store(self, blob, vals={}, object_type=None): + def store(self, blob, vals): pass @implemented_by_factory From 48ed103448aae62be14268eb879dca141d28eadc Mon Sep 17 00:00:00 2001 From: Raph Date: Thu, 4 May 2017 16:46:54 +0200 Subject: [PATCH 011/375] Pylinting --- storage_backend/models/local_backend.py | 13 +++++++------ storage_backend/models/odoo_backend.py | 3 +-- storage_backend/models/storage_backend.py | 6 ++++-- 3 files changed, 12 insertions(+), 10 deletions(-) diff --git a/storage_backend/models/local_backend.py b/storage_backend/models/local_backend.py index 305bb128a0..8c68054585 
100644 --- a/storage_backend/models/local_backend.py +++ b/storage_backend/models/local_backend.py @@ -3,12 +3,16 @@ # @author Sébastien BEAU # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). -from openerp import api, fields, models import hashlib -from fs.osfs import OSFS import logging +from openerp import fields, models logger = logging.getLogger(__name__) +try: + from fs.osfs import OSFS +except ImportError as err: + logger.debug(err) + class FileStoreStorageBackend(models.Model): _inherit = 'storage.backend' @@ -16,7 +20,7 @@ class FileStoreStorageBackend(models.Model): public_base_url = fields.Char() base_path = u'~/images' - def _filestorestore(self, blob, vals={}, object_type=None): + def _filestorestore(self, blob, vals): # TODO: refactorer, ça marche plus vraiment # enregistre le binary la où on lui dit # renvois l'objet en question @@ -36,9 +40,6 @@ def _filestorestore(self, blob, vals={}, object_type=None): 'private_path': path, } return basic_vals - # vals.update(basic_vals) - # obj = object_type.create(vals) # ou déléguer? - # return obj def _filestoreget_public_url(self, obj): # TODO faire mieux diff --git a/storage_backend/models/odoo_backend.py b/storage_backend/models/odoo_backend.py index 8ced5eb412..4d5d35770f 100644 --- a/storage_backend/models/odoo_backend.py +++ b/storage_backend/models/odoo_backend.py @@ -3,9 +3,8 @@ # @author Sébastien BEAU # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). -from openerp import api, fields, models +from openerp import fields, models import hashlib -from fs.osfs import OSFS import logging logger = logging.getLogger(__name__) diff --git a/storage_backend/models/storage_backend.py b/storage_backend/models/storage_backend.py index 14202c140b..2a01a4eff0 100644 --- a/storage_backend/models/storage_backend.py +++ b/storage_backend/models/storage_backend.py @@ -4,9 +4,10 @@ # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). import logging from functools import wraps -from openerp import api, fields, models +from openerp import _, fields, models _logger = logging.getLogger(__name__) + def implemented_by_factory(func): """Call a prefixed function based on 'namespace'.""" @wraps(func) @@ -19,6 +20,7 @@ def wrapper(cls, *args, **kwargs): return getattr(cls, fun)(*args, **kwargs) return wrapper + class StorageBackend(models.Model): _name = 'storage.backend' _inherit = 'keychain.backend' @@ -59,5 +61,5 @@ def _get_account(self): ]) if len(accounts) == 0: _logger.debug('No account found for %s' % self.backend_type) - raise Warning("No account found based on the ") + raise Warning(_("No account found based on the ")) return accounts From 4fdc0e710d959a72af7d528a7bb34e258f0924a8 Mon Sep 17 00:00:00 2001 From: Raph Date: Thu, 4 May 2017 17:19:46 +0200 Subject: [PATCH 012/375] + Readme --- storage_backend/README.rst | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 storage_backend/README.rst diff --git a/storage_backend/README.rst b/storage_backend/README.rst new file mode 100644 index 0000000000..d695998b96 --- /dev/null +++ b/storage_backend/README.rst @@ -0,0 +1,7 @@ +Storage backend +=============== + + +Le backend défini comment on stock le fichier. 
+Exemples: +- ftp, sftp, S3, Swift \ No newline at end of file From bdce575375ae6b0666834f5ad9fd47cfa5c7bfcb Mon Sep 17 00:00:00 2001 From: Raph Date: Thu, 4 May 2017 18:09:28 +0200 Subject: [PATCH 013/375] Add sftp backend --- storage_backend/models/sftp_backend.py | 80 ++++++++++++++++++++++++++ 1 file changed, 80 insertions(+) create mode 100644 storage_backend/models/sftp_backend.py diff --git a/storage_backend/models/sftp_backend.py b/storage_backend/models/sftp_backend.py new file mode 100644 index 0000000000..2a9e34fbdf --- /dev/null +++ b/storage_backend/models/sftp_backend.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +# Copyright 2017 Akretion (http://www.akretion.com). +# @author Sébastien BEAU +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). + +import socket +import hashlib +import logging +import base64 + +from openerp import fields, models +from openerp.exceptions import Warning as UserError + +logger = logging.getLogger(__name__) + +try: + from fs import sftpfs +except ImportError as err: + logger.debug(err) + + +class SftpStorageBackend(models.Model): + _inherit = 'storage.backend' + + public_base_url = fields.Char( + string='Public url', + help='') + sftp_server = fields.Char( + string='SFTP host', + help='') + sftp_dir_path = fields.Char( + string='Remote path', + help='Dir on the server where to store files') + + # TODO externiser ça dans des parametres + # ou dans un keychain ? + + def _sftpstore(self, blob, vals): + checksum = u'' + hashlib.sha1(blob).hexdigest() + + name = vals.get('name', checksum) + # todo add filename here (for extention) + b_decoded = base64.b64decode(blob) + try: + with sftpfs.SFTPFS( + self.sftp_server, + root_path=self.sftp_dir_path + ) as the_dir: + the_dir.setcontents(name, b_decoded) + size = the_dir.getsize(name) + except socket.error: + raise UserError('SFTP server not available') + + basic_vals = { + 'name': name, + 'url': name, + 'file_size': size, + 'checksum': checksum, + 'backend_id': self.id, + 'private_path': self.sftp_dir_path + name, + } + return basic_vals + + def _sftpget_public_url(self, obj): + # TODO faire mieux + logger.info('get_public_url') + + if obj.to_do: + logger.warning( + 'public url not available for not processed thumbnail') + return None + return self.public_base_url + obj.url + + def _sftpget_base64(self, file_id): + logger.info('return base64 of a file') + with sftpfs.SFTPFS( + self.sftp_server, + root_path=self.sftp_dir_path + ) as the_dir: + return the_dir.open(file_id.url, 'r') From d90e8bf400003b52ae19591ed36ce4d1c414a506 Mon Sep 17 00:00:00 2001 From: Raph Date: Tue, 9 May 2017 12:16:20 +0200 Subject: [PATCH 014/375] Suppression d'image wizard --- storage_backend/models/local_backend.py | 3 ++- storage_backend/models/odoo_backend.py | 3 ++- storage_backend/models/storage_backend.py | 2 +- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/storage_backend/models/local_backend.py b/storage_backend/models/local_backend.py index 8c68054585..02983980eb 100644 --- a/storage_backend/models/local_backend.py +++ b/storage_backend/models/local_backend.py @@ -20,10 +20,11 @@ class FileStoreStorageBackend(models.Model): public_base_url = fields.Char() base_path = u'~/images' - def _filestorestore(self, blob, vals): + def _filestorestore(self, vals): # TODO: refactorer, ça marche plus vraiment # enregistre le binary la où on lui dit # renvois l'objet en question + blob = vals['datas'] checksum = u'' + hashlib.sha1(blob).hexdigest() path = checksum diff --git 
a/storage_backend/models/odoo_backend.py b/storage_backend/models/odoo_backend.py index 4d5d35770f..392eb03e23 100644 --- a/storage_backend/models/odoo_backend.py +++ b/storage_backend/models/odoo_backend.py @@ -15,7 +15,8 @@ class OdooStorageBackend(models.Model): backend_type = fields.Selection( selection_add=[('odoo', 'Odoo')]) - def _odoostore(self, blob, vals): + def _odoostore(self, vals): + blob = vals['datas'] checksum = u'' + hashlib.sha1(blob).hexdigest() name = vals.get('name', checksum) diff --git a/storage_backend/models/storage_backend.py b/storage_backend/models/storage_backend.py index 2a01a4eff0..f3012e8e5c 100644 --- a/storage_backend/models/storage_backend.py +++ b/storage_backend/models/storage_backend.py @@ -34,7 +34,7 @@ class StorageBackend(models.Model): public_base_url = fields.Char() @implemented_by_factory - def store(self, blob, vals): + def store(self, vals): pass @implemented_by_factory From 8ee4c7129a4e861d04e3a77be3beb0e92ef91951 Mon Sep 17 00:00:00 2001 From: Raph Date: Tue, 9 May 2017 16:28:41 +0200 Subject: [PATCH 015/375] Virer storage.thumbnail factory & wizard --- storage_backend/models/sftp_backend.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/storage_backend/models/sftp_backend.py b/storage_backend/models/sftp_backend.py index 2a9e34fbdf..3ee3d1c0aa 100644 --- a/storage_backend/models/sftp_backend.py +++ b/storage_backend/models/sftp_backend.py @@ -35,7 +35,8 @@ class SftpStorageBackend(models.Model): # TODO externiser ça dans des parametres # ou dans un keychain ? - def _sftpstore(self, blob, vals): + def _sftpstore(self, vals): + blob = vals['datas'] checksum = u'' + hashlib.sha1(blob).hexdigest() name = vals.get('name', checksum) From d1ce7121becb1e6daa2b4b367738f1b202fb3163 Mon Sep 17 00:00:00 2001 From: Raph Date: Tue, 9 May 2017 17:15:41 +0200 Subject: [PATCH 016/375] Add view for backend --- storage_backend/models/local_backend.py | 10 +++++----- storage_backend/models/sftp_backend.py | 4 ++-- storage_backend/views/backend_storage_view.xml | 13 +++++++++++++ 3 files changed, 20 insertions(+), 7 deletions(-) diff --git a/storage_backend/models/local_backend.py b/storage_backend/models/local_backend.py index 02983980eb..bc9f918937 100644 --- a/storage_backend/models/local_backend.py +++ b/storage_backend/models/local_backend.py @@ -17,8 +17,8 @@ class FileStoreStorageBackend(models.Model): _inherit = 'storage.backend' - public_base_url = fields.Char() - base_path = u'~/images' + filestore_public_base_url = fields.Char() + filestore_base_path = fields.Char() def _filestorestore(self, vals): # TODO: refactorer, ça marche plus vraiment @@ -28,7 +28,7 @@ def _filestorestore(self, vals): checksum = u'' + hashlib.sha1(blob).hexdigest() path = checksum - with OSFS(self.base_path) as the_dir: + with OSFS(self.filestore_base_path) as the_dir: the_dir.setcontents(path, blob) size = the_dir.getsize(path) @@ -45,9 +45,9 @@ def _filestorestore(self, vals): def _filestoreget_public_url(self, obj): # TODO faire mieux logger.info('get_public_url') - return self.public_base_url + '/' + obj.name + return self.filestore_public_base_url + '/' + obj.name def _filestoreget_base64(self, file_id): logger.info('return base64 of a file') - with OSFS(self.base_path) as the_dir: + with OSFS(self.filestore_base_path) as the_dir: return the_dir.open(file_id.url).read() diff --git a/storage_backend/models/sftp_backend.py b/storage_backend/models/sftp_backend.py index 3ee3d1c0aa..7423978136 100644 --- a/storage_backend/models/sftp_backend.py +++ 
b/storage_backend/models/sftp_backend.py @@ -22,7 +22,7 @@ class SftpStorageBackend(models.Model): _inherit = 'storage.backend' - public_base_url = fields.Char( + sftp_public_base_url = fields.Char( string='Public url', help='') sftp_server = fields.Char( @@ -70,7 +70,7 @@ def _sftpget_public_url(self, obj): logger.warning( 'public url not available for not processed thumbnail') return None - return self.public_base_url + obj.url + return self.sftp_public_base_url + obj.url def _sftpget_base64(self, file_id): logger.info('return base64 of a file') diff --git a/storage_backend/views/backend_storage_view.xml b/storage_backend/views/backend_storage_view.xml index c604afa3da..6960b4ffc9 100644 --- a/storage_backend/views/backend_storage_view.xml +++ b/storage_backend/views/backend_storage_view.xml @@ -7,6 +7,7 @@ + @@ -16,6 +17,18 @@
+ + + + + + + + + + + + From 496445f444a1d0a0c03ffbe6f17d3118af60525e Mon Sep 17 00:00:00 2001 From: Raph Date: Tue, 9 May 2017 18:15:22 +0200 Subject: [PATCH 017/375] sparse backend --- storage_backend/__openerp__.py | 1 + storage_backend/models/__init__.py | 3 ++- storage_backend/models/sftp_backend.py | 16 ++++++++++++---- 3 files changed, 15 insertions(+), 5 deletions(-) diff --git a/storage_backend/__openerp__.py b/storage_backend/__openerp__.py index 3a5502db3b..3cfab65dce 100644 --- a/storage_backend/__openerp__.py +++ b/storage_backend/__openerp__.py @@ -19,6 +19,7 @@ "depends": [ "base", "keychain", + "base_sparse_field", ], "data": [ "views/backend_storage_view.xml", diff --git a/storage_backend/models/__init__.py b/storage_backend/models/__init__.py index a8d7af1e9c..4cfce70956 100644 --- a/storage_backend/models/__init__.py +++ b/storage_backend/models/__init__.py @@ -3,4 +3,5 @@ from . import storage_backend from . import local_backend from . import sftp_backend -from . import odoo_backend \ No newline at end of file +from . import odoo_backend +from . import keychain \ No newline at end of file diff --git a/storage_backend/models/sftp_backend.py b/storage_backend/models/sftp_backend.py index 7423978136..60a949637e 100644 --- a/storage_backend/models/sftp_backend.py +++ b/storage_backend/models/sftp_backend.py @@ -8,7 +8,7 @@ import logging import base64 -from openerp import fields, models +from openerp import api, fields, models from openerp.exceptions import Warning as UserError logger = logging.getLogger(__name__) @@ -22,15 +22,23 @@ class SftpStorageBackend(models.Model): _inherit = 'storage.backend' + _backend_name = 'storage_backend_sftp' + sftp_public_base_url = fields.Char( string='Public url', - help='') + help='', + sparse="data" + ) sftp_server = fields.Char( string='SFTP host', - help='') + help='', + sparse="data" + ) sftp_dir_path = fields.Char( string='Remote path', - help='Dir on the server where to store files') + help='Dir on the server where to store files', + sparse="data" + ) # TODO externiser ça dans des parametres # ou dans un keychain ? 
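Up to this patch, the central mechanism is the implemented_by_factory decorator in storage_backend.py: a call to a public method such as store() on a record whose backend_type is 'sftp' is routed to _sftpstore(), with a _default... method as fallback. A stripped-down standalone sketch of that dispatch for illustration (only the decorator mirrors the code above, the FakeBackend class is hypothetical):

    from functools import wraps

    def implemented_by_factory(func):
        """Call a prefixed method chosen from the record's backend_type."""
        @wraps(func)
        def wrapper(cls, *args, **kwargs):
            fun_name = func.__name__
            fun = '_%s%s' % (cls.backend_type, fun_name)
            if not hasattr(cls, fun):
                fun = '_default%s' % (fun_name,)
            return getattr(cls, fun)(*args, **kwargs)
        return wrapper

    class FakeBackend(object):          # hypothetical stand-in for storage.backend
        backend_type = 'sftp'

        @implemented_by_factory
        def store(self, vals):
            pass

        def _sftpstore(self, vals):     # normally provided by the sftp subclass
            return 'stored via sftp'

    assert FakeBackend().store({}) == 'stored via sftp'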
From cca1bb898eee16b4c499ab945a550c484df3f6fc Mon Sep 17 00:00:00 2001 From: Raph Date: Wed, 10 May 2017 11:18:14 +0200 Subject: [PATCH 018/375] Add backend_type by sub classes --- storage_backend/models/local_backend.py | 3 +++ storage_backend/models/sftp_backend.py | 3 +++ storage_backend/models/storage_backend.py | 6 +----- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/storage_backend/models/local_backend.py b/storage_backend/models/local_backend.py index bc9f918937..465089e754 100644 --- a/storage_backend/models/local_backend.py +++ b/storage_backend/models/local_backend.py @@ -17,6 +17,9 @@ class FileStoreStorageBackend(models.Model): _inherit = 'storage.backend' + backend_type = fields.Selection( + selection_add=[('filestore', 'Filestore')]) + filestore_public_base_url = fields.Char() filestore_base_path = fields.Char() diff --git a/storage_backend/models/sftp_backend.py b/storage_backend/models/sftp_backend.py index 60a949637e..f5a740e086 100644 --- a/storage_backend/models/sftp_backend.py +++ b/storage_backend/models/sftp_backend.py @@ -24,6 +24,9 @@ class SftpStorageBackend(models.Model): _backend_name = 'storage_backend_sftp' + backend_type = fields.Selection( + selection_add=[('sftp', 'SFTP')]) + sftp_public_base_url = fields.Char( string='Public url', help='', diff --git a/storage_backend/models/storage_backend.py b/storage_backend/models/storage_backend.py index f3012e8e5c..2fb62a2def 100644 --- a/storage_backend/models/storage_backend.py +++ b/storage_backend/models/storage_backend.py @@ -26,11 +26,7 @@ class StorageBackend(models.Model): _inherit = 'keychain.backend' name = fields.Char(required=True) - backend_type = fields.Selection([ - ('amazon-s3', 'Amazon-S3'), - ('filestore', 'Filestore'), - ('sftp', 'Sftp'), - ], required=True) + backend_type = fields.Selection([], required=True) # added by subclasses public_base_url = fields.Char() @implemented_by_factory From 33509723d075a255bc4b61bce6e9553780729ce5 Mon Sep 17 00:00:00 2001 From: Raph Date: Wed, 10 May 2017 11:19:19 +0200 Subject: [PATCH 019/375] Shared backend_name --- storage_backend/models/sftp_backend.py | 2 -- storage_backend/models/storage_backend.py | 1 + 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/storage_backend/models/sftp_backend.py b/storage_backend/models/sftp_backend.py index f5a740e086..7bd6df37cb 100644 --- a/storage_backend/models/sftp_backend.py +++ b/storage_backend/models/sftp_backend.py @@ -22,8 +22,6 @@ class SftpStorageBackend(models.Model): _inherit = 'storage.backend' - _backend_name = 'storage_backend_sftp' - backend_type = fields.Selection( selection_add=[('sftp', 'SFTP')]) diff --git a/storage_backend/models/storage_backend.py b/storage_backend/models/storage_backend.py index 2fb62a2def..067ed28bd0 100644 --- a/storage_backend/models/storage_backend.py +++ b/storage_backend/models/storage_backend.py @@ -24,6 +24,7 @@ def wrapper(cls, *args, **kwargs): class StorageBackend(models.Model): _name = 'storage.backend' _inherit = 'keychain.backend' + _backend_name = 'storage_backend' name = fields.Char(required=True) backend_type = fields.Selection([], required=True) # added by subclasses From 92e9a048d802b498b6ffee08f36f365c7a93821f Mon Sep 17 00:00:00 2001 From: Raph Date: Wed, 10 May 2017 12:52:44 +0200 Subject: [PATCH 020/375] add s3 (marche pas) Fix s3 Work on s3 --- storage_backend/models/__init__.py | 2 +- storage_backend/models/s3_backend.py | 87 +++++++++++++++++++ .../views/backend_storage_view.xml | 5 ++ 3 files changed, 93 insertions(+), 1 deletion(-) 
create mode 100644 storage_backend/models/s3_backend.py diff --git a/storage_backend/models/__init__.py b/storage_backend/models/__init__.py index 4cfce70956..1ba0adf43e 100644 --- a/storage_backend/models/__init__.py +++ b/storage_backend/models/__init__.py @@ -4,4 +4,4 @@ from . import local_backend from . import sftp_backend from . import odoo_backend -from . import keychain \ No newline at end of file +from . import s3_backend diff --git a/storage_backend/models/s3_backend.py b/storage_backend/models/s3_backend.py new file mode 100644 index 0000000000..ce6e47371f --- /dev/null +++ b/storage_backend/models/s3_backend.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +# Copyright 2017 Akretion (http://www.akretion.com). +# @author Sébastien BEAU +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). + +import socket +import hashlib +import logging +import base64 +import mimetypes + +from openerp import api, fields, models +from openerp.exceptions import Warning as UserError + +logger = logging.getLogger(__name__) + +try: + from fs import s3fs +except ImportError as err: + logger.debug(err) + + +class S3StorageBackend(models.Model): + _inherit = 'storage.backend' + _backend_name = 'storage_backend_sftp' + + backend_type = fields.Selection( + selection_add=[('amazon_s3', 'Amazon S3')]) + + aws_bucket = fields.Char(sparse="data") + aws_secret_key = fields.Char(sparse="data") + aws_access_key = fields.Char(sparse="data") + + def _amazon_s3store(self, vals): + blob = vals['datas'] + checksum = u'' + hashlib.sha1(blob).hexdigest() + + name = vals.get('name', checksum) + # todo add filename here (for extention) + mime, enc = mimetypes.guess_type(name) + + b_decoded = base64.b64decode(blob) + try: + with s3fs.S3FS( + self.aws_bucket, + aws_secret_key=self.aws_secret_key, + aws_access_key=self.aws_access_key, + host='s3.eu-central-1.amazonaws.com' + ) as the_dir: + the_dir.setcontents(name, b_decoded) + size = the_dir.getsize(name) + url = the_dir.getpathurl(name) + key = the_dir._s3bukt.get_key(name) + key.copy( + key.bucket, + key.name, + preserve_acl=True, + metadata={'Content-Type': mime}) + except socket.error: + raise UserError('S3 server not available') + + basic_vals = { + 'name': name, + 'url': url, + 'file_size': size, + 'checksum': checksum, + 'backend_id': self.id, + 'private_path': name + } + return basic_vals + + def _amazon_s3get_public_url(self, obj): + # TODO faire mieux + logger.info('get_public_url') + return obj.url + + def _amazon_s3get_base64(self, file_id): + logger.warning('return base64 of a file') + with s3fs.S3FS( + self.aws_bucket, + aws_secret_key=self.aws_secret_key, + aws_access_key=self.aws_access_key, + host='s3.eu-central-1.amazonaws.com' + ) as the_dir: + # TODO : quel horreur ! 
on a deja l'url + bin = the_dir.getcontents(file_id.name) # mettre private_path + return base64.b64encode(bin) diff --git a/storage_backend/views/backend_storage_view.xml b/storage_backend/views/backend_storage_view.xml index 6960b4ffc9..144c33eb10 100644 --- a/storage_backend/views/backend_storage_view.xml +++ b/storage_backend/views/backend_storage_view.xml @@ -29,6 +29,11 @@ + + + + + From 41ab136eb793100c6fdf1e08d27455777c0798cf Mon Sep 17 00:00:00 2001 From: Raph Date: Thu, 11 May 2017 17:11:47 +0200 Subject: [PATCH 021/375] res_model marche pas --- storage_backend/models/s3_backend.py | 1 + 1 file changed, 1 insertion(+) diff --git a/storage_backend/models/s3_backend.py b/storage_backend/models/s3_backend.py index ce6e47371f..d3748704ca 100644 --- a/storage_backend/models/s3_backend.py +++ b/storage_backend/models/s3_backend.py @@ -50,6 +50,7 @@ def _amazon_s3store(self, vals): the_dir.setcontents(name, b_decoded) size = the_dir.getsize(name) url = the_dir.getpathurl(name) + # Todo : j'arrive pas mettre le mime type ici key = the_dir._s3bukt.get_key(name) key.copy( key.bucket, From 0df6df0a819dde04033d8c28edb97b28935c14aa Mon Sep 17 00:00:00 2001 From: Raph Date: Thu, 11 May 2017 17:56:58 +0200 Subject: [PATCH 022/375] Add config_parameter avec odoo par defaut Rends les url en public --- storage_backend/__openerp__.py | 1 + storage_backend/models/s3_backend.py | 6 +++++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/storage_backend/__openerp__.py b/storage_backend/__openerp__.py index 3cfab65dce..6a4f9d593c 100644 --- a/storage_backend/__openerp__.py +++ b/storage_backend/__openerp__.py @@ -23,5 +23,6 @@ ], "data": [ "views/backend_storage_view.xml", + "data/data.xml", ], } diff --git a/storage_backend/models/s3_backend.py b/storage_backend/models/s3_backend.py index d3748704ca..78a31da152 100644 --- a/storage_backend/models/s3_backend.py +++ b/storage_backend/models/s3_backend.py @@ -49,7 +49,6 @@ def _amazon_s3store(self, vals): ) as the_dir: the_dir.setcontents(name, b_decoded) size = the_dir.getsize(name) - url = the_dir.getpathurl(name) # Todo : j'arrive pas mettre le mime type ici key = the_dir._s3bukt.get_key(name) key.copy( @@ -57,6 +56,11 @@ def _amazon_s3store(self, vals): key.name, preserve_acl=True, metadata={'Content-Type': mime}) + # make shor url + # peut etre avec des ACL on pourrait s'en passer + key.make_public() + url = the_dir.getpathurl(name) + except socket.error: raise UserError('S3 server not available') From 3e4da613ca1c091014747bc8a9d9e8a2d5288cdb Mon Sep 17 00:00:00 2001 From: Raph Date: Thu, 18 May 2017 10:38:45 +0200 Subject: [PATCH 023/375] Add default backend --- storage_backend/data/data.xml | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 storage_backend/data/data.xml diff --git a/storage_backend/data/data.xml b/storage_backend/data/data.xml new file mode 100644 index 0000000000..4519dc763f --- /dev/null +++ b/storage_backend/data/data.xml @@ -0,0 +1,9 @@ + + + + + Odoo backend + odoo + + + \ No newline at end of file From 171184fb3831c75b5d2567c7ac196bb1df31816e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20BEAU?= Date: Thu, 18 May 2017 14:31:21 +0200 Subject: [PATCH 024/375] [IMP] remove dead code, start to refactor amazon export, fix issue with uploading image with res_id/res_model, fix issue with the creation of the backend --- storage_backend/models/__init__.py | 1 + storage_backend/models/keychain.py | 16 +++++ storage_backend/models/s3_backend.py | 61 ++++++++----------- 
storage_backend/models/storage_backend.py | 19 ------ .../views/backend_storage_view.xml | 47 ++++++++------ 5 files changed, 73 insertions(+), 71 deletions(-) create mode 100644 storage_backend/models/keychain.py diff --git a/storage_backend/models/__init__.py b/storage_backend/models/__init__.py index 1ba0adf43e..f1ff8e4ae6 100644 --- a/storage_backend/models/__init__.py +++ b/storage_backend/models/__init__.py @@ -5,3 +5,4 @@ from . import sftp_backend from . import odoo_backend from . import s3_backend +from . import keychain diff --git a/storage_backend/models/keychain.py b/storage_backend/models/keychain.py new file mode 100644 index 0000000000..b9024e56ef --- /dev/null +++ b/storage_backend/models/keychain.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2017 Akretion (http://www.akretion.com). +# @author Sébastien BEAU +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). + +from openerp import fields, models + + +class KeychainAccount(models.Model): + _inherit = 'keychain.account' + + namespace = fields.Selection( + selection_add=[('storage_backend', 'Storage Backend')]) + + def _storage_backend_validate_data(self, data): + return True diff --git a/storage_backend/models/s3_backend.py b/storage_backend/models/s3_backend.py index 78a31da152..21fc7d1861 100644 --- a/storage_backend/models/s3_backend.py +++ b/storage_backend/models/s3_backend.py @@ -15,14 +15,13 @@ logger = logging.getLogger(__name__) try: - from fs import s3fs + from boto.s3.connection import S3Connection except ImportError as err: logger.debug(err) class S3StorageBackend(models.Model): _inherit = 'storage.backend' - _backend_name = 'storage_backend_sftp' backend_type = fields.Selection( selection_add=[('amazon_s3', 'Amazon S3')]) @@ -30,49 +29,43 @@ class S3StorageBackend(models.Model): aws_bucket = fields.Char(sparse="data") aws_secret_key = fields.Char(sparse="data") aws_access_key = fields.Char(sparse="data") + aws_host = fields.Char(sparse="data") + aws_cloudfront_domain = fields.Char(sparse="data") - def _amazon_s3store(self, vals): - blob = vals['datas'] - checksum = u'' + hashlib.sha1(blob).hexdigest() + def _amazon_s3_build_public_url(self, name): + if self.aws_cloudfront_domain: + host = self.aws_cloutfront_domain + else: + host = self.aws_host + return "https://%s/%s/%s" % (host, self.aws_bucket, name) - name = vals.get('name', checksum) - # todo add filename here (for extention) + def _amazon_s3store(self, vals): + name = vals['name'] mime, enc = mimetypes.guess_type(name) - - b_decoded = base64.b64decode(blob) + b_decoded = base64.b64decode(vals['datas']) try: - with s3fs.S3FS( - self.aws_bucket, - aws_secret_key=self.aws_secret_key, - aws_access_key=self.aws_access_key, - host='s3.eu-central-1.amazonaws.com' - ) as the_dir: - the_dir.setcontents(name, b_decoded) - size = the_dir.getsize(name) - # Todo : j'arrive pas mettre le mime type ici - key = the_dir._s3bukt.get_key(name) - key.copy( - key.bucket, - key.name, - preserve_acl=True, - metadata={'Content-Type': mime}) - # make shor url - # peut etre avec des ACL on pourrait s'en passer - key.make_public() - url = the_dir.getpathurl(name) - + conn = S3Connection( + self.aws_access_key, + self.aws_secret_key, + host=self.aws_host) + buck = conn.get_bucket('storage-testing-raph') + key = buck.get_key(name) + if not key: + key = buck.new_key(name) + key.set_metadata("Content-Type", mime) + key.set_contents_from_string(b_decoded) + key.make_public() except socket.error: raise UserError('S3 server not available') - basic_vals = { + 
return { 'name': name, - 'url': url, - 'file_size': size, - 'checksum': checksum, + 'url': self._amazon_s3_build_public_url(name), + 'file_size': key.size, + 'checksum': key.md5, 'backend_id': self.id, 'private_path': name } - return basic_vals def _amazon_s3get_public_url(self, obj): # TODO faire mieux diff --git a/storage_backend/models/storage_backend.py b/storage_backend/models/storage_backend.py index 067ed28bd0..0ab44ae1fe 100644 --- a/storage_backend/models/storage_backend.py +++ b/storage_backend/models/storage_backend.py @@ -41,22 +41,3 @@ def get_public_url(self, obj): @implemented_by_factory def get_base64(self, file_id): pass - - @implemented_by_factory - def _get_account(self): - """Appelé par celui qui dépose le fichiers.""" - keychain = self.env['keychain.account'] - if self.env.user.has_group('storage.backend_access'): - retrieve = keychain.suspend_security().retrieve - else: - retrieve = keychain.retrieve - - accounts = retrieve( - [ - ['namespace', '=', 'storage_%s' % self.backend_type], - ['technical_name', '=', self.name] - ]) - if len(accounts) == 0: - _logger.debug('No account found for %s' % self.backend_type) - raise Warning(_("No account found based on the ")) - return accounts diff --git a/storage_backend/views/backend_storage_view.xml b/storage_backend/views/backend_storage_view.xml index 144c33eb10..16f12e76e5 100644 --- a/storage_backend/views/backend_storage_view.xml +++ b/storage_backend/views/backend_storage_view.xml @@ -16,24 +16,35 @@ storage.backend
- - - - - - - - - - - - - - - - - - + +
+
+ + + + + + + + + + + + + + + + + + + + + +
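With this refactoring the S3 backend talks to boto directly instead of going through fs.s3fs. Outside of Odoo, the same upload flow looks roughly as follows (credentials, host and bucket name are placeholders; the boto calls mirror the ones used in _amazon_s3store above):

    import base64
    import mimetypes
    from boto.s3.connection import S3Connection

    def upload(name, datas_b64, access_key, secret_key, host, bucket_name):
        # Mirrors _amazon_s3store: guess the mime type, push the decoded
        # payload to the bucket, then make the key publicly readable.
        mime, enc = mimetypes.guess_type(name)
        conn = S3Connection(access_key, secret_key, host=host)
        buck = conn.get_bucket(bucket_name)
        key = buck.get_key(name) or buck.new_key(name)
        key.set_metadata("Content-Type", mime)
        key.set_contents_from_string(base64.b64decode(datas_b64))
        key.make_public()
        return key.size, key.md5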
From d1a87a1cfa61134dd01b3235d05f804d122def66 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20BEAU?= Date: Thu, 18 May 2017 17:29:15 +0200 Subject: [PATCH 025/375] [IMP] start to add a special widget for reading the image from the public url directly --- storage_backend/data/data.xml | 4 ++-- storage_backend/models/s3_backend.py | 19 ++++++++++--------- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/storage_backend/data/data.xml b/storage_backend/data/data.xml index 4519dc763f..70451f82c1 100644 --- a/storage_backend/data/data.xml +++ b/storage_backend/data/data.xml @@ -1,9 +1,9 @@ - + Odoo backend odoo - \ No newline at end of file + diff --git a/storage_backend/models/s3_backend.py b/storage_backend/models/s3_backend.py index 21fc7d1861..da6c3bdac0 100644 --- a/storage_backend/models/s3_backend.py +++ b/storage_backend/models/s3_backend.py @@ -74,12 +74,13 @@ def _amazon_s3get_public_url(self, obj): def _amazon_s3get_base64(self, file_id): logger.warning('return base64 of a file') - with s3fs.S3FS( - self.aws_bucket, - aws_secret_key=self.aws_secret_key, - aws_access_key=self.aws_access_key, - host='s3.eu-central-1.amazonaws.com' - ) as the_dir: - # TODO : quel horreur ! on a deja l'url - bin = the_dir.getcontents(file_id.name) # mettre private_path - return base64.b64encode(bin) + # TODO reimplement + #with s3fs.S3FS( + # self.aws_bucket, + # aws_secret_key=self.aws_secret_key, + # aws_access_key=self.aws_access_key, + # host='s3.eu-central-1.amazonaws.com' + #) as the_dir: + # # TODO : quel horreur ! on a deja l'url + # bin = the_dir.getcontents(file_id.name) # mettre private_path + # return base64.b64encode(bin) From 1f7100e48864c71a18a31146b2a053d97204fefc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20BEAU?= Date: Mon, 22 May 2017 11:05:51 +0200 Subject: [PATCH 026/375] [REF] refactor thumbnails and marke it work on small and medium of odoo --- storage_backend/models/storage_backend.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/storage_backend/models/storage_backend.py b/storage_backend/models/storage_backend.py index 0ab44ae1fe..3aad2e9aca 100644 --- a/storage_backend/models/storage_backend.py +++ b/storage_backend/models/storage_backend.py @@ -37,7 +37,3 @@ def store(self, vals): @implemented_by_factory def get_public_url(self, obj): pass - - @implemented_by_factory - def get_base64(self, file_id): - pass From 7edee17bc6a8155102609534175f31ce11d9e15f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20BEAU?= Date: Mon, 22 May 2017 13:44:41 +0200 Subject: [PATCH 027/375] [REF] continue refactoring, remove inheriting ir.attachment as the write/create was inherited and this was adding issue, use a computed field to store the field into the storage backend --- storage_backend/models/s3_backend.py | 35 +++++++---------------- storage_backend/models/storage_backend.py | 7 ++++- 2 files changed, 16 insertions(+), 26 deletions(-) diff --git a/storage_backend/models/s3_backend.py b/storage_backend/models/s3_backend.py index da6c3bdac0..0bb3e69150 100644 --- a/storage_backend/models/s3_backend.py +++ b/storage_backend/models/s3_backend.py @@ -32,17 +32,8 @@ class S3StorageBackend(models.Model): aws_host = fields.Char(sparse="data") aws_cloudfront_domain = fields.Char(sparse="data") - def _amazon_s3_build_public_url(self, name): - if self.aws_cloudfront_domain: - host = self.aws_cloutfront_domain - else: - host = self.aws_host - return "https://%s/%s/%s" % (host, self.aws_bucket, name) - - def _amazon_s3store(self, vals): - name = 
vals['name'] + def _amazon_s3_store(self, name, datas, is_public=False): mime, enc = mimetypes.guess_type(name) - b_decoded = base64.b64decode(vals['datas']) try: conn = S3Connection( self.aws_access_key, @@ -53,24 +44,18 @@ def _amazon_s3store(self, vals): if not key: key = buck.new_key(name) key.set_metadata("Content-Type", mime) - key.set_contents_from_string(b_decoded) - key.make_public() + key.set_contents_from_string(datas) + if is_public: + key.make_public() except socket.error: raise UserError('S3 server not available') - return { - 'name': name, - 'url': self._amazon_s3_build_public_url(name), - 'file_size': key.size, - 'checksum': key.md5, - 'backend_id': self.id, - 'private_path': name - } - - def _amazon_s3get_public_url(self, obj): - # TODO faire mieux - logger.info('get_public_url') - return obj.url + def _amazon_s3get_public_url(self, name): + if self.aws_cloudfront_domain: + host = self.aws_cloutfront_domain + else: + host = self.aws_host + return "https://%s/%s/%s" % (host, self.aws_bucket, name) def _amazon_s3get_base64(self, file_id): logger.warning('return base64 of a file') diff --git a/storage_backend/models/storage_backend.py b/storage_backend/models/storage_backend.py index 3aad2e9aca..f46de573c1 100644 --- a/storage_backend/models/storage_backend.py +++ b/storage_backend/models/storage_backend.py @@ -30,8 +30,13 @@ class StorageBackend(models.Model): backend_type = fields.Selection([], required=True) # added by subclasses public_base_url = fields.Char() + def store(self, name, datas, is_base64=True, **kwargs): + if is_base64: + datas = base64.b64decode(datas) + return self._store(name, datas, **kwargs) + @implemented_by_factory - def store(self, vals): + def _store(self, name, datas, **kwargs): pass @implemented_by_factory From 1a5c05e7cbdc1a559eaf6f7233537a82ced47735 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20BEAU?= Date: Sun, 28 May 2017 19:15:29 +0200 Subject: [PATCH 028/375] [REF] finish to refactor and remove testing code as product_multi_image have been ported using this new module --- storage_backend/models/s3_backend.py | 8 +++----- storage_backend/models/sftp_backend.py | 2 +- storage_backend/models/storage_backend.py | 3 ++- 3 files changed, 6 insertions(+), 7 deletions(-) diff --git a/storage_backend/models/s3_backend.py b/storage_backend/models/s3_backend.py index 0bb3e69150..b93ffaa46b 100644 --- a/storage_backend/models/s3_backend.py +++ b/storage_backend/models/s3_backend.py @@ -4,12 +4,10 @@ # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). import socket -import hashlib import logging -import base64 import mimetypes -from openerp import api, fields, models +from openerp import fields, models from openerp.exceptions import Warning as UserError logger = logging.getLogger(__name__) @@ -60,12 +58,12 @@ def _amazon_s3get_public_url(self, name): def _amazon_s3get_base64(self, file_id): logger.warning('return base64 of a file') # TODO reimplement - #with s3fs.S3FS( + # with s3fs.S3FS( # self.aws_bucket, # aws_secret_key=self.aws_secret_key, # aws_access_key=self.aws_access_key, # host='s3.eu-central-1.amazonaws.com' - #) as the_dir: + # ) as the_dir: # # TODO : quel horreur ! 
on a deja l'url # bin = the_dir.getcontents(file_id.name) # mettre private_path # return base64.b64encode(bin) diff --git a/storage_backend/models/sftp_backend.py b/storage_backend/models/sftp_backend.py index 7bd6df37cb..44348b4821 100644 --- a/storage_backend/models/sftp_backend.py +++ b/storage_backend/models/sftp_backend.py @@ -8,7 +8,7 @@ import logging import base64 -from openerp import api, fields, models +from openerp import fields, models from openerp.exceptions import Warning as UserError logger = logging.getLogger(__name__) diff --git a/storage_backend/models/storage_backend.py b/storage_backend/models/storage_backend.py index f46de573c1..d0da4ccb70 100644 --- a/storage_backend/models/storage_backend.py +++ b/storage_backend/models/storage_backend.py @@ -2,9 +2,10 @@ # Copyright 2017 Akretion (http://www.akretion.com). # @author Sébastien BEAU # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). +import base64 import logging from functools import wraps -from openerp import _, fields, models +from openerp import fields, models _logger = logging.getLogger(__name__) From 764ea7043136a6eac0678770c871ea2683b58769 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20BEAU?= Date: Mon, 29 May 2017 07:34:11 +0200 Subject: [PATCH 029/375] [IMP] add security rule for admin. Add TODO for adding some extra code rule based on res_id and res_model like in ir.attachment --- storage_backend/__openerp__.py | 1 + storage_backend/security/ir.model.access.csv | 2 ++ 2 files changed, 3 insertions(+) create mode 100644 storage_backend/security/ir.model.access.csv diff --git a/storage_backend/__openerp__.py b/storage_backend/__openerp__.py index 6a4f9d593c..9483eb6b36 100644 --- a/storage_backend/__openerp__.py +++ b/storage_backend/__openerp__.py @@ -24,5 +24,6 @@ "data": [ "views/backend_storage_view.xml", "data/data.xml", + "security/ir.model.access.csv", ], } diff --git a/storage_backend/security/ir.model.access.csv b/storage_backend/security/ir.model.access.csv new file mode 100644 index 0000000000..fd86f3527c --- /dev/null +++ b/storage_backend/security/ir.model.access.csv @@ -0,0 +1,2 @@ +id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink +access_storage_backend,storage_backend manager,model_storage_backend,base.group_system,1,1,1,1 From 5ab16a2f0f4e52426c103abef4beb5e62f8a7633 Mon Sep 17 00:00:00 2001 From: Florian da Costa Date: Tue, 30 May 2017 13:15:32 +0200 Subject: [PATCH 030/375] Refactor Odoo, local and sftp storage --- storage_backend/models/local_backend.py | 38 +++------- storage_backend/models/odoo_backend.py | 65 +++++++---------- storage_backend/models/s3_backend.py | 1 + storage_backend/models/sftp_backend.py | 71 +++++++++++-------- storage_backend/models/storage_backend.py | 2 +- .../views/backend_storage_view.xml | 1 + 6 files changed, 82 insertions(+), 96 deletions(-) diff --git a/storage_backend/models/local_backend.py b/storage_backend/models/local_backend.py index 465089e754..12f8c90510 100644 --- a/storage_backend/models/local_backend.py +++ b/storage_backend/models/local_backend.py @@ -8,11 +8,6 @@ from openerp import fields, models logger = logging.getLogger(__name__) -try: - from fs.osfs import OSFS -except ImportError as err: - logger.debug(err) - class FileStoreStorageBackend(models.Model): _inherit = 'storage.backend' backend_type = fields.Selection( selection_add=[('filestore', 'Filestore')]) - filestore_public_base_url = fields.Char() - filestore_base_path =
fields.Char() + filestore_public_base_url = fields.Char( + sparse="data") + filestore_base_path = fields.Char( + sparse="data") - def _filestorestore(self, vals): + def _filestore_store(self, name, datas, is_public=False): # TODO: refactorer, ça marche plus vraiment # enregistre le binary la où on lui dit # renvois l'objet en question - blob = vals['datas'] - checksum = u'' + hashlib.sha1(blob).hexdigest() - path = checksum + full_path = self.filestore_base_path + '/' + name + with open(full_path, "wb") as my_file: + my_file.write(datas) + return name - with OSFS(self.filestore_base_path) as the_dir: - the_dir.setcontents(path, blob) - size = the_dir.getsize(path) - - basic_vals = { - # 'name': '', - 'url': path, - 'file_size': size, - 'checksum': checksum, - 'backend_id': self.id, - 'private_path': path, - } - return basic_vals - - def _filestoreget_public_url(self, obj): + def _filestoreget_public_url(self, name): # TODO faire mieux logger.info('get_public_url') - return self.filestore_public_base_url + '/' + obj.name + return self.filestore_public_base_url + '/' + name def _filestoreget_base64(self, file_id): logger.info('return base64 of a file') diff --git a/storage_backend/models/odoo_backend.py b/storage_backend/models/odoo_backend.py index 392eb03e23..f099d1bc3a 100644 --- a/storage_backend/models/odoo_backend.py +++ b/storage_backend/models/odoo_backend.py @@ -5,6 +5,9 @@ from openerp import fields, models import hashlib +import base64 + + import logging logger = logging.getLogger(__name__) @@ -15,51 +18,37 @@ class OdooStorageBackend(models.Model): backend_type = fields.Selection( selection_add=[('odoo', 'Odoo')]) - def _odoostore(self, vals): - blob = vals['datas'] - checksum = u'' + hashlib.sha1(blob).hexdigest() - name = vals.get('name', checksum) - - # res_model = OdooStrogageBackend - # car il faut faire savoir sur quel - # backend on est lié - - ir_attach = { - 'name': name, # utiliser name a la place + def _odoo_store(self, name, datas, is_public=False, **kwargs): + checksum = u'' + hashlib.sha1(datas).hexdigest() + name = name or checksum + datas_encoded = base64.b64encode(datas) + ir_attach_vals = { + 'name': name, 'type': 'binary', - 'datas': blob, - 'res_model': self._name, - 'res_id': self.id, + 'datas': datas_encoded, } logger.info('on va crée le ir suivant:') - logger.info(ir_attach) + logger.info(ir_attach_vals) + + attachment = self.env['ir.attachment'].create(ir_attach_vals) + return attachment.id - pj = self.env['ir.attachment'].create(ir_attach) - size = pj.file_size + def _odooget_public_url(self, attach_id): + # TODO faire mieux + logger.info('get_public_url') +# attach = self.env['ir.attachment'].search([('name', '=', name)], +# limit=1) + attach = self.env['ir.attachment'].browse(attach_id) url = ( - '/web/binary/image?model=%(res_model)s' - '&id=%(res_id)s&field=datas' + '/web/binary/image?model=%(model)s' + '&id=%(attach_id)s&field=datas' # comment on sait que c'est une image? 
a mettre ailleurs ) % { - 'res_model': pj._name, - 'res_id': pj.id - } - - basic_vals = { - # 'name': '', - 'name': name, - 'url': url, - 'file_size': size, - 'checksum': checksum, - 'backend_id': self.id, - 'private_path': pj.id + 'model': attach._name, + 'attach_id': attach.id } - return basic_vals - - def _odooget_public_url(self, obj): - # TODO faire mieux - logger.info('get_public_url') - return self._odoo_lookup(obj).url + print url + return url def _odooget_base64(self, file_id): logger.info('return base64 of a file') @@ -67,7 +56,5 @@ def _odooget_base64(self, file_id): def _odoo_lookup(self, obj): return self.env['ir.attachment'].search([ - ('res_model', '=', self._name), - ('res_id', '=', self.id), ('id', '=', obj.private_path) ]) diff --git a/storage_backend/models/s3_backend.py b/storage_backend/models/s3_backend.py index b93ffaa46b..8e6237afd1 100644 --- a/storage_backend/models/s3_backend.py +++ b/storage_backend/models/s3_backend.py @@ -47,6 +47,7 @@ def _amazon_s3_store(self, name, datas, is_public=False): key.make_public() except socket.error: raise UserError('S3 server not available') + return name def _amazon_s3get_public_url(self, name): if self.aws_cloudfront_domain: diff --git a/storage_backend/models/sftp_backend.py b/storage_backend/models/sftp_backend.py index 44348b4821..879dfdb8ea 100644 --- a/storage_backend/models/sftp_backend.py +++ b/storage_backend/models/sftp_backend.py @@ -32,7 +32,8 @@ class SftpStorageBackend(models.Model): ) sftp_server = fields.Char( string='SFTP host', - help='', + help='Can include the port if necessary, like ' + 'my-server:22222', sparse="data" ) sftp_dir_path = fields.Char( @@ -40,46 +41,58 @@ class SftpStorageBackend(models.Model): help='Dir on the server where to store files', sparse="data" ) + sftp_login = fields.Char( + string='SFTP login', + help='Login to connect to sftp server', + sparse="data" + ) # TODO externiser ça dans des parametres # ou dans un keychain ? + # Can't work without login/password?? 
+# def _sftp_store(self, name, datas, is_public=False): +# checksum = u'' + hashlib.sha1(blob).hexdigest() +# name = name or checksum +# # todo add filename here (for extention) +# b_decoded = base64.b64decode(datas) +# try: +# with sftpfs.SFTPFS( +# self.sftp_server, +# root_path=self.sftp_dir_path +# ) as the_dir: +# the_dir.setcontents(name, b_decoded) +# except socket.error: +# raise UserError('SFTP server not available') +# return name - def _sftpstore(self, vals): - blob = vals['datas'] - checksum = u'' + hashlib.sha1(blob).hexdigest() - - name = vals.get('name', checksum) + def _sftp_store(self, name, datas, is_public=False): # todo add filename here (for extention) - b_decoded = base64.b64decode(blob) try: - with sftpfs.SFTPFS( - self.sftp_server, - root_path=self.sftp_dir_path - ) as the_dir: - the_dir.setcontents(name, b_decoded) - size = the_dir.getsize(name) + account = self._get_keychain_account() + password = account.get_password() + print password, self.sftp_login, self.sftp_server + with sftpfs.SFTPFS(connection=self.sftp_server, + username=self.sftp_login, + password=password + ) as conn: + full_path = self.sftp_dir_path + '/' + name + conn.setcontents(full_path, datas) except socket.error: raise UserError('SFTP server not available') + return name - basic_vals = { - 'name': name, - 'url': name, - 'file_size': size, - 'checksum': checksum, - 'backend_id': self.id, - 'private_path': self.sftp_dir_path + name, - } - return basic_vals - - def _sftpget_public_url(self, obj): + def _sftpget_public_url(self, name): # TODO faire mieux logger.info('get_public_url') - if obj.to_do: - logger.warning( - 'public url not available for not processed thumbnail') - return None - return self.sftp_public_base_url + obj.url +# if obj.to_do: +# logger.warning( +# 'public url not available for not processed thumbnail') +# return None + host = self.sftp_public_base_url + directory = self.sftp_dir_path + return "https://%s/%s/%s" % (host, directory, name) +# return self.sftp_public_base_url + obj.url def _sftpget_base64(self, file_id): logger.info('return base64 of a file') diff --git a/storage_backend/models/storage_backend.py b/storage_backend/models/storage_backend.py index d0da4ccb70..fc36bab99f 100644 --- a/storage_backend/models/storage_backend.py +++ b/storage_backend/models/storage_backend.py @@ -41,5 +41,5 @@ def _store(self, name, datas, **kwargs): pass @implemented_by_factory - def get_public_url(self, obj): + def get_public_url(self, name, **kwargs): pass diff --git a/storage_backend/views/backend_storage_view.xml b/storage_backend/views/backend_storage_view.xml index 16f12e76e5..eb91431ed1 100644 --- a/storage_backend/views/backend_storage_view.xml +++ b/storage_backend/views/backend_storage_view.xml @@ -30,6 +30,7 @@ + From de16d325db270c0a606d75ccd9d256a084199af2 Mon Sep 17 00:00:00 2001 From: Florian da Costa Date: Wed, 31 May 2017 13:30:26 +0200 Subject: [PATCH 031/375] Add pre and post init hook meant to be used in submodules --- storage_backend/models/local_backend.py | 15 +++++---- storage_backend/models/odoo_backend.py | 25 +++++++-------- storage_backend/models/sftp_backend.py | 39 ++++++++++++----------- storage_backend/models/storage_backend.py | 4 +++ 4 files changed, 45 insertions(+), 38 deletions(-) diff --git a/storage_backend/models/local_backend.py b/storage_backend/models/local_backend.py index 12f8c90510..b25b45c56e 100644 --- a/storage_backend/models/local_backend.py +++ b/storage_backend/models/local_backend.py @@ -3,8 +3,9 @@ # @author Sébastien BEAU # License
AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). -import hashlib +import base64 import logging +import os from openerp import fields, models logger = logging.getLogger(__name__) @@ -24,7 +25,7 @@ def _filestore_store(self, name, datas, is_public=False): # TODO: refactorer, ça marche plus vraiment # enregistre le binary la où on lui dit # renvois l'objet en question - full_path = self.filestore_base_path + '/' + name + full_path = os.path.join(self.filestore_base_path, name) with open(full_path, "wb") as my_file: my_file.write(datas) return name @@ -32,9 +33,11 @@ def _filestore_store(self, name, datas, is_public=False): def _filestoreget_public_url(self, name): # TODO faire mieux logger.info('get_public_url') - return self.filestore_public_base_url + '/' + name + return os.path.join(self.filestore_public_base_url, name) - def _filestoreget_base64(self, file_id): + def _filestoreretrieve_datas(self, name): logger.info('return base64 of a file') - with OSFS(self.filestore_base_path) as the_dir: - return the_dir.open(file_id.url).read() + full_path = os.path.join(self.filestore_base_path, name) + with open(full_path, "b") as my_file: + datas = my_file.read() + return datas and base64.b64encode(datas) or False diff --git a/storage_backend/models/odoo_backend.py b/storage_backend/models/odoo_backend.py index f099d1bc3a..b4a0627b00 100644 --- a/storage_backend/models/odoo_backend.py +++ b/storage_backend/models/odoo_backend.py @@ -4,7 +4,6 @@ # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). from openerp import fields, models -import hashlib import base64 @@ -19,8 +18,6 @@ class OdooStorageBackend(models.Model): selection_add=[('odoo', 'Odoo')]) def _odoo_store(self, name, datas, is_public=False, **kwargs): - checksum = u'' + hashlib.sha1(datas).hexdigest() - name = name or checksum datas_encoded = base64.b64encode(datas) ir_attach_vals = { 'name': name, @@ -40,21 +37,21 @@ def _odooget_public_url(self, attach_id): # limit=1) attach = self.env['ir.attachment'].browse(attach_id) url = ( - '/web/binary/image?model=%(model)s' + 'web/binary/image?model=%(model)s' '&id=%(attach_id)s&field=datas' # comment on sait que c'est une image? a mettre ailleurs ) % { 'model': attach._name, 'attach_id': attach.id } - print url - return url - - def _odooget_base64(self, file_id): + base_url = self.env['ir.config_parameter'].get_param('web.base.url') + if not base_url.endswith('/'): + base_url = base_url + '/' + return base_url + url + + # This method is kind of useless but we can keep it to be consistent with + # other storage backends + def _odooretrieve_datas(self, attach_id): logger.info('return base64 of a file') - return self._odoo_lookup(file_id).datas - - def _odoo_lookup(self, obj): - return self.env['ir.attachment'].search([ - ('id', '=', obj.private_path) - ]) + attach = self.env['ir.attachment'].browse(attach_id) + return attach.datas diff --git a/storage_backend/models/sftp_backend.py b/storage_backend/models/sftp_backend.py index 879dfdb8ea..98615143be 100644 --- a/storage_backend/models/sftp_backend.py +++ b/storage_backend/models/sftp_backend.py @@ -4,9 +4,9 @@ # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). 
import socket -import hashlib import logging import base64 +import os from openerp import fields, models from openerp.exceptions import Warning as UserError @@ -70,12 +70,11 @@ def _sftp_store(self, name, datas, is_public=False): try: account = self._get_keychain_account() password = account.get_password() - print password, self.sftp_login, self.sftp_server - with sftpfs.SFTPFS(connection=self.sftp_server, - username=self.sftp_login, - password=password - ) as conn: - full_path = self.sftp_dir_path + '/' + name + with sftpfs.SFTPFS(connection=self.sftp_server, + username=self.sftp_login, + password=password + ) as conn: + full_path = os.path.join(self.sftp_dir_path, name) conn.setcontents(full_path, datas) except socket.error: raise UserError('SFTP server not available') @@ -85,19 +84,23 @@ def _sftpget_public_url(self, name): # TODO faire mieux logger.info('get_public_url') -# if obj.to_do: -# logger.warning( -# 'public url not available for not processed thumbnail') -# return None host = self.sftp_public_base_url directory = self.sftp_dir_path return "https://%s/%s/%s" % (host, directory, name) -# return self.sftp_public_base_url + obj.url - def _sftpget_base64(self, file_id): + def _sftpretrieve_datas(self, name): logger.info('return base64 of a file') - with sftpfs.SFTPFS( - self.sftp_server, - root_path=self.sftp_dir_path - ) as the_dir: - return the_dir.open(file_id.url, 'r') + try: + account = self._get_keychain_account() + password = account.get_password() + with sftpfs.SFTPFS(connection=self.sftp_server, + username=self.sftp_login, + password=password + ) as conn: + full_path = os.path.join(self.sftp_dir_path, name) + file_data = conn.open(full_path, 'rb') + datas = file_data.read() + datas_encoded = datas and base64.b64encode(datas) or False + except socket.error: + raise UserError('SFTP server not available') + return datas_encoded diff --git a/storage_backend/models/storage_backend.py b/storage_backend/models/storage_backend.py index fc36bab99f..d16237e5f2 100644 --- a/storage_backend/models/storage_backend.py +++ b/storage_backend/models/storage_backend.py @@ -43,3 +43,7 @@ def _store(self, name, datas, **kwargs): @implemented_by_factory def get_public_url(self, name, **kwargs): pass + + @implemented_by_factory + def retrieve_datas(self, name, **kwargs): + pass From f1acd097504035140606c8399d00d9320cf0ec4e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20BEAU?= Date: Mon, 19 Jun 2017 16:01:55 +0200 Subject: [PATCH 032/375] [IMP] improve storage with amazon S3, better support of cloudfront --- storage_backend/models/s3_backend.py | 22 +++++++++++++------ .../views/backend_storage_view.xml | 10 +++++---- 2 files changed, 21 insertions(+), 11 deletions(-) diff --git a/storage_backend/models/s3_backend.py b/storage_backend/models/s3_backend.py index 8e6237afd1..033a34667d 100644 --- a/storage_backend/models/s3_backend.py +++ b/storage_backend/models/s3_backend.py @@ -25,19 +25,23 @@ class S3StorageBackend(models.Model): selection_add=[('amazon_s3', 'Amazon S3')]) aws_bucket = fields.Char(sparse="data") - aws_secret_key = fields.Char(sparse="data") + aws_directory = fields.Char(sparse="data") aws_access_key = fields.Char(sparse="data") aws_host = fields.Char(sparse="data") aws_cloudfront_domain = fields.Char(sparse="data") + aws_cloudfront_domain_include_directory = fields.Boolean(sparse="data") def _amazon_s3_store(self, name, datas, is_public=False): mime, enc = mimetypes.guess_type(name) + account = self._get_existing_keychain() try: conn = S3Connection( 
self.aws_access_key, - self.aws_secret_key, + account.get_password(), host=self.aws_host) - buck = conn.get_bucket('storage-testing-raph') + buck = conn.get_bucket(self.aws_bucket) + if self.aws_directory: + name = "%s/%s" % (self.aws_directory, name) key = buck.get_key(name) if not key: key = buck.new_key(name) @@ -49,12 +53,16 @@ def _amazon_s3_store(self, name, datas, is_public=False): raise UserError('S3 server not available') return name - def _amazon_s3get_public_url(self, name): + def _amazon_s3get_public_url(self, path): if self.aws_cloudfront_domain: - host = self.aws_cloutfront_domain + if self.aws_cloudfront_domain_include_directory: + if path.startswith('%s/' % self.aws_directory): + path = path[len(self.aws_directory)+1:] + else: + raise UserError(_('Path do not match with aws directory')) + return "https://%s/%s" % (self.aws_cloudfront_domain, path) else: - host = self.aws_host - return "https://%s/%s/%s" % (host, self.aws_bucket, name) + return "https://%s/%s/%s" % (self.aws_host, self.aws_bucket, path) def _amazon_s3get_base64(self, file_id): logger.warning('return base64 of a file') diff --git a/storage_backend/views/backend_storage_view.xml b/storage_backend/views/backend_storage_view.xml index eb91431ed1..afcb81e951 100644 --- a/storage_backend/views/backend_storage_view.xml +++ b/storage_backend/views/backend_storage_view.xml @@ -39,11 +39,13 @@ - + - - - + + + + + From 2ece96ecdfb551adc7ef7e7d2a6b582bfc204b9f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20BEAU?= Date: Fri, 25 Aug 2017 13:21:18 +0200 Subject: [PATCH 033/375] [FIX] jail local storage inside the directory /storage, this will avoid reading/writing unwanted file --- storage_backend/models/local_backend.py | 28 +++++++++++++------ .../views/backend_storage_view.xml | 6 +++- 2 files changed, 25 insertions(+), 9 deletions(-) diff --git a/storage_backend/models/local_backend.py b/storage_backend/models/local_backend.py index b25b45c56e..fc57cd9ef5 100644 --- a/storage_backend/models/local_backend.py +++ b/storage_backend/models/local_backend.py @@ -6,6 +6,7 @@ import base64 import logging import os +import re from openerp import fields, models logger = logging.getLogger(__name__) @@ -21,23 +22,34 @@ class FileStoreStorageBackend(models.Model): filestore_base_path = fields.Char( sparse="data") + def _fullpath(self, name): + """This will build the full path for the file, we force to + store the data inside the filestore in the directory 'storage". 
+ Becarefull if you implement your own custom path, end user + should never be able to write or read unwanted filesystem file""" + # sanitize base_path + base_path = re.sub('[.]', '', self.filestore_base_path).strip('/\\') + # sanitize name + name = name.strip('/\\') + return os.path.join( + self.env['ir.attachment']._filestore(), 'storage', base_path, name) + def _filestore_store(self, name, datas, is_public=False): - # TODO: refactorer, ça marche plus vraiment - # enregistre le binary la où on lui dit - # renvois l'objet en question - full_path = os.path.join(self.filestore_base_path, name) + full_path = self._fullpath(name) + dirname = os.path.dirname(full_path) + if not os.path.isdir(dirname): + os.makedirs(dirname) + logger.debug('Backend Storage: Write file %s to filestore', full_path) with open(full_path, "wb") as my_file: my_file.write(datas) return name def _filestoreget_public_url(self, name): - # TODO faire mieux - logger.info('get_public_url') return os.path.join(self.filestore_public_base_url, name) def _filestoreretrieve_datas(self, name): - logger.info('return base64 of a file') - full_path = os.path.join(self.filestore_base_path, name) + logger.debug('Backend Storage: Read file %s from filestore', name) + full_path = self._fullpath(name) with open(full_path, "b") as my_file: datas = my_file.read() return datas and base64.b64encode(datas) or False diff --git a/storage_backend/views/backend_storage_view.xml b/storage_backend/views/backend_storage_view.xml index afcb81e951..a4453f5382 100644 --- a/storage_backend/views/backend_storage_view.xml +++ b/storage_backend/views/backend_storage_view.xml @@ -35,6 +35,8 @@ + The path is relative and your data will be store inside the filestore + in the directory filestore/your_database/storage/your_custom_path @@ -45,8 +47,10 @@ - +