Skip to content

Commit

Permalink
Run pre-commit with black, isort, ...
Browse files Browse the repository at this point in the history
  • Loading branch information
guewen committed Oct 14, 2019
1 parent f26eae2 commit a83f8ae
Show file tree
Hide file tree
Showing 43 changed files with 1,437 additions and 1,404 deletions.
2 changes: 1 addition & 1 deletion .isort.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -9,4 +9,4 @@ line_length=88
known_odoo=odoo
known_odoo_addons=odoo.addons
sections=FUTURE,STDLIB,THIRDPARTY,ODOO,ODOO_ADDONS,FIRSTPARTY,LOCALFOLDER
known_third_party=dateutil,mock,psycopg2,requests,setuptools
36 changes: 16 additions & 20 deletions base_export_async/__manifest__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,27 +2,23 @@
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).

{
    # Odoo addon manifest for base_export_async.
    "name": "Base Export Async",
    "summary": """
        Asynchronous export with job queue
    """,
    "version": "12.0.1.0.0",
    "license": "AGPL-3",
    "author": "ACSONE SA/NV, Odoo Community Association (OCA)",
    "website": "https://github.com/OCA/queue",
    "depends": ["web", "queue_job"],
    "data": [
        "views/assets.xml",
        "security/ir.model.access.csv",
        "security/ir_rule.xml",
        "data/config_parameter.xml",
        "data/cron.xml",
    ],
    "demo": [],
    "qweb": ["static/src/xml/base.xml"],
    "installable": False,
}
115 changes: 63 additions & 52 deletions base_export_async/models/delay_export.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
# Copyright 2019 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).

import base64
import json
import logging
import operator

from dateutil.relativedelta import relativedelta

from odoo import _, api, fields, models
from odoo.addons.queue_job.job import job
from odoo.addons.web.controllers.main import CSVExport, ExcelExport
from odoo.exceptions import UserError
Expand All @@ -17,49 +17,49 @@

class DelayExport(models.Model):
    """Run an export as a background job and track who requested it.

    The model's ``_description`` is "Allow to delay the export"; each
    record represents one delayed export owned by ``user_id``.
    """

    _name = "delay.export"
    _description = "Allow to delay the export"

    # User who requested the export; indexed because records are looked
    # up per-user and garbage-collected by cron.
    user_id = fields.Many2one("res.users", string="User", index=True)

@api.model
def delay_export(self, data):
    """Enqueue an export job for the JSON payload in ``data['data']``.

    Raises :class:`UserError` when the current user has no email address,
    since the finished export is delivered by email.
    """
    params = json.loads(data.get("data"))
    if not self.env.user.email:
        raise UserError(_("You must set an email address to your user."))
    self.with_delay().export(params)

@api.model
def _get_file_content(self, params):
export_format = params.get('format')
raw_data = export_format != 'csv'
export_format = params.get("format")
raw_data = export_format != "csv"

model_name, fields_name, ids, domain, import_compat, context = \
operator.itemgetter('model', 'fields', 'ids',
'domain', 'import_compat', 'context')(params)
user = self.env['res.users'].browse([context.get('uid')])
item_names = ("model", "fields", "ids", "domain", "import_compat", "context")
items = operator.itemgetter(item_names)(params)
model_name, fields_name, ids, domain, import_compat, context = items
user = self.env["res.users"].browse([context.get("uid")])
if not user or not user.email:
raise UserError(_("The user doesn't have an email address."))

model = self.env[model_name].with_context(
import_compat=import_compat, **context)
import_compat=import_compat, **context
)
records = model.browse(ids) or model.search(
domain, offset=0, limit=False, order=False)
domain, offset=0, limit=False, order=False
)

if not model._is_an_ordinary_table():
fields_name = [field for field in fields_name
if field['name'] != 'id']
fields_name = [field for field in fields_name if field["name"] != "id"]

field_names = [f['name'] for f in fields_name]
import_data = records.export_data(
field_names, raw_data).get('datas', [])
field_names = [f["name"] for f in fields_name]
import_data = records.export_data(field_names, raw_data).get("datas", [])

if import_compat:
columns_headers = field_names
else:
columns_headers = [val['label'].strip() for val in fields_name]
columns_headers = [val["label"].strip() for val in fields_name]

if export_format == 'csv':
if export_format == "csv":
csv = CSVExport()
return csv.from_data(columns_headers, import_data)
else:
Expand All @@ -71,59 +71,70 @@ def _get_file_content(self, params):
def export(self, params):
    """Generate the export file, attach it and email a download link.

    ``params`` is the export description forwarded by ``delay_export``
    (keys include ``model``, ``context`` and ``format``). The file is
    stored as an ``ir.attachment`` linked to a new ``delay.export``
    record, and the requesting user receives a mail with the URL.
    """
    content = self._get_file_content(params)

    model_name, context, export_format = operator.itemgetter(
        "model", "context", "format"
    )(params)
    user = self.env["res.users"].browse([context.get("uid")])

    # Keep a record of the export so cron_delete can remove it later.
    export_record = self.sudo().create({"user_id": user.id})

    name = "{}.{}".format(model_name, export_format)
    attachment = self.env["ir.attachment"].create(
        {
            "name": name,
            "datas": base64.b64encode(content),
            "datas_fname": name,
            "type": "binary",
            "res_model": self._name,
            "res_id": export_record.id,
        }
    )

    url = "{}/web/content/ir.attachment/{}/datas/{}?download=true".format(
        self.env["ir.config_parameter"].sudo().get_param("web.base.url"),
        attachment.id,
        attachment.name,
    )

    # The attachment lives ``attachment.ttl`` days (default 7) before
    # the cron deletes it; tell the user the expiration date.
    time_to_live = (
        self.env["ir.config_parameter"].sudo().get_param("attachment.ttl", 7)
    )
    date_today = fields.Date.today()
    expiration_date = fields.Date.to_string(
        date_today + relativedelta(days=+int(time_to_live))
    )

    # TODO : move to email template
    odoo_bot = self.sudo().env.ref("base.partner_root")
    email_from = odoo_bot.email
    model_description = self.env[model_name]._description
    self.env["mail.mail"].create(
        {
            "email_from": email_from,
            "reply_to": email_from,
            "email_to": user.email,
            "subject": _("Export {} {}").format(
                model_description, fields.Date.to_string(fields.Date.today())
            ),
            "body_html": _(
                """
            <p>Your export is available <a href="{}">here</a>.</p>
            <p>It will be automatically deleted the {}.</p>
            <p>&nbsp;</p>
            <p><span style="color: #808080;">
                This is an automated message please do not reply.
            </span></p>
            """
            ).format(url, expiration_date),
            "auto_delete": True,
        }
    )

@api.model
def cron_delete(self):
    """Scheduled action: unlink export records older than the TTL.

    The TTL comes from the ``attachment.ttl`` config parameter
    (days, default 7); records created on or before today - TTL
    are removed.
    """
    time_to_live = (
        self.env["ir.config_parameter"].sudo().get_param("attachment.ttl", 7)
    )
    date_today = fields.Date.today()
    date_to_delete = date_today + relativedelta(days=-int(time_to_live))
    self.search([("create_date", "<=", date_to_delete)]).unlink()
4 changes: 2 additions & 2 deletions base_export_async/readme/USAGE.rst
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
The user is presented with a new checkbox "Asynchronous export"
in the export screen. When selected, the export is delayed in a
background job.

The .csv or .xls file generated by the export will be sent by email
Expand Down
62 changes: 31 additions & 31 deletions base_export_async/tests/test_base_export_async.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,38 +2,41 @@
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).

import json

from dateutil.relativedelta import relativedelta

import odoo.tests.common as common
from odoo import fields

# Sample export requests, mirroring what the web client posts to the
# asynchronous export endpoint: the "data" value is a JSON string.
data_csv = {
    "data": """{"format": "csv", "model": "res.partner",
"fields": [{"name": "id", "label": "External ID"},
{"name": "display_name", "label": "Display Name"},
{"name": "email", "label": "Email"},
{"name": "phone", "label": "Phone"}],
"ids": false,
"domain": [],
"context": {"lang": "en_US", "tz": "Europe/Brussels", "uid": 2},
"import_compat": false}"""
}

data_xls = {
    "data": """{"format": "xls", "model": "res.partner",
"fields": [{"name": "id", "label": "External ID"},
{"name": "display_name", "label": "Display Name"},
{"name": "email", "label": "Email"},
{"name": "phone", "label": "Phone"}],
"ids": false,
"domain": [],
"context": {"lang": "en_US", "tz": "Europe/Brussels", "uid": 2},
"import_compat": false}"""
}


class TestBaseExportAsync(common.TransactionCase):
    """Tests for the asynchronous export (delay.export) model."""

    def setUp(self):
        super(TestBaseExportAsync, self).setUp()
        # Shorthands for the models exercised by the tests.
        self.delay_export_obj = self.env["delay.export"]
        self.job_obj = self.env["queue.job"]

def test_delay_export(self):
""" Check that the call create a new JOB"""
Expand All @@ -44,42 +47,39 @@ def test_delay_export(self):

def test_export_csv(self):
    """Check that the CSV export generates an attachment and an email."""
    params = json.loads(data_csv.get("data"))
    # Snapshot existing records to isolate what export() creates.
    mails = self.env["mail.mail"].search([])
    attachments = self.env["ir.attachment"].search([])
    self.delay_export_obj.export(params)
    new_mail = self.env["mail.mail"].search([]) - mails
    new_attachment = self.env["ir.attachment"].search([]) - attachments
    self.assertEqual(len(new_mail), 1)
    self.assertEqual(new_attachment.datas_fname, "res.partner.csv")

def test_export_xls(self):
    """Check that the XLS export generates an attachment and an email."""
    params = json.loads(data_xls.get("data"))
    # Snapshot existing records to isolate what export() creates.
    mails = self.env["mail.mail"].search([])
    attachments = self.env["ir.attachment"].search([])
    self.delay_export_obj.export(params)
    new_mail = self.env["mail.mail"].search([]) - mails
    new_attachment = self.env["ir.attachment"].search([]) - attachments
    self.assertEqual(len(new_mail), 1)
    self.assertEqual(new_attachment.datas_fname, "res.partner.xls")

def test_cron_delete(self):
    """Check that the cron deletes the export attachment after the TTL."""
    params = json.loads(data_csv.get("data"))
    attachments = self.env["ir.attachment"].search([])
    self.delay_export_obj.export(params)
    new_attachment = self.env["ir.attachment"].search([]) - attachments
    time_to_live = (
        self.env["ir.config_parameter"].sudo().get_param("attachment.ttl", 7)
    )
    date_today = fields.Date.today()
    date_to_delete = date_today + relativedelta(days=-int(time_to_live))
    # Backdate create_date to today - TTL so the record is eligible.
    self.delay_export_obj.search([]).write({"create_date": date_to_delete})
    self.delay_export_obj.sudo().cron_delete()
    # The attachment must be deleted
    self.assertFalse(new_attachment.exists())
29 changes: 11 additions & 18 deletions base_import_async/__manifest__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,22 +3,15 @@
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).

{
    # Odoo addon manifest for base_import_async.
    "name": "Asynchronous Import",
    "summary": "Import CSV files in the background",
    "version": "11.0.1.0.0",
    "author": "Akretion, ACSONE SA/NV, Odoo Community Association (OCA)",
    "license": "AGPL-3",
    "website": "https://github.com/OCA/queue",
    "category": "Generic Modules",
    "depends": ["base_import", "queue_job"],
    "data": ["views/base_import_async.xml"],
    "qweb": ["static/src/xml/import.xml"],
    "installable": False,
}
Loading

0 comments on commit a83f8ae

Please sign in to comment.