[RFR] Improve patchwork #3
@@ -1,2 +1,3 @@
-from . import odoo
 from . import addons
+from . import odoo
+from . import odoo_patch
@@ -1,3 +1,3 @@
 from . import mrp
-from . import stock
 from . import point_of_sale
+from . import stock
@@ -1,19 +1,10 @@
 from odoo.addons import mrp
+from ...odoo_patch import OdooPatch
 
 
-def _pre_init_mrp(cr):
-    """ Allow installing MRP in databases with large stock.move table (>1M records)
-    - Creating the computed+stored field stock_move.is_done is terribly slow with the ORM and
-      leads to "Out of Memory" crashes
-    """
-    # <OpenUpgrade:REMOVE>
-    # don't try to add 'is_done' column, because it will fail
-    # when executing the generation of records, in the openupgrade_records
-    # module.
-    # cr.execute("""ALTER TABLE "stock_move" ADD COLUMN "is_done" bool;""")
-    # cr.execute("""UPDATE stock_move
-    #              SET is_done=COALESCE(state in ('done', 'cancel'), FALSE);""")
-    # </OpenUpgrade>
-
-
-mrp._pre_init_mrp = _pre_init_mrp
+class PreInitHookPatch(OdooPatch):
+    target = mrp
+    method_names = ['_pre_init_mrp']
+
+    def _pre_init_mrp(cr):
+        """ Don't try to create an existing column on reinstall """
This file was deleted.
This file was deleted.
This file was deleted.
@@ -1,17 +1,10 @@
 from odoo.addons import stock
+from ...odoo_patch import OdooPatch
 
 
-def pre_init_hook(cr):
-    # <OpenUpgrade:REMOVE>
-    # don't uninstall data as this breaks the analysis
-    # Origin of this code is https://github.com/odoo/odoo/issues/22243
-    # env = api.Environment(cr, SUPERUSER_ID, {})
-    # env['ir.model.data'].search([
-    #     ('model', 'like', '%stock%'),
-    #     ('module', '=', 'stock')
-    # ]).unlink()
-    pass
-    # </OpenUpgrade>
-
-
-stock.pre_init_hook = pre_init_hook
+class PreInitHookPatch(OdooPatch):
+    target = stock
+    method_names = ['pre_init_hook']
+
+    def pre_init_hook(cr):
+        """ Don't unlink stock data on reinstall """
@@ -1,5 +1,3 @@
from . import models
from . import modules
from . import service
from . import tools
from . import http
from . import models
This file was deleted.
@@ -1,179 +1,21 @@
 # flake8: noqa
 # pylint: skip-file
 
-import odoo
-import psycopg2
-from odoo import _
-from odoo.models import fix_import_export_id_paths, BaseModel, _logger
+from odoo import api, models
+from ..odoo_patch import OdooPatch
 from ... import upgrade_log
 
 
-if True:
-    def _load(self, fields, data):
-        """
-        Attempts to load the data matrix, and returns a list of ids (or
-        ``False`` if there was an error and no id could be generated) and a
-        list of messages.
-
-        The ids are those of the records created and saved (in database), in
-        the same order they were extracted from the file. They can be passed
-        directly to :meth:`~read`
-
-        :param fields: list of fields to import, at the same index as the corresponding data
-        :type fields: list(str)
-        :param data: row-major matrix of data to import
-        :type data: list(list(str))
-        :returns: {ids: list(int)|False, messages: [Message][, lastrow: int]}
-        """
-        self.flush()
-
-        # determine values of mode, current_module and noupdate
-        mode = self._context.get('mode', 'init')
-        current_module = self._context.get('module', '__import__')
-        noupdate = self._context.get('noupdate', False)
-        # add current module in context for the conversion of xml ids
-        self = self.with_context(_import_current_module=current_module)
-
-        cr = self._cr
-        cr.execute('SAVEPOINT model_load')
-
-        fields = [fix_import_export_id_paths(f) for f in fields]
-        fg = self.fields_get()
-
-        ids = []
-        messages = []
-        ModelData = self.env['ir.model.data']
-
-        # list of (xid, vals, info) for records to be created in batch
-        batch = []
-        batch_xml_ids = set()
-        # models in which we may have created / modified data, therefore might
-        # require flushing in order to name_search: the root model and any
-        # o2m
-        creatable_models = {self._name}
-        for field_path in fields:
-            if field_path[0] in (None, 'id', '.id'):
-                continue
-            model_fields = self._fields
-            if isinstance(model_fields[field_path[0]], odoo.fields.Many2one):
-                # this only applies for toplevel m2o (?) fields
-                if field_path[0] in (self.env.context.get('name_create_enabled_fieds') or {}):
-                    creatable_models.add(model_fields[field_path[0]].comodel_name)
-            for field_name in field_path:
-                if field_name in (None, 'id', '.id'):
-                    break
-
-                if isinstance(model_fields[field_name], odoo.fields.One2many):
-                    comodel = model_fields[field_name].comodel_name
-                    creatable_models.add(comodel)
-                    model_fields = self.env[comodel]._fields
-
-        def flush(*, xml_id=None, model=None):
-            if not batch:
-                return
-
-            assert not (xml_id and model), \
-                "flush can specify *either* an external id or a model, not both"
-
-            if xml_id and xml_id not in batch_xml_ids:
-                if xml_id not in self.env:
-                    return
-            if model and model not in creatable_models:
-                return
-
-            data_list = [
-                dict(xml_id=xid, values=vals, info=info, noupdate=noupdate)
-                for xid, vals, info in batch
-            ]
-            batch.clear()
-            batch_xml_ids.clear()
-
-            # try to create in batch
-            try:
-                with cr.savepoint():
-                    recs = self._load_records(data_list, mode == 'update')
-                    ids.extend(recs.ids)
-                    return
-            except psycopg2.InternalError as e:
-                # broken transaction, exit and hope the source error was already logged
-                if not any(message['type'] == 'error' for message in messages):
-                    info = data_list[0]['info']
-                    messages.append(dict(info, type='error', message=_(u"Unknown database error: '%s'", e)))
-                return
-            except Exception:
-                pass
-
-            errors = 0
-            # try again, this time record by record
-            for i, rec_data in enumerate(data_list, 1):
-                try:
-                    with cr.savepoint():
-                        rec = self._load_records([rec_data], mode == 'update')
-                        ids.append(rec.id)
-                except psycopg2.Warning as e:
-                    info = rec_data['info']
-                    messages.append(dict(info, type='warning', message=str(e)))
-                except psycopg2.Error as e:
-                    info = rec_data['info']
-                    messages.append(dict(info, type='error', **PGERROR_TO_OE[e.pgcode](self, fg, info, e)))
-                    # Failed to write, log to messages, rollback savepoint (to
-                    # avoid broken transaction) and keep going
-                    errors += 1
-                except Exception as e:
-                    _logger.debug("Error while loading record", exc_info=True)
-                    info = rec_data['info']
-                    message = (_(u'Unknown error during import:') + u' %s: %s' % (type(e), e))
-                    moreinfo = _('Resolve other errors first')
-                    messages.append(dict(info, type='error', message=message, moreinfo=moreinfo))
-                    # Failed for some reason, perhaps due to invalid data supplied,
-                    # rollback savepoint and keep going
-                    errors += 1
-                if errors >= 10 and (errors >= i / 10):
-                    messages.append({
-                        'type': 'warning',
-                        'message': _(u"Found more than 10 errors and more than one error per 10 records, interrupted to avoid showing too many errors.")
-                    })
-                    break
-
-        # make 'flush' available to the methods below, in the case where XMLID
-        # resolution fails, for instance
-        flush_self = self.with_context(import_flush=flush)
-
-        # TODO: break load's API instead of smuggling via context?
-        limit = self._context.get('_import_limit')
-        if limit is None:
-            limit = float('inf')
-        extracted = flush_self._extract_records(fields, data, log=messages.append, limit=limit)
-
-        converted = flush_self._convert_records(extracted, log=messages.append)
-
-        info = {'rows': {'to': -1}}
-        for id, xid, record, info in converted:
-            if xid:
-                xid = xid if '.' in xid else "%s.%s" % (current_module, xid)
-                batch_xml_ids.add(xid)
-                # <OpenUpgrade:ADD>
-                # log csv records
-                upgrade_log.log_xml_id(self.env.cr, current_module, xid)
-                # </OpenUpgrade>
-            elif id:
-                record['id'] = id
-            batch.append((xid, record, info))
-
-        flush()
-        if any(message['type'] == 'error' for message in messages):
-            cr.execute('ROLLBACK TO SAVEPOINT model_load')
-            ids = False
-            # cancel all changes done to the registry/ormcache
-            self.pool.reset_changes()
-
-        nextrow = info['rows']['to'] + 1
-        if nextrow < limit:
-            nextrow = 0
-        return {
-            'ids': ids,
-            'messages': messages,
-            'nextrow': nextrow,
-        }
-
-    BaseModel.load = _load
+class BaseModelPatch(OdooPatch):
+    target = models.BaseModel
+    method_names = ['_convert_records']
+
+    @api.model
+    def _convert_records(self, records, log=lambda a: None):
+        """ Log data ids that are imported with `load` """
+        current_module = self.env.context['module']
+        for res in BaseModelPatch._convert_records._original_method(
+                self, records, log=log):
+            _id, xid, _record, _info = res
+            if xid:
+                xid = xid if '.' in xid else "%s.%s" % (current_module, xid)
+                # <OpenUpgrade:ADD>
+                # log csv records
+                upgrade_log.log_xml_id(self.env.cr, current_module, xid)
+                # </OpenUpgrade>
+            yield res

Huge simplification! From 179 lines to 21! The "yield function" seems easier to patch. Very clean.
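The comment above is easy to illustrate: because `_convert_records` is a generator (the "yield function"), a patch only needs to iterate the original generator, add its side effect, and re-yield each row unchanged, instead of re-implementing the whole 179-line `load()`. Below is a stripped-down, stand-alone illustration of that wrapping pattern in plain Python; it is not the Odoo API, and all names are illustrative:

def convert_records(records):
    """ Stand-in for the original generator: yields (id, xid, record, info). """
    for position, record in enumerate(records):
        yield (None, record.get("id"), record, {"rows": {"to": position}})


def make_logging_convert_records(original, log_xml_id, current_module):
    """ Build a replacement generator that logs qualified xml ids and passes
    every row through untouched, mirroring BaseModelPatch._convert_records. """
    def patched(records):
        for res in original(records):
            _id, xid, _record, _info = res
            if xid:
                # qualify bare external ids with the importing module
                full_xid = xid if "." in xid else "%s.%s" % (current_module, xid)
                log_xml_id(full_xid)   # side effect only
            yield res                  # the importer sees exactly the same data
    return patched


logged = []
patched = make_logging_convert_records(convert_records, logged.append, "stock")
rows = list(patched([{"id": "picking_type_out"}, {"name": "no external id"}]))
print(logged)      # -> ['stock.picking_type_out']
print(len(rows))   # -> 2

The whole import pipeline in `load()` stays untouched; only the per-row logging is layered on top, which is why the patched module shrinks so much.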
@@ -1,12 +1 @@
# Minor changes. (call to safe_eval changed)
# otherwise : adapted to V14
from . import graph

# A lot of changes in the core functions.
from . import loading

# Adapted to V14
from . import migration

# Adapted to V14
from . import registry
👍 for making it configurable. (It is also more visible for the user.)

Yes, as I needed this to set the path from XMLRPC.