Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[16.0][UPD] Forward port changes from 14.0 and 15.0 #729

Open
wants to merge 12 commits into
base: 16.0
Choose a base branch
from
Open
20 changes: 12 additions & 8 deletions base_export_async/models/delay_export.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,14 +89,18 @@ def export(self, params):
export_record = self.sudo().create({"user_ids": [(6, 0, users.ids)]})

name = "{}.{}".format(model_name, export_format)
attachment = self.env["ir.attachment"].create(
{
"name": name,
"datas": base64.b64encode(content),
"type": "binary",
"res_model": self._name,
"res_id": export_record.id,
}
attachment = (
self.env["ir.attachment"]
.sudo()
.create(
{
"name": name,
"datas": base64.b64encode(content),
"type": "binary",
"res_model": self._name,
"res_id": export_record.id,
}
)
)

url = "{}/web/content/ir.attachment/{}/datas/{}?download=true".format(
Expand Down
4 changes: 2 additions & 2 deletions base_export_async/tests/test_base_export_async.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
"domain": [],
"context": {"lang": "en_US", "tz": "Europe/Brussels", "uid": 2},
"import_compat": false,
"user_ids": [2]
"user_ids": [6]
}"""
}

Expand All @@ -37,7 +37,7 @@
"domain": [],
"context": {"lang": "en_US", "tz": "Europe/Brussels", "uid": 2},
"import_compat": false,
"user_ids": [2]
"user_ids": [6]
}"""
}

Expand Down
7 changes: 7 additions & 0 deletions base_import_async/data/queue_job_function_data.xml
Original file line number Diff line number Diff line change
@@ -1,7 +1,13 @@
<odoo noupdate="1">
<record model="queue.job.channel" id="channel_base_import">
<field name="name">base_import</field>
<field name="parent_id" ref="queue_job.channel_root" />
</record>

<record id="job_function_base_import_import_split_file" model="queue.job.function">
<field name="model_id" ref="base_import.model_base_import_import" />
<field name="method">_split_file</field>
<field name="channel_id" ref="channel_base_import" />
<field
name="related_action"
eval='{"func_name": "_related_action_attachment"}'
Expand All @@ -13,6 +19,7 @@
>
<field name="model_id" ref="base_import.model_base_import_import" />
<field name="method">_import_one_chunk</field>
<field name="channel_id" ref="channel_base_import" />
<field
name="related_action"
eval='{"func_name": "_related_action_attachment"}'
Expand Down
2 changes: 1 addition & 1 deletion queue_job/README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -441,7 +441,7 @@ be customized in ``Base._job_prepare_context_before_enqueue_keys``.
When you are developing (ie: connector modules) you might want
to bypass the queue job and run your code immediately.

To do so you can set `QUEUE_JOB__NO_DELAY=1` in your enviroment.
To do so you can set `QUEUE_JOB__NO_DELAY=1` in your environment.

**Bypass jobs in tests**

Expand Down
2 changes: 1 addition & 1 deletion queue_job/delay.py
Original file line number Diff line number Diff line change
Expand Up @@ -609,7 +609,7 @@ def _execute_direct(self):
self._generated_job.perform()


class DelayableRecordset(object):
class DelayableRecordset:
"""Allow to delay a method for a recordset (shortcut way)

Usage::
Expand Down
14 changes: 11 additions & 3 deletions queue_job/job.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,7 @@ def identity_exact_hasher(job_):


@total_ordering
class Job(object):
class Job:
"""A Job is a task to execute. It is the in-memory representation of a job.

Jobs are stored in the ``queue.job`` Odoo Model, but they are handled
Expand Down Expand Up @@ -539,8 +539,8 @@ def perform(self):

return self.result

def enqueue_waiting(self):
sql = """
def _get_common_dependent_jobs_query(self):
return """
UPDATE queue_job
SET state = %s
FROM (
Expand Down Expand Up @@ -568,9 +568,17 @@ def enqueue_waiting(self):
AND %s = ALL(jobs.parent_states)
AND state = %s;
"""

def enqueue_waiting(self):
    """Flip dependent jobs of this (done) job from wait_dependencies to pending."""
    query = self._get_common_dependent_jobs_query()
    params = (PENDING, self.uuid, DONE, WAIT_DEPENDENCIES)
    self.env.cr.execute(query, params)
    # The SQL bypassed the ORM; drop the cached ``state`` values.
    self.env["queue.job"].invalidate_model(["state"])

def cancel_dependent_jobs(self):
    """Cancel all jobs that are waiting on this cancelled job.

    Mirror of :meth:`enqueue_waiting`: runs the same dependent-jobs
    UPDATE, but moves children from ``wait_dependencies`` to
    ``cancelled`` instead of ``pending``.
    """
    sql = self._get_common_dependent_jobs_query()
    self.env.cr.execute(sql, (CANCELLED, self.uuid, CANCELLED, WAIT_DEPENDENCIES))
    # BUG FIX: ``Model.invalidate_cache`` was removed in Odoo 16; use
    # ``invalidate_model`` (as ``enqueue_waiting`` above already does)
    # so the raw-SQL state change is reflected in the ORM cache.
    self.env["queue.job"].invalidate_model(["state"])

def store(self):
"""Store the Job"""
job_model = self.env["queue.job"]
Expand Down
10 changes: 5 additions & 5 deletions queue_job/jobrunner/channels.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
_logger = logging.getLogger(__name__)


class PriorityQueue(object):
class PriorityQueue:
"""A priority queue that supports removing arbitrary objects.

Adding an object already in the queue is a no op.
Expand Down Expand Up @@ -103,7 +103,7 @@ def pop(self):


@total_ordering
class ChannelJob(object):
class ChannelJob:
"""A channel job is attached to a channel and holds the properties of a
job that are necessary to prioritise them.

Expand Down Expand Up @@ -205,7 +205,7 @@ def __lt__(self, other):
return self.sorting_key() < other.sorting_key()


class ChannelQueue(object):
class ChannelQueue:
"""A channel queue is a priority queue for jobs.

Jobs with an eta are set aside until their eta is past due, at
Expand Down Expand Up @@ -334,7 +334,7 @@ def get_wakeup_time(self, wakeup_time=0):
return wakeup_time


class Channel(object):
class Channel:
"""A channel for jobs, with a maximum capacity.

When jobs are created by queue_job modules, they may be associated
Expand Down Expand Up @@ -581,7 +581,7 @@ def split_strip(s, sep, maxsplit=-1):
return [x.strip() for x in s.split(sep, maxsplit)]


class ChannelManager(object):
class ChannelManager:
"""High level interface for channels

This class handles:
Expand Down
4 changes: 2 additions & 2 deletions queue_job/jobrunner/runner.py
Original file line number Diff line number Diff line change
Expand Up @@ -259,7 +259,7 @@ def urlopen():
thread.start()


class Database(object):
class Database:
def __init__(self, db_name):
self.db_name = db_name
connection_info = _connection_info_for(db_name)
Expand Down Expand Up @@ -344,7 +344,7 @@ def set_job_enqueued(self, uuid):
)


class QueueJobRunner(object):
class QueueJobRunner:
def __init__(
self,
scheme="http",
Expand Down
2 changes: 2 additions & 0 deletions queue_job/models/queue_job.py
Original file line number Diff line number Diff line change
Expand Up @@ -328,6 +328,8 @@ def _change_job_state(self, state, result=None):
elif state == CANCELLED:
job_.set_cancelled(result=result)
job_.store()
record.env["queue.job"].flush_model()
job_.cancel_dependent_jobs()
else:
raise ValueError("State not supported: %s" % state)

Expand Down
25 changes: 18 additions & 7 deletions queue_job/models/queue_job_function.py
Original file line number Diff line number Diff line change
Expand Up @@ -155,10 +155,12 @@
try:
# as json can't have integers as keys and the field is stored
# as json, convert back to int
retry_pattern = {
int(try_count): postpone_seconds
for try_count, postpone_seconds in self.retry_pattern.items()
}
retry_pattern = {}
for try_count, postpone_value in self.retry_pattern.items():
if isinstance(postpone_value, int):
retry_pattern[int(try_count)] = postpone_value
else:
retry_pattern[int(try_count)] = tuple(postpone_value)

Check warning on line 163 in queue_job/models/queue_job_function.py

View check run for this annotation

Codecov / codecov/patch

queue_job/models/queue_job_function.py#L163

Added line #L163 was not covered by tests
except ValueError:
_logger.error(
"Invalid retry pattern for job function %s,"
Expand Down Expand Up @@ -187,8 +189,9 @@
def _retry_pattern_format_error_message(self):
    """Return the validation error text for a malformed retry pattern."""
    message = _(
        "Unexpected format of Retry Pattern for {}.\n"
        "Example of valid formats:\n"
        "{{1: 300, 5: 600, 10: 1200, 15: 3000}}\n"
        "{{1: (1, 10), 5: (11, 20), 10: (21, 30), 15: (100, 300)}}"
    )
    return message.format(self.name)

@api.constrains("retry_pattern")
Expand All @@ -201,12 +204,20 @@
all_values = list(retry_pattern) + list(retry_pattern.values())
for value in all_values:
try:
int(value)
self._retry_value_type_check(value)
except ValueError as ex:
raise exceptions.UserError(
record._retry_pattern_format_error_message()
) from ex

def _retry_value_type_check(self, value):
if isinstance(value, (tuple, list)):
if len(value) != 2:
raise ValueError
[self._retry_value_type_check(element) for element in value]
return
int(value)

def _related_action_format_error_message(self):
return _(
"Unexpected format of Related Action for {}.\n"
Expand Down
2 changes: 1 addition & 1 deletion queue_job/readme/USAGE.rst
Original file line number Diff line number Diff line change
Expand Up @@ -286,7 +286,7 @@ be customized in ``Base._job_prepare_context_before_enqueue_keys``.
When you are developing (ie: connector modules) you might want
to bypass the queue job and run your code immediately.

To do so you can set `QUEUE_JOB__NO_DELAY=1` in your enviroment.
To do so you can set `QUEUE_JOB__NO_DELAY=1` in your environment.

**Bypass jobs in tests**

Expand Down
2 changes: 1 addition & 1 deletion queue_job/static/description/index.html
Original file line number Diff line number Diff line change
Expand Up @@ -752,7 +752,7 @@ <h3><a class="toc-backref" href="#toc-entry-7">Configure default options for job
<strong>Bypass jobs on running Odoo</strong></p>
<p>When you are developing (ie: connector modules) you might want
to bypass the queue job and run your code immediately.</p>
<p>To do so you can set <cite>QUEUE_JOB__NO_DELAY=1</cite> in your enviroment.</p>
<p>To do so you can set <cite>QUEUE_JOB__NO_DELAY=1</cite> in your environment.</p>
<p><strong>Bypass jobs in tests</strong></p>
<p>When writing tests on job-related methods is always tricky to deal with
delayed recordsets. To make your testing life easier
Expand Down
2 changes: 1 addition & 1 deletion queue_job/tests/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -428,7 +428,7 @@ def __init__(

def setUp(self):
    """Prepare the doctest case and log which doctest is starting."""
    super().setUp()
    logger = logging.getLogger(__name__)
    logger.info("Running tests for %s", self._dt_test.name)


Expand Down
21 changes: 21 additions & 0 deletions queue_job/views/queue_job_views.xml
Original file line number Diff line number Diff line change
Expand Up @@ -250,6 +250,22 @@
string="Cancelled"
domain="[('state', '=', 'cancelled')]"
/>
<separator />
<filter
name="last_24_hours"
string="Last 24 hours"
domain="[('date_created', '&gt;=', (context_today() - datetime.timedelta(days=1)).strftime('%Y-%m-%d'))]"
/>
<filter
name="last_7_days"
string="Last 7 days"
domain="[('date_created', '&gt;=', (context_today() - datetime.timedelta(days=7)).strftime('%Y-%m-%d'))]"
/>
<filter
name="last_30_days"
string="Last 30 days"
domain="[('date_created', '&gt;=', (context_today() - datetime.timedelta(days=30)).strftime('%Y-%m-%d'))]"
/>
<group expand="0" string="Group By">
<filter
name="group_by_channel"
Expand Down Expand Up @@ -286,6 +302,11 @@
string="Graph"
context="{'group_by': 'graph_uuid'}"
/>
<filter
name="group_by_date_created"
string="Created date"
context="{'group_by': 'date_created'}"
/>
</group>
</search>
</field>
Expand Down
5 changes: 4 additions & 1 deletion queue_job_cron/models/ir_cron.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,13 +28,16 @@
comodel_name="queue.job.channel",
compute="_compute_run_as_queue_job",
readonly=False,
store=True,
string="Channel",
)

@api.depends("run_as_queue_job")
def _compute_run_as_queue_job(self):
    """Default the job channel when a cron is toggled to queue-job mode."""
    for cron in self:
        if cron.channel_id:
            # Keep any channel already chosen (the field is stored).
            continue
        default_channel = (
            self.env.ref("queue_job_cron.channel_root_ir_cron").id
            if cron.run_as_queue_job
            else False
        )
        cron.channel_id = default_channel
Expand Down
2 changes: 1 addition & 1 deletion queue_job_cron_jobrunner/models/queue_job.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ def _acquire_one_job(self):
FROM queue_job
WHERE state = 'pending'
AND (eta IS NULL OR eta <= (now() AT TIME ZONE 'UTC'))
ORDER BY date_created DESC
ORDER BY priority, date_created
LIMIT 1 FOR NO KEY UPDATE SKIP LOCKED
"""
)
Expand Down
30 changes: 29 additions & 1 deletion queue_job_cron_jobrunner/tests/test_queue_job.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,5 +67,33 @@ def test_queue_job_cron_trigger_enqueue_dependencies(self):

self.assertEqual(job_record.state, "done", "Processed OK")
# if the state is "waiting_dependencies", it means the "enqueue_waiting()"
# step has not been doen when the parent job has been done
# step has not been done when the parent job has been done
self.assertEqual(job_record_depends.state, "done", "Processed OK")

def test_acquire_one_job_use_priority(self):
    """The job with the lowest priority value is acquired first."""
    with freeze_time("2024-01-01 10:01:01"):
        self.env["res.partner"].with_delay(priority=3).create({"name": "test"})

    with freeze_time("2024-01-01 10:02:01"):
        expected = (
            self.env["res.partner"].with_delay(priority=1).create({"name": "test"})
        )

    with freeze_time("2024-01-01 10:03:01"):
        self.env["res.partner"].with_delay(priority=2).create({"name": "test"})

    acquired = self.env["queue.job"]._acquire_one_job()
    self.assertEqual(acquired, expected.db_record())

def test_acquire_one_job_consume_the_oldest_first(self):
    """With equal priorities, the oldest job is acquired first."""
    oldest = None
    for stamp in (
        "2024-01-01 10:01:01",
        "2024-01-01 10:02:01",
        "2024-01-01 10:03:01",
    ):
        with freeze_time(stamp):
            job = (
                self.env["res.partner"]
                .with_delay(priority=30)
                .create({"name": "test"})
            )
        if oldest is None:
            oldest = job

    acquired = self.env["queue.job"]._acquire_one_job()
    self.assertEqual(acquired, oldest.db_record())
2 changes: 1 addition & 1 deletion queue_job_subscribe/tests/test_job_subscribe.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@

class TestJobSubscribe(common.TransactionCase):
def setUp(self):
super(TestJobSubscribe, self).setUp()
super().setUp()
grp_queue_job_manager = self.ref("queue_job.group_queue_job_manager")
self.other_partner_a = self.env["res.partner"].create(
{"name": "My Company a", "is_company": True, "email": "[email protected]"}
Expand Down
4 changes: 2 additions & 2 deletions test_queue_job/models/test_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ class ModelTestQueueJob(models.Model):
# to test the context is serialized/deserialized properly
@api.model
def _job_prepare_context_before_enqueue_keys(self):
    """Context keys that must survive the enqueue round-trip."""
    context_keys = ("tz", "lang", "allowed_company_ids")
    return context_keys

def testing_method(self, *args, **kwargs):
"""Method used for tests
Expand Down Expand Up @@ -76,7 +76,7 @@ def job_with_retry_pattern__no_zero(self):
return

def mapped(self, func):
    # Plain delegation to the standard implementation; overridden here
    # so jobs can target this method in tests.
    return super().mapped(func)

def job_alter_mutable(self, mutable_arg, mutable_kwarg=None):
mutable_arg.append(2)
Expand Down
Loading
Loading