+import asyncio
import pytest

from sentry_sdk import start_transaction
from sentry_sdk.integrations.arq import ArqIntegration

+import arq.worker
+from arq import cron
from arq.connections import ArqRedis
from arq.jobs import Job
from arq.utils import timestamp_ms
-from arq.worker import Retry, Worker

from fakeredis.aioredis import FakeRedis


+def async_partial(async_fn, *args, **kwargs):
+    # asyncio.iscoroutinefunction (used in the integration code) does not
+    # detect async functions wrapped in functools.partial objects on
+    # Python < 3.8. This helper returns a plain coroutine function instead.
+    async def wrapped(ctx):
+        return await async_fn(ctx, *args, **kwargs)
+
+    return wrapped
+
+
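For context, a quick sketch of the behavior this helper works around (assumes the `async_partial` above is in scope; `sample_task` is just a stand-in coroutine):

```python
import asyncio
import functools


async def sample_task(ctx, a, b=0):
    return a / b


partial_fn = functools.partial(sample_task, a=1, b=1)
wrapped_fn = async_partial(sample_task, a=1, b=1)

# functools.partial hides the underlying coroutine function from
# asyncio.iscoroutinefunction on Python < 3.8:
print(asyncio.iscoroutinefunction(partial_fn))  # False on < 3.8, True on 3.8+

# the async_partial wrapper is itself an `async def`, so it is always detected:
print(asyncio.iscoroutinefunction(wrapped_fn))  # True
```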
@pytest.fixture(autouse=True)
def patch_fakeredis_info_command():
    from fakeredis._fakesocket import FakeSocket
@@ -28,7 +40,10 @@ def info(self, section):

@pytest.fixture
def init_arq(sentry_init):
-    def inner(functions, allow_abort_jobs=False):
+    def inner(functions_=None, cron_jobs_=None, allow_abort_jobs_=False):
+        functions_ = functions_ or []
+        cron_jobs_ = cron_jobs_ or []
+
        sentry_init(
            integrations=[ArqIntegration()],
            traces_sample_rate=1.0,
@@ -38,9 +53,16 @@ def inner(functions, allow_abort_jobs=False):

        server = FakeRedis()
        pool = ArqRedis(pool_or_conn=server.connection_pool)
-        return pool, Worker(
-            functions, redis_pool=pool, allow_abort_jobs=allow_abort_jobs
-        )
+
+        class WorkerSettings:
+            functions = functions_
+            cron_jobs = cron_jobs_
+            redis_pool = pool
+            allow_abort_jobs = allow_abort_jobs_
+
+        worker = arq.worker.create_worker(WorkerSettings)
+
+        return pool, worker

    return inner

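For orientation, a hypothetical test using this fixture could look like the sketch below; `add` and `test_add` are illustrative names, while `init_arq`, `capture_events`, and `timestamp_ms` come from this file and its conftest:

```python
async def add(ctx, a, b):
    return a + b


add.__qualname__ = add.__name__


@pytest.mark.asyncio
async def test_add(capture_events, init_arq):
    pool, worker = init_arq(functions_=[add])
    events = capture_events()

    # enqueue through the pool, then run the job directly on the worker
    job = await pool.enqueue_job("add", 1, b=2)
    await worker.run_job(job.job_id, timestamp_ms())

    (event,) = events
    assert event["type"] == "transaction"
    assert event["transaction"] == "add"
```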
@@ -70,7 +92,7 @@ async def increase(ctx, num):
async def test_job_retry(capture_events, init_arq):
    async def retry_job(ctx):
        if ctx["job_try"] < 2:
-            raise Retry
+            raise arq.worker.Retry

    retry_job.__qualname__ = retry_job.__name__

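If a delayed retry is wanted instead of an immediate one, arq's `Retry` accepts an optional `defer` argument (seconds or a `timedelta`); a hedged variant of the job above:

```python
async def retry_job_deferred(ctx):
    if ctx["job_try"] < 2:
        # re-enqueue the job and wait at least 10 seconds before the next try
        raise arq.worker.Retry(defer=10)
```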
@@ -105,36 +127,69 @@ async def division(_, a, b=0):

    division.__qualname__ = division.__name__

-    pool, worker = init_arq([division])
+    cron_func = async_partial(division, a=1, b=int(not job_fails))
+    cron_func.__qualname__ = division.__name__
+
+    cron_job = cron(cron_func, minute=0, run_at_startup=True)
+
+    pool, worker = init_arq(functions_=[division], cron_jobs_=[cron_job])

    events = capture_events()

    job = await pool.enqueue_job("division", 1, b=int(not job_fails))
    await worker.run_job(job.job_id, timestamp_ms())

-    if job_fails:
-        error_event = events.pop(0)
-        assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
-        assert error_event["exception"]["values"][0]["mechanism"]["type"] == "arq"
+    loop = asyncio.get_event_loop()
+    task = loop.create_task(worker.async_run())
+    await asyncio.sleep(1)

-    (event,) = events
-    assert event["type"] == "transaction"
-    assert event["transaction"] == "division"
-    assert event["transaction_info"] == {"source": "task"}
+    task.cancel()
+
+    await worker.close()

    if job_fails:
-        assert event["contexts"]["trace"]["status"] == "internal_error"
-    else:
-        assert event["contexts"]["trace"]["status"] == "ok"
-
-    assert "arq_task_id" in event["tags"]
-    assert "arq_task_retry" in event["tags"]
-
-    extra = event["extra"]["arq-job"]
-    assert extra["task"] == "division"
-    assert extra["args"] == [1]
-    assert extra["kwargs"] == {"b": int(not job_fails)}
-    assert extra["retry"] == 1
+        error_func_event = events.pop(0)
+        error_cron_event = events.pop(1)
+
+        assert error_func_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
+        assert error_func_event["exception"]["values"][0]["mechanism"]["type"] == "arq"
+
+        func_extra = error_func_event["extra"]["arq-job"]
+        assert func_extra["task"] == "division"
+
+        assert error_cron_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
+        assert error_cron_event["exception"]["values"][0]["mechanism"]["type"] == "arq"
+
+        cron_extra = error_cron_event["extra"]["arq-job"]
+        assert cron_extra["task"] == "cron:division"
+
+    [func_event, cron_event] = events
+
+    assert func_event["type"] == "transaction"
+    assert func_event["transaction"] == "division"
+    assert func_event["transaction_info"] == {"source": "task"}
+
+    assert "arq_task_id" in func_event["tags"]
+    assert "arq_task_retry" in func_event["tags"]
+
+    func_extra = func_event["extra"]["arq-job"]
+
+    assert func_extra["task"] == "division"
+    assert func_extra["kwargs"] == {"b": int(not job_fails)}
+    assert func_extra["retry"] == 1
+
+    assert cron_event["type"] == "transaction"
+    assert cron_event["transaction"] == "cron:division"
+    assert cron_event["transaction_info"] == {"source": "task"}
+
+    assert "arq_task_id" in cron_event["tags"]
+    assert "arq_task_retry" in cron_event["tags"]
+
+    cron_extra = cron_event["extra"]["arq-job"]
+
+    assert cron_extra["task"] == "cron:division"
+    assert cron_extra["kwargs"] == {}
+    assert cron_extra["retry"] == 1
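The `cron:division` values asserted above follow from arq's default cron naming: unless `name=` is passed, `cron(...)` names the job `"cron:" + coroutine.__qualname__`, which is why the test pins `cron_func.__qualname__` beforehand. A sketch of the explicit form, assuming arq uses a provided `name` as-is:

```python
# equivalent to the cron job built in the test above, with the name spelled out
cron_job = cron(cron_func, name="cron:division", minute=0, run_at_startup=True)
```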


@pytest.mark.asyncio