apache-airflow-providers-edge3 1.1.2rc2__py3-none-any.whl → 1.1.3__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only.
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "1.1.2"
+__version__ = "1.1.3"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.10.0"
@@ -301,7 +301,7 @@ with DAG(
         except AirflowNotFoundException:
             print("Connection 'integration_test' not found... but also OK.")
 
-    command = CmdOperator(task_id="command", command="echo Parameter is {{params.mapping_count}}")
+    command = CmdOperator(task_id="command", command="echo Hello World")
 
     def python_call():
         print("Hello world")
@@ -140,20 +140,40 @@ class EdgeExecutor(BaseExecutor):
         del self.edge_queued_tasks[key]
 
         self.validate_airflow_tasks_run_command(command)  # type: ignore[attr-defined]
-        session.add(
-            EdgeJobModel(
+
+        # Check if job already exists with same dag_id, task_id, run_id, map_index, try_number
+        existing_job = (
+            session.query(EdgeJobModel)
+            .filter_by(
                 dag_id=key.dag_id,
                 task_id=key.task_id,
                 run_id=key.run_id,
                 map_index=key.map_index,
                 try_number=key.try_number,
-                state=TaskInstanceState.QUEUED,
-                queue=queue or DEFAULT_QUEUE,
-                concurrency_slots=task_instance.pool_slots,
-                command=str(command),
             )
+            .first()
         )
 
+        if existing_job:
+            existing_job.state = TaskInstanceState.QUEUED
+            existing_job.queue = queue or DEFAULT_QUEUE
+            existing_job.concurrency_slots = task_instance.pool_slots
+            existing_job.command = str(command)
+        else:
+            session.add(
+                EdgeJobModel(
+                    dag_id=key.dag_id,
+                    task_id=key.task_id,
+                    run_id=key.run_id,
+                    map_index=key.map_index,
+                    try_number=key.try_number,
+                    state=TaskInstanceState.QUEUED,
+                    queue=queue or DEFAULT_QUEUE,
+                    concurrency_slots=task_instance.pool_slots,
+                    command=str(command),
+                )
+            )
+
     @provide_session
     def queue_workload(
         self,
@@ -168,20 +188,40 @@ class EdgeExecutor(BaseExecutor):
 
         task_instance = workload.ti
         key = task_instance.key
-        session.add(
-            EdgeJobModel(
+
+        # Check if job already exists with same dag_id, task_id, run_id, map_index, try_number
+        existing_job = (
+            session.query(EdgeJobModel)
+            .filter_by(
                 dag_id=key.dag_id,
                 task_id=key.task_id,
                 run_id=key.run_id,
                 map_index=key.map_index,
                 try_number=key.try_number,
-                state=TaskInstanceState.QUEUED,
-                queue=task_instance.queue,
-                concurrency_slots=task_instance.pool_slots,
-                command=workload.model_dump_json(),
             )
+            .first()
         )
 
+        if existing_job:
+            existing_job.state = TaskInstanceState.QUEUED
+            existing_job.queue = task_instance.queue
+            existing_job.concurrency_slots = task_instance.pool_slots
+            existing_job.command = workload.model_dump_json()
+        else:
+            session.add(
+                EdgeJobModel(
+                    dag_id=key.dag_id,
+                    task_id=key.task_id,
+                    run_id=key.run_id,
+                    map_index=key.map_index,
+                    try_number=key.try_number,
+                    state=TaskInstanceState.QUEUED,
+                    queue=task_instance.queue,
+                    concurrency_slots=task_instance.pool_slots,
+                    command=workload.model_dump_json(),
+                )
+            )
+
     def _check_worker_liveness(self, session: Session) -> bool:
         """Reset worker state if heartbeat timed out."""
         changed = False
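Both executor hunks above apply the same pattern: before queuing, look up an EdgeJobModel row by the composite key (dag_id, task_id, run_id, map_index, try_number) and, if one already exists (for example when the same task try is queued again), update its state, queue, concurrency slots, and command in place rather than inserting a duplicate row. The following is a minimal, self-contained sketch of that update-or-insert pattern; the Job model and upsert_job helper are illustrative stand-ins, not part of the provider.

    # Sketch only: simplified model and helper names are assumptions, not provider code.
    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()


    class Job(Base):
        __tablename__ = "job"
        # Composite primary key mirroring (dag_id, task_id, run_id, map_index, try_number).
        dag_id = Column(String, primary_key=True)
        task_id = Column(String, primary_key=True)
        run_id = Column(String, primary_key=True)
        map_index = Column(Integer, primary_key=True)
        try_number = Column(Integer, primary_key=True)
        state = Column(String)
        command = Column(String)


    def upsert_job(session: Session, key: dict, state: str, command: str) -> None:
        """Queuing the same key twice must not violate the primary key, so update in place."""
        existing = session.query(Job).filter_by(**key).first()
        if existing:
            existing.state = state
            existing.command = command
        else:
            session.add(Job(**key, state=state, command=command))


    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    key = {"dag_id": "d", "task_id": "t", "run_id": "r", "map_index": -1, "try_number": 1}
    with Session(engine) as session:
        upsert_job(session, key, "queued", "echo first")
        upsert_job(session, key, "queued", "echo second")  # same key: row is updated, not duplicated
        session.commit()
        assert session.query(Job).count() == 1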
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-edge3
-Version: 1.1.2rc2
+Version: 1.1.3
 Summary: Provider package apache-airflow-providers-edge3 for Apache Airflow
 Keywords: airflow-provider,edge3,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,12 +20,12 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.10.0rc1
+Requires-Dist: apache-airflow>=2.10.0
 Requires-Dist: pydantic>=2.11.0
 Requires-Dist: retryhttp>=1.2.0,!=1.3.0
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-edge3/1.1.2/changelog.html
-Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-edge3/1.1.2
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.3/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.3
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -56,7 +56,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
 Package ``apache-airflow-providers-edge3``
 
-Release: ``1.1.2``
+Release: ``1.1.3``
 
 Release Date: ``|PypiReleaseDate|``
 
@@ -82,7 +82,7 @@ This is a provider package for ``edge3`` provider. All classes for this provider
 are in ``airflow.providers.edge3`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.2/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.3/>`_.
 
 Installation
 ------------
@@ -105,5 +105,5 @@ PIP package Version required
 ================== ===================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.2/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-edge3/1.1.3/changelog.html>`_.
 
@@ -1,5 +1,5 @@
 airflow/providers/edge3/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/edge3/__init__.py,sha256=cSvc8gMIo0Zq7u48rdO-1fw66BpyQg8IRkn6osMkTlY,1494
+airflow/providers/edge3/__init__.py,sha256=BjORgGO_bF2ofz3p_XipVRgv7thzIxVbHYrpwdW4qvA,1494
 airflow/providers/edge3/get_provider_info.py,sha256=Ek27-dB4UALHUFYoYjtoQIGq0p7zeHcEgmELHvpVmCU,6836
 airflow/providers/edge3/version_compat.py,sha256=j5PCtXvZ71aBjixu-EFTNtVDPsngzzs7os0ZQDgFVDk,1536
 airflow/providers/edge3/cli/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -11,9 +11,9 @@ airflow/providers/edge3/cli/worker.py,sha256=x1SX5J2mFgfUTERKv5m8lDxNnoss3BV4F7J
 airflow/providers/edge3/example_dags/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/edge3/example_dags/integration_test.py,sha256=4iXvv379nFPxF1_gfbqb5NW1z-24MscYWLDxDwTNbPo,6120
 airflow/providers/edge3/example_dags/win_notepad.py,sha256=zYcrKqODN4KLZQ-5wNnZQQskrDd5LA-nKJNgKQDntSE,2832
-airflow/providers/edge3/example_dags/win_test.py,sha256=wJCeij-iPQOnWZlDU7Oqcv6mkE8cVe03rOFiB8We_oQ,13418
+airflow/providers/edge3/example_dags/win_test.py,sha256=aZ5UbgrDsxmxPgxE6OXZV-LBhRgH0QR7xCt5mcxT7uQ,13392
 airflow/providers/edge3/executors/__init__.py,sha256=y830gGSKCvjOcLwLuCDp84NCrHWWB9RSSH1qvJpFhyY,923
-airflow/providers/edge3/executors/edge_executor.py,sha256=arcq959o4l5x8yjt6f4ck1pDSDFzkGiAh4jGQ_jmqso,16512
+airflow/providers/edge3/executors/edge_executor.py,sha256=fsGOiJNp6RNT1xGFtV8G0Y-nUfT2daBtZeJBh2MZAZ4,18013
 airflow/providers/edge3/models/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/edge3/models/edge_job.py,sha256=3D5HAzcVkyI2bxl3pVbbRxjIz--Tnr_eNFiw2oI6gEQ,3167
 airflow/providers/edge3/models/edge_logs.py,sha256=bNstp7gR54O2vbxzz4NTL0erbifFbGUjZ-YOM0I4sqk,2768
@@ -35,7 +35,7 @@ airflow/providers/edge3/worker_api/routes/health.py,sha256=XxqIppnRA138Q6mAHCdyL
 airflow/providers/edge3/worker_api/routes/jobs.py,sha256=UK1w6nXEUadOLwE9abZ4jHH4KtbvXcwaAF0EnwSa3y4,5733
 airflow/providers/edge3/worker_api/routes/logs.py,sha256=uk0SZ5hAimj3sAcq1FYCDu0AXYNeTeyjZDGBvw-986E,4945
 airflow/providers/edge3/worker_api/routes/worker.py,sha256=BGARu1RZ74lW9X-ltuMYbbVXczm_MZdqHaai2MhDWtY,8969
-apache_airflow_providers_edge3-1.1.2rc2.dist-info/entry_points.txt,sha256=7WUIGfd3o9NvvbK5trbZxNXTgYGc6pqg74wZPigbx5o,206
-apache_airflow_providers_edge3-1.1.2rc2.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
-apache_airflow_providers_edge3-1.1.2rc2.dist-info/METADATA,sha256=khGdhmJivDTg4Jpo-QLB4KGFCN0W62SKy6eUTTn42SU,4799
-apache_airflow_providers_edge3-1.1.2rc2.dist-info/RECORD,,
+apache_airflow_providers_edge3-1.1.3.dist-info/entry_points.txt,sha256=7WUIGfd3o9NvvbK5trbZxNXTgYGc6pqg74wZPigbx5o,206
+apache_airflow_providers_edge3-1.1.3.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_edge3-1.1.3.dist-info/METADATA,sha256=Q_0GBApe8amF0E0-_69-wVMrDOc9QBYaKTNkTgsY8yA,4779
+apache_airflow_providers_edge3-1.1.3.dist-info/RECORD,,