airflow-cli 0.1.1.tar.gz → 0.1.12.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
--- airflow_cli-0.1.1/PKG-INFO
+++ airflow_cli-0.1.12/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: airflow-cli
-Version: 0.1.1
+Version: 0.1.12
 Summary: CLI to simplify setting up Airflow with Docker.
 Author-email: LEMA-UFPB <ufpb.lema@gmail.com>
 License-Expression: MIT
@@ -135,7 +135,7 @@ This will run flake8 linter on the `dags/` folder to check for Python syntax iss
 python -m build
 
 # Install built package
-pip install dist/airflow_docker_helper-*.whl
+pip install dist/airflow_cli-*.whl
 ```
 
 ## Publishing to PyPI
--- airflow_cli-0.1.1/README.md
+++ airflow_cli-0.1.12/README.md
@@ -120,7 +120,7 @@ This will run flake8 linter on the `dags/` folder to check for Python syntax iss
 python -m build
 
 # Install built package
-pip install dist/airflow_docker_helper-*.whl
+pip install dist/airflow_cli-*.whl
 ```
 
 ## Publishing to PyPI
--- /dev/null
+++ airflow_cli-0.1.12/airflow_cli/docker-compose.yml
@@ -0,0 +1,251 @@
+---
+x-airflow-common: &airflow-common
+  environment: &airflow-common-env
+    AIRFLOW__CORE__EXECUTOR: CeleryExecutor
+    AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://postgres:postgres@postgres/airflow
+    AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://postgres:postgres@postgres/airflow
+    AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://postgres:postgres@postgres/airflow
+    AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0
+    AIRFLOW_CONN_MONGO_DEFAULT: mongo://mongo:mongo@mongo:27017/?authSource=admin
+    AIRFLOW__CORE__FERNET_KEY: "pMrhjIcqUNHMYRk_ZOBmMptWR6o1DahCXCKn5lEMpzM="
+    AIRFLOW__API__SECRET_KEY: "891b3faded3f6bb751a452c8566b2b90becf3d"
+    AIRFLOW__API_AUTH__JWT_SECRET: "231b3f1ded3f1bb754a352c8166c2b90bqcf32"
+    AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: "true"
+    AIRFLOW__SCHEDULER__ENABLE_HEALTH_CHECK: "true"
+    AIRFLOW__API__AUTH_BACKENDS: "airflow.api.auth.backend.basic_auth"
+    AIRFLOW__CORE__LOAD_EXAMPLES: "false"
+    AIRFLOW__CORE__EXECUTION_API_SERVER_URL: "http://airflow-apiserver:8080/execution/"
+    AIRFLOW_VAR_DATA_DIR: "/opt/airflow/data"
+    AIRFLOW__CORE__SIMPLE_AUTH_MANAGER_ALL_ADMINS: "true"
+    AIRFLOW__WEBSERVER__SIMPLE_AUTH_MANAGER_USERS: "airflow:admin"
+  volumes:
+    - ./dags:/opt/airflow/dags
+  user: "50000:0"
+  image: "harbor.lema.ufpb.br/public/airflow:v3.0.2-python3.11-spark3.5.5-r2"
+  depends_on: &airflow-common-depends-on
+    redis:
+      condition: service_healthy
+    postgres:
+      condition: service_healthy
+services:
+  redis:
+    image: redis:7.0.4-alpine
+    container_name: redis-container
+    hostname: redis
+    expose:
+      - 6379
+    ports:
+      - 6379:6379
+    healthcheck:
+      test: ["CMD", "redis-cli", "ping"]
+      interval: 5s
+      timeout: 30s
+      retries: 50
+    restart: unless-stopped
+    networks:
+      - dev
+  postgres:
+    container_name: postgres-container
+    hostname: postgres
+    image: postgres:16-alpine
+    ports:
+      - 5432:5432
+    environment:
+      POSTGRES_USER: postgres
+      POSTGRES_PASSWORD: postgres
+      POSTGRES_DB: airflow
+    volumes:
+      - postgresdb_data:/var/lib/postgresql/data
+    healthcheck:
+      test: ["CMD", "pg_isready", "-U", "postgres"]
+      interval: 5s
+      retries: 5
+    restart: unless-stopped
+    networks:
+      - dev
+  mongo:
+    container_name: mongo-container
+    hostname: mongo
+    image: mongo:8.0
+    environment:
+      MONGO_INITDB_ROOT_USERNAME: mongo
+      MONGO_INITDB_ROOT_PASSWORD: mongo
+    ports:
+      - 27017:27017
+    volumes:
+      - mongodb_data:/data/db
+    restart: unless-stopped
+    networks:
+      - dev
+  dbgate:
+    image: dbgate/dbgate:6.4.1-alpine
+    container_name: dbgate
+    hostname: dbgate-webserver
+    volumes:
+      - dbgate-data:/root/.dbgate
+    ports:
+      - 3100:3000
+    environment:
+      CONNECTIONS: con1,con2,con3
+      LABEL_con1: Postgres
+      SERVER_con1: postgres
+      USER_con1: postgres
+      PASSWORD_con1: postgres
+      PORT_con1: 5432
+      ENGINE_con1: postgres@dbgate-plugin-postgres
+      LABEL_con2: MongoDB
+      URL_con2: mongodb://mongo:mongo@mongo:27017
+      ENGINE_con2: mongo@dbgate-plugin-mongo
+    restart: unless-stopped
+    networks:
+      - dev
+
+  airflow-apiserver:
+    <<: *airflow-common
+    container_name: airflow-api-server-container
+    command: api-server
+    ports:
+      - "8080:8080"
+    healthcheck:
+      test: ["CMD", "curl", "--fail", "http://localhost:8080/api/v2/version"]
+      interval: 30s
+      timeout: 10s
+      retries: 5
+      start_period: 30s
+    restart: always
+    networks:
+      - dev
+    depends_on:
+      <<: *airflow-common-depends-on
+      airflow-init:
+        condition: service_completed_successfully
+
+  airflow-scheduler:
+    <<: *airflow-common
+    container_name: airflow-scheduler-container
+    command: scheduler
+    healthcheck:
+      test: ["CMD", "curl", "--fail", "http://localhost:8974/health"]
+      interval: 30s
+      timeout: 10s
+      retries: 5
+      start_period: 30s
+    restart: always
+    networks:
+      - dev
+    depends_on:
+      <<: *airflow-common-depends-on
+      airflow-init:
+        condition: service_completed_successfully
+
+  airflow-dag-processor:
+    <<: *airflow-common
+    container_name: airflow-dag-processor-container
+    command: dag-processor
+    healthcheck:
+      test:
+        [
+          "CMD-SHELL",
+          'airflow jobs check --job-type DagProcessorJob --hostname "$${HOSTNAME}"',
+        ]
+      interval: 30s
+      timeout: 10s
+      retries: 5
+      start_period: 30s
+    restart: always
+    networks:
+      - dev
+    depends_on:
+      <<: *airflow-common-depends-on
+      airflow-init:
+        condition: service_completed_successfully
+
+  airflow-worker:
+    <<: *airflow-common
+    container_name: airflow-worker-container
+    command: celery worker
+    healthcheck:
+      test:
+        - "CMD-SHELL"
+        - 'celery --app airflow.providers.celery.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}" || celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"'
+      interval: 30s
+      timeout: 10s
+      retries: 5
+      start_period: 30s
+    environment:
+      <<: *airflow-common-env
+      DUMB_INIT_SETSID: "0"
+    restart: always
+    networks:
+      - dev
+    depends_on:
+      <<: *airflow-common-depends-on
+      airflow-apiserver:
+        condition: service_healthy
+      airflow-init:
+        condition: service_completed_successfully
+
+  airflow-triggerer:
+    <<: *airflow-common
+    container_name: airflow-triggerer-container
+    command: triggerer
+    healthcheck:
+      test:
+        [
+          "CMD-SHELL",
+          'airflow jobs check --job-type TriggererJob --hostname "$${HOSTNAME}"',
+        ]
+      interval: 30s
+      timeout: 10s
+      retries: 5
+      start_period: 30s
+    restart: always
+    networks:
+      - dev
+    depends_on:
+      <<: *airflow-common-depends-on
+      airflow-init:
+        condition: service_completed_successfully
+
+  airflow-init:
+    <<: *airflow-common
+    container_name: airflow-init
+    entrypoint: /bin/bash
+    command:
+      - -c
+      - |
+        mkdir -p /sources/logs /sources/dags /sources/plugins
+        chown -R "50000:0" /sources/{logs,dags,plugins}
+        exec /entrypoint airflow version
+    environment:
+      <<: *airflow-common-env
+      _AIRFLOW_DB_MIGRATE: "true"
+      _AIRFLOW_WWW_USER_CREATE: "true"
+      _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow}
+      _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow}
+    user: "0:0"
+    networks:
+      - dev
+
+  airflow-cli:
+    <<: *airflow-common
+    profiles:
+      - debug
+    environment:
+      <<: *airflow-common-env
+      CONNECTION_CHECK_MAX_COUNT: "0"
+    command:
+      - bash
+      - -c
+      - airflow
+    depends_on:
+      <<: *airflow-common-depends-on
+    networks:
+      - dev
+
+networks:
+  dev:
+volumes:
+  mongodb_data:
+  postgresdb_data:
+  dbgate-data:
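
The `airflow-cli` service above is gated behind the `debug` profile, so a plain `docker compose up` never starts it; it has to be targeted explicitly. A minimal sketch of driving it from Python, mirroring the `subprocess` style the package itself uses (the service and profile names come from the compose file above; the `airflow version` command is illustrative, and Docker plus this compose file in the working directory are assumed):

```python
# Minimal sketch: run a one-off command in the debug-profile "airflow-cli"
# service. "docker compose run" replaces the service's default command
# ("bash -c airflow") with whatever follows the service name.
import subprocess

subprocess.run(
    ["docker", "compose", "--profile", "debug", "run", "--rm",
     "airflow-cli", "airflow", "version"],
    check=True,
)
```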
--- airflow_cli-0.1.1/airflow_cli/docker_utils.py
+++ airflow_cli-0.1.12/airflow_cli/docker_utils.py
@@ -3,6 +3,7 @@ import logging
 from glob import glob
 import yaml
 import os
+import shutil
 
 log = logging.getLogger(__name__)
 
@@ -11,14 +12,32 @@ def docker_up():
     env = os.environ.copy()
     env["AIRFLOW_UID"] = "50000"
 
-    compose_file = os.path.join(os.path.dirname(__file__), "docker-compose.yml")
-    subprocess.run(["docker", "compose", "-f", compose_file, "up", "-d"], env=env, check=True)
-    log.info("✅ Docker environment is ready: http://localhost:8080")
+    # Check whether a docker-compose.yml already exists in the current directory
+    local_compose_file = "docker-compose.yml"
+    if not os.path.exists(local_compose_file):
+        log.info("📋 Copying docker-compose.yml into the project...")
+        # Copy the file shipped inside the package into the current directory
+        package_compose_file = os.path.join(os.path.dirname(__file__), "docker-compose.yml")
+        shutil.copy2(package_compose_file, local_compose_file)
+        log.info("✅ docker-compose.yml copied successfully")
+
+    # Make sure the dags folder exists
+    dags_path = "dags"
+    if not os.path.exists(dags_path):
+        log.info("📁 Creating 'dags' folder...")
+        os.makedirs(dags_path, exist_ok=True)
+
+    try:
+        subprocess.run(["docker", "compose", "up", "-d"], env=env, check=True)
+        log.info("✅ Docker environment is ready: http://localhost:8080")
+    except subprocess.CalledProcessError as e:
+        log.error(f"❌ Error starting Docker: {e}")
+        log.error("Check that Docker is running and working correctly")
+        raise
 
 def docker_down():
     log.info("🐳 Stopping Docker environment...")
-    compose_file = os.path.join(os.path.dirname(__file__), "docker-compose.yml")
-    subprocess.run(["docker", "compose", "-f", compose_file, "down"], check=False)
+    subprocess.run(["docker", "compose", "down"], check=False)
 
 def run_dag():
     log.info("🚀 Running DAG in Docker...")
--- airflow_cli-0.1.1/airflow_cli.egg-info/PKG-INFO
+++ airflow_cli-0.1.12/airflow_cli.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: airflow-cli
-Version: 0.1.1
+Version: 0.1.12
 Summary: CLI to simplify setting up Airflow with Docker.
 Author-email: LEMA-UFPB <ufpb.lema@gmail.com>
 License-Expression: MIT
@@ -135,7 +135,7 @@ This will run flake8 linter on the `dags/` folder to check for Python syntax iss
 python -m build
 
 # Install built package
-pip install dist/airflow_docker_helper-*.whl
+pip install dist/airflow_cli-*.whl
 ```
 
 ## Publishing to PyPI
--- airflow_cli-0.1.1/airflow_cli.egg-info/SOURCES.txt
+++ airflow_cli-0.1.12/airflow_cli.egg-info/SOURCES.txt
@@ -3,6 +3,7 @@ README.md
 pyproject.toml
 airflow_cli/__init__.py
 airflow_cli/cli.py
+airflow_cli/docker-compose.yml
 airflow_cli/docker_utils.py
 airflow_cli/env_utils.py
 airflow_cli/menu.py
--- airflow_cli-0.1.1/pyproject.toml
+++ airflow_cli-0.1.12/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "airflow-cli"
-version = "0.1.1"
+version = "0.1.12"
 description = "CLI to simplify setting up Airflow with Docker."
 readme = "README.md"
 requires-python = ">=3.7"
@@ -18,6 +18,7 @@ airflow-cli = "airflow_cli.cli:main"
 
 [tool.setuptools]
 packages = ["airflow_cli"]
+package-data = {"airflow_cli" = ["docker-compose.yml"]}
 
 [build-system]
 requires = ["setuptools", "wheel"]