airflow-cli 0.1.11__py3-none-any.whl → 0.1.13__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- airflow_cli/cli.py +1 -11
- airflow_cli/docker-compose.yml +251 -0
- airflow_cli/docker_utils.py +17 -11
- airflow_cli/menu.py +0 -13
- airflow_cli/os_utils.py +0 -20
- {airflow_cli-0.1.11.dist-info → airflow_cli-0.1.13.dist-info}/METADATA +1 -1
- airflow_cli-0.1.13.dist-info/RECORD +12 -0
- airflow_cli/env_utils.py +0 -33
- airflow_cli-0.1.11.dist-info/RECORD +0 -12
- {airflow_cli-0.1.11.dist-info → airflow_cli-0.1.13.dist-info}/WHEEL +0 -0
- {airflow_cli-0.1.11.dist-info → airflow_cli-0.1.13.dist-info}/entry_points.txt +0 -0
- {airflow_cli-0.1.11.dist-info → airflow_cli-0.1.13.dist-info}/licenses/LICENSE +0 -0
- {airflow_cli-0.1.11.dist-info → airflow_cli-0.1.13.dist-info}/top_level.txt +0 -0
airflow_cli/cli.py
CHANGED
@@ -1,9 +1,7 @@
 import argparse
 import logging
 import sys
-
-from .env_utils import ensure_venv, is_in_venv
-from .os_utils import check_docker, check_os
+from .os_utils import check_docker
 from .docker_utils import docker_up, docker_down, run_dag, fix_python_code

 logging.basicConfig(level=logging.INFO,
@@ -29,18 +27,10 @@ def main():

     args = parser.parse_args()

-    # Pre-checks before any command
-    ensure_venv()
-    if not is_in_venv():
-        log.warning(
-            "⚠️ Not running inside virtual environment. Interpreter: %s", sys.executable)
-
     if not check_docker():
         log.error("❌ Docker not ready.")
         sys.exit(1)

-    check_os()
-
     # Command execution
     if args.command == "up":
         docker_up()
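The net effect is a leaner startup path: 0.1.13 drops the virtualenv bootstrap (ensure_venv/is_in_venv) and the OS probe (check_os), leaving Docker readiness as the only pre-flight gate. A condensed sketch of the resulting flow, assuming the package is installed; the argparse wiring and the other subcommands are elided in the diff, so only the "up" branch is shown:

import logging
import sys

from airflow_cli.docker_utils import docker_up
from airflow_cli.os_utils import check_docker

log = logging.getLogger(__name__)

def run_up() -> None:
    # Docker readiness is now the sole pre-check before any command.
    if not check_docker():
        log.error("❌ Docker not ready.")
        sys.exit(1)
    docker_up()

if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    run_up()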
airflow_cli/docker-compose.yml
ADDED
@@ -0,0 +1,251 @@
+---
+x-airflow-common: &airflow-common
+  environment: &airflow-common-env
+    AIRFLOW__CORE__EXECUTOR: CeleryExecutor
+    AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://postgres:postgres@postgres/airflow
+    AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://postgres:postgres@postgres/airflow
+    AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://postgres:postgres@postgres/airflow
+    AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0
+    AIRFLOW_CONN_MONGO_DEFAULT: mongo://mongo:mongo@mongo:27017/?authSource=admin
+    AIRFLOW__CORE__FERNET_KEY: "pMrhjIcqUNHMYRk_ZOBmMptWR6o1DahCXCKn5lEMpzM="
+    AIRFLOW__API__SECRET_KEY: "891b3faded3f6bb751a452c8566b2b90becf3d"
+    AIRFLOW__API_AUTH__JWT_SECRET: "231b3f1ded3f1bb754a352c8166c2b90bqcf32"
+    AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: "true"
+    AIRFLOW__SCHEDULER__ENABLE_HEALTH_CHECK: "true"
+    AIRFLOW__API__AUTH_BACKENDS: "airflow.api.auth.backend.basic_auth"
+    AIRFLOW__CORE__LOAD_EXAMPLES: "false"
+    AIRFLOW__CORE__EXECUTION_API_SERVER_URL: "http://airflow-apiserver:8080/execution/"
+    AIRFLOW_VAR_DATA_DIR: "/opt/airflow/data"
+    AIRFLOW__CORE__SIMPLE_AUTH_MANAGER_ALL_ADMINS: "true"
+    AIRFLOW__WEBSERVER__SIMPLE_AUTH_MANAGER_USERS: "airflow:admin"
+  volumes:
+    - ./dags:/opt/airflow/dags
+  user: "50000:0"
+  image: "harbor.lema.ufpb.br/public/airflow:v3.0.2-python3.11-spark3.5.5-r2"
+  depends_on: &airflow-common-depends-on
+    redis:
+      condition: service_healthy
+    postgres:
+      condition: service_healthy
+services:
+  redis:
+    image: redis:7.0.4-alpine
+    container_name: redis-container
+    hostname: redis
+    expose:
+      - 6379
+    ports:
+      - 6379:6379
+    healthcheck:
+      test: ["CMD", "redis-cli", "ping"]
+      interval: 5s
+      timeout: 30s
+      retries: 50
+    restart: unless-stopped
+    networks:
+      - dev
+  postgres:
+    container_name: postgres-container
+    hostname: postgres
+    image: postgres:16-alpine
+    ports:
+      - 5432:5432
+    environment:
+      POSTGRES_USER: postgres
+      POSTGRES_PASSWORD: postgres
+      POSTGRES_DB: airflow
+    volumes:
+      - postgresdb_data:/var/lib/postgresql/data"
+    healthcheck:
+      test: ["CMD", "pg_isready", "-U", "postgres"]
+      interval: 5s
+      retries: 5
+    restart: unless-stopped
+    networks:
+      - dev
+  mongo:
+    container_name: mongo-container
+    hostname: mongo
+    image: mongo:8.0
+    environment:
+      MONGO_INITDB_ROOT_USERNAME: mongo
+      MONGO_INITDB_ROOT_PASSWORD: mongo
+    ports:
+      - 27017:27017
+    volumes:
+      - mongodb_data:/data/db
+    restart: unless-stopped
+    networks:
+      - dev
+  dbgate:
+    image: dbgate/dbgate:6.4.1-alpine
+    container_name: dbgate
+    hostname: dbgate-webserver
+    volumes:
+      - dbgate-data:/root/.dbgate
+    ports:
+      - 3100:3000
+    environment:
+      CONNECTIONS: con1,con2,con3
+      LABEL_con1: Postgres
+      SERVER_con1: postgres
+      USER_con1: postgres
+      PASSWORD_con1: postgres
+      PORT_con1: 5432
+      ENGINE_con1: postgres@dbgate-plugin-postgres
+      LABEL_con2: MongoDB
+      URL_con2: mongodb://mongo:mongo@mongo:27017
+      ENGINE_con2: mongo@dbgate-plugin-mongo
+    restart: unless-stopped
+    networks:
+      - dev
+
+  airflow-apiserver:
+    <<: *airflow-common
+    container_name: airflow-api-server-container
+    command: api-server
+    ports:
+      - "8080:8080"
+    healthcheck:
+      test: ["CMD", "curl", "--fail", "http://localhost:8080/api/v2/version"]
+      interval: 30s
+      timeout: 10s
+      retries: 5
+      start_period: 30s
+    restart: always
+    networks:
+      - dev
+    depends_on:
+      <<: *airflow-common-depends-on
+      airflow-init:
+        condition: service_completed_successfully
+
+  airflow-scheduler:
+    <<: *airflow-common
+    container_name: airflow-scheduler-container
+    command: scheduler
+    healthcheck:
+      test: ["CMD", "curl", "--fail", "http://localhost:8974/health"]
+      interval: 30s
+      timeout: 10s
+      retries: 5
+      start_period: 30s
+    restart: always
+    networks:
+      - dev
+    depends_on:
+      <<: *airflow-common-depends-on
+      airflow-init:
+        condition: service_completed_successfully
+
+  airflow-dag-processor:
+    <<: *airflow-common
+    container_name: airflow-dag-processor-container
+    command: dag-processor
+    healthcheck:
+      test:
+        [
+          "CMD-SHELL",
+          'airflow jobs check --job-type DagProcessorJob --hostname "$${HOSTNAME}"',
+        ]
+      interval: 30s
+      timeout: 10s
+      retries: 5
+      start_period: 30s
+    restart: always
+    networks:
+      - dev
+    depends_on:
+      <<: *airflow-common-depends-on
+      airflow-init:
+        condition: service_completed_successfully
+
+  airflow-worker:
+    <<: *airflow-common
+    container_name: airflow-worker-container
+    command: celery worker
+    healthcheck:
+      test:
+        - "CMD-SHELL"
+        - 'celery --app airflow.providers.celery.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}" || celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"'
+      interval: 30s
+      timeout: 10s
+      retries: 5
+      start_period: 30s
+    environment:
+      <<: *airflow-common-env
+      DUMB_INIT_SETSID: "0"
+    restart: always
+    networks:
+      - dev
+    depends_on:
+      <<: *airflow-common-depends-on
+      airflow-apiserver:
+        condition: service_healthy
+      airflow-init:
+        condition: service_completed_successfully
+
+  airflow-triggerer:
+    <<: *airflow-common
+    container_name: airflow-triggerer-container
+    command: triggerer
+    healthcheck:
+      test:
+        [
+          "CMD-SHELL",
+          'airflow jobs check --job-type TriggererJob --hostname "$${HOSTNAME}"',
+        ]
+      interval: 30s
+      timeout: 10s
+      retries: 5
+      start_period: 30s
+    restart: always
+    networks:
+      - dev
+    depends_on:
+      <<: *airflow-common-depends-on
+      airflow-init:
+        condition: service_completed_successfully
+
+  airflow-init:
+    <<: *airflow-common
+    container_name: airflow-init
+    entrypoint: /bin/bash
+    command:
+      - -c
+      - |
+        mkdir -p /sources/logs /sources/dags /sources/plugins
+        chown -R "50000:0" /sources/{logs,dags,plugins}
+        exec /entrypoint airflow version
+    environment:
+      <<: *airflow-common-env
+      _AIRFLOW_DB_MIGRATE: "true"
+      _AIRFLOW_WWW_USER_CREATE: "true"
+      _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow}
+      _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow}
+    user: "0:0"
+    networks:
+      - dev
+
+  airflow-cli:
+    <<: *airflow-common
+    profiles:
+      - debug
+    environment:
+      <<: *airflow-common-env
+      CONNECTION_CHECK_MAX_COUNT: "0"
+    command:
+      - bash
+      - -c
+      - airflow
+    depends_on:
+      <<: *airflow-common-depends-on
+    networks:
+      - dev
+
+networks:
+  dev:
+volumes:
+  mongodb_data:
+  postgresdb_data:
+  dbgate-data:
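The file leans on YAML anchors: the &airflow-common block (image, environment, volume mount, depends_on) is merged into every Airflow service via <<: *airflow-common, so one definition drives the apiserver, scheduler, dag-processor, worker, triggerer, init, and debug-profile CLI containers. The apiserver's healthcheck curls /api/v2/version; the same readiness probe can be reproduced from the host after docker compose up returns. A minimal sketch, assuming the default 8080 port mapping above:

import json
import time
import urllib.request

def wait_for_airflow(url: str = "http://localhost:8080/api/v2/version",
                     retries: int = 5, delay: float = 30.0) -> dict:
    # Mirrors the compose healthcheck: poll until the api-server answers.
    for _ in range(retries):
        try:
            with urllib.request.urlopen(url, timeout=10) as resp:
                return json.load(resp)
        except OSError:
            time.sleep(delay)
    raise RuntimeError(f"api-server not healthy after {retries} attempts")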
airflow_cli/docker_utils.py
CHANGED
@@ -7,38 +7,43 @@ import shutil

 log = logging.getLogger(__name__)

+
 def docker_up():
     log.info("🐳 Starting Docker environment...")
     env = os.environ.copy()
     env["AIRFLOW_UID"] = "50000"
-
-
+    env["AIRFLOW_GID"] = "0"
+    env["DOCKER_INSECURE_NO_IPTABLES_RAW"] = "1"
+
     local_compose_file = "docker-compose.yml"
     if not os.path.exists(local_compose_file):
-        log.info("📋
-
-        package_compose_file = os.path.join(
+        log.info("📋 Creating docker-compose.yml ...")
+
+        package_compose_file = os.path.join(
+            os.path.dirname(__file__), "docker-compose.yml")
         shutil.copy2(package_compose_file, local_compose_file)
-        log.info("✅ docker-compose.yml
-
+        log.info("✅ docker-compose.yml create successfully!")
+
     # Check whether the 'dags' folder exists
     dags_path = "dags"
     if not os.path.exists(dags_path):
-        log.info("📁
+        log.info("📁 Create 'dags' directory...")
         os.makedirs(dags_path, exist_ok=True)
-
+
     try:
         subprocess.run(["docker", "compose", "up", "-d"], env=env, check=True)
         log.info("✅ Docker environment is ready: http://localhost:8080")
     except subprocess.CalledProcessError as e:
-        log.error(f"❌
-        log.error("
+        log.error(f"❌ Error Docker: {e}")
+        log.error("Check if Docker is running and try again.")
         raise

+
 def docker_down():
     log.info("🐳 Stopping Docker environment...")
     subprocess.run(["docker", "compose", "down"], check=False)

+
 def run_dag():
     log.info("🚀 Running DAG in Docker...")
     try:
@@ -55,6 +60,7 @@ def run_dag():
     except Exception as e:
         log.error(f"❌ Error running DAG: {e}")

+
 def fix_python_code():
     log.info("🔧 Running flake8 on 'dags' folder...")
     try:
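Beyond the log-message fixes, docker_up() now exports AIRFLOW_GID=0 and DOCKER_INSECURE_NO_IPTABLES_RAW=1 into the environment of the docker compose subprocess, and it materializes the docker-compose.yml bundled with the wheel into the working directory on first run. Driving the helpers directly looks like this (a sketch; assumes Docker and the docker compose plugin are on PATH):

import logging
from airflow_cli.docker_utils import docker_up, docker_down

logging.basicConfig(level=logging.INFO)

try:
    # Copies the packaged docker-compose.yml if none exists, ensures ./dags,
    # then runs `docker compose up -d` with the AIRFLOW_* env vars set.
    docker_up()
finally:
    # `docker compose down` with check=False, so teardown never raises.
    docker_down()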
airflow_cli/menu.py
CHANGED
@@ -1,9 +1,6 @@
 import logging
-import sys
 from simple_term_menu import TerminalMenu
 from .docker_utils import docker_up, docker_down, run_dag, fix_python_code
-from .env_utils import ensure_venv, is_in_venv
-from .os_utils import check_os, update_docker_compose, check_docker

 log = logging.getLogger(__name__)

@@ -22,16 +19,6 @@ def show_menu():


 def run():
-    ensure_venv()
-    if not is_in_venv():
-        log.warning("⚠️ Not running inside the virtual environment.")
-        log.warning(f"Interpreter: {sys.executable}")
-
-    if not check_docker():
-        log.error("❌ Docker is not ready.")
-        return
-
-    check_os()

     while True:
         option = show_menu()
airflow_cli/os_utils.py
CHANGED
@@ -1,27 +1,8 @@
-import platform
 import logging
 import subprocess
-import os
-import shutil
-import pkg_resources

 log = logging.getLogger(__name__)

-def check_os():
-    system = platform.system()
-    if system == "Linux":
-        try:
-            with open("/proc/version", "r") as f:
-                if "microsoft" in f.read().lower():
-                    log.info("✅ Running on WSL (Linux under Windows).")
-                else:
-                    log.info("✅ Running on native Linux.")
-        except FileNotFoundError:
-            log.info("✅ Running on Linux.")
-    elif system == "Darwin":
-        log.info("✅ Running on MacOS.")
-    else:
-        log.error(f"❌ Unsupported OS: {system}")

 def check_docker():
     try:
@@ -32,4 +13,3 @@ def check_docker():
     except Exception as e:
         log.error(f"❌ Docker check failed: {e}")
         return False
-
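With check_os() and its imports gone, os_utils.py shrinks to check_docker(), whose body this diff truncates to its try/except skeleton. A plausible stand-in, purely illustrative: the probe command below is an assumption, and only the except branch is confirmed by the hunk above.

import logging
import subprocess

log = logging.getLogger(__name__)

def check_docker() -> bool:
    try:
        # Hypothetical probe; the packaged implementation is not shown here.
        subprocess.run(["docker", "info"], check=True, capture_output=True)
        return True
    except Exception as e:
        log.error(f"❌ Docker check failed: {e}")
        return False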
airflow_cli-0.1.13.dist-info/RECORD
ADDED
@@ -0,0 +1,12 @@
+airflow_cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+airflow_cli/cli.py,sha256=qHi1DF9joO9LkyLE-1YrjEr97anMcE84WTYocD_vPhY,1230
+airflow_cli/docker-compose.yml,sha256=9jilz2y47ZLgmsSOzGSrkLsbQuhWuKxXaXaVndqbxbg,6927
+airflow_cli/docker_utils.py,sha256=q_Nj_gSqrKTfGlM2Kllr9ea7fMQdDjh8x5-prniDFoo,2234
+airflow_cli/menu.py,sha256=QpyriBMZMCPUgIrUm3FMuJ6ANDqkBNbeXOk1R8Gp9X0,1003
+airflow_cli/os_utils.py,sha256=Lr8mhHOgnOwI9H1h0F9BtygH2oLDvxA2hVDMS6l_x_w,384
+airflow_cli-0.1.13.dist-info/licenses/LICENSE,sha256=UrTV1prTxEoB9MSB9MG4BkGhNDk9EQx5tDTmuQ2C4u4,1065
+airflow_cli-0.1.13.dist-info/METADATA,sha256=eLTL4J8hon0XPXemHlFTSvezp90cTjDKcHk80qaJfWQ,2870
+airflow_cli-0.1.13.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+airflow_cli-0.1.13.dist-info/entry_points.txt,sha256=TyX3EFPfUxoxQ8aazxJREZl6VsOobW_JJlgA7jGpIBU,53
+airflow_cli-0.1.13.dist-info/top_level.txt,sha256=oD1Su7hc6_0veTZoMGIZph94T9S-dtsL-G_IOLuqqVM,12
+airflow_cli-0.1.13.dist-info/RECORD,,
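Each RECORD row is path,sha256=<digest>,<size>, where the digest is the file's SHA-256 encoded as URL-safe base64 with the trailing "=" padding stripped, per the wheel spec. The digests above can be verified with a few lines; the one-byte __init__.py entry is the well-known hash of a lone newline:

import base64
import hashlib

def record_digest(data: bytes) -> str:
    # RECORD-style digest: urlsafe base64 of the sha256, padding removed.
    raw = hashlib.sha256(data).digest()
    return "sha256=" + base64.urlsafe_b64encode(raw).rstrip(b"=").decode("ascii")

assert record_digest(b"\n") == "sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs"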
airflow_cli/env_utils.py
DELETED
@@ -1,33 +0,0 @@
-import os
-import subprocess
-import sys
-import importlib.util
-import logging
-
-log = logging.getLogger(__name__)
-
-venv_dir = ".venv"
-
-def is_in_venv():
-    return os.path.abspath(sys.prefix).endswith(os.path.abspath(venv_dir))
-
-def ensure_venv():
-    if not os.path.exists(venv_dir):
-        log.info(f"🔄 Creating Python virtual environment at {venv_dir}...")
-        try:
-            subprocess.check_call([sys.executable, "-m", "venv", venv_dir])
-            log.info("✅ Virtual environment created.")
-        except subprocess.CalledProcessError as e:
-            log.error(f"❌ Error creating venv: {e}")
-            return False
-    return True
-
-def install_package_if_missing(packages):
-    for package in packages:
-        if importlib.util.find_spec(package) is None:
-            log.info(f"📦 Installing missing package '{package}'...")
-            try:
-                subprocess.check_call([sys.executable, "-m", "pip", "install", package])
-            except subprocess.CalledProcessError as e:
-                log.error(f"❌ Failed to install '{package}': {e}")
-                sys.exit(1)
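The whole venv-management layer is removed: 0.1.13 no longer creates .venv, checks whether it is active, or pip-installs missing packages at runtime. Incidentally, the deleted is_in_venv() compared sys.prefix against an absolute path built from the relative ".venv", so it only matched when the CLI ran from the project root. The interpreter-level check avoids that assumption; an editor's sketch, not package code:

import sys

def in_virtualenv() -> bool:
    # Inside a venv, sys.prefix points at the environment while
    # sys.base_prefix still points at the base interpreter (PEP 405).
    return sys.prefix != sys.base_prefix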
airflow_cli-0.1.11.dist-info/RECORD
DELETED
@@ -1,12 +0,0 @@
-airflow_cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
-airflow_cli/cli.py,sha256=CxmcgmV2txkravhZAlimA_r2swPnGHQqm7ppxkahEBM,1507
-airflow_cli/docker_utils.py,sha256=fhSadDaNyURW-9pu7ZtVt2muh2kJQJRpEOyyOpu5B74,2319
-airflow_cli/env_utils.py,sha256=f0L9RBwylypGx5U8BjZlLEtX3ldYNYma7xrLwF0aOU8,1110
-airflow_cli/menu.py,sha256=syKVdaSCxpkt_hPeces3F-wjZdSPZuqOsDyTbqV4gkg,1405
-airflow_cli/os_utils.py,sha256=2iLi9atS94Rp0YMduChH3poNrXWTSeGSTnSWO5v_9W8,1003
-airflow_cli-0.1.11.dist-info/licenses/LICENSE,sha256=UrTV1prTxEoB9MSB9MG4BkGhNDk9EQx5tDTmuQ2C4u4,1065
-airflow_cli-0.1.11.dist-info/METADATA,sha256=VaTXUcMPiY7M9YkCUzjn8ZriRZOq6msc0gGsEb5NDJA,2870
-airflow_cli-0.1.11.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-airflow_cli-0.1.11.dist-info/entry_points.txt,sha256=TyX3EFPfUxoxQ8aazxJREZl6VsOobW_JJlgA7jGpIBU,53
-airflow_cli-0.1.11.dist-info/top_level.txt,sha256=oD1Su7hc6_0veTZoMGIZph94T9S-dtsL-G_IOLuqqVM,12
-airflow_cli-0.1.11.dist-info/RECORD,,
{airflow_cli-0.1.11.dist-info → airflow_cli-0.1.13.dist-info}/WHEEL
File without changes
{airflow_cli-0.1.11.dist-info → airflow_cli-0.1.13.dist-info}/entry_points.txt
File without changes
{airflow_cli-0.1.11.dist-info → airflow_cli-0.1.13.dist-info}/licenses/LICENSE
File without changes
{airflow_cli-0.1.11.dist-info → airflow_cli-0.1.13.dist-info}/top_level.txt
File without changes