airflow_cli-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
airflow_cli/__init__.py ADDED
@@ -0,0 +1 @@
1
+
airflow_cli/cli.py ADDED
@@ -0,0 +1,55 @@
1
+ import argparse
2
+ import logging
3
+ import sys
4
+
5
+ from .env_utils import ensure_venv, is_in_venv
6
+ from .os_utils import check_docker, check_os, update_docker_compose
7
+ from .docker_utils import docker_up, docker_down, run_dag, fix_python_code
8
+
9
+ logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
10
+ log = logging.getLogger(__name__)
11
+
12
+ def main():
13
+ parser = argparse.ArgumentParser(description="Airflow Docker Helper CLI")
14
+ subparsers = parser.add_subparsers(dest="command")
15
+
16
+ # Subcommand: up
17
+ subparsers.add_parser("up", help="Start Docker environment")
18
+
19
+ # Subcommand: down
20
+ subparsers.add_parser("down", help="Stop Docker environment")
21
+
22
+ # Subcommand: run-dag
23
+ subparsers.add_parser("run-dag", help="Run Airflow DAG inside Docker")
24
+
25
+ # Subcommand: fix-code
26
+ subparsers.add_parser("fix-code", help="Run flake8 linter")
27
+
28
+ args = parser.parse_args()
29
+
30
+ # Pre-flight checks before any command
31
+ ensure_venv()
32
+ if not is_in_venv():
33
+ log.warning("⚠️ Not running inside virtual environment. Interpreter: %s", sys.executable)
34
+
35
+
36
+
37
+ if not check_docker():
38
+ log.error("❌ Docker not ready.")
39
+ sys.exit(1)
40
+
41
+ check_os()
42
+ update_docker_compose()
43
+
44
+ # Dispatch the selected command
45
+ if args.command == "up":
46
+ docker_up()
47
+ elif args.command == "down":
48
+ docker_down()
49
+ elif args.command == "run-dag":
50
+ run_dag()
51
+ elif args.command == "fix-code":
52
+ fix_python_code()
53
+ else:
54
+ parser.print_help()
55
+
airflow_cli/docker-compose.yml ADDED
@@ -0,0 +1,251 @@
1
+ ---
2
+ x-airflow-common: &airflow-common
3
+ environment: &airflow-common-env
4
+ AIRFLOW__CORE__EXECUTOR: CeleryExecutor
5
+ AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://postgres:postgres@postgres/airflow
6
+ AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://postgres:postgres@postgres/airflow
7
+ AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://postgres:postgres@postgres/airflow
8
+ AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0
9
+ AIRFLOW_CONN_MONGO_DEFAULT: mongo://mongo:mongo@mongo:27017/?authSource=admin
10
+ AIRFLOW__CORE__FERNET_KEY: "pMrhjIcqUNHMYRk_ZOBmMptWR6o1DahCXCKn5lEMpzM="
11
+ AIRFLOW__API__SECRET_KEY: "891b3faded3f6bb751a452c8566b2b90becf3d"
12
+ AIRFLOW__API_AUTH__JWT_SECRET: "231b3f1ded3f1bb754a352c8166c2b90bqcf32"
13
+ AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: "true"
14
+ AIRFLOW__SCHEDULER__ENABLE_HEALTH_CHECK: "true"
15
+ AIRFLOW__API__AUTH_BACKENDS: "airflow.api.auth.backend.basic_auth"
16
+ AIRFLOW__CORE__LOAD_EXAMPLES: "false"
17
+ AIRFLOW__CORE__EXECUTION_API_SERVER_URL: "http://airflow-apiserver:8080/execution/"
18
+ AIRFLOW_VAR_DATA_DIR: "/opt/airflow/data"
19
+ AIRFLOW__CORE__SIMPLE_AUTH_MANAGER_ALL_ADMINS: "true"
20
+ AIRFLOW__WEBSERVER__SIMPLE_AUTH_MANAGER_USERS: "airflow:admin"
21
+ volumes:
22
+ - ./dags:/opt/airflow/dags
23
+ user: "50000:0"
24
+ image: "harbor.lema.ufpb.br/public/airflow:v3.0.2-python3.11-spark3.5.5-r2"
25
+ depends_on: &airflow-common-depends-on
26
+ redis:
27
+ condition: service_healthy
28
+ postgres:
29
+ condition: service_healthy
30
+ services:
31
+ redis:
32
+ image: redis:7.0.4-alpine
33
+ container_name: redis-container
34
+ hostname: redis
35
+ expose:
36
+ - 6379
37
+ ports:
38
+ - 6379:6379
39
+ healthcheck:
40
+ test: ["CMD", "redis-cli", "ping"]
41
+ interval: 5s
42
+ timeout: 30s
43
+ retries: 50
44
+ restart: unless-stopped
45
+ networks:
46
+ - dev
47
+ postgres:
48
+ container_name: postgres-container
49
+ hostname: postgres
50
+ image: postgres:16-alpine
51
+ ports:
52
+ - 5432:5432
53
+ environment:
54
+ POSTGRES_USER: postgres
55
+ POSTGRES_PASSWORD: postgres
56
+ POSTGRES_DB: airflow
57
+ volumes:
58
+ - postgresdb_data:/var/lib/postgresql/data"
59
+ healthcheck:
60
+ test: ["CMD", "pg_isready", "-U", "postgres"]
61
+ interval: 5s
62
+ retries: 5
63
+ restart: unless-stopped
64
+ networks:
65
+ - dev
66
+ mongo:
67
+ container_name: mongo-container
68
+ hostname: mongo
69
+ image: mongo:8.0
70
+ environment:
71
+ MONGO_INITDB_ROOT_USERNAME: mongo
72
+ MONGO_INITDB_ROOT_PASSWORD: mongo
73
+ ports:
74
+ - 27017:27017
75
+ volumes:
76
+ - mongodb_data:/data/db
77
+ restart: unless-stopped
78
+ networks:
79
+ - dev
80
+ dbgate:
81
+ image: dbgate/dbgate:6.4.1-alpine
82
+ container_name: dbgate
83
+ hostname: dbgate-webserver
84
+ volumes:
85
+ - dbgate-data:/root/.dbgate
86
+ ports:
87
+ - 3100:3000
88
+ environment:
89
+ CONNECTIONS: con1,con2,con3
90
+ LABEL_con1: Postgres
91
+ SERVER_con1: postgres
92
+ USER_con1: postgres
93
+ PASSWORD_con1: postgres
94
+ PORT_con1: 5432
95
+ ENGINE_con1: postgres@dbgate-plugin-postgres
96
+ LABEL_con2: MongoDB
97
+ URL_con2: mongodb://mongo:mongo@mongo:27017
98
+ ENGINE_con2: mongo@dbgate-plugin-mongo
99
+ restart: unless-stopped
100
+ networks:
101
+ - dev
102
+
103
+ airflow-apiserver:
104
+ <<: *airflow-common
105
+ container_name: airflow-api-server-container
106
+ command: api-server
107
+ ports:
108
+ - "8080:8080"
109
+ healthcheck:
110
+ test: ["CMD", "curl", "--fail", "http://localhost:8080/api/v2/version"]
111
+ interval: 30s
112
+ timeout: 10s
113
+ retries: 5
114
+ start_period: 30s
115
+ restart: always
116
+ networks:
117
+ - dev
118
+ depends_on:
119
+ <<: *airflow-common-depends-on
120
+ airflow-init:
121
+ condition: service_completed_successfully
122
+
123
+ airflow-scheduler:
124
+ <<: *airflow-common
125
+ container_name: airflow-scheduler-container
126
+ command: scheduler
127
+ healthcheck:
128
+ test: ["CMD", "curl", "--fail", "http://localhost:8974/health"]
129
+ interval: 30s
130
+ timeout: 10s
131
+ retries: 5
132
+ start_period: 30s
133
+ restart: always
134
+ networks:
135
+ - dev
136
+ depends_on:
137
+ <<: *airflow-common-depends-on
138
+ airflow-init:
139
+ condition: service_completed_successfully
140
+
141
+ airflow-dag-processor:
142
+ <<: *airflow-common
143
+ container_name: airflow-dag-processor-container
144
+ command: dag-processor
145
+ healthcheck:
146
+ test:
147
+ [
148
+ "CMD-SHELL",
149
+ 'airflow jobs check --job-type DagProcessorJob --hostname "$${HOSTNAME}"',
150
+ ]
151
+ interval: 30s
152
+ timeout: 10s
153
+ retries: 5
154
+ start_period: 30s
155
+ restart: always
156
+ networks:
157
+ - dev
158
+ depends_on:
159
+ <<: *airflow-common-depends-on
160
+ airflow-init:
161
+ condition: service_completed_successfully
162
+
163
+ airflow-worker:
164
+ <<: *airflow-common
165
+ container_name: airflow-worker-container
166
+ command: celery worker
167
+ healthcheck:
168
+ test:
169
+ - "CMD-SHELL"
170
+ - 'celery --app airflow.providers.celery.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}" || celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"'
171
+ interval: 30s
172
+ timeout: 10s
173
+ retries: 5
174
+ start_period: 30s
175
+ environment:
176
+ <<: *airflow-common-env
177
+ DUMB_INIT_SETSID: "0"
178
+ restart: always
179
+ networks:
180
+ - dev
181
+ depends_on:
182
+ <<: *airflow-common-depends-on
183
+ airflow-apiserver:
184
+ condition: service_healthy
185
+ airflow-init:
186
+ condition: service_completed_successfully
187
+
188
+ airflow-triggerer:
189
+ <<: *airflow-common
190
+ container_name: airflow-triggerer-container
191
+ command: triggerer
192
+ healthcheck:
193
+ test:
194
+ [
195
+ "CMD-SHELL",
196
+ 'airflow jobs check --job-type TriggererJob --hostname "$${HOSTNAME}"',
197
+ ]
198
+ interval: 30s
199
+ timeout: 10s
200
+ retries: 5
201
+ start_period: 30s
202
+ restart: always
203
+ networks:
204
+ - dev
205
+ depends_on:
206
+ <<: *airflow-common-depends-on
207
+ airflow-init:
208
+ condition: service_completed_successfully
209
+
210
+ airflow-init:
211
+ <<: *airflow-common
212
+ container_name: airflow-init
213
+ entrypoint: /bin/bash
214
+ command:
215
+ - -c
216
+ - |
217
+ mkdir -p /sources/logs /sources/dags /sources/plugins
218
+ chown -R "50000:0" /sources/{logs,dags,plugins}
219
+ exec /entrypoint airflow version
220
+ environment:
221
+ <<: *airflow-common-env
222
+ _AIRFLOW_DB_MIGRATE: "true"
223
+ _AIRFLOW_WWW_USER_CREATE: "true"
224
+ _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow}
225
+ _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow}
226
+ user: "0:0"
227
+ networks:
228
+ - dev
229
+
230
+ airflow-cli:
231
+ <<: *airflow-common
232
+ profiles:
233
+ - debug
234
+ environment:
235
+ <<: *airflow-common-env
236
+ CONNECTION_CHECK_MAX_COUNT: "0"
237
+ command:
238
+ - bash
239
+ - -c
240
+ - airflow
241
+ depends_on:
242
+ <<: *airflow-common-depends-on
243
+ networks:
244
+ - dev
245
+
246
+ networks:
247
+ dev:
248
+ volumes:
249
+ mongodb_data:
250
+ postgresdb_data:
251
+ dbgate-data:
airflow_cli/docker_utils.py ADDED
@@ -0,0 +1,39 @@
1
+ import subprocess
2
+ import logging
3
+ from glob import glob
4
+ import yaml
5
+
6
+ log = logging.getLogger(__name__)
7
+
8
+ def docker_up():
9
+ log.info("🐳 Starting Docker environment...")
10
+ subprocess.run(["docker", "compose", "up", "-d"], check=True)
11
+ log.info("✅ Docker environment is ready.")
12
+
13
+ def docker_down():
14
+ log.info("🐳 Stopping Docker environment...")
15
+ subprocess.run(["docker", "compose", "down"], check=False)
16
+
17
+ def run_dag():
18
+ log.info("🚀 Running DAG in Docker...")
19
+ try:
20
+ config = glob("dags/*/config.yml").pop()
21
+ with open(config, "r") as file:
22
+ config_data = yaml.safe_load(file)
23
+ dag_id = config_data['args']["id"]
24
+
25
+ subprocess.run([
26
+ "docker", "exec", "-it", "airflow-worker-container",
27
+ "airflow", "dags", "test", dag_id
28
+ ], check=True)
29
+ log.info(f"✅ DAG '{dag_id}' executed successfully.")
30
+ except Exception as e:
31
+ log.error(f"❌ Error running DAG: {e}")
32
+
33
+ def fix_python_code():
34
+ log.info("🔧 Running flake8 on 'dags' folder...")
35
+ try:
36
+ subprocess.run(["flake8", "dags"], check=True)
37
+ log.info("✅ Code checked with flake8.")
38
+ except subprocess.CalledProcessError as e:
39
+ log.error(f"❌ flake8 found issues: {e}")
airflow_cli/env_utils.py ADDED
@@ -0,0 +1,33 @@
1
+ import os
2
+ import subprocess
3
+ import sys
4
+ import importlib.util
5
+ import logging
6
+
7
+ log = logging.getLogger(__name__)
8
+
9
+ venv_dir = ".venv"
10
+
11
+ def is_in_venv():
12
+ return os.path.abspath(sys.prefix).endswith(os.path.abspath(venv_dir))
13
+
14
+ def ensure_venv():
15
+ if not os.path.exists(venv_dir):
16
+ log.info(f"🔄 Creating Python virtual environment at {venv_dir}...")
17
+ try:
18
+ subprocess.check_call([sys.executable, "-m", "venv", venv_dir])
19
+ log.info("✅ Virtual environment created.")
20
+ except subprocess.CalledProcessError as e:
21
+ log.error(f"❌ Error creating venv: {e}")
22
+ return False
23
+ return True
24
+
25
+ def install_package_if_missing(packages):
26
+ for package in packages:
27
+ if importlib.util.find_spec(package) is None:
28
+ log.info(f"📦 Installing missing package '{package}'...")
29
+ try:
30
+ subprocess.check_call([sys.executable, "-m", "pip", "install", package])
31
+ except subprocess.CalledProcessError as e:
32
+ log.error(f"❌ Failed to install '{package}': {e}")
33
+ sys.exit(1)
airflow_cli/menu.py ADDED
@@ -0,0 +1,51 @@
1
+ import logging
2
+ import sys
3
+ from simple_term_menu import TerminalMenu
4
+ from .docker_utils import docker_up, docker_down, run_dag, fix_python_code
5
+ from .env_utils import ensure_venv, is_in_venv
6
+ from .os_utils import check_os, update_docker_compose, check_docker
7
+
8
+ log = logging.getLogger(__name__)
9
+
10
+ def show_menu():
11
+ options = [
12
+ "🐳 Docker Environment Up",
13
+ "🚀 Run DAG on Terminal",
14
+ "🐳 Docker Environment Down",
15
+ "🔧 Fix Python Code",
16
+ "🚪 Exit"
17
+ ]
18
+ menu = TerminalMenu(options, title="🎛️ Choose an option:")
19
+ choice = menu.show()
20
+ return options[choice] if choice is not None else None
21
+
22
+
23
+
24
+ def run():
25
+ ensure_venv()
26
+ if not is_in_venv():
27
+ log.warning("⚠️ Not running inside the virtual environment.")
28
+ log.warning(f"Interpreter: {sys.executable}")
29
+
30
+
31
+
32
+ if not check_docker():
33
+ log.error("❌ Docker is not ready.")
34
+ return
35
+
36
+ check_os()
37
+ update_docker_compose()
38
+
39
+ while True:
40
+ option = show_menu()
41
+ if option == "🐳 Docker Environment Up":
42
+ docker_up()
43
+ elif option == "🚀 Run DAG on Terminal":
44
+ run_dag()
45
+ elif option == "🐳 Docker Environment Down":
46
+ docker_down()
47
+ elif option == "🔧 Fix Python Code":
48
+ fix_python_code()
49
+ elif option == "🚪 Exit" or option is None:
50
+ log.info("👋 Exiting...")
51
+ break
airflow_cli/os_utils.py ADDED
@@ -0,0 +1,47 @@
1
+ import platform
2
+ import logging
3
+ import subprocess
4
+ import os
5
+ import shutil
6
+ import pkg_resources
7
+
8
+ log = logging.getLogger(__name__)
9
+
10
+ def check_os():
11
+ system = platform.system()
12
+ if system == "Linux":
13
+ try:
14
+ with open("/proc/version", "r") as f:
15
+ if "microsoft" in f.read().lower():
16
+ log.info("✅ Running on WSL (Linux under Windows).")
17
+ else:
18
+ log.info("✅ Running on native Linux.")
19
+ except FileNotFoundError:
20
+ log.info("✅ Running on Linux.")
21
+ elif system == "Darwin":
22
+ log.info("✅ Running on MacOS.")
23
+ else:
24
+ log.error(f"❌ Unsupported OS: {system}")
25
+
26
+ def check_docker():
27
+ try:
28
+ subprocess.check_output(["docker", "--version"])
29
+ subprocess.check_output(["docker", "info"])
30
+ log.info("✅ Docker is installed and running.")
31
+ return True
32
+ except Exception as e:
33
+ log.error(f"❌ Docker check failed: {e}")
34
+ return False
35
+
36
+ def update_docker_compose():
37
+ """Copy docker-compose.yml from package to current directory if it doesn't exist"""
38
+ if not os.path.exists("docker-compose.yml"):
39
+ try:
40
+ # Get the docker-compose.yml from the package
41
+ source = pkg_resources.resource_filename('airflow_cli', 'docker-compose.yml')
42
+ shutil.copy2(source, "docker-compose.yml")
43
+ log.info("✅ docker-compose.yml copied to current directory.")
44
+ except Exception as e:
45
+ log.error(f"❌ Failed to copy docker-compose.yml: {e}")
46
+ else:
47
+ log.info("✅ docker-compose.yml already exists.")
airflow_cli-0.1.0.dist-info/METADATA ADDED
@@ -0,0 +1,381 @@
1
+ Metadata-Version: 2.4
2
+ Name: airflow-cli
3
+ Version: 0.1.0
4
+ Summary: CLI to simplify the setup of Airflow with Docker.
5
+ Author-email: LEMA-UFPB <ufpb.lema@gmail.com>
6
+ License: MIT
7
+ Requires-Python: >=3.7
8
+ Description-Content-Type: text/markdown
9
+ License-File: LICENSE
10
+ Requires-Dist: simple-term-menu>=1.0.0
11
+ Requires-Dist: requests>=2.25.0
12
+ Requires-Dist: flake8>=4.0.0
13
+ Requires-Dist: pyyaml>=5.4.0
14
+ Dynamic: license-file
15
+
16
+ # Airflow Docker Helper
17
+
18
+ A command-line tool to facilitate the setup of Apache Airflow using Docker and enable local DAG development and testing.
19
+
20
+ ## Features
21
+
22
+ - 🚀 Quick Airflow setup with Docker Compose
23
+ - 🔧 Local DAG development and testing
24
+ - 📦 Pre-configured Docker environment
25
+ - 🛠️ CLI interface for common Airflow operations
26
+ - 🧪 Testing utilities for DAG validation
27
+
28
+ ## Prerequisites
29
+
30
+ - Python 3.7+
31
+ - Docker and Docker Compose (see the quick check sketched below)
32
+ - Git
33
+
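+ You can verify your Docker setup the same way the CLI's pre-flight check does; a minimal sketch:
+
+ ```bash
+ docker --version   # client installed
+ docker info        # daemon reachable
+ ```
+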
34
+ ## Installation
35
+
36
+ ### From PyPI (Recommended)
37
+
38
+ ```bash
39
+ pip install airflow-cli
40
+ ```
41
+
42
+ ### From Source
43
+
44
+ ```bash
45
+ git clone https://gitlab.lema.ufpb.br/back-end/lema-ufpb/airflow-docker-helper.git
46
+ cd airflow-docker-helper
47
+ pip install -e .
48
+ ```
49
+
50
+ ### Development Installation
51
+
52
+ ```bash
53
+ git clone https://gitlab.lema.ufpb.br/back-end/lema-ufpb/airflow-docker-helper.git
54
+ cd airflow-docker-helper
55
+ pip install -e ".[dev]"
56
+ ```
57
+
58
+ ## Quick Start
59
+
60
+ ### 1. Start Airflow Environment
61
+
62
+ ```bash
63
+ airflow-cli up
64
+ ```
65
+
66
+ This command will:
67
+ - Check Docker installation and environment
68
+ - Copy the bundled docker-compose.yml into the current directory if one is not already present
69
+ - Start Airflow services with Docker Compose (roughly the manual steps sketched below)
70
+
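+ If you prefer to drive Docker yourself, the steps above are roughly equivalent to the following manual commands (a sketch, assuming a docker-compose.yml is already in the current directory):
+
+ ```bash
+ # start all services in the background (what `airflow-cli up` runs)
+ docker compose up -d
+
+ # confirm the containers are up and healthy
+ docker compose ps
+ ```
+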
71
+ ### 2. Access Airflow UI
72
+
73
+ Open your browser and navigate to http://localhost:8080 (or verify from the terminal as sketched below)
74
+
75
+ Default credentials:
76
+ - Username: `airflow`
77
+ - Password: `airflow`
78
+
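+ You can also confirm the API server is up from the terminal by hitting the same endpoint the container healthcheck uses:
+
+ ```bash
+ curl --fail http://localhost:8080/api/v2/version
+ ```
+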
79
+ ### 3. Stop Airflow Environment
80
+
81
+ ```bash
82
+ airflow-cli down
83
+ ```
84
+
85
+ ## Usage
86
+
87
+ ### Available Commands
88
+
89
+ ```bash
90
+ # Start Docker environment
91
+ airflow-cli up
92
+
93
+ # Stop Docker environment
94
+ airflow-cli down
95
+
96
+ # Run Airflow DAG inside Docker
97
+ airflow-cli run-dag
98
+
99
+ # Run flake8 linter on DAGs
100
+ airflow-cli fix-code
101
+
102
+ # Show help
103
+ airflow-cli --help
104
+ ```
105
+
106
+ ### DAG Development
107
+
108
+ 1. Place your DAG files in the `dags/` directory
109
+ 2. The directory is automatically mounted to the Airflow container
110
+ 3. Changes are reflected immediately (no restart required)
111
+
112
+ Expected DAG structure for `run-dag` command:
113
+ ```
114
+ project/
115
+ ├── dags/
116
+ │ └── my_dag/
117
+ │ ├── config.yml
118
+ │ └── dag.py
119
+ └── docker-compose.yml
120
+ ```
121
+
122
+ Example `config.yml`:
123
+ ```yaml
124
+ args:
125
+ id: "my_dag_id"
126
+ ```
127
+
128
+ ### Testing DAGs
129
+
130
+ #### Run a DAG inside Docker
131
+ ```bash
132
+ airflow-cli run-dag
133
+ ```
134
+
135
+ This command will:
136
+ - Look for DAG configuration files in `dags/*/config.yml`
137
+ - Execute the DAG inside the running Airflow worker container (see the sketch below)
138
+
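+ Under the hood, the DAG id is read from the `args.id` field of the first matching config.yml and the DAG is executed with `airflow dags test` in the worker container. A sketch of the equivalent manual command, assuming the example id `my_dag_id` from above:
+
+ ```bash
+ docker exec -it airflow-worker-container airflow dags test my_dag_id
+ ```
+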
139
+ #### Validate DAG syntax with flake8
140
+ ```bash
141
+ airflow-cli fix-code
142
+ ```
143
+
144
+ This runs the flake8 linter on the `dags/` folder to check for Python style and syntax issues.
145
+
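+ flake8 reads its configuration from the project root, so the rules can be tuned with a `.flake8` (or `setup.cfg`) file; a minimal sketch with assumed example values:
+
+ ```ini
+ [flake8]
+ max-line-length = 120
+ exclude = .venv,logs
+ ```
+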
146
+ ## Configuration
147
+
148
+ ### Environment Variables
149
+
150
+ Create a `.env` file in your project root:
151
+
152
+ ```env
153
+ # Airflow Configuration
154
+ AIRFLOW_VERSION=2.7.0
155
+ AIRFLOW_UID=50000
156
+ AIRFLOW_GID=0
157
+
158
+ # Database
159
+ POSTGRES_USER=airflow
160
+ POSTGRES_PASSWORD=airflow
161
+ POSTGRES_DB=airflow
162
+
163
+ # Airflow Core
164
+ AIRFLOW__CORE__EXECUTOR=CeleryExecutor
165
+ AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION=true
166
+ AIRFLOW__CORE__LOAD_EXAMPLES=false
167
+
168
+ # Webserver
169
+ AIRFLOW__WEBSERVER__EXPOSE_CONFIG=true
170
+ ```
171
+
172
+ ### Docker Compose Source
173
+
174
+ The package ships its own `docker-compose.yml` and copies it into the current directory when none is present. The bundled file is based on the LEMA UFPB Airflow repository:
175
+ ```
176
+ https://gitlab.lema.ufpb.br/hub/airflow/-/raw/main/docker-compose.yml
177
+ ```
178
+
179
+ An existing docker-compose.yml in your project is never overwritten, so local changes are preserved; to refresh it from the packaged version, see the sketch below.
180
+
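+ To pick up a newer bundled compose file after upgrading the package, remove the local copy and run any command again; a minimal sketch:
+
+ ```bash
+ rm docker-compose.yml
+ airflow-cli up   # copies the bundled file back before starting the services
+ ```
+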
181
+ ## Development
182
+
183
+ ### Setting up Development Environment
184
+
185
+ ```bash
186
+ # Clone the repository
187
+ git clone https://gitlab.lema.ufpb.br/back-end/lema-ufpb/airflow-docker-helper.git
188
+ cd airflow-docker-helper
189
+
190
+ # Create virtual environment
191
+ python -m venv venv
192
+ source venv/bin/activate # On Windows: venv\Scripts\activate
193
+
194
+ # Install in development mode
195
+ pip install -e ".[dev]"
196
+
197
+ # Install pre-commit hooks
198
+ pre-commit install
199
+ ```
200
+
201
+ ### Building from Source
202
+
203
+ ```bash
204
+ # Build wheel
205
+ python -m build
206
+
207
+ # Install built package
208
+ pip install dist/airflow_cli-*.whl
209
+ ```
210
+
211
+ ### Running Tests
212
+
213
+ ```bash
214
+ # Run all tests
215
+ pytest
216
+
217
+ # Run with coverage
218
+ pytest --cov=airflow_cli
219
+
220
+ # Run specific test file
221
+ pytest tests/test_cli.py
222
+
223
+ # Run with verbose output
224
+ pytest -v
225
+ ```
226
+
227
+ ### Code Quality
228
+
229
+ ```bash
230
+ # Format code
231
+ black airflow_cli/
232
+
233
+ # Sort imports
234
+ isort airflow_cli/
235
+
236
+ # Lint code
237
+ flake8 airflow_cli/
238
+
239
+ # Type checking
240
+ mypy airflow_cli/
241
+ ```
242
+
243
+ ## Troubleshooting
244
+
245
+ ### Common Issues
246
+
247
+ #### Permission Errors
248
+ ```bash
249
+ # Fix permissions for Airflow directories
250
+ sudo chown -R $(id -u):$(id -g) dags/ logs/ plugins/
251
+ ```
252
+
253
+ #### Port Already in Use
254
+ ```bash
255
+ # Check what's using port 8080
256
+ lsof -i :8080
257
+
258
+ # Use a different host port: edit the "8080:8080" mapping under
259
+ # airflow-apiserver in docker-compose.yml (e.g. "8081:8080"), then run `airflow-cli up` again
260
+ ```
261
+
262
+ #### Database Connection Issues
263
+ ```bash
264
+ # Reset the metadata database by removing the Postgres volume
265
+ docker compose down -v
266
+ airflow-cli up
267
+ ```
268
+
269
+ #### DAG Import Errors
270
+ ```bash
271
+ # Check DAG code with flake8
272
+ airflow-cli fix-code
273
+
274
+ # View scheduler logs
275
+ docker logs airflow-scheduler-container
276
+ ```
277
+
278
+ ### Getting Help
279
+
280
+ ```bash
281
+ # Show help for main command
282
+ airflow-cli --help
283
+
284
+ # Show help for specific command
285
+ airflow-cli up --help
286
+ ```
287
+
288
+ ## Examples
289
+
290
+ ### Example DAG
291
+
292
+ ```python
293
+ from datetime import datetime, timedelta
294
+ from airflow import DAG
295
+ from airflow.operators.python import PythonOperator
296
+
297
+ default_args = {
298
+ 'owner': 'data-team',
299
+ 'depends_on_past': False,
300
+ 'start_date': datetime(2023, 1, 1),
301
+ 'email_on_failure': False,
302
+ 'email_on_retry': False,
303
+ 'retries': 1,
304
+ 'retry_delay': timedelta(minutes=5),
305
+ }
306
+
307
+ dag = DAG(
308
+ 'example_pipeline',
309
+ default_args=default_args,
310
+ description='An example data pipeline',
311
+ schedule=timedelta(days=1),
312
+ catchup=False,
313
+ tags=['example', 'tutorial'],
314
+ )
315
+
316
+ def extract_data():
317
+ print("Extracting data...")
318
+ return "data_extracted"
319
+
320
+ def transform_data():
321
+ print("Transforming data...")
322
+ return "data_transformed"
323
+
324
+ def load_data():
325
+ print("Loading data...")
326
+ return "data_loaded"
327
+
328
+ extract_task = PythonOperator(
329
+ task_id='extract',
330
+ python_callable=extract_data,
331
+ dag=dag,
332
+ )
333
+
334
+ transform_task = PythonOperator(
335
+ task_id='transform',
336
+ python_callable=transform_data,
337
+ dag=dag,
338
+ )
339
+
340
+ load_task = PythonOperator(
341
+ task_id='load',
342
+ python_callable=load_data,
343
+ dag=dag,
344
+ )
345
+
346
+ extract_task >> transform_task >> load_task
347
+ ```
348
+
349
+ ## Contributing
350
+
351
+ 1. Fork the repository
352
+ 2. Create a feature branch (`git checkout -b feature/amazing-feature`)
353
+ 3. Commit your changes (`git commit -m 'Add amazing feature'`)
354
+ 4. Push to the branch (`git push origin feature/amazing-feature`)
355
+ 5. Open a Merge Request
356
+
357
+ ### Development Guidelines
358
+
359
+ - Follow PEP 8 style guide
360
+ - Add tests for new features
361
+ - Update documentation
362
+ - Ensure all tests pass
363
+ - Use meaningful commit messages
364
+
365
+ ## License
366
+
367
+ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
368
+
369
+ ## Support
370
+
371
+ - 📧 Email: ufpb.lema@gmail.com
372
+ - 🐛 Issues: [GitLab Issues](https://gitlab.lema.ufpb.br/back-end/lema-ufpb/airflow-docker-helper/-/issues)
373
+ - 📖 Documentation: [Wiki](https://gitlab.lema.ufpb.br/back-end/lema-ufpb/airflow-docker-helper/-/wikis/home)
374
+
375
+ ## Changelog
376
+
377
+ See [CHANGELOG.md](CHANGELOG.md) for a list of changes and version history.
378
+
379
+ ---
380
+
381
+ Made with ❤️ by the LEMA UFPB team
airflow_cli-0.1.0.dist-info/RECORD ADDED
@@ -0,0 +1,13 @@
1
+ airflow_cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
2
+ airflow_cli/cli.py,sha256=Sddvg-_7rGZSqhoKL4RJjMnn3sn09cjDkLhH0yOsfMA,1527
3
+ airflow_cli/docker-compose.yml,sha256=9jilz2y47ZLgmsSOzGSrkLsbQuhWuKxXaXaVndqbxbg,6927
4
+ airflow_cli/docker_utils.py,sha256=UhwlmkWAoiIeFt7KaWrTAYPpRhcilp_TR0vtW3tM42w,1267
5
+ airflow_cli/env_utils.py,sha256=f0L9RBwylypGx5U8BjZlLEtX3ldYNYma7xrLwF0aOU8,1110
6
+ airflow_cli/menu.py,sha256=ACEvSnCR410IGaSL129Bl4LGuturme5ZY0p67T_Iy4s,1435
7
+ airflow_cli/os_utils.py,sha256=bzvJbIRQwYErfs7oyCUVEg2sD5vLxapMAQgjFM9fuC0,1633
8
+ airflow_cli-0.1.0.dist-info/licenses/LICENSE,sha256=UrTV1prTxEoB9MSB9MG4BkGhNDk9EQx5tDTmuQ2C4u4,1065
9
+ airflow_cli-0.1.0.dist-info/METADATA,sha256=5K5CGo5wtEVsOONaFlUj5vRe1lurXMZq0isXNSI8bBY,7353
10
+ airflow_cli-0.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
11
+ airflow_cli-0.1.0.dist-info/entry_points.txt,sha256=TyX3EFPfUxoxQ8aazxJREZl6VsOobW_JJlgA7jGpIBU,53
12
+ airflow_cli-0.1.0.dist-info/top_level.txt,sha256=oD1Su7hc6_0veTZoMGIZph94T9S-dtsL-G_IOLuqqVM,12
13
+ airflow_cli-0.1.0.dist-info/RECORD,,
airflow_cli-0.1.0.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (80.9.0)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
airflow_cli-0.1.0.dist-info/entry_points.txt ADDED
@@ -0,0 +1,2 @@
1
+ [console_scripts]
2
+ airflow-cli = airflow_cli.cli:main
airflow_cli-0.1.0.dist-info/licenses/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025 LEMA-UFPB
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
airflow_cli-0.1.0.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
1
+ airflow_cli