gemini-cli-headless 1.0.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,89 @@
1
+ Metadata-Version: 2.4
2
+ Name: gemini-cli-headless
3
+ Version: 1.0.0
4
+ Summary: A resilient, zero-dependency Python wrapper for the official Google Gemini Node.js CLI.
5
+ Author: jarek108
6
+ Project-URL: Homepage, https://github.com/jarek108/gemini-cli-headless
7
+ Project-URL: Repository, https://github.com/jarek108/gemini-cli-headless
8
+ Project-URL: Issues, https://github.com/jarek108/gemini-cli-headless/issues
9
+ Keywords: gemini,google-ai,cli,headless,wrapper,llm
10
+ Classifier: Programming Language :: Python :: 3
11
+ Classifier: License :: OSI Approved :: MIT License
12
+ Classifier: Operating System :: OS Independent
13
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
14
+ Classifier: Intended Audience :: Developers
15
+ Classifier: Development Status :: 5 - Production/Stable
16
+ Requires-Python: >=3.9
17
+ Description-Content-Type: text/markdown
18
+
19
+ # gemini-cli-headless
20
+
21
+ A standalone, zero-dependency Python wrapper for executing the official Node.js Google Gemini CLI (`@google/gemini-cli`) in fully programmatic, headless mode.
22
+
23
+ > **Note:**
24
+ > While `gemini-cli-headless` is a powerful standalone library, it also serves as the foundational execution engine for **[Cortex](https://github.com/jarek108/Cortex)**, an Autonomous Developer OS for multi-agent software engineering.
25
+
26
+ ## Why this wrapper?
27
+ While the official Python SDKs are excellent for standard API calls, the `@google/gemini-cli` provides powerful built-in features for developers working with local codebases (e.g., attaching entire directories via `@files` or resuming specific `sessionId` chat histories from the CLI's internal cache).
28
+
29
+ This wrapper allows you to leverage those CLI-specific features headlessly within your Python scripts, data pipelines, or RAG systems. It is built for absolute resilience, featuring native retry loops for transient infrastructure drops.
30
+
31
+ ## Features
32
+ * **Zero Dependencies**: Pure Python standard library (no `requests`, no `aiohttp`).
33
+ * **JSON Parsing**: Automatically requests and safely parses the `--output-format json` from the Node CLI into a clean Python `GeminiSession` dataclass.
34
+ * **Token & Cost Stats**: Aggregates `inputTokens`, `outputTokens`, and `cachedTokens` from the raw JSON response.
35
+ * **Session Resumption**: Supports the `-r <sessionId>` flag, and even allows you to inject local `.json` session files directly into the Node CLI cache before execution.
36
+ * **Built-in Resilience**: Automatically catches transient API drops (like 503 errors) and malformed JSON, retrying the subprocess call seamlessly without crashing your script.
37
+
38
+ ## Installation
39
+
40
+ ```bash
41
+ # Make sure you have the Node.js CLI installed globally first:
42
+ npm install -g @google/gemini-cli
43
+
44
+ # Then install this Python wrapper:
45
+ pip install git+https://github.com/jarek108/gemini-cli-headless.git
46
+ ```
47
+
48
+ ## Quick Start
49
+
50
+ ```python
51
+ from gemini_cli_headless import run_gemini_cli_headless
52
+
53
+ # Provide your API key explicitly, or let the wrapper use your environment variables
54
+ my_key = "AIzaSy..."
55
+
56
+ # Execute a command headlessly with built-in retries
57
+ session = run_gemini_cli_headless(
58
+ prompt="Explain quantum computing in one sentence.",
59
+ api_key=my_key,
60
+ max_retries=3
61
+ )
62
+
63
+ print(f"Cost basis - Input: {session.stats.get('inputTokens')}, Output: {session.stats.get('outputTokens')}")
64
+ print(f"Response: {session.text}")
65
+ print(f"Session ID: {session.session_id}")
66
+ ```
67
+
68
+ ## Portable Memory (Resuming from a local file)
69
+
70
+ Instead of relying on the global CLI cache, you can keep session files directly in your project and inject them on the fly.
71
+
72
+ ```python
73
+ import shutil
74
+ from gemini_cli_headless import run_gemini_cli_headless
75
+
76
+ # 1. First interaction
77
+ session = run_gemini_cli_headless("Remember the secret password is 'Rosebud'.")
78
+
79
+ # 2. Save the session to your local project
80
+ shutil.copy2(session.session_path, "my_context.json")
81
+
82
+ # ... Days later on a different machine ...
83
+
84
+ # 3. Resume the conversation later from your local file!
85
+ new_session = run_gemini_cli_headless(
86
+ prompt="What was the secret password?",
87
+ session_to_resume="my_context.json"
88
+ )
89
+ ```
@@ -0,0 +1,71 @@
1
+ # gemini-cli-headless
2
+
3
+ A standalone, zero-dependency Python wrapper for executing the official Node.js Google Gemini CLI (`@google/gemini-cli`) in fully programmatic, headless mode.
4
+
5
+ > **Note:**
6
+ > While `gemini-cli-headless` is a powerful standalone library, it also serves as the foundational execution engine for **[Cortex](https://github.com/jarek108/Cortex)**, an Autonomous Developer OS for multi-agent software engineering.
7
+
8
+ ## Why this wrapper?
9
+ While the official Python SDKs are excellent for standard API calls, the `@google/gemini-cli` provides powerful built-in features for developers working with local codebases (e.g., attaching entire directories via `@files` or resuming specific `sessionId` chat histories from the CLI's internal cache).
10
+
11
+ This wrapper allows you to leverage those CLI-specific features headlessly within your Python scripts, data pipelines, or RAG systems. It is built for absolute resilience, featuring native retry loops for transient infrastructure drops.
12
+
13
+ ## Features
14
+ * **Zero Dependencies**: Pure Python standard library (no `requests`, no `aiohttp`).
15
+ * **JSON Parsing**: Automatically requests and safely parses the `--output-format json` from the Node CLI into a clean Python `GeminiSession` dataclass.
16
+ * **Token & Cost Stats**: Aggregates `inputTokens`, `outputTokens`, and `cachedTokens` from the raw JSON response.
17
+ * **Session Resumption**: Supports the `-r <sessionId>` flag, and even allows you to inject local `.json` session files directly into the Node CLI cache before execution.
18
+ * **Built-in Resilience**: Automatically catches transient API drops (like 503 errors) and malformed JSON, retrying the subprocess call seamlessly without crashing your script.
19
+
20
+ ## Installation
21
+
22
+ ```bash
23
+ # Make sure you have the Node.js CLI installed globally first:
24
+ npm install -g @google/gemini-cli
25
+
26
+ # Then install this Python wrapper:
27
+ pip install git+https://github.com/jarek108/gemini-cli-headless.git
28
+ ```
29
+
30
+ ## Quick Start
31
+
32
+ ```python
33
+ from gemini_cli_headless import run_gemini_cli_headless
34
+
35
+ # Provide your API key explicitly, or let the wrapper use your environment variables
36
+ my_key = "AIzaSy..."
37
+
38
+ # Execute a command headlessly with built-in retries
39
+ session = run_gemini_cli_headless(
40
+ prompt="Explain quantum computing in one sentence.",
41
+ api_key=my_key,
42
+ max_retries=3
43
+ )
44
+
45
+ print(f"Cost basis - Input: {session.stats.get('inputTokens')}, Output: {session.stats.get('outputTokens')}")
46
+ print(f"Response: {session.text}")
47
+ print(f"Session ID: {session.session_id}")
48
+ ```
49
+
50
+ ## Portable Memory (Resuming from a local file)
51
+
52
+ Instead of relying on the global CLI cache, you can keep session files directly in your project and inject them on the fly.
53
+
54
+ ```python
55
+ import shutil
56
+ from gemini_cli_headless import run_gemini_cli_headless
57
+
58
+ # 1. First interaction
59
+ session = run_gemini_cli_headless("Remember the secret password is 'Rosebud'.")
60
+
61
+ # 2. Save the session to your local project
62
+ shutil.copy2(session.session_path, "my_context.json")
63
+
64
+ # ... Days later on a different machine ...
65
+
66
+ # 3. Resume the conversation later from your local file!
67
+ new_session = run_gemini_cli_headless(
68
+ prompt="What was the secret password?",
69
+ session_to_resume="my_context.json"
70
+ )
71
+ ```
@@ -0,0 +1,89 @@
1
+ Metadata-Version: 2.4
2
+ Name: gemini-cli-headless
3
+ Version: 1.0.0
4
+ Summary: A resilient, zero-dependency Python wrapper for the official Google Gemini Node.js CLI.
5
+ Author: jarek108
6
+ Project-URL: Homepage, https://github.com/jarek108/gemini-cli-headless
7
+ Project-URL: Repository, https://github.com/jarek108/gemini-cli-headless
8
+ Project-URL: Issues, https://github.com/jarek108/gemini-cli-headless/issues
9
+ Keywords: gemini,google-ai,cli,headless,wrapper,llm
10
+ Classifier: Programming Language :: Python :: 3
11
+ Classifier: License :: OSI Approved :: MIT License
12
+ Classifier: Operating System :: OS Independent
13
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
14
+ Classifier: Intended Audience :: Developers
15
+ Classifier: Development Status :: 5 - Production/Stable
16
+ Requires-Python: >=3.9
17
+ Description-Content-Type: text/markdown
18
+
19
+ # gemini-cli-headless
20
+
21
+ A standalone, zero-dependency Python wrapper for executing the official Node.js Google Gemini CLI (`@google/gemini-cli`) in fully programmatic, headless mode.
22
+
23
+ > **Note:**
24
+ > While `gemini-cli-headless` is a powerful standalone library, it also serves as the foundational execution engine for **[Cortex](https://github.com/jarek108/Cortex)**, an Autonomous Developer OS for multi-agent software engineering.
25
+
26
+ ## Why this wrapper?
27
+ While the official Python SDKs are excellent for standard API calls, the `@google/gemini-cli` provides powerful built-in features for developers working with local codebases (e.g., attaching entire directories via `@files` or resuming specific `sessionId` chat histories from the CLI's internal cache).
28
+
29
+ This wrapper allows you to leverage those CLI-specific features headlessly within your Python scripts, data pipelines, or RAG systems. It is built for absolute resilience, featuring native retry loops for transient infrastructure drops.
30
+
31
+ ## Features
32
+ * **Zero Dependencies**: Pure Python standard library (no `requests`, no `aiohttp`).
33
+ * **JSON Parsing**: Automatically requests and safely parses the `--output-format json` from the Node CLI into a clean Python `GeminiSession` dataclass.
34
+ * **Token & Cost Stats**: Aggregates `inputTokens`, `outputTokens`, and `cachedTokens` from the raw JSON response.
35
+ * **Session Resumption**: Supports the `-r <sessionId>` flag, and even allows you to inject local `.json` session files directly into the Node CLI cache before execution.
36
+ * **Built-in Resilience**: Automatically catches transient API drops (like 503 errors) and malformed JSON, retrying the subprocess call seamlessly without crashing your script.
37
+
38
+ ## Installation
39
+
40
+ ```bash
41
+ # Make sure you have the Node.js CLI installed globally first:
42
+ npm install -g @google/gemini-cli
43
+
44
+ # Then install this Python wrapper:
45
+ pip install git+https://github.com/jarek108/gemini-cli-headless.git
46
+ ```
47
+
48
+ ## Quick Start
49
+
50
+ ```python
51
+ from gemini_cli_headless import run_gemini_cli_headless
52
+
53
+ # Provide your API key explicitly, or let the wrapper use your environment variables
54
+ my_key = "AIzaSy..."
55
+
56
+ # Execute a command headlessly with built-in retries
57
+ session = run_gemini_cli_headless(
58
+ prompt="Explain quantum computing in one sentence.",
59
+ api_key=my_key,
60
+ max_retries=3
61
+ )
62
+
63
+ print(f"Cost basis - Input: {session.stats.get('inputTokens')}, Output: {session.stats.get('outputTokens')}")
64
+ print(f"Response: {session.text}")
65
+ print(f"Session ID: {session.session_id}")
66
+ ```
67
+
68
+ ## Portable Memory (Resuming from a local file)
69
+
70
+ Instead of relying on the global CLI cache, you can keep session files directly in your project and inject them on the fly.
71
+
72
+ ```python
73
+ import shutil
74
+ from gemini_cli_headless import run_gemini_cli_headless
75
+
76
+ # 1. First interaction
77
+ session = run_gemini_cli_headless("Remember the secret password is 'Rosebud'.")
78
+
79
+ # 2. Save the session to your local project
80
+ shutil.copy2(session.session_path, "my_context.json")
81
+
82
+ # ... Days later on a different machine ...
83
+
84
+ # 3. Resume the conversation later from your local file!
85
+ new_session = run_gemini_cli_headless(
86
+ prompt="What was the secret password?",
87
+ session_to_resume="my_context.json"
88
+ )
89
+ ```
@@ -0,0 +1,7 @@
1
+ README.md
2
+ gemini_cli_headless.py
3
+ pyproject.toml
4
+ gemini_cli_headless.egg-info/PKG-INFO
5
+ gemini_cli_headless.egg-info/SOURCES.txt
6
+ gemini_cli_headless.egg-info/dependency_links.txt
7
+ gemini_cli_headless.egg-info/top_level.txt
@@ -0,0 +1 @@
1
+ gemini_cli_headless
@@ -0,0 +1,278 @@
1
+ """
2
+ Standalone programmatic wrapper for the Gemini CLI in headless mode.
3
+ """
4
+
5
+ import subprocess
6
+ import os
7
+ import json
8
+ import shutil
9
+ import logging
10
+ import re
11
+ import glob
12
+ import time
13
+ from dataclasses import dataclass, field
14
+ from typing import Optional, List, Dict, Any, Tuple
15
+
16
+ logger = logging.getLogger("gemini_cli_headless")
17
+
18
@dataclass
class GeminiSession:
    """
    Represents a completed Gemini CLI session interaction.

    Returned by run_gemini_cli_headless(); bundles the model's text
    response with session bookkeeping and usage statistics parsed from
    the CLI's `--output-format json` payload.
    """
    # Final text response produced by the model.
    text: str
    # Session identifier reported by the CLI (reusable with the `-r` flag).
    session_id: str
    # Path to the session .json file in the CLI's local chat cache.
    session_path: str
    # Aggregated counters (inputTokens, outputTokens, cachedTokens, ...).
    stats: Dict[str, Any] = field(default_factory=dict)
    # API errors observed during the run, as {"code": ..., "message": ...} dicts.
    api_errors: List[Dict[str, Any]] = field(default_factory=list)
    # Raw parsed JSON payload from the CLI, kept for advanced inspection.
    raw_data: Dict[str, Any] = field(default_factory=dict)
29
+
30
+ def _get_cli_chat_dir(project_name: str) -> str:
31
+ """Returns the internal Gemini CLI chat directory for a given project."""
32
+ return os.path.join(os.path.expanduser("~"), ".gemini", "tmp", project_name, "chats")
33
+
34
+ def _sanitize_project_name(name: str) -> str:
35
+ """Sanitizes a string to match the Gemini CLI project name convention."""
36
+ sanitized = re.sub(r'[^a-z0-9]+', '-', name.lower())
37
+ return sanitized.strip('-')
38
+
39
+ def _find_session_file(directory: str, session_id: str) -> Optional[str]:
40
+ """Locates a session file matching the ID prefix in the given directory."""
41
+ if not os.path.exists(directory):
42
+ return None
43
+ short_id = session_id[:8]
44
+ patterns = [f"session-*{short_id}*.json", f"*{short_id}*.json"]
45
+ for pattern in patterns:
46
+ matches = glob.glob(os.path.join(directory, pattern))
47
+ if matches:
48
+ return sorted(matches, key=os.path.getmtime, reverse=True)[0]
49
+ return None
50
+
51
def run_gemini_cli_headless(
    prompt: str,
    model_id: Optional[str] = None,
    files: Optional[List[str]] = None,
    session_id: Optional[str] = None,
    session_to_resume: Optional[str] = None,
    project_name: Optional[str] = None,
    cwd: Optional[str] = None,
    extra_args: Optional[List[str]] = None,
    stream_output: bool = False,
    # --- Resilience & Auth Params ---
    api_key: Optional[str] = None,
    max_retries: int = 3,
    retry_delay_seconds: float = 5.0
) -> GeminiSession:
    """
    Run the Gemini CLI headlessly, retrying transient failures.

    Args:
        prompt: The user prompt to send to the model.
        model_id: Optional model identifier forwarded via ``-m``.
        files: Optional file paths attached to the prompt as ``@<path>``.
        session_id: Existing CLI session id forwarded via ``-r``.
        session_to_resume: Either a session id or a path to a local session
            ``.json`` file injected into the CLI cache before execution.
        project_name: Overrides the CLI project name (defaults to the
            sanitized basename of ``cwd`` / the current directory).
        cwd: Working directory for the subprocess.
        extra_args: Extra raw arguments appended to the CLI command.
        stream_output: If True, echo CLI output to stdout while reading it.
        api_key: Explicit key exported as ``GEMINI_API_KEY`` to the subprocess.
        max_retries: Total attempts; values below 1 are treated as 1.
        retry_delay_seconds: Delay between attempts.

    Returns:
        A GeminiSession describing the completed interaction.

    Raises:
        RuntimeError: If every attempt fails or the CLI reports a fatal error.
        json.JSONDecodeError: If output parsing fails on the final attempt.
        FileNotFoundError, ValueError: For an invalid ``session_to_resume`` file
            (raised immediately; these are not retried).
    """
    # BUGFIX: clamp to at least one attempt. Previously max_retries <= 0
    # skipped the loop entirely and the function silently returned None,
    # violating the declared GeminiSession return type.
    attempts = max(1, max_retries)
    last_exception = None

    for attempt in range(attempts):
        try:
            return _execute_single_run(
                prompt=prompt,
                model_id=model_id,
                files=files,
                session_id=session_id,
                session_to_resume=session_to_resume,
                project_name=project_name,
                cwd=cwd,
                extra_args=extra_args,
                stream_output=stream_output,
                api_key=api_key
            )
        except (RuntimeError, json.JSONDecodeError) as e:
            # Transient infrastructure drops and garbled output are retried;
            # anything else (e.g. bad session files) propagates immediately.
            last_exception = e
            if attempt < attempts - 1:
                logger.warning(
                    f"Gemini CLI failed (Attempt {attempt + 1}/{attempts}). "
                    f"Retrying in {retry_delay_seconds}s... Error: {e}"
                )
                time.sleep(retry_delay_seconds)
            else:
                logger.error(f"Gemini CLI failed all {attempts} attempts.")
                raise last_exception
93
+
94
+ def _execute_single_run(
95
+ prompt: str,
96
+ model_id: Optional[str] = None,
97
+ files: Optional[List[str]] = None,
98
+ session_id: Optional[str] = None,
99
+ session_to_resume: Optional[str] = None,
100
+ project_name: Optional[str] = None,
101
+ cwd: Optional[str] = None,
102
+ extra_args: Optional[List[str]] = None,
103
+ stream_output: bool = False,
104
+ api_key: Optional[str] = None
105
+ ) -> GeminiSession:
106
+ """Internal execution logic for a single CLI invocation."""
107
+
108
+ if not project_name:
109
+ base_dir = cwd if cwd else os.getcwd()
110
+ project_name = _sanitize_project_name(os.path.basename(base_dir))
111
+
112
+ session_id_to_use = session_id
113
+ cli_dir = _get_cli_chat_dir(project_name)
114
+
115
+ if session_to_resume:
116
+ if session_to_resume.lower().endswith('.json') or os.path.isfile(session_to_resume):
117
+ if not os.path.exists(session_to_resume):
118
+ raise FileNotFoundError(f"Session file not found: {session_to_resume}")
119
+ with open(session_to_resume, 'r', encoding='utf-8') as f:
120
+ data = json.load(f)
121
+ session_id_to_use = data.get("sessionId")
122
+ if not session_id_to_use:
123
+ raise ValueError(f"File {session_to_resume} is not a valid Gemini session")
124
+ if session_id_to_use.startswith('init-'):
125
+ session_id_to_use = None
126
+ else:
127
+ os.makedirs(cli_dir, exist_ok=True)
128
+ target_path = os.path.join(cli_dir, f"session-{session_id_to_use}.json")
129
+ shutil.copy2(session_to_resume, target_path)
130
+ else:
131
+ session_id_to_use = session_to_resume
132
+
133
+ # Construct prompt string with attachments
134
+ full_prompt = prompt
135
+ if files:
136
+ for f_path in files:
137
+ if os.path.exists(f_path):
138
+ full_prompt += f" @{os.path.abspath(f_path)}"
139
+
140
+ # Build command
141
+ cmd_executable = "gemini.cmd" if os.name == 'nt' else "gemini"
142
+ cmd = [cmd_executable, "-y", "-o", "json"]
143
+ if model_id: cmd.extend(["-m", model_id])
144
+ if session_id_to_use: cmd.extend(["-r", session_id_to_use])
145
+ if extra_args: cmd.extend(extra_args)
146
+
147
+ # Environment injection
148
+ env = os.environ.copy()
149
+ if api_key:
150
+ env["GEMINI_API_KEY"] = api_key
151
+
152
+ # Execute by piping the prompt to stdin
153
+ process = subprocess.Popen(
154
+ cmd,
155
+ cwd=cwd,
156
+ env=env,
157
+ stdin=subprocess.PIPE,
158
+ stdout=subprocess.PIPE,
159
+ stderr=subprocess.STDOUT,
160
+ text=True,
161
+ encoding='utf-8',
162
+ bufsize=1 # Line buffered
163
+ )
164
+
165
+ # Send the prompt and close stdin
166
+ if full_prompt:
167
+ process.stdin.write(full_prompt)
168
+ process.stdin.close()
169
+
170
+ combined_output = ""
171
+ # Read output in real-time
172
+ while True:
173
+ line = process.stdout.readline()
174
+ if not line and process.poll() is not None:
175
+ break
176
+ if line:
177
+ combined_output += line
178
+ if stream_output:
179
+ print(line, end="", flush=True)
180
+
181
+ process.stdout.close()
182
+ return_code = process.wait()
183
+
184
+ if not combined_output.strip():
185
+ raise RuntimeError(f"CLI returned absolutely empty output.")
186
+
187
+ # Find all JSON-looking blocks and try to parse them from the end
188
+ response_data = None
189
+ last_error = None
190
+
191
+ search_pos = len(combined_output)
192
+ while search_pos > 0:
193
+ start_idx = combined_output.rfind('{', 0, search_pos)
194
+ if start_idx == -1:
195
+ break
196
+
197
+ brace_count = 0
198
+ end_idx = -1
199
+ for i in range(start_idx, len(combined_output)):
200
+ if combined_output[i] == '{':
201
+ brace_count += 1
202
+ elif combined_output[i] == '}':
203
+ brace_count -= 1
204
+ if brace_count == 0:
205
+ end_idx = i
206
+ break
207
+
208
+ if end_idx != -1:
209
+ json_str = combined_output[start_idx:end_idx+1]
210
+ try:
211
+ candidate = json.loads(json_str)
212
+ if isinstance(candidate, dict) and ("session_id" in candidate or "response" in candidate or "text" in candidate or "error" in candidate):
213
+ response_data = candidate
214
+ break
215
+ except json.JSONDecodeError as e:
216
+ last_error = e
217
+
218
+ search_pos = start_idx
219
+
220
+ if not response_data:
221
+ raise RuntimeError(f"CLI output did not contain a valid Gemini response JSON. Last error: {last_error}\nOutput: {combined_output[:500]}...")
222
+
223
+ api_errors = []
224
+ retry_matches = re.findall(r"failed with status (\d+)", combined_output)
225
+ for code in retry_matches:
226
+ api_errors.append({"code": int(code), "message": "Transient API Error (Retry)"})
227
+
228
+ if "error" in response_data and response_data["error"]:
229
+ err = response_data["error"]
230
+ msg = err.get("message") if isinstance(err, dict) else str(err)
231
+ code = err.get("code", "unknown") if isinstance(err, dict) else "unknown"
232
+ api_errors.append({"code": code, "message": msg})
233
+ if not response_data.get("response") and not response_data.get("text"):
234
+ raise RuntimeError(f"Gemini Error: {msg} (Code: {code})")
235
+
236
+ final_session_id = response_data.get("session_id") or session_id_to_use
237
+ final_session_path = _find_session_file(cli_dir, final_session_id)
238
+ if not final_session_path:
239
+ tmp_root = os.path.join(os.path.expanduser("~"), ".gemini", "tmp")
240
+ if os.path.exists(tmp_root):
241
+ for p_dir in os.listdir(tmp_root):
242
+ candidate = _find_session_file(os.path.join(tmp_root, p_dir, "chats"), final_session_id)
243
+ if candidate:
244
+ final_session_path = candidate
245
+ break
246
+ if not final_session_path:
247
+ final_session_path = os.path.join(cli_dir, f"session-{final_session_id}.json")
248
+
249
+ stats_raw = response_data.get("stats", {})
250
+ aggregated_stats = {
251
+ "inputTokens": 0,
252
+ "outputTokens": 0,
253
+ "thoughtTokens": 0,
254
+ "cachedTokens": 0,
255
+ "totalRequests": 0,
256
+ "totalErrors": 0
257
+ }
258
+
259
+ if "models" in stats_raw:
260
+ for model_data in stats_raw["models"].values():
261
+ tokens = model_data.get("tokens", {})
262
+ aggregated_stats["inputTokens"] += tokens.get("input", 0)
263
+ aggregated_stats["outputTokens"] += tokens.get("candidates", 0)
264
+ aggregated_stats["thoughtTokens"] += tokens.get("thoughts", 0)
265
+ aggregated_stats["cachedTokens"] += tokens.get("cached", 0)
266
+
267
+ api = model_data.get("api", {})
268
+ aggregated_stats["totalRequests"] += api.get("totalRequests", 0)
269
+ aggregated_stats["totalErrors"] += api.get("totalErrors", 0)
270
+
271
+ return GeminiSession(
272
+ text=response_data.get("text", "") or response_data.get("response", ""),
273
+ session_id=final_session_id,
274
+ session_path=final_session_path,
275
+ stats=aggregated_stats,
276
+ api_errors=api_errors,
277
+ raw_data=response_data
278
+ )
@@ -0,0 +1,30 @@
1
+ [build-system]
2
+ requires = ["setuptools>=61.0"]
3
+ build-backend = "setuptools.build_meta"
4
+
5
+ [project]
6
+ name = "gemini-cli-headless"
7
+ version = "1.0.0"
8
+ description = "A resilient, zero-dependency Python wrapper for the official Google Gemini Node.js CLI."
9
+ readme = "README.md"
10
+ requires-python = ">=3.9"
11
+ authors = [
12
+ { name="jarek108" }
13
+ ]
14
+ keywords = ["gemini", "google-ai", "cli", "headless", "wrapper", "llm"]
15
+ classifiers = [
16
+ "Programming Language :: Python :: 3",
17
+ "License :: OSI Approved :: MIT License",
18
+ "Operating System :: OS Independent",
19
+ "Topic :: Software Development :: Libraries :: Python Modules",
20
+ "Intended Audience :: Developers",
21
+ "Development Status :: 5 - Production/Stable",
22
+ ]
23
+
24
+ [project.urls]
25
+ Homepage = "https://github.com/jarek108/gemini-cli-headless"
26
+ Repository = "https://github.com/jarek108/gemini-cli-headless"
27
+ Issues = "https://github.com/jarek108/gemini-cli-headless/issues"
28
+
29
+ [tool.setuptools]
30
+ py-modules = ["gemini_cli_headless"]
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+