brynq 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40) hide show
  1. brynq-0.1.0/LICENSE +98 -0
  2. brynq-0.1.0/PKG-INFO +123 -0
  3. brynq-0.1.0/README.md +111 -0
  4. brynq-0.1.0/brynq.egg-info/PKG-INFO +123 -0
  5. brynq-0.1.0/brynq.egg-info/SOURCES.txt +38 -0
  6. brynq-0.1.0/brynq.egg-info/dependency_links.txt +1 -0
  7. brynq-0.1.0/brynq.egg-info/entry_points.txt +2 -0
  8. brynq-0.1.0/brynq.egg-info/requires.txt +2 -0
  9. brynq-0.1.0/brynq.egg-info/top_level.txt +1 -0
  10. brynq-0.1.0/pyproject.toml +21 -0
  11. brynq-0.1.0/runtime/__init__.py +12 -0
  12. brynq-0.1.0/runtime/app.py +1548 -0
  13. brynq-0.1.0/runtime/auth/__init__.py +51 -0
  14. brynq-0.1.0/runtime/auth/callback_server.py +311 -0
  15. brynq-0.1.0/runtime/auth/oauth_manager.py +500 -0
  16. brynq-0.1.0/runtime/auth/providers.py +102 -0
  17. brynq-0.1.0/runtime/auth/token_store.py +295 -0
  18. brynq-0.1.0/runtime/cli.py +989 -0
  19. brynq-0.1.0/runtime/cloud_client.py +380 -0
  20. brynq-0.1.0/runtime/config.py +75 -0
  21. brynq-0.1.0/runtime/executor/__init__.py +56 -0
  22. brynq-0.1.0/runtime/executor/agent_bridge.py +353 -0
  23. brynq-0.1.0/runtime/executor/cloud_bridge.py +647 -0
  24. brynq-0.1.0/runtime/executor/context.py +97 -0
  25. brynq-0.1.0/runtime/executor/local_bridge.py +359 -0
  26. brynq-0.1.0/runtime/executor/plan_executor.py +472 -0
  27. brynq-0.1.0/runtime/executor/plan_validator.py +99 -0
  28. brynq-0.1.0/runtime/protocol/__init__.py +22 -0
  29. brynq-0.1.0/runtime/protocol/schema.py +161 -0
  30. brynq-0.1.0/runtime/protocol/serializer.py +136 -0
  31. brynq-0.1.0/runtime/protocol/signer.py +118 -0
  32. brynq-0.1.0/runtime/reporter/__init__.py +22 -0
  33. brynq-0.1.0/runtime/reporter/metrics.py +473 -0
  34. brynq-0.1.0/runtime/reporter/privacy.py +273 -0
  35. brynq-0.1.0/runtime/sandbox/__init__.py +37 -0
  36. brynq-0.1.0/runtime/sandbox/agent_protocol.py +230 -0
  37. brynq-0.1.0/runtime/sandbox/agent_sandbox.py +706 -0
  38. brynq-0.1.0/runtime/vault/__init__.py +10 -0
  39. brynq-0.1.0/runtime/vault/key_vault.py +694 -0
  40. brynq-0.1.0/setup.cfg +4 -0
brynq-0.1.0/LICENSE ADDED
@@ -0,0 +1,98 @@
1
+ Apache License
2
+ Version 2.0, January 2004
3
+ http://www.apache.org/licenses/
4
+
5
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6
+
7
+ 1. Definitions.
8
+
9
+ "License" shall mean the terms and conditions for use, reproduction, and
10
+ distribution as defined by Sections 1 through 9 of this document.
11
+
12
+ "Licensor" shall mean the copyright owner or entity authorized by the copyright
13
+ owner that is granting the License.
14
+
15
+ "Legal Entity" shall mean the union of the acting entity and all other entities
16
+ that control, are controlled by, or are under common control with that entity.
17
+
18
+ "You" (or "Your") shall mean an individual or Legal Entity exercising
19
+ permissions granted by this License.
20
+
21
+ "Source" form shall mean the preferred form for making modifications.
22
+
23
+ "Object" form shall mean any form resulting from mechanical transformation or
24
+ translation of a Source form.
25
+
26
+ "Work" shall mean the work of authorship made available under the License, as
27
+ indicated by a copyright notice.
28
+
29
+ "Derivative Works" shall mean any work that is based on the Work.
30
+
31
+ "Contribution" shall mean any work of authorship submitted to the Licensor for
32
+ inclusion in the Work.
33
+
34
+ "Contributor" shall mean Licensor and any Legal Entity on behalf of whom a
35
+ Contribution has been received by the Licensor.
36
+
37
+ 2. Grant of Copyright License. Subject to the terms and conditions of this
38
+ License, each Contributor hereby grants to You a perpetual, worldwide,
39
+ non-exclusive, no-charge, royalty-free, irrevocable copyright license to
40
+ reproduce, prepare Derivative Works of, publicly display, publicly perform,
41
+ sublicense, and distribute the Work and such Derivative Works in Source or
42
+ Object form.
43
+
44
+ 3. Grant of Patent License. Subject to the terms and conditions of this
45
+ License, each Contributor hereby grants to You a perpetual, worldwide,
46
+ non-exclusive, no-charge, royalty-free, irrevocable patent license to make,
47
+ have made, use, offer to sell, sell, import, and otherwise transfer the Work.
48
+
49
+ 4. Redistribution. You may reproduce and distribute copies of the Work or
50
+ Derivative Works thereof in any medium, with or without modifications, and in
51
+ Source or Object form, provided that You meet the following conditions:
52
+
53
+ (a) You must give any other recipients of the Work or Derivative Works a copy
54
+ of this License; and
55
+
56
+ (b) You must cause any modified files to carry prominent notices stating that
57
+ You changed the files; and
58
+
59
+ (c) You must retain, in the Source form of any Derivative Works that You
60
+ distribute, all copyright, patent, trademark, and attribution notices
61
+ from the Source form of the Work; and
62
+
63
+ (d) If the Work includes a "NOTICE" text file, You must include a readable
64
+ copy of the attribution notices contained within such NOTICE file.
65
+
66
+ 5. Submission of Contributions. Unless You explicitly state otherwise, any
67
+ Contribution intentionally submitted for inclusion in the Work by You to the
68
+ Licensor shall be under the terms and conditions of this License.
69
+
70
+ 6. Trademarks. This License does not grant permission to use the trade names,
71
+ trademarks, service marks, or product names of the Licensor.
72
+
73
+ 7. Disclaimer of Warranty. The Work is provided on an "AS IS" BASIS, WITHOUT
74
+ WARRANTIES OR CONDITIONS OF ANY KIND.
75
+
76
+ 8. Limitation of Liability. In no event shall any Contributor be liable to You
77
+ for damages, including any direct, indirect, special, incidental, or
78
+ consequential damages.
79
+
80
+ 9. Accepting Warranty or Additional Liability. You may choose to offer, and
81
+ charge a fee for, acceptance of support, warranty, indemnity, or other
82
+ liability obligations.
83
+
84
+ END OF TERMS AND CONDITIONS
85
+
86
+ Copyright 2026 Brynq Inc.
87
+
88
+ Licensed under the Apache License, Version 2.0 (the "License");
89
+ you may not use this file except in compliance with the License.
90
+ You may obtain a copy of the License at
91
+
92
+ http://www.apache.org/licenses/LICENSE-2.0
93
+
94
+ Unless required by applicable law or agreed to in writing, software
95
+ distributed under the License is distributed on an "AS IS" BASIS,
96
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
97
+ See the License for the specific language governing permissions and
98
+ limitations under the License.
brynq-0.1.0/PKG-INFO ADDED
@@ -0,0 +1,123 @@
1
+ Metadata-Version: 2.4
2
+ Name: brynq
3
+ Version: 0.1.0
4
+ Summary: The operating system for AI workforces
5
+ License: Apache-2.0
6
+ Requires-Python: >=3.10
7
+ Description-Content-Type: text/markdown
8
+ License-File: LICENSE
9
+ Requires-Dist: fastapi>=0.110
10
+ Requires-Dist: uvicorn>=0.27
11
+ Dynamic: license-file
12
+
13
+ # Brynq
14
+
15
+ **The operating system for AI workforces.**
16
+
17
+ Chain Claude, Gemini, and open-source models together. Brynq orchestrates them — you keep your keys.
18
+
19
+ ```
20
+ You: "Analyze this report with Claude, get Gemini's opinion, summarize with Llama"
21
+ Brynq: Done. 3 models, 1 chain, 45 seconds.
22
+ ```
23
+
24
+ ## What is Brynq?
25
+
26
+ Brynq lets non-technical users chain AI models together through a simple chat interface. Connect your existing Claude/Gemini subscriptions, add free local models via Ollama, and Brynq orchestrates them into multi-step workflows.
27
+
28
+ - **Subscription login** — Log in with your Claude Pro, Gemini, or ChatGPT account
29
+ - **BYOK API keys** — Or bring your own API keys
30
+ - **Free tier** — Ollama runs locally on your hardware, no keys needed
31
+ - **5 chain strategies** — Sequential, Fan-Out, Debate, Refine, Single
32
+
33
+ ## Quick Start
34
+
35
+ ```bash
36
+ # Install
37
+ pip install brynq
38
+
39
+ # Start
40
+ brynq-runtime start
41
+
42
+ # Chat
43
+ brynq-runtime chat
44
+
45
+ # Run a multi-model chain
46
+ brynq-runtime chain "analyze this data with Claude then summarize with Llama"
47
+
48
+ # Check available models
49
+ brynq-runtime models
50
+ ```
51
+
52
+ ## Three Access Tiers
53
+
54
+ | Tier | Models | Auth | Cost |
55
+ |------|--------|------|------|
56
+ | **Free** | Ollama (Llama 3, Mistral, Phi) | None needed | $0 |
57
+ | **Subscriber** | Claude + Gemini + ChatGPT | OAuth login | Your existing subscription |
58
+ | **Developer** | All + custom models | BYOK API keys | Pay per token |
59
+
60
+ ## Architecture
61
+
62
+ ```
63
+ brynq.ai (Cloud) Your Machine (Runtime)
64
+ ┌──────────────────────┐ ┌──────────────────────────┐
65
+ │ Chain Planner │ signed │ Plan Executor │
66
+ │ Model Scorer │ plans │ ├── Ollama Bridge │→ Local models
67
+ │ Prompt Engine │◄──────────────►│ ├── Cloud Bridge │→ Claude/Gemini
68
+ │ Skill Catalog │ metadata │ ├── Agent Bridge │→ 3rd party
69
+ │ │ only │ OAuth Manager │
70
+ │ Your content NEVER │ │ Key Vault (encrypted) │
71
+ │ touches our servers │ │ Privacy Filter │
72
+ └──────────────────────┘ └──────────────────────────┘
73
+ ```
74
+
75
+ **Your data stays on your machine.** The cloud only sees request descriptions and execution metadata (timing, token counts). Never your documents, never your API keys.
76
+
77
+ ## Project Structure
78
+
79
+ ```
80
+ brynq/
81
+ ├── runtime/ # The thin client (runs on your machine)
82
+ │ ├── app.py # Local server (port 8003)
83
+ │ ├── cli.py # brynq-runtime CLI
84
+ │ ├── executor/ # Plan execution engine
85
+ │ ├── auth/ # OAuth (Anthropic, Google, OpenAI)
86
+ │ ├── vault/ # Encrypted API key storage
87
+ │ ├── sandbox/ # 3rd party agent isolation
88
+ │ └── reporter/ # Privacy-safe metrics
89
+ ├── app/ # Customer-facing web app (React + Vite)
90
+ └── docs/ # Documentation
91
+ ```
92
+
93
+ ## CLI Reference
94
+
95
+ ```bash
96
+ brynq-runtime start # Start runtime + open browser
97
+ brynq-runtime start --no-browser # Start without opening browser
98
+ brynq-runtime stop # Stop the runtime
99
+ brynq-runtime status # Show runtime status
100
+ brynq-runtime models # List available models
101
+ brynq-runtime login claude # OAuth login for Claude
102
+ brynq-runtime login gemini # OAuth login for Gemini
103
+ brynq-runtime chat # Interactive terminal chat
104
+ brynq-runtime chat --model llama3 # Chat with specific model
105
+ brynq-runtime chain "prompt" # Run a multi-model chain
106
+ ```
107
+
108
+ ## Security
109
+
110
+ - **Keys never leave your machine** — whether you log in via OAuth or bring your own API keys, Brynq never stores or proxies your credentials on our servers
111
+ - **Encrypted vault** — Local keys encrypted with Fernet (AES-128-CBC + HMAC-SHA256), PBKDF2 480K iterations, machine-bound
112
+ - **Signed plans** — HMAC-SHA256 signed execution plans with TTL
113
+ - **Privacy filter** — Only timing + token counts leave your machine, never content
114
+ - **Agent sandbox** — 3rd party agents run isolated with path protection and env scrubbing
115
+
116
+ ## License
117
+
118
+ Apache License 2.0 — see [LICENSE](LICENSE)
119
+
120
+ ## Links
121
+
122
+ - **Website**: [brynq.ai](https://brynq.ai)
123
+ - **Docs**: [docs/](docs/)
brynq-0.1.0/README.md ADDED
@@ -0,0 +1,111 @@
1
+ # Brynq
2
+
3
+ **The operating system for AI workforces.**
4
+
5
+ Chain Claude, Gemini, and open-source models together. Brynq orchestrates them — you keep your keys.
6
+
7
+ ```
8
+ You: "Analyze this report with Claude, get Gemini's opinion, summarize with Llama"
9
+ Brynq: Done. 3 models, 1 chain, 45 seconds.
10
+ ```
11
+
12
+ ## What is Brynq?
13
+
14
+ Brynq lets non-technical users chain AI models together through a simple chat interface. Connect your existing Claude/Gemini subscriptions, add free local models via Ollama, and Brynq orchestrates them into multi-step workflows.
15
+
16
+ - **Subscription login** — Log in with your Claude Pro, Gemini, or ChatGPT account
17
+ - **BYOK API keys** — Or bring your own API keys
18
+ - **Free tier** — Ollama runs locally on your hardware, no keys needed
19
+ - **5 chain strategies** — Sequential, Fan-Out, Debate, Refine, Single
20
+
21
+ ## Quick Start
22
+
23
+ ```bash
24
+ # Install
25
+ pip install brynq
26
+
27
+ # Start
28
+ brynq-runtime start
29
+
30
+ # Chat
31
+ brynq-runtime chat
32
+
33
+ # Run a multi-model chain
34
+ brynq-runtime chain "analyze this data with Claude then summarize with Llama"
35
+
36
+ # Check available models
37
+ brynq-runtime models
38
+ ```
39
+
40
+ ## Three Access Tiers
41
+
42
+ | Tier | Models | Auth | Cost |
43
+ |------|--------|------|------|
44
+ | **Free** | Ollama (Llama 3, Mistral, Phi) | None needed | $0 |
45
+ | **Subscriber** | Claude + Gemini + ChatGPT | OAuth login | Your existing subscription |
46
+ | **Developer** | All + custom models | BYOK API keys | Pay per token |
47
+
48
+ ## Architecture
49
+
50
+ ```
51
+ brynq.ai (Cloud) Your Machine (Runtime)
52
+ ┌──────────────────────┐ ┌──────────────────────────┐
53
+ │ Chain Planner │ signed │ Plan Executor │
54
+ │ Model Scorer │ plans │ ├── Ollama Bridge │→ Local models
55
+ │ Prompt Engine │◄──────────────►│ ├── Cloud Bridge │→ Claude/Gemini
56
+ │ Skill Catalog │ metadata │ ├── Agent Bridge │→ 3rd party
57
+ │ │ only │ OAuth Manager │
58
+ │ Your content NEVER │ │ Key Vault (encrypted) │
59
+ │ touches our servers │ │ Privacy Filter │
60
+ └──────────────────────┘ └──────────────────────────┘
61
+ ```
62
+
63
+ **Your data stays on your machine.** The cloud only sees request descriptions and execution metadata (timing, token counts). Never your documents, never your API keys.
64
+
65
+ ## Project Structure
66
+
67
+ ```
68
+ brynq/
69
+ ├── runtime/ # The thin client (runs on your machine)
70
+ │ ├── app.py # Local server (port 8003)
71
+ │ ├── cli.py # brynq-runtime CLI
72
+ │ ├── executor/ # Plan execution engine
73
+ │ ├── auth/ # OAuth (Anthropic, Google, OpenAI)
74
+ │ ├── vault/ # Encrypted API key storage
75
+ │ ├── sandbox/ # 3rd party agent isolation
76
+ │ └── reporter/ # Privacy-safe metrics
77
+ ├── app/ # Customer-facing web app (React + Vite)
78
+ └── docs/ # Documentation
79
+ ```
80
+
81
+ ## CLI Reference
82
+
83
+ ```bash
84
+ brynq-runtime start # Start runtime + open browser
85
+ brynq-runtime start --no-browser # Start without opening browser
86
+ brynq-runtime stop # Stop the runtime
87
+ brynq-runtime status # Show runtime status
88
+ brynq-runtime models # List available models
89
+ brynq-runtime login claude # OAuth login for Claude
90
+ brynq-runtime login gemini # OAuth login for Gemini
91
+ brynq-runtime chat # Interactive terminal chat
92
+ brynq-runtime chat --model llama3 # Chat with specific model
93
+ brynq-runtime chain "prompt" # Run a multi-model chain
94
+ ```
95
+
96
+ ## Security
97
+
98
+ - **Keys never leave your machine** — whether you log in via OAuth or bring your own API keys, Brynq never stores or proxies your credentials on our servers
99
+ - **Encrypted vault** — Local keys encrypted with Fernet (AES-128-CBC + HMAC-SHA256), PBKDF2 480K iterations, machine-bound
100
+ - **Signed plans** — HMAC-SHA256 signed execution plans with TTL
101
+ - **Privacy filter** — Only timing + token counts leave your machine, never content
102
+ - **Agent sandbox** — 3rd party agents run isolated with path protection and env scrubbing
103
+
104
+ ## License
105
+
106
+ Apache License 2.0 — see [LICENSE](LICENSE)
107
+
108
+ ## Links
109
+
110
+ - **Website**: [brynq.ai](https://brynq.ai)
111
+ - **Docs**: [docs/](docs/)
@@ -0,0 +1,123 @@
1
+ Metadata-Version: 2.4
2
+ Name: brynq
3
+ Version: 0.1.0
4
+ Summary: The operating system for AI workforces
5
+ License: Apache-2.0
6
+ Requires-Python: >=3.10
7
+ Description-Content-Type: text/markdown
8
+ License-File: LICENSE
9
+ Requires-Dist: fastapi>=0.110
10
+ Requires-Dist: uvicorn>=0.27
11
+ Dynamic: license-file
12
+
13
+ # Brynq
14
+
15
+ **The operating system for AI workforces.**
16
+
17
+ Chain Claude, Gemini, and open-source models together. Brynq orchestrates them — you keep your keys.
18
+
19
+ ```
20
+ You: "Analyze this report with Claude, get Gemini's opinion, summarize with Llama"
21
+ Brynq: Done. 3 models, 1 chain, 45 seconds.
22
+ ```
23
+
24
+ ## What is Brynq?
25
+
26
+ Brynq lets non-technical users chain AI models together through a simple chat interface. Connect your existing Claude/Gemini subscriptions, add free local models via Ollama, and Brynq orchestrates them into multi-step workflows.
27
+
28
+ - **Subscription login** — Log in with your Claude Pro, Gemini, or ChatGPT account
29
+ - **BYOK API keys** — Or bring your own API keys
30
+ - **Free tier** — Ollama runs locally on your hardware, no keys needed
31
+ - **5 chain strategies** — Sequential, Fan-Out, Debate, Refine, Single
32
+
33
+ ## Quick Start
34
+
35
+ ```bash
36
+ # Install
37
+ pip install brynq
38
+
39
+ # Start
40
+ brynq-runtime start
41
+
42
+ # Chat
43
+ brynq-runtime chat
44
+
45
+ # Run a multi-model chain
46
+ brynq-runtime chain "analyze this data with Claude then summarize with Llama"
47
+
48
+ # Check available models
49
+ brynq-runtime models
50
+ ```
51
+
52
+ ## Three Access Tiers
53
+
54
+ | Tier | Models | Auth | Cost |
55
+ |------|--------|------|------|
56
+ | **Free** | Ollama (Llama 3, Mistral, Phi) | None needed | $0 |
57
+ | **Subscriber** | Claude + Gemini + ChatGPT | OAuth login | Your existing subscription |
58
+ | **Developer** | All + custom models | BYOK API keys | Pay per token |
59
+
60
+ ## Architecture
61
+
62
+ ```
63
+ brynq.ai (Cloud) Your Machine (Runtime)
64
+ ┌──────────────────────┐ ┌──────────────────────────┐
65
+ │ Chain Planner │ signed │ Plan Executor │
66
+ │ Model Scorer │ plans │ ├── Ollama Bridge │→ Local models
67
+ │ Prompt Engine │◄──────────────►│ ├── Cloud Bridge │→ Claude/Gemini
68
+ │ Skill Catalog │ metadata │ ├── Agent Bridge │→ 3rd party
69
+ │ │ only │ OAuth Manager │
70
+ │ Your content NEVER │ │ Key Vault (encrypted) │
71
+ │ touches our servers │ │ Privacy Filter │
72
+ └──────────────────────┘ └──────────────────────────┘
73
+ ```
74
+
75
+ **Your data stays on your machine.** The cloud only sees request descriptions and execution metadata (timing, token counts). Never your documents, never your API keys.
76
+
77
+ ## Project Structure
78
+
79
+ ```
80
+ brynq/
81
+ ├── runtime/ # The thin client (runs on your machine)
82
+ │ ├── app.py # Local server (port 8003)
83
+ │ ├── cli.py # brynq-runtime CLI
84
+ │ ├── executor/ # Plan execution engine
85
+ │ ├── auth/ # OAuth (Anthropic, Google, OpenAI)
86
+ │ ├── vault/ # Encrypted API key storage
87
+ │ ├── sandbox/ # 3rd party agent isolation
88
+ │ └── reporter/ # Privacy-safe metrics
89
+ ├── app/ # Customer-facing web app (React + Vite)
90
+ └── docs/ # Documentation
91
+ ```
92
+
93
+ ## CLI Reference
94
+
95
+ ```bash
96
+ brynq-runtime start # Start runtime + open browser
97
+ brynq-runtime start --no-browser # Start without opening browser
98
+ brynq-runtime stop # Stop the runtime
99
+ brynq-runtime status # Show runtime status
100
+ brynq-runtime models # List available models
101
+ brynq-runtime login claude # OAuth login for Claude
102
+ brynq-runtime login gemini # OAuth login for Gemini
103
+ brynq-runtime chat # Interactive terminal chat
104
+ brynq-runtime chat --model llama3 # Chat with specific model
105
+ brynq-runtime chain "prompt" # Run a multi-model chain
106
+ ```
107
+
108
+ ## Security
109
+
110
+ - **Keys never leave your machine** — whether you log in via OAuth or bring your own API keys, Brynq never stores or proxies your credentials on our servers
111
+ - **Encrypted vault** — Local keys encrypted with Fernet (AES-128-CBC + HMAC-SHA256), PBKDF2 480K iterations, machine-bound
112
+ - **Signed plans** — HMAC-SHA256 signed execution plans with TTL
113
+ - **Privacy filter** — Only timing + token counts leave your machine, never content
114
+ - **Agent sandbox** — 3rd party agents run isolated with path protection and env scrubbing
115
+
116
+ ## License
117
+
118
+ Apache License 2.0 — see [LICENSE](LICENSE)
119
+
120
+ ## Links
121
+
122
+ - **Website**: [brynq.ai](https://brynq.ai)
123
+ - **Docs**: [docs/](docs/)
@@ -0,0 +1,38 @@
1
+ LICENSE
2
+ README.md
3
+ pyproject.toml
4
+ brynq.egg-info/PKG-INFO
5
+ brynq.egg-info/SOURCES.txt
6
+ brynq.egg-info/dependency_links.txt
7
+ brynq.egg-info/entry_points.txt
8
+ brynq.egg-info/requires.txt
9
+ brynq.egg-info/top_level.txt
10
+ runtime/__init__.py
11
+ runtime/app.py
12
+ runtime/cli.py
13
+ runtime/cloud_client.py
14
+ runtime/config.py
15
+ runtime/auth/__init__.py
16
+ runtime/auth/callback_server.py
17
+ runtime/auth/oauth_manager.py
18
+ runtime/auth/providers.py
19
+ runtime/auth/token_store.py
20
+ runtime/executor/__init__.py
21
+ runtime/executor/agent_bridge.py
22
+ runtime/executor/cloud_bridge.py
23
+ runtime/executor/context.py
24
+ runtime/executor/local_bridge.py
25
+ runtime/executor/plan_executor.py
26
+ runtime/executor/plan_validator.py
27
+ runtime/protocol/__init__.py
28
+ runtime/protocol/schema.py
29
+ runtime/protocol/serializer.py
30
+ runtime/protocol/signer.py
31
+ runtime/reporter/__init__.py
32
+ runtime/reporter/metrics.py
33
+ runtime/reporter/privacy.py
34
+ runtime/sandbox/__init__.py
35
+ runtime/sandbox/agent_protocol.py
36
+ runtime/sandbox/agent_sandbox.py
37
+ runtime/vault/__init__.py
38
+ runtime/vault/key_vault.py
@@ -0,0 +1,2 @@
1
+ [console_scripts]
2
+ brynq-runtime = runtime.cli:main
@@ -0,0 +1,2 @@
1
+ fastapi>=0.110
2
+ uvicorn>=0.27
@@ -0,0 +1 @@
1
+ runtime
@@ -0,0 +1,21 @@
1
+ [build-system]
2
+ requires = ["setuptools>=68.0", "wheel"]
3
+ build-backend = "setuptools.build_meta"
4
+
5
+ [project]
6
+ name = "brynq"
7
+ version = "0.1.0"
8
+ description = "The operating system for AI workforces"
9
+ readme = "README.md"
10
+ license = {text = "Apache-2.0"}
11
+ requires-python = ">=3.10"
12
+ dependencies = [
13
+ "fastapi>=0.110",
14
+ "uvicorn>=0.27",
15
+ ]
16
+
17
+ [project.scripts]
18
+ brynq-runtime = "runtime.cli:main"
19
+
20
+ [tool.setuptools.packages.find]
21
+ include = ["runtime*"]
@@ -0,0 +1,12 @@
1
+ """
2
+ Brynq Runtime — The Hands.
3
+
4
+ Executes plans created by the cloud. Never decides what to do —
5
+ only how to do it (call Ollama, call Claude, call an agent).
6
+
7
+ Subpackages:
8
+ executor — Plan validation, step-by-step execution
9
+ vault — Local encrypted key storage (user's API keys)
10
+ reporter — Execution metadata telemetry (timing, tokens, no content)
11
+ sandbox — Filesystem + network isolation for local agents
12
+ """