airtrain 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airtrain-0.1.0/.flake8 +14 -0
- airtrain-0.1.0/.github/workflows/publish.yml +26 -0
- airtrain-0.1.0/.mypy.ini +7 -0
- airtrain-0.1.0/.pre-commit-config.yaml +29 -0
- airtrain-0.1.0/.vscode/extensions.json +7 -0
- airtrain-0.1.0/.vscode/launch.json +27 -0
- airtrain-0.1.0/.vscode/settings.json +25 -0
- airtrain-0.1.0/EXPERIMENTS/schema_exps/pydantic_schemas.py +37 -0
- airtrain-0.1.0/PKG-INFO +106 -0
- airtrain-0.1.0/README.md +73 -0
- airtrain-0.1.0/airtrain/core/__pycache__/credentials.cpython-310.pyc +0 -0
- airtrain-0.1.0/airtrain/core/__pycache__/schemas.cpython-310.pyc +0 -0
- airtrain-0.1.0/airtrain/core/__pycache__/skills.cpython-310.pyc +0 -0
- airtrain-0.1.0/airtrain/core/credentials.py +153 -0
- airtrain-0.1.0/airtrain/core/schemas.py +237 -0
- airtrain-0.1.0/airtrain/core/skills.py +167 -0
- airtrain-0.1.0/airtrain.egg-info/PKG-INFO +106 -0
- airtrain-0.1.0/airtrain.egg-info/SOURCES.txt +33 -0
- airtrain-0.1.0/airtrain.egg-info/dependency_links.txt +1 -0
- airtrain-0.1.0/airtrain.egg-info/requires.txt +6 -0
- airtrain-0.1.0/airtrain.egg-info/top_level.txt +1 -0
- airtrain-0.1.0/examples/creating-skills/image1.jpg +0 -0
- airtrain-0.1.0/examples/creating-skills/image2.jpg +0 -0
- airtrain-0.1.0/examples/creating-skills/openai_skills.py +192 -0
- airtrain-0.1.0/examples/creating-skills/openai_structured_skills.py +144 -0
- airtrain-0.1.0/examples/credentials_usage.py +48 -0
- airtrain-0.1.0/examples/schema_usage.py +77 -0
- airtrain-0.1.0/examples/skill_usage.py +83 -0
- airtrain-0.1.0/pyproject.toml +11 -0
- airtrain-0.1.0/scripts/build.sh +10 -0
- airtrain-0.1.0/scripts/publish.sh +10 -0
- airtrain-0.1.0/services/firebase_service.py +181 -0
- airtrain-0.1.0/services/openai_service.py +366 -0
- airtrain-0.1.0/setup.cfg +4 -0
- airtrain-0.1.0/setup.py +35 -0
airtrain-0.1.0/.flake8
ADDED
@@ -0,0 +1,14 @@
|
|
1
|
+
[flake8]
|
2
|
+
max-line-length = 88
|
3
|
+
# E203 Whitespace before ':' https://www.flake8rules.com/rules/E203.html
|
4
|
+
# W503 Line break occurred before a binary operator https://www.flake8rules.com/rules/W503.html
|
5
|
+
# W504 Line break occurred after a binary operator https://www.flake8rules.com/rules/W504.html
|
6
|
+
# * Rationale for E203, W503, W504:
|
7
|
+
# https://black.readthedocs.io/en/stable/faq.html#why-are-flake8-s-e203-and-w503-violated
|
8
|
+
# https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html#line-breaks-binary-operators
|
9
|
+
ignore = E203,W503
|
10
|
+
enable-extensions = W504
|
11
|
+
per-file-ignores =
|
12
|
+
entities/prompts.py: E501
|
13
|
+
entities/error.py: E501
|
14
|
+
validators/sigma_validator.py: E501
|
@@ -0,0 +1,26 @@
|
|
1
|
+
name: Publish to PyPI
|
2
|
+
|
3
|
+
on:
|
4
|
+
release:
|
5
|
+
types: [created]
|
6
|
+
|
7
|
+
jobs:
|
8
|
+
deploy:
|
9
|
+
runs-on: ubuntu-latest
|
10
|
+
steps:
|
11
|
+
- uses: actions/checkout@v2
|
12
|
+
- name: Set up Python
|
13
|
+
uses: actions/setup-python@v2
|
14
|
+
with:
|
15
|
+
python-version: '3.x'
|
16
|
+
- name: Install dependencies
|
17
|
+
run: |
|
18
|
+
python -m pip install --upgrade pip
|
19
|
+
pip install build twine
|
20
|
+
- name: Build and publish
|
21
|
+
env:
|
22
|
+
TWINE_USERNAME: __token__
|
23
|
+
TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
|
24
|
+
run: |
|
25
|
+
python -m build
|
26
|
+
python -m twine upload dist/*
|
airtrain-0.1.0/.pre-commit-config.yaml
ADDED
@@ -0,0 +1,29 @@
|
|
1
|
+
default_stages: [pre-commit]
|
2
|
+
repos:
|
3
|
+
- repo: https://github.com/ambv/black
|
4
|
+
rev: '24.3.0'
|
5
|
+
hooks:
|
6
|
+
- id: black
|
7
|
+
language_version: python3.10
|
8
|
+
- repo: https://github.com/pycqa/flake8
|
9
|
+
rev: '6.1.0'
|
10
|
+
hooks:
|
11
|
+
- id: flake8
|
12
|
+
- repo: https://github.com/PyCQA/isort
|
13
|
+
rev: '5.12.0'
|
14
|
+
hooks:
|
15
|
+
- id: isort
|
16
|
+
args: ["--profile", "black"]
|
17
|
+
- repo: https://github.com/pre-commit/mirrors-mypy
|
18
|
+
rev: "v1.9.0"
|
19
|
+
hooks:
|
20
|
+
- id: mypy
|
21
|
+
additional_dependencies: [types-PyYAML, types-requests, types-Markdown]
|
22
|
+
# - repo: local
|
23
|
+
# hooks:
|
24
|
+
# - id: pytest-check
|
25
|
+
# name: pytest-check
|
26
|
+
# entry: pytest
|
27
|
+
# language: system
|
28
|
+
# pass_filenames: false
|
29
|
+
# always_run: false
|
@@ -0,0 +1,27 @@
|
|
1
|
+
{
|
2
|
+
// Use IntelliSense to learn about possible attributes.
|
3
|
+
// Hover to view descriptions of existing attributes.
|
4
|
+
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
5
|
+
"version": "0.2.0",
|
6
|
+
"configurations": [
|
7
|
+
{
|
8
|
+
"name": "Debug: Linux",
|
9
|
+
"type": "debugpy",
|
10
|
+
"request": "launch",
|
11
|
+
// "program": "${workspaceFolder}/.venv/bin/uvicorn",
|
12
|
+
"program": "/Users/ddod/miniconda3/envs/dh/bin/uvicorn",
|
13
|
+
"args": [
|
14
|
+
"FastAPIServer.main:app",
|
15
|
+
"--port",
|
16
|
+
"5003",
|
17
|
+
"--reload",
|
18
|
+
// "False"
|
19
|
+
],
|
20
|
+
"env": {
|
21
|
+
"S2S_FILE_LOG_LEVEL": "TRACE",
|
22
|
+
"S2S_CONSOLE_LOG_LEVEL": "INFO",
|
23
|
+
"S2S_CONSOLE_LOG_COLORIZE": "true"
|
24
|
+
}
|
25
|
+
},
|
26
|
+
]
|
27
|
+
}
|
@@ -0,0 +1,25 @@
|
|
1
|
+
{
|
2
|
+
"editor.formatOnSave": true,
|
3
|
+
"editor.codeActionsOnSave": {
|
4
|
+
"source.organizeImports": "explicit"
|
5
|
+
},
|
6
|
+
"python.testing.pytestEnabled": true,
|
7
|
+
"isort.check": true,
|
8
|
+
"isort.args": [
|
9
|
+
"--profile",
|
10
|
+
"black"
|
11
|
+
],
|
12
|
+
"[python]": {
|
13
|
+
"editor.defaultFormatter": "ms-python.black-formatter",
|
14
|
+
"editor.rulers": [
|
15
|
+
88,
|
16
|
+
]
|
17
|
+
},
|
18
|
+
"flake8.args": [
|
19
|
+
"--show-source",
|
20
|
+
"--config=.flake8"
|
21
|
+
],
|
22
|
+
"cSpell.words": [],
|
23
|
+
"mypy.runUsingActiveInterpreter": true,
|
24
|
+
"mypy.debugLogging": true,
|
25
|
+
}
|
@@ -0,0 +1,37 @@
|
|
1
|
+
"""Experiment: round-trip a Pydantic model through its JSON schema.

Exports a model's JSON schema to ``schema.json``, reloads it, and
rebuilds an equivalent model dynamically with ``create_model``.
"""

from pydantic import BaseModel, create_model
import json
from typing import Optional, List


# 1. Define original model
class User(BaseModel):
    id: int
    name: str
    email: str


# Map JSON-schema primitive type names onto Python types.
_JSON_TYPE_MAP = {
    "integer": int,
    "string": str,
    "number": float,
    "boolean": bool,
}

# 2. Export schema
schema = User.model_json_schema()
with open("schema.json", "w") as f:
    json.dump(schema, f)

# 3. Load schema and create model
with open("schema.json", "r") as f:
    loaded_schema = json.load(f)

# 4. Create model using create_model
fields = {}
required_names = set(loaded_schema.get("required", []))
for field_name, field_info in loaded_schema["properties"].items():
    field_type = _JSON_TYPE_MAP.get(field_info["type"])
    if field_type is None:
        # Fail loudly instead of silently building a field typed ``None``
        # (the original .get() fallthrough produced exactly that).
        raise ValueError(f"Unsupported JSON schema type: {field_info['type']!r}")

    if field_name in required_names:
        fields[field_name] = (field_type, ...)
    else:
        # Non-required fields must also be Optional in the type, not just
        # carry a None default, or the annotation contradicts the default.
        fields[field_name] = (Optional[field_type], None)

DynamicModel = create_model("AirtrainSchema", **fields)

# Test
user = DynamicModel(id=1, name="John", email="john@example.com")
print(user.model_dump())
|
airtrain-0.1.0/PKG-INFO
ADDED
@@ -0,0 +1,106 @@
|
|
1
|
+
Metadata-Version: 2.2
|
2
|
+
Name: airtrain
|
3
|
+
Version: 0.1.0
|
4
|
+
Summary: A platform for building and deploying AI agents with structured skills
|
5
|
+
Home-page: https://github.com/rosaboyle/airtrain.dev
|
6
|
+
Author: Dheeraj Pai
|
7
|
+
Author-email: helloworldcmu@gmail.com
|
8
|
+
Classifier: Development Status :: 3 - Alpha
|
9
|
+
Classifier: Intended Audience :: Developers
|
10
|
+
Classifier: License :: OSI Approved :: MIT License
|
11
|
+
Classifier: Operating System :: OS Independent
|
12
|
+
Classifier: Programming Language :: Python :: 3
|
13
|
+
Classifier: Programming Language :: Python :: 3.8
|
14
|
+
Classifier: Programming Language :: Python :: 3.9
|
15
|
+
Classifier: Programming Language :: Python :: 3.10
|
16
|
+
Requires-Python: >=3.8
|
17
|
+
Description-Content-Type: text/markdown
|
18
|
+
Requires-Dist: pydantic>=2.0.0
|
19
|
+
Requires-Dist: openai>=1.0.0
|
20
|
+
Requires-Dist: python-dotenv>=0.19.0
|
21
|
+
Requires-Dist: PyYAML>=5.4.1
|
22
|
+
Requires-Dist: firebase-admin>=5.0.0
|
23
|
+
Requires-Dist: loguru>=0.5.3
|
24
|
+
Dynamic: author
|
25
|
+
Dynamic: author-email
|
26
|
+
Dynamic: classifier
|
27
|
+
Dynamic: description
|
28
|
+
Dynamic: description-content-type
|
29
|
+
Dynamic: home-page
|
30
|
+
Dynamic: requires-dist
|
31
|
+
Dynamic: requires-python
|
32
|
+
Dynamic: summary
|
33
|
+
|
34
|
+
# Airtrain
|
35
|
+
|
36
|
+
A powerful platform for building and deploying AI agents with structured skills and capabilities.
|
37
|
+
|
38
|
+
## Features
|
39
|
+
|
40
|
+
- **Structured Skills**: Build modular AI skills with defined input/output schemas
|
41
|
+
- **OpenAI Integration**: Built-in support for OpenAI's GPT models with structured outputs
|
42
|
+
- **Credential Management**: Secure handling of API keys and credentials
|
43
|
+
- **Type Safety**: Full type hints and Pydantic model support
|
44
|
+
- **Async Support**: Both synchronous and asynchronous API implementations
|
45
|
+
|
46
|
+
## Installation
|
47
|
+
|
48
|
+
```bash
|
49
|
+
pip install airtrain
|
50
|
+
```
|
51
|
+
|
52
|
+
## Quick Start
|
53
|
+
|
54
|
+
### Creating a Structured OpenAI Skill
|
55
|
+
|
56
|
+
```python
|
57
|
+
from airtrain.core.skills import Skill
|
58
|
+
from airtrain.core.schemas import InputSchema, OutputSchema
|
59
|
+
from pydantic import BaseModel
|
60
|
+
from typing import List
|
61
|
+
|
62
|
+
# Define your response model
|
63
|
+
class PersonInfo(BaseModel):
|
64
|
+
name: str
|
65
|
+
age: int
|
66
|
+
occupation: str
|
67
|
+
skills: List[str]
|
68
|
+
|
69
|
+
# Create a skill
|
70
|
+
class OpenAIParserSkill(Skill):
|
71
|
+
def process(self, input_data):
|
72
|
+
# Implementation
|
73
|
+
return parsed_response
|
74
|
+
|
75
|
+
# Use the skill
|
76
|
+
skill = OpenAIParserSkill()
|
77
|
+
result = skill.process(input_data)
|
78
|
+
```
|
79
|
+
|
80
|
+
### Managing Credentials
|
81
|
+
|
82
|
+
```python
|
83
|
+
from airtrain.core.credentials import OpenAICredentials
|
84
|
+
from pathlib import Path
|
85
|
+
|
86
|
+
# Load credentials
|
87
|
+
creds = OpenAICredentials(
|
88
|
+
api_key="your-api-key",
|
89
|
+
organization_id="optional-org-id"
|
90
|
+
)
|
91
|
+
|
92
|
+
# Save to environment
|
93
|
+
creds.load_to_env()
|
94
|
+
```
|
95
|
+
|
96
|
+
## Documentation
|
97
|
+
|
98
|
+
For detailed documentation, visit [our documentation site](https://airtrain.readthedocs.io/).
|
99
|
+
|
100
|
+
## Contributing
|
101
|
+
|
102
|
+
Contributions are welcome! Please feel free to submit a Pull Request.
|
103
|
+
|
104
|
+
## License
|
105
|
+
|
106
|
+
This project is licensed under the MIT License - see the LICENSE file for details.
|
airtrain-0.1.0/README.md
ADDED
@@ -0,0 +1,73 @@
|
|
1
|
+
# Airtrain
|
2
|
+
|
3
|
+
A powerful platform for building and deploying AI agents with structured skills and capabilities.
|
4
|
+
|
5
|
+
## Features
|
6
|
+
|
7
|
+
- **Structured Skills**: Build modular AI skills with defined input/output schemas
|
8
|
+
- **OpenAI Integration**: Built-in support for OpenAI's GPT models with structured outputs
|
9
|
+
- **Credential Management**: Secure handling of API keys and credentials
|
10
|
+
- **Type Safety**: Full type hints and Pydantic model support
|
11
|
+
- **Async Support**: Both synchronous and asynchronous API implementations
|
12
|
+
|
13
|
+
## Installation
|
14
|
+
|
15
|
+
```bash
|
16
|
+
pip install airtrain
|
17
|
+
```
|
18
|
+
|
19
|
+
## Quick Start
|
20
|
+
|
21
|
+
### Creating a Structured OpenAI Skill
|
22
|
+
|
23
|
+
```python
|
24
|
+
from airtrain.core.skills import Skill
|
25
|
+
from airtrain.core.schemas import InputSchema, OutputSchema
|
26
|
+
from pydantic import BaseModel
|
27
|
+
from typing import List
|
28
|
+
|
29
|
+
# Define your response model
|
30
|
+
class PersonInfo(BaseModel):
|
31
|
+
name: str
|
32
|
+
age: int
|
33
|
+
occupation: str
|
34
|
+
skills: List[str]
|
35
|
+
|
36
|
+
# Create a skill
|
37
|
+
class OpenAIParserSkill(Skill):
|
38
|
+
def process(self, input_data):
|
39
|
+
# Implementation
|
40
|
+
return parsed_response
|
41
|
+
|
42
|
+
# Use the skill
|
43
|
+
skill = OpenAIParserSkill()
|
44
|
+
result = skill.process(input_data)
|
45
|
+
```
|
46
|
+
|
47
|
+
### Managing Credentials
|
48
|
+
|
49
|
+
```python
|
50
|
+
from airtrain.core.credentials import OpenAICredentials
|
51
|
+
from pathlib import Path
|
52
|
+
|
53
|
+
# Load credentials
|
54
|
+
creds = OpenAICredentials(
|
55
|
+
api_key="your-api-key",
|
56
|
+
organization_id="optional-org-id"
|
57
|
+
)
|
58
|
+
|
59
|
+
# Save to environment
|
60
|
+
creds.load_to_env()
|
61
|
+
```
|
62
|
+
|
63
|
+
## Documentation
|
64
|
+
|
65
|
+
For detailed documentation, visit [our documentation site](https://airtrain.readthedocs.io/).
|
66
|
+
|
67
|
+
## Contributing
|
68
|
+
|
69
|
+
Contributions are welcome! Please feel free to submit a Pull Request.
|
70
|
+
|
71
|
+
## License
|
72
|
+
|
73
|
+
This project is licensed under the MIT License - see the LICENSE file for details.
|
Binary file
|
Binary file
|
Binary file
|
@@ -0,0 +1,153 @@
|
|
1
|
+
from typing import Dict, List, Optional, Set
|
2
|
+
import os
|
3
|
+
import json
|
4
|
+
from pathlib import Path
|
5
|
+
from abc import ABC, abstractmethod
|
6
|
+
import dotenv
|
7
|
+
from pydantic import BaseModel, Field, SecretStr
|
8
|
+
import yaml # type: ignore
|
9
|
+
|
10
|
+
|
11
|
+
class CredentialError(Exception):
    """Root of the credential exception hierarchy.

    Every credential-specific failure raised by this module derives from
    this class, so callers may catch it to handle any credential issue.
    """
|
15
|
+
|
16
|
+
|
17
|
+
class CredentialNotFoundError(CredentialError):
    """Signals that a credential expected to exist could not be located."""
|
21
|
+
|
22
|
+
|
23
|
+
class CredentialValidationError(CredentialError):
    """Signals that one or more required credentials failed validation."""
|
27
|
+
|
28
|
+
|
29
|
+
class BaseCredentials(BaseModel):
    """Base class for all credential configurations.

    Subclasses declare their secrets as Pydantic fields and list the
    mandatory ones in ``_required_credentials``.  Instances can be moved
    between environment variables and ``.env``/``.json``/``.yaml`` files.
    """

    # True once load_to_env() has exported this instance to os.environ.
    _loaded: bool = False
    # Field names that validate_credentials() treats as mandatory.
    _required_credentials: Set[str] = set()

    def load_to_env(self) -> None:
        """Export every set field into ``os.environ`` (upper-cased names).

        ``SecretStr`` values are unwrapped to their plain text.  Fields
        whose value is ``None`` are skipped — the original implementation
        exported the literal string ``"None"`` for unset optional fields.
        """
        for field_name, field_value in self:
            if field_value is None:
                # Unset optional credentials must not leak as "None".
                continue
            if isinstance(field_value, SecretStr):
                value = field_value.get_secret_value()
            else:
                value = str(field_value)
            os.environ[field_name.upper()] = value
        self._loaded = True

    @classmethod
    def from_env(cls) -> "BaseCredentials":
        """Create a credentials instance from environment variables.

        Only variables that are present and non-empty are passed on;
        missing required fields surface as a Pydantic validation error.
        """
        field_values = {}
        for field_name in cls.model_fields:
            env_key = field_name.upper()
            if env_value := os.getenv(env_key):
                field_values[field_name] = env_value
        return cls(**field_values)

    @classmethod
    def from_file(cls, file_path: Path) -> "BaseCredentials":
        """Load credentials from a file (supports .env, .json, .yaml).

        Raises:
            FileNotFoundError: if ``file_path`` does not exist.
            ValueError: for an unsupported file extension.
        """
        if not file_path.exists():
            raise FileNotFoundError(f"Credentials file not found: {file_path}")

        if file_path.suffix == ".env":
            dotenv.load_dotenv(file_path)
            return cls.from_env()

        elif file_path.suffix == ".json":
            with open(file_path) as f:
                data = json.load(f)
            return cls(**data)

        elif file_path.suffix in {".yaml", ".yml"}:
            with open(file_path) as f:
                data = yaml.safe_load(f)
            return cls(**data)

        else:
            raise ValueError(f"Unsupported file format: {file_path.suffix}")

    def save_to_file(self, file_path: Path) -> None:
        """Save credentials to a file (.env, .json, or .yaml).

        WARNING: secrets are written to disk in plain text.

        Raises:
            ValueError: for an unsupported file extension.
        """
        data = self.model_dump(exclude={"_loaded"})

        # Convert SecretStr to plain strings for saving
        for key, value in data.items():
            if isinstance(value, SecretStr):
                data[key] = value.get_secret_value()

        if file_path.suffix == ".env":
            with open(file_path, "w") as f:
                for key, value in data.items():
                    f.write(f"{key.upper()}={value}\n")

        elif file_path.suffix == ".json":
            with open(file_path, "w") as f:
                json.dump(data, f, indent=2)

        elif file_path.suffix in {".yaml", ".yml"}:
            with open(file_path, "w") as f:
                yaml.dump(data, f)

        else:
            raise ValueError(f"Unsupported file format: {file_path.suffix}")

    def validate_credentials(self) -> None:
        """Validate that all required credentials are present.

        A field counts as missing when it is ``None`` or an empty
        ``SecretStr``.

        Raises:
            CredentialValidationError: listing every missing field.
        """
        missing = []
        for field_name in self._required_credentials:
            value = getattr(self, field_name, None)
            if value is None or (
                isinstance(value, SecretStr) and not value.get_secret_value()
            ):
                missing.append(field_name)

        if missing:
            raise CredentialValidationError(
                f"Missing required credentials: {', '.join(missing)}"
            )

    def clear_from_env(self) -> None:
        """Remove this instance's fields from ``os.environ`` if present."""
        for field_name in self.model_fields:
            env_key = field_name.upper()
            if env_key in os.environ:
                del os.environ[env_key]
        self._loaded = False
|
125
|
+
|
126
|
+
|
127
|
+
class OpenAICredentials(BaseCredentials):
    """Credentials for authenticating against the OpenAI API."""

    # Secret API key; the only field validate_credentials() demands.
    api_key: SecretStr = Field(..., description="OpenAI API key")
    # Optional organization the requests should be attributed to.
    organization_id: Optional[str] = Field(
        default=None, description="OpenAI organization ID"
    )

    _required_credentials = {"api_key"}
|
134
|
+
|
135
|
+
|
136
|
+
class AWSCredentials(BaseCredentials):
    """AWS credentials (access key pair, region, optional session token)."""

    # Access key pair; both parts are required (see _required_credentials).
    aws_access_key_id: SecretStr
    aws_secret_access_key: SecretStr
    # Region used when the caller does not supply one.
    aws_region: str = "us-east-1"
    # Temporary STS session token; only needed for temporary credentials.
    aws_session_token: Optional[SecretStr] = None

    _required_credentials = {"aws_access_key_id", "aws_secret_access_key"}
|
145
|
+
|
146
|
+
|
147
|
+
class GoogleCloudCredentials(BaseCredentials):
    """Google Cloud credentials (project ID + service-account key)."""

    # Target GCP project; required.
    project_id: str
    # Service-account key material; required.
    # NOTE(review): presumably the JSON key content or a path to it —
    # confirm against the consuming service code.
    service_account_key: SecretStr

    _required_credentials = {"project_id", "service_account_key"}
|