ApiLogicServer 15.0.52__py3-none-any.whl → 15.0.55__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- api_logic_server_cli/api_logic_server.py +6 -3
- api_logic_server_cli/api_logic_server_info.yaml +3 -3
- api_logic_server_cli/create_from_model/api_logic_server_utils.py +9 -0
- api_logic_server_cli/prototypes/base/.github/.copilot-instructions.md +49 -21
- api_logic_server_cli/prototypes/base/database/alembic/alembic_run.py +98 -0
- api_logic_server_cli/prototypes/base/database/alembic/readme_alembic.md +36 -0
- api_logic_server_cli/prototypes/base/docs/training/admin_app_1_context.prompt.md +40 -0
- api_logic_server_cli/prototypes/base/integration/mcp/mcp_client_executor.py +4 -4
- api_logic_server_cli/prototypes/base/readme.md +11 -13
- api_logic_server_cli/prototypes/base/security/readme_security.md +0 -1
- api_logic_server_cli/prototypes/basic_demo/README.md +267 -272
- api_logic_server_cli/prototypes/basic_demo/_config.yml +8 -0
- api_logic_server_cli/prototypes/basic_demo/_layouts/redirect.html +15 -0
- api_logic_server_cli/prototypes/basic_demo/docs/system-creation-vibe.md +158 -0
- api_logic_server_cli/prototypes/basic_demo/logic/declarative-vs-procedural-comparison.html +110 -0
- api_logic_server_cli/prototypes/basic_demo/logic/procedural/declarative-vs-procedural-comparison.md +295 -0
- api_logic_server_cli/prototypes/manager/samples/prompts/add_email.prompt +8 -0
- api_logic_server_cli/prototypes/manager/samples/prompts/elections.prompt +3 -0
- api_logic_server_cli/prototypes/manager/samples/prompts/emp_dept.prompt +4 -0
- api_logic_server_cli/prototypes/manager/samples/prompts/genai_demo.prompt +13 -0
- api_logic_server_cli/prototypes/manager/samples/readme_samples.md +3 -1
- api_logic_server_cli/prototypes/manager/system/genai/app_templates/app_learning/Admin-App-Resource-Learning-Prompt.md +1 -1
- api_logic_server_cli/prototypes/manager/system/genai/app_templates/react-admin-template/package.json +1 -0
- api_logic_server_cli/sqlacodegen_wrapper/sqlacodegen_wrapper.py +5 -2
- {apilogicserver-15.0.52.dist-info → apilogicserver-15.0.55.dist-info}/METADATA +1 -1
- {apilogicserver-15.0.52.dist-info → apilogicserver-15.0.55.dist-info}/RECORD +30 -23
- api_logic_server_cli/prototypes/base/.devcontainer-option/.copilot-instructions.md +0 -178
- api_logic_server_cli/prototypes/base/database/alembic/readme.md +0 -18
- api_logic_server_cli/prototypes/base/database/system/SAFRSBaseX.pyZ +0 -73
- api_logic_server_cli/prototypes/basic_demo/customizations/database/system/SAFRSBaseX.py +0 -139
- {apilogicserver-15.0.52.dist-info → apilogicserver-15.0.55.dist-info}/WHEEL +0 -0
- {apilogicserver-15.0.52.dist-info → apilogicserver-15.0.55.dist-info}/entry_points.txt +0 -0
- {apilogicserver-15.0.52.dist-info → apilogicserver-15.0.55.dist-info}/licenses/LICENSE +0 -0
- {apilogicserver-15.0.52.dist-info → apilogicserver-15.0.55.dist-info}/top_level.txt +0 -0
api_logic_server_cli/api_logic_server.py

@@ -12,9 +12,10 @@ ApiLogicServer CLI: given a database url, create [and run] customizable ApiLogic
 Called from api_logic_server_cli.py, by instantiating the ProjectRun object.
 '''
 
-__version__ = "15.00.
+__version__ = "15.00.55" # last public release: 15.00.52 (15.00.12)
 recent_changes = \
     f'\n\nRecent Changes:\n' +\
+        "\t07/23/2024 - 15.00.55: system vibe support - initial testing \n"\
         "\t07/20/2024 - 15.00.52: Python 3.13 compatibility fixes - psycopg2→psycopg3, SQLAlchemy 2.0+, pkg_resources→importlib.metadata. mgr dbs \n"\
         "\t07/17/2024 - 15.00.49: venv fix+, ext bldr * fix, copilot vibe tweaks - creation, mcp logic, basic_demo autonums \n"\
        "\t07/10/2024 - 15.00.41: copilot vibe support for logic, UI, MCP, bug[98] \n"\
@@ -384,12 +385,14 @@ def create_project_and_overlay_prototypes(project: 'ProjectRun', msg: str) -> str
             joinpath('prototypes/BudgetApp')
         recursive_overwrite(nw_dir, project.project_directory)
 
-    if project.db_url
+    if 'basic_demo' in project.db_url:
         log.debug(".. ..Copy in basic_demo customizations: readme, logic, tests")
         nw_dir = (Path(api_logic_server_dir_str)).\
             joinpath('prototypes/basic_demo')
         recursive_overwrite(nw_dir, project.project_directory)
-
+        os.rename(project.project_directory_path / 'readme.md', project.project_directory_path / 'readme_standard.md')
+        create_utils.copy_md(project = project, from_doc_file = "Sample-Basic-Demo.md", to_project_file = "readme.md")
+        create_utils.copy_md(project = project, from_doc_file = "Sample-Basic-Demo-Vibe.md", to_project_file="readme_vibe.md")
 
 
     if project.db_url == "mysql+pymysql://root:p@localhost:3306/classicmodels":
api_logic_server_cli/api_logic_server_info.yaml

@@ -1,3 +1,3 @@
-last_created_date: July
-last_created_project_name:
-last_created_version: 15.00.
+last_created_date: July 23, 2025 21:48:33
+last_created_project_name: ../../../servers/basic_demo
+last_created_version: 15.00.54
api_logic_server_cli/create_from_model/api_logic_server_utils.py

@@ -146,6 +146,7 @@ def copy_md(project, from_doc_file: str, to_project_file: str = "README.md"):
     readme_lines_mkdocs = readme_file.readlines()
     readme_lines_md = []
     in_mkdocs_block = False
+    do_process_code_block_titles = False
     db_line_num = 0
     for each_line in readme_lines_mkdocs:
         db_line_num += 1
@@ -197,6 +198,14 @@ def copy_md(project, from_doc_file: str, to_project_file: str = "README.md"):
         else:
             each_line = each_line.replace('.md', '') # hmm... todo: find out why this exists
             pass
+        if 'process_code_block_titles' in each_line: # update front matter to get this
+            do_process_code_block_titles = True
+        if do_process_code_block_titles and 'title=' in each_line:
+            # extract the quoted text after 'title=' into title
+            match = re.search(r'title\s*=\s*["\']([^"\']+)["\']', each_line)
+            if match:
+                title = match.group(1)
+                readme_lines_md.append('**' + title + ':**\n')
         readme_lines_md.append(each_line)
     with open(str(to_file), "w") as readme_file:
         readme_file.writelines(readme_lines_md)
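The effect of the new `title=` handling in `copy_md`: a mkdocs code-fence title attribute is surfaced as a bold label line in the generated plain-markdown readme. A minimal sketch of that behavior (the sample fence line is hypothetical):

```python
import re

# hypothetical mkdocs code-fence attribute line, e.g. on:  ```python title="logic/declare_logic.py"
each_line = 'python title="logic/declare_logic.py"'

match = re.search(r'title\s*=\s*["\']([^"\']+)["\']', each_line)
if match:
    # emitted just before the fence line in the generated readme
    print('**' + match.group(1) + ':**')  # -> **logic/declare_logic.py:**
```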
api_logic_server_cli/prototypes/base/.github/.copilot-instructions.md

@@ -37,11 +37,14 @@ python api_logic_server_run.py
 Rule.sum(derive=Customer.Balance, as_sum_of=Order.AmountTotal)
 Rule.constraint(validate=Customer, as_condition=lambda row: row.Balance <= row.CreditLimit)
 ```
+
+As described in `docs/training/logic_bank_api.prompt`, logic includes rules for sums, formulas, constraints, and more. They also include events, which are used to trigger actions like sending emails or updating related records.
+
 For project `basic_demo`, the best place to begin is to provide CoPilot with the natural language logic below. CoPilot will then offer to translate the logic into LogicBank rules. Also
 1. Provide the analysis (below) showing the 44X code reduction.
 2. Stress the importance of business logic - it's half the project, and requires an underlying rules engine. Using AI alone is not sufficient - you will wind up with procedural code, with the issues noted in the analysis below.
 
-Here's the natural language logic to provide to
+Here's the natural language logic to provide to CoPilot:
 
 ```text
 Use case: Check Credit
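For orientation, a sketch of the kind of LogicBank rules CoPilot typically derives for the Check Credit use case; the `Order`/`Item`/`Product` attribute names below are assumptions about the basic_demo schema, not the generated file:

```python
from logic_bank.logic_bank import Rule
from database import models

# Check Credit, expressed declaratively (attribute names are illustrative)
Rule.constraint(validate=models.Customer,
                as_condition=lambda row: row.Balance <= row.CreditLimit,
                error_msg="balance ({row.Balance}) exceeds credit ({row.CreditLimit})")
Rule.sum(derive=models.Customer.Balance, as_sum_of=models.Order.AmountTotal)
Rule.sum(derive=models.Order.AmountTotal, as_sum_of=models.Item.Amount)
Rule.formula(derive=models.Item.Amount, as_expression=lambda row: row.Quantity * row.UnitPrice)
Rule.copy(derive=models.Item.UnitPrice, from_parent=models.Product.UnitPrice)
```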
@@ -79,25 +82,12 @@ but you can also offer to suggest rules.
 
 ### Adding MCP
 
-The API is automatically MCP-enabled.
-
-> **Important**: For project `basic_demo` (only), add customizations first, then MCP Client, then fix logic:
+The API is automatically MCP-enabled. This adds the MCP Client:
 
 ```bash
-# 1. Add customization framework (required first, *only* for project `basic_demo`
-# This creates the SysEmail table, and the logic for opt-out.
-genai-logic add-cust
-
-# 2. Then add MCP Client (requires OpenAI key)
 genai-logic genai-add-mcp-client
-
-# 3. Rename the logic file `logic/logic_discovery/use_case.py` to `logic/logic_discovery/use_case.py.duplicate` (it is duplicated by using `genai-logic add-cust`)
-rm
 ```
 
-
-**Note**: If you already ran `genai-add-mcp-client`, you should run `add-cust` afterwards to ensure the customization framework is properly set up.
-
 ### Configuring Admin UI
 
 This is built when project is created - no need to add it.
@@ -126,6 +116,8 @@ Then, `npm install` and `npm start`
 
 Temporary restriction: security must be disabled.
 
+**IMPORTANT**: When working with React apps, ALWAYS read `docs/training` first. This file contains critical data access provider configuration that was built when the project was created. The data provider handles JSON:API communication and record context - ignore this at your peril.
+
 Customize using CoPilot chat, with `docs/training`.
 
 ### Security - Role-Based Access Control
@@ -164,9 +156,18 @@ def my_endpoint():
     return {"message": "Custom endpoint"}
 ```
 
-### Customize Models - Add Attributes
+### Customize Models - Add Tables, Attributes
+
+To add tables / columns to the database (highly impactful - request permission):
+
+1. Update `database/model.py`
+2. Use `database/alembic/alembic_run.py` to update the database. This will generate a migration script and apply it to the database, so you do not have to run `alembic revision --autogenerate` manually.
+3. Offer to update ui/admin/admin.yaml to add the new table or column to the Admin UI.
+
+NEVER start by updating the database directly, since some platforms may not have database CLI tools, although you can present this as an option.
+
+If altering `database/models.py`, be sure to follow the patterns shown in the existing models. Note they do not contain a `__bind_key__`.
 
-Update the model, and use `database/alembic` to update the database (highly impactful - request permission).
 
 ### Addressing `Missing Attributes` during logic loading at project startup
 
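A minimal sketch of step 1, assuming a hypothetical `Customer` table in `database/models.py` (column names and types are illustrative); `python database/alembic/alembic_run.py` then generates and applies the migration:

```python
# illustrative addition inside database/models.py - follow the existing model patterns (no __bind_key__)
from sqlalchemy import Boolean, Column, DECIMAL, Integer, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()  # already defined in models.py; repeated so this sketch stands alone

class Customer(Base):
    __tablename__ = 'customer'

    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(String)
    balance = Column(DECIMAL)
    email_opt_out = Column(Boolean, default=False)  # new column - picked up by alembic autogenerate
```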
@@ -211,10 +212,37 @@ models.Employee.ProperSalary = __proper_salary__
 
 When customizing SQLAlchemy models:
 
-Don't use direct comparisons with database fields in computed properties
-Convert to Python values first using float(), int(), str()
-Use property() function instead of @jsonapi_attr for computed properties
-Always add error handling for type conversions
+* Don't use direct comparisons with database fields in computed properties
+* Convert to Python values first using float(), int(), str()
+* Use property() function instead of @jsonapi_attr for computed properties
+* Always add error handling for type conversions
+
+### Adding events
+LogicBank rules are the preferred approach to logic, but you will sometimes need to add events. This is done in `logic/declare_logic.py` (important: the function MUST come first):
+
+```python
+# Example: Log email activity after SysEmail is committed
+
+def sys_email_after_commit(row: models.SysEmail, old_row: models.SysEmail, logic_row: LogicRow):
+    """
+    After SysEmail is committed, log 'email sent'
+    unless the customer has opted out
+    """
+    if not row.customer.email_opt_out:
+        logic_row.log(f"📧 Email sent to {row.customer.name} - Subject: {row.subject}")
+    else:
+        logic_row.log(f"🚫 Email blocked for {row.customer.name} - Customer opted out")
+
+Rule.commit_row_event(on_class=SysEmail, calling=sys_email_after_commit)
+```
+
+LogicBank event types include:
+- `Rule.commit_row_event()` - fires after transaction commits
+- `Rule.after_insert()` - fires after row insert
+- `Rule.after_update()` - fires after row update
+- `Rule.after_delete()` - fires after row delete
+
+All events receive `(row, old_row, logic_row)` parameters and should use `logic_row.log()` for logging.
 
 ## 📁 Key Directories
 
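A minimal sketch of those four guidelines applied to the `ProperSalary` example above (the salary conversion itself is illustrative):

```python
def __proper_salary__(self):
    """computed property - convert to a Python value rather than comparing the Column directly"""
    try:
        salary = float(self.Salary or 0)  # convert first with float()
        return round(salary, 2)
    except (TypeError, ValueError):       # always guard type conversions
        return 0.0

# attach with property(), not @jsonapi_attr
models.Employee.ProperSalary = property(__proper_salary__)
```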
api_logic_server_cli/prototypes/base/database/alembic/alembic_run.py (new file)

@@ -0,0 +1,98 @@
+import os
+import subprocess
+import sys
+
+'''
+Pushes the current database/models.py to the database.
+python database/alembic/alembic_run.py [--non-interactive]
+'''
+
+def prompt(msg, non_interactive=False):
+    if non_interactive:
+        print(f"{msg}")
+        print("Running in non-interactive mode...")
+        return
+    try:
+        input(f"{msg}\nPress Enter to continue or Ctrl+C to abort...")
+    except EOFError:
+        print("Running in non-interactive mode (no stdin available)...")
+        return
+
+def run(cmd, env=None):
+    print(f"Running: {cmd}")
+    result = subprocess.run(cmd, shell=True, env=env)
+    if result.returncode != 0:
+        print(f"Command failed: {cmd}")
+        sys.exit(result.returncode)
+
+def main():
+    # Check for non-interactive mode more safely
+    non_interactive = "--non-interactive" in sys.argv
+    if not non_interactive:
+        try:
+            non_interactive = not sys.stdin.isatty()
+        except:
+            # If we can't check stdin, assume non-interactive
+            non_interactive = True
+
+    orig_dir = os.getcwd()
+    # Change to the database directory (parent of alembic directory) where alembic.ini is located
+    db_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+    print(f"Changing directory to {db_dir}")
+    os.chdir(db_dir)
+
+    env = os.environ.copy()
+    env["APILOGICPROJECT_NO_FLASK"] = "True" # ~ export APILOGICPROJECT_NO_FLASK=True
+
+    print("\n\nThis script will update your database schema to match models.py using Alembic.")
+    print("Steps:")
+    print("1. Set APILOGICPROJECT_NO_FLASK=True -- eg, export APILOGICPROJECT_NO_FLASK=True")
+    print("2. Run: alembic upgrade head")
+    print("3. Run: alembic revision --autogenerate -m \"message\"")
+    prompt("Ready to proceed?", non_interactive)
+
+    run("alembic upgrade head", env=env)
+    print("Database schema updated to latest migration.")
+
+    prompt("Now, a new migration will be generated to match models.py.", non_interactive)
+    if non_interactive:
+        msg = "autogenerated"
+    else:
+        try:
+            msg = input("Enter a message for the migration (default: 'autogenerated'): ") or "autogenerated"
+        except EOFError:
+            msg = "autogenerated"
+    run(f'alembic revision --autogenerate -m "{msg}"', env=env)
+
+    # Find the latest migration file
+    versions_dir = os.path.join(db_dir, "alembic", "versions")
+    migration_files = sorted(
+        [f for f in os.listdir(versions_dir) if f.endswith(".py")],
+        key=lambda x: os.path.getmtime(os.path.join(versions_dir, x)),
+        reverse=True
+    )
+    if migration_files:
+        latest_file = os.path.join(versions_dir, migration_files[0])
+        with open(latest_file, "r") as f:
+            lines = f.readlines()
+        with open(latest_file, "w") as f:
+            for line in lines:
+                f.write(line)
+                if line.strip().startswith("def downgrade"):
+                    f.write("    return\n")
+                    break
+    else:
+        print("No migration file found.")
+        sys.exit(1)
+
+    # Apply the newly generated migration after modifying the migration file
+    prompt(f"Migration file generated: {latest_file}\nIt is recommended to review this migration file before proceeding.", non_interactive)
+    run("alembic upgrade head", env=env)
+
+    print("Migration file updated: downgrade will do nothing.")
+    print("Consider updating ui/admin/admin.yaml to reflect schema changes.")
+    os.chdir(orig_dir)
+    print("\nSuccess! Database schema and migrations are up to date.\n\n")
+
+if __name__ == "__main__":
+    main()
api_logic_server_cli/prototypes/base/database/alembic/readme_alembic.md (new file)

@@ -0,0 +1,36 @@
+You can push changes to `database/models.py' to your database automatically, or manually.
+
+<br>
+
+## Automatic
+
+Use:
+
+```bash
+python database/alembic/alembic_run.py [--non-interactive]
+```
+
+<br>
+
+## Manual
+
+The diagram below illustrates a path for enacting changes to the data model, and using [Alembic](https://alembic.sqlalchemy.org/en/latest/index.html) to automate the database changes:
+
+1. Update `database/models.py` (e.g., add columns, tables)
+2. Use alembic to compute the revisions
+```bash
+cd database
+export APILOGICPROJECT_NO_FLASK=True
+alembic revision --autogenerate -m "Added Tables and Columns"
+```
+3. **Edit the revision file** to signify your understanding (see below)
+4. Activate the change
+```bash
+alembic upgrade head
+unset APILOGICPROJECT_NO_FLASK
+```
+
+![](
+
+
+To update your admin app, run `rebuild-from-model`. For more information, see [Database Design Changes](https://apilogicserver.github.io/Docs/Database-Changes/).
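For the manual path's step 3, the autogenerated revision is an ordinary Alembic script; reviewing it before `alembic upgrade head` is where you confirm the intended DDL. An illustrative sketch (revision ids and operations are hypothetical; `alembic_run.py` additionally rewrites `downgrade()` to a no-op):

```python
"""Added Tables and Columns (illustrative autogenerated revision)"""
from alembic import op
import sqlalchemy as sa

# alembic also emits revision / down_revision identifiers here; omitted in this sketch

def upgrade():
    # review these autogenerated operations before applying
    op.add_column('customer', sa.Column('email_opt_out', sa.Boolean(), nullable=True))

def downgrade():
    return  # alembic_run.py rewrites downgrade to do nothing
```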
api_logic_server_cli/prototypes/base/docs/training/admin_app_1_context.prompt.md

@@ -1,3 +1,43 @@
 
 Generate a full React Admin application using the following instructions.
 The result must be a runnable React app (`npm start`) that connects to the supplied JSON:API, with fully implemented components (no placeholders or empty files).
+
+## Critical Data Access Provider Configuration
+
+This project uses a **pre-configured JSON:API data provider** that was built when the project was created.
+
+### Key Requirements:
+
+1. **Data Provider**: Use the existing `jsonapiClient` from `./rav4-jsonapi-client/ra-jsonapi-client`
+2. **Record Context**: For custom components (like cards), ALWAYS wrap with `<RecordContextProvider value={record}>`
+3. **List Data Access**: Use `useListContext()` to get data and loading state
+4. **Individual Records**: Use `useRecordContext()` to access record data within providers
+5. **API Root**: The data provider connects to `conf.api_root` (typically `http://localhost:5656/api`)
+
+### Example Pattern for Custom List Views:
+```javascript
+import { useListContext, RecordContextProvider, useRecordContext } from 'react-admin';
+
+const CustomGrid = () => {
+    const { data, isLoading } = useListContext();
+
+    return (
+        <Grid container>
+            {data?.map(record => (
+                <Grid item key={record.id}>
+                    <RecordContextProvider value={record}>
+                        <CustomCard />
+                    </RecordContextProvider>
+                </Grid>
+            ))}
+        </Grid>
+    );
+};
+
+const CustomCard = () => {
+    const record = useRecordContext();
+    return <Card>{record.name}</Card>;
+};
+```
+
+### CRITICAL: Do NOT create new data providers or modify the existing JSON:API client configuration. The project's data flow depends on the pre-built provider.
api_logic_server_cli/prototypes/base/integration/mcp/mcp_client_executor.py

@@ -21,7 +21,7 @@ See: https://apilogicserver.github.io/Docs/Integration-MCP/
 ################
 
 create_tool_context_from_llm = False
-''' set to False to bypass LLM call and
+''' set to False to bypass LLM call and OpenAI API Key requirement. '''
 
 import os, logging, logging.config, sys
 from pathlib import Path
@@ -144,15 +144,15 @@ def query_llm_with_nl(learnings_and_schema: str, nl_query: str):
             temperature=0.2
         )
         tool_context_str = response.choices[0].message.content
-    else:
+    else: # this is so folks can try mcp without an OpenAI API key
         # read integration/mcp/mcp_tool_context.json
         tool_context_file_path = os.path.join(os.path.dirname(__file__), "../../integration/mcp/examples/mcp_tool_context_response_get.json")
-        if
+        if 'email' in nl_query:
             tool_context_file_path = os.path.join(os.path.dirname(__file__), "../../integration/mcp/examples/mcp_tool_context_response.json")
         try:
             with open(tool_context_file_path, "r") as tool_context_file:
                 tool_context_str = tool_context_file.read()
-
+            log.info(f"\n\n2c. Tool context from file {tool_context_file_path}:\n" + tool_context_str)
         except FileNotFoundError:
             raise ConstraintException(f"Tool context file not found at {tool_context_file_path}.")
 
api_logic_server_cli/prototypes/base/readme.md

@@ -7,37 +7,35 @@
 - ✅ **JSON:API Endpoints** - REST API for all database tables at `/api/*`
 - ✅ **Swagger Documentation** - Interactive API docs at `/api`
 - ✅ **Business Logic Engine** - Declarative rules in `logic/declare_logic.py`
-- ✅ **Security Framework** - Authentication/authorization in `security
-```
-als add-auth --provider-type=sql --db-url=
-als add-auth --provider-type=sql --db_url=postgresql://postgres:p@localhost/authdb
-
-als add-auth --provider-type=keycloak --db-url=localhost
-als add-auth --provider-type=keycloak --db-url=hardened
-
-als add-auth --provider-type=None # to disable
-```
+- ✅ **Security Framework** - Authentication/authorization in `security/`
 - ✅ **Database Models** - SQLAlchemy ORM in `database/models.py`
 
+See readme files under api, logic and security.
+
 **🚀 Ready to Run:** This is a complete, working system. Just press F5 or run `python api_logic_server_run.py`
 
+<br>
+
 ---
 
 # 🚀 Quick Start
 
-> 🤖 **For GitHub Copilot Users:** See `.github/.copilot-instructions.md` for AI assistant guidance on getting started with this project.
-
 **Bootstrap Copilot by pasting the following into the chat:**
 ```
 Please find and read `.github/.copilot-instructions.md`.
 ```
 
+<br>
+
 **Microservice Automation Complete -- run to verify:** for **VSCode** projects except those downloaded from Web/GenAI:
 1. `Press F5 to Run` (your venv is defaulted)
 
     For **other IDEs,** please follow the [Setup and Run](#1-setup-and-run) procedure, below.
 
->
+<br>
+
+> 💡 **Tip:** Create the sample app for customization examples:
+> `ApiLogicServer create --project-name=nw_sample --db_url=nw+`
 
 
 