ApiLogicServer 15.0.0__py3-none-any.whl → 15.0.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. api_logic_server_cli/add_cust/add_cust.py +8 -2
  2. api_logic_server_cli/api_logic_server.py +2 -2
  3. api_logic_server_cli/api_logic_server_info.yaml +3 -3
  4. api_logic_server_cli/create_from_model/__pycache__/dbml.cpython-312.pyc +0 -0
  5. api_logic_server_cli/create_from_model/dbml.py +1 -0
  6. api_logic_server_cli/genai/genai_svcs.py +5 -2
  7. api_logic_server_cli/manager.py +1 -0
  8. api_logic_server_cli/prototypes/base/api/api_discovery/mcp_discovery.py +63 -24
  9. api_logic_server_cli/prototypes/base/config/logging.yml +5 -0
  10. api_logic_server_cli/prototypes/base/config/server_setup.py +73 -0
  11. api_logic_server_cli/prototypes/base/integration/mcp/examples/mcp_discovery_response.json +150 -0
  12. api_logic_server_cli/prototypes/base/integration/mcp/examples/mcp_request.prompt +46 -0
  13. api_logic_server_cli/prototypes/base/integration/mcp/examples/mcp_tool_context_response.json +34 -0
  14. api_logic_server_cli/prototypes/base/integration/mcp/examples/mcp_tool_context_response_get.json +18 -0
  15. api_logic_server_cli/prototypes/base/integration/mcp/mcp_client_executor.py +395 -203
  16. api_logic_server_cli/prototypes/basic_demo/customizations/logic/logic_discovery/mcp_client_executor_request.py +11 -282
  17. api_logic_server_cli/prototypes/basic_demo/customizations/ui/admin/admin.yaml +3 -3
  18. api_logic_server_cli/prototypes/basic_demo/customizations/ui/admin/home.js +48 -0
  19. api_logic_server_cli/prototypes/manager/system/genai/mcp_learning/mcp.prompt +12 -0
  20. {apilogicserver-15.0.0.dist-info → apilogicserver-15.0.10.dist-info}/METADATA +1 -1
  21. {apilogicserver-15.0.0.dist-info → apilogicserver-15.0.10.dist-info}/RECORD +26 -32
  22. api_logic_server_cli/prototypes/base/integration/mcp/README_mcp.md +0 -15
  23. api_logic_server_cli/prototypes/base/integration/mcp/test_notes.txt +0 -37
  24. api_logic_server_cli/prototypes/basic_demo/customizations/api/api_discovery/mcp_discovery.py +0 -96
  25. api_logic_server_cli/prototypes/basic_demo/customizations/config/server_setup.py +0 -388
  26. api_logic_server_cli/prototypes/basic_demo/customizations/integration/mcp/.DS_Store +0 -0
  27. api_logic_server_cli/prototypes/basic_demo/customizations/integration/mcp/README_mcp.md +0 -15
  28. api_logic_server_cli/prototypes/basic_demo/customizations/integration/mcp/Zmcp_client_executor.py +0 -294
  29. api_logic_server_cli/prototypes/basic_demo/customizations/integration/mcp/mcp_schema.txt +0 -47
  30. api_logic_server_cli/prototypes/basic_demo/customizations/integration/mcp/mcp_server_discovery.json +0 -9
  31. api_logic_server_cli/prototypes/basic_demo/customizations/integration/mcp/mcp_tool_context.json +0 -25
  32. api_logic_server_cli/prototypes/basic_demo/customizations/integration/mcp/test_notes.txt +0 -37
  33. /api_logic_server_cli/prototypes/base/integration/mcp/{mcp_schema.txt → examples/mcp_schema.txt} +0 -0
  34. {apilogicserver-15.0.0.dist-info → apilogicserver-15.0.10.dist-info}/WHEEL +0 -0
  35. {apilogicserver-15.0.0.dist-info → apilogicserver-15.0.10.dist-info}/entry_points.txt +0 -0
  36. {apilogicserver-15.0.0.dist-info → apilogicserver-15.0.10.dist-info}/licenses/LICENSE +0 -0
  37. {apilogicserver-15.0.0.dist-info → apilogicserver-15.0.10.dist-info}/top_level.txt +0 -0
@@ -1,37 +0,0 @@
1
- LLM request - new...?
2
-
3
- [
4
- {
5
- "role": "system",
6
- "content": "You are an API planner that converts natural language queries into MCP Tool Context blocks using JSON:API. Return only the tool context as JSON."
7
- },
8
- {
9
- "role": "user",
10
- "content": "Schema:\n{\"base_url\": \"http://localhost:5656/api\", \"description\": \"API Logic Project: basic_demo\", \"email_services\": \"iff email is requested, Send email by issing a POST request to the Email endpoint, setting the subject, message and customer_id in the body.\", \"expected_response\": \"Respond with a JSON object with schema_version and a resource array including: tool_type, base_url, path, method, query_params array or body, headers.\", \"query_params\": \"- JSON:API custom filtering using a filter array (e.g., filter=[{\\\"name\\\":\\\"date_shipped\\\",\\\"op\\\":\\\"gt\\\",\\\"val\\\":\\\"2023-07-14\\\"}])\", \"resources\": [{\"fields\": [\"id\", \"name\", \"balance\", \"credit_limit\"], \"filterable\": [\"id\", \"name\", \"balance\", \"credit_limit\"], \"methods\": [\"GET\", \"PATCH\", \"POST\", \"DELETE\"], \"name\": \"Customer\", \"path\": \"/Customer\"}, {\"fields\": [\"id\", \"order_id\", \"product_id\", \"quantity\", \"amount\", \"unit_price\"], \"filterable\": [\"id\", \"order_id\", \"product_id\", \"quantity\", \"amount\", \"unit_price\"], \"methods\": [\"GET\", \"PATCH\", \"POST\", \"DELETE\"], \"name\": \"Item\", \"path\": \"/Item\"}, {\"fields\": [\"id\", \"notes\", \"customer_id\", \"date_shipped\", \"amount_total\"], \"filterable\": [\"id\", \"notes\", \"customer_id\", \"date_shipped\", \"amount_total\"], \"methods\": [\"GET\", \"PATCH\", \"POST\", \"DELETE\"], \"name\": \"Order\", \"path\": \"/Order\"}, {\"fields\": [\"id\", \"name\", \"unit_price\"], \"filterable\": [\"id\", \"name\", \"unit_price\"], \"methods\": [\"GET\", \"PATCH\", \"POST\", \"DELETE\"], \"name\": \"Product\", \"path\": \"/Product\"}, {\"fields\": [\"id\", \"request\", \"request_prompt\", \"completion\"], \"filterable\": [\"id\", \"request\", \"request_prompt\", \"completion\"], \"methods\": [\"GET\", \"PATCH\", \"POST\", \"DELETE\"], \"name\": \"Mcp\", \"path\": \"/Mcp\"}], \"schema_version\": \"1.0\", \"tool_type\": \"json-api\"}\n\nNatural language query: 'List the unshipped orders created before 2023-07-14, and send a discount email (subject: 'Discount Offer') to the customer for each one.'"
11
- }
12
- ]
13
-
14
- LLM request - old
15
- [
16
- {
17
- "role": "system",
18
- "content": "You are an API planner that converts natural language queries into MCP Tool Context blocks using JSON:API. Return only the tool context as JSON."
19
- },
20
- {
21
- "role": "user",
22
- "content": "Schema:\n{\"base_url\": \"http://localhost:5656/api\", \"description\": \"API Logic Project: basic_demo\", \"email_services\": \"iff email is requested, Send email by issing a POST request to the Email endpoint, setting the subject, message and customer_id in the body.\", \"expected_response\": \"Respond with a JSON object with schema_version and a resource array including: tool_type, base_url, path, method, query_params array or body, headers.\", \"query_params\": \"- JSON:API custom filtering using a filter array (e.g., filter=[{\\\"name\\\":\\\"date_shipped\\\",\\\"op\\\":\\\"gt\\\",\\\"val\\\":\\\"2023-07-14\\\"}])\", \"resources\": [{\"fields\": [\"id\", \"name\", \"balance\", \"credit_limit\"], \"filterable\": [\"id\", \"name\", \"balance\", \"credit_limit\"], \"methods\": [\"GET\", \"PATCH\", \"POST\", \"DELETE\"], \"name\": \"Customer\", \"path\": \"/Customer\"}, {\"fields\": [\"id\", \"order_id\", \"product_id\", \"quantity\", \"amount\", \"unit_price\"], \"filterable\": [\"id\", \"order_id\", \"product_id\", \"quantity\", \"amount\", \"unit_price\"], \"methods\": [\"GET\", \"PATCH\", \"POST\", \"DELETE\"], \"name\": \"Item\", \"path\": \"/Item\"}, {\"fields\": [\"id\", \"notes\", \"customer_id\", \"date_shipped\", \"amount_total\"], \"filterable\": [\"id\", \"notes\", \"customer_id\", \"date_shipped\", \"amount_total\"], \"methods\": [\"GET\", \"PATCH\", \"POST\", \"DELETE\"], \"name\": \"Order\", \"path\": \"/Order\"}, {\"fields\": [\"id\", \"name\", \"unit_price\"], \"filterable\": [\"id\", \"name\", \"unit_price\"], \"methods\": [\"GET\", \"PATCH\", \"POST\", \"DELETE\"], \"name\": \"Product\", \"path\": \"/Product\"}, {\"fields\": [\"id\", \"request\", \"request_prompt\", \"completion\"], \"filterable\": [\"id\", \"request\", \"request_prompt\", \"completion\"], \"methods\": [\"GET\", \"PATCH\", \"POST\", \"DELETE\"], \"name\": \"Mcp\", \"path\": \"/Mcp\"}], \"schema_version\": \"1.0\", \"tool_type\": \"json-api\"}\n\nNatural language query: 'List the unshipped orders created before 2023-07-14, and send a discount email (subject: 'Discount Offer') to the customer for each one.'"
23
- }
24
- ]
25
-
26
-
27
- messages - new
28
- [{'role': 'system', 'content': 'You are an API planner that converts natural language queries into MCP Tool Context b...API. Return only the tool context as JSON.'}, {'role': 'user', 'content': 'Schema:\n{"base_url": "http://localhost:5656/api", "description": "API Logic Project: ...) to the customer for each one.'"}]
29
-
30
- messages- old
31
- [{'role': 'system', 'content': 'You are an API planner that converts natural language queries into MCP Tool Context blocks using JSON:API. Return only the tool context as JSON.'}, {'role': 'user', 'content': 'Schema:\n{"base_url": "http://localhost:5656/api", "description": "API Logic Project: basic_demo", "email_services": "iff email is requested, Send email by issing a POST request to the Email endpoint, setting the subject, message and customer_id in the body.", "expected_response": "Respond with a JSON object with schema_version and a resource array including: tool_type, base_url, path, method, query_params array or body, headers.", "query_params": "- JSON:API custom filtering using a filter array (e.g., filter=[{\\"name\\":\\"date_shipped\\",\\"op\\":\\"gt\\",\\"val\\":\\"2023-07-14\\"}])", "resources": [{"fields": ["id", "name", "balance", "credit_limit"], "filterable": ["id", "name", "balance", "credit_limit"], "methods": ["GET", "PATCH", "POST", "DELETE"], "name": "Customer", "path": "/Customer"}, {"fields": ["id", "order_id", "product_id", "quantity", "amount", "unit_price"], "filterable": ["id", "order_id", "product_id", "quantity", "amount", "unit_price"], "methods": ["GET", "PATCH", "POST", "DELETE"], "name": "Item", "path": "/Item"}, {"fields": ["id", "notes", "customer_id", "date_shipped", "amount_total"], "filterable": ["id", "notes", "customer_id", "date_shipped", "amount_total"], "methods": ["GET", "PATCH", "POST", "DELETE"], "name": "Order", "path": "/Order"}, {"fields": ["id", "name", "unit_price"], "filterable": ["id", "name", "unit_price"], "methods": ["GET", "PATCH", "POST", "DELETE"], "name": "Product", "path": "/Product"}, {"fields": ["id", "request", "request_prompt", "completion"], "filterable": ["id", "request", "request_prompt", "completion"], "methods": ["GET", "PATCH", "POST", "DELETE"], "name": "Mcp", "path": "/Mcp"}], "schema_version": "1.0", "tool_type": "json-api"}\n\nNatural language query: \'List the unshipped orders created before 2023-07-14, and send a discount email (subject: \'Discount Offer\') to the customer for each one.\''}]
32
-
33
- old content
34
- 'Schema:\n{"base_url": "http://localhost:5656/api", "description": "API Logic Project: basic_demo", "email_services": "iff email is requested, Send email by issing a POST request to the Email endpoint, setting the subject, message and customer_id in the body.", "expected_response": "Respond with a JSON object with schema_version and a resource array including: tool_type, base_url, path, method, query_params array or body, headers.", "query_params": "- JSON:API custom filtering using a filter array (e.g., filter=[{\\"name\\":\\"date_shipped\\",\\"op\\":\\"gt\\",\\"val\\":\\"2023-07-14\\"}])", "resources": [{"fields": ["id", "name", "balance", "credit_limit"], "filterable": ["id", "name", "balance", "credit_limit"], "methods": ["GET", "PATCH", "POST", "DELETE"], "name": "Customer", "path": "/Customer"}, {"fields": ["id", "order_id", "product_id", "quantity", "amount", "unit_price"], "filterable": ["id", "order_id", "product_id", "quantity", "amount", "unit_price"], "methods": ["GET", "PATCH", "POST", "DELETE"], "name": "Item", "path": "/Item"}, {"fields": ["id", "notes", "customer_id", "date_shipped", "amount_total"], "filterable": ["id", "notes", "customer_id", "date_shipped", "amount_total"], "methods": ["GET", "PATCH", "POST", "DELETE"], "name": "Order", "path": "/Order"}, {"fields": ["id", "name", "unit_price"], "filterable": ["id", "name", "unit_price"], "methods": ["GET", "PATCH", "POST", "DELETE"], "name": "Product", "path": "/Product"}, {"fields": ["id", "request", "request_prompt", "completion"], "filterable": ["id", "request", "request_prompt", "completion"], "methods": ["GET", "PATCH", "POST", "DELETE"], "name": "Mcp", "path": "/Mcp"}], "schema_version": "1.0", "tool_type": "json-api"}\n\nNatural language query: \'List the unshipped orders created before 2023-07-14, and send a discount email (subject: \'Discount Offer\') to the customer for each one.\''
35
-
36
- where new content looks correct:
37
- 'Schema:\n{"base_url": "http://localhost:5656/api", "description": "API Logic Project: basic_demo", "email_services": "iff email is requested, Send email by issing a POST request to the Email endpoint, setting the subject, message and customer_id in the body.", "expected_response": "Respond with a JSON object with schema_version and a resource array including: tool_type, base_url, path, method, query_params array or body, headers.", "query_params": "- JSON:API custom filtering using a filter array (e.g., filter=[{\\"name\\":\\"date_shipped\\",\\"op\\":\\"gt\\",\\"val\\":\\"2023-07-14\\"}])", "resources": [{"fields": ["id", "name", "balance", "credit_limit"], "filterable": ["id", "name", "balance", "credit_limit"], "methods": ["GET", "PATCH", "POST", "DELETE"], "name": "Customer", "path": "/Customer"}, {"fields": ["id", "order_id", "product_id", "quantity", "amount", "unit_price"], "filterable": ["id", "order_id", "product_id", "quantity", "amount", "unit_price"], "methods": ["GET", "PATCH", "POST", "DELETE"], "name": "Item", "path": "/Item"}, {"fields": ["id", "notes", "customer_id", "date_shipped", "amount_total"], "filterable": ["id", "notes", "customer_id", "date_shipped", "amount_total"], "methods": ["GET", "PATCH", "POST", "DELETE"], "name": "Order", "path": "/Order"}, {"fields": ["id", "name", "unit_price"], "filterable": ["id", "name", "unit_price"], "methods": ["GET", "PATCH", "POST", "DELETE"], "name": "Product", "path": "/Product"}, {"fields": ["id", "request", "request_prompt", "completion"], "filterable": ["id", "request", "request_prompt", "completion"], "methods": ["GET", "PATCH", "POST", "DELETE"], "name": "Mcp", "path": "/Mcp"}], "schema_version": "1.0", "tool_type": "json-api"}\n\nNatural language query: \'List the unshipped orders created before 2023-07-14, and send a discount email (subject: \'Discount Offer\') to the customer for each one.\''
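The notes above record the planner request sent to the LLM: a system message that frames it as an API planner, plus a user message carrying the discovery schema and the natural-language query. A minimal sketch of assembling such a request is shown below; `build_planner_messages` is an illustrative helper, not part of the shipped mcp_client_executor.py.

```python
# Sketch: assemble the MCP planner request recorded in the notes above.
# build_planner_messages is an illustrative name; the system prompt and
# user-message layout mirror the recorded request.
import json

def build_planner_messages(discovery_schema: dict, nl_query: str) -> list:
    """Return chat messages asking the LLM for an MCP Tool Context block."""
    system = ("You are an API planner that converts natural language queries into "
              "MCP Tool Context blocks using JSON:API. Return only the tool context as JSON.")
    user = f"Schema:\n{json.dumps(discovery_schema)}\n\nNatural language query: '{nl_query}'"
    return [{"role": "system", "content": system},
            {"role": "user", "content": user}]

if __name__ == "__main__":
    schema = {"base_url": "http://localhost:5656/api", "schema_version": "1.0",
              "tool_type": "json-api", "resources": []}   # normally loaded from /.well-known/mcp.json
    query = ("List the unshipped orders created before 2023-07-14, and send a discount email "
             "(subject: 'Discount Offer') to the customer for each one.")
    print(json.dumps(build_planner_messages(schema, query), indent=2))
```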
@@ -1,96 +0,0 @@
1
- from flask import request, jsonify
2
- from flask import Flask, redirect, send_from_directory, send_file
3
- import logging
4
- import os
5
- import json
6
- import io
7
-
8
- import requests
9
- from config.config import Args # circular import error if at top
10
-
11
- app_logger = logging.getLogger("api_logic_server_app")
12
-
13
- def add_service(app, api, project_dir, swagger_host: str, PORT: str, method_decorators = []):
14
- pass
15
-
16
- def get_server_url():
17
- """ return the server URL for the OpenAPI spec """
18
- result = f'http://{Args.instance.swagger_host}:{Args.instance.swagger_port}'
19
- # get env variable API_LOGIC_SERVER_TUNNEL (or None)
20
- if tunnel_url := os.getenv("API_LOGIC_SERVER_TUNNEL", None):
21
- app_logger.info(f".. tunnel URL: {tunnel_url}")
22
- result = tunnel_url
23
- return result # + '/api'
24
-
25
-
26
- @app.before_request
27
- def before_any_request():
28
- # print(f"[DEBUG] Incoming request: {request.method} {request.url}")
29
- if activate_openapi_logging := True:
30
- if request.content_type == 'application/json' and request.method in ['POST', 'PUT', 'PATCH']:
31
- # openapi: Incoming request: PATCH http://localhost:5656/api/Customer/1/ {'data': {'attributes': {'credit_limit': 5555}, 'type': 'Customer', 'id': '1'}}
32
- # openapi: Incoming request: PATCH http://6f6f-2601-644-4900-d6f0-ecc9-6df3-8863-c5b2.ngrok-free.app/api/Customer/1 {'credit_limit': 5555}
33
-
34
- app_logger.info(f"openapi: Incoming request: {request.method} {request.url} {str(request.json)}")
35
- else:
36
- app_logger.info(f"openapi: Incoming request: {request.method} {request.url}")
37
- # app_logger.info(f"openapi: Incoming request headers: {request.headers}")
38
-
39
- chatgpt_request_json = {
40
- "credit_limit": 25000,
41
- }
42
- standard_request_json = {
43
- "data": {
44
- "type": "Customer",
45
- "id": "ALFKI",
46
- "attributes": {
47
- "name": "Alice",
48
- "credit_limit": 25000,
49
- "balance": 12345
50
- }
51
- }
52
- }
53
- swagger_request_json = {
54
- 'data': {
55
- 'attributes': {
56
- 'credit_limit': 5555
57
- },
58
- 'type': 'Customer',
59
- 'id': '1'
60
- }
61
- }
62
- pass
63
-
64
-
65
- @app.route('/.well-known/mcp.json', methods=['GET'])
66
- def mcp_discovery(path=None):
67
- ''' called by mcp_client_executor for discovery, eg:
68
- ```
69
- {
70
- "tool_type": "json-api",
71
- "schema_version": "1.0",
72
- "base_url": "https://crm.company.com",
73
- "resources": [
74
- {
75
- "name": "Customer",
76
- "path": "/Customer",
77
- "methods": ["GET", "PATCH"],
78
- "fields": ["id", "name", "balance", "credit_limit"],
79
- "filterable": ["name", "credit_limit"],
80
- "example": "List customers with credit over 5000"
81
- }
82
- ]
83
- }
84
- ```
85
- test: curl -X GET "http://localhost:5656/.well-known/mcp.json"
86
- '''
87
- # return docs/mcp_schema.json
88
- schema_path = os.path.join(project_dir, "docs/mcp_learning/mcp_schema.json")
89
- try:
90
- with open(schema_path, "r") as schema_file:
91
- schema = json.load(schema_file)
92
- return jsonify(schema), 200
93
- except Exception as e:
94
- app_logger.error(f"Error loading MCP schema: {e}")
95
- return jsonify({"error": "MCP schema not found"}), 404
96
- pass
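The endpoint above serves the discovery schema at `/.well-known/mcp.json`. A minimal client-side sketch, assuming a local server on port 5656 as in the docstring's curl test (this is not the shipped mcp_client_executor):

```python
# Sketch: fetch the MCP discovery document served above, then issue a JSON:API query.
# Assumes a local server on port 5656, as in the docstring's curl test.
import json
import requests

schema = requests.get("http://localhost:5656/.well-known/mcp.json", timeout=10).json()
print("resources:", [r["name"] for r in schema.get("resources", [])])

# JSON:API custom filtering, per the query_params hint in the schema:
filter_spec = json.dumps([{"name": "date_shipped", "op": "gt", "val": "2023-07-14"}])
resp = requests.get(f'{schema["base_url"]}/Order', params={"filter": filter_spec}, timeout=10)
print(resp.status_code, len(resp.json().get("data", [])), "orders")
```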
@@ -1,388 +0,0 @@
1
- #!/usr/bin/env python3
2
-
3
- ###############################################################################
4
- #
5
- # Initalization functions used by api_logic_server_run.py
6
- #
7
- # You typically do not customize this file,
8
- # except to override Creation Defaults and Logging, below.
9
- #
10
- ###############################################################################
11
-
12
- """
13
- Operation:
14
- 1. api_logic_server_run.py - imports config
15
- 1. captures args
16
- 2. api_logic_server_run.py - imports server_setup
17
- 1. server_setup#logging_setup()
18
- 3. api_logic_server_run.py - server_setup.api_logic_server_setup
19
- On error, NOT CALLED: constraint_handler or ValidationErrorExt (!)
20
-
21
- + Operation:
22
- 1. api_logic_server_run.py - imports config
23
- 1. captures args
24
- 1. config#logging_setup()
25
- 2. api_logic_server_run.py - imports server_setup
26
- 3. api_logic_server_run.py - server_setup.api_logic_server_setup
27
-
28
- """
29
-
30
- start_up_message = "normal start"
31
-
32
- import traceback
33
- try:
34
- import os, logging, logging.config, sys, yaml # failure here means venv probably not set
35
- except:
36
- track = traceback.format_exc()
37
- print(" ")
38
- print(track)
39
- print("venv probably not set")
40
- print("Please see https://apilogicserver.github.io/Docs/Project-Env/ \n")
41
- exit(1)
42
-
43
- from flask_sqlalchemy import SQLAlchemy
44
- import json
45
- from pathlib import Path
46
- if os.getenv("EXPERIMENT") == '+':
47
- import config
48
- from config.config import Args
49
-
50
-
51
- setup_path = Path(os.path.abspath(os.path.dirname(__file__)))
52
- project_path = setup_path.parent
53
-
54
-
55
- def is_docker() -> bool:
56
- """ running docker? dir exists: /home/api_logic_server """
57
- path = '/home/api_logic_server'
58
- path_result = os.path.isdir(path) # this *should* exist only on docker
59
- env_result = "DOCKER" == os.getenv('APILOGICSERVER_RUNNING')
60
- # assert path_result == env_result
61
- return path_result
62
-
63
-
64
- if is_docker():
65
- sys.path.append(os.path.abspath('/home/api_logic_server'))
66
-
67
- logic_alerts = True
68
- """ Set False to silence startup message """
69
- declare_logic_message = ""
70
- declare_security_message = "ALERT: *** Security Not Enabled ***"
71
-
72
- project_dir = str(project_path)
73
- os.chdir(project_dir) # so admin app can find images, code
74
- import api.system.api_utils as api_utils
75
- logic_logger_activate_debug = False
76
- """ True prints all rules on startup """
77
-
78
- args = ""
79
- arg_num = 0
80
- for each_arg in sys.argv:
81
- args += each_arg
82
- arg_num += 1
83
- if arg_num < len(sys.argv):
84
- args += ", "
85
- project_name = os.path.basename(os.path.normpath(project_path))
86
-
87
- from typing import TypedDict
88
- import safrs # fails without venv - see https://apilogicserver.github.io/Docs/Project-Env/
89
- from database.system.SAFRSBaseX import SAFRSBase
90
- from safrs import ValidationError, SAFRSAPI as _SAFRSAPI
91
- #from safrs import ValidationError, SAFRSBase, SAFRSAPI as _SAFRSAPI
92
- from logic_bank.logic_bank import LogicBank
93
- from logic_bank.exceptions import LBActivateException
94
- from logic_bank.exec_row_logic.logic_row import LogicRow
95
- from logic_bank.rule_type.constraint import Constraint
96
- from .activate_logicbank import activate_logicbank
97
- from sqlalchemy.ext.declarative import declarative_base
98
- from sqlalchemy.orm import Session
99
- import socket
100
- import warnings
101
- from flask import Flask, redirect, send_from_directory, send_file
102
- from flask_cors import CORS
103
- from safrs import ValidationError, SAFRSAPI
104
- import ui.admin.admin_loader as AdminLoader
105
- from security.system.authentication import configure_auth
106
- import database.bind_dbs as bind_dbs
107
- import oracledb
108
- import integration.kafka.kafka_producer as kafka_producer
109
- import integration.kafka.kafka_consumer as kafka_consumer
110
- import integration.n8n.n8n_producer as n8n_producer
111
-
112
-
113
- if os.getenv("EXPERIMENT") == '+':
114
- app_logger = logging.getLogger("api_logic_server_app")
115
-
116
-
117
- class SAFRSAPI(_SAFRSAPI):
118
- """
119
- Extends SAFRSAPI to handle client_uri
120
-
121
- Args:
122
- _SAFRSAPI (_type_): _description_
123
- """
124
-
125
- def __init__(self, *args, **kwargs):
126
- client_uri = kwargs.pop('client_uri', None)
127
- if client_uri:
128
- kwargs['port'] = None
129
- kwargs['host'] = client_uri
130
- super().__init__(*args, **kwargs)
131
-
132
-
133
-
134
- # ==================================
135
- # Set
136
- # ==================================
137
-
138
- def get_args(flask_app: Flask) -> Args:
139
- """
140
- Get Args, update logging
141
-
142
- Returns:
143
- Args: typed access to flask_app.config
144
- """
145
- args = Args(flask_app=flask_app) # creation defaults
146
-
147
- import config.config as config
148
- flask_app.config.from_object(config.Config)
149
- app_logger.debug(f"\nserver_setup - get_args: Config args: \n{args}") # # config file (e.g., db uri's)
150
-
151
- args.get_cli_args(dunder_name=__name__, args=args)
152
- app_logger.debug(f"\nserver_setup - get_args: CLI args: \n{args}") # api_logic_server_run cl args
153
-
154
- flask_app.config.from_prefixed_env(prefix="APILOGICPROJECT") # env overrides (e.g., docker)
155
- app_logger.debug(f"\nserver_setup - get_args: ENV args: \n{args}\n\n")
156
-
157
- if args.verbose: # export APILOGICPROJECT_VERBOSE=True
158
- app_logger.setLevel(logging.DEBUG)
159
- safrs.log.setLevel(logging.DEBUG) # notset 0, debug 10, info 20, warn 30, error 40, critical 50
160
- authentication_logger = logging.getLogger('security.system.authentication')
161
- authentication_logger.setLevel(logging.DEBUG)
162
- authorization_logger = logging.getLogger('security.system.authorization')
163
- authorization_logger.setLevel(logging.DEBUG)
164
- auth_provider_logger = logging.getLogger('security.authentication_provider.sql.auth_provider')
165
- auth_provider_logger.setLevel(logging.DEBUG)
166
- # sqlachemy_logger = logging.getLogger('sqlalchemy.engine')
167
- # sqlachemy_logger.setLevel(logging.DEBUG)
168
-
169
- if app_logger.getEffectiveLevel() <= logging.DEBUG:
170
- api_utils.sys_info(flask_app.config)
171
- app_logger.debug(f"\nserver_setup - get_args: ENV args: \n{args}\n\n")
172
- return args
173
-
174
-
175
- # ==================================
176
- # LOGGING SETUP
177
- # ==================================
178
-
179
- def logging_setup() -> logging.Logger:
180
- """
181
- Setup Logging
182
- """
183
- global app_logger, debug_value, project_path
184
- logging_config = f'{project_path}/config/logging.yml'
185
- if os.getenv('APILOGICPROJECT_LOGGING_CONFIG'):
186
- logging_config = project_path.joinpath(os.getenv("APILOGICPROJECT_LOGGING_CONFIG"))
187
- with open(logging_config,'rt') as f: # see also logic/declare_logic.py
188
- config=yaml.safe_load(f.read())
189
- f.close()
190
- logging.config.dictConfig(config) # log levels: notset 0, debug 10, info 20, warn 30, error 40, critical 50
191
- app_logger = logging.getLogger("api_logic_server_app")
192
- debug_value = os.getenv('APILOGICPROJECT_DEBUG')
193
- if debug_value is not None: # > export APILOGICPROJECT_DEBUG=True
194
- debug_value = debug_value.upper()
195
- if debug_value.startswith("F") or debug_value.startswith("N"):
196
- app_logger.setLevel(logging.INFO)
197
- else:
198
- app_logger.setLevel(logging.DEBUG)
199
- app_logger.debug(f'\nDEBUG level set from env\n')
200
- app_logger.info(f'\nAPI Logic Project Server Setup ({project_name}) Starting with CLI args: \n.. {args}\n')
201
- app_logger.info(f'Created August 03, 2024 09:34:01 at {str(project_path)}\n')
202
- return app_logger
203
-
204
-
205
- class ValidationErrorExt(ValidationError):
206
- """
207
- This exception is raised when invalid input has been detected (client side input)
208
- Always send back the message to the client in the response
209
- """
210
-
211
- def __init__(self, message="", status_code=400, api_code=2001, detail=None, error_attributes=None):
212
- Exception.__init__(self)
213
- self.error_attributes = error_attributes
214
- self.status_code = status_code
215
- self.message = message
216
- self.api_code = api_code
217
- self.detail: TypedDict = detail
218
-
219
-
220
- def validate_db_uri(flask_app):
221
- """
222
-
223
- For sqlite, verify the SQLALCHEMY_DATABASE_URI file exists
224
-
225
- * Since the name is not reported by SQLAlchemy
226
-
227
- Args:
228
- flask_app (_type_): initialize flask app
229
- """
230
-
231
- db_uri = flask_app.config['SQLALCHEMY_DATABASE_URI']
232
- app_logger.debug(f'sqlite_db_path validity check with db_uri: {db_uri}')
233
- if 'sqlite' not in db_uri:
234
- return
235
- sqlite_db_path = ""
236
- if db_uri.startswith('sqlite:////'): # eg, sqlite:////Users/val/dev/ApiLogicServer/ApiLogicServer-dev/servers/ai_customer_orders/database/db.sqlite
237
- sqlite_db_path = Path(db_uri[9:])
238
- app_logger.debug(f'\t.. Absolute: {str(sqlite_db_path)}')
239
- else: # eg, sqlite:///../database/db.sqlite
240
- db_relative_path = db_uri[10:]
241
- db_relative_path = db_relative_path.replace('../', '') # relative
242
- sqlite_db_path = Path(os.getcwd()).joinpath(db_relative_path)
243
- app_logger.debug(f'\t.. Relative: {str(sqlite_db_path)}')
244
- if db_uri == 'sqlite:///database/db.sqlite':
245
- raise ValueError(f'This fails, please use; sqlite:///../database/db.sqlite')
246
- if sqlite_db_path.is_file():
247
- app_logger.debug(f'\t.. sqlite_db_path is a valid file\n')
248
- else: # remove this if you wish
249
- raise ValueError(f'sqlite database does not exist: {str(sqlite_db_path)}')
250
-
251
-
252
-
253
- # ==========================================================
254
- # API Logic Server Setup
255
- # - Opens Database(s)
256
- # - Setup API, Logic, Security, Optimistic Locking
257
- # ==========================================================
258
-
259
- def api_logic_server_setup(flask_app: Flask, args: Args):
260
- """
261
- API Logic Server Setup
262
-
263
- 1. Opens Database(s)
264
- 2. Setup API, Logic, Security, Optimistic Locking
265
-
266
-
267
- Args:
268
- flask_app (_type_): configured flask_app (servers, ports, db uri's)
269
- args (_type_): typed access to flask_app.config
270
-
271
- Raises:
272
- ValidationErrorExt: rebadge LogicBank errors for SAFRS API
273
- """
274
-
275
- from sqlalchemy import exc as sa_exc
276
-
277
- global logic_logger_activate_debug, declare_logic_message, declare_security_message
278
-
279
- with warnings.catch_warnings():
280
-
281
- safrs_log_level = safrs.log.getEffectiveLevel()
282
- db_logger = logging.getLogger('sqlalchemy')
283
- db_log_level = db_logger.getEffectiveLevel()
284
- safrs_init_logger = logging.getLogger("safrs.safrs_init")
285
- authorization_logger = logging.getLogger('security.system.authorization')
286
- authorization_log_level = authorization_logger.getEffectiveLevel()
287
- do_hide_chatty_logging = True and not args.verbose
288
- # eg, system startup health check: read on API and relationship - hide many log entries
289
- if do_hide_chatty_logging and app_logger.getEffectiveLevel() <= logging.INFO:
290
- safrs.log.setLevel(logging.WARN) # notset 0, debug 10, info 20, warn 30, error 40, critical 50
291
- db_logger.setLevel(logging.WARN)
292
- safrs_init_logger.setLevel(logging.WARN)
293
- authorization_logger.setLevel(logging.WARN)
294
-
295
- bind_dbs.bind_dbs(flask_app)
296
-
297
- # https://stackoverflow.com/questions/34674029/sqlalchemy-query-raises-unnecessary-warning-about-sqlite-and-decimal-how-to-spe
298
- warnings.simplefilter("ignore", category=sa_exc.SAWarning) # alert - disable for safety msgs
299
-
300
- def constraint_handler(message: str, constraint: Constraint, logic_row: LogicRow):
301
- """ format LogicBank constraint exception for SAFRS """
302
- if constraint is not None and hasattr(constraint, 'error_attributes'):
303
-
304
- detail = {"model": logic_row.name, "error_attributes": constraint.error_attributes}
305
- else:
306
- detail = {"model": logic_row.name}
307
- raise ValidationErrorExt(message=message, detail=detail)
308
-
309
- admin_enabled = os.name != "nt"
310
- admin_enabled = False
311
- """ internal use, for future enhancements """
312
- if admin_enabled:
313
- flask_app.config.update(SQLALCHEMY_BINDS={'admin': 'sqlite:////tmp/4LSBE.sqlite.4'})
314
-
315
- db = SQLAlchemy()
316
- db.init_app(flask_app)
317
- flask_app.db = db
318
- with flask_app.app_context():
319
-
320
- with open(Path(project_path).joinpath('security/system/custom_swagger.json')) as json_file:
321
- custom_swagger = json.load(json_file)
322
- safrs_api = SAFRSAPI(flask_app, app_db= db, host=args.swagger_host, port=args.swagger_port, client_uri=args.client_uri,
323
- prefix = args.api_prefix, custom_swagger=custom_swagger)
324
-
325
- if os.getenv('APILOGICSERVER_ORACLE_THICK'):
326
- oracledb.init_oracle_client(lib_dir=os.getenv('APILOGICSERVER_ORACLE_THICK'))
327
-
328
- db = safrs.DB # valid only after is initialized, above
329
- session: Session = db.session
330
-
331
- if admin_enabled: # unused (internal dev use)
332
- db.create_all()
333
- db.create_all(bind='admin')
334
- session.commit()
335
-
336
- from api import expose_api_models, customize_api
337
-
338
- import database.models
339
- app_logger.info("Data Model Loaded, customizing...")
340
- from database import customize_models
341
-
342
- activate_logicbank(session, constraint_handler)
343
-
344
- method_decorators : list = []
345
- safrs_init_logger.setLevel(logging.WARN)
346
- expose_api_models.expose_models(safrs_api, method_decorators)
347
- app_logger.info(f'Declare API - api/expose_api_models, endpoint for each table on {args.swagger_host}:{args.swagger_port}, customizing...')
348
- customize_api.expose_services(flask_app, safrs_api, project_dir, swagger_host=args.swagger_host, PORT=args.port) # custom services
349
-
350
- if args.security_enabled:
351
- configure_auth(flask_app, database, method_decorators)
352
-
353
- if args.security_enabled:
354
- from security import declare_security # activate security
355
- app_logger.info("..declare security - security/declare_security.py"
356
- # not accurate: + f' -- {len(database.database_discovery.authentication_models.metadata.tables)}'
357
- + ' authentication tables loaded')
358
- declare_security_message = declare_security.declare_security_message
359
-
360
- from api.system.opt_locking import opt_locking
361
- from config.config import OptLocking
362
- if args.opt_locking == OptLocking.IGNORED.value:
363
- app_logger.info("\nOptimistic Locking: ignored")
364
- else:
365
- opt_locking.opt_locking_setup(session)
366
-
367
- kafka_producer.kafka_producer()
368
- kafka_consumer.kafka_consumer(safrs_api = safrs_api)
369
-
370
- n8n_producer.n8n_producer()
371
-
372
- SAFRSBase._s_auto_commit = False
373
- session.close()
374
-
375
- safrs.log.setLevel(safrs_log_level)
376
- db_logger.setLevel(db_log_level)
377
- authorization_logger.setLevel(authorization_log_level)
378
-
379
- if os.getenv('APILOGICPROJECT_DEBUG'): # temp debug since logging in config is not happening
380
- KAFKA_SERVER = os.getenv('KAFKA_SERVER')
381
- is_empty = False
382
- if KAFKA_SERVER is not None:
383
- is_empty = KAFKA_SERVER == ""
384
- is_none = KAFKA_SERVER is None
385
- app_logger.debug(f'\nDEBUG KAFKA_SERVER: [{KAFKA_SERVER}] (is_empty: {is_empty}) (is_none: {is_none}) \n')
386
- app_logger.debug(f'... Args.instance.kafka_producer: {Args.instance.kafka_producer}\n')
387
-
388
-
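get_args() above layers configuration: class-based defaults from config.Config, then CLI arguments, then APILOGICPROJECT_-prefixed environment variables via from_prefixed_env. A minimal, self-contained sketch of that Flask precedence (the Config class and values here are stand-ins, not the generated project's config):

```python
# Sketch of the configuration precedence used in get_args() above:
# class-based defaults first, then APILOGICPROJECT_-prefixed environment overrides.
# The Flask app and Config class here are stand-ins, not the generated project's files.
import os
from flask import Flask

class Config:
    VERBOSE = False
    SQLALCHEMY_DATABASE_URI = "sqlite:///../database/db.sqlite"

os.environ["APILOGICPROJECT_VERBOSE"] = "true"   # e.g., exported in a shell or docker env

app = Flask(__name__)
app.config.from_object(Config)                          # creation defaults
app.config.from_prefixed_env(prefix="APILOGICPROJECT")  # env overrides (values JSON-decoded when possible)

print(app.config["VERBOSE"])   # True - the environment wins over the class default
```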
@@ -1,15 +0,0 @@
1
- Model Context Protocol is a way for:
2
-
3
- 1. **Business user ad hoc flows** using existing published MCP services (vs. hard-coding them in IT as endpoints; flows can be cached for repeated use)
4
-
5
- * ***Natural Language access*** to corporate databases for improved user interfaces
6
-
7
- * LLMs ***choreograph*** multiple MCP calls (to one or more MCP servers) in a chain of calls - an agentic workflow. MCPs support shared contexts and goals, enabling the LLM to use the result from one call to determine whether the goal has been reached, or which service to call next
8
-
9
- 2. Chat agents to ***discover*** and ***call*** external servers, be they databases, APIs, file systems, etc. MCPs support shared contexts and goals, enabling the LLM to choose which server to call next
10
-
11
- * ***Corporate database participation*** in such flows, by making key functions available as MCP calls.
12
-
13
- This example is [explained here](https://apilogicserver.github.io/Docs/Integration-MCP/).
14
-
15
- > Note: this sample uses multi-term filters. These are usually OR'd together, but this example requires AND. This is provided by `database/system/SAFRSBaseX.py` (see `return query.filter(operator.and_(*expressions))` in `_s_filter()`), activated in `config/server_setup.py`.
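A minimal sketch of the multi-term filter the note describes, assuming a local basic_demo server on port 5656; with stock SAFRS behavior the terms would be OR'd, while the SAFRSBaseX override ANDs them:

```python
# Sketch: a multi-term JSON:API filter, as described in the note above.
# Field names and ops follow the basic_demo schema shown earlier; adjust to your project.
import json
import requests

filter_spec = json.dumps([
    {"name": "amount_total", "op": "gt", "val": 100},            # term 1
    {"name": "date_shipped", "op": "gt", "val": "2023-01-01"},   # term 2 - ANDed by SAFRSBaseX._s_filter()
])
resp = requests.get("http://localhost:5656/api/Order",
                    params={"filter": filter_spec}, timeout=10)
print(resp.status_code, len(resp.json().get("data", [])), "matching orders")
```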