jaclang 0.5.18__py3-none-any.whl → 0.6.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of jaclang might be problematic.
- jaclang/cli/cli.py +4 -2
- jaclang/compiler/__init__.py +12 -5
- jaclang/compiler/absyntree.py +3 -3
- jaclang/compiler/generated/jac_parser.py +2 -2
- jaclang/compiler/jac.lark +2 -2
- jaclang/compiler/parser.py +47 -7
- jaclang/compiler/passes/main/__init__.py +3 -2
- jaclang/compiler/passes/main/access_modifier_pass.py +173 -0
- jaclang/compiler/passes/main/import_pass.py +32 -19
- jaclang/compiler/passes/main/pyast_gen_pass.py +41 -26
- jaclang/compiler/passes/main/pyast_load_pass.py +136 -73
- jaclang/compiler/passes/main/pyout_pass.py +14 -13
- jaclang/compiler/passes/main/registry_pass.py +8 -3
- jaclang/compiler/passes/main/schedules.py +5 -3
- jaclang/compiler/passes/main/sym_tab_build_pass.py +23 -26
- jaclang/compiler/passes/main/tests/test_import_pass.py +2 -2
- jaclang/compiler/passes/tool/jac_formatter_pass.py +83 -21
- jaclang/compiler/passes/tool/tests/test_jac_format_pass.py +11 -4
- jaclang/compiler/passes/transform.py +2 -0
- jaclang/compiler/symtable.py +10 -3
- jaclang/compiler/tests/test_importer.py +9 -0
- jaclang/compiler/workspace.py +17 -5
- jaclang/core/aott.py +34 -63
- jaclang/core/importer.py +73 -65
- jaclang/core/llms/__init__.py +20 -0
- jaclang/core/llms/anthropic.py +61 -0
- jaclang/core/llms/base.py +206 -0
- jaclang/core/llms/groq.py +67 -0
- jaclang/core/llms/huggingface.py +73 -0
- jaclang/core/llms/ollama.py +78 -0
- jaclang/core/llms/openai.py +61 -0
- jaclang/core/llms/togetherai.py +60 -0
- jaclang/core/llms/utils.py +9 -0
- jaclang/core/utils.py +16 -1
- jaclang/plugin/default.py +37 -14
- jaclang/plugin/feature.py +9 -6
- jaclang/plugin/spec.py +8 -1
- jaclang/settings.py +1 -1
- jaclang/utils/helpers.py +6 -2
- jaclang/utils/treeprinter.py +9 -6
- jaclang-0.6.0.dist-info/METADATA +17 -0
- {jaclang-0.5.18.dist-info → jaclang-0.6.0.dist-info}/RECORD +45 -36
- jaclang/core/llms.py +0 -111
- jaclang-0.5.18.dist-info/METADATA +0 -7
- {jaclang-0.5.18.dist-info → jaclang-0.6.0.dist-info}/WHEEL +0 -0
- {jaclang-0.5.18.dist-info → jaclang-0.6.0.dist-info}/entry_points.txt +0 -0
- {jaclang-0.5.18.dist-info → jaclang-0.6.0.dist-info}/top_level.txt +0 -0
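The headline change in this release is the LLM backend refactor: the monolithic jaclang/core/llms.py (shown deleted below) is replaced by a jaclang/core/llms/ package with one module per provider, a shared base.py, and new groq.py, openai.py, and togetherai.py backends. The contents of the new files are not shown in this diff; as rough orientation only, a package __init__.py of this shape would typically re-export the per-provider clients, along the lines of the hypothetical sketch below (the class names for the new providers are assumptions):

# Hypothetical sketch of jaclang/core/llms/__init__.py in 0.6.0.
# Module names are taken from the file list above; the exported class
# names (Groq, OpenAI, TogetherAI) are assumptions, since the diff
# bodies for these files are not shown here.
from jaclang.core.llms.anthropic import Anthropic
from jaclang.core.llms.groq import Groq
from jaclang.core.llms.huggingface import Huggingface
from jaclang.core.llms.ollama import Ollama
from jaclang.core.llms.openai import OpenAI
from jaclang.core.llms.togetherai import TogetherAI

__all__ = ["Anthropic", "Groq", "Huggingface", "Ollama", "OpenAI", "TogetherAI"]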
jaclang/core/llms.py DELETED
@@ -1,111 +0,0 @@
-"""LLMs (Large Language Models) module for Jaclang."""
-
-
-class Anthropic:
-    """Anthropic API client for Large Language Models (LLMs)."""
-
-    MTLLM_PROMPT: str = ""
-    MTLLM_REASON_SUFFIX: str = ""
-    MTLLM_WO_REASON_SUFFIX: str = ""
-
-    def __init__(self, **kwargs: dict) -> None:
-        """Initialize the Anthropic API client."""
-        import anthropic
-
-        self.client = anthropic.Anthropic()
-        self.model_name = kwargs.get("model_name", "claude-3-sonnet-20240229")
-        self.temperature = kwargs.get("temperature", 0.7)
-        self.max_tokens = kwargs.get("max_tokens", 1024)
-
-    def __infer__(self, meaning_in: str, **kwargs: dict) -> str:
-        """Infer a response from the input meaning."""
-        messages = [{"role": "user", "content": meaning_in}]
-        output = self.client.messages.create(
-            model=kwargs.get("model_name", self.model_name),
-            temperature=kwargs.get("temperature", self.temperature),
-            max_tokens=kwargs.get("max_tokens", self.max_tokens),
-            messages=messages,
-        )
-        return output.content[0].text
-
-
-class Huggingface:
-    """Huggingface API client for Large Language Models (LLMs)."""
-
-    MTLLM_PROMPT: str = ""
-    MTLLM_REASON_SUFFIX: str = ""
-    MTLLM_WO_REASON_SUFFIX: str = ""
-
-    def __init__(self, **kwargs: dict) -> None:
-        """Initialize the Huggingface API client."""
-        import torch
-        from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
-
-        torch.random.manual_seed(0)
-        model = AutoModelForCausalLM.from_pretrained(
-            kwargs.get("model_name", "microsoft/Phi-3-mini-128k-instruct"),
-            device_map=kwargs.get("device_map", "cuda"),
-            torch_dtype="auto",
-            trust_remote_code=True,
-        )
-        tokenizer = AutoTokenizer.from_pretrained(
-            kwargs.get("model_name", "microsoft/Phi-3-mini-128k-instruct")
-        )
-        self.pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
-        self.temperature = kwargs.get("temperature", 0.7)
-        self.max_tokens = kwargs.get("max_new_tokens", 1024)
-
-    def __infer__(self, meaning_in: str, **kwargs: dict) -> str:
-        """Infer a response from the input meaning."""
-        messages = [{"role": "user", "content": meaning_in}]
-        output = self.pipe(
-            messages,
-            temperature=kwargs.get("temperature", self.temperature),
-            max_length=kwargs.get("max_new_tokens", self.max_tokens),
-            **kwargs
-        )
-        return output[0]["generated_text"][-1]["content"]
-
-
-class Ollama:
-    """Ollama API client for Large Language Models (LLMs)."""
-
-    MTLLM_PROMPT: str = ""
-    MTLLM_REASON_SUFFIX: str = ""
-    MTLLM_WO_REASON_SUFFIX: str = ""
-
-    def __init__(self, **kwargs: dict) -> None:
-        """Initialize the Ollama API client."""
-        import ollama
-
-        self.client = ollama.Client(host=kwargs.get("host", "http://localhost:11434"))
-        self.model_name = kwargs.get("model_name", "phi3")
-        self.default_model_params = {
-            k: v for k, v in kwargs.items() if k not in ["model_name", "host"]
-        }
-
-    def __infer__(self, meaning_in: str, **kwargs: dict) -> str:
-        """Infer a response from the input meaning."""
-        model = str(kwargs.get("model_name", self.model_name))
-        if not self.check_model(model):
-            self.download_model(model)
-        model_params = {k: v for k, v in kwargs.items() if k not in ["model_name"]}
-        messages = [{"role": "user", "content": meaning_in}]
-        output = self.client.chat(
-            model=model,
-            messages=messages,
-            options={**self.default_model_params, **model_params},
-        )
-        return output["message"]["content"]
-
-    def check_model(self, model_name: str) -> bool:
-        """Check if the model is available."""
-        try:
-            self.client.show(model_name)
-            return True
-        except Exception:
-            return False
-
-    def download_model(self, model_name: str) -> None:
-        """Download the model."""
-        self.client.pull(model_name)
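For reference, each of the removed clients exposed the same surface: MTLLM_* prompt constants plus an __infer__(meaning_in, **kwargs) method that sends the prompt as a single user message and returns the generated text, with constructor defaults overridable per call. A minimal usage sketch of the removed Ollama client as it worked in 0.5.18, assuming a local Ollama server on the default port (the prompt string is illustrative):

# Illustrative 0.5.18 usage; not part of the diff itself.
from jaclang.core.llms import Ollama

llm = Ollama(model_name="phi3", temperature=0.2)
# __infer__ first checks model availability with client.show() and pulls
# the model with client.pull() if needed, then issues the chat request.
print(llm.__infer__("Summarize the Jac language in one sentence."))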