tactus 0.29.3-py3-none-any.whl → 0.30.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tactus/__init__.py +1 -1
- tactus/broker/server.py +295 -3
- tactus/cli/app.py +51 -0
- tactus/formatting/__init__.py +7 -0
- tactus/formatting/formatter.py +437 -0
- {tactus-0.29.3.dist-info → tactus-0.30.0.dist-info}/METADATA +9 -1
- {tactus-0.29.3.dist-info → tactus-0.30.0.dist-info}/RECORD +10 -8
- {tactus-0.29.3.dist-info → tactus-0.30.0.dist-info}/WHEEL +0 -0
- {tactus-0.29.3.dist-info → tactus-0.30.0.dist-info}/entry_points.txt +0 -0
- {tactus-0.29.3.dist-info → tactus-0.30.0.dist-info}/licenses/LICENSE +0 -0
tactus/__init__.py
CHANGED
tactus/broker/server.py
CHANGED
@@ -10,7 +10,7 @@ import json
 import logging
 import os
 import ssl
-from collections.abc import Callable
+from collections.abc import Awaitable, Callable
 from dataclasses import dataclass
 from pathlib import Path
 from typing import Any, Optional
@@ -19,7 +19,12 @@ import anyio
 from anyio.streams.buffered import BufferedByteReceiveStream
 from anyio.streams.tls import TLSStream

-from tactus.broker.protocol import
+from tactus.broker.protocol import (
+    read_message,
+    read_message_anyio,
+    write_message,
+    write_message_anyio,
+)

 logger = logging.getLogger(__name__)

@@ -446,6 +451,7 @@ class BrokerServer(_BaseBrokerServer):
             openai_backend=openai_backend, tool_registry=tool_registry, event_handler=event_handler
         )
         self.socket_path = Path(socket_path)
+        self._server: asyncio.AbstractServer | None = None

     async def start(self) -> None:
         # Most platforms enforce a short maximum length for AF_UNIX socket paths.
@@ -461,19 +467,286 @@ class BrokerServer(_BaseBrokerServer):
             self.socket_path.unlink()

         self._server = await asyncio.start_unix_server(
-            self.
+            self._handle_connection_asyncio, path=str(self.socket_path)
         )
         logger.info(f"[BROKER] Listening on UDS: {self.socket_path}")

     async def aclose(self) -> None:
         await super().aclose()

+        if self._server is not None:
+            self._server.close()
+            try:
+                await self._server.wait_closed()
+            finally:
+                self._server = None
+
         try:
             if self.socket_path.exists():
                 self.socket_path.unlink()
         except Exception:
             logger.debug("[BROKER] Failed to unlink socket path", exc_info=True)

+    async def _handle_connection_asyncio(
+        self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter
+    ) -> None:
+        try:
+            req = await read_message(reader)
+            req_id = req.get("id")
+            method = req.get("method")
+            params = req.get("params") or {}
+
+            async def write_event(event: dict[str, Any]) -> None:
+                await write_message(writer, event)
+
+            if not req_id or not method:
+                await write_event(
+                    {
+                        "id": req_id or "",
+                        "event": "error",
+                        "error": {"type": "BadRequest", "message": "Missing id/method"},
+                    }
+                )
+                return
+
+            if method == "events.emit":
+                await self._handle_events_emit_asyncio(req_id, params, write_event)
+                return
+
+            if method == "llm.chat":
+                await self._handle_llm_chat_asyncio(req_id, params, write_event)
+                return
+
+            if method == "tool.call":
+                await self._handle_tool_call_asyncio(req_id, params, write_event)
+                return
+
+            await write_event(
+                {
+                    "id": req_id,
+                    "event": "error",
+                    "error": {"type": "MethodNotFound", "message": f"Unknown method: {method}"},
+                }
+            )
+
+        except Exception as e:
+            logger.debug("[BROKER] Connection handler error", exc_info=True)
+            try:
+                await write_message(
+                    writer,
+                    {
+                        "id": "",
+                        "event": "error",
+                        "error": {"type": type(e).__name__, "message": str(e)},
+                    },
+                )
+            except Exception:
+                pass
+        finally:
+            try:
+                writer.close()
+                await writer.wait_closed()
+            except Exception:
+                pass
+
+    async def _handle_events_emit_asyncio(
+        self,
+        req_id: str,
+        params: dict[str, Any],
+        write_event: Callable[[dict[str, Any]], Awaitable[None]],
+    ) -> None:
+        event = params.get("event")
+        if not isinstance(event, dict):
+            await write_event(
+                {
+                    "id": req_id,
+                    "event": "error",
+                    "error": {"type": "BadRequest", "message": "params.event must be an object"},
+                }
+            )
+            return
+
+        try:
+            if self._event_handler is not None:
+                self._event_handler(event)
+        except Exception:
+            logger.debug("[BROKER] event_handler raised", exc_info=True)
+
+        await write_event({"id": req_id, "event": "done", "data": {"ok": True}})
+
+    async def _handle_tool_call_asyncio(
+        self,
+        req_id: str,
+        params: dict[str, Any],
+        write_event: Callable[[dict[str, Any]], Awaitable[None]],
+    ) -> None:
+        name = params.get("name")
+        args = params.get("args") or {}
+
+        if not isinstance(name, str) or not name:
+            await write_event(
+                {
+                    "id": req_id,
+                    "event": "error",
+                    "error": {"type": "BadRequest", "message": "params.name must be a string"},
+                }
+            )
+            return
+        if not isinstance(args, dict):
+            await write_event(
+                {
+                    "id": req_id,
+                    "event": "error",
+                    "error": {"type": "BadRequest", "message": "params.args must be an object"},
+                }
+            )
+            return
+
+        try:
+            result = self._tools.call(name, args)
+        except KeyError:
+            await write_event(
+                {
+                    "id": req_id,
+                    "event": "error",
+                    "error": {
+                        "type": "ToolNotAllowed",
+                        "message": f"Tool not allowlisted: {name}",
+                    },
+                }
+            )
+            return
+        except Exception as e:
+            logger.debug("[BROKER] tool.call error", exc_info=True)
+            await write_event(
+                {
+                    "id": req_id,
+                    "event": "error",
+                    "error": {"type": type(e).__name__, "message": str(e)},
+                }
+            )
+            return
+
+        await write_event({"id": req_id, "event": "done", "data": {"result": result}})
+
+    async def _handle_llm_chat_asyncio(
+        self,
+        req_id: str,
+        params: dict[str, Any],
+        write_event: Callable[[dict[str, Any]], Awaitable[None]],
+    ) -> None:
+        provider = params.get("provider") or "openai"
+        if provider != "openai":
+            await write_event(
+                {
+                    "id": req_id,
+                    "event": "error",
+                    "error": {
+                        "type": "UnsupportedProvider",
+                        "message": f"Unsupported provider: {provider}",
+                    },
+                }
+            )
+            return
+
+        model = params.get("model")
+        messages = params.get("messages")
+        stream = bool(params.get("stream", False))
+        temperature = params.get("temperature")
+        max_tokens = params.get("max_tokens")
+
+        if not isinstance(model, str) or not model:
+            await write_event(
+                {
+                    "id": req_id,
+                    "event": "error",
+                    "error": {"type": "BadRequest", "message": "params.model must be a string"},
+                }
+            )
+            return
+        if not isinstance(messages, list):
+            await write_event(
+                {
+                    "id": req_id,
+                    "event": "error",
+                    "error": {"type": "BadRequest", "message": "params.messages must be a list"},
+                }
+            )
+            return
+
+        try:
+            if stream:
+                stream_iter = await self._openai.chat(
+                    model=model,
+                    messages=messages,
+                    temperature=temperature,
+                    max_tokens=max_tokens,
+                    stream=True,
+                )
+
+                full_text = ""
+                async for chunk in stream_iter:
+                    try:
+                        delta = chunk.choices[0].delta
+                        text = getattr(delta, "content", None)
+                    except Exception:
+                        text = None
+
+                    if not text:
+                        continue
+
+                    full_text += text
+                    await write_event({"id": req_id, "event": "delta", "data": {"text": text}})
+
+                await write_event(
+                    {
+                        "id": req_id,
+                        "event": "done",
+                        "data": {
+                            "text": full_text,
+                            "usage": {
+                                "prompt_tokens": 0,
+                                "completion_tokens": 0,
+                                "total_tokens": 0,
+                            },
+                        },
+                    }
+                )
+                return
+
+            resp = await self._openai.chat(
+                model=model,
+                messages=messages,
+                temperature=temperature,
+                max_tokens=max_tokens,
+                stream=False,
+            )
+            text = ""
+            try:
+                text = resp.choices[0].message.content or ""
+            except Exception:
+                text = ""
+
+            await write_event(
+                {
+                    "id": req_id,
+                    "event": "done",
+                    "data": {
+                        "text": text,
+                        "usage": {"prompt_tokens": 0, "completion_tokens": 0, "total_tokens": 0},
+                    },
+                }
+            )
+        except Exception as e:
+            logger.debug("[BROKER] llm.chat error", exc_info=True)
+            await write_event(
+                {
+                    "id": req_id,
+                    "event": "error",
+                    "error": {"type": type(e).__name__, "message": str(e)},
+                }
+            )
+

 class TcpBrokerServer(_BaseBrokerServer):
     """
@@ -499,6 +772,7 @@ class TcpBrokerServer(_BaseBrokerServer):
         self.port = port
         self.ssl_context = ssl_context
         self.bound_port: int | None = None
+        self._serve_task: asyncio.Task[None] | None = None

     async def start(self) -> None:
         # Create AnyIO TCP listener (doesn't block, just binds to port)
@@ -515,3 +789,21 @@ class TcpBrokerServer(_BaseBrokerServer):
         logger.info(
             f"[BROKER] Listening on {scheme}: {self.host}:{self.bound_port if self.bound_port is not None else self.port}"
         )
+
+        # Unlike asyncio's start_server(), AnyIO listeners don't automatically start
+        # serving on enter; they require an explicit serve() loop. Run it in the
+        # background for the duration of the async context manager.
+        if self._serve_task is None or self._serve_task.done():
+            self._serve_task = asyncio.create_task(self.serve(), name="tactus-broker-tcp-serve")
+
+    async def aclose(self) -> None:
+        task = self._serve_task
+        self._serve_task = None
+        if task is not None and not task.done():
+            task.cancel()
+            try:
+                await task
+            except asyncio.CancelledError:
+                pass
+
+        await super().aclose()
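The new asyncio UDS handler above speaks the same JSON envelope as the existing AnyIO paths: one request object carrying `id`, `method`, and `params`, answered by `delta`, `done`, or `error` events. A minimal sketch of those shapes, for orientation only; the wire framing is handled by `read_message`/`write_message` from `tactus.broker.protocol`, which is not shown in this diff, and the tool name below is made up:

    # Request dispatched by _handle_connection_asyncio (one request per connection).
    request = {
        "id": "req-1",
        "method": "tool.call",  # also handled: "events.emit", "llm.chat"
        "params": {"name": "my_tool", "args": {"text": "hi"}},  # "my_tool" is hypothetical
    }

    # A successful tool.call ends with a single terminal event:
    done_event = {"id": "req-1", "event": "done", "data": {"result": "..."}}

    # A tool that is not allowlisted produces an error event instead:
    error_event = {
        "id": "req-1",
        "event": "error",
        "error": {"type": "ToolNotAllowed", "message": "Tool not allowlisted: my_tool"},
    }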
tactus/cli/app.py
CHANGED
@@ -28,6 +28,7 @@ from rich.table import Table
 from tactus.core import TactusRuntime
 from tactus.core.yaml_parser import ProcedureYAMLParser, ProcedureConfigError
 from tactus.validation import TactusValidator, ValidationMode
+from tactus.formatting import TactusFormatter, FormattingError
 from tactus.adapters.memory import MemoryStorage
 from tactus.adapters.file_storage import FileStorage
 from tactus.adapters.cli_hitl import CLIHITLHandler
@@ -1116,6 +1117,56 @@ def validate(
         raise typer.Exit(1)


+@app.command("format")
+def format_(
+    workflow_file: Path = typer.Argument(..., help="Path to workflow file (.tac or .lua)"),
+    check: bool = typer.Option(
+        False,
+        "--check",
+        help="Don't write files back; exit 1 if changes are needed",
+    ),
+    stdout: bool = typer.Option(False, "--stdout", help="Write formatted code to stdout"),
+):
+    """
+    Format a Tactus Lua DSL file.
+
+    Currently enforces semantic indentation using 2-space soft tabs.
+    """
+    if not workflow_file.exists():
+        console.print(f"[red]Error:[/red] Workflow file not found: {workflow_file}")
+        raise typer.Exit(1)
+
+    if workflow_file.suffix not in [".tac", ".lua"]:
+        console.print("[red]Error:[/red] Formatting is only supported for .tac/.lua files")
+        raise typer.Exit(1)
+
+    formatter = TactusFormatter(indent_width=2)
+    source_content = workflow_file.read_text()
+
+    try:
+        result = formatter.format_source(source_content)
+    except FormattingError as e:
+        console.print(f"[red]✗[/red] {e}")
+        raise typer.Exit(1)
+
+    if stdout:
+        sys.stdout.write(result.formatted)
+        return
+
+    if check:
+        if result.changed:
+            console.print(f"[red]✗ Would reformat:[/red] {workflow_file}")
+            raise typer.Exit(1)
+        console.print(f"[green]✓ Already formatted:[/green] {workflow_file}")
+        return
+
+    if result.changed:
+        workflow_file.write_text(result.formatted)
+        console.print(f"[green]✓ Formatted:[/green] {workflow_file}")
+    else:
+        console.print(f"[green]✓ No changes:[/green] {workflow_file}")
+
+
 @app.command()
 def info(
     workflow_file: Path = typer.Argument(..., help="Path to workflow file (.tac or .lua)"),
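The command's exit-code contract (0 when nothing would change, 1 when `--check` finds a file that needs reformatting or when formatting fails) can be exercised with Typer's test runner. A sketch only, assuming the Typer `app` object is importable from `tactus.cli.app` as the decorators above suggest:

    # Illustration, not part of the package.
    from typer.testing import CliRunner
    from tactus.cli.app import app  # assumed import path for the Typer application

    runner = CliRunner()
    result = runner.invoke(app, ["format", "workflow.tac", "--check"])
    # --check never rewrites the file: exit code 0 means already formatted,
    # exit code 1 means the file would be reformatted (or an error occurred).
    print(result.exit_code)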
tactus/formatting/formatter.py
ADDED
@@ -0,0 +1,437 @@
+from __future__ import annotations
+
+import re
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Dict, Iterable, Set
+
+from antlr4 import CommonTokenStream, InputStream
+from antlr4.Token import Token
+from antlr4.TokenStreamRewriter import TokenStreamRewriter
+
+from tactus.validation.error_listener import TactusErrorListener
+from tactus.validation.generated.LuaLexer import LuaLexer
+from tactus.validation.generated.LuaParser import LuaParser
+
+
+class FormattingError(RuntimeError):
+    pass
+
+
+@dataclass(frozen=True)
+class FormatResult:
+    formatted: str
+    changed: bool
+
+
+class TactusFormatter:
+    """
+    ANTLR-based formatter for Tactus Lua DSL files.
+
+    Current scope: semantic indentation (2-space soft tabs) while preserving
+    token text, comments, and multi-line string/comment contents.
+    """
+
+    def __init__(self, indent_width: int = 2):
+        if indent_width <= 0:
+            raise ValueError("indent_width must be positive")
+        self._indent_width = indent_width
+
+    def format_source(self, source: str) -> FormatResult:
+        original_source = source
+        token_stream, error_listener = self._parse_to_tokens(source)
+        if error_listener.errors:
+            first = error_listener.errors[0]
+            raise FormattingError(f"Cannot format invalid source: {first.message}")
+
+        tokens = list(token_stream.tokens)
+        source = _rewrite_token_text(source, tokens, token_stream)
+        protected_lines = self._protected_lines_from_multiline_tokens(tokens)
+        indent_by_line = self._indentation_by_line(tokens, num_lines=_count_lines(source))
+
+        formatted = _rewrite_leading_indentation(
+            source,
+            indent_by_line=indent_by_line,
+            indent_width=self._indent_width,
+            protected_lines=protected_lines,
+        )
+        return FormatResult(formatted=formatted, changed=(formatted != original_source))
+
+    def format_file(self, file_path: Path) -> FormatResult:
+        source = file_path.read_text()
+        return self.format_source(source)
+
+    def _parse_to_tokens(self, source: str) -> tuple[CommonTokenStream, TactusErrorListener]:
+        input_stream = InputStream(source)
+        lexer = LuaLexer(input_stream)
+        token_stream = CommonTokenStream(lexer)
+        token_stream.fill()
+
+        parser = LuaParser(token_stream)
+        error_listener = TactusErrorListener()
+        parser.removeErrorListeners()
+        parser.addErrorListener(error_listener)
+        parser.start_()
+        return token_stream, error_listener
+
+    def _protected_lines_from_multiline_tokens(self, tokens: Iterable[Token]) -> Set[int]:
+        protected: Set[int] = set()
+        for tok in tokens:
+            text = getattr(tok, "text", None)
+            if not text or "\n" not in text:
+                continue
+
+            if tok.type in (LuaLexer.LONGSTRING, LuaLexer.COMMENT):
+                start = int(getattr(tok, "line", 0) or 0)
+                if start <= 0:
+                    continue
+                end = start + text.count("\n")
+                protected.update(range(start, end + 1))
+        return protected
+
+    def _indentation_by_line(self, tokens: Iterable[Token], *, num_lines: int) -> Dict[int, int]:
+        line_to_default_tokens: Dict[int, list[Token]] = {}
+        for tok in tokens:
+            if tok.type == Token.EOF:
+                continue
+            if tok.channel != Token.DEFAULT_CHANNEL:
+                continue
+            line_to_default_tokens.setdefault(int(tok.line), []).append(tok)
+
+        open_tokens = {
+            LuaLexer.THEN,
+            LuaLexer.DO,
+            LuaLexer.FUNCTION,
+            LuaLexer.REPEAT,
+            LuaLexer.OCU,  # {
+            LuaLexer.ELSE,
+            LuaLexer.ELSEIF,
+        }
+        close_tokens = {
+            LuaLexer.END,
+            LuaLexer.UNTIL,
+            LuaLexer.CCU,  # }
+        }
+        dedent_at_line_start = close_tokens | {LuaLexer.ELSE, LuaLexer.ELSEIF}
+
+        indent_level = 0
+        indent_by_line: Dict[int, int] = {}
+
+        for line_no in range(1, num_lines + 1):
+            tokens_on_line = line_to_default_tokens.get(line_no, [])
+            first = tokens_on_line[0] if tokens_on_line else None
+
+            if first is not None and first.type in dedent_at_line_start:
+                indent_level = max(0, indent_level - 1)
+
+            indent_by_line[line_no] = indent_level
+
+            handled_first_dedent = first is not None and first.type in dedent_at_line_start
+            for idx, tok in enumerate(tokens_on_line):
+                if tok.type in close_tokens:
+                    if idx == 0 and handled_first_dedent:
+                        continue
+                    indent_level = max(0, indent_level - 1)
+                if tok.type in open_tokens:
+                    indent_level += 1
+
+        return indent_by_line
+
+
+def _count_lines(source: str) -> int:
+    if not source:
+        return 1
+    return source.count("\n") + 1
+
+
+def _split_line_ending(line: str) -> tuple[str, str]:
+    if line.endswith("\r\n"):
+        return line[:-2], "\r\n"
+    if line.endswith("\n"):
+        return line[:-1], "\n"
+    if line.endswith("\r"):
+        return line[:-1], "\r"
+    return line, ""
+
+
+def _rewrite_leading_indentation(
+    source: str,
+    *,
+    indent_by_line: Dict[int, int],
+    indent_width: int,
+    protected_lines: Set[int],
+) -> str:
+    lines = source.splitlines(keepends=True)
+    out: list[str] = []
+
+    for i, raw in enumerate(lines, start=1):
+        if i in protected_lines:
+            out.append(raw)
+            continue
+
+        body, ending = _split_line_ending(raw)
+        if body.strip() == "":
+            out.append(ending)
+            continue
+
+        desired = " " * (indent_width * max(0, int(indent_by_line.get(i, 0))))
+        stripped = body.lstrip(" \t")
+        out.append(desired + stripped + ending)
+
+    return "".join(out)
+
+
+_LONGSTRING_OPEN_RE = re.compile(r"^\[(?P<eq>=*)\[(?P<body>.*)\](?P=eq)\]$", re.DOTALL)
+
+
+def _rewrite_token_text(source: str, tokens: list[Token], token_stream: CommonTokenStream) -> str:
+    """
+    Token-based, semantic rewrites (idempotent):
+    - Indent embedded Specifications longstrings.
+    - Enforce spaces around '=' (within a single line).
+    - Remove trailing commas in multi-line table constructors.
+    """
+    rewriter = TokenStreamRewriter(token_stream)
+    default_tokens: list[Token] = [
+        t for t in tokens if t.type != Token.EOF and t.channel == Token.DEFAULT_CHANNEL
+    ]
+
+    _apply_specifications_longstring_rewrites(tokens, default_tokens, rewriter)
+    _apply_assignment_spacing(default_tokens, tokens, rewriter)
+    _apply_comma_spacing(default_tokens, tokens, rewriter)
+    _apply_binary_operator_spacing(default_tokens, tokens, rewriter)
+    _apply_multiline_table_trailing_comma_removal(default_tokens, tokens, rewriter)
+
+    rewritten = rewriter.getDefaultText()
+    return rewritten if rewritten != source else source
+
+
+def _apply_specifications_longstring_rewrites(
+    tokens: list[Token], default_tokens: list[Token], rewriter: TokenStreamRewriter
+) -> None:
+    longstring_token_indices: list[int] = []
+    for idx, tok in enumerate(default_tokens):
+        if tok.type != LuaLexer.LONGSTRING:
+            continue
+        if _is_specifications_call_longstring(default_tokens, idx):
+            longstring_token_indices.append(tok.tokenIndex)
+
+    for token_index in longstring_token_indices:
+        tok = tokens[token_index]
+        new_text = _format_specifications_longstring_text(tok.text or "")
+        if new_text != (tok.text or ""):
+            rewriter.replaceIndex(token_index, new_text)
+
+
+def _apply_assignment_spacing(
+    default_tokens: list[Token], tokens: list[Token], rewriter: TokenStreamRewriter
+) -> None:
+    for i, tok in enumerate(default_tokens):
+        if tok.type != LuaLexer.EQ:
+            continue
+        if i == 0 or i + 1 >= len(default_tokens):
+            continue
+
+        prev = default_tokens[i - 1]
+        nxt = default_tokens[i + 1]
+        if prev.line != tok.line or nxt.line != tok.line:
+            continue
+
+        # Normalize the hidden token region between prev and '=' to a single space.
+        if prev.tokenIndex + 1 <= tok.tokenIndex - 1:
+            rewriter.replaceRange(prev.tokenIndex + 1, tok.tokenIndex - 1, " ")
+        else:
+            rewriter.insertBeforeIndex(tok.tokenIndex, " ")
+
+        # Normalize the hidden token region between '=' and next to a single space.
+        if tok.tokenIndex + 1 <= nxt.tokenIndex - 1:
+            rewriter.replaceRange(tok.tokenIndex + 1, nxt.tokenIndex - 1, " ")
+        else:
+            rewriter.insertAfterToken(tok, " ")
+
+
+def _has_comment_or_newline_between(tokens: list[Token], left: Token, right: Token) -> bool:
+    if left.tokenIndex + 1 > right.tokenIndex - 1:
+        return False
+    for t in tokens[left.tokenIndex + 1 : right.tokenIndex]:
+        if t.type in (LuaLexer.NL, LuaLexer.COMMENT):
+            return True
+    return False
+
+
+def _apply_comma_spacing(
+    default_tokens: list[Token], tokens: list[Token], rewriter: TokenStreamRewriter
+) -> None:
+    for i, tok in enumerate(default_tokens):
+        if tok.type != LuaLexer.COMMA:
+            continue
+        if i == 0 or i + 1 >= len(default_tokens):
+            continue
+        prev = default_tokens[i - 1]
+        nxt = default_tokens[i + 1]
+        if prev.line != tok.line or nxt.line != tok.line:
+            continue
+        if _has_comment_or_newline_between(tokens, prev, tok) or _has_comment_or_newline_between(
+            tokens, tok, nxt
+        ):
+            continue
+
+        if prev.tokenIndex + 1 <= tok.tokenIndex - 1:
+            rewriter.replaceRange(prev.tokenIndex + 1, tok.tokenIndex - 1, "")
+
+        if tok.tokenIndex + 1 <= nxt.tokenIndex - 1:
+            rewriter.replaceRange(tok.tokenIndex + 1, nxt.tokenIndex - 1, " ")
+        else:
+            rewriter.insertAfterToken(tok, " ")
+
+
+def _apply_binary_operator_spacing(
+    default_tokens: list[Token], tokens: list[Token], rewriter: TokenStreamRewriter
+) -> None:
+    binary_ops = {
+        LuaLexer.PLUS,
+        LuaLexer.MINUS,
+        LuaLexer.STAR,
+        LuaLexer.SLASH,
+        LuaLexer.PER,
+        LuaLexer.SS,  # //
+        LuaLexer.DD,  # ..
+        LuaLexer.CARET,
+        LuaLexer.PIPE,
+        LuaLexer.AMP,
+        LuaLexer.LL,
+        LuaLexer.GG,
+        LuaLexer.LT,
+        LuaLexer.GT,
+        LuaLexer.LE,
+        LuaLexer.GE,
+        LuaLexer.EE,
+        LuaLexer.SQEQ,
+        LuaLexer.AND,
+        LuaLexer.OR,
+    }
+    unary_preceders = {
+        LuaLexer.EQ,
+        LuaLexer.COMMA,
+        LuaLexer.OP,
+        LuaLexer.OB,
+        LuaLexer.OCU,
+        LuaLexer.CC,  # '::'
+        LuaLexer.THEN,
+        LuaLexer.DO,
+        LuaLexer.ELSE,
+        LuaLexer.ELSEIF,
+        LuaLexer.RETURN,
+        LuaLexer.FOR,
+        LuaLexer.WHILE,
+        LuaLexer.IF,
+        LuaLexer.IN,
+        LuaLexer.AND,
+        LuaLexer.OR,
+    } | binary_ops
+
+    for i, tok in enumerate(default_tokens):
+        if tok.type not in binary_ops:
+            continue
+        if i == 0 or i + 1 >= len(default_tokens):
+            continue
+        prev = default_tokens[i - 1]
+        nxt = default_tokens[i + 1]
+        if prev.line != tok.line or nxt.line != tok.line:
+            continue
+        if _has_comment_or_newline_between(tokens, prev, tok) or _has_comment_or_newline_between(
+            tokens, tok, nxt
+        ):
+            continue
+
+        if tok.type in (LuaLexer.MINUS, LuaLexer.PLUS) and prev.type in unary_preceders:
+            continue
+
+        if prev.tokenIndex + 1 <= tok.tokenIndex - 1:
+            rewriter.replaceRange(prev.tokenIndex + 1, tok.tokenIndex - 1, " ")
+        else:
+            rewriter.insertBeforeIndex(tok.tokenIndex, " ")
+
+        if tok.tokenIndex + 1 <= nxt.tokenIndex - 1:
+            rewriter.replaceRange(tok.tokenIndex + 1, nxt.tokenIndex - 1, " ")
+        else:
+            rewriter.insertAfterToken(tok, " ")
+
+
+def _apply_multiline_table_trailing_comma_removal(
+    default_tokens: list[Token], tokens: list[Token], rewriter: TokenStreamRewriter
+) -> None:
+    stack: list[int] = []
+    for idx, tok in enumerate(default_tokens):
+        if tok.type == LuaLexer.OCU:
+            stack.append(idx)
+            continue
+        if tok.type != LuaLexer.CCU:
+            continue
+        if not stack:
+            continue
+        open_idx = stack.pop()
+        open_tok = default_tokens[open_idx]
+        close_tok = tok
+        if open_tok.line == close_tok.line:
+            continue
+
+        j = idx - 1
+        if j <= open_idx:
+            continue
+        last = default_tokens[j]
+        if last.type == LuaLexer.COMMA:
+            rewriter.replaceIndex(last.tokenIndex, "")
+
+
+def _is_specifications_call_longstring(default_tokens: list[Token], longstring_idx: int) -> bool:
+    # Accept: Specifications(LONGSTRING) where LONGSTRING is the first argument.
+    if longstring_idx < 1:
+        return False
+    prev = default_tokens[longstring_idx - 1]
+    if prev.type == LuaLexer.OP and longstring_idx >= 2:
+        name = default_tokens[longstring_idx - 2]
+        return name.type == LuaLexer.NAME and (name.text or "") == "Specifications"
+    name = prev
+    return name.type == LuaLexer.NAME and (name.text or "") == "Specifications"
+
+
+def _format_specifications_longstring_text(text: str) -> str:
+    match = _LONGSTRING_OPEN_RE.match(text)
+    if not match:
+        return text
+
+    eq = match.group("eq")
+    body = match.group("body")
+    open_delim = f"[{eq}["
+    close_delim = f"]{eq}]"
+
+    formatted_body = _shift_longstring_body_indent_once(body, shift_spaces=2)
+    return f"{open_delim}{formatted_body}{close_delim}"
+
+
+def _shift_longstring_body_indent_once(body: str, *, shift_spaces: int) -> str:
+    lines = body.splitlines(keepends=True)
+    already_formatted = False
+    for line in lines:
+        raw, _ending = _split_line_ending(line)
+        if raw.strip() == "":
+            continue
+        expanded = raw.replace("\t", " ")
+        already_formatted = expanded.startswith(" " * shift_spaces)
+        break
+
+    if already_formatted:
+        return body
+
+    out: list[str] = []
+    prefix = " " * shift_spaces
+    for line in lines:
+        raw, ending = _split_line_ending(line)
+        if raw.strip() == "":
+            out.append(raw + ending)
+            continue
+
+        expanded = raw.replace("\t", " ")
+        out.append(prefix + expanded + ending)
+    return "".join(out)
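Programmatic use mirrors what the CLI command does with this module. A small sketch based on the API above; the Lua snippet is a made-up input, not taken from the package:

    from tactus.formatting import TactusFormatter, FormattingError

    formatter = TactusFormatter(indent_width=2)
    source = 'if ok then\nprint("hi")\nend\n'  # hypothetical, unindented input

    try:
        result = formatter.format_source(source)
    except FormattingError as exc:
        # Raised when the source does not parse; the formatter refuses to touch it.
        raise SystemExit(str(exc))

    if result.changed:
        print(result.formatted, end="")  # body reindented with 2-space soft tabs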
{tactus-0.29.3.dist-info → tactus-0.30.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: tactus
-Version: 0.29.3
+Version: 0.30.0
 Summary: Tactus: Lua-based DSL for agentic workflows
 Project-URL: Homepage, https://github.com/AnthusAI/Tactus
 Project-URL: Documentation, https://github.com/AnthusAI/Tactus/tree/main/docs
@@ -1665,6 +1665,12 @@ Agents:
 # Validate syntax and structure
 tactus validate workflow.tac

+# Format a workflow file (2-space indentation + normalized spacing)
+tactus format workflow.tac
+
+# Check formatting without rewriting (exit 1 if changes needed)
+tactus format workflow.tac --check
+
 # Run BDD specifications
 tactus test workflow.tac

@@ -1675,6 +1681,8 @@ tactus test workflow.tac --runs 10
 tactus eval workflow.tac --runs 10
 ```

+The `format` command uses Tactus's Lua parser to reindent and normalize whitespace while preserving the structure of the code.
+
 ### Understanding Output

 The CLI displays several types of events:
{tactus-0.29.3.dist-info → tactus-0.30.0.dist-info}/RECORD
CHANGED
@@ -1,4 +1,4 @@
-tactus/__init__.py,sha256=
+tactus/__init__.py,sha256=bxlJnTV0P7LYntLZsAFANekhPuuBckJ5pE-BXXAWwAo,1245
 tactus/adapters/__init__.py,sha256=lU8uUxuryFRIpVrn_KeVK7aUhsvOT1tYsuE3FOOIFpI,289
 tactus/adapters/broker_log.py,sha256=mIjARt1Q6ouWVbVri6zep1e8tzm9y28l4WOEdqiK39Q,2849
 tactus/adapters/cli_hitl.py,sha256=3dH58du0lN4k-OvQrAHrAqHFqBjolqNKFb94JaNHtn8,6964
@@ -18,10 +18,10 @@ tactus/backends/pytorch_backend.py,sha256=I7H7UTa_Scx9_FtmPWn-G4noadaNVEQj-9Kjtj
 tactus/broker/__init__.py,sha256=UTvqLofrgE3c4m6u2iNOg5R7BrS4dmfzMRO4Oq_0A9U,396
 tactus/broker/client.py,sha256=sDi1Cimuv0zH5S4n597nnUy_qis7pLBZf5IkwF81oDc,8618
 tactus/broker/protocol.py,sha256=fnVgMKU_R7oK2xBATnrYSH_r2R-yOmV2o_pJYw1JvVk,5238
-tactus/broker/server.py,sha256=
+tactus/broker/server.py,sha256=3FCaz7gWHcK1ZCKsRPVkhGCof2ND798FTv-lDirZQek,26783
 tactus/broker/stdio.py,sha256=JXkEz-PCU3IQXNkt16YJtYmwkR43eS6CfjxAHc-YCfQ,439
 tactus/cli/__init__.py,sha256=kVhdCkwWEPdt3vn9si-iKvh6M9817aOH6rLSsNzRuyg,80
-tactus/cli/app.py,sha256=
+tactus/cli/app.py,sha256=DCRJRQoxJbGCOizmOT6krSqSJHZ0ujNpTluZoG56cMg,85186
 tactus/cli/commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tactus/core/__init__.py,sha256=TK5rWr3HmOO_igFa5ESGp6teWwS58vnvQhIWqkcgqwk,880
 tactus/core/config_manager.py,sha256=u90XChTeb2reZ3I6c3o_zI2UlzlFCNtL9-NhE1Vm6L0,31577
@@ -48,6 +48,8 @@ tactus/dspy/history.py,sha256=0yGi3P5ruRUPoRyaCWsUDeuEYYsfproc_7pMVZuhmUo,5980
 tactus/dspy/module.py,sha256=sJdFS-5A4SpuiMLjbwiZJCvg3pTtEx8x8MRVaqjCQ2I,15423
 tactus/dspy/prediction.py,sha256=nnofvBPGFX7bvYdTVcEMVcIXC5EVrRQ21QsnC1PRHeU,9758
 tactus/dspy/signature.py,sha256=jdLHBa5BOEBwXTfmLui6fjViEDQDhdUzQm2__STHquU,6053
+tactus/formatting/__init__.py,sha256=pkwfAJwMxdRha2oahXoUrVjk6if7QH5d1U5t5dF2fXc,162
+tactus/formatting/formatter.py,sha256=DfHp977t5reMPIWZwRChRE5Yflw7xGgTNUM0AOcS8LQ,14510
 tactus/ide/__init__.py,sha256=1fSC0xWP-Lq5wl4FgDq7SMnkvZ0DxXupreTl3ZRX1zw,143
 tactus/ide/coding_assistant.py,sha256=GgmspWIn9IPgBK0ZYapeISIOrcDfRyK7yyPDPV85r8g,12184
 tactus/ide/server.py,sha256=TSCnx0Sc9EhRnX5Kl3dUZk-REzxUWmkDoPeHHZ4-AHg,94459
@@ -152,8 +154,8 @@ tactus/validation/generated/LuaParserVisitor.py,sha256=ageKSmHPxnO3jBS2fBtkmYBOd
 tactus/validation/generated/__init__.py,sha256=5gWlwRI0UvmHw2fnBpj_IG6N8oZeabr5tbj1AODDvjc,196
 tactus/validation/grammar/LuaLexer.g4,sha256=t2MXiTCr127RWAyQGvamkcU_m4veqPzSuHUtAKwalw4,2771
 tactus/validation/grammar/LuaParser.g4,sha256=ceZenb90BdiZmVdOxMGj9qJk3QbbWVZe5HUqPgoePfY,3202
-tactus-0.
-tactus-0.
-tactus-0.
-tactus-0.
-tactus-0.
+tactus-0.30.0.dist-info/METADATA,sha256=OJMinYvQWWE8vKf6K1mFNBOKRdu5YK6ZPMpjC9AWbRA,58765
+tactus-0.30.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+tactus-0.30.0.dist-info/entry_points.txt,sha256=vWseqty8m3z-Worje0IYxlioMjPDCoSsm0AtY4GghBY,47
+tactus-0.30.0.dist-info/licenses/LICENSE,sha256=ivohBcAIYnaLPQ-lKEeCXSMvQUVISpQfKyxHBHoa4GA,1066
+tactus-0.30.0.dist-info/RECORD,,
{tactus-0.29.3.dist-info → tactus-0.30.0.dist-info}/WHEEL
File without changes
{tactus-0.29.3.dist-info → tactus-0.30.0.dist-info}/entry_points.txt
File without changes
{tactus-0.29.3.dist-info → tactus-0.30.0.dist-info}/licenses/LICENSE
File without changes