blue-assistant 4.297.1__py3-none-any.whl → 4.319.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- blue_assistant/.abcli/script/run.sh +1 -1
- blue_assistant/.abcli/tests/script_run.sh +2 -2
- blue_assistant/__init__.py +1 -1
- blue_assistant/help/script.py +1 -0
- blue_assistant/script/__main__.py +10 -1
- blue_assistant/script/actions/__init__.py +2 -2
- blue_assistant/script/actions/generate_image.py +4 -4
- blue_assistant/script/actions/generate_text.py +8 -4
- blue_assistant/script/actions/generic.py +3 -3
- blue_assistant/script/actions/web_crawl.py +7 -6
- blue_assistant/script/repository/__init__.py +4 -3
- blue_assistant/script/repository/base/classes.py +9 -181
- blue_assistant/script/repository/base/root.py +276 -0
- blue_assistant/script/repository/blue_amo/actions/__init__.py +2 -2
- blue_assistant/script/repository/blue_amo/actions/{slicing_into_frames.py → setting_frame_prompts.py} +5 -5
- blue_assistant/script/repository/blue_amo/classes.py +21 -44
- blue_assistant/script/repository/hue/classes.py +2 -2
- blue_assistant/script/repository/orbital_data_explorer/actions/__init__.py +11 -0
- blue_assistant/script/repository/orbital_data_explorer/actions/expanding_the_extractions.py +109 -0
- blue_assistant/script/repository/orbital_data_explorer/classes.py +10 -2
- blue_assistant/web/crawl.py +5 -5
- blue_assistant/web/fetch.py +2 -2
- blue_assistant/web/functions.py +11 -0
- {blue_assistant-4.297.1.dist-info → blue_assistant-4.319.1.dist-info}/METADATA +2 -2
- {blue_assistant-4.297.1.dist-info → blue_assistant-4.319.1.dist-info}/RECORD +28 -27
- blue_assistant/script/repository/generic/__init__.py +0 -1
- blue_assistant/script/repository/generic/classes.py +0 -88
- {blue_assistant-4.297.1.dist-info → blue_assistant-4.319.1.dist-info}/LICENSE +0 -0
- {blue_assistant-4.297.1.dist-info → blue_assistant-4.319.1.dist-info}/WHEEL +0 -0
- {blue_assistant-4.297.1.dist-info → blue_assistant-4.319.1.dist-info}/top_level.txt +0 -0
blue_assistant/.abcli/script/run.sh
CHANGED
@@ -7,7 +7,7 @@ function blue_assistant_script_run() {
     local do_upload=$(abcli_option_int "$options" upload $(abcli_not $do_dryrun))

     local script_options=$2
-    local script_name=$(abcli_option "$script_options" script
+    local script_name=$(abcli_option "$script_options" script base)

     local object_name=$(abcli_clarify_object $3 $script_name-$(abcli_string_timestamp_short))
     [[ "$do_download" == 1 ]] &&
blue_assistant/__init__.py
CHANGED
blue_assistant/help/script.py
CHANGED
blue_assistant/script/__main__.py
CHANGED
@@ -48,6 +48,13 @@ parser.add_argument(
     default=1,
     help="0 | 1",
 )
+parser.add_argument(
+    "--runnable",
+    type=str,
+    default="",
+    help="~node_1,~node_2",
+)
+
 args = parser.parse_args()

 delim = " " if args.delim == "space" else args.delim
@@ -70,7 +77,9 @@ elif args.task == "run":
     )

     if success:
-        success = script.run(
+        success = script.run(
+            runnable=args.runnable,
+        )
     else:
         success = None
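Note: the new `--runnable` flag is handed to `RootScript.run`, which parses it with `blue_options.options.Options` (see `base/root.py` below). A minimal plain-Python sketch of the apparent semantics — `~node` disables a node, a bare name enables it — is given here; the `~` convention is inferred from the flag's help string and is an assumption, not something this diff confirms.

    # Illustrative stand-in for blue_options.options.Options (assumption).
    def parse_runnable(runnable: str) -> dict:
        flags = {}
        for token in runnable.split(","):
            token = token.strip()
            if not token:
                continue
            if token.startswith("~"):
                flags[token[1:]] = False  # "~node_1" marks node_1 as not runnable
            else:
                flags[token] = True
        return flags

    assert parse_runnable("~node_1,node_2") == {"node_1": False, "node_2": True}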
blue_assistant/script/actions/__init__.py
CHANGED
@@ -1,13 +1,13 @@
 from typing import Dict, Callable

-from blue_assistant.script.repository.base.
+from blue_assistant.script.repository.base.root import RootScript
 from blue_assistant.script.actions.generic import generic_action
 from blue_assistant.script.actions.generate_image import generate_image
 from blue_assistant.script.actions.generate_text import generate_text
 from blue_assistant.script.actions.web_crawl import web_crawl


-dict_of_actions: Dict[str, Callable[[
+dict_of_actions: Dict[str, Callable[[RootScript, str], bool]] = {
     "generic": generic_action,
     "generate_image": generate_image,
     "generate_text": generate_text,
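Note: every registered action now shares the signature `Callable[[RootScript, str], bool]` and is dispatched by name from `RootScript.perform_action` (see `base/root.py` below). A self-contained sketch of that dispatch shape, using a stand-in class instead of the real `RootScript`:

    from typing import Callable, Dict

    class FakeScript:  # stand-in for RootScript, only to show the call shape
        nodes = {"hello": {"action": "generic"}}

    def generic_action(script, node_name: str) -> bool:
        print(f"running {node_name}")
        return True

    dict_of_actions: Dict[str, Callable[..., bool]] = {"generic": generic_action}

    action = FakeScript.nodes["hello"]["action"]
    assert dict_of_actions[action](script=FakeScript(), node_name="hello")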
blue_assistant/script/actions/generate_image.py
CHANGED
@@ -7,7 +7,7 @@ from blue_assistant.env import (
     BLUE_ASSISTANT_IMAGE_DEFAULT_SIZE,
     BLUE_ASSISTANT_IMAGE_DEFAULT_QUALITY,
 )
-from blue_assistant.script.repository.base.
+from blue_assistant.script.repository.base.root import RootScript
 from blue_assistant.logger import logger

 NAME = module.name(__file__, NAME)
@@ -15,15 +15,15 @@ NAME = module.name(__file__, NAME)

 # https://platform.openai.com/docs/guides/images
 def generate_image(
-    script:
+    script: RootScript,
     node_name: str,
 ) -> bool:
-    logger.info(f"{NAME}:
+    logger.info(f"{NAME}: @ {node_name} ...")

     filename = f"{node_name}.png"

     success, _ = api.generate_image(
-        prompt=script.nodes[node_name]["prompt"],
+        prompt=script.apply_vars(script.nodes[node_name]["prompt"]),
         filename=filename,
         object_name=script.object_name,
         model=BLUE_ASSISTANT_IMAGE_DEFAULT_MODEL,
blue_assistant/script/actions/generate_text.py
CHANGED
@@ -4,7 +4,7 @@ from blueness import module
 from openai_commands.text_generation import api

 from blue_assistant import NAME
-from blue_assistant.script.repository.base.
+from blue_assistant.script.repository.base.root import RootScript
 from blue_assistant.env import (
     BLUE_ASSISTANT_TEXT_DEFAULT_MODEL,
     BLUE_ASSISTANT_TEXT_MAX_TOKENS,
@@ -16,13 +16,17 @@ NAME = module.name(__file__, NAME)

 # https://platform.openai.com/docs/guides/text-generation
 def generate_text(
-    script:
+    script: RootScript,
     node_name: str,
 ) -> bool:
-    logger.info(f"{NAME}:
+    logger.info(f"{NAME}: @ {node_name} ...")
+
+    list_of_context_nodes = [node_name]
+    if script.nodes[node_name].get("use_context", False):
+        logger.info("📜 using context.")
+        list_of_context_nodes = script.get_context(node_name)

     messages: List = []
-    list_of_context_nodes = script.get_context(node_name)
     logger.info("node context: {}".format(" <- ".join(list_of_context_nodes)))
     for context_node in reversed(list_of_context_nodes):
         messages += [
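Note: when a node sets `use_context`, `generate_text` walks the dependency graph with `script.get_context` and then iterates the result in reverse, so the deepest dependency contributes to the conversation first. A small standalone sketch of that traversal, mirroring `RootScript.get_context` (edges point from a node to the nodes it depends on; the node names are illustrative):

    from functools import reduce

    import networkx as nx

    G = nx.DiGraph()
    G.add_edge("summarize", "extract")  # summarize depends on extract
    G.add_edge("extract", "crawl")      # extract depends on crawl

    def get_context(node_name: str) -> list:
        return reduce(
            lambda x, y: x + y,
            [get_context(successor) for successor in G.successors(node_name)],
            [node_name],
        )

    assert get_context("summarize") == ["summarize", "extract", "crawl"]
    # reversed(...) yields the oldest context first: crawl, extract, summarize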
blue_assistant/script/actions/generic.py
CHANGED
@@ -1,15 +1,15 @@
 from blueness import module

 from blue_assistant import NAME
-from blue_assistant.script.repository.base.
+from blue_assistant.script.repository.base.root import RootScript
 from blue_assistant.logger import logger

 NAME = module.name(__file__, NAME)


 def generic_action(
-    script:
+    script: RootScript,
     node_name: str,
 ) -> bool:
-    logger.info(f"{NAME}:
+    logger.info(f"{NAME}: @ {node_name} ...")
     return True
blue_assistant/script/actions/web_crawl.py
CHANGED
@@ -4,7 +4,8 @@ from blue_options.logger import log_list

 from blue_assistant import NAME
 from blue_assistant.web.crawl import crawl_list_of_urls
-from blue_assistant.
+from blue_assistant.web.functions import normalize_url
+from blue_assistant.script.repository.base.root import RootScript
 from blue_assistant.logger import logger


@@ -12,10 +13,10 @@ NAME = module.name(__file__, NAME)


 def web_crawl(
-    script:
+    script: RootScript,
     node_name: str,
 ) -> bool:
-    logger.info(f"{NAME}:
+    logger.info(f"{NAME}: @ {node_name} ...")

     seed_url_var_name = script.nodes[node_name].get("seed_urls", "")
     if not isinstance(seed_url_var_name, str):
@@ -32,16 +33,16 @@ def web_crawl(
     if seed_url_var_name not in script.vars:
         logger.error(f"{node_name}: {seed_url_var_name}: seed_urls not found in vars.")
         return False
-    seed_urls = script.vars[seed_url_var_name]
+    seed_urls = list({normalize_url(url) for url in script.vars[seed_url_var_name]})
     log_list(logger, "using", seed_urls, "seed url(s)")

-    success,
+    success, crawl_cache = crawl_list_of_urls(
         seed_urls=seed_urls,
         object_name=script.object_name,
         max_iterations=script.nodes[node_name]["max_iterations"],
         cache_prefix=node_name,
     )

-    script.nodes[node_name]["output"] =
+    script.nodes[node_name]["output"] = crawl_cache

     return success
blue_assistant/script/repository/__init__.py
CHANGED
@@ -1,14 +1,15 @@
 from typing import List, Type

-from blue_assistant.script.repository.
+from blue_assistant.script.repository.base.root import RootScript
+from blue_assistant.script.repository.base.classes import BaseScript
 from blue_assistant.script.repository.blue_amo.classes import BlueAmoScript
 from blue_assistant.script.repository.hue.classes import HueScript
 from blue_assistant.script.repository.orbital_data_explorer.classes import (
     OrbitalDataExplorerScript,
 )

-list_of_script_classes: List[Type[
-
+list_of_script_classes: List[Type[RootScript]] = [
+    BaseScript,
     BlueAmoScript,
     HueScript,
     OrbitalDataExplorerScript,
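Note: `BaseScript` now joins the list explicitly, which lines up with the new `script base` default in `run.sh`. The loader itself lives in `blue_assistant/script/load.py`, which this diff does not touch; the lookup sketched below — matching a script name against each class's `name` attribute — is therefore an assumption about how that list is consumed.

    class RootScript:
        name = "root"

    class BaseScript(RootScript):
        name = "base"

    class BlueAmoScript(BaseScript):
        name = "blue_amo"

    list_of_script_classes = [BaseScript, BlueAmoScript]

    def load_script_class(script_name: str):
        # hypothetical lookup by the `name` class attribute
        for script_class in list_of_script_classes:
            if script_class.name == script_name:
                return script_class
        return None

    assert load_script_class("base") is BaseScript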
blue_assistant/script/repository/base/classes.py
CHANGED
@@ -1,193 +1,21 @@
-
-import os
-import networkx as nx
-from functools import reduce
+import copy

 from blueness import module
-from blue_objects import file,
-from blue_objects.metadata import post_to_object
-from blueflow.workflow import dot_file
+from blue_objects import file, path

 from blue_assistant import NAME
+from blue_assistant.script.repository.base.root import RootScript
+from blue_assistant.script.actions import dict_of_actions
 from blue_assistant.logger import logger


 NAME = module.name(__file__, NAME)


-class BaseScript:
-    name =
+class BaseScript(RootScript):
+    name = path.name(file.path(__file__))

-    def __init__(
-
-        object_name: str,
-        test_mode: bool = False,
-        verbose: bool = False,
-    ):
-        self.object_name = object_name
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)

-        self.
-
-        self.verbose = verbose
-
-        metadata_filename = os.path.join(
-            file.path(__file__),
-            f"../{self.name}",
-            "metadata.yaml",
-        )
-        self.metadata: Dict
-        success, self.metadata = file.load_yaml(metadata_filename)
-        assert success, f"cannot load {self.name}/metadata.yaml"
-
-        self.metadata.setdefault("script", {})
-        assert isinstance(
-            self.script,
-            dict,
-        ), "script: expected dict, received {}.".format(
-            self.script.__class__.__name__,
-        )
-
-        self.script.setdefault("nodes", {})
-        assert isinstance(
-            self.nodes,
-            dict,
-        ), "nodes: expected dict, received {}.".format(
-            self.nodes.__class__.__name__,
-        )
-
-        self.script.setdefault("vars", {})
-        assert isinstance(
-            self.vars,
-            dict,
-        ), "vars: expected dict, received {}.".format(
-            self.vars.__class__.__name__,
-        )
-
-        if self.test_mode:
-            logger.info("🧪 test mode is on.")
-
-            for node_name, node in self.nodes.items():
-                if "test_mode" in self.script:
-                    updates = self.script["test_mode"]
-                    logger.info(f"🧪 vars.update({updates})")
-                    self.vars.update(updates)
-
-                if "test_mode" in node:
-                    updates = node["test_mode"]
-                    logger.info(f"🧪 {node_name}.update({updates})")
-                    node.update(updates)
-
-        logger.info(
-            "loaded {} node(s): {}".format(
-                len(self.nodes),
-                ", ".join(self.nodes.keys()),
-            )
-        )
-
-        logger.info(
-            "loaded {} var(s){}".format(
-                len(self.vars),
-                "" if verbose else ": {}".format(", ".join(self.vars.keys())),
-            )
-        )
-        if verbose:
-            for var_name, var_value in self.vars.items():
-                logger.info("{}: {}".format(var_name, var_value))
-
-        assert self.generate_graph(), "cannot generate graph."
-
-    def __str__(self) -> str:
-        return "{}[{} var(s), {} node(s) -> {}]".format(
-            self.__class__.__name__,
-            len(self.vars),
-            len(self.nodes),
-            self.object_name,
-        )
-
-    def apply_vars(self, text: str) -> str:
-        for var_name, var_value in self.vars.items():
-            text = text.replace(f":::{var_name}", str(var_value))
-
-        return text
-
-    def generate_graph(self) -> bool:
-        self.G: nx.DiGraph = nx.DiGraph()
-
-        list_of_nodes = list(self.nodes.keys())
-        for node in self.nodes.values():
-            list_of_nodes += node.get("depends-on", "").split(",")
-
-        list_of_nodes = list({node_name for node_name in list_of_nodes if node_name})
-        logger.info(
-            "{} node(s): {}".format(
-                len(list_of_nodes),
-                ", ".join(list_of_nodes),
-            )
-        )
-
-        for node_name in list_of_nodes:
-            self.G.add_node(node_name)
-
-        for node_name, node in self.nodes.items():
-            for dependency in node.get("depends-on", "").split(","):
-                if dependency:
-                    self.G.add_edge(node_name, dependency)
-
-        return self.save_graph()
-
-    def get_context(
-        self,
-        node_name: str,
-    ) -> List[str]:
-        return reduce(
-            lambda x, y: x + y,
-            [self.get_context(successor) for successor in self.G.successors(node_name)],
-            [node_name],
-        )
-
-    def run(
-        self,
-    ) -> bool:
-        logger.info(
-            "{}.run: {}:{} -> {}".format(
-                NAME,
-                self.__class__.__name__,
-                self.name,
-                self.object_name,
-            )
-        )
-
-        return post_to_object(
-            self.object_name,
-            "script",
-            self.script,
-        )
-
-    def save_graph(self) -> bool:
-        return dot_file.save_to_file(
-            objects.path_of(
-                filename="workflow.dot",
-                object_name=self.object_name,
-            ),
-            self.G,
-            caption=" | ".join(
-                [
-                    self.name,
-                    self.object_name,
-                ]
-            ),
-            add_legend=False,
-        )
-
-    # Aliases
-    @property
-    def script(self) -> Dict:
-        return self.metadata["script"]
-
-    @property
-    def nodes(self) -> Dict[str, Dict]:
-        return self.metadata["script"]["nodes"]
-
-    @property
-    def vars(self) -> Dict:
-        return self.metadata["script"]["vars"]
+        self.dict_of_actions = copy.deepcopy(dict_of_actions)
blue_assistant/script/repository/base/root.py
ADDED
@@ -0,0 +1,276 @@
+import os
+from typing import Dict, List
+from functools import reduce
+import networkx as nx
+from tqdm import tqdm
+
+from blue_options.options import Options
+from blue_objects import file, path, objects
+from blue_objects.metadata import post_to_object
+from blueflow.workflow import dot_file
+
+from blue_assistant.logger import logger
+
+
+class RootScript:
+    name = path.name(file.path(__file__))
+
+    def __init__(
+        self,
+        object_name: str,
+        test_mode: bool = False,
+        verbose: bool = False,
+    ):
+        self.nodes_changed = False
+
+        self.object_name = object_name
+
+        self.test_mode = test_mode
+
+        self.verbose = verbose
+
+        self.dict_of_actions = {}
+
+        metadata_filename = os.path.join(
+            file.path(__file__),
+            f"../{self.name}",
+            "metadata.yaml",
+        )
+        self.metadata: Dict
+        success, self.metadata = file.load_yaml(metadata_filename)
+        assert success, f"cannot load {self.name}/metadata.yaml"
+
+        self.metadata.setdefault("script", {})
+        assert isinstance(
+            self.script,
+            dict,
+        ), "script: expected dict, received {}.".format(
+            self.script.__class__.__name__,
+        )
+
+        self.script.setdefault("nodes", {})
+        assert isinstance(
+            self.nodes,
+            dict,
+        ), "nodes: expected dict, received {}.".format(
+            self.nodes.__class__.__name__,
+        )
+
+        self.script.setdefault("vars", {})
+        assert isinstance(
+            self.vars,
+            dict,
+        ), "vars: expected dict, received {}.".format(
+            self.vars.__class__.__name__,
+        )
+
+        if self.test_mode:
+            logger.info("🧪 test mode is on.")
+
+            if "test_mode" in self.script:
+                updates = self.script["test_mode"]
+                logger.info(f"🧪 vars.update({updates})")
+                self.vars.update(updates)
+
+            for node_name, node in self.nodes.items():
+                if "test_mode" in node:
+                    updates = node["test_mode"]
+                    logger.info(f"🧪 {node_name}.update({updates})")
+                    node.update(updates)
+
+        logger.info(
+            "loaded {} node(s): {}".format(
+                len(self.nodes),
+                ", ".join(self.nodes.keys()),
+            )
+        )
+
+        logger.info(
+            "loaded {} var(s): {}".format(
+                len(self.vars),
+                ", ".join(self.vars.keys()),
+            )
+        )
+        if verbose:
+            for var_name, var_value in self.vars.items():
+                logger.info("{}: {}".format(var_name, var_value))
+
+        assert self.generate_graph(), "cannot generate graph."
+
+    def __str__(self) -> str:
+        return "{}[{} var(s), {} node(s) -> {}]".format(
+            self.__class__.__name__,
+            len(self.vars),
+            len(self.nodes),
+            self.object_name,
+        )
+
+    def apply_vars(self, text: str) -> str:
+        for var_name, var_value in self.vars.items():
+            text = text.replace(f":::{var_name}", str(var_value))
+
+        for node_name, node in self.nodes.items():
+            node_output = node.get("output", "")
+            if isinstance(node_output, str):
+                text = text.replace(f":::{node_name}", node_output)
+
+        return text
+
+    def generate_graph(
+        self,
+        verbose: bool = False,
+    ) -> bool:
+        self.G: nx.DiGraph = nx.DiGraph()
+
+        list_of_nodes = list(self.nodes.keys())
+        for node in self.nodes.values():
+            list_of_nodes += node.get("depends-on", "").split(",")
+
+        list_of_nodes = list({node_name for node_name in list_of_nodes if node_name})
+        if verbose:
+            logger.info(
+                "{} node(s): {}".format(
+                    len(list_of_nodes),
+                    ", ".join(list_of_nodes),
+                )
+            )
+
+        for node_name in list_of_nodes:
+            self.G.add_node(node_name)
+
+        for node_name, node in self.nodes.items():
+            for dependency in node.get("depends-on", "").split(","):
+                if dependency:
+                    self.G.add_edge(node_name, dependency)
+
+        return self.save_graph()
+
+    def get_context(
+        self,
+        node_name: str,
+    ) -> List[str]:
+        return reduce(
+            lambda x, y: x + y,
+            [self.get_context(successor) for successor in self.G.successors(node_name)],
+            [node_name],
+        )
+
+    def perform_action(
+        self,
+        node_name: str,
+    ) -> bool:
+        action_name = self.nodes[node_name].get("action", "unknown")
+        logger.info(f"---- node: {node_name} ---- ")
+
+        if action_name in self.dict_of_actions:
+            return self.dict_of_actions[action_name](
+                script=self,
+                node_name=node_name,
+            )
+
+        logger.error(f"{action_name}: action not found.")
+        return False
+
+    def run(
+        self,
+        runnable: str = "",
+    ) -> bool:
+        logger.info(f"{self.name}.run -> {self.object_name}")
+
+        if runnable:
+            logger.info(f"applying runnables: {runnable}")
+            runnable_options = Options(runnable)
+            for node_name, node_is_runnable in runnable_options.items():
+                logger.info(f"{node_name}.runnable={node_is_runnable}")
+                self.nodes[node_name]["runnable"] = node_is_runnable
+
+        success: bool = True
+        while (
+            not all(self.nodes[node].get("completed", False) for node in self.nodes)
+            and success
+        ):
+            self.nodes_changed = False
+
+            for node_name in tqdm(self.nodes):
+                if self.nodes[node_name].get("completed", False):
+                    continue
+
+                if not self.nodes[node_name].get("runnable", True):
+                    logger.info(f"Not runnable, skipped: {node_name}.")
+                    self.nodes[node_name]["completed"] = True
+                    continue
+
+                pending_dependencies = [
+                    node_name_
+                    for node_name_ in self.G.successors(node_name)
+                    if not self.nodes[node_name_].get("completed", False)
+                ]
+                if pending_dependencies:
+                    logger.info(
+                        'node "{}": {} pending dependenci(es): {}'.format(
+                            node_name,
+                            len(pending_dependencies),
+                            ", ".join(pending_dependencies),
+                        )
+                    )
+                    continue
+
+                if not self.perform_action(node_name=node_name):
+                    success = False
+                    break
+
+                self.nodes[node_name]["completed"] = True
+
+                cache_filename = self.nodes[node_name].get("cache", "")
+                if cache_filename:
+                    if not file.save_text(
+                        objects.path_of(
+                            object_name=self.object_name,
+                            filename=cache_filename,
+                        ),
+                        [self.nodes[node_name].get("output", "")],
+                    ):
+                        success = False
+                        break
+
+            if self.nodes_changed:
+                logger.info("🪄 nodes changed.")
+                break
+
+        if not post_to_object(
+            self.object_name,
+            "output",
+            self.metadata,
+        ):
+            success = False
+
+        return success
+
+    def save_graph(self) -> bool:
+        return dot_file.save_to_file(
+            objects.path_of(
+                filename="workflow.dot",
+                object_name=self.object_name,
+            ),
+            self.G,
+            caption=" | ".join(
+                [
+                    self.name,
+                    self.object_name,
+                ]
+            ),
+            add_legend=False,
+        )
+
+    # Aliases
+    @property
+    def script(self) -> Dict:
+        return self.metadata["script"]
+
+    @property
+    def nodes(self) -> Dict[str, Dict]:
+        return self.metadata["script"]["nodes"]
+
+    @property
+    def vars(self) -> Dict:
+        return self.metadata["script"]["vars"]
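Note: `RootScript.apply_vars` substitutes `:::name` placeholders in two passes — first with script vars, then with the string `output` of any node. A standalone re-implementation of just that substitution, with made-up variable and node names for illustration:

    def apply_vars(text: str, variables: dict, nodes: dict) -> str:
        # first pass: script variables
        for var_name, var_value in variables.items():
            text = text.replace(f":::{var_name}", str(var_value))
        # second pass: string outputs of nodes
        for node_name, node in nodes.items():
            node_output = node.get("output", "")
            if isinstance(node_output, str):
                text = text.replace(f":::{node_name}", node_output)
        return text

    prompt = "Write :::frame_count frames based on :::crawl"
    print(apply_vars(prompt, {"frame_count": 4}, {"crawl": {"output": "the crawl notes"}}))
    # -> Write 4 frames based on the crawl notes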
blue_assistant/script/repository/blue_amo/actions/__init__.py
CHANGED
@@ -2,12 +2,12 @@ from typing import Dict, Callable

 from blue_assistant.script.repository.base.classes import BaseScript
 from blue_assistant.script.repository.blue_amo.actions import (
-
+    setting_frame_prompts,
     stitching_the_frames,
 )


 dict_of_actions: Dict[str, Callable[[BaseScript, str], bool]] = {
-    "
+    "setting_frame_prompts": setting_frame_prompts.setting_frame_prompts,
     "stitching_the_frames": stitching_the_frames.stitching_the_frames,
 }
blue_assistant/script/repository/blue_amo/actions/{slicing_into_frames.py → setting_frame_prompts.py}
CHANGED
@@ -7,13 +7,13 @@ from blue_assistant.logger import logger
 NAME = module.name(__file__, NAME)


-def
+def setting_frame_prompts(
     script: BaseScript,
     node_name: str,
 ) -> bool:
-    logger.info(
+    logger.info(NAME)

-    list_of_frame_prompts = script.nodes[
+    list_of_frame_prompts = script.nodes["slicing_into_frames"]["output"].split("---")
     if len(list_of_frame_prompts) != script.vars["frame_count"]:
         logger.warning(
             "{} != {}, frame count doesn't match, bad AI! 😁".format(
@@ -31,8 +31,8 @@ def slicing_into_frames(

         script.nodes[node_name]["prompt"] = (
             script.nodes[node_name]["prompt"]
-            .replace(":::
-            .replace(":::
+            .replace(":::story_so_far", " ".join(list_of_frame_prompts[:index]))
+            .replace(":::story_of_this_frame", list_of_frame_prompts[index])
         )

     return True
blue_assistant/script/repository/blue_amo/classes.py
CHANGED
@@ -4,78 +4,55 @@ from blueness import module
 from blue_objects import file, path

 from blue_assistant import NAME
-from blue_assistant.script.repository.
+from blue_assistant.script.repository.base.classes import BaseScript
 from blue_assistant.script.repository.blue_amo.actions import dict_of_actions
 from blue_assistant.logger import logger

 NAME = module.name(__file__, NAME)


-class BlueAmoScript(
+class BlueAmoScript(BaseScript):
     name = path.name(file.path(__file__))

-    def __init__(
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+
+        self.dict_of_actions.update(dict_of_actions)
+
+    def generate_graph(
         self,
-        object_name: str,
-        test_mode: bool = False,
         verbose: bool = False,
-    ):
-        super().
-
-            test_mode=test_mode,
-            verbose=verbose,
-        )
-
-        if self.test_mode:
-            self.vars["frame_count"] = 1
+    ) -> bool:
+        if not super().generate_graph(verbose=verbose):
+            return False

-
+        map_node_name = "generating_the_frames"
         logger.info(
             "{}: expanding {} X {}...".format(
                 NAME,
-
+                map_node_name,
                 self.vars["frame_count"],
             )
         )

-
-        del self.nodes[
-        self.G.remove_node(
-
-        reduce_node = "stitching_the_frames"
-        self.G.add_node(reduce_node)
-        self.nodes[reduce_node] = {"action": "generic"}
+        map_node = self.nodes[map_node_name]
+        del self.nodes[map_node_name]
+        self.G.remove_node(map_node_name)

+        reduce_node_name = "stitching_the_frames"
         for index in range(self.vars["frame_count"]):
             node_name = f"generating_frame_{index+1:03d}"

-            self.nodes[node_name] = copy.deepcopy(
+            self.nodes[node_name] = copy.deepcopy(map_node)

             self.G.add_node(node_name)
             self.G.add_edge(
                 node_name,
-                "
+                "setting_frame_prompts",
             )
             self.G.add_edge(
-
+                reduce_node_name,
                 node_name,
             )

-
-
-    def perform_action(
-        self,
-        node_name: str,
-    ) -> bool:
-        if not super().perform_action(
-            node_name=node_name,
-        ):
-            return False
-
-        if node_name in dict_of_actions:
-            return dict_of_actions[node_name](
-                script=self,
-                node_name=node_name,
-            )
-
-        return True
+        return self.save_graph()
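Note: `BlueAmoScript.generate_graph` now performs the map/reduce expansion that used to live in `__init__`: the single `generating_the_frames` template node is removed and replaced by `frame_count` copies, each wired between `setting_frame_prompts` and the `stitching_the_frames` reduce node. A minimal networkx sketch of that rewiring (node names taken from the diff, everything else illustrative):

    import copy

    import networkx as nx

    G = nx.DiGraph()
    nodes = {
        "setting_frame_prompts": {},
        "generating_the_frames": {"action": "generate_image"},
        "stitching_the_frames": {},
    }
    for name in nodes:
        G.add_node(name)

    frame_count = 3
    map_node = nodes.pop("generating_the_frames")
    G.remove_node("generating_the_frames")

    for index in range(frame_count):
        node_name = f"generating_frame_{index+1:03d}"
        nodes[node_name] = copy.deepcopy(map_node)
        G.add_node(node_name)
        G.add_edge(node_name, "setting_frame_prompts")  # each frame depends on the prompts
        G.add_edge("stitching_the_frames", node_name)   # the reduce node depends on every frame

    print(sorted(G.predecessors("setting_frame_prompts")))
    # -> ['generating_frame_001', 'generating_frame_002', 'generating_frame_003']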
blue_assistant/script/repository/hue/classes.py
CHANGED
@@ -1,7 +1,7 @@
 from blue_objects import file, path

-from blue_assistant.script.repository.
+from blue_assistant.script.repository.base.classes import BaseScript


-class HueScript(
+class HueScript(BaseScript):
     name = path.name(file.path(__file__))
blue_assistant/script/repository/orbital_data_explorer/actions/__init__.py
ADDED
@@ -0,0 +1,11 @@
+from typing import Dict, Callable
+
+from blue_assistant.script.repository.base.classes import BaseScript
+from blue_assistant.script.repository.orbital_data_explorer.actions import (
+    expanding_the_extractions,
+)
+
+
+dict_of_actions: Dict[str, Callable[[BaseScript, str], bool]] = {
+    "expanding_the_extractions": expanding_the_extractions.expanding_the_extractions,
+}
blue_assistant/script/repository/orbital_data_explorer/actions/expanding_the_extractions.py
ADDED
@@ -0,0 +1,109 @@
+import copy
+from typing import Dict
+
+from blueness import module
+from blue_options.logger import log_dict, log_list
+from blue_objects import file, objects
+from blue_objects.metadata import get_from_object
+
+from blue_assistant import NAME
+from blue_assistant.script.repository.base.classes import BaseScript
+from blue_assistant.web.functions import url_to_filename
+from blue_assistant.logger import logger
+
+NAME = module.name(__file__, NAME)
+
+
+def expanding_the_extractions(
+    script: BaseScript,
+    node_name: str,
+) -> bool:
+    map_node_name = "extraction"
+
+    crawl_cache: Dict[str, str] = get_from_object(
+        script.object_name,
+        "web_crawl_cache",
+        {},
+    )
+    log_dict(logger, "using", crawl_cache, "crawled url(s)")
+
+    list_of_urls = [
+        url
+        for url, content_type in crawl_cache.items()
+        if "html" in content_type
+        and not file.exists(
+            objects.path_of(
+                object_name=script.object_name,
+                filename="{}_cache/{}.txt".format(
+                    map_node_name,
+                    url_to_filename(url),
+                ),
+            )
+        )
+    ]
+    log_list(logger, "using", list_of_urls, "crawled unextracted html(s).")
+
+    max_nodes = min(
+        len(list_of_urls),
+        script.nodes[node_name]["max_nodes"],
+    )
+    logger.info(
+        "{}: expanding {} X {}...".format(
+            NAME,
+            map_node_name,
+            max_nodes,
+        )
+    )
+
+    map_node = script.nodes[map_node_name]
+    del script.nodes[map_node_name]
+    script.G.remove_node(map_node_name)
+
+    reduce_node_name = "generating_summary"
+    for index in range(max_nodes):
+        url = list_of_urls[index]
+        index_node_name = f"{map_node_name}_{index+1:03d}"
+
+        success, url_content = file.load_yaml(
+            filename=objects.path_of(
+                object_name=script.object_name,
+                filename="web_crawl_cache/{}.yaml".format(
+                    url_to_filename(url),
+                ),
+            ),
+        )
+        if not success:
+            logger.warning(f"{url}: failed to load url content.")
+            continue
+        if "text" not in url_content:
+            logger.warning(f"{url}: no text found in url content.")
+            continue
+
+        logger.info(f"{url} -{map_node_name}-> {index_node_name}")
+
+        script.nodes[index_node_name] = copy.deepcopy(map_node)
+
+        script.nodes[index_node_name]["prompt"] = map_node["prompt"].replace(
+            ":::url_content",
+            url_content["text"],
+        )
+
+        script.nodes[index_node_name]["url"] = url
+        script.nodes[index_node_name]["cache"] = "{}_cache/{}.txt".format(
+            map_node_name,
+            url_to_filename(url),
+        )
+
+        script.G.add_node(index_node_name)
+        script.G.add_edge(
+            index_node_name,
+            node_name,
+        )
+        script.G.add_edge(
+            reduce_node_name,
+            index_node_name,
+        )
+
+    script.nodes_changed = True
+
+    return script.save_graph()
blue_assistant/script/repository/orbital_data_explorer/classes.py
CHANGED
@@ -1,7 +1,15 @@
 from blue_objects import file, path

-from blue_assistant.script.repository.
+from blue_assistant.script.repository.base.classes import BaseScript
+from blue_assistant.script.repository.orbital_data_explorer.actions import (
+    dict_of_actions,
+)


-class OrbitalDataExplorerScript(
+class OrbitalDataExplorerScript(BaseScript):
     name = path.name(file.path(__file__))
+
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+
+        self.dict_of_actions.update(dict_of_actions)
blue_assistant/web/crawl.py
CHANGED
@@ -35,7 +35,7 @@ def crawl_list_of_urls(
         seed_urls
         + get_from_object(
             object_name,
-            f"{cache_prefix}
+            f"{cache_prefix}_queue",
            [],
        )
    )
@@ -44,7 +44,7 @@ def crawl_list_of_urls(

    crawl_cache: Dict[str, str] = get_from_object(
        object_name,
-        f"{cache_prefix}
+        f"{cache_prefix}_cache",
        {},
    )
    log_dict(logger, "loaded cache:", crawl_cache, "url(s)")
@@ -75,7 +75,7 @@ def crawl_list_of_urls(
        if not file.save_yaml(
            filename=objects.path_of(
                object_name=object_name,
-                filename="{}
+                filename="{}_cache/{}.yaml".format(
                    cache_prefix,
                    url_to_filename(url),
                ),
@@ -108,12 +108,12 @@ def crawl_list_of_urls(
    if not (
        post_to_object(
            object_name,
-            f"{cache_prefix}
+            f"{cache_prefix}_cache",
            crawl_cache,
        )
        and post_to_object(
            object_name,
-            f"{cache_prefix}
+            f"{cache_prefix}_queue",
            queue,
        )
    ):
blue_assistant/web/fetch.py
CHANGED
@@ -9,6 +9,7 @@ from blueness import module
 from blue_options.logger import log_long_text, log_list

 from blue_assistant import NAME
+from blue_assistant.web.functions import normalize_url
 from blue_assistant.logger import logger

 warnings.filterwarnings("ignore", category=XMLParsedAsHTMLWarning)
@@ -49,8 +50,7 @@ def fetch_links_and_text(
    for a_tag in soup.find_all("a", href=True):
        a_url = urljoin(url, a_tag["href"])

-
-        a_url = a_url.split("#", 1)[0]
+        a_url = normalize_url(a_url)

        if a_url.startswith(url):
            if url not in list_of_urls:
blue_assistant/web/functions.py
CHANGED
@@ -1,4 +1,15 @@
 import re
+from urllib.parse import urlparse, urlunparse
+
+
+# https://chatgpt.com/c/67d733a0-4be4-8005-bf52-fb9ba32487c2
+def normalize_url(url: str) -> str:
+    return urlunparse(
+        urlparse(url)._replace(
+            query="",
+            fragment="",
+        ),
+    )


 def url_to_filename(
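Note: `normalize_url` drops the query string and fragment so crawl URLs de-duplicate cleanly; it replaces the old `split("#", 1)[0]` in `fetch.py` and is also applied to seed URLs in the `web_crawl` action. A usage sketch with an illustrative URL:

    from urllib.parse import urlparse, urlunparse

    def normalize_url(url: str) -> str:
        # same normalization as blue_assistant.web.functions.normalize_url
        return urlunparse(urlparse(url)._replace(query="", fragment=""))

    print(normalize_url("https://ode.rsl.wustl.edu/mars/index.aspx?tab=data#results"))
    # -> https://ode.rsl.wustl.edu/mars/index.aspx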
{blue_assistant-4.297.1.dist-info → blue_assistant-4.319.1.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: blue_assistant
-Version: 4.297.1
+Version: 4.319.1
 Summary: 🧠 An AI Assistant.
 Home-page: https://github.com/kamangir/blue-assistant
 Author: Arash Abadpour (Kamangir)
@@ -124,4 +124,4 @@ Also home to [`@web`](https://raw.githubusercontent.com/kamangir/blue-assistant/

 [](https://github.com/kamangir/blue-assistant/actions/workflows/pylint.yml) [](https://github.com/kamangir/blue-assistant/actions/workflows/pytest.yml) [](https://github.com/kamangir/blue-assistant/actions/workflows/bashtest.yml) [](https://pypi.org/project/blue-assistant/) [](https://pypistats.org/packages/blue-assistant)

-built by 🌀 [`blue_options-4.240.1`](https://github.com/kamangir/awesome-bash-cli), based on 🧠 [`blue_assistant-4.297.1`](https://github.com/kamangir/blue-assistant).
+built by 🌀 [`blue_options-4.240.1`](https://github.com/kamangir/awesome-bash-cli), based on 🧠 [`blue_assistant-4.319.1`](https://github.com/kamangir/blue-assistant).
{blue_assistant-4.297.1.dist-info → blue_assistant-4.319.1.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
 blue_assistant/README.py,sha256=EJORj3I5pucJplI86lrFaZBN5C9IYNgKoG_V7h27NHw,2586
-blue_assistant/__init__.py,sha256=
+blue_assistant/__init__.py,sha256=QCw7ze05yQxWjXRvjx1vYtxUHZldMVhjcBd1-l39zvE,311
 blue_assistant/__main__.py,sha256=URtal70XZc0--3FDTYWcLtnGOqBYjMX9gt-L1k8hDXI,361
 blue_assistant/config.env,sha256=npodyuuhkZUHUv9FnEiQQZkKxFbg8nQb1YpOCURqV3Y,301
 blue_assistant/env.py,sha256=FTSdJ8-J4jAyI0-h3MBgOweQBWd3YEFIibBHSXpClrY,760
@@ -20,11 +20,11 @@ blue_assistant/.abcli/hue/create_user.sh,sha256=Nh8FhnGweB2JZB7SVh-6jp8ud5YHeJSa
 blue_assistant/.abcli/hue/list.sh,sha256=ynptjPo6jZnwm-7wAVgGx-mZvyPKZ9b5JaJoY0xidCg,268
 blue_assistant/.abcli/hue/set.sh,sha256=VcADsfbjjbrxIMX9cVVHeK0MH649ZRY29V8YDTgflms,266
 blue_assistant/.abcli/script/list.sh,sha256=2lcVfqDfZP50NszF8o5YCo3TrJKeDc_qo7MTAF3XTGw,131
-blue_assistant/.abcli/script/run.sh,sha256=
+blue_assistant/.abcli/script/run.sh,sha256=1pzzzHeviUZ9tLJZrCsAleQgNfoq8hnlwPuBCY2Ofso,930
 blue_assistant/.abcli/tests/README.sh,sha256=Qs0YUxVB1OZZ70Nqw2kT1LKXeUnC5-XfQRMfqb8Cbwg,152
 blue_assistant/.abcli/tests/help.sh,sha256=7AAZzCEo5vZ1cBAMfj4virDClabaUMdOV-NqXSJQVUM,918
 blue_assistant/.abcli/tests/script_list.sh,sha256=OVOwWO9wR0eeDZTM6uub-eTKbz3eswU3vEUPWXcK-gQ,178
-blue_assistant/.abcli/tests/script_run.sh,sha256=
+blue_assistant/.abcli/tests/script_run.sh,sha256=N0sg5j5a60x5v4V8PCEVjyjPP8xO-uC9JduSCBU0EyE,809
 blue_assistant/.abcli/tests/version.sh,sha256=oR2rvYR8zi-0VDPIdPJsmsmWwYaamT8dmNTqUh3-8Gw,154
 blue_assistant/.abcli/tests/web_crawl.sh,sha256=sz3LbpidWvjG7kQoWxQBtdBe5yntm14ylAUsgPJWhko,372
 blue_assistant/.abcli/tests/web_fetch.sh,sha256=C8PFWlmRa9heNdP9yhshriCBKG1uUlps-oxhAM70AZI,312
@@ -34,40 +34,41 @@ blue_assistant/help/__init__.py,sha256=ajz1GSNU9xYVrFEDSz6Xwg7amWQ_yvW75tQa1ZvRI
 blue_assistant/help/__main__.py,sha256=cVejR7OpoWPg0qLbm-PZf5TuJS27x49jzfiyCLyzEns,241
 blue_assistant/help/functions.py,sha256=O85zVEMtnm32O7KB6W6uQRoFXnE_4dW5pwYZtMakYDg,865
 blue_assistant/help/hue.py,sha256=ZElPG24ekiS7eIGLVrP2gB_womlGUuwln2cded4Li-c,2319
-blue_assistant/help/script.py,sha256=
+blue_assistant/help/script.py,sha256=THeHam9PoS_2I9G06ggMH1R8kB0lzz_8qOQNYPhl7Zw,1164
 blue_assistant/help/web.py,sha256=LNJRbMXipXUojJmmTghY9YAxFqPDLTCvcRCfpJrfgvk,918
 blue_assistant/script/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-blue_assistant/script/__main__.py,sha256=
+blue_assistant/script/__main__.py,sha256=O8zPwcgil-x0YmqIl0GWbbSEcpFDrx0e_VTvqBwG8Io,1802
 blue_assistant/script/load.py,sha256=JsDY9T3HTM9vXngvKsA0Mt_erxAnRR_jI62-JhrOBMU,831
-blue_assistant/script/actions/__init__.py,sha256=
-blue_assistant/script/actions/generate_image.py,sha256=
-blue_assistant/script/actions/generate_text.py,sha256=
-blue_assistant/script/actions/generic.py,sha256=
-blue_assistant/script/actions/web_crawl.py,sha256=
-blue_assistant/script/repository/__init__.py,sha256=
+blue_assistant/script/actions/__init__.py,sha256=rTJw42KtMcsOMU1Z-h1e5Nch2Iax-t2P84vPZ-ccR_c,573
+blue_assistant/script/actions/generate_image.py,sha256=SJLKkeduWkJgN-0Y8H3ov8xSw3MBpmjQSBTR9vwXstA,1343
+blue_assistant/script/actions/generate_text.py,sha256=LJmXHZBpLdMMnE5SJGbv03bELTlG1zLav8XW2QLtRWI,2023
+blue_assistant/script/actions/generic.py,sha256=UkyaM16qXdmTAVfduo6niCpHk5nB7rir-9oIW1VdwOg,343
+blue_assistant/script/actions/web_crawl.py,sha256=aq-Jq2sFeUcU2EzlOnK_X_L3Lim_x8SMhxNJJ9hNV-g,1553
+blue_assistant/script/repository/__init__.py,sha256=zxqxFim6RdNhQLU3SWVytMwsf0NyhX1c_Mhi-ZUFi2w,658
 blue_assistant/script/repository/base/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-blue_assistant/script/repository/base/classes.py,sha256=
+blue_assistant/script/repository/base/classes.py,sha256=AW-fK0CiAosIDPVPA9aKOIlEgn3oQq-2L265vzTphSQ,519
+blue_assistant/script/repository/base/root.py,sha256=4yGbMTlSmbQXGcOhOInElBiN1AqBY9TED02qa_ee4Hk,8347
 blue_assistant/script/repository/blue_amo/__init__.py,sha256=WjL9GIlN-DBnbUMJ8O_FxTp0rcVGlsIS3H9YtXEefTk,76
-blue_assistant/script/repository/blue_amo/classes.py,sha256=
-blue_assistant/script/repository/blue_amo/actions/__init__.py,sha256=
-blue_assistant/script/repository/blue_amo/actions/
+blue_assistant/script/repository/blue_amo/classes.py,sha256=ZYjnCUocGB-3K-gBrq4_CasR5vBQ1OfIECcpf_XZfgE,1576
+blue_assistant/script/repository/blue_amo/actions/__init__.py,sha256=JE4XK5Do64kLfAUxKTa15vkqUQ5JMCiHZfm03blBxi8,438
+blue_assistant/script/repository/blue_amo/actions/setting_frame_prompts.py,sha256=4YkEsWNhFg_5crYDMPglUVjlWpoG0ditKbUittYiFo4,1205
 blue_assistant/script/repository/blue_amo/actions/stitching_the_frames.py,sha256=mbXriat6deEAmuo5Y1ValySnUXDENR7TZS_3nVPlQ6M,3622
-blue_assistant/script/repository/generic/__init__.py,sha256=kLffGsQMQAFJTw6IZBE5eBxvshP1x9wwHHR4hsDJblo,75
-blue_assistant/script/repository/generic/classes.py,sha256=VhZpyA0sOBSlamopRPJ1EyPTd0i5-LvTzCAYlBJR-Es,2589
 blue_assistant/script/repository/hue/__init__.py,sha256=WjL9GIlN-DBnbUMJ8O_FxTp0rcVGlsIS3H9YtXEefTk,76
 blue_assistant/script/repository/hue/__main__.py,sha256=jaesrONQsrpVdg8A7NzzT8xpsdXs5gmrywOTE_TWD6c,2321
 blue_assistant/script/repository/hue/api.py,sha256=C3KzT_MG868gsznUXpwEbUleBjnJObWzZgzvN6wi3uo,4774
-blue_assistant/script/repository/hue/classes.py,sha256=
+blue_assistant/script/repository/hue/classes.py,sha256=AbihLKw4Ii9MHnCQj1qOrZiZhQh6LhbDcN3kLTigQ00,179
 blue_assistant/script/repository/hue/colors.py,sha256=rUdtCroNAnzm1zUuVp8eVhvfIie1f7sd208ypsFAJ_w,625
 blue_assistant/script/repository/orbital_data_explorer/__init__.py,sha256=yy5FtCeHlr9dRfqxw4QYWr7_yRjnQpwVyuAY2vLrh4Q,110
-blue_assistant/script/repository/orbital_data_explorer/classes.py,sha256=
+blue_assistant/script/repository/orbital_data_explorer/classes.py,sha256=C75k_GNq6lG8lPiETR3n1E6T2ier52_DqXxk1HmELlY,419
+blue_assistant/script/repository/orbital_data_explorer/actions/__init__.py,sha256=TKOLK61Z18ygpnVjOtjeB4h4irvyyAlc04Wlu7QrSxI,370
+blue_assistant/script/repository/orbital_data_explorer/actions/expanding_the_extractions.py,sha256=MZ729BIVrTniE-r-1kh7F21QB7dqZmgbXw4cQ6Nnkjw,3060
 blue_assistant/web/__init__.py,sha256=70_JSpnfX1mLm8Xv3xHIujfr2FfGeHPRs6HraWDP1XA,114
 blue_assistant/web/__main__.py,sha256=aLkMmUpeWSOxa7YQVbtL90ZNbOcr1OeT0rymw90jx7A,1436
-blue_assistant/web/crawl.py,sha256=
-blue_assistant/web/fetch.py,sha256=
-blue_assistant/web/functions.py,sha256=
-blue_assistant-4.
-blue_assistant-4.
-blue_assistant-4.
-blue_assistant-4.
-blue_assistant-4.
+blue_assistant/web/crawl.py,sha256=w77MNqVSLDE6nm7XuwWU7JMOcm26ISdA_fjT7Ec2bi8,3343
+blue_assistant/web/fetch.py,sha256=WihKsEdF4q8SVMa1IJa-O2BnYbNSr3uzNykJnVuSyrQ,2566
+blue_assistant/web/functions.py,sha256=TVsQbgtkWg4Hy6E2hLJ1bJqjrL6rcmGAxmYuqLUFeSw,882
+blue_assistant-4.319.1.dist-info/LICENSE,sha256=ogEPNDSH0_dhiv_lT3ifVIdgIzHAqNA_SemnxUfPBJk,7048
+blue_assistant-4.319.1.dist-info/METADATA,sha256=N5fnp_I56AXZwdFeI7D5Oc8p_t3dxxO7JITgJj1fLYs,5380
+blue_assistant-4.319.1.dist-info/WHEEL,sha256=52BFRY2Up02UkjOa29eZOS2VxUrpPORXg1pkohGGUS8,91
+blue_assistant-4.319.1.dist-info/top_level.txt,sha256=ud0BkBbdOVze13bNqHuhZj1rwCztaBtDf5ChEYzASOs,15
+blue_assistant-4.319.1.dist-info/RECORD,,
blue_assistant/script/repository/generic/__init__.py
DELETED
@@ -1 +0,0 @@
-from blue_assistant.script.repository.generic.classes import GenericScript
blue_assistant/script/repository/generic/classes.py
DELETED
@@ -1,88 +0,0 @@
-from typing import Dict, List
-import os
-from tqdm import tqdm
-
-
-from blueness import module
-from blue_objects import file, path
-from blue_objects.metadata import post_to_object
-
-from blue_assistant import NAME
-from blue_assistant.script.repository.base.classes import BaseScript
-from blue_assistant.script.actions import dict_of_actions
-from blue_assistant.logger import logger
-
-
-NAME = module.name(__file__, NAME)
-
-
-class GenericScript(BaseScript):
-    name = path.name(file.path(__file__))
-
-    def perform_action(
-        self,
-        node_name: str,
-    ) -> bool:
-        action_name = self.nodes[node_name].get("action", "unknown")
-        logger.info(f"---- node: {node_name} ---- ")
-
-        if action_name not in dict_of_actions:
-            logger.error(f"{action_name}: action not found.")
-            return False
-
-        return dict_of_actions[action_name](
-            script=self,
-            node_name=node_name,
-        )
-
-    def run(
-        self,
-    ) -> bool:
-        if not super().run():
-            return False
-
-        success: bool = True
-        while (
-            not all(self.nodes[node].get("completed", False) for node in self.nodes)
-            and success
-        ):
-            for node_name in tqdm(self.nodes):
-                if self.nodes[node_name].get("completed", False):
-                    continue
-
-                if not self.nodes[node_name].get("runnable", True):
-                    logger.info(f"Not runnable, skipped: {node_name}.")
-                    self.nodes[node_name]["completed"] = True
-                    continue
-
-                pending_dependencies = [
-                    node_name_
-                    for node_name_ in self.G.successors(node_name)
-                    if not self.nodes[node_name_].get("completed", False)
-                ]
-                if pending_dependencies:
-                    logger.info(
-                        'node "{}": {} pending dependenci(es): {}'.format(
-                            node_name,
-                            len(pending_dependencies),
-                            ", ".join(pending_dependencies),
-                        )
-                    )
-                    continue
-
-                if not self.perform_action(
-                    node_name=node_name,
-                ):
-                    success = False
-                    break
-
-                self.nodes[node_name]["completed"] = True
-
-        if not post_to_object(
-            self.object_name,
-            "output",
-            self.metadata,
-        ):
-            return False
-
-        return success
{blue_assistant-4.297.1.dist-info → blue_assistant-4.319.1.dist-info}/LICENSE
File without changes
{blue_assistant-4.297.1.dist-info → blue_assistant-4.319.1.dist-info}/WHEEL
File without changes
{blue_assistant-4.297.1.dist-info → blue_assistant-4.319.1.dist-info}/top_level.txt
File without changes