blue_assistant-4.142.1-py3-none-any.whl → blue_assistant-4.154.1-py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
@@ -23,6 +23,9 @@ function test_blue_assistant_help() {
          "@assistant script list" \
          "@assistant script run" \
          \
+         "@assistant web" \
+         "@assistant web crawl" \
+         \
          "blue_assistant"; do
          abcli_eval ,$options \
              abcli_help $module
@@ -0,0 +1,12 @@
+ #! /usr/bin/env bash
+
+ function test_blue_assistant_web_crawl() {
+     local options=$1
+
+     abcli_eval ,$options \
+         blue_assistant_web_crawl \
+         ~upload \
+         https://ode.rsl.wustl.edu/+https://oderest.rsl.wustl.edu/ \
+         test_blue_assistant_web_crawl-$(abcli_string_timestamp_short) \
+         --max_iterations 3
+ }
@@ -0,0 +1,26 @@
+ #! /usr/bin/env bash
+
+ function blue_assistant_web_crawl() {
+     local options=$1
+     local do_dryrun=$(abcli_option_int "$options" dryrun 0)
+     local do_upload=$(abcli_option_int "$options" upload $(abcli_not $do_dryrun))
+
+     local seed_urls=${2:-void}
+
+     local object_name=$(abcli_clarify_object $3 web-crawl-$(abcli_string_timestamp_short))
+
+     abcli_log "crawling $seed_urls -> $object_name ..."
+
+     abcli_eval dryrun=$do_dryrun \
+         python3 -m blue_assistant.web \
+         crawl \
+         --seed_urls $seed_urls \
+         --object_name $object_name \
+         "${@:4}"
+     [[ $? -ne 0 ]] && return 1
+
+     [[ "$do_upload" == 1 ]] &&
+         abcli_upload - $object_name
+
+     return 0
+ }
@@ -0,0 +1,15 @@
+ #! /usr/bin/env bash
+
+ function blue_assistant_web() {
+     local task=$(abcli_unpack_keyword $1 help)
+
+     local function_name=blue_assistant_web_$task
+     if [[ $(type -t $function_name) == "function" ]]; then
+         $function_name "${@:2}"
+         return
+     fi
+
+     python3 -m blue_assistant.web "$@"
+ }
+
+ abcli_source_caller_suffix_path /web
@@ -4,7 +4,7 @@ ICON = "🧠"

  DESCRIPTION = f"{ICON} An AI Assistant."

- VERSION = "4.142.1"
+ VERSION = "4.154.1"

  REPO_NAME = "blue-assistant"

@@ -5,6 +5,7 @@ from abcli.help.generic import help_functions as generic_help_functions

  from blue_assistant import ALIAS
  from blue_assistant.help.script import help_functions as help_script
+ from blue_assistant.help.web import help_functions as help_web


  def help_browse(
@@ -30,5 +31,6 @@ help_functions.update(
      {
          "browse": help_browse,
          "script": help_script,
+         "web": help_web,
      }
  )
@@ -0,0 +1,33 @@
+ from typing import List
+
+ from blue_options.terminal import show_usage, xtra
+
+
+ def help_crawl(
+     tokens: List[str],
+     mono: bool,
+ ) -> str:
+     options = xtra("dryrun,~upload", mono=mono)
+
+     args = [
+         "[--max_iterations <100000>]",
+     ]
+
+     return show_usage(
+         [
+             "@assistant",
+             "web",
+             "crawl",
+             f"[{options}]",
+             "<url-1>+<url-2>+<url-3>",
+             "[-|<object-name>]",
+         ]
+         + args,
+         "crawl the web.",
+         mono=mono,
+     )
+
+
+ help_functions = {
+     "crawl": help_crawl,
+ }
@@ -4,13 +4,11 @@ from blue_assistant.script.repository.base.classes import BaseScript
  from blue_assistant.script.actions.generic import generic_action
  from blue_assistant.script.actions.generate_image import generate_image
  from blue_assistant.script.actions.generate_text import generate_text
- from blue_assistant.script.actions.skip import skip_action
  from blue_assistant.logger import logger


  dict_of_actions: Dict[str, Callable[[BaseScript, str], bool]] = {
-     "generic_action": generic_action,
+     "generic": generic_action,
      "generate_image": generate_image,
      "generate_text": generate_text,
-     "skip": skip_action,
  }
@@ -44,7 +44,7 @@ class BlueAmoScript(GenericScript):

          reduce_node = "stitching_the_frames"
          self.G.add_node(reduce_node)
-         self.nodes[reduce_node] = {"action": "skip"}
+         self.nodes[reduce_node] = {"action": "generic"}

          for index in range(self.vars["frame_count"]):
              node_name = f"generating_frame_{index+1:03d}"
@@ -0,0 +1,60 @@
+ import argparse
+
+ from blueness import module
+ from blueness.argparse.generic import sys_exit
+
+ from blue_assistant import NAME
+ from blue_assistant.web.crawl import crawl_list_of_urls
+ from blue_assistant.logger import logger
+
+ NAME = module.name(__file__, NAME)
+
+ parser = argparse.ArgumentParser(NAME)
+ parser.add_argument(
+     "task",
+     type=str,
+     help="crawl",
+ )
+ parser.add_argument(
+     "--max_iterations",
+     type=int,
+     default=10,
+ )
+ parser.add_argument(
+     "--verbose",
+     type=int,
+     default=0,
+     help="0 | 1",
+ )
+ parser.add_argument(
+     "--seed_urls",
+     type=str,
+ )
+ parser.add_argument(
+     "--object_name",
+     type=str,
+ )
+ args = parser.parse_args()
+
+ success = False
+ if args.task == "crawl":
+     success = True
+
+     output = crawl_list_of_urls(
+         seed_urls=args.seed_urls.split("+"),
+         object_name=args.object_name,
+         max_iterations=args.max_iterations,
+     )
+
+     if args.verbose == 1:
+         logger.info(f"{len(output)} url(s)")
+         for index, (url, content) in enumerate(output.items()):
+             logger.info(f"#{index: 4} - {url}: {content[:200]}...\n")
+             if index > 10:
+                 logger.info("...")
+                 break
+
+ else:
+     success = None
+
+ sys_exit(logger, NAME, args.task, success)
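The new `blue_assistant.web` entry point above can also be driven directly from Python. The following is a minimal, hedged sketch of invoking the same argparse interface via `subprocess`; the seed URLs mirror the bundled test, and the object name is a placeholder, not part of the package.

```python
# Hedged sketch: invoke the new `blue_assistant.web` CLI defined above.
# Seed URLs are taken from the bundled test; the object name is a placeholder.
import subprocess
import sys

result = subprocess.run(
    [
        sys.executable,
        "-m",
        "blue_assistant.web",
        "crawl",
        "--seed_urls",
        "https://ode.rsl.wustl.edu/+https://oderest.rsl.wustl.edu/",  # "+"-separated, as parsed above
        "--object_name",
        "web-crawl-demo",  # placeholder
        "--max_iterations",
        "3",
        "--verbose",
        "1",
    ],
    check=False,  # sys_exit() sets the exit code; inspect it rather than raising
)
print("exit code:", result.returncode)
```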
@@ -0,0 +1,82 @@
+ from typing import List, Dict, Set
+ import requests
+ from bs4 import BeautifulSoup
+ from urllib.parse import urljoin, urlparse
+
+ from blueness import module
+
+
+ from blue_assistant import NAME
+ from blue_assistant.logger import logger
+
+ NAME = module.name(__file__, NAME)
+
+
+ def fetch_links_and_content(url, base_url, original_path):
+     try:
+         response = requests.get(url, timeout=5)
+         response.raise_for_status()
+     except requests.RequestException:
+         return set(), ""
+
+     soup = BeautifulSoup(response.text, "html.parser")
+     links = set()
+
+     for a_tag in soup.find_all("a", href=True):
+         full_url = urljoin(base_url, a_tag["href"])
+         parsed_url = urlparse(full_url)
+
+         # Ensure link is from the same domain and in the same directory tree
+         if parsed_url.netloc == urlparse(
+             base_url
+         ).netloc and parsed_url.path.startswith(original_path):
+             links.add(full_url)
+
+     plain_text = soup.get_text(separator=" ", strip=True)
+
+     return links, plain_text
+
+
+ def crawl_list_of_urls(
+     seed_urls: List[str],
+     object_name: str,
+     max_iterations: int = 10,
+ ) -> Dict[str, str]:
+     logger.info(
+         "{}.crawl_list_of_urls({}): {} -> {}".format(
+             NAME,
+             len(seed_urls),
+             ", ".join(seed_urls),
+             object_name,
+         )
+     )
+
+     visited: Dict[str, str] = {}
+     queue: Set[str] = set(seed_urls)
+     base_url = urlparse(seed_urls[0]).scheme + "://" + urlparse(seed_urls[0]).netloc
+     original_path = (
+         urlparse(seed_urls[0]).path.rsplit("/", 1)[0] + "/"
+     )  # Get base directory
+
+     iteration: int = 0
+     while queue:
+         current_url = queue.pop()
+         if current_url in visited:
+             continue
+
+         logger.info(f"🔗 {current_url} ...")
+         new_links, content = fetch_links_and_content(
+             current_url, base_url, original_path
+         )
+         visited[current_url] = content
+         queue.update(new_links - visited.keys())
+
+         iteration += 1
+         if max_iterations != -1 and iteration >= max_iterations:
+             logger.warning(f"max iteration of {max_iterations} reached.")
+             break
+
+     if queue:
+         logger.warning(f"queue: {len(queue)}")
+
+     return visited
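The crawler is also importable as a library. Below is a minimal sketch, assuming `blue_assistant` 4.154.1 is installed, based only on the `crawl_list_of_urls` signature shown above; the seed URLs come from the bundled test and the object name is a placeholder.

```python
# Hedged sketch: call crawl_list_of_urls() directly, per the signature above.
# In this version object_name is only logged; the return value maps each
# visited URL to its extracted plain text.
from blue_assistant.web.crawl import crawl_list_of_urls

visited = crawl_list_of_urls(
    seed_urls=[
        "https://ode.rsl.wustl.edu/",
        "https://oderest.rsl.wustl.edu/",
    ],
    object_name="web-crawl-demo",  # placeholder
    max_iterations=3,
)

for url, text in visited.items():
    print(url, "->", text[:80])
```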
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: blue_assistant
- Version: 4.142.1
+ Version: 4.154.1
  Summary: 🧠 An AI Assistant.
  Home-page: https://github.com/kamangir/blue-assistant
  Author: Arash Abadpour (Kamangir)
@@ -30,6 +30,7 @@ Requires-Dist: pylint
  Requires-Dist: pytest
  Requires-Dist: python-dotenv[cli]
  Requires-Dist: tqdm
+ Requires-Dist: beautifulsoup4
  Requires-Dist: networkx
  Requires-Dist: pydot
  Dynamic: author
@@ -44,7 +45,7 @@ Dynamic: summary

  # 🧠 blue-assistant

- 🧠 `@assistant` runs AI scripts; DAGs that combine deterministic and AI operations, such as below,
+ 🧠 `@assistant` runs AI scripts; DAGs that combine deterministic and AI operations.

  | | |
  | --- | --- |
@@ -59,7 +60,10 @@ graph LR
  assistant_script_list["@assistant<br>script<br>list"]
  assistant_script_run["@assistant<br>script<br>run -<br>&lt;script&gt;<br>&lt;object-name&gt;"]

+ assistant_web_crawl["@assistant<br>web<br>crawl -<br>&lt;url-1&gt;+&lt;url-2&gt;+&lt;url-3&gt;<br>&lt;object-name&gt;"]
+
  script["📜 script"]:::folder
+ url["🔗 url"]:::folder
  object["📂 object"]:::folder

  script --> assistant_script_list
@@ -68,6 +72,9 @@ graph LR
  object --> assistant_script_run
  assistant_script_run --> object

+ url --> assistant_web_crawl
+ assistant_web_crawl --> object
+
  classDef folder fill:#999,stroke:#333,stroke-width:2px;
  ```

@@ -76,4 +83,4 @@ graph LR

  [![pylint](https://github.com/kamangir/blue-assistant/actions/workflows/pylint.yml/badge.svg)](https://github.com/kamangir/blue-assistant/actions/workflows/pylint.yml) [![pytest](https://github.com/kamangir/blue-assistant/actions/workflows/pytest.yml/badge.svg)](https://github.com/kamangir/blue-assistant/actions/workflows/pytest.yml) [![bashtest](https://github.com/kamangir/blue-assistant/actions/workflows/bashtest.yml/badge.svg)](https://github.com/kamangir/blue-assistant/actions/workflows/bashtest.yml) [![PyPI version](https://img.shields.io/pypi/v/blue-assistant.svg)](https://pypi.org/project/blue-assistant/) [![PyPI - Downloads](https://img.shields.io/pypi/dd/blue-assistant)](https://pypistats.org/packages/blue-assistant)

- built by 🌀 [`blue_options-4.207.1`](https://github.com/kamangir/awesome-bash-cli), based on 🧠 [`blue_assistant-4.142.1`](https://github.com/kamangir/blue-assistant).
+ built by 🌀 [`blue_options-4.207.1`](https://github.com/kamangir/awesome-bash-cli), based on 🧠 [`blue_assistant-4.154.1`](https://github.com/kamangir/blue-assistant).
@@ -1,5 +1,5 @@
  blue_assistant/README.py,sha256=aB7xoxE3SpI_QbXzJRqh9R-QSWFJSfX3JG-ZY5nfDD4,1503
- blue_assistant/__init__.py,sha256=dR1gRga50p7ATJUZqjToyki00k-bUoT6VR9PDwi-MQ0,311
+ blue_assistant/__init__.py,sha256=sTU1-9vRRZrZ5UxhvlCspgS_j1hjIvM-MWr2wTGXBpo,311
  blue_assistant/__main__.py,sha256=URtal70XZc0--3FDTYWcLtnGOqBYjMX9gt-L1k8hDXI,361
  blue_assistant/config.env,sha256=PUR0GHwzmpm7xNdWHjIv8DzlMAesOAa1NEnYDsIUTs8,211
  blue_assistant/env.py,sha256=e3YPPpMCrKoTcOX8QmzndEwDnM-nC-hFTSUvhnaKpSc,737
@@ -14,30 +14,33 @@ blue_assistant/.abcli/alias.sh,sha256=MCJzXaDnX1QMllWsZJJkDePBYt1nY9ZWa3o4msfGD2
  blue_assistant/.abcli/blue_assistant.sh,sha256=plLTQQerVmfb_SNlOkv0MEaQCF7YdsOHzCq0M3FWT4c,239
  blue_assistant/.abcli/browse.sh,sha256=qZ_RK_WnsjmF-hfWKiMEOnnv22QtZh9HQ0VFJUbP6aI,294
  blue_assistant/.abcli/script.sh,sha256=XIkY4eZyFPKLi_mLoPMbnq76E4K1GG3xxha8VYJC2zI,356
+ blue_assistant/.abcli/web.sh,sha256=1HA3u6umxwZro_CnxD2H9_WABypeSfMU-X2ncFMnU8c,344
  blue_assistant/.abcli/script/list.sh,sha256=2lcVfqDfZP50NszF8o5YCo3TrJKeDc_qo7MTAF3XTGw,131
  blue_assistant/.abcli/script/run.sh,sha256=kSXmyM9NUj2X2orSGyu5t_P5frG-gyumbRq-xqF692c,911
  blue_assistant/.abcli/tests/README.sh,sha256=Qs0YUxVB1OZZ70Nqw2kT1LKXeUnC5-XfQRMfqb8Cbwg,152
- blue_assistant/.abcli/tests/help.sh,sha256=mENB9ZNBEEPmIs9tp8WkQW3dq75_US7EI7_-d4IJQpo,724
+ blue_assistant/.abcli/tests/help.sh,sha256=h7_Z8yIp7-twQugKxdmsn9g77aHRgj8hj1ZA7jY5jns,794
  blue_assistant/.abcli/tests/script_list.sh,sha256=OVOwWO9wR0eeDZTM6uub-eTKbz3eswU3vEUPWXcK-gQ,178
  blue_assistant/.abcli/tests/script_run.sh,sha256=vfmK8sjkMfSQPwCacQppiL6inMbvQP7nci7qLppFSL0,769
  blue_assistant/.abcli/tests/version.sh,sha256=oR2rvYR8zi-0VDPIdPJsmsmWwYaamT8dmNTqUh3-8Gw,154
+ blue_assistant/.abcli/tests/web_crawl.sh,sha256=nQkynpAA0rthCpEmuMAwYrEuBuBBFQ7B1mrplt3VMhY,336
+ blue_assistant/.abcli/web/crawl.sh,sha256=M9YoKKJBKZT2OtmFPvRCSSKpiAq0zyacRAVZ6s7i3FM,698
  blue_assistant/help/__init__.py,sha256=ajz1GSNU9xYVrFEDSz6Xwg7amWQ_yvW75tQa1ZvRIWc,3
  blue_assistant/help/__main__.py,sha256=cVejR7OpoWPg0qLbm-PZf5TuJS27x49jzfiyCLyzEns,241
- blue_assistant/help/functions.py,sha256=9WsmXGMN-R7sqlkGLK0nY90Peg8Gah4rIu75QbLhImo,689
+ blue_assistant/help/functions.py,sha256=6WgIQr4pGZZ_ljNvW0PAnj8QmqoX5hoKgIY2dk2M59U,777
  blue_assistant/help/script.py,sha256=tofv49tIBqoH8ed9hDCFHqzWaXmyyPofvqElk2n976w,1121
+ blue_assistant/help/web.py,sha256=yOtA1IdQLBbhx_0Q1xLNwzdHozXdpALMYtwS5X3889o,575
  blue_assistant/script/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  blue_assistant/script/__main__.py,sha256=eOSOo5yYTPMwIXZ0GkuWkmOcsDWrZtHvClyJizXSk2w,1657
  blue_assistant/script/load.py,sha256=JsDY9T3HTM9vXngvKsA0Mt_erxAnRR_jI62-JhrOBMU,831
- blue_assistant/script/actions/__init__.py,sha256=W0PisTP1H0RRqivzwiaXlN-kvE3m1xlJtn5WGZ-PFtg,625
+ blue_assistant/script/actions/__init__.py,sha256=tiNL9HT1ql-Ut6CPYuj6B29PXQPuf-B-z8dJcksWACY,534
  blue_assistant/script/actions/generate_image.py,sha256=PgvOspDV8n2M7ZmgVOdZzJwQ1tnJNJ6V8gV94P74ksA,1336
  blue_assistant/script/actions/generate_text.py,sha256=nD30y8hoFbYoDT2QsrhKCvUfJUrOubiiLl3OjabtASg,1881
  blue_assistant/script/actions/generic.py,sha256=ET1RaKcUABM8HdIv8JecSpUFasYqmwHacL-5LjF-8NM,355
- blue_assistant/script/actions/skip.py,sha256=G9gbGBbOLiCqcsmEUobdoxkB6wohFYmyi1arQGorZSg,352
  blue_assistant/script/repository/__init__.py,sha256=zVI3cubRqM9H6WgF0EUP9idILVLCumPFmJgKPM7iVlM,604
  blue_assistant/script/repository/base/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  blue_assistant/script/repository/base/classes.py,sha256=-G81it3Beb3fbhE5H6g0BPJwvZKn3qELiBaqRWXP4OY,3717
  blue_assistant/script/repository/blue_amo/__init__.py,sha256=WjL9GIlN-DBnbUMJ8O_FxTp0rcVGlsIS3H9YtXEefTk,76
- blue_assistant/script/repository/blue_amo/classes.py,sha256=vWQ_qMdJ2LmpEjDGFnSxOMZPd34yj-DX4UUTnx5aMtY,2082
+ blue_assistant/script/repository/blue_amo/classes.py,sha256=Rl_UX67AlfhVEhv8tfgdWKItOxI3cUfiGsN12-k8sWI,2085
  blue_assistant/script/repository/blue_amo/actions/__init__.py,sha256=je2S21KvYB3QkbABs71parwUh8MCh2mdlNZfLx_QuDg,430
  blue_assistant/script/repository/blue_amo/actions/slicing_into_frames.py,sha256=79SI7_69FKKLeX_jHlfXnUWGtG4Sj7sBJOfeFwK8I9U,1201
  blue_assistant/script/repository/blue_amo/actions/stitching_the_frames.py,sha256=mbXriat6deEAmuo5Y1ValySnUXDENR7TZS_3nVPlQ6M,3622
@@ -49,8 +52,11 @@ blue_assistant/script/repository/orbital_data_explorer/__init__.py,sha256=yy5FtC
  blue_assistant/script/repository/orbital_data_explorer/classes.py,sha256=i4cVCR6ge8FhipPs-H1HZ_5xcok4mzxrqwRr_hLz_UI,657
  blue_assistant/script/repository/orbital_data_explorer/actions/__init__.py,sha256=RcrFUAwnvhuwNh3gC65w9G26vd_cIa7LV1lFvGFcigk,370
  blue_assistant/script/repository/orbital_data_explorer/actions/researching_the_questions.py,sha256=Wb8sU9UmdRRhHUhNj7_VQ74_Bio20xGajzeGz4HJqZQ,369
- blue_assistant-4.142.1.dist-info/LICENSE,sha256=ogEPNDSH0_dhiv_lT3ifVIdgIzHAqNA_SemnxUfPBJk,7048
- blue_assistant-4.142.1.dist-info/METADATA,sha256=ifohOQ1zpJdsKWyUNHlE3Yke1DbnTbFon2EfpwilPRg,3578
- blue_assistant-4.142.1.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
- blue_assistant-4.142.1.dist-info/top_level.txt,sha256=ud0BkBbdOVze13bNqHuhZj1rwCztaBtDf5ChEYzASOs,15
- blue_assistant-4.142.1.dist-info/RECORD,,
+ blue_assistant/web/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ blue_assistant/web/__main__.py,sha256=wriAODGqb_WRaXxq5KF5ZJp1pOzuTMlVElDafgZo5f0,1258
+ blue_assistant/web/crawl.py,sha256=Sxkxg9b0IsQxL0ecEAcb13JwPOmwkwUie8XUeDChf1c,2249
+ blue_assistant-4.154.1.dist-info/LICENSE,sha256=ogEPNDSH0_dhiv_lT3ifVIdgIzHAqNA_SemnxUfPBJk,7048
+ blue_assistant-4.154.1.dist-info/METADATA,sha256=64601-HQMnP9wWL1BNdavvvJacKJq8NV_stNLRn6LEk,3815
+ blue_assistant-4.154.1.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+ blue_assistant-4.154.1.dist-info/top_level.txt,sha256=ud0BkBbdOVze13bNqHuhZj1rwCztaBtDf5ChEYzASOs,15
+ blue_assistant-4.154.1.dist-info/RECORD,,
@@ -1,15 +0,0 @@
- from blueness import module
-
- from blue_assistant import NAME
- from blue_assistant.script.repository.base.classes import BaseScript
- from blue_assistant.logger import logger
-
- NAME = module.name(__file__, NAME)
-
-
- def skip_action(
-     script: BaseScript,
-     node_name: str,
- ) -> bool:
-     logger.info(f"{NAME}: {script} @ {node_name} ...")
-     return True