blue-assistant 4.221.1__py3-none-any.whl → 4.243.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,3 +3,5 @@
  alias @assistant=blue_assistant
 
  alias @hue=blue_assistant_hue
+
+ alias @web=blue_assistant_web
@@ -23,15 +23,16 @@ function test_blue_assistant_help() {
  "@assistant script list" \
  "@assistant script run" \
  \
- "@assistant web" \
- "@assistant web crawl" \
- \
  "@hue" \
  "@hue create_user" \
  "@hue list" \
  "@hue set" \
  "@hue test" \
  \
+ "@web" \
+ "@web crawl" \
+ "@web fetch" \
+ \
  "blue_assistant"; do
  abcli_eval ,$options \
  abcli_help $module
@@ -3,10 +3,12 @@
  function test_blue_assistant_web_crawl() {
  local options=$1
 
+ local object_name=test_blue_assistant_web_crawl-$(abcli_string_timestamp_short)
+
  abcli_eval ,$options \
  blue_assistant_web_crawl \
  ~upload \
  https://ode.rsl.wustl.edu/+https://oderest.rsl.wustl.edu/ \
- test_blue_assistant_web_crawl-$(abcli_string_timestamp_short) \
+ $object_name \
  --max_iterations 3
  }
@@ -0,0 +1,13 @@
+ #! /usr/bin/env bash
+
+ function test_blue_assistant_web_fetch() {
+ local options=$1
+
+ local object_name=test_blue_assistant_web_fetch-$(abcli_string_timestamp_short)
+
+ abcli_eval ,$options \
+ blue_assistant_web_fetch \
+ ~upload \
+ https://ode.rsl.wustl.edu/ \
+ $object_name
+ }
@@ -0,0 +1,26 @@
+ #! /usr/bin/env bash
+
+ function blue_assistant_web_fetch() {
+ local options=$1
+ local do_dryrun=$(abcli_option_int "$options" dryrun 0)
+ local do_upload=$(abcli_option_int "$options" upload $(abcli_not $do_dryrun))
+
+ local url=${2:-void}
+
+ local object_name=$(abcli_clarify_object $3 web-fetch-$(abcli_string_timestamp_short))
+
+ abcli_log "fetching $url -> $object_name ..."
+
+ abcli_eval dryrun=$do_dryrun \
+ python3 -m blue_assistant.web \
+ fetch \
+ --url $url \
+ --object_name $object_name \
+ "${@:4}"
+ [[ $? -ne 0 ]] && return 1
+
+ [[ "$do_upload" == 1 ]] &&
+ abcli_upload - $object_name
+
+ return 0
+ }
blue_assistant/README.py CHANGED
@@ -21,7 +21,7 @@ items = README.Items(
  },
  {
  "name": "orbital-data-explorer",
- "url": "./blue_assistant/script/repository/orbital_data_explorer/README.md",
+ "url": "./blue_assistant/script/repository/orbital_data_explorer",
  "marquee": "https://github.com/kamangir/assets/blob/main/blue-assistant/orbital-data-explorer.png?raw=true",
  "description": "Access to the [Orbital Data Explorer](https://ode.rsl.wustl.edu/), through AI. ⏸️",
  },
@@ -47,8 +47,13 @@ def build():
  "path": "..",
  },
  {"path": "script/repository/blue_amo"},
- {"path": "script/repository/orbital_data_explorer"},
+ #
+ {"path": "script/repository/orbital_data_explorer/docs/round-1.md"},
+ {"path": "script/repository/orbital_data_explorer/docs"},
+ #
  {"path": "script/repository/hue/docs/round-1.md"},
  {"path": "script/repository/hue/docs"},
+ #
+ {"path": "web/"},
  ]
  )
@@ -4,7 +4,7 @@ ICON = "🧠"
 
  DESCRIPTION = f"{ICON} An AI Assistant."
 
- VERSION = "4.221.1"
+ VERSION = "4.243.1"
 
  REPO_NAME = "blue-assistant"
 
blue_assistant/config.env CHANGED
@@ -5,4 +5,6 @@ BLUE_ASSISTANT_IMAGE_DEFAULT_MODEL=dall-e-3
  BLUE_ASSISTANT_IMAGE_DEFAULT_QUALITY=standard
  BLUE_ASSISTANT_IMAGE_DEFAULT_SIZE=1024x1024
 
- HUE_BRIDGE_IP_ADDRESS=192.168.1.95
+ HUE_BRIDGE_IP_ADDRESS=192.168.1.95
+ HUE_TEST_DEFAULT_INTERVAL=0.01
+ HUE_MAX_SATURATION=254
blue_assistant/env.py CHANGED
@@ -17,3 +17,5 @@ BLUE_ASSISTANT_IMAGE_DEFAULT_SIZE = get_env("BLUE_ASSISTANT_IMAGE_DEFAULT_SIZE")
  HUE_BRIDGE_IP_ADDRESS = get_env("HUE_BRIDGE_IP_ADDRESS")
 
  HUE_BRIDGE_USERNAME = get_env("HUE_BRIDGE_USERNAME")
+ HUE_TEST_DEFAULT_INTERVAL = get_env("HUE_TEST_DEFAULT_INTERVAL", 0.01)
+ HUE_MAX_SATURATION = get_env("HUE_MAX_SATURATION", 254)
@@ -1,4 +1,5 @@
  from typing import List
+ import cv2
 
  from blue_options.terminal import show_usage, xtra
 
@@ -62,7 +63,7 @@ def help_set(
  f"[--username <{env.HUE_BRIDGE_USERNAME}>]",
  "[--light_id <light_id>]",
  "[--hue <65535>]",
- "[--saturation <254>]",
+ f"[--saturation <{env.HUE_MAX_SATURATION}>]",
  "[--verbose 1]",
  ]
 
@@ -88,8 +89,8 @@ def help_test(
  f"[--bridge_ip <{env.HUE_BRIDGE_IP_ADDRESS}>]",
  f"[--username <{env.HUE_BRIDGE_USERNAME}>]",
  "[--light_id all | <light_id>]",
- "[--interval <1>]",
- "[--hue <65535>]",
+ f"[--interval <{env.HUE_TEST_DEFAULT_INTERVAL:.2f}>]",
+ f"[--colormap <{cv2.COLORMAP_HOT}>]",
  "[--verbose 1]",
  ]
 
@@ -101,6 +102,9 @@
  ]
  + args,
  "test hue.",
+ {
+ "colormap: https://docs.opencv.org/4.x/d3/d50/group__imgproc__colormap.html": [],
+ },
  mono=mono,
  )
 
@@ -15,19 +15,38 @@ def help_crawl(
 
  return show_usage(
  [
- "@assistant",
- "web",
+ "@web",
  "crawl",
  f"[{options}]",
  "<url-1>+<url-2>+<url-3>",
  "[-|<object-name>]",
  ]
  + args,
- "crawl the web.",
+ "crawl the urls.",
+ mono=mono,
+ )
+
+
+ def help_fetch(
+ tokens: List[str],
+ mono: bool,
+ ) -> str:
+ options = xtra("dryrun,~upload", mono=mono)
+
+ return show_usage(
+ [
+ "@web",
+ "fetch",
+ f"[{options}]",
+ "<url>",
+ "[-|<object-name>]",
+ ],
+ "fetch <url>.",
  mono=mono,
  )
 
 
  help_functions = {
  "crawl": help_crawl,
+ "fetch": help_fetch,
  }
@@ -35,9 +35,7 @@ class GenericScript(BaseScript):
  node_name=node_name,
  )
 
- def run(
- self,
- ) -> bool:
+ def run(self) -> bool:
  if not super().run():
  return False
 
@@ -50,6 +48,11 @@
  if self.nodes[node_name].get("completed", False):
  continue
 
+ if not self.nodes[node_name].get("runnable", True):
+ logger.info(f"Not runnable, skipped: {node_name}.")
+ self.nodes[node_name]["completed"] = True
+ continue
+
  pending_dependencies = [
  node_name_
  for node_name_ in self.G.successors(node_name)
@@ -1,11 +1,12 @@
  import argparse
+ import cv2
 
  from blueness import module
  from blueness.argparse.generic import sys_exit
 
  from blue_assistant import NAME
  from blue_assistant import env
- from blue_assistant.script.repository.hue.functions import (
+ from blue_assistant.script.repository.hue.api import (
  create_user,
  list_lights,
  set_light_color,
@@ -47,8 +48,8 @@ parser.add_argument(
  parser.add_argument(
  "--saturation",
  type=int,
- default=254,
- help="0 to 254",
+ default=env.HUE_MAX_SATURATION,
+ help=f"0 to {env.HUE_MAX_SATURATION}",
  )
  parser.add_argument(
  "--verbose",
@@ -59,9 +60,15 @@ parser.add_argument(
  parser.add_argument(
  "--interval",
  type=float,
- default=0.1,
+ default=env.HUE_TEST_DEFAULT_INTERVAL,
  help="in seconds",
  )
+ parser.add_argument(
+ "--colormap",
+ type=int,
+ default=cv2.COLORMAP_HOT,
+ help="//docs.opencv.org/4.x/d3/d50/group__imgproc__colormap.html",
+ )
  args = parser.parse_args()
 
  success = False
@@ -92,7 +99,7 @@ elif args.task == "test":
  username=args.username,
  light_id=args.light_id,
  interval=args.interval,
- hue=args.hue,
+ colormap=args.colormap,
  verbose=args.verbose == 1,
  )
  else:
@@ -2,11 +2,14 @@ from typing import Tuple, Dict, List
  import requests
  from time import sleep
  from tqdm import tqdm
+ import cv2
+ import random
 
  from blueness import module
 
  from blue_assistant import NAME
  from blue_assistant import env
+ from blue_assistant.script.repository.hue.colors import get_hue_values
  from blue_assistant.logger import logger
 
  NAME = module.name(__file__, NAME)
@@ -118,9 +121,9 @@ def set_light_color(
 
 
  def test(
- hue: int,
+ colormap: int = cv2.COLORMAP_HOT,
  light_id: str = "all",
- interval: float = 0.1,
+ interval: float = env.HUE_TEST_DEFAULT_INTERVAL,
  bridge_ip: str = env.HUE_BRIDGE_IP_ADDRESS,
  username: str = env.HUE_BRIDGE_USERNAME,
  verbose: bool = False,
@@ -129,12 +132,12 @@ def test(
  light_id = "all"
 
  logger.info(
- "{}.test({}@{}:{}) @ hue=0x{:x}, interval={} s".format(
+ "{}.test({}@{}:{}) @ colormap #{}, interval={} s".format(
  NAME,
  username,
  bridge_ip,
  light_id,
- hue,
+ colormap,
  interval,
  )
  )
@@ -152,20 +155,32 @@
  else:
  list_of_lights = [light_id]
 
- saturation = 0
- while True:
- for light_id_ in tqdm(list_of_lights):
- set_light_color(
- light_id=light_id_,
- hue=hue,
- saturation=saturation,
- bridge_ip=bridge_ip,
- username=username,
- verbose=verbose,
- )
-
- sleep(interval)
-
- saturation = 254 - saturation
+ list_of_hue_values = get_hue_values(
+ colormap=colormap,
+ length=len(list_of_lights),
+ )
+ list_of_hue_values = list_of_hue_values + list_of_hue_values
+
+ hue_offset: int = 0
+ try:
+ while True:
+ logger.info(f"hue_offset={hue_offset}")
+
+ for light_index in tqdm(range(len(list_of_lights))):
+ set_light_color(
+ light_id=list_of_lights[light_index],
+ hue=list_of_hue_values[hue_offset + light_index],
+ saturation=random.randint(1, env.HUE_MAX_SATURATION),
+ bridge_ip=bridge_ip,
+ username=username,
+ verbose=verbose,
+ )
+
+ sleep(interval)
+
+ hue_offset = (hue_offset + 1) % len(list_of_lights)
+
+ except KeyboardInterrupt:
+ logger.info("Ctrl+C detected.")
 
  return True
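
For orientation, a minimal sketch of the rotation logic introduced above: one hue value per light is sampled from a colormap via the get_hue_values helper (added in colors.py below), the list is doubled so the sliding window can wrap, and hue_offset advances by one per pass. The sketch only prints each light's assignment instead of calling set_light_color; the light ids are placeholders, and it assumes the package, numpy, and opencv-python are installed.

    import cv2
    from blue_assistant.script.repository.hue.colors import get_hue_values

    list_of_lights = ["light-1", "light-2", "light-3"]  # placeholder light ids

    # one hue per light, sampled along the colormap, then doubled so the window can wrap
    hue_values = get_hue_values(colormap=cv2.COLORMAP_HOT, length=len(list_of_lights))
    hue_values = hue_values + hue_values

    hue_offset = 0
    for _ in range(4):  # a few passes instead of `while True`
        for light_index in range(len(list_of_lights)):
            print(list_of_lights[light_index], "->", hue_values[hue_offset + light_index])
        hue_offset = (hue_offset + 1) % len(list_of_lights)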
@@ -0,0 +1,21 @@
+ from typing import List
+ import numpy as np
+ import cv2
+
+
+ # https://chatgpt.com/c/67d4c06c-2168-8005-9d8b-4b6c9848957e
+ def get_hue_values(
+ colormap: int = cv2.COLORMAP_HOT,
+ length: int = 255,
+ ) -> List[int]:
+ # Create a gradient from 0 to 255
+ gradient = np.linspace(0, 255, length).astype("uint8")
+ gradient = np.repeat(gradient[np.newaxis, :], 1, axis=0)
+
+ # Apply the colormap
+ color_mapped_image = cv2.applyColorMap(gradient, colormap)
+
+ # Convert BGR to HSV
+ hsv_image = cv2.cvtColor(color_mapped_image, cv2.COLOR_BGR2HSV)
+
+ return (hsv_image[0, :, 0] * (65535 / 179)).astype(int).tolist()
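
For orientation, a minimal sketch of what this new colors helper returns: it samples the given OpenCV colormap along a gradient, converts the sampled colors to HSV, and rescales OpenCV's 0-179 hue channel to the 0-65535 range used by the Hue bridge API. It assumes the package, numpy, and opencv-python are installed.

    import cv2
    from blue_assistant.script.repository.hue.colors import get_hue_values

    # five integers in the 0-65535 Hue range, sampled along COLORMAP_HOT
    print(get_hue_values(colormap=cv2.COLORMAP_HOT, length=5))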
@@ -5,7 +5,7 @@ from tqdm import tqdm
  from openai_commands.text_generation import api
 
  from blue_assistant import NAME
- from blue_assistant.web.crawl import crawl_list_of_urls
+ from blue_assistant.web.functions import crawl_list_of_urls
  from blue_assistant.script.repository.base.classes import BaseScript
  from blue_assistant.logger import logger
 
@@ -2,9 +2,11 @@ import argparse
 
  from blueness import module
  from blueness.argparse.generic import sys_exit
+ from blue_options.logger import log_dict
+ from blue_objects.metadata import post_to_object
 
  from blue_assistant import NAME
- from blue_assistant.web.crawl import crawl_list_of_urls
+ from blue_assistant.web.functions import crawl_list_of_urls, fetch_links_and_text
  from blue_assistant.logger import logger
 
  NAME = module.name(__file__, NAME)
@@ -13,7 +15,7 @@ parser = argparse.ArgumentParser(NAME)
  parser.add_argument(
  "task",
  type=str,
- help="crawl",
+ help="crawl | fetch",
  )
  parser.add_argument(
  "--max_iterations",
@@ -26,6 +28,10 @@ parser.add_argument(
  default=0,
  help="0 | 1",
  )
+ parser.add_argument(
+ "--url",
+ type=str,
+ )
  parser.add_argument(
  "--seed_urls",
  type=str,
@@ -38,22 +44,34 @@ args = parser.parse_args()
 
  success = False
  if args.task == "crawl":
- success = True
-
- output = crawl_list_of_urls(
+ dict_of_urls = crawl_list_of_urls(
  seed_urls=args.seed_urls.split("+"),
  object_name=args.object_name,
  max_iterations=args.max_iterations,
  )
 
  if args.verbose == 1:
- logger.info(f"{len(output)} url(s)")
- for index, (url, content) in enumerate(output.items()):
- logger.info(f"#{index: 4} - {url}: {content[:200]}...\n")
- if index > 10:
- logger.info("...")
- break
+ log_dict(logger, dict_of_urls, "url(s)")
 
+ success = post_to_object(
+ args.object_name,
+ NAME.replace(".", "-"),
+ dict_of_urls,
+ )
+ elif args.task == "fetch":
+ links, text = fetch_links_and_text(
+ url=args.url,
+ verbose=True,
+ )
+
+ success = post_to_object(
+ args.object_name,
+ NAME.replace(".", "-"),
+ {
+ "links": list(links),
+ "text": text,
+ },
+ )
  else:
  success = None
 
@@ -1,18 +1,25 @@
- from typing import List, Dict, Set
+ from typing import List, Dict, Set, Tuple
  import requests
- from bs4 import BeautifulSoup
- from urllib.parse import urljoin, urlparse
+ from bs4 import BeautifulSoup, XMLParsedAsHTMLWarning
+ from urllib.parse import urljoin
+ import re
+ import warnings
 
  from blueness import module
-
+ from blue_options.logger import log_long_text, log_list
 
  from blue_assistant import NAME
  from blue_assistant.logger import logger
 
+ warnings.filterwarnings("ignore", category=XMLParsedAsHTMLWarning)
+
  NAME = module.name(__file__, NAME)
 
 
- def fetch_links_and_content(url, base_url, original_path):
+ def fetch_links_and_text(
+ url: str,
+ verbose: bool = False,
+ ) -> Tuple[List[str], str]:
  try:
  response = requests.get(url, timeout=5)
  response.raise_for_status()
@@ -23,17 +30,27 @@ def fetch_links_and_content(url, base_url, original_path):
  links = set()
 
  for a_tag in soup.find_all("a", href=True):
- full_url = urljoin(base_url, a_tag["href"])
- parsed_url = urlparse(full_url)
+ a_url = urljoin(url, a_tag["href"])
+
+ if a_url.startswith(url):
+ logger.info(f"+= {a_url}")
+ links.add(a_url)
+ continue
 
- # Ensure link is from the same domain and in the same directory tree
- if parsed_url.netloc == urlparse(
- base_url
- ).netloc and parsed_url.path.startswith(original_path):
- links.add(full_url)
+ logger.info(f"ignored: {a_url}")
 
  plain_text = soup.get_text(separator=" ", strip=True)
 
+ # remove non-ASCII characters
+ plain_text = re.sub(r"[^\x20-\x7E]+", "", plain_text)
+ for thing in ["\r", "\n", "\t"]:
+ plain_text = plain_text.replace(thing, " ")
+ plain_text = re.sub(r"\s+", " ", plain_text).strip()
+
+ if verbose:
+ log_list(logger, list(links), "link(s)")
+ log_long_text(logger, plain_text)
+
  return links, plain_text
 
 
@@ -41,6 +58,7 @@ def crawl_list_of_urls(
  seed_urls: List[str],
  object_name: str,
  max_iterations: int = 10,
+ verbose: bool = False,
  ) -> Dict[str, str]:
  logger.info(
  "{}.crawl_list_of_urls({}): {} -> {}".format(
@@ -53,23 +71,20 @@
 
  visited: Dict[str, str] = {}
  queue: Set[str] = set(seed_urls)
- base_url = urlparse(seed_urls[0]).scheme + "://" + urlparse(seed_urls[0]).netloc
- original_path = (
- urlparse(seed_urls[0]).path.rsplit("/", 1)[0] + "/"
- ) # Get base directory
 
  iteration: int = 0
  while queue:
- current_url = queue.pop()
- if current_url in visited:
+ url = queue.pop()
+ if url in visited:
  continue
 
- logger.info(f"🔗 {current_url} ...")
- new_links, content = fetch_links_and_content(
- current_url, base_url, original_path
+ logger.info(f"🔗 {url} ...")
+ url_links, url_text = fetch_links_and_text(
+ url=url,
+ verbose=verbose,
  )
- visited[current_url] = content
- queue.update(new_links - visited.keys())
+ visited[url] = url_text
+ queue.update(url_links - visited.keys())
 
  iteration += 1
  if max_iterations != -1 and iteration >= max_iterations:
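
For orientation, a minimal sketch of the call shapes of the renamed web helpers, assuming the package is installed and the URLs are reachable; the object name below is a placeholder.

    from blue_assistant.web.functions import fetch_links_and_text, crawl_list_of_urls

    # single page: the in-scope links plus the cleaned plain text
    links, text = fetch_links_and_text(url="https://ode.rsl.wustl.edu/", verbose=True)

    # bounded crawl: {url: text} for the pages visited
    visited = crawl_list_of_urls(
        seed_urls=["https://ode.rsl.wustl.edu/", "https://oderest.rsl.wustl.edu/"],
        object_name="web-crawl-example",  # placeholder object name
        max_iterations=3,
    )
    print(len(visited), "page(s) crawled")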
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: blue_assistant
- Version: 4.221.1
+ Version: 4.243.1
  Summary: 🧠 An AI Assistant.
  Home-page: https://github.com/kamangir/blue-assistant
  Author: Arash Abadpour (Kamangir)
@@ -56,10 +56,14 @@ graph LR
  assistant_script_list["@assistant<br>script<br>list"]
  assistant_script_run["@assistant<br>script<br>run -<br>&lt;script&gt;<br>&lt;object-name&gt;"]
 
- assistant_web_crawl["@assistant<br>web<br>crawl -<br>&lt;url-1&gt;+&lt;url-2&gt;+&lt;url-3&gt;<br>&lt;object-name&gt;"]
+ web_crawl["@web<br>crawl -<br>&lt;url-1&gt;+&lt;url-2&gt;<br>&lt;object-name&gt;"]
+
+ web_fetch["@web<br>fetch -<br>&lt;url&gt;<br>&lt;object-name&gt;"]
 
  script["📜 script"]:::folder
  url["🔗 url"]:::folder
+ url2["🔗 url"]:::folder
+ url3["🔗 url"]:::folder
  object["📂 object"]:::folder
 
 
@@ -69,9 +73,13 @@ graph LR
  object --> assistant_script_run
  assistant_script_run --> object
 
- url --> assistant_web_crawl
- assistant_web_crawl --> object
+ url --> web_crawl
+ url2 --> web_crawl
+ web_crawl --> url3
+ web_crawl --> object
 
+ url --> web_fetch
+ web_fetch --> object
 
  bridge_ip["🔗 bridge_ip"]:::folder
  hue_username["🔗 hue_username"]:::folder
@@ -105,11 +113,15 @@ graph LR
  | | |
  | --- | --- |
  | [`hue`](https://raw.githubusercontent.com/kamangir/blue-assistant/main/blue_assistant/script/repository/hue) [![image](https://github.com/kamangir/assets/raw/main/blue-assistant/20250314_143702.jpg?raw=true)](https://raw.githubusercontent.com/kamangir/blue-assistant/main/blue_assistant/script/repository/hue) "send a color command to the Hue LED lights in my apartment." | [`blue-amo`](https://raw.githubusercontent.com/kamangir/blue-assistant/main/blue_assistant/script/repository/blue_amo/README.md) [![image](https://github.com/kamangir/assets/raw/main/blue-amo-2025-02-03-nswnx6/stitching_the_frames-2.png?raw=true)](https://raw.githubusercontent.com/kamangir/blue-assistant/main/blue_assistant/script/repository/blue_amo/README.md) A story developed and visualized, by AI. |
- | [`orbital-data-explorer`](https://raw.githubusercontent.com/kamangir/blue-assistant/main/blue_assistant/script/repository/orbital_data_explorer/README.md) [![image](https://github.com/kamangir/assets/blob/main/blue-assistant/orbital-data-explorer.png?raw=true)](https://raw.githubusercontent.com/kamangir/blue-assistant/main/blue_assistant/script/repository/orbital_data_explorer/README.md) Access to the [Orbital Data Explorer](https://ode.rsl.wustl.edu/), through AI. ⏸️ | |
+ | [`orbital-data-explorer`](https://raw.githubusercontent.com/kamangir/blue-assistant/main/blue_assistant/script/repository/orbital_data_explorer) [![image](https://github.com/kamangir/assets/blob/main/blue-assistant/orbital-data-explorer.png?raw=true)](https://raw.githubusercontent.com/kamangir/blue-assistant/main/blue_assistant/script/repository/orbital_data_explorer) Access to the [Orbital Data Explorer](https://ode.rsl.wustl.edu/), through AI. ⏸️ | |
+
+ ---
+
+ Also home to [`@web`](https://raw.githubusercontent.com/kamangir/blue-assistant/main/blue_assistant/web/)
 
  ---
 
 
  [![pylint](https://github.com/kamangir/blue-assistant/actions/workflows/pylint.yml/badge.svg)](https://github.com/kamangir/blue-assistant/actions/workflows/pylint.yml) [![pytest](https://github.com/kamangir/blue-assistant/actions/workflows/pytest.yml/badge.svg)](https://github.com/kamangir/blue-assistant/actions/workflows/pytest.yml) [![bashtest](https://github.com/kamangir/blue-assistant/actions/workflows/bashtest.yml/badge.svg)](https://github.com/kamangir/blue-assistant/actions/workflows/bashtest.yml) [![PyPI version](https://img.shields.io/pypi/v/blue-assistant.svg)](https://pypi.org/project/blue-assistant/) [![PyPI - Downloads](https://img.shields.io/pypi/dd/blue-assistant)](https://pypistats.org/packages/blue-assistant)
 
- built by 🌀 [`blue_options-4.234.1`](https://github.com/kamangir/awesome-bash-cli), based on 🧠 [`blue_assistant-4.221.1`](https://github.com/kamangir/blue-assistant).
+ built by 🌀 [`blue_options-4.236.1`](https://github.com/kamangir/awesome-bash-cli), based on 🧠 [`blue_assistant-4.243.1`](https://github.com/kamangir/blue-assistant).
@@ -1,8 +1,8 @@
- blue_assistant/README.py,sha256=GEtSZDyrctYxvOWNkRRMbfuJiX6U-ttM8FU3kWFhZkI,1898
- blue_assistant/__init__.py,sha256=Vawh9IzcpQbt-fsBQ-UhBtRilKjbsnDHbJDa2JNIyFg,311
+ blue_assistant/README.py,sha256=HIBukmtee6QW_oX-10mLX-0QrJoKECnEj79AXbqbDOs,2046
+ blue_assistant/__init__.py,sha256=-mT913AX8fMIeCH6D0yVQhxEdzJMpO0SnmIcr1fH_iw,311
  blue_assistant/__main__.py,sha256=URtal70XZc0--3FDTYWcLtnGOqBYjMX9gt-L1k8hDXI,361
- blue_assistant/config.env,sha256=hH4mWFF8fQaBscocDiHLsJ3tstoLSq0MvNRqTTCscww,247
- blue_assistant/env.py,sha256=YU20gy7_qlS3nG_U-ZKp8K2pXz2dxDp__IS0bQDEWnY,633
+ blue_assistant/config.env,sha256=npodyuuhkZUHUv9FnEiQQZkKxFbg8nQb1YpOCURqV3Y,301
+ blue_assistant/env.py,sha256=FTSdJ8-J4jAyI0-h3MBgOweQBWd3YEFIibBHSXpClrY,760
  blue_assistant/functions.py,sha256=ajz1GSNU9xYVrFEDSz6Xwg7amWQ_yvW75tQa1ZvRIWc,3
  blue_assistant/host.py,sha256=SapEe4s9J-7gV3F9JuWEmSfslCeWuJ5f7a-nFObFBrI,208
  blue_assistant/logger.py,sha256=3MfsXwivdRfVPjAjqdQld3iOg9JB6olbACL8t8gIRgI,105
@@ -10,7 +10,7 @@ blue_assistant/sample.env,sha256=rFfaN3lwiVm1CW28Pi0ZPwJPuu7_r3QWL54jIgHK_fY,20
  blue_assistant/urls.py,sha256=59Op4CwgZeo1ZtFouisZxMk07zJNBOqlVAi8tXpsidM,20
  blue_assistant/.abcli/abcli.sh,sha256=56ZicaXpbZ4zuaGPZJTEgfajokNUWTklzl38vENGzz0,198
  blue_assistant/.abcli/actions.sh,sha256=vW1hNMuhjghvqib0775kDzDwqGnqPo3mqLTUkPCd8z4,236
- blue_assistant/.abcli/alias.sh,sha256=UOAS8mrAr-kKx0QQpfPf-nYBKL7lkR8tSF6bWviA09Y,85
+ blue_assistant/.abcli/alias.sh,sha256=C9AzxECPt1FaBmJIt3JPR9BcSzkoZS1w_1gaInvcBdw,116
  blue_assistant/.abcli/blue_assistant.sh,sha256=plLTQQerVmfb_SNlOkv0MEaQCF7YdsOHzCq0M3FWT4c,239
  blue_assistant/.abcli/browse.sh,sha256=qZ_RK_WnsjmF-hfWKiMEOnnv22QtZh9HQ0VFJUbP6aI,294
  blue_assistant/.abcli/hue.sh,sha256=avQT49SlA2ZPDvSdme1vWqDAYtAOHJQI8-3LdqXvBZc,362
@@ -22,18 +22,20 @@ blue_assistant/.abcli/hue/set.sh,sha256=VcADsfbjjbrxIMX9cVVHeK0MH649ZRY29V8YDTgf
  blue_assistant/.abcli/script/list.sh,sha256=2lcVfqDfZP50NszF8o5YCo3TrJKeDc_qo7MTAF3XTGw,131
  blue_assistant/.abcli/script/run.sh,sha256=kSXmyM9NUj2X2orSGyu5t_P5frG-gyumbRq-xqF692c,911
  blue_assistant/.abcli/tests/README.sh,sha256=Qs0YUxVB1OZZ70Nqw2kT1LKXeUnC5-XfQRMfqb8Cbwg,152
- blue_assistant/.abcli/tests/help.sh,sha256=V9eOnJFClPumxX69TtZ8fR6HBdueRQOfqwRUQNPj27M,915
+ blue_assistant/.abcli/tests/help.sh,sha256=7AAZzCEo5vZ1cBAMfj4virDClabaUMdOV-NqXSJQVUM,918
  blue_assistant/.abcli/tests/script_list.sh,sha256=OVOwWO9wR0eeDZTM6uub-eTKbz3eswU3vEUPWXcK-gQ,178
  blue_assistant/.abcli/tests/script_run.sh,sha256=vfmK8sjkMfSQPwCacQppiL6inMbvQP7nci7qLppFSL0,769
  blue_assistant/.abcli/tests/version.sh,sha256=oR2rvYR8zi-0VDPIdPJsmsmWwYaamT8dmNTqUh3-8Gw,154
- blue_assistant/.abcli/tests/web_crawl.sh,sha256=nQkynpAA0rthCpEmuMAwYrEuBuBBFQ7B1mrplt3VMhY,336
+ blue_assistant/.abcli/tests/web_crawl.sh,sha256=sz3LbpidWvjG7kQoWxQBtdBe5yntm14ylAUsgPJWhko,372
+ blue_assistant/.abcli/tests/web_fetch.sh,sha256=C8PFWlmRa9heNdP9yhshriCBKG1uUlps-oxhAM70AZI,312
  blue_assistant/.abcli/web/crawl.sh,sha256=M9YoKKJBKZT2OtmFPvRCSSKpiAq0zyacRAVZ6s7i3FM,698
+ blue_assistant/.abcli/web/fetch.sh,sha256=9SggFZTtpff-gnCd987zP6UqzG4So5D4px2jMg2Vicc,674
  blue_assistant/help/__init__.py,sha256=ajz1GSNU9xYVrFEDSz6Xwg7amWQ_yvW75tQa1ZvRIWc,3
  blue_assistant/help/__main__.py,sha256=cVejR7OpoWPg0qLbm-PZf5TuJS27x49jzfiyCLyzEns,241
  blue_assistant/help/functions.py,sha256=O85zVEMtnm32O7KB6W6uQRoFXnE_4dW5pwYZtMakYDg,865
- blue_assistant/help/hue.py,sha256=1CfhpUF3MjXkNacmsII3UD0inMZ8HNjw9-4lw3Oo0PU,2117
+ blue_assistant/help/hue.py,sha256=ZElPG24ekiS7eIGLVrP2gB_womlGUuwln2cded4Li-c,2319
  blue_assistant/help/script.py,sha256=tofv49tIBqoH8ed9hDCFHqzWaXmyyPofvqElk2n976w,1121
- blue_assistant/help/web.py,sha256=yOtA1IdQLBbhx_0Q1xLNwzdHozXdpALMYtwS5X3889o,575
+ blue_assistant/help/web.py,sha256=-vi1P3p6zKQFbQzg9qhUMsjtZBLTjdJ0PZh7RokE9Wc,908
  blue_assistant/script/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  blue_assistant/script/__main__.py,sha256=eOSOo5yYTPMwIXZ0GkuWkmOcsDWrZtHvClyJizXSk2w,1657
  blue_assistant/script/load.py,sha256=JsDY9T3HTM9vXngvKsA0Mt_erxAnRR_jI62-JhrOBMU,831
@@ -50,20 +52,21 @@ blue_assistant/script/repository/blue_amo/actions/__init__.py,sha256=je2S21KvYB3
  blue_assistant/script/repository/blue_amo/actions/slicing_into_frames.py,sha256=79SI7_69FKKLeX_jHlfXnUWGtG4Sj7sBJOfeFwK8I9U,1201
  blue_assistant/script/repository/blue_amo/actions/stitching_the_frames.py,sha256=mbXriat6deEAmuo5Y1ValySnUXDENR7TZS_3nVPlQ6M,3622
  blue_assistant/script/repository/generic/__init__.py,sha256=kLffGsQMQAFJTw6IZBE5eBxvshP1x9wwHHR4hsDJblo,75
- blue_assistant/script/repository/generic/classes.py,sha256=LqxQGRbakikvGwdQB8jjqlpsjt-PGKC9BcHMSO9wN-E,2318
+ blue_assistant/script/repository/generic/classes.py,sha256=0_AE8TcSvtew5oM9BuJGJoi8jzRqImCmAwSpbn4dHuQ,2535
  blue_assistant/script/repository/hue/__init__.py,sha256=WjL9GIlN-DBnbUMJ8O_FxTp0rcVGlsIS3H9YtXEefTk,76
- blue_assistant/script/repository/hue/__main__.py,sha256=8H-NyzQKvbJK9EuiPKdjT3gtLnRGKwbXKwVStYc5y_8,2083
+ blue_assistant/script/repository/hue/__main__.py,sha256=jaesrONQsrpVdg8A7NzzT8xpsdXs5gmrywOTE_TWD6c,2321
+ blue_assistant/script/repository/hue/api.py,sha256=C3KzT_MG868gsznUXpwEbUleBjnJObWzZgzvN6wi3uo,4774
  blue_assistant/script/repository/hue/classes.py,sha256=YhifmcuylnZuI0_BjBPmwrSbsO-BOHDHNJ0pSLIExiE,188
- blue_assistant/script/repository/hue/functions.py,sha256=paxyJ5MInXUuwpEBdukj079FEcFjFlfxitDHDq7ZOz4,4133
+ blue_assistant/script/repository/hue/colors.py,sha256=rUdtCroNAnzm1zUuVp8eVhvfIie1f7sd208ypsFAJ_w,625
  blue_assistant/script/repository/orbital_data_explorer/__init__.py,sha256=yy5FtCeHlr9dRfqxw4QYWr7_yRjnQpwVyuAY2vLrh4Q,110
  blue_assistant/script/repository/orbital_data_explorer/classes.py,sha256=NEJeud6UPXarnAIEvAX_ZKFoRnyK1tiEIORSsrixLxQ,1024
  blue_assistant/script/repository/orbital_data_explorer/actions/__init__.py,sha256=RcrFUAwnvhuwNh3gC65w9G26vd_cIa7LV1lFvGFcigk,370
- blue_assistant/script/repository/orbital_data_explorer/actions/researching_the_questions.py,sha256=MDhncDBCLH-T7nfHjlfiN_nKv6gsY4YmiNWguVvKq_g,1100
+ blue_assistant/script/repository/orbital_data_explorer/actions/researching_the_questions.py,sha256=rBJtrttp1LKFVkG5Lvntip6Dl3uO7CtlQRyd5PddVEU,1104
  blue_assistant/web/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- blue_assistant/web/__main__.py,sha256=wriAODGqb_WRaXxq5KF5ZJp1pOzuTMlVElDafgZo5f0,1258
- blue_assistant/web/crawl.py,sha256=Sxkxg9b0IsQxL0ecEAcb13JwPOmwkwUie8XUeDChf1c,2249
- blue_assistant-4.221.1.dist-info/LICENSE,sha256=ogEPNDSH0_dhiv_lT3ifVIdgIzHAqNA_SemnxUfPBJk,7048
- blue_assistant-4.221.1.dist-info/METADATA,sha256=XGsZlBMRwKosIrLNSsr6viAJ66grSUGOcQj6uYerF6o,4826
- blue_assistant-4.221.1.dist-info/WHEEL,sha256=52BFRY2Up02UkjOa29eZOS2VxUrpPORXg1pkohGGUS8,91
- blue_assistant-4.221.1.dist-info/top_level.txt,sha256=ud0BkBbdOVze13bNqHuhZj1rwCztaBtDf5ChEYzASOs,15
- blue_assistant-4.221.1.dist-info/RECORD,,
+ blue_assistant/web/__main__.py,sha256=f6uPh7LnvIuVtrC9kZGbCr8nDFEJju-GcUgdRVsQvQE,1613
+ blue_assistant/web/functions.py,sha256=AiezAvbw0eHG9XaYnXw8KQ_OjCfDvVPzsZxrzxwyPyg,2483
+ blue_assistant-4.243.1.dist-info/LICENSE,sha256=ogEPNDSH0_dhiv_lT3ifVIdgIzHAqNA_SemnxUfPBJk,7048
+ blue_assistant-4.243.1.dist-info/METADATA,sha256=VCI_zXnhWT1wAv9X-AQxh5fqgMXkoO9jh_L6MfHdXcY,5086
+ blue_assistant-4.243.1.dist-info/WHEEL,sha256=52BFRY2Up02UkjOa29eZOS2VxUrpPORXg1pkohGGUS8,91
+ blue_assistant-4.243.1.dist-info/top_level.txt,sha256=ud0BkBbdOVze13bNqHuhZj1rwCztaBtDf5ChEYzASOs,15
+ blue_assistant-4.243.1.dist-info/RECORD,,