inspect-ai 0.3.74-py3-none-any.whl → 0.3.76-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (115)
  1. inspect_ai/__init__.py +3 -2
  2. inspect_ai/_cli/cache.py +1 -1
  3. inspect_ai/_cli/common.py +15 -0
  4. inspect_ai/_cli/eval.py +4 -5
  5. inspect_ai/_cli/log.py +1 -1
  6. inspect_ai/_cli/sandbox.py +1 -1
  7. inspect_ai/_cli/trace.py +1 -1
  8. inspect_ai/_cli/view.py +1 -1
  9. inspect_ai/_display/core/config.py +3 -1
  10. inspect_ai/_eval/eval.py +55 -61
  11. inspect_ai/_eval/evalset.py +64 -154
  12. inspect_ai/_eval/loader.py +27 -54
  13. inspect_ai/_eval/registry.py +4 -15
  14. inspect_ai/_eval/run.py +7 -4
  15. inspect_ai/_eval/task/__init__.py +8 -2
  16. inspect_ai/_eval/task/log.py +9 -1
  17. inspect_ai/_eval/task/resolved.py +35 -0
  18. inspect_ai/_eval/task/run.py +4 -0
  19. inspect_ai/_eval/task/task.py +50 -69
  20. inspect_ai/_eval/task/tasks.py +30 -0
  21. inspect_ai/_util/constants.py +3 -0
  22. inspect_ai/_util/dotenv.py +17 -0
  23. inspect_ai/_util/logger.py +3 -0
  24. inspect_ai/_util/registry.py +43 -2
  25. inspect_ai/_view/server.py +28 -10
  26. inspect_ai/_view/www/dist/assets/index.css +32 -19
  27. inspect_ai/_view/www/dist/assets/index.js +17682 -29989
  28. inspect_ai/_view/www/log-schema.json +79 -9
  29. inspect_ai/_view/www/package.json +2 -2
  30. inspect_ai/_view/www/src/appearance/styles.ts +6 -5
  31. inspect_ai/_view/www/src/components/AnsiDisplay.tsx +2 -2
  32. inspect_ai/_view/www/src/constants.ts +3 -0
  33. inspect_ai/_view/www/src/logfile/remoteZipFile.ts +141 -20
  34. inspect_ai/_view/www/src/plan/PlanDetailView.tsx +2 -1
  35. inspect_ai/_view/www/src/samples/SampleSummaryView.tsx +1 -1
  36. inspect_ai/_view/www/src/samples/chat/tools/tool.ts +7 -5
  37. inspect_ai/_view/www/src/samples/descriptor/score/CategoricalScoreDescriptor.tsx +1 -1
  38. inspect_ai/_view/www/src/samples/descriptor/score/NumericScoreDescriptor.tsx +2 -2
  39. inspect_ai/_view/www/src/samples/error/FlatSampleErrorView.module.css +1 -0
  40. inspect_ai/_view/www/src/samples/error/FlatSampleErrorView.tsx +3 -1
  41. inspect_ai/_view/www/src/samples/sample-tools/SortFilter.tsx +1 -1
  42. inspect_ai/_view/www/src/samples/sample-tools/sample-filter/SampleFilter.tsx +5 -2
  43. inspect_ai/_view/www/src/samples/transcript/ModelEventView.module.css +2 -2
  44. inspect_ai/_view/www/src/samples/transcript/state/StateEventView.tsx +5 -1
  45. inspect_ai/_view/www/src/types/log.d.ts +11 -5
  46. inspect_ai/_view/www/src/workspace/navbar/PrimaryBar.tsx +17 -12
  47. inspect_ai/_view/www/src/workspace/sidebar/SidebarLogEntry.tsx +2 -1
  48. inspect_ai/_view/www/yarn.lock +12 -5
  49. inspect_ai/log/_log.py +10 -1
  50. inspect_ai/log/_recorders/eval.py +27 -8
  51. inspect_ai/log/_recorders/json.py +10 -2
  52. inspect_ai/log/_transcript.py +13 -4
  53. inspect_ai/model/_call_tools.py +13 -4
  54. inspect_ai/model/_chat_message.py +15 -1
  55. inspect_ai/model/_model.py +30 -12
  56. inspect_ai/model/_model_output.py +6 -1
  57. inspect_ai/model/_openai.py +11 -6
  58. inspect_ai/model/_providers/anthropic.py +167 -77
  59. inspect_ai/model/_providers/google.py +6 -2
  60. inspect_ai/model/_providers/none.py +31 -0
  61. inspect_ai/model/_providers/openai.py +11 -8
  62. inspect_ai/model/_providers/providers.py +7 -0
  63. inspect_ai/model/_providers/vertex.py +5 -2
  64. inspect_ai/solver/_bridge/bridge.py +1 -1
  65. inspect_ai/solver/_chain.py +7 -6
  66. inspect_ai/tool/__init__.py +4 -0
  67. inspect_ai/tool/_tool_call.py +5 -2
  68. inspect_ai/tool/_tool_support_helpers.py +200 -0
  69. inspect_ai/tool/_tools/_bash_session.py +119 -0
  70. inspect_ai/tool/_tools/_computer/_computer.py +1 -1
  71. inspect_ai/tool/_tools/_text_editor.py +121 -0
  72. inspect_ai/tool/_tools/_web_browser/_back_compat.py +150 -0
  73. inspect_ai/tool/_tools/_web_browser/_web_browser.py +75 -130
  74. inspect_ai/tool/_tools/_web_search.py +2 -2
  75. inspect_ai/util/_json.py +28 -0
  76. inspect_ai/util/_sandbox/context.py +18 -8
  77. inspect_ai/util/_sandbox/docker/config.py +1 -1
  78. inspect_ai/util/_sandbox/docker/internal.py +3 -3
  79. inspect_ai/util/_sandbox/environment.py +17 -2
  80. {inspect_ai-0.3.74.dist-info → inspect_ai-0.3.76.dist-info}/METADATA +8 -5
  81. {inspect_ai-0.3.74.dist-info → inspect_ai-0.3.76.dist-info}/RECORD +85 -108
  82. {inspect_ai-0.3.74.dist-info → inspect_ai-0.3.76.dist-info}/WHEEL +1 -1
  83. inspect_ai/tool/_tools/_web_browser/_resources/.pylintrc +0 -8
  84. inspect_ai/tool/_tools/_web_browser/_resources/.vscode/launch.json +0 -24
  85. inspect_ai/tool/_tools/_web_browser/_resources/.vscode/settings.json +0 -25
  86. inspect_ai/tool/_tools/_web_browser/_resources/Dockerfile +0 -22
  87. inspect_ai/tool/_tools/_web_browser/_resources/README.md +0 -63
  88. inspect_ai/tool/_tools/_web_browser/_resources/accessibility_tree.py +0 -71
  89. inspect_ai/tool/_tools/_web_browser/_resources/accessibility_tree_node.py +0 -323
  90. inspect_ai/tool/_tools/_web_browser/_resources/cdp/__init__.py +0 -5
  91. inspect_ai/tool/_tools/_web_browser/_resources/cdp/a11y.py +0 -279
  92. inspect_ai/tool/_tools/_web_browser/_resources/cdp/dom.py +0 -9
  93. inspect_ai/tool/_tools/_web_browser/_resources/cdp/dom_snapshot.py +0 -293
  94. inspect_ai/tool/_tools/_web_browser/_resources/cdp/page.py +0 -94
  95. inspect_ai/tool/_tools/_web_browser/_resources/constants.py +0 -2
  96. inspect_ai/tool/_tools/_web_browser/_resources/images/usage_diagram.svg +0 -2
  97. inspect_ai/tool/_tools/_web_browser/_resources/mock_environment.py +0 -45
  98. inspect_ai/tool/_tools/_web_browser/_resources/playwright_browser.py +0 -50
  99. inspect_ai/tool/_tools/_web_browser/_resources/playwright_crawler.py +0 -48
  100. inspect_ai/tool/_tools/_web_browser/_resources/playwright_page_crawler.py +0 -280
  101. inspect_ai/tool/_tools/_web_browser/_resources/pyproject.toml +0 -65
  102. inspect_ai/tool/_tools/_web_browser/_resources/rectangle.py +0 -64
  103. inspect_ai/tool/_tools/_web_browser/_resources/rpc_client_helpers.py +0 -146
  104. inspect_ai/tool/_tools/_web_browser/_resources/scale_factor.py +0 -64
  105. inspect_ai/tool/_tools/_web_browser/_resources/test_accessibility_tree_node.py +0 -180
  106. inspect_ai/tool/_tools/_web_browser/_resources/test_playwright_crawler.py +0 -99
  107. inspect_ai/tool/_tools/_web_browser/_resources/test_rectangle.py +0 -15
  108. inspect_ai/tool/_tools/_web_browser/_resources/test_web_client.py +0 -44
  109. inspect_ai/tool/_tools/_web_browser/_resources/web_browser_rpc_types.py +0 -39
  110. inspect_ai/tool/_tools/_web_browser/_resources/web_client.py +0 -214
  111. inspect_ai/tool/_tools/_web_browser/_resources/web_client_new_session.py +0 -35
  112. inspect_ai/tool/_tools/_web_browser/_resources/web_server.py +0 -192
  113. {inspect_ai-0.3.74.dist-info → inspect_ai-0.3.76.dist-info}/entry_points.txt +0 -0
  114. {inspect_ai-0.3.74.dist-info → inspect_ai-0.3.76.dist-info/licenses}/LICENSE +0 -0
  115. {inspect_ai-0.3.74.dist-info → inspect_ai-0.3.76.dist-info}/top_level.txt +0 -0
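The listing above includes new tool modules (`_bash_session.py`, `_text_editor.py`, `_tool_support_helpers.py`) and new exports in `inspect_ai/tool/__init__.py`. As a hedged sketch (not taken from the diff itself), the new tools would typically be attached to a task roughly as follows; the exact names `bash_session()` and `text_editor()` and their defaults are assumptions based on the files listed, and both tools are assumed to require a sandbox.

# Hypothetical usage sketch, not part of this diff.
from inspect_ai import Task, task
from inspect_ai.dataset import Sample
from inspect_ai.solver import generate, use_tools
from inspect_ai.tool import bash_session, text_editor  # assumed new exports

@task
def tool_demo() -> Task:
    return Task(
        dataset=[Sample(input="List the files in the working directory.")],
        solver=[use_tools(bash_session(), text_editor()), generate()],
        sandbox="docker",  # assumed: these tools run inside a sandbox environment
    )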
inspect_ai/tool/_tools/_web_browser/_resources/web_client.py
@@ -1,214 +0,0 @@
- import argparse
- import sys
- from typing import Literal
-
- from constants import DEFAULT_SESSION_NAME, SERVER_PORT
- from rpc_client_helpers import RPCError, rpc_call
- from web_browser_rpc_types import (
-     ClickArgs,
-     CrawlerBaseArgs,
-     CrawlerResponse,
-     GoArgs,
-     NewSessionArgs,
-     NewSessionResponse,
-     ScrollArgs,
-     TypeOrSubmitArgs,
- )
-
- _SERVER_URL = f"http://localhost:{SERVER_PORT}/"
-
-
- def main() -> None:
-     if len(sys.argv) > 1:
-         command, params = _parse_args()
-         _execute_command(command, params)
-     else:
-         _interactive_mode()
-
-
- def _execute_command(
-     command: str,
-     params: NewSessionArgs
-     | GoArgs
-     | ClickArgs
-     | TypeOrSubmitArgs
-     | ScrollArgs
-     | CrawlerBaseArgs,
- ) -> None:
-     try:
-         if command == "new_session":
-             print(
-                 rpc_call(
-                     _SERVER_URL, command, dict(params), NewSessionResponse
-                 ).session_name
-             )
-         else:
-             response = rpc_call(
-                 _SERVER_URL,
-                 command,
-                 dict(params),
-                 CrawlerResponse,
-             )
-             for key, value in vars(response).items():
-                 if value is not None:
-                     print(key, ": ", value)
-
-     except RPCError as rpc_error:
-         _return_error(f"error: {rpc_error}")
-
-
- def _interactive_mode() -> None:
-     print(
-         "Welcome to the Playwright Crawler interactive mode!\n"
-         "commands:\n"
-         " web_go <URL> - goes to the specified url.\n"
-         " web_click <ELEMENT_ID> - clicks on a given element.\n"
-         " web_scroll <up/down> - scrolls up or down one page.\n"
-         " web_forward - navigates forward a page.\n"
-         " web_back - navigates back a page.\n"
-         " web_refresh - reloads current page (F5).\n"
-         " web_type <ELEMENT_ID> <TEXT> - types the specified text into the input with the specified id.\n"
-         " web_type_submit <ELEMENT_ID> <TEXT> - types the specified text into the input with the specified id and presses ENTER to submit the form."
-     )
-
-     session_created = False
-     while True:
-         try:
-             user_input = input("Enter command: ").strip()
-             if user_input.lower() in {"exit", "quit"}:
-                 break
-             args = user_input.split()
-             sys.argv = ["cli"] + args
-             command, params = _parse_args()
-             print(f"command: {command}, params: {params}")
-             if not session_created:
-                 _execute_command("new_session", NewSessionArgs(headful=True))
-                 session_created = True
-             _execute_command(command, params)
-         except Exception as e:  # pylint: disable=broad-exception-caught
-             print(f"Error: {e}")
-
-
- def _return_error(error: str) -> None:
-     print(error, file=sys.stderr)
-     sys.exit(1)
-
-
- def _create_main_parser() -> argparse.ArgumentParser:
-     parser = argparse.ArgumentParser(prog="web_client")
-     parser.add_argument(
-         "--session_name",
-         type=str,
-         required=False,
-         default=DEFAULT_SESSION_NAME,
-         help="Session name",
-     )
-     return parser
-
-
- def _create_command_parser() -> argparse.ArgumentParser:
-     result = argparse.ArgumentParser(prog="web_client")
-
-     subparsers = result.add_subparsers(dest="command", required=True)
-
-     go_parser = subparsers.add_parser("web_go")
-     go_parser.add_argument("url", type=str, help="URL to navigate to")
-
-     click_parser = subparsers.add_parser("web_click")
-     click_parser.add_argument("element_id", type=str, help="ID of the element to click")
-
-     scroll_parser = subparsers.add_parser("web_scroll")
-     scroll_parser.add_argument(
-         "direction",
-         type=str,
-         choices=["up", "down"],
-         help="Direction to scroll (up or down)",
-     )
-     subparsers.add_parser("web_forward")
-     subparsers.add_parser("web_back")
-     subparsers.add_parser("web_refresh")
-
-     type_parser = subparsers.add_parser("web_type")
-     type_parser.add_argument(
-         "element_id", type=str, help="ID of the element to type into"
-     )
-     type_parser.add_argument("text", type=str, help="The text to type")
-
-     submit_parser = subparsers.add_parser("web_type_submit")
-     submit_parser.add_argument(
-         "element_id",
-         type=str,
-         help="ID of the element to type into and submit",
-     )
-     submit_parser.add_argument("text", type=str, help="The text to type")
-
-     # Add common argument to all subparsers
-     for name, subparser in subparsers.choices.items():
-         if name != "new_session":
-             subparser.add_argument(
-                 "--session_name",
-                 type=str,
-                 nargs="?",
-                 required=False,
-                 default=DEFAULT_SESSION_NAME,
-                 help="Session name",
-             )
-
-     return result
-
-
- main_parser = _create_main_parser()
- command_parser = _create_command_parser()
-
-
- def _parse_args() -> (
-     tuple[Literal["web_go"], GoArgs]
-     | tuple[Literal["web_click"], ClickArgs]
-     | tuple[Literal["web_type", "web_type_submit"], TypeOrSubmitArgs]
-     | tuple[Literal["web_scroll"], ScrollArgs]
-     | tuple[Literal["web_forward", "web_back", "web_refresh"], CrawlerBaseArgs]
- ):
-     # web_client.py supports a very non-standard command line. It has a required named
-     # parameter, --session_name, before the command.
-     # Unfortunately, because we can't break backwards compatibility, we're stuck
-     # with that. To properly parse it, we'll be forced to have a separate parser
-     # for --session_name and merge the results with the normal command parser.
-
-     main_args, remaining_args = main_parser.parse_known_args()
-     session_name = main_args.session_name or DEFAULT_SESSION_NAME
-
-     command_args = command_parser.parse_args(remaining_args)
-     command_args_dict = vars(command_args)
-
-     match command_args.command:
-         case "web_go":
-             return command_args_dict["command"], GoArgs(
-                 url=command_args_dict["url"],
-                 session_name=session_name,
-             )
-         case "web_click":
-             return command_args_dict["command"], ClickArgs(
-                 element_id=command_args_dict["element_id"],
-                 session_name=session_name,
-             )
-         case "web_type" | "web_type_submit":
-             return command_args_dict["command"], TypeOrSubmitArgs(
-                 element_id=command_args_dict["element_id"],
-                 text=command_args_dict["text"],
-                 session_name=session_name,
-             )
-         case "web_scroll":
-             return command_args_dict["command"], ScrollArgs(
-                 direction=command_args_dict["direction"],
-                 session_name=session_name,
-             )
-         case "web_forward" | "web_back" | "web_refresh":
-             return command_args_dict["command"], CrawlerBaseArgs(
-                 session_name=session_name,
-             )
-         case _:
-             raise ValueError("Unexpected command")
-
-
- if __name__ == "__main__":
-     main()
inspect_ai/tool/_tools/_web_browser/_resources/web_client_new_session.py
@@ -1,35 +0,0 @@
- import argparse
- import sys
-
- from constants import SERVER_PORT
- from rpc_client_helpers import RPCError, rpc_call
- from web_browser_rpc_types import NewSessionArgs, NewSessionResponse
-
-
- def main() -> None:
-     parser = argparse.ArgumentParser(prog="web_client_new_session")
-     parser.add_argument(
-         "--headful", action="store_true", help="Run in headful mode for testing"
-     )
-     args_class = parser.parse_args()
-     args_dict = vars(args_class)
-     # TODO: Frick. this does no validation
-     params_typed_dict = NewSessionArgs(headful=args_dict["headful"])
-     params = dict(params_typed_dict)
-
-     try:
-         print(
-             rpc_call(
-                 f"http://localhost:{SERVER_PORT}/",
-                 "new_session",
-                 params,
-                 NewSessionResponse,
-             ).session_name
-         )
-     except RPCError as rpc_error:
-         print(rpc_error, file=sys.stderr)
-         sys.exit(1)
-
-
- if __name__ == "__main__":
-     main()
inspect_ai/tool/_tools/_web_browser/_resources/web_server.py
@@ -1,192 +0,0 @@
- import threading
- from typing import Awaitable, Callable, Unpack
-
- from aiohttp.web import Application, Request, Response, run_app
- from jsonrpcserver import Result, Success, async_dispatch, method
-
- from constants import DEFAULT_SESSION_NAME, SERVER_PORT
- from playwright_browser import PlaywrightBrowser
- from playwright_crawler import PlaywrightCrawler
- from scale_factor import get_screen_scale_factor
- from web_browser_rpc_types import (
-     ClickArgs,
-     CrawlerBaseArgs,
-     CrawlerResponse,
-     GoArgs,
-     NewSessionArgs,
-     NewSessionResponse,
-     ScrollArgs,
-     TypeOrSubmitArgs,
- )
-
-
- class Sessions:
-     def __init__(self) -> None:
-         self._lock = threading.Lock()
-         self._browser: PlaywrightBrowser | None = None
-         self._sessions: dict[str, PlaywrightCrawler] = {}
-
-     async def new_session(self, headful: bool) -> str:
-         with self._lock:
-             if not self._browser:
-                 self._browser = await PlaywrightBrowser.create(headless=not headful)
-             current_count = len(self._sessions)
-             name = (
-                 DEFAULT_SESSION_NAME
-                 if current_count == 0
-                 else f"{DEFAULT_SESSION_NAME}_{current_count}"
-             )
-             crawler = await PlaywrightCrawler.create(
-                 await self._browser.get_new_context(),
-                 device_scale_factor=get_screen_scale_factor() if headful else 1,
-             )
-             self._sessions[name] = crawler
-             return name
-
-     async def get_crawler_for_session(self, name: str) -> PlaywrightCrawler:
-         if not self._sessions:
-             await self.new_session(False)
-         return self._sessions[name]
-
-
- sessions = Sessions()
-
-
- @method
- async def new_session(**kwargs: Unpack[NewSessionArgs]) -> NewSessionResponse:
-     return Success(
-         NewSessionResponse(
-             session_name=await sessions.new_session(kwargs.get("headful", False))
-         ).model_dump()
-     )
-
-
- @method
- async def web_go(**kwargs: Unpack[GoArgs]) -> Result:
-     async def handler(crawler: PlaywrightCrawler):
-         await (await crawler.current_page).go_to_url(kwargs["url"])
-
-     return await _execute_crawler_command(kwargs["session_name"], handler)
-
-
- @method
- async def web_click(**kwargs: Unpack[ClickArgs]) -> Result:
-     async def handler(crawler: PlaywrightCrawler):
-         await (await crawler.current_page).click(kwargs["element_id"])
-
-     return await _execute_crawler_command(kwargs["session_name"], handler)
-
-
- @method
- async def web_scroll(**kwargs: Unpack[ScrollArgs]) -> Result:
-     async def handler(crawler: PlaywrightCrawler):
-         await (await crawler.current_page).scroll(kwargs["direction"])
-
-     return await _execute_crawler_command(kwargs["session_name"], handler)
-
-
- @method
- async def web_forward(**kwargs: Unpack[CrawlerBaseArgs]) -> Result:
-     async def handler(crawler: PlaywrightCrawler):
-         await (await crawler.current_page).forward()
-
-     return await _execute_crawler_command(kwargs["session_name"], handler)
-
-
- @method
- async def web_back(**kwargs: Unpack[CrawlerBaseArgs]) -> Result:
-     async def handler(crawler: PlaywrightCrawler):
-         await (await crawler.current_page).back()
-
-     return await _execute_crawler_command(kwargs["session_name"], handler)
-
-
- @method
- async def web_refresh(**kwargs: Unpack[CrawlerBaseArgs]) -> Result:
-     async def handler(crawler: PlaywrightCrawler):
-         await (await crawler.current_page).refresh()
-
-     return await _execute_crawler_command(kwargs["session_name"], handler)
-
-
- @method
- async def web_type(**kwargs: Unpack[TypeOrSubmitArgs]) -> Result:
-     async def handler(crawler: PlaywrightCrawler):
-         await (await crawler.current_page).type(
-             kwargs["element_id"], _str_from_str_or_list(kwargs["text"])
-         )
-
-     return await _execute_crawler_command(kwargs["session_name"], handler)
-
-
- @method
- async def web_type_submit(**kwargs: Unpack[TypeOrSubmitArgs]) -> Result:
-     async def handler(crawler: PlaywrightCrawler):
-         await (await crawler.current_page).clear(kwargs["element_id"])
-         await (await crawler.current_page).type(
-             kwargs["element_id"], _str_from_str_or_list(kwargs["text"]) + "\n"
-         )
-
-     return await _execute_crawler_command(kwargs["session_name"], handler)
-
-
- async def _execute_crawler_command(
-     session_name: str, handler: Callable[[PlaywrightCrawler], Awaitable[None]]
- ) -> Result:
-     if not sessions:
-         await new_session()
-     try:
-         crawler = await sessions.get_crawler_for_session(session_name)
-         await handler(crawler)
-         await (await crawler.current_page).update()
-
-         # If there's a cookies message click to sort it out.
-         await _auto_click_cookies(crawler)
-
-         return Success(
-             CrawlerResponse(
-                 web_url=(await crawler.current_page).url.split("?")[0],
-                 main_content=(await crawler.current_page).render_main_content(),
-                 web_at=(await crawler.current_page).render_at(),
-                 error=None,
-             ).model_dump()
-         )
-     except Exception as e:  # pylint: disable=broad-exception-caught
-         return Success(
-             CrawlerResponse(
-                 web_url=(await crawler.current_page).url.split("?")[0],
-                 web_at="encountered error",
-                 error=str(e),
-             ).model_dump()
-         )
-
-
- def _str_from_str_or_list(str_or_list: str | list[str]) -> str:
-     return str_or_list if isinstance(str_or_list, str) else " ".join(str_or_list)
-
-
- async def _auto_click_cookies(crawler: PlaywrightCrawler):
-     """Autoclick any cookies popup."""
-     try:
-         accept_node = (await crawler.current_page).lookup_node("<Accept all>")
-     except LookupError:
-         return
-     await (await crawler.current_page).click(accept_node.node_id)
-     await (await crawler.current_page).update()
-
-
- def main():
-     async def handle_request(request: Request) -> Response:
-         return Response(
-             text=await async_dispatch(await request.text()),
-             content_type="application/json",
-         )
-
-     app = Application()
-     app.router.add_post("/", handle_request)
-
-     run_app(app, port=SERVER_PORT)
-
-
- if __name__ == "__main__":
-     main()