workers-runtime-sdk 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- workers_runtime_sdk-0.1.0/.gitattributes +1 -0
- workers_runtime_sdk-0.1.0/.github/workflows/ci.yml +30 -0
- workers_runtime_sdk-0.1.0/.gitignore +1 -0
- workers_runtime_sdk-0.1.0/.pre-commit-config.yaml +35 -0
- workers_runtime_sdk-0.1.0/PKG-INFO +5 -0
- workers_runtime_sdk-0.1.0/commitlint.yml +15 -0
- workers_runtime_sdk-0.1.0/pyproject.toml +20 -0
- workers_runtime_sdk-0.1.0/release.yml +139 -0
- workers_runtime_sdk-0.1.0/src/asgi.py +306 -0
- workers_runtime_sdk-0.1.0/src/workers/__init__.py +51 -0
- workers_runtime_sdk-0.1.0/src/workers/_workers.py +1194 -0
- workers_runtime_sdk-0.1.0/src/workers/py.typed +0 -0
- workers_runtime_sdk-0.1.0/src/workers/workflows.py +12 -0
- workers_runtime_sdk-0.1.0/test/introspection.py +88 -0
- workers_runtime_sdk-0.1.0/test/test_introspection.py +57 -0
- workers_runtime_sdk-0.1.0/uv.lock +238 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
uv.lock binary
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
name: CI
|
|
2
|
+
|
|
3
|
+
on:
|
|
4
|
+
push:
|
|
5
|
+
branches: [main]
|
|
6
|
+
pull_request:
|
|
7
|
+
branches: [main]
|
|
8
|
+
|
|
9
|
+
jobs:
|
|
10
|
+
test:
|
|
11
|
+
runs-on: ubuntu-latest
|
|
12
|
+
|
|
13
|
+
steps:
|
|
14
|
+
- uses: actions/checkout@v4
|
|
15
|
+
|
|
16
|
+
- name: Install uv
|
|
17
|
+
uses: astral-sh/setup-uv@v4
|
|
18
|
+
with:
|
|
19
|
+
enable-cache: true
|
|
20
|
+
|
|
21
|
+
- name: Set up Python
|
|
22
|
+
uses: actions/setup-python@v5
|
|
23
|
+
with:
|
|
24
|
+
python-version: '3.12'
|
|
25
|
+
|
|
26
|
+
- name: Install dependencies
|
|
27
|
+
run: uv sync --dev
|
|
28
|
+
|
|
29
|
+
- name: Run tests
|
|
30
|
+
run: uv run pytest
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
__pycache__
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
default_language_version:
|
|
2
|
+
python: "3.13"
|
|
3
|
+
repos:
|
|
4
|
+
- repo: https://github.com/pre-commit/pre-commit-hooks
|
|
5
|
+
rev: "v5.0.0"
|
|
6
|
+
hooks:
|
|
7
|
+
- id: check-added-large-files
|
|
8
|
+
- id: check-case-conflict
|
|
9
|
+
- id: check-merge-conflict
|
|
10
|
+
- id: check-symlinks
|
|
11
|
+
- id: check-yaml
|
|
12
|
+
- id: debug-statements
|
|
13
|
+
- id: end-of-file-fixer
|
|
14
|
+
- id: mixed-line-ending
|
|
15
|
+
- id: trailing-whitespace
|
|
16
|
+
- id: requirements-txt-fixer
|
|
17
|
+
|
|
18
|
+
- repo: https://github.com/astral-sh/ruff-pre-commit
|
|
19
|
+
rev: "v0.9.1"
|
|
20
|
+
hooks:
|
|
21
|
+
- id: ruff
|
|
22
|
+
args: [--fix]
|
|
23
|
+
- id: ruff-format
|
|
24
|
+
|
|
25
|
+
- repo: https://github.com/shellcheck-py/shellcheck-py
|
|
26
|
+
rev: "v0.10.0.1"
|
|
27
|
+
hooks:
|
|
28
|
+
- id: shellcheck
|
|
29
|
+
|
|
30
|
+
- repo: https://github.com/codespell-project/codespell
|
|
31
|
+
rev: "v2.3.0"
|
|
32
|
+
hooks:
|
|
33
|
+
- id: codespell
|
|
34
|
+
ci:
|
|
35
|
+
autoupdate_schedule: "quarterly"
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
name: Lint Commit Messages
|
|
2
|
+
on:
|
|
3
|
+
pull_request:
|
|
4
|
+
branches: [ main ]
|
|
5
|
+
|
|
6
|
+
permissions:
|
|
7
|
+
contents: read
|
|
8
|
+
pull-requests: read
|
|
9
|
+
|
|
10
|
+
jobs:
|
|
11
|
+
commitlint:
|
|
12
|
+
runs-on: ubuntu-latest
|
|
13
|
+
steps:
|
|
14
|
+
- uses: actions/checkout@v4
|
|
15
|
+
- uses: wagoid/commitlint-github-action@v6
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["hatchling"]
|
|
3
|
+
build-backend = "hatchling.build"
|
|
4
|
+
|
|
5
|
+
[project]
|
|
6
|
+
name = "workers-runtime-sdk"
|
|
7
|
+
version = "0.1.0"
|
|
8
|
+
description = "Python runtime SDK for Cloudflare Workers"
|
|
9
|
+
requires-python = ">=3.12"
|
|
10
|
+
|
|
11
|
+
[tool.hatch.build.targets.wheel]
|
|
12
|
+
sources = ["src"]
|
|
13
|
+
include = ["src/*"]
|
|
14
|
+
|
|
15
|
+
[dependency-groups]
|
|
16
|
+
dev = [
|
|
17
|
+
"build",
|
|
18
|
+
"pre-commit>=4.3.0",
|
|
19
|
+
"pytest>=8.4.2",
|
|
20
|
+
]
|
|
@@ -0,0 +1,139 @@
|
|
|
1
|
+
# Based on https://python-semantic-release.readthedocs.io/en/latest/configuration/automatic-releases/github-actions.html#examples
|
|
2
|
+
|
|
3
|
+
name: Continuous Delivery
|
|
4
|
+
|
|
5
|
+
on:
|
|
6
|
+
push:
|
|
7
|
+
branches:
|
|
8
|
+
- main
|
|
9
|
+
|
|
10
|
+
# default: least privileged permissions across all jobs
|
|
11
|
+
permissions:
|
|
12
|
+
contents: read
|
|
13
|
+
|
|
14
|
+
jobs:
|
|
15
|
+
release:
|
|
16
|
+
runs-on: ubuntu-latest
|
|
17
|
+
concurrency:
|
|
18
|
+
group: ${{ github.workflow }}-release-${{ github.ref_name }}
|
|
19
|
+
cancel-in-progress: false
|
|
20
|
+
|
|
21
|
+
outputs:
|
|
22
|
+
released: ${{ steps.release.outputs.released }}
|
|
23
|
+
|
|
24
|
+
permissions:
|
|
25
|
+
contents: write
|
|
26
|
+
|
|
27
|
+
steps:
|
|
28
|
+
# Note: We checkout the repository at the branch that triggered the workflow
|
|
29
|
+
# with the entire history to ensure to match PSR's release branch detection
|
|
30
|
+
# and history evaluation.
|
|
31
|
+
# However, we forcefully reset the branch to the workflow sha because it is
|
|
32
|
+
# possible that the branch was updated while the workflow was running. This
|
|
33
|
+
# prevents accidentally releasing un-evaluated changes.
|
|
34
|
+
- name: Setup | Checkout Repository on Release Branch
|
|
35
|
+
uses: actions/checkout@v4
|
|
36
|
+
with:
|
|
37
|
+
token: ${{ secrets.DEVPROD_PAT }}
|
|
38
|
+
ref: main
|
|
39
|
+
fetch-depth: 0
|
|
40
|
+
|
|
41
|
+
- name: Setup | Force release branch to be at workflow sha
|
|
42
|
+
run: |
|
|
43
|
+
git reset --hard ${{ github.sha }}
|
|
44
|
+
|
|
45
|
+
- name: Evaluate | Verify upstream has NOT changed
|
|
46
|
+
# Last chance to abort before causing an error as another PR/push was applied to
|
|
47
|
+
# the upstream branch while this workflow was running. This is important
|
|
48
|
+
# because we are committing a version change (--commit). You may omit this step
|
|
49
|
+
# if you have 'commit: false' in your configuration.
|
|
50
|
+
#
|
|
51
|
+
# You may consider moving this to a repo script and call it from this step instead
|
|
52
|
+
# of writing it in-line.
|
|
53
|
+
shell: bash
|
|
54
|
+
run: |
|
|
55
|
+
set +o pipefail
|
|
56
|
+
|
|
57
|
+
UPSTREAM_BRANCH_NAME="$(git status -sb | head -n 1 | cut -d' ' -f2 | grep -E '\.{3}' | cut -d'.' -f4)"
|
|
58
|
+
printf '%s\n' "Upstream branch name: $UPSTREAM_BRANCH_NAME"
|
|
59
|
+
|
|
60
|
+
set -o pipefail
|
|
61
|
+
|
|
62
|
+
if [ -z "$UPSTREAM_BRANCH_NAME" ]; then
|
|
63
|
+
printf >&2 '%s\n' "::error::Unable to determine upstream branch name!"
|
|
64
|
+
exit 1
|
|
65
|
+
fi
|
|
66
|
+
|
|
67
|
+
git fetch "${UPSTREAM_BRANCH_NAME%%/*}"
|
|
68
|
+
|
|
69
|
+
if ! UPSTREAM_SHA="$(git rev-parse "$UPSTREAM_BRANCH_NAME")"; then
|
|
70
|
+
printf >&2 '%s\n' "::error::Unable to determine upstream branch sha!"
|
|
71
|
+
exit 1
|
|
72
|
+
fi
|
|
73
|
+
|
|
74
|
+
HEAD_SHA="$(git rev-parse HEAD)"
|
|
75
|
+
|
|
76
|
+
if [ "$HEAD_SHA" != "$UPSTREAM_SHA" ]; then
|
|
77
|
+
printf >&2 '%s\n' "[HEAD SHA] $HEAD_SHA != $UPSTREAM_SHA [UPSTREAM SHA]"
|
|
78
|
+
printf >&2 '%s\n' "::error::Upstream has changed, aborting release..."
|
|
79
|
+
exit 1
|
|
80
|
+
fi
|
|
81
|
+
|
|
82
|
+
printf '%s\n' "Verified upstream branch has not changed, continuing with release..."
|
|
83
|
+
|
|
84
|
+
- name: Install uv
|
|
85
|
+
uses: astral-sh/setup-uv@v4
|
|
86
|
+
with:
|
|
87
|
+
enable-cache: true
|
|
88
|
+
|
|
89
|
+
- name: Action | Semantic Version Release
|
|
90
|
+
id: release
|
|
91
|
+
uses: python-semantic-release/python-semantic-release@v10.2.0
|
|
92
|
+
with:
|
|
93
|
+
github_token: ${{ secrets.DEVPROD_PAT }}
|
|
94
|
+
git_committer_name: "Workers DevProd"
|
|
95
|
+
git_committer_email: "workers-devprod@cloudflare.com"
|
|
96
|
+
|
|
97
|
+
- name: Publish | Upload to GitHub Release Assets
|
|
98
|
+
uses: python-semantic-release/publish-action@v10.2.0
|
|
99
|
+
if: steps.release.outputs.released == 'true'
|
|
100
|
+
with:
|
|
101
|
+
github_token: ${{ secrets.DEVPROD_PAT }}
|
|
102
|
+
tag: ${{ steps.release.outputs.tag }}
|
|
103
|
+
|
|
104
|
+
- name: Upload | Distribution Artifacts
|
|
105
|
+
uses: actions/upload-artifact@v4
|
|
106
|
+
if: steps.release.outputs.released == 'true'
|
|
107
|
+
with:
|
|
108
|
+
name: distribution-artifacts
|
|
109
|
+
path: dist
|
|
110
|
+
if-no-files-found: error
|
|
111
|
+
|
|
112
|
+
deploy:
|
|
113
|
+
# 1. Separate out the deploy step from the publish step to run each step at
|
|
114
|
+
# the least amount of token privilege
|
|
115
|
+
# 2. Also, deployments can fail, and its better to have a separate job if you need to retry
|
|
116
|
+
# and it won't require reversing the release.
|
|
117
|
+
runs-on: ubuntu-latest
|
|
118
|
+
needs: release
|
|
119
|
+
if: ${{ needs.release.outputs.released == 'true' }}
|
|
120
|
+
|
|
121
|
+
permissions:
|
|
122
|
+
contents: read
|
|
123
|
+
id-token: write
|
|
124
|
+
|
|
125
|
+
steps:
|
|
126
|
+
- name: Setup | Download Build Artifacts
|
|
127
|
+
uses: actions/download-artifact@v4
|
|
128
|
+
id: artifact-download
|
|
129
|
+
with:
|
|
130
|
+
name: distribution-artifacts
|
|
131
|
+
path: dist
|
|
132
|
+
|
|
133
|
+
# see https://docs.pypi.org/trusted-publishers/
|
|
134
|
+
- name: Publish package distributions to PyPI
|
|
135
|
+
uses: pypa/gh-action-pypi-publish@release/v1
|
|
136
|
+
with:
|
|
137
|
+
packages-dir: dist
|
|
138
|
+
print-hash: true
|
|
139
|
+
verbose: true
|
|
@@ -0,0 +1,306 @@
|
|
|
1
|
+
from asyncio import Event, Future, Queue, create_task, ensure_future, sleep
|
|
2
|
+
from collections.abc import Awaitable
|
|
3
|
+
from contextlib import contextmanager
|
|
4
|
+
from inspect import isawaitable
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
import js
|
|
8
|
+
from workers import Context, Request
|
|
9
|
+
|
|
10
|
+
ASGI = {"spec_version": "2.0", "version": "3.0"}
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
background_tasks = set()
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def run_in_background(coro: Awaitable[Any]) -> None:
    """Schedule *coro* on the running event loop without awaiting it.

    The resulting task is parked in the module-level ``background_tasks`` set
    so it holds a strong reference for its whole lifetime (asyncio only keeps
    weak references to tasks); the done-callback drops it again on completion.
    """
    task = ensure_future(coro)
    background_tasks.add(task)
    task.add_done_callback(background_tasks.discard)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
@contextmanager
def acquire_js_buffer(pybuffer):
    """Expose a Python buffer to JavaScript for the duration of a ``with`` block.

    Proxies *pybuffer* into JS, yields the JS-side ``data`` view, and releases
    the underlying buffer when the block exits (even on error). The proxy
    itself is destroyed immediately; only the buffer view outlives it.
    """
    from pyodide.ffi import create_proxy

    proxy = create_proxy(pybuffer)
    js_buffer = proxy.getBuffer()
    proxy.destroy()
    try:
        yield js_buffer.data
    finally:
        js_buffer.release()
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def request_to_scope(req, env, ws=False):
    """Translate a Workers/JS request into an ASGI connection scope dict.

    Header names are lower-cased and both names and values are encoded to
    bytes, as the ASGI spec requires — e.g. FastAPI's
    ``request.headers.get("content-type")`` errors if headers are not byte
    pairs. Pass ``ws=True`` to produce a "websocket" scope instead of "http".
    The Workers ``env`` object is stashed under the non-standard "env" key.
    """
    from js import URL

    # Accept both the Python-level workers.Request wrapper (mapping-style
    # .headers.items()) and a raw JS request whose headers iterate as pairs.
    if isinstance(req, Request):
        raw_headers = req.headers.items()
    else:
        raw_headers = req.headers

    header_pairs = [
        (name.lower().encode(), value.encode()) for name, value in raw_headers
    ]

    parsed = URL.new(req.url)
    # JS URL.protocol always ends in ":"; URL.search is "" or starts with "?".
    assert parsed.protocol[-1] == ":"
    assert "?".startswith(parsed.search[0:1])

    return {
        "asgi": ASGI,
        "headers": header_pairs,
        "http_version": "1.1",
        "method": req.method,
        "scheme": parsed.protocol[:-1],
        "path": parsed.pathname,
        "query_string": parsed.search[1:].encode(),
        "type": "websocket" if ws else "http",
        "env": env,
    }
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
async def start_application(app):
    """Run the ASGI lifespan startup for *app* and return a shutdown hook.

    Sends the ``lifespan.startup`` message, waits until the application
    acknowledges with ``lifespan.startup.complete``, then returns an async
    ``shutdown`` callable for the caller to invoke after the request is done.
    """
    # Resolved when shutdown() is called.
    # NOTE(review): this future is never routed back through receive() — the
    # second receive() call blocks forever on a fresh pending Future, so the
    # app never observes a "lifespan.shutdown" message. Confirm intentional.
    shutdown_future = Future()

    async def shutdown():
        shutdown_future.set_result(None)
        # Yield once so anything waiting on the future gets a chance to run.
        await sleep(0)

    # First receive() yields the startup message; the second yields a Future
    # that never resolves, parking the app's lifespan loop indefinitely.
    it = iter([{"type": "lifespan.startup"}, Future()])

    async def receive():
        res = next(it)
        if isawaitable(res):
            # Second call onwards: block forever on the pending Future.
            await res
        return res

    # Resolved by send() once the app reports startup completed.
    ready = Future()

    async def send(got):
        if got["type"] == "lifespan.startup.complete":
            ready.set_result(None)
            return
        if got["type"] == "lifespan.shutdown.complete":
            return
        raise RuntimeError(f"Unexpected lifespan event {got['type']}")

    # Drive the app's lifespan coroutine in the background; we only wait for
    # the startup acknowledgement here, not for the coroutine itself.
    run_in_background(
        app(
            {
                "asgi": ASGI,
                "state": {},
                "type": "lifespan",
            },
            receive,
            send,
        )
    )
    await ready
    return shutdown
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
async def process_request(
    app: Any, req: "Request | js.Request", env: Any, ctx: Context
) -> js.Response:
    """Run one HTTP request through the ASGI *app* and return a JS Response.

    The request body (if any) is drained into an ASGI receive queue up front.
    Responses with a ``text/event-stream`` content type are streamed through a
    TransformStream and returned as soon as ``http.response.start`` arrives,
    with the app kept alive via ``ctx.waitUntil``; all other responses are
    buffered from the first ``http.response.body`` message.

    NOTE(review): for non-SSE responses only the first body chunk is used —
    a second ``http.response.body`` with ``more_body=True`` would call
    ``result.set_result`` again and raise InvalidStateError. Confirm whether
    chunked non-SSE responses need support.

    Raises RuntimeError if the app produces no response, or if an SSE
    response is generated without *ctx*.
    """
    from js import Object, Response, TransformStream

    from pyodide.ffi import create_proxy

    status = None
    headers = None
    result = Future()  # resolves to the js.Response we hand back
    is_sse = False
    finished_response = Event()

    # Drain the incoming body into ASGI "http.request" messages.
    receive_queue = Queue()
    if req.body:
        async for data in req.body:
            await receive_queue.put(
                {
                    "body": data.to_bytes(),
                    "more_body": True,
                    "type": "http.request",
                }
            )
    await receive_queue.put({"body": b"", "more_body": False, "type": "http.request"})

    async def receive():
        # Serve queued body chunks; once exhausted, block until the response
        # is finished and then report the client as disconnected.
        message = None
        if not receive_queue.empty():
            message = await receive_queue.get()
        else:
            await finished_response.wait()
            message = {"type": "http.disconnect"}
        return message

    # Create a transform stream for handling streaming (SSE) responses.
    transform_stream = TransformStream.new()
    readable = transform_stream.readable
    writable = transform_stream.writable
    writer = writable.getWriter()

    async def send(got):
        nonlocal status
        nonlocal headers
        nonlocal is_sse

        if got["type"] == "http.response.start":
            status = got["status"]
            # ASGI headers are byte pairs; JS needs strings.
            headers = [(k.decode(), v.decode()) for k, v in got["headers"]]
            # Check if this is a server-sent events response.
            for k, v in headers:
                if k.lower() == "content-type" and v.lower().startswith(
                    "text/event-stream"
                ):
                    is_sse = True
                    break
            if is_sse:
                # For SSE, return the response immediately after
                # http.response.start; the body streams in afterwards.
                resp = Response.new(
                    readable, headers=Object.fromEntries(headers), status=status
                )
                result.set_result(resp)

        elif got["type"] == "http.response.body":
            body = got["body"]
            more_body = got.get("more_body", False)

            # Convert body to a JS buffer.
            # NOTE(review): buf is never released here, unlike
            # acquire_js_buffer — confirm whether that leaks.
            px = create_proxy(body)
            buf = px.getBuffer()
            px.destroy()

            if is_sse:
                # For SSE, write the chunk to the stream; close on the last.
                await writer.write(buf.data)
                if not more_body:
                    await writer.close()
                    finished_response.set()
            else:
                resp = Response.new(
                    buf.data, headers=Object.fromEntries(headers), status=status
                )
                result.set_result(resp)
                await writer.close()
                finished_response.set()

    # Run the application in the background so SSE can stream after return.
    async def run_app():
        try:
            await app(request_to_scope(req, env), receive, send)

            # App finished without ever producing a response.
            if not result.done():
                raise RuntimeError("The application did not generate a response")  # noqa: TRY301
        except Exception as e:
            # Surface app errors through the awaited result.
            if not result.done():
                result.set_exception(e)
                await writer.close()  # Close the writer
                finished_response.set()

    app_task = create_task(run_app())

    # Wait for the response (set either from response.start for SSE, or from
    # the first response.body otherwise).
    response = await result

    if not is_sse:
        # Non-SSE: the app must fully complete before we return.
        await app_task
    elif ctx is not None:
        # SSE: keep the app alive past the response via waitUntil.
        ctx.waitUntil(create_proxy(app_task))
    else:
        # Fixed error message: the correct term is "Server-Sent Events".
        raise RuntimeError(
            "Server-Sent Events require ctx to be passed to asgi.fetch"
        )
    return response
|
|
229
|
+
|
|
230
|
+
|
|
231
|
+
async def process_websocket(app: Any, req: "Request | js.Request") -> js.Response:
    """Bridge a Workers WebSocket upgrade to an ASGI "websocket" connection.

    Creates a WebSocketPair, wires the server side's events into an ASGI
    receive queue, runs *app* in the background against that queue, and
    returns the 101 response carrying the client side of the pair.

    Bug fix: the close handler was previously assigned to ``server.onopen``
    (overwriting the open handler) instead of ``server.onclose``, so the app
    never received ``websocket.close`` events.
    """
    from js import Response, WebSocketPair

    client, server = WebSocketPair.new().object_values()
    server.accept()
    queue = Queue()

    def onopen(evt):
        msg = {"type": "websocket.connect"}
        queue.put_nowait(msg)

    # onopen doesn't seem to get called. WS lifecycle events are a bit messed up
    # here, so synthesize the connect event immediately.
    onopen(1)

    def onclose(evt):
        msg = {"type": "websocket.close", "code": evt.code, "reason": evt.reason}
        queue.put_nowait(msg)

    def onmessage(evt):
        msg = {"type": "websocket.receive", "text": evt.data}
        queue.put_nowait(msg)

    server.onopen = onopen
    server.onclose = onclose  # was: server.onopen = onclose (clobbered onopen)
    server.onmessage = onmessage

    async def ws_send(got):
        # ASGI -> JS: forward outgoing frames to the server socket.
        if got["type"] == "websocket.send":
            b = got.get("bytes", None)
            s = got.get("text", None)
            if b:
                with acquire_js_buffer(b) as jsbytes:
                    # Unlike the `Response` constructor, server.send seems to
                    # eagerly copy the source buffer
                    server.send(jsbytes)
            if s:
                server.send(s)

        else:
            print(" == Not implemented", got["type"])

    async def ws_receive():
        # JS -> ASGI: deliver the next queued lifecycle/message event.
        received = await queue.get()
        return received

    env = {}
    run_in_background(app(request_to_scope(req, env, ws=True), ws_receive, ws_send))

    return Response.new(None, status=101, webSocket=client)
|
|
281
|
+
|
|
282
|
+
|
|
283
|
+
async def fetch(
    app: Any, req: "Request | js.Request", env: Any, ctx: Context | None = None
) -> js.Response:
    """Serve one request with the ASGI *app*.

    Runs lifespan startup, processes the request, then invokes the shutdown
    hook before returning the JS response. *ctx* is required for server-sent
    event responses (it is forwarded to ``process_request``).
    """
    stop = await start_application(app)
    response = await process_request(app, req, env, ctx)
    await stop()
    return response
|
|
290
|
+
|
|
291
|
+
|
|
292
|
+
async def websocket(app: Any, req: "Request | js.Request") -> js.Response:
    """Upgrade *req* to a WebSocket and hand the connection to the ASGI *app*.

    Thin public wrapper around ``process_websocket``; returns the 101
    switching-protocols response.
    """
    return await process_websocket(app, req)
|
|
294
|
+
|
|
295
|
+
|
|
296
|
+
def __getattr__(name):
    """Lazily build module attributes; currently only ``env`` is provided.

    ``asgi.env`` is a FastAPI dependency that pulls the Workers environment
    object out of the ASGI scope (stashed there by ``request_to_scope``).
    Built on demand so importing this module does not require fastapi.
    """
    if name != "env":
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")

    from fastapi import Depends, Request

    @Depends
    async def env(request: Request):
        return request.scope["env"]

    return env
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
from ._workers import (
|
|
2
|
+
Blob,
|
|
3
|
+
BlobEnding,
|
|
4
|
+
BlobValue,
|
|
5
|
+
Body,
|
|
6
|
+
Context,
|
|
7
|
+
DurableObject,
|
|
8
|
+
FetchKwargs,
|
|
9
|
+
FetchResponse,
|
|
10
|
+
File,
|
|
11
|
+
FormData,
|
|
12
|
+
FormDataValue,
|
|
13
|
+
Headers,
|
|
14
|
+
JSBody,
|
|
15
|
+
Request,
|
|
16
|
+
RequestInitCfProperties,
|
|
17
|
+
Response,
|
|
18
|
+
WorkerEntrypoint,
|
|
19
|
+
WorkflowEntrypoint,
|
|
20
|
+
fetch,
|
|
21
|
+
handler,
|
|
22
|
+
import_from_javascript,
|
|
23
|
+
python_from_rpc,
|
|
24
|
+
python_to_rpc,
|
|
25
|
+
)
|
|
26
|
+
|
|
27
|
+
__all__ = [
|
|
28
|
+
"Blob",
|
|
29
|
+
"BlobEnding",
|
|
30
|
+
"BlobValue",
|
|
31
|
+
"Body",
|
|
32
|
+
"Context",
|
|
33
|
+
"DurableObject",
|
|
34
|
+
"FetchKwargs",
|
|
35
|
+
"FetchResponse",
|
|
36
|
+
"File",
|
|
37
|
+
"FormData",
|
|
38
|
+
"FormDataValue",
|
|
39
|
+
"Headers",
|
|
40
|
+
"JSBody",
|
|
41
|
+
"Request",
|
|
42
|
+
"RequestInitCfProperties",
|
|
43
|
+
"Response",
|
|
44
|
+
"WorkerEntrypoint",
|
|
45
|
+
"WorkflowEntrypoint",
|
|
46
|
+
"fetch",
|
|
47
|
+
"handler",
|
|
48
|
+
"import_from_javascript",
|
|
49
|
+
"python_from_rpc",
|
|
50
|
+
"python_to_rpc",
|
|
51
|
+
]
|