robyn 0.73.0__cp311-cp311-macosx_10_12_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of robyn might be problematic; consult the registry's advisory for more details.

Files changed (57)
  1. robyn/__init__.py +757 -0
  2. robyn/__main__.py +4 -0
  3. robyn/ai.py +308 -0
  4. robyn/argument_parser.py +129 -0
  5. robyn/authentication.py +96 -0
  6. robyn/cli.py +136 -0
  7. robyn/dependency_injection.py +71 -0
  8. robyn/env_populator.py +35 -0
  9. robyn/events.py +6 -0
  10. robyn/exceptions.py +32 -0
  11. robyn/jsonify.py +13 -0
  12. robyn/logger.py +80 -0
  13. robyn/mcp.py +461 -0
  14. robyn/openapi.py +448 -0
  15. robyn/processpool.py +226 -0
  16. robyn/py.typed +0 -0
  17. robyn/reloader.py +164 -0
  18. robyn/responses.py +208 -0
  19. robyn/robyn.cpython-311-darwin.so +0 -0
  20. robyn/robyn.pyi +421 -0
  21. robyn/router.py +410 -0
  22. robyn/scaffold/mongo/Dockerfile +12 -0
  23. robyn/scaffold/mongo/app.py +43 -0
  24. robyn/scaffold/mongo/requirements.txt +2 -0
  25. robyn/scaffold/no-db/Dockerfile +12 -0
  26. robyn/scaffold/no-db/app.py +12 -0
  27. robyn/scaffold/no-db/requirements.txt +1 -0
  28. robyn/scaffold/postgres/Dockerfile +32 -0
  29. robyn/scaffold/postgres/app.py +31 -0
  30. robyn/scaffold/postgres/requirements.txt +3 -0
  31. robyn/scaffold/postgres/supervisord.conf +14 -0
  32. robyn/scaffold/prisma/Dockerfile +15 -0
  33. robyn/scaffold/prisma/app.py +32 -0
  34. robyn/scaffold/prisma/requirements.txt +2 -0
  35. robyn/scaffold/prisma/schema.prisma +13 -0
  36. robyn/scaffold/sqlalchemy/Dockerfile +12 -0
  37. robyn/scaffold/sqlalchemy/__init__.py +0 -0
  38. robyn/scaffold/sqlalchemy/app.py +13 -0
  39. robyn/scaffold/sqlalchemy/models.py +21 -0
  40. robyn/scaffold/sqlalchemy/requirements.txt +2 -0
  41. robyn/scaffold/sqlite/Dockerfile +12 -0
  42. robyn/scaffold/sqlite/app.py +22 -0
  43. robyn/scaffold/sqlite/requirements.txt +1 -0
  44. robyn/scaffold/sqlmodel/Dockerfile +11 -0
  45. robyn/scaffold/sqlmodel/app.py +46 -0
  46. robyn/scaffold/sqlmodel/models.py +10 -0
  47. robyn/scaffold/sqlmodel/requirements.txt +2 -0
  48. robyn/status_codes.py +137 -0
  49. robyn/swagger.html +32 -0
  50. robyn/templating.py +30 -0
  51. robyn/types.py +44 -0
  52. robyn/ws.py +67 -0
  53. robyn-0.73.0.dist-info/METADATA +32 -0
  54. robyn-0.73.0.dist-info/RECORD +57 -0
  55. robyn-0.73.0.dist-info/WHEEL +4 -0
  56. robyn-0.73.0.dist-info/entry_points.txt +3 -0
  57. robyn-0.73.0.dist-info/licenses/LICENSE +25 -0
robyn/reloader.py ADDED
@@ -0,0 +1,164 @@
1
+ import glob
2
+ import os
3
+ import signal
4
+ import subprocess
5
+ import sys
6
+ import time
7
+ from typing import List, Union
8
+
9
+ from watchdog.events import FileSystemEventHandler
10
+ from watchdog.observers import Observer
11
+
12
+ from robyn.logger import Colors, logger
13
+
14
+
15
def compile_rust_files(directory_path: str) -> List[str]:
    """Compile every ``*.rs`` file under *directory_path* with rustimport.

    :param directory_path str: root directory searched recursively for rust files
    :return: paths of the compiled binary artifacts found next to the sources
    :raises ValueError: if the current platform has no known binary extension
    """
    # The binary extension is loop-invariant: resolve it once instead of on
    # every compiled file, and fail fast on unsupported platforms. darwin and
    # linux share the ".so" suffix.
    if sys.platform == "win32":
        binary_extension = ".dll"
    elif sys.platform in ("darwin", "linux"):
        binary_extension = ".so"
    else:
        raise ValueError(f"Unsupported platform: {sys.platform}")

    rust_files = glob.glob(os.path.join(directory_path, "**/*.rs"), recursive=True)
    rust_binaries: List[str] = []  # consistent with the List[...] return annotation

    for rust_file in rust_files:
        print(f"Compiling rust file: {rust_file}")

        result = subprocess.run(
            [sys.executable, "-m", "rustimport", "build", rust_file],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            start_new_session=False,
        )
        if result.returncode != 0:
            print(f"Error compiling rust file: {rust_file} \n {result.stderr.decode('utf-8')} \n {result.stdout.decode('utf-8')}")
        else:
            print(f"Compiled rust file: {rust_file}")
            rust_file_base = rust_file.removesuffix(".rs")

            # rustimport writes the binary next to the source with a
            # platform-specific infix (e.g. name.cpython-311-x86_64.so),
            # so match it with a wildcard.
            search_pattern = f"{rust_file_base}.*{binary_extension}"
            matching_binaries = glob.glob(search_pattern)
            rust_binaries.extend(matching_binaries)

    return rust_binaries
50
+
51
+
52
def create_rust_file(file_name: str) -> None:
    """Scaffold a new rust file ``<file_name>.rs`` via ``rustimport new``.

    :param file_name str: target name, with or without the ``.rs`` suffix
    """
    # Normalise the name so a ".rs" suffix is never doubled.
    if file_name.endswith(".rs"):
        file_name = file_name.removesuffix(".rs")

    rust_file = f"{file_name}.rs"

    result = subprocess.run(
        [sys.executable, "-m", "rustimport", "new", rust_file],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        start_new_session=False,
    )

    if result.returncode != 0:
        # Bug fix: the original passed logger-style "%s" arguments to print(),
        # which printed the placeholders and a tuple literally instead of the
        # captured output.
        print(f"Error creating rust file : {result.stderr.decode('utf-8')} {result.stdout.decode('utf-8')}")
    else:
        print(f"Created rust file : {rust_file}")
73
+
74
+
75
def clean_rust_binaries(rust_binaries: List[str]) -> None:
    """Delete previously built rust binary artifacts from disk.

    :param rust_binaries List[str]: paths of binaries to remove
    """
    for file in rust_binaries:
        # Bug fix: print() does not interpolate "%s"; use an f-string so the
        # actual path is shown instead of the placeholder.
        print(f"Cleaning rust file : {file}")
        os.remove(file)
79
+
80
+
81
def setup_reloader(directory_path: str, file_path: str) -> None:
    """Run the dev-server reload loop: watch *directory_path* and restart on change.

    Blocks until the watchdog observer thread dies or a SIGINT/SIGTERM arrives.

    :param directory_path str: directory watched recursively for file changes
    :param file_path str: entry-point file handed to the EventHandler
    """
    event_handler = EventHandler(file_path, directory_path)

    # sets the IS_RELOADER_RUNNING environment variable to True
    # (this call also performs the initial server start).
    event_handler.reload()

    logger.info(
        "Dev server initialized with the directory_path : %s",
        directory_path,
        color=Colors.BLUE,
    )

    def terminating_signal_handler(_sig, _frame):
        # Stop the child server first, then shut the watcher thread down and
        # wait for it so the process exits cleanly.
        event_handler.stop_server()
        logger.info("Terminating reloader", bold=True)
        observer.stop()
        observer.join()

    signal.signal(signal.SIGINT, terminating_signal_handler)
    signal.signal(signal.SIGTERM, terminating_signal_handler)

    observer = Observer()
    observer.schedule(event_handler, path=directory_path, recursive=True)
    observer.start()

    try:
        # Join with a 1s timeout so the loop re-checks liveness and signal
        # handlers get a chance to run.
        while observer.is_alive():
            observer.join(1)
    finally:
        observer.stop()
        observer.join()
        # Wait for the child server process spawned by reload() to exit.
        event_handler.process.wait()
113
+
114
+
115
class EventHandler(FileSystemEventHandler):
    """Watchdog handler that restarts the server subprocess on file changes.

    Also recompiles any ``*.rs`` files in the watched directory via rustimport
    before each restart, and debounces bursts of filesystem events.
    """

    def __init__(self, file_path: str, directory_path: str) -> None:
        self.file_path = file_path
        self.directory_path = directory_path
        self.process: Union[subprocess.Popen[bytes], None] = None  # Keep track of the subprocess
        self.built_rust_binaries: List = []  # Keep track of the built rust binaries

        self.last_reload = time.time()  # Keep track of the last reload. EventHandler is initialized with the process.

    def stop_server(self) -> None:
        """Ask the current server subprocess (if any) to terminate."""
        if self.process:
            os.kill(self.process.pid, signal.SIGTERM)  # Stop the subprocess using os.kill()

    def reload(self) -> None:
        """(Re)start the server: rebuild rust sources and spawn a fresh process."""
        self.stop_server()
        print("Reloading the server")

        new_env = os.environ.copy()
        new_env["IS_RELOADER_RUNNING"] = "True"  # This is used to check if a reloader is already running
        # IS_RELOADER_RUNNING is specifically used for IPC between the reloader and the server

        # Strip the --dev flag so the child runs as a plain server instead of
        # spawning another reloader.
        arguments = [arg for arg in sys.argv[1:] if not arg.startswith("--dev")]

        # Remove binaries from the previous build before recompiling.
        clean_rust_binaries(self.built_rust_binaries)
        self.built_rust_binaries = compile_rust_files(self.directory_path)

        # NOTE(review): the process already received SIGTERM above; kill()
        # here is a second, forceful stop for a child that ignored it.
        prev_process = self.process
        if prev_process:
            prev_process.kill()

        self.process = subprocess.Popen(
            [sys.executable, *arguments],
            env=new_env,
        )

        self.last_reload = time.time()

    def on_modified(self, event) -> None:
        """
        This function is a callback that will start a new server on every event change

        :param event FSEvent: a data structure with info about the events
        """

        # Avoid reloading multiple times when watchdog detects multiple events
        if time.time() - self.last_reload < 0.5:
            return

        time.sleep(0.2)  # Wait for the file to be fully written
        self.reload()
robyn/responses.py ADDED
@@ -0,0 +1,208 @@
1
+ import asyncio
2
+ import mimetypes
3
+ import os
4
+ from typing import AsyncGenerator, Generator, Optional, Union
5
+
6
+ from robyn.robyn import Headers, Response
7
+
8
+
9
class FileResponse:
    """Response wrapper describing a file on disk for the runtime to serve.

    Defaults to HTTP 200 and a bare ``Content-Disposition: attachment`` header
    when no explicit headers are supplied.
    """

    def __init__(
        self,
        file_path: str,
        status_code: Optional[int] = None,
        headers: Optional[Headers] = None,
    ):
        self.file_path = file_path
        # Empty description: the body comes from the file, not from here.
        self.description = ""
        self.status_code = status_code if status_code else 200
        self.headers = headers if headers else Headers({"Content-Disposition": "attachment"})
20
+
21
+
22
def html(html: str) -> Response:
    """
    This function will help in serving a simple html string

    :param html str: html to serve as a response
    """
    html_headers = Headers({"Content-Type": "text/html"})
    return Response(
        description=html,
        status_code=200,
        headers=html_headers,
    )
33
+
34
+
35
def serve_html(file_path: str) -> FileResponse:
    """
    This function will help in serving a single html file

    :param file_path str: file path to serve as a response
    """
    html_headers = Headers({"Content-Type": "text/html"})
    return FileResponse(file_path, headers=html_headers)
43
+
44
+
45
def serve_file(file_path: str, file_name: Optional[str] = None) -> FileResponse:
    """
    This function will help in serving a file as a downloadable attachment.

    :param file_path str: file path to serve as a response
    :param file_name [str | None]: file name to serve as a response, defaults to None
        (the basename of *file_path* is used)
    """
    file_name = file_name or os.path.basename(file_path)

    # Bug fix: guess_type() returns None for unknown extensions, which would
    # have produced a malformed "Content-Type: None" header. Fall back to the
    # generic binary type instead.
    mime_type = mimetypes.guess_type(file_name)[0] or "application/octet-stream"

    headers = Headers({"Content-Type": mime_type})
    headers.append("Content-Disposition", f"attachment; filename={file_name}")

    return FileResponse(
        file_path,
        headers=headers,
    )
63
+
64
+
65
class AsyncGeneratorWrapper:
    """Optimized true-streaming wrapper for async generators.

    Adapts an async generator to the synchronous iterator protocol by driving
    it one item at a time on an event loop, so the consumer never buffers the
    whole stream.
    """

    def __init__(self, async_gen: AsyncGenerator[str, None]):
        self.async_gen = async_gen
        self._loop = None        # event loop, acquired lazily on first __next__
        self._iterator = None    # async iterator, created on first __next__
        self._exhausted = False  # set once StopIteration has been raised

    def __iter__(self):
        return self

    def __next__(self):
        """Return the next streamed chunk, or raise StopIteration when done."""
        if self._exhausted:
            raise StopIteration

        # Initialize the loop and iterator only once
        if self._iterator is None:
            self._init_async_iterator()

        try:
            # Get the next value from the async generator
            # This is the key optimization - we don't buffer, we get one value at a time
            return self._get_next_value()
        except StopIteration:
            self._exhausted = True
            raise

    def _init_async_iterator(self):
        """Initialize the async iterator with proper loop handling"""
        try:
            # Try to get the running event loop
            self._loop = asyncio.get_running_loop()
        except RuntimeError:
            # No running loop, create a new one
            self._loop = asyncio.new_event_loop()
            asyncio.set_event_loop(self._loop)

        # Create the async iterator
        self._iterator = self.async_gen.__aiter__()

    def _get_next_value(self):
        """Get the next value from async generator without buffering"""
        # NOTE(review): if _init_async_iterator captured an already-running
        # loop, run_until_complete() below raises RuntimeError, which the
        # broad except turns into end-of-stream — confirm this wrapper is only
        # driven from synchronous contexts.
        try:
            # Create a coroutine to get the next value
            async def get_next():
                return await self._iterator.__anext__()

            # Run the coroutine to get the next value
            return self._loop.run_until_complete(get_next())
        except StopAsyncIteration:
            # Convert StopAsyncIteration to StopIteration for sync generator protocol
            raise StopIteration
        except Exception as e:
            # Log error and stop iteration
            # NOTE(review): any error raised inside the generator silently
            # ends the stream here instead of propagating to the caller.
            print(f"Error in async generator: {e}")
            raise StopIteration
122
+
123
+
124
class StreamingResponse:
    """Response whose body is produced incrementally by a (a)sync generator.

    Async generators are adapted through AsyncGeneratorWrapper so they can be
    drained via the synchronous iterator protocol. For the SSE media type the
    standard event-stream headers are applied automatically.
    """

    def __init__(
        self,
        content: Union[Generator[str, None, None], AsyncGenerator[str, None]],
        status_code: Optional[int] = None,
        headers: Optional[Headers] = None,
        media_type: str = "text/event-stream",
    ):
        # Async generators get wrapped; sync generators pass straight through.
        # The Rust implementation detects async generators but falls back to
        # this Python wrapper.
        is_async_gen = hasattr(content, "__anext__")
        self.content = AsyncGeneratorWrapper(content) if is_async_gen else content

        self.status_code = status_code if status_code else 200
        self.headers = headers if headers else Headers({})
        self.media_type = media_type

        if media_type != "text/event-stream":
            return

        # Default SSE headers. Cache-Control and Connection headers are set by
        # the Rust layer with optimized headers.
        self.headers.set("Content-Type", "text/event-stream")
        self.headers.set("Access-Control-Allow-Origin", "*")
        self.headers.set("Access-Control-Allow-Headers", "Cache-Control")
151
+
152
+
153
def SSEResponse(
    content: Union[Generator[str, None, None], AsyncGenerator[str, None]],
    status_code: Optional[int] = None,
    headers: Optional[Headers] = None,
) -> StreamingResponse:
    """
    Create a Server-Sent Events (SSE) streaming response.

    :param content: Generator or AsyncGenerator yielding SSE-formatted strings
    :param status_code: HTTP status code (default: 200)
    :param headers: Additional headers
    :return: StreamingResponse configured for SSE
    """
    return StreamingResponse(
        content=content,
        status_code=status_code,
        headers=headers,
        media_type="text/event-stream",
    )
167
+
168
+
169
def SSEMessage(data: str, event: Optional[str] = None, id: Optional[str] = None, retry: Optional[int] = None) -> str:
    """
    Optimized SSE message formatting with minimal allocations.

    :param data: The message data
    :param event: Optional event type
    :param id: Optional event ID
    :param retry: Optional retry time in milliseconds
    :return: SSE-formatted string
    """
    # Collect field lines and join once at the end to avoid repeated
    # string concatenation.
    parts = []

    # Add optional fields first
    if event:
        parts.append(f"event: {event}\n")
    if id:
        parts.append(f"id: {id}\n")
    # Bug fix: "if retry:" silently dropped the legitimate value 0; compare
    # against None so "retry: 0" is emitted as the SSE format allows.
    if retry is not None:
        parts.append(f"retry: {retry}\n")

    # Handle data with optimized multi-line processing
    if data:
        data_str = str(data)
        # Fast path for single-line data (most common case)
        if "\n" not in data_str and "\r" not in data_str:
            parts.append(f"data: {data_str}\n")
        else:
            # Multi-line payloads become one "data:" line per line, after
            # normalising CRLF/CR to LF.
            normalized_data = data_str.replace("\r\n", "\n").replace("\r", "\n")
            for line in normalized_data.split("\n"):
                parts.append(f"data: {line}\n")
    else:
        parts.append("data: \n")

    # Add the required double newline terminator
    parts.append("\n")

    # Single join operation for optimal performance
    return "".join(parts)
Binary file