pixeltable 0.2.29__py3-none-any.whl → 0.2.30__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of pixeltable might be problematic.

@@ -1,322 +0,0 @@
- import datetime
- import json
- import logging
- import os
- import pathlib
- import subprocess
- import sys
- import time
- from typing import Any
- from zoneinfo import ZoneInfo
-
- import pixeltable_pgserver
- import toml
-
- import pixeltable as pxt
- import pixeltable.metadata as metadata
- from pixeltable.env import Env
- from pixeltable.func import Batch
- from pixeltable.io.external_store import Project
- from pixeltable.tool import embed_udf
- from pixeltable.type_system import BoolType, FloatType, ImageType, IntType, JsonType, StringType, TimestampType
-
- _logger = logging.getLogger('pixeltable')
-
-
- class Dumper:
-
-     def __init__(self, output_dir='target', db_name='pxtdump') -> None:
-         if sys.version_info >= (3, 10):
-             raise RuntimeError(
-                 'This script must be run on Python 3.9. '
-                 'DB dumps are incompatible across versions due to issues with pickling anonymous UDFs.'
-             )
-
-         self.output_dir = pathlib.Path(output_dir)
-         shared_home = pathlib.Path(os.environ.get('PIXELTABLE_HOME', '~/.pixeltable')).expanduser()
-         mock_home_dir = self.output_dir / '.pixeltable'
-         mock_home_dir.mkdir(parents=True, exist_ok=True)
-         os.environ['PIXELTABLE_HOME'] = str(mock_home_dir)
-         os.environ['PIXELTABLE_CONFIG'] = str(shared_home / 'config.toml')
-         os.environ['PIXELTABLE_DB'] = db_name
-         os.environ['PIXELTABLE_PGDATA'] = str(shared_home / 'pgdata')
-
-         Env._init_env(reinit_db=True)
-
-         Env.get().configure_logging(level=logging.DEBUG, to_stdout=True)
-
-     def dump_db(self) -> None:
-         md_version = metadata.VERSION
-         dump_file = self.output_dir / f'pixeltable-v{md_version:03d}-test.dump.gz'
-         _logger.info(f'Creating database dump at: {dump_file}')
-         pg_package_dir = os.path.dirname(pixeltable_pgserver.__file__)
-         pg_dump_binary = f'{pg_package_dir}/pginstall/bin/pg_dump'
-         _logger.info(f'Using pg_dump binary at: {pg_dump_binary}')
-         # We need the raw DB URL, without a driver qualifier. (The driver qualifier is needed by
-         # SQLAlchemy, but command-line Postgres won't know how to interpret it.)
-         db_url = Env.get()._db_server.get_uri(Env.get()._db_name)
-         with open(dump_file, 'wb') as dump:
-             pg_dump_process = subprocess.Popen(
-                 (pg_dump_binary, db_url, '-U', 'postgres', '-Fc'),
-                 stdout=subprocess.PIPE
-             )
-             subprocess.run(
-                 ('gzip', '-9'),
-                 stdin=pg_dump_process.stdout,
-                 stdout=dump,
-                 check=True
-             )
-         if pg_dump_process.poll() != 0:
-             # wait for a 2s before checking again & raising error
-             time.sleep(2)
-             if pg_dump_process.poll() != 0:
-                 raise RuntimeError(f'pg_dump failed with return code {pg_dump_process.returncode}')
-         info_file = self.output_dir / f'pixeltable-v{md_version:03d}-test-info.toml'
-         git_sha = subprocess.check_output(('git', 'rev-parse', 'HEAD')).decode('ascii').strip()
-         user = os.environ.get('USER', os.environ.get('USERNAME'))
-         info_dict = {'pixeltable-dump': {
-             'metadata-version': md_version,
-             'git-sha': git_sha,
-             'datetime': datetime.datetime.now(tz=datetime.timezone.utc),
-             'user': user
-         }}
-         with open(info_file, 'w') as info:
-             toml.dump(info_dict, info)
-
-     # Expression types, predicate types, embedding indices, views on views
-     def create_tables(self) -> None:
-         schema = {
-             'c1': StringType(nullable=False),
-             'c1n': StringType(nullable=True),
-             'c2': IntType(nullable=False),
-             'c3': FloatType(nullable=False),
-             'c4': BoolType(nullable=False),
-             'c5': TimestampType(nullable=False),
-             'c6': JsonType(nullable=False),
-             'c7': JsonType(nullable=False),
-             'c8': ImageType(nullable=True)
-         }
-         t = pxt.create_table('base_table', schema, primary_key='c2')
-
-         num_rows = 20
-         d1 = {
-             'f1': 'test string 1',
-             'f2': 1,
-             'f3': 1.0,
-             'f4': True,
-             'f5': [1.0, 2.0, 3.0, 4.0],
-             'f6': {
-                 'f7': 'test string 2',
-                 'f8': [1.0, 2.0, 3.0, 4.0],
-             },
-         }
-         d2 = [d1, d1]
-
-         c1_data = [f'test string {i}' for i in range(num_rows)]
-         c2_data = [i for i in range(num_rows)]
-         c3_data = [float(i) for i in range(num_rows)]
-         c4_data = [bool(i % 2) for i in range(num_rows)]
-         c5_data = [datetime.datetime.now()] * num_rows
-         c6_data = [
-             {
-                 'f1': f'test string {i}',
-                 'f2': i,
-                 'f3': float(i),
-                 'f4': bool(i % 2),
-                 'f5': [1.0, 2.0, 3.0, 4.0],
-                 'f6': {
-                     'f7': 'test string 2',
-                     'f8': [1.0, 2.0, 3.0, 4.0],
-                 },
-             }
-             for i in range(num_rows)
-         ]
-         c7_data = [d2] * num_rows
-         rows = [
-             {
-                 'c1': c1_data[i],
-                 'c1n': c1_data[i] if i % 10 != 0 else None,
-                 'c2': c2_data[i],
-                 'c3': c3_data[i],
-                 'c4': c4_data[i],
-                 'c5': c5_data[i],
-                 'c6': c6_data[i],
-                 'c7': c7_data[i],
-                 'c8': None
-             }
-             for i in range(num_rows)
-         ]
-
-         self.__add_expr_columns(t, 'base_table')
-         t.insert(rows)
-
-         pxt.create_dir('views')
-
-         # simple view
-         v = pxt.create_view('views.view', t.where(t.c2 < 50))
-         self.__add_expr_columns(v, 'view')
-
-         # snapshot
-         _ = pxt.create_snapshot('views.snapshot', t.where(t.c2 >= 75))
-
-         # view of views
-         vv = pxt.create_view('views.view_of_views', v.where(t.c2 >= 25))
-         self.__add_expr_columns(vv, 'view_of_views')
-
-         # empty view
-         e = pxt.create_view('views.empty_view', t.where(t.c2 == 4171780))
-         assert e.count() == 0
-         self.__add_expr_columns(e, 'empty_view', include_expensive_functions=True)
-
-         # Add external stores
-         from pixeltable.io.external_store import MockProject
-         v._link_external_store(
-             MockProject.create(
-                 v,
-                 'project',
-                 {'int_field': pxt.IntType()},
-                 {'str_field': pxt.StringType()},
-                 {'view_test_udf': 'int_field', 'c1': 'str_field'}
-             )
-         )
-         # We're just trying to test metadata here, so it's ok to link a false Label Studio project.
-         # We include a computed image column in order to ensure the creation of a stored proxy.
-         from pixeltable.io.label_studio import LabelStudioProject
-         col_mapping = Project.validate_columns(
-             v, {'str_field': pxt.StringType(), 'img_field': pxt.ImageType()}, {},
-             {'view_function_call': 'str_field', 'base_table_image_rot': 'img_field'}
-         )
-         project = LabelStudioProject('ls_project_0', 4171780, media_import_method='file', col_mapping=col_mapping)
-         v._link_external_store(project)
-         # Sanity check that the stored proxy column did get created
-         assert len(project.stored_proxies) == 1
-         assert t.base_table_image_rot.col in project.stored_proxies
-
-     def __add_expr_columns(self, t: pxt.Table, col_prefix: str, include_expensive_functions=False) -> None:
-         def add_column(col_name: str, col_expr: Any, stored: bool = True) -> None:
-             t.add_column(**{f'{col_prefix}_{col_name}': col_expr}, stored=stored)
-
-         # arithmetic_expr
-         add_column('plus', t.c2 + 6)
-         add_column('minus', t.c2 - 5)
-         add_column('times', t.c3 * 1.2)
-         add_column('div', t.c3 / 1.7)
-         add_column('mod', t.c2 % 11)
-
-         # column_property_ref
-         add_column('fileurl', t.c8.fileurl)
-         add_column('localpath', t.c8.localpath)
-
-         # comparison
-         add_column('lt', t.c2 < t.c3)
-         add_column('le', t.c2 <= t.c3)
-         add_column('gt', t.c2 > t.c3)
-         add_column('ge', t.c2 >= t.c3)
-         add_column('ne', t.c2 != t.c3)
-         add_column('eq', t.c2 == t.c3)
-
-         # compound_predicate
-         add_column('and', (t.c2 >= 5) & (t.c2 < 8))
-         add_column('or', (t.c2 > 1) | t.c4)
-         add_column('not', ~(t.c2 > 20))
-
-         # function_call
-         add_column('function_call', pxt.functions.string.format('{0} {key}', t.c1, key=t.c1)) # library function
-         add_column('test_udf', test_udf_stored(t.c2)) # stored udf
-         add_column('test_udf_batched', test_udf_stored_batched(t.c1, upper=False)) # batched stored udf
-         if include_expensive_functions:
-             # batched library function
-             add_column('batched', pxt.functions.huggingface.clip_text(t.c1, model_id='openai/clip-vit-base-patch32'))
-
-         # image_member_access
-         add_column('image_mode', t.c8.mode)
-         add_column('image_rot', t.c8.rotate(180), stored=False)
-
-         # in_predicate
-         add_column('isin_1', t.c1.isin(['test string 1', 'test string 2', 'test string 3']))
-         add_column('isin_2', t.c2.isin([1, 2, 3, 4, 5]))
-         add_column('isin_3', t.c2.isin(t.c6.f5))
-
-         # inline_array, inline_list, inline_dict
-         add_column('inline_array_1', pxt.array([[1, 2, 3], [4, 5, 6]]))
-         add_column('inline_array_2', pxt.array([['a', 'b', 'c'], ['d', 'e', 'f']]))
-         add_column('inline_array_exprs', pxt.array([[t.c2, t.c2 + 1], [t.c2 + 2, t.c2]]))
-         add_column('inline_array_mixed', pxt.array([[1, t.c2], [3, t.c2]]))
-         add_column('inline_list_1', [[1, 2, 3], [4, 5, 6]])
-         add_column('inline_list_2', [['a', 'b', 'c'], ['d', 'e', 'f']])
-         add_column('inline_list_exprs', [t.c1, [t.c1n, t.c2]])
-         add_column('inline_list_mixed', [1, 'a', t.c1, [1, 'a', t.c1n], 1, 'a'])
-         add_column('inline_dict', {'int': 22, 'dict': {'key': 'val'}, 'expr': t.c1})
-
-         # is_null
-         add_column('isnull', t.c1 == None)
-
-         # json_mapper and json_path
-         add_column('json_mapper', t.c6[3])
-         add_column('json_path', t.c6.f1)
-         add_column('json_path_nested', t.c6.f6.f7)
-         add_column('json_path_star', t.c6.f5['*'])
-         add_column('json_path_idx', t.c6.f5[3])
-         add_column('json_path_slice', t.c6.f5[1:3:2])
-
-         # literal
-         add_column('str_const', 'str')
-         add_column('int_const', 5)
-         add_column('float_const', 5.0)
-         add_column('timestamp_const_1', datetime.datetime.now())
-         add_column('timestamp_const_2', datetime.datetime.now().astimezone(ZoneInfo('America/Anchorage')))
-
-         # type_cast
-         add_column('astype', t.c2.astype(FloatType()))
-
-         # .apply
-         add_column('c2_to_string', t.c2.apply(str))
-         add_column('c6_to_string', t.c6.apply(json.dumps))
-         add_column('c6_back_to_json', t[f'{col_prefix}_c6_to_string'].apply(json.loads))
-
-         t.add_embedding_index(
-             f'{col_prefix}_function_call',
-             string_embed=pxt.functions.huggingface.clip_text.using(model_id='openai/clip-vit-base-patch32')
-         )
-
-         if t.get_metadata()['is_view']:
-             # Add an embedding index to the view that is on a column in the base table
-             t.add_embedding_index(
-                 'base_table_function_call',
-                 string_embed=pxt.functions.huggingface.clip_text.using(model_id='openai/clip-vit-base-patch32')
-             )
-
-         # query()
-         @t.query
-         def q1(i: int):
-             # this breaks; TODO: why?
-             #return t.where(t.c2 < i)
-             return t.where(t.c2 < i).select(t.c1, t.c2)
-         add_column('query_output', t.queries.q1(t.c2))
-
-         @t.query
-         def q2(s: str):
-             sim = t[f'{col_prefix}_function_call'].similarity(s)
-             return t.order_by(sim, asc=False).select(t[f'{col_prefix}_function_call']).limit(5)
-         add_column('sim_output', t.queries.q2(t.c1))
-
-
- @pxt.udf(_force_stored=True)
- def test_udf_stored(n: int) -> int:
-     return n + 1
-
-
- @pxt.udf(batch_size=4, _force_stored=True)
- def test_udf_stored_batched(strings: Batch[str], *, upper: bool = True) -> Batch[str]:
-     return [string.upper() if upper else string.lower() for string in strings]
-
-
- def main() -> None:
-     _logger.info("Creating pixeltable test artifact.")
-     dumper = Dumper()
-     dumper.create_tables()
-     dumper.dump_db()
-
-
- if __name__ == "__main__":
-     main()
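
The removed script above only produces the compressed pg_dump artifact. As a minimal sketch (not part of the package), a dump like this could be restored into a scratch Postgres database with gunzip and pg_restore; the file name and connection URL below are assumed for illustration:

import subprocess

# Hypothetical inputs: adjust to the dump actually produced and to a scratch DB.
dump_path = 'target/pixeltable-v026-test.dump.gz'  # assumed output of Dumper.dump_db()
db_url = 'postgresql://postgres@localhost:5432/pxtdump'  # assumed scratch database

# Decompress the archive and feed pg_dump's custom format into pg_restore.
gunzip = subprocess.Popen(('gunzip', '-c', dump_path), stdout=subprocess.PIPE)
subprocess.run(('pg_restore', '--no-owner', '-d', db_url), stdin=gunzip.stdout, check=True)
gunzip.wait()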
@@ -1,81 +0,0 @@
- import av # type: ignore[import-untyped]
- import PIL.Image
- import PIL.ImageDraw
- import PIL.ImageFont
-
- from pathlib import Path
- from typing import Optional
- import tempfile
- import math
-
- def create_test_video(
-     frame_count: int,
-     frame_rate: float = 1.0,
-     frame_width: int = 224,
-     aspect_ratio: str = '16:9',
-     frame_height: Optional[int] = None,
-     output_path: Optional[Path] = None,
-     font_file: str = '/Library/Fonts/Arial Unicode.ttf',
- ) -> Path:
-     """
-     Creates an .mp4 video file such as the ones in /tests/data/test_videos
-     The video contains a frame number in each frame (for visual sanity check).
-
-     Args:
-         frame_count: Number of frames to create
-         frame_rate: Frame rate of the video
-         frame_width (int): Width in pixels of the video frame. Note: cost of decoding increases dramatically
-             with frame width * frame height.
-         aspect_ratio: Aspect ratio (width/height) of the video frames string of form 'width:height'
-         frame_height: Height of the video frame, if given, aspect_ratio is ignored
-         output_path: Path to save the video file
-         font_file: Path to the font file used for text.
-     """
-
-     if output_path is None:
-         output_path = Path(tempfile.NamedTemporaryFile(suffix='.mp4', delete=False).name)
-
-     parts = [int(p) for p in aspect_ratio.split(':')]
-     assert len(parts) == 2
-     aspect_ratio = parts[0] / parts[1]
-
-     if frame_height is None:
-         frame_height = math.ceil(frame_width / aspect_ratio)
-
-     frame_size = (frame_width, frame_height)
-
-     font_size = min(frame_height, frame_width) // 4
-     font = PIL.ImageFont.truetype(font=font_file, size=font_size)
-     font_fill = 0xFFFFFF # white
-     frame_color = 0xFFFFFF - font_fill # black
-     # Create a video container
-     container = av.open(str(output_path), mode='w')
-
-     # Add a video stream
-     stream = container.add_stream('h264', rate=frame_rate)
-     stream.width, stream.height = frame_size
-     stream.pix_fmt = 'yuv420p'
-
-     for frame_number in range(frame_count):
-         # Create an image with a number in it
-         image = PIL.Image.new('RGB', frame_size, color=frame_color)
-         draw = PIL.ImageDraw.Draw(image)
-         # Optionally, add a font here if you have one
-         text = str(frame_number)
-         _, _, text_width, text_height = draw.textbbox((0, 0), text, font=font)
-         text_position = ((frame_size[0] - text_width) // 2, (frame_size[1] - text_height) // 2)
-         draw.text(text_position, text, font=font, fill=font_fill)
-
-         # Convert the PIL image to an AVFrame
-         frame = av.VideoFrame.from_image(image)
-
-         # Encode and write the frame
-         for packet in stream.encode(frame):
-             container.mux(packet)
-
-     # Flush and close the stream
-     for packet in stream.encode():
-         container.mux(packet)
-
-     container.close()
-     return output_path
1
- import ast
2
- import warnings
3
- from typing import Optional, Union
4
-
5
- import griffe
6
- import griffe.expressions
7
- from griffe import Extension, Object, ObjectNode
8
-
9
- import pixeltable as pxt
10
-
11
- logger = griffe.get_logger(__name__)
12
-
13
- class PxtGriffeExtension(Extension):
14
- """Implementation of a Pixeltable custom griffe extension."""
15
-
16
- def on_instance(self, node: Union[ast.AST, ObjectNode], obj: Object) -> None:
17
- if obj.docstring is None:
18
- # Skip over entities without a docstring
19
- return
20
-
21
- if isinstance(obj, griffe.Function):
22
- # See if the (Python) function has a @pxt.udf decorator
23
- if any(
24
- isinstance(dec.value, griffe.expressions.Expr) and dec.value.canonical_path in ['pixeltable.func.udf', 'pixeltable.udf']
25
- for dec in obj.decorators
26
- ):
27
- # Update the template
28
- self.__modify_pxt_udf(obj)
29
-
30
- def __modify_pxt_udf(self, func: griffe.Function) -> None:
31
- """
32
- Instructs the doc snippet for `func` to use the custom Pixeltable UDF jinja template, and
33
- converts all type hints to Pixeltable column type references, in accordance with the @udf
34
- decorator behavior.
35
- """
36
- func.extra['mkdocstrings']['template'] = 'udf.html.jinja'
37
- # Dynamically load the UDF reference so we can inspect the Pixeltable signature directly
38
- warnings.simplefilter("ignore")
39
- udf = griffe.dynamic_import(func.path)
40
- assert isinstance(udf, pxt.Function)
41
- # TODO: Find a way to support multiple signatures?
42
- # Convert the return type to a Pixeltable type reference
43
- func.returns = str(udf.signatures[0].get_return_type())
44
- # Convert the parameter types to Pixeltable type references
45
- for griffe_param in func.parameters:
46
- assert isinstance(griffe_param.annotation, griffe.expressions.Expr)
47
- if griffe_param.name not in udf.signatures[0].parameters:
48
- logger.warning(f'Parameter `{griffe_param.name}` not found in signature for UDF: {udf.display_name}')
49
- continue
50
- pxt_param = udf.signatures[0].parameters[griffe_param.name]
51
- griffe_param.annotation = str(pxt_param.col_type)
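
For context, a minimal sketch of the kind of declaration this extension targets: a function carrying the @pxt.udf decorator, whose Python type hints get replaced by Pixeltable column type references in the rendered docs (the UDF below is illustrative, not part of the package):

import pixeltable as pxt

@pxt.udf
def shout(s: str) -> str:
    """Upper-cases a string. When documented via mkdocstrings with the extension
    above, the str hints are shown as Pixeltable column types instead."""
    return s.upper()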
@@ -1,6 +0,0 @@
- from pathlib import Path
-
-
- def get_templates_path() -> Path:
-     """Implementation of the 'mkdocstrings.python.templates' plugin for custom jinja templates."""
-     return Path(__file__).parent / "templates"
@@ -1,135 +0,0 @@
- {#- Template for Pixeltable UDFs. Cargo-culted (with modification) from _base/function.html.jinja. -#}
-
- {% block logs scoped %}
- {#- Logging block.
-
- This block can be used to log debug messages, deprecation messages, warnings, etc.
- -#}
- {{ log.debug("Rendering " + function.path) }}
- {% endblock logs %}
-
- {% import "language"|get_template as lang with context %}
- {#- Language module providing the `t` translation method. -#}
-
- <div class="doc doc-object doc-function">
- {% with obj = function, html_id = function.path %}
-
- {% if root %}
- {% set show_full_path = config.show_root_full_path %}
- {% set root_members = True %}
- {% elif root_members %}
- {% set show_full_path = config.show_root_members_full_path or config.show_object_full_path %}
- {% set root_members = False %}
- {% else %}
- {% set show_full_path = config.show_object_full_path %}
- {% endif %}
-
- {% set function_name = function.path if show_full_path else function.name %}
- {#- Brief or full function name depending on configuration. -#}
- {% set symbol_type = "udf" %}
- {#- Symbol type: method when parent is a class, function otherwise. -#}
-
- {% if not root or config.show_root_heading %}
- {% filter heading(
- heading_level,
- role="function",
- id=html_id,
- class="doc doc-heading",
- toc_label=(('<code class="doc-symbol doc-symbol-toc doc-symbol-' + symbol_type + '"></code>&nbsp;')|safe if config.show_symbol_type_toc else '') + function.name,
- ) %}
-
- {% block heading scoped %}
- {#- Heading block.
-
- This block renders the heading for the function.
- -#}
- {% if config.show_symbol_type_heading %}<code class="doc-symbol doc-symbol-heading doc-symbol-{{ symbol_type }}"></code>{% endif %}
- {% if config.separate_signature %}
- <span class="doc doc-object-name doc-function-name">{{ function_name }}</span>
- {% else %}
- {%+ filter highlight(language="python", inline=True) %}
- {{ function_name }}{% include "signature"|get_template with context %}
- {% endfilter %}
- {% endif %}
- {% endblock heading %}
-
- {% block labels scoped %}
- {#- Labels block.
-
- This block renders the labels for the function.
- -#}
- {% with labels = function.labels %}
- {% include "labels"|get_template with context %}
- {% endwith %}
- {% endblock labels %}
-
- {% endfilter %}
-
- {% block signature scoped %}
- {#- Signature block.
-
- This block renders the signature for the function.
- -#}
- {% if config.separate_signature %}
- {% filter format_signature(function, config.line_length, crossrefs=config.signature_crossrefs) %}
- {{ function.name }}
- {% endfilter %}
- {% endif %}
- {% endblock signature %}
-
- {% else %}
-
- {% if config.show_root_toc_entry %}
- {% filter heading(
- heading_level,
- role="function",
- id=html_id,
- toc_label=(('<code class="doc-symbol doc-symbol-toc doc-symbol-' + symbol_type + '"></code>&nbsp;')|safe if config.show_symbol_type_toc else '') + function.name,
- hidden=True,
- ) %}
- {% endfilter %}
- {% endif %}
- {% set heading_level = heading_level - 1 %}
- {% endif %}
-
- <div class="doc doc-contents {% if root %}first{% endif %}">
- {% block contents scoped %}
- {#- Contents block.
-
- This block renders the contents of the function.
- It contains other blocks that users can override.
- Overriding the contents block allows to rearrange the order of the blocks.
- -#}
- {% block docstring scoped %}
- {#- Docstring block.
-
- This block renders the docstring for the function.
- -#}
- {% with docstring_sections = function.docstring.parsed %}
- {% include "docstring"|get_template with context %}
- {% endwith %}
- {% endblock docstring %}
-
- {% block source scoped %}
- {#- Source block.
-
- This block renders the source code for the function.
- -#}
- {% if config.show_source and function.source %}
- <details class="quote">
- <summary>{{ lang.t("Source code in") }} <code>
- {%- if function.relative_filepath.is_absolute() -%}
- {{ function.relative_package_filepath }}
- {%- else -%}
- {{ function.relative_filepath }}
- {%- endif -%}
- </code></summary>
- {{ function.source|highlight(language="python", linestart=function.lineno, linenums=True) }}
- </details>
- {% endif %}
- {% endblock source %}
- {% endblock contents %}
- </div>
-
- {% endwith %}
- </div>
@@ -1,9 +0,0 @@
- import numpy as np
-
- import pixeltable as pxt
-
-
- # TODO This can go away once we have the ability to inline expr_udf's
- @pxt.expr_udf
- def clip_text_embed(txt: str) -> np.ndarray:
-     return pxt.functions.huggingface.clip_text(txt, model_id='openai/clip-vit-base-patch32') # type: ignore[return-value]
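
A minimal sketch of how an embedding expr_udf like the one above could be wired into an index, mirroring the add_embedding_index calls in the dump script earlier in this diff (the table and column names are placeholders):

import pixeltable as pxt

# Hypothetical table/column; clip_text_embed is the expr_udf defined above.
t = pxt.get_table('base_table')
t.add_embedding_index('c1', string_embed=clip_text_embed)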
@@ -1,55 +0,0 @@
- from typing import Callable, Optional
-
- from mypy import nodes
- from mypy.plugin import AnalyzeTypeContext, ClassDefContext, Plugin
- from mypy.plugins.common import add_method_to_class
- from mypy.types import AnyType, Type, TypeOfAny
-
- import pixeltable as pxt
-
-
- class PxtPlugin(Plugin):
-     __UDA_FULLNAME = f'{pxt.uda.__module__}.{pxt.uda.__name__}'
-     __TYPE_MAP = {
-         pxt.Json: 'typing.Any',
-         pxt.Array: 'numpy.ndarray',
-         pxt.Image: 'PIL.Image.Image',
-         pxt.Video: 'builtins.str',
-         pxt.Audio: 'builtins.str',
-         pxt.Document: 'builtins.str',
-     }
-     __FULLNAME_MAP = {
-         f'{k.__module__}.{k.__name__}': v
-         for k, v in __TYPE_MAP.items()
-     }
-
-     def get_type_analyze_hook(self, fullname: str) -> Optional[Callable[[AnalyzeTypeContext], Type]]:
-         if fullname in self.__FULLNAME_MAP:
-             subst_name = self.__FULLNAME_MAP[fullname]
-             return lambda ctx: pxt_hook(ctx, subst_name)
-         return None
-
-     def get_class_decorator_hook_2(self, fullname: str) -> Optional[Callable[[ClassDefContext], bool]]:
-         if fullname == self.__UDA_FULLNAME:
-             return pxt_decorator_hook
-         return None
-
- def plugin(version: str) -> type:
-     return PxtPlugin
-
- def pxt_hook(ctx: AnalyzeTypeContext, subst_name: str) -> Type:
-     if subst_name == 'typing.Any':
-         return AnyType(TypeOfAny.special_form)
-     return ctx.api.named_type(subst_name, [])
-
- def pxt_decorator_hook(ctx: ClassDefContext) -> bool:
-     arg = nodes.Argument(nodes.Var('fn'), AnyType(TypeOfAny.special_form), None, nodes.ARG_POS)
-     add_method_to_class(
-         ctx.api,
-         ctx.cls,
-         "to_sql",
-         args=[arg],
-         return_type=AnyType(TypeOfAny.special_form),
-         is_staticmethod=True,
-     )
-     return True
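
For context, a minimal sketch of what the plugin changes from the type checker's point of view (illustrative only; the mypy configuration that activates the plugin is not part of this diff): pxt.Image annotations are analyzed as PIL.Image.Image, and classes decorated with @pxt.uda gain a to_sql static method.

import pixeltable as pxt

def frame_size(img: pxt.Image) -> tuple:
    # With the plugin active, mypy resolves pxt.Image to PIL.Image.Image,
    # so attribute access such as .size type-checks without casts.
    return img.size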
@@ -1,3 +0,0 @@
- [mkdocstrings.python.templates]
- extension-name=pixeltable.tool.doc_plugins.mkdocstrings:get_templates_path
-
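
The removed file above is a packaging entry-point declaration. A minimal sketch of how such an entry point is looked up at runtime (standard library only; the selectable entry-points API shown requires Python 3.10+ or the importlib_metadata backport):

from importlib.metadata import entry_points

# Resolve the 'mkdocstrings.python.templates' group declared above; loading an
# entry point yields the callable it references (here, get_templates_path).
for ep in entry_points(group='mkdocstrings.python.templates'):
    print(ep.name, ep.load()())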