planar-0.9.3-py3-none-any.whl → planar-0.11.0-py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only.
Files changed (76)
  1. planar/ai/agent.py +2 -1
  2. planar/ai/agent_base.py +24 -5
  3. planar/ai/state.py +17 -0
  4. planar/app.py +18 -1
  5. planar/data/connection.py +108 -0
  6. planar/data/dataset.py +11 -104
  7. planar/data/utils.py +89 -0
  8. planar/db/alembic/env.py +25 -1
  9. planar/files/storage/azure_blob.py +1 -1
  10. planar/registry_items.py +2 -0
  11. planar/routers/dataset_router.py +213 -0
  12. planar/routers/info.py +79 -36
  13. planar/routers/models.py +1 -0
  14. planar/routers/workflow.py +2 -0
  15. planar/scaffold_templates/pyproject.toml.j2 +1 -1
  16. planar/security/authorization.py +31 -3
  17. planar/security/default_policies.cedar +25 -0
  18. planar/testing/fixtures.py +34 -1
  19. planar/testing/planar_test_client.py +1 -1
  20. planar/workflows/decorators.py +2 -1
  21. planar/workflows/wrappers.py +1 -0
  22. {planar-0.9.3.dist-info → planar-0.11.0.dist-info}/METADATA +9 -1
  23. {planar-0.9.3.dist-info → planar-0.11.0.dist-info}/RECORD +25 -72
  24. {planar-0.9.3.dist-info → planar-0.11.0.dist-info}/WHEEL +1 -1
  25. planar/ai/test_agent_serialization.py +0 -229
  26. planar/ai/test_agent_tool_step_display.py +0 -78
  27. planar/data/test_dataset.py +0 -354
  28. planar/files/storage/test_azure_blob.py +0 -435
  29. planar/files/storage/test_local_directory.py +0 -162
  30. planar/files/storage/test_s3.py +0 -299
  31. planar/files/test_files.py +0 -282
  32. planar/human/test_human.py +0 -385
  33. planar/logging/test_formatter.py +0 -327
  34. planar/modeling/mixins/test_auditable.py +0 -97
  35. planar/modeling/mixins/test_timestamp.py +0 -134
  36. planar/modeling/mixins/test_uuid_primary_key.py +0 -52
  37. planar/routers/test_agents_router.py +0 -174
  38. planar/routers/test_files_router.py +0 -49
  39. planar/routers/test_object_config_router.py +0 -367
  40. planar/routers/test_routes_security.py +0 -168
  41. planar/routers/test_rule_router.py +0 -470
  42. planar/routers/test_workflow_router.py +0 -539
  43. planar/rules/test_data/account_dormancy_management.json +0 -223
  44. planar/rules/test_data/airline_loyalty_points_calculator.json +0 -262
  45. planar/rules/test_data/applicant_risk_assessment.json +0 -435
  46. planar/rules/test_data/booking_fraud_detection.json +0 -407
  47. planar/rules/test_data/cellular_data_rollover_system.json +0 -258
  48. planar/rules/test_data/clinical_trial_eligibility_screener.json +0 -437
  49. planar/rules/test_data/customer_lifetime_value.json +0 -143
  50. planar/rules/test_data/import_duties_calculator.json +0 -289
  51. planar/rules/test_data/insurance_prior_authorization.json +0 -443
  52. planar/rules/test_data/online_check_in_eligibility_system.json +0 -254
  53. planar/rules/test_data/order_consolidation_system.json +0 -375
  54. planar/rules/test_data/portfolio_risk_monitor.json +0 -471
  55. planar/rules/test_data/supply_chain_risk.json +0 -253
  56. planar/rules/test_data/warehouse_cross_docking.json +0 -237
  57. planar/rules/test_rules.py +0 -1494
  58. planar/security/tests/test_auth_middleware.py +0 -162
  59. planar/security/tests/test_authorization_context.py +0 -78
  60. planar/security/tests/test_cedar_basics.py +0 -41
  61. planar/security/tests/test_cedar_policies.py +0 -158
  62. planar/security/tests/test_jwt_principal_context.py +0 -179
  63. planar/test_app.py +0 -142
  64. planar/test_cli.py +0 -394
  65. planar/test_config.py +0 -515
  66. planar/test_object_config.py +0 -527
  67. planar/test_object_registry.py +0 -14
  68. planar/test_sqlalchemy.py +0 -193
  69. planar/test_utils.py +0 -105
  70. planar/testing/test_memory_storage.py +0 -143
  71. planar/workflows/test_concurrency_detection.py +0 -120
  72. planar/workflows/test_lock_timeout.py +0 -140
  73. planar/workflows/test_serialization.py +0 -1203
  74. planar/workflows/test_suspend_deserialization.py +0 -231
  75. planar/workflows/test_workflow.py +0 -2005
  76. {planar-0.9.3.dist-info → planar-0.11.0.dist-info}/entry_points.txt +0 -0
planar/files/storage/test_s3.py
@@ -1,299 +0,0 @@
- import asyncio
- import os
- import uuid
- from contextlib import asynccontextmanager
-
- import boto3
- import botocore
- import botocore.client
- import botocore.exceptions
- import pytest
-
- from planar.files.storage.s3 import S3Storage
-
- pytestmark = pytest.mark.skipif(
-     os.getenv("PLANAR_TEST_S3_STORAGE", "0") != "1",
-     reason="S3 tests must be enabled via PLANAR_TEST_S3_STORAGE env var",
- )
-
- # --- Configuration for LocalStack/S3 Compatible Service ---
-
- S3_PORT = 4566
- # LocalStack S3 endpoint
- S3_ENDPOINT_URL = f"http://127.0.0.1:{S3_PORT}"
- # Dummy credentials for LocalStack (usually not strictly required)
- AWS_ACCESS_KEY_ID = "test"
- AWS_SECRET_ACCESS_KEY = "test"
- AWS_REGION = "us-east-1"
- # Generate a unique bucket name for each test run session
- SESSION_BUCKET_NAME = f"planar-test-bucket-{uuid.uuid4()}"
-
-
- @pytest.fixture()
- def s3_boto_client():  # Synchronous client
-     """Provides a boto3 S3 client for direct interaction (e.g., bucket creation)."""
-     client = boto3.client(
-         "s3",
-         endpoint_url=S3_ENDPOINT_URL,
-         aws_access_key_id=AWS_ACCESS_KEY_ID,
-         aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
-         region_name=AWS_REGION,
-         config=botocore.client.Config(signature_version="s3v4"),
-     )
-     return client
-
-
- @pytest.fixture(autouse=True)
- async def ensure_s3_bucket(s3_boto_client):
-     """
-     Ensures the S3 bucket exists before tests run.
-     This runs automatically due to autouse=True.
-     """
-     print(f"Attempting to create bucket: {SESSION_BUCKET_NAME} at {S3_ENDPOINT_URL}")
-
-     create_kwargs = {
-         "Bucket": SESSION_BUCKET_NAME,
-     }
-
-     try:
-         await asyncio.to_thread(s3_boto_client.create_bucket, **create_kwargs)
-         print(f"Bucket {SESSION_BUCKET_NAME} created or confirmed existing.")
-     except botocore.exceptions.ClientError as e:
-         error_code = e.response.get("Error", {}).get("Code")
-         if error_code in ("BucketAlreadyOwnedByYou", "BucketAlreadyExists"):
-             print(f"Bucket {SESSION_BUCKET_NAME} already exists.")
-         else:
-             pytest.fail(
-                 f"Failed to create S3 bucket {SESSION_BUCKET_NAME} "
-                 f"at {S3_ENDPOINT_URL}: {e}. Is LocalStack running?"
-             )
-     except Exception as e:
-         pytest.fail(
-             f"An unexpected error occurred during bucket creation for {SESSION_BUCKET_NAME}: {e}"
-         )
-
-     yield  # Tests run here
-
-
- @pytest.fixture
- async def s3_storage() -> S3Storage:
-     """Provides an instance of S3Storage configured for the test bucket."""
-     storage_instance = S3Storage(
-         bucket_name=SESSION_BUCKET_NAME,
-         endpoint_url=S3_ENDPOINT_URL,
-         access_key_id=AWS_ACCESS_KEY_ID,
-         secret_access_key=AWS_SECRET_ACCESS_KEY,
-         region=AWS_REGION,
-         presigned_url_ttl=60,
-     )
-     return storage_instance
-
-
- @asynccontextmanager
- async def cleanup_s3_object(storage: S3Storage, ref: str):
-     """Context manager to ensure an S3 object is deleted after use."""
-     try:
-         yield
-     finally:
-         try:
-             print(f"Cleaning up S3 object: {ref}")
-             await storage.delete(ref)
-         except FileNotFoundError:
-             print(f"Cleanup: S3 object {ref} already deleted or not found.")
-         except Exception as e:
-             print(f"Warning: Failed to cleanup S3 object {ref}: {e}")
-
-
- # --- Test Cases ---
-
-
- async def test_put_get_bytes(s3_storage: S3Storage):
-     """Test storing and retrieving raw bytes."""
-     test_data = b"some binary data \x00\xff for s3"
-     mime_type = "application/octet-stream"
-     ref = None
-     try:
-         ref = await s3_storage.put_bytes(test_data, mime_type=mime_type)
-         assert isinstance(ref, str)
-         # S3 keys don't have to be UUIDs, but our implementation generates them
-         try:
-             uuid.UUID(ref)
-         except ValueError:
-             pytest.fail(f"Returned ref '{ref}' is not a valid UUID string")
-
-         async with cleanup_s3_object(s3_storage, ref):
-             retrieved_data, retrieved_mime = await s3_storage.get_bytes(ref)
-
-             assert retrieved_data == test_data
-             # S3 might add charset or other params, check starts with
-             assert retrieved_mime is not None
-             assert retrieved_mime.startswith(mime_type)
-
-             # Check external URL (should be a presigned URL)
-             url = await s3_storage.external_url(ref)
-             assert url is not None
-             base_expected_url = f"{S3_ENDPOINT_URL}/{SESSION_BUCKET_NAME}/{ref}"
-             assert url.startswith(base_expected_url)
-             assert "X-Amz-Signature" in url
-             assert "X-Amz-Expires" in url
-
-     except Exception as e:
-         if ref:
-             await cleanup_s3_object(s3_storage, ref).__aexit__(None, None, None)
-         raise e
-
-
- async def test_put_get_string(s3_storage: S3Storage):
-     """Test storing and retrieving a string."""
-     test_string = "Hello, S3! This is a test string with Unicode: éàçü."
-     mime_type = "text/plain"
-     encoding = "utf-16"
-     ref = None
-     try:
-         # Store with explicit encoding and mime type
-         ref = await s3_storage.put_string(
-             test_string, encoding=encoding, mime_type=mime_type
-         )
-         expected_mime_type = f"{mime_type}; charset={encoding}"
-
-         async with cleanup_s3_object(s3_storage, ref):
-             retrieved_string, retrieved_mime = await s3_storage.get_string(
-                 ref, encoding=encoding
-             )
-
-             assert retrieved_string == test_string
-             assert retrieved_mime == expected_mime_type
-
-     except Exception as e:
-         if ref:
-             await cleanup_s3_object(s3_storage, ref).__aexit__(None, None, None)
-         raise e
-
-     # Test default encoding (utf-8)
-     ref_utf8 = None
-     try:
-         ref_utf8 = await s3_storage.put_string(test_string, mime_type="text/html")
-         expected_mime_utf8 = "text/html; charset=utf-8"
-
-         async with cleanup_s3_object(s3_storage, ref_utf8):
-             retrieved_string_utf8, retrieved_mime_utf8 = await s3_storage.get_string(
-                 ref_utf8
-             )
-             assert retrieved_string_utf8 == test_string
-             assert retrieved_mime_utf8 == expected_mime_utf8
-     except Exception as e:
-         if ref_utf8:
-             await cleanup_s3_object(s3_storage, ref_utf8).__aexit__(None, None, None)
-         raise e
-
-
- async def test_put_get_stream(s3_storage: S3Storage):
-     """Test storing data from an async generator stream."""
-     test_chunks = [b"s3_chunk1 ", b"s3_chunk2 ", b"s3_chunk3"]
-     full_data = b"".join(test_chunks)
-     mime_type = "image/jpeg"  # Different mime type for variety
-     ref = None
-
-     async def _test_stream():
-         for chunk in test_chunks:
-             yield chunk
-             await asyncio.sleep(0.01)  # Simulate async work
-
-     try:
-         ref = await s3_storage.put(_test_stream(), mime_type=mime_type)
-
-         async with cleanup_s3_object(s3_storage, ref):
-             stream, retrieved_mime = await s3_storage.get(ref)
-             retrieved_data = b""
-             async for chunk in stream:
-                 retrieved_data += chunk
-
-             assert retrieved_data == full_data
-             assert retrieved_mime is not None
-             assert retrieved_mime.startswith(mime_type)
-     except Exception as e:
-         if ref:
-             await cleanup_s3_object(s3_storage, ref).__aexit__(None, None, None)
-         raise e
-
-
- async def test_put_no_mime_type(s3_storage: S3Storage):
-     """Test storing data without providing a mime type."""
-     test_data = b"s3 data without mime"
-     ref = None
-     try:
-         ref = await s3_storage.put_bytes(test_data)
-         async with cleanup_s3_object(s3_storage, ref):
-             retrieved_data, retrieved_mime = await s3_storage.get_bytes(ref)
-
-             assert retrieved_data == test_data
-             # S3 might assign a default mime type (like binary/octet-stream) or none
-             # Depending on the S3 provider, this might be None or a default
-             print(f"Retrieved mime type (no mime put): {retrieved_mime}")
-             # assert retrieved_mime is None or retrieved_mime == 'binary/octet-stream'
-             # For now, let's just check the data
-     except Exception as e:
-         if ref:
-             await cleanup_s3_object(s3_storage, ref).__aexit__(None, None, None)
-         raise e
-
-
- async def test_delete(s3_storage: S3Storage):
-     """Test deleting stored data."""
-     ref = await s3_storage.put_bytes(b"to be deleted from s3", mime_type="text/plain")
-
-     # Verify object exists before delete (optional, get raises if not found)
-     try:
-         _, _ = await s3_storage.get(ref)
-     except FileNotFoundError:
-         pytest.fail(f"Object {ref} should exist before deletion but was not found.")
-
-     # Delete the object
-     await s3_storage.delete(ref)
-
-     # Verify object is gone after delete
-     with pytest.raises(FileNotFoundError):
-         await s3_storage.get(ref)
-
-     # Deleting again should be idempotent (no error)
-     try:
-         await s3_storage.delete(ref)
-     except Exception as e:
-         pytest.fail(f"Deleting already deleted ref raised an exception: {e}")
-
-
- async def test_get_non_existent(s3_storage: S3Storage):
-     """Test getting a reference that does not exist."""
-     non_existent_ref = str(uuid.uuid4())
-     with pytest.raises(FileNotFoundError):
-         await s3_storage.get(non_existent_ref)
-
-
- async def test_delete_non_existent(s3_storage: S3Storage):
-     """Test deleting a reference that does not exist (should not raise error)."""
-     non_existent_ref = str(uuid.uuid4())
-     try:
-         await s3_storage.delete(non_existent_ref)
-     except Exception as e:
-         pytest.fail(f"Deleting non-existent ref raised an exception: {e}")
-
-
- async def test_external_url(s3_storage: S3Storage):
-     """Test that external_url returns a valid-looking presigned S3 object URL."""
-     ref = None
-     try:
-         ref = await s3_storage.put_bytes(b"some data for url test")
-         async with cleanup_s3_object(s3_storage, ref):
-             url = await s3_storage.external_url(ref)
-             assert url is not None
-             base_expected_url = f"{S3_ENDPOINT_URL}/{SESSION_BUCKET_NAME}/{ref}"
-             assert url.startswith(base_expected_url)
-             assert "X-Amz-Algorithm" in url
-             assert "X-Amz-Credential" in url
-             assert "X-Amz-Date" in url
-             assert "X-Amz-Expires" in url
-             assert "X-Amz-Signature" in url
-     except Exception as e:
-         if ref:
-             await cleanup_s3_object(s3_storage, ref).__aexit__(None, None, None)
-         raise e
planar/files/test_files.py
@@ -1,282 +0,0 @@
- """
- Test file handling in Planar workflows.
- """
-
- import uuid
- from pathlib import Path
- from typing import AsyncGenerator, cast
-
- import pytest
- from pydantic import BaseModel, Field
- from sqlmodel.ext.asyncio.session import AsyncSession
-
- from planar.app import PlanarApp
- from planar.config import sqlite_config
- from planar.files import PlanarFile
- from planar.files.models import PlanarFileMetadata
- from planar.files.storage.base import Storage
- from planar.workflows.decorators import workflow
- from planar.workflows.execution import execute
- from planar.workflows.models import Workflow
-
-
- @pytest.fixture(name="app")
- def app_fixture(tmp_db_path: str):
-     app = PlanarApp(
-         config=sqlite_config(tmp_db_path),
-         title="Planar app for testing file workflows",
-         description="Testing",
-     )
-     yield app
-
-
- @pytest.fixture
- async def planar_file(
-     storage: Storage,
-     session: AsyncSession,  # Change type hint
- ) -> PlanarFile:
-     """Create a PlanarFile instance for testing."""
-     # Store test content
-     test_data = b"Test file content for workflow"
-     mime_type = "text/plain"
-
-     # Store the file and get a reference
-     storage_ref = await storage.put_bytes(test_data, mime_type=mime_type)
-
-     # Create and store the file metadata
-     file_metadata = PlanarFileMetadata(
-         filename="test_file.txt",
-         content_type=mime_type,
-         size=len(test_data),
-         storage_ref=storage_ref,
-     )
-     session.add(file_metadata)
-     await session.commit()
-     await session.refresh(file_metadata)
-
-     # Return a PlanarFile reference (not the full metadata)
-     return PlanarFile(
-         id=file_metadata.id,
-         filename=file_metadata.filename,
-         content_type=file_metadata.content_type,
-         size=file_metadata.size,
-     )
-
-
- # Define models for workflow testing
- class FileProcessingInput(BaseModel):
-     """Input model for a workflow that processes a file."""
-
-     title: str = Field(description="Title of the processing job")
-     file: PlanarFile = Field(description="The file to process")
-     max_chars: int = Field(description="Maximum characters to extract", default=100)
-
-
- class FileProcessingResult(BaseModel):
-     """Result model for a file processing workflow."""
-
-     title: str = Field(description="Title of the processing job")
-     characters: int = Field(description="Number of characters in the file")
-     content_preview: str = Field(description="Preview of the file content")
-     file_id: uuid.UUID = Field(description="ID of the processed file")
-
-
- async def test_workflow_with_planar_file(
-     session: AsyncSession,
-     planar_file: PlanarFile,
- ):
-     """Test that a workflow can accept and process a PlanarFile input."""
-
-     @workflow()
-     async def file_processing_workflow(input_data: PlanarFile):
-         file_content = await input_data.get_content()
-         char_count = len(file_content)
-         content_str = file_content.decode("utf-8")
-         preview = content_str[:100]
-
-         # Return structured result
-         return FileProcessingResult(
-             title="Test File Processing",
-             characters=char_count,
-             content_preview=preview,
-             file_id=input_data.id,
-         )
-
-     wf = await file_processing_workflow.start(planar_file)
-     result = await execute(wf)
-
-     # Verify the result
-     assert isinstance(result, FileProcessingResult)
-     assert result.title == "Test File Processing"
-     assert result.characters == len(b"Test file content for workflow")
-     assert result.content_preview == "Test file content for workflow"
-     assert result.file_id == planar_file.id
-
-     # Verify the workflow completed successfully
-     updated_wf = await session.get(Workflow, wf.id)
-     assert updated_wf is not None
-     assert updated_wf.status == "succeeded"
-     assert updated_wf.args == [planar_file.model_dump(mode="json")]
-
-     # Verify that the result stored in the workflow is correct
-     workflow_result = cast(dict, updated_wf.result)
-     assert workflow_result["title"] == "Test File Processing"
-     assert workflow_result["characters"] == len(b"Test file content for workflow")
-     assert workflow_result["content_preview"] == "Test file content for workflow"
-     assert workflow_result["file_id"] == str(planar_file.id)
-
-
- TEST_BYTES = b"Test data for upload"
- TEST_FILENAME = "upload_test.txt"
- TEST_CONTENT_TYPE = "text/plain"
- TEST_SIZE = len(TEST_BYTES)
- DEFAULT_CONTENT_TYPE = "application/octet-stream"
-
-
- async def assert_upload_success(
-     uploaded_file: PlanarFile,
-     expected_filename: str,
-     expected_content: bytes,
-     expected_content_type: str,
-     expected_size: int,
-     session: AsyncSession,
- ):
-     """Helper function to assert successful file upload."""
-     assert isinstance(uploaded_file, PlanarFile)
-     assert uploaded_file.filename == expected_filename
-     assert uploaded_file.content_type == expected_content_type
-     assert uploaded_file.size == expected_size
-     assert isinstance(uploaded_file.id, uuid.UUID)
-
-     # Verify database record
-     metadata = await session.get(PlanarFileMetadata, uploaded_file.id)
-     assert metadata is not None
-     assert metadata.filename == expected_filename
-     assert metadata.content_type == expected_content_type
-     assert metadata.size == expected_size
-     assert metadata.storage_ref is not None
-
-     # Verify stored content
-     retrieved_content = await uploaded_file.get_content()
-     assert retrieved_content == expected_content
-
-
- async def test_planar_file_upload_bytes(storage: Storage, session: AsyncSession):
-     """Test PlanarFile.upload with bytes content."""
-     uploaded_file = await PlanarFile.upload(
-         content=TEST_BYTES,
-         filename=TEST_FILENAME,
-         content_type="text/plain",
-         size=100,
-     )
-     await assert_upload_success(
-         uploaded_file,
-         TEST_FILENAME,
-         TEST_BYTES,
-         "text/plain",
-         100,
-         session,
-     )
-
-
- async def test_planar_file_upload_bytes_defaults(
-     storage: Storage, session: AsyncSession
- ):
-     """Test PlanarFile.upload with bytes content using default size/type."""
-     uploaded_file = await PlanarFile.upload(content=TEST_BYTES, filename=TEST_FILENAME)
-     await assert_upload_success(
-         uploaded_file,
-         TEST_FILENAME,
-         TEST_BYTES,
-         DEFAULT_CONTENT_TYPE,  # Default type expected
-         TEST_SIZE,  # Size should be calculated
-         session,
-     )
-
-
- async def test_planar_file_upload_path(
-     storage: Storage, session: AsyncSession, tmp_path: Path
- ):
-     """Test PlanarFile.upload with Path content."""
-     test_file = tmp_path / TEST_FILENAME
-     test_file.write_bytes(TEST_BYTES)
-
-     uploaded_file = await PlanarFile.upload(
-         content=test_file,
-         filename=TEST_FILENAME,
-         content_type=TEST_CONTENT_TYPE,
-         size=TEST_SIZE,
-     )
-     await assert_upload_success(
-         uploaded_file,
-         TEST_FILENAME,
-         TEST_BYTES,
-         TEST_CONTENT_TYPE,
-         TEST_SIZE,
-         session,
-     )
-
-
- async def test_planar_file_upload_path_defaults(
-     storage: Storage, session: AsyncSession, tmp_path: Path
- ):
-     """Test PlanarFile.upload with Path content using default/inferred size/type."""
-     test_file = tmp_path / "another_test.json"  # Use different extension for inference
-     test_data = b'{"key": "value"}'
-     test_file.write_bytes(test_data)
-
-     uploaded_file = await PlanarFile.upload(
-         content=test_file,
-         filename="data.json",  # Ensure filename matches for inference
-     )
-     await assert_upload_success(
-         uploaded_file,
-         "data.json",
-         test_data,
-         "application/json",  # Inferred type expected
-         len(test_data),  # Size should be calculated
-         session,
-     )
-
-
- async def simple_byte_stream(
-     data: bytes, chunk_size: int = 10
- ) -> AsyncGenerator[bytes, None]:
-     """Helper async generator for stream tests."""
-     for i in range(0, len(data), chunk_size):
-         yield data[i : i + chunk_size]
-
-
- async def test_planar_file_upload_stream(storage: Storage, session: AsyncSession):
-     """Test PlanarFile.upload with AsyncGenerator content."""
-     uploaded_file = await PlanarFile.upload(
-         content=simple_byte_stream(TEST_BYTES),
-         filename=TEST_FILENAME,
-         content_type=TEST_CONTENT_TYPE,
-         size=TEST_SIZE,
-     )
-     await assert_upload_success(
-         uploaded_file,
-         TEST_FILENAME,
-         TEST_BYTES,
-         TEST_CONTENT_TYPE,
-         TEST_SIZE,
-         session,
-     )
-
-
- async def test_planar_file_upload_stream_defaults(
-     storage: Storage, session: AsyncSession
- ):
-     """Test PlanarFile.upload with AsyncGenerator content using default size/type."""
-     uploaded_file = await PlanarFile.upload(
-         content=simple_byte_stream(TEST_BYTES), filename=TEST_FILENAME
-     )
-     await assert_upload_success(
-         uploaded_file,
-         TEST_FILENAME,
-         TEST_BYTES,
-         DEFAULT_CONTENT_TYPE,  # Default type expected
-         -1,  # Size should be unknown (-1)
-         session,
-     )