parallex 0.3.0__tar.gz → 0.3.2__tar.gz
- {parallex-0.3.0 → parallex-0.3.2}/PKG-INFO +1 -1
- {parallex-0.3.0 → parallex-0.3.2}/parallex/ai/open_ai_client.py +1 -0
- {parallex-0.3.0 → parallex-0.3.2}/parallex/ai/uploader.py +2 -2
- {parallex-0.3.0 → parallex-0.3.2}/parallex/parallex.py +22 -16
- {parallex-0.3.0 → parallex-0.3.2}/pyproject.toml +1 -1
- {parallex-0.3.0 → parallex-0.3.2}/LICENSE +0 -0
- {parallex-0.3.0 → parallex-0.3.2}/README.md +0 -0
- {parallex-0.3.0 → parallex-0.3.2}/parallex/__init__.py +0 -0
- {parallex-0.3.0 → parallex-0.3.2}/parallex/ai/batch_processor.py +0 -0
- {parallex-0.3.0 → parallex-0.3.2}/parallex/ai/output_processor.py +0 -0
- {parallex-0.3.0 → parallex-0.3.2}/parallex/file_management/converter.py +0 -0
- {parallex-0.3.0 → parallex-0.3.2}/parallex/file_management/file_finder.py +0 -0
- {parallex-0.3.0 → parallex-0.3.2}/parallex/file_management/remote_file_handler.py +0 -0
- {parallex-0.3.0 → parallex-0.3.2}/parallex/file_management/utils.py +0 -0
- {parallex-0.3.0 → parallex-0.3.2}/parallex/models/batch_file.py +0 -0
- {parallex-0.3.0 → parallex-0.3.2}/parallex/models/image_file.py +0 -0
- {parallex-0.3.0 → parallex-0.3.2}/parallex/models/page_response.py +0 -0
- {parallex-0.3.0 → parallex-0.3.2}/parallex/models/parallex_callable_output.py +0 -0
- {parallex-0.3.0 → parallex-0.3.2}/parallex/models/parallex_prompts_callable_output.py +0 -0
- {parallex-0.3.0 → parallex-0.3.2}/parallex/models/prompt_response.py +0 -0
- {parallex-0.3.0 → parallex-0.3.2}/parallex/models/raw_file.py +0 -0
- {parallex-0.3.0 → parallex-0.3.2}/parallex/models/upload_batch.py +0 -0
- {parallex-0.3.0 → parallex-0.3.2}/parallex/utils/constants.py +0 -0
- {parallex-0.3.0 → parallex-0.3.2}/parallex/utils/logger.py +0 -0
parallex/ai/uploader.py

@@ -76,8 +76,8 @@ async def upload_prompts_for_processing(
         jsonl = _simple_jsonl_format(prompt_custom_id, prompt)
         with open(upload_file_location, "a") as jsonl_file:
             jsonl_file.write(json.dumps(jsonl) + "\n")
-
-
+    batch_file = await _create_batch_file(client, trace_id, upload_file_location)
+    batch_files.append(batch_file)
     return batch_files

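Note: the hunk above builds the batch input by appending one JSON object per line to the upload file before registering it with the client. A minimal sketch of that JSONL append pattern (the record layout here is illustrative, not the package's `_simple_jsonl_format` output):

    import json

    def append_jsonl(path: str, record: dict) -> None:
        # One JSON document per line; batch endpoints generally expect this JSONL layout.
        with open(path, "a") as jsonl_file:
            jsonl_file.write(json.dumps(record) + "\n")

    append_jsonl("prompts.jsonl", {"custom_id": "trace-0", "body": {"prompt": "Summarize page 1"}})
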
parallex/parallex.py

@@ -46,6 +46,7 @@ async def parallex(
         )
     except Exception as e:
         logger.error(f"Error occurred: {e}")
+        raise e
     finally:
         await _delete_associated_files(open_ai_client, remote_file_handler)

@@ -69,6 +70,7 @@ async def parallex_simple_prompts(
         )
     except Exception as e:
         logger.error(f"Error occurred: {e}")
+        raise e
     finally:
         await _delete_associated_files(open_ai_client, remote_file_handler)

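Both public entry points (`parallex` and `parallex_simple_prompts`) now re-raise after logging, so failures propagate to the caller while the `finally` cleanup still deletes the associated remote files. A standalone illustration of that control flow (not package code):

    import asyncio

    async def cleanup() -> None:
        print("cleanup runs even though the error propagates")

    async def run() -> None:
        try:
            raise RuntimeError("batch failed")
        except Exception as e:
            print(f"Error occurred: {e}")
            raise e          # propagate instead of silently swallowing the error
        finally:
            await cleanup()  # finally executes before the exception leaves run()

    # asyncio.run(run()) prints both messages and then raises RuntimeError.
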
@@ -101,30 +103,21 @@ async def _prompts_execute(
         start_batch_tasks.append(batch_task)
     batch_jobs = await asyncio.gather(*start_batch_tasks)

+    process_semaphore = asyncio.Semaphore(concurrency)
     prompt_tasks = []
     for batch in batch_jobs:
         logger.info(
             f"waiting for batch to complete - {batch.id} - {batch.trace_id}"
         )
-
-        await
-        )
-        prompt_tasks.append(page_task)
-
-    output_file_ids = await asyncio.gather(*prompt_tasks)
-
-    prompts_output = []
-    for output_file_id in output_file_ids:
-        logger.info(f"batch completed - {batch.id} - {batch.trace_id}")
-        prompts_output.append(
-            await process_prompts_output(
-                client=open_ai_client, output_file_id=output_file_id
-            )
+        prompt_task = asyncio.create_task(
+            await _wait_and_create_prompt_responses(batch=batch, client=open_ai_client, semaphore=process_semaphore)
         )
+        prompt_tasks.append(prompt_task)
+    prompt_response_groups = await asyncio.gather(*prompt_tasks)

-
+    flat_responses = [response for batch in prompt_response_groups for response in batch]

-    sorted_responses = sorted(
+    sorted_responses = sorted(flat_responses, key=lambda x: x.prompt_index)
     callable_output = ParallexPromptsCallableOutput(
         original_prompts=prompts,
         trace_id=trace_id,
@@ -211,6 +204,19 @@ async def _wait_and_create_pages(
     return page_responses


+async def _wait_and_create_prompt_responses(
+    batch: UploadBatch, client: OpenAIClient, semaphore: asyncio.Semaphore
+):
+    async with semaphore:
+        logger.info(f"waiting for batch to complete - {batch.id} - {batch.trace_id}")
+        output_file_id = await wait_for_batch_completion(client=client, batch=batch)
+        logger.info(f"batch completed - {batch.id} - {batch.trace_id}")
+        prompt_responses = await process_prompts_output(
+            client=client, output_file_id=output_file_id
+        )
+        return prompt_responses
+
+
 async def _create_batch_jobs(
     batch_file: BatchFile,
     client: OpenAIClient,
pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "parallex"
-version = "0.3.0"
+version = "0.3.2"
 description = "PDF to markdown using Azure OpenAI batch processing"
 authors = ["Jeff Hostetler <jeff@summed.ai>", "Kevin Bao <kevin@summed.ai>"]
 repository = "https://github.com/Summed-AI/parallex"