datasette-libfec 0.0.1a4__py3-none-any.whl → 0.0.1a5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- datasette_libfec/__init__.py +5 -1
- datasette_libfec/libfec_client.py +225 -11
- datasette_libfec/libfec_export_rpc_client.py +358 -0
- datasette_libfec/libfec_rpc_client.py +335 -0
- datasette_libfec/libfec_search_rpc_client.py +308 -0
- datasette_libfec/manifest.json +84 -2
- datasette_libfec/page_data.py +87 -0
- datasette_libfec/router.py +3 -0
- datasette_libfec/routes_export.py +125 -0
- datasette_libfec/routes_exports.py +220 -0
- datasette_libfec/routes_pages.py +336 -0
- datasette_libfec/routes_rss.py +411 -0
- datasette_libfec/routes_search.py +77 -0
- datasette_libfec/state.py +6 -0
- datasette_libfec/static/gen/candidate-BEqDafKu.css +1 -0
- datasette_libfec/static/gen/candidate-tqxa29G-.js +3 -0
- datasette_libfec/static/gen/class-C5DDKbJD.js +2 -0
- datasette_libfec/static/gen/committee-Bmki9iKb.css +1 -0
- datasette_libfec/static/gen/committee-DY1GmylW.js +2 -0
- datasette_libfec/static/gen/contest-BbYrzKRg.js +1 -0
- datasette_libfec/static/gen/contest-D4Fj7kGA.css +1 -0
- datasette_libfec/static/gen/each-DkfQbqzj.js +1 -0
- datasette_libfec/static/gen/filing_detail-Ba6_iQwV.css +1 -0
- datasette_libfec/static/gen/filing_detail-D2ib3OM6.js +26 -0
- datasette_libfec/static/gen/index-AHqus2fd.js +9 -0
- datasette_libfec/static/gen/index-client-CDwZ_Ixa.js +1 -0
- datasette_libfec/static/gen/index-jv9_YIKt.css +1 -0
- datasette_libfec/static/gen/load-AXKAVXVj.js +1 -0
- datasette_libfec/templates/libfec_base.html +12 -0
- {datasette_libfec-0.0.1a4.dist-info → datasette_libfec-0.0.1a5.dist-info}/METADATA +2 -2
- datasette_libfec-0.0.1a5.dist-info/RECORD +37 -0
- {datasette_libfec-0.0.1a4.dist-info → datasette_libfec-0.0.1a5.dist-info}/top_level.txt +1 -0
- scripts/typegen-pagedata.py +6 -0
- datasette_libfec/routes.py +0 -189
- datasette_libfec/static/gen/index-6cjSv2YC.css +0 -1
- datasette_libfec/static/gen/index-CaTQMY-X.js +0 -1
- datasette_libfec/templates/libfec.html +0 -14
- datasette_libfec-0.0.1a4.dist-info/RECORD +0 -14
- {datasette_libfec-0.0.1a4.dist-info → datasette_libfec-0.0.1a5.dist-info}/WHEEL +0 -0
- {datasette_libfec-0.0.1a4.dist-info → datasette_libfec-0.0.1a5.dist-info}/entry_points.txt +0 -0
- {datasette_libfec-0.0.1a4.dist-info → datasette_libfec-0.0.1a5.dist-info}/licenses/LICENSE +0 -0
datasette_libfec/manifest.json
CHANGED
|
@@ -1,11 +1,93 @@
|
|
|
1
1
|
{
|
|
2
|
+
"_class-C5DDKbJD.js": {
|
|
3
|
+
"file": "static/gen/class-C5DDKbJD.js",
|
|
4
|
+
"name": "class"
|
|
5
|
+
},
|
|
6
|
+
"_each-DkfQbqzj.js": {
|
|
7
|
+
"file": "static/gen/each-DkfQbqzj.js",
|
|
8
|
+
"name": "each",
|
|
9
|
+
"imports": [
|
|
10
|
+
"_load-AXKAVXVj.js"
|
|
11
|
+
]
|
|
12
|
+
},
|
|
13
|
+
"_index-client-CDwZ_Ixa.js": {
|
|
14
|
+
"file": "static/gen/index-client-CDwZ_Ixa.js",
|
|
15
|
+
"name": "index-client",
|
|
16
|
+
"imports": [
|
|
17
|
+
"_load-AXKAVXVj.js"
|
|
18
|
+
]
|
|
19
|
+
},
|
|
20
|
+
"_load-AXKAVXVj.js": {
|
|
21
|
+
"file": "static/gen/load-AXKAVXVj.js",
|
|
22
|
+
"name": "load"
|
|
23
|
+
},
|
|
24
|
+
"src/candidate_view.ts": {
|
|
25
|
+
"file": "static/gen/candidate-tqxa29G-.js",
|
|
26
|
+
"name": "candidate",
|
|
27
|
+
"src": "src/candidate_view.ts",
|
|
28
|
+
"isEntry": true,
|
|
29
|
+
"imports": [
|
|
30
|
+
"_load-AXKAVXVj.js",
|
|
31
|
+
"_each-DkfQbqzj.js",
|
|
32
|
+
"_class-C5DDKbJD.js"
|
|
33
|
+
],
|
|
34
|
+
"css": [
|
|
35
|
+
"static/gen/candidate-BEqDafKu.css"
|
|
36
|
+
]
|
|
37
|
+
},
|
|
38
|
+
"src/committee_view.ts": {
|
|
39
|
+
"file": "static/gen/committee-DY1GmylW.js",
|
|
40
|
+
"name": "committee",
|
|
41
|
+
"src": "src/committee_view.ts",
|
|
42
|
+
"isEntry": true,
|
|
43
|
+
"imports": [
|
|
44
|
+
"_load-AXKAVXVj.js",
|
|
45
|
+
"_each-DkfQbqzj.js"
|
|
46
|
+
],
|
|
47
|
+
"css": [
|
|
48
|
+
"static/gen/committee-Bmki9iKb.css"
|
|
49
|
+
]
|
|
50
|
+
},
|
|
51
|
+
"src/contest_view.ts": {
|
|
52
|
+
"file": "static/gen/contest-BbYrzKRg.js",
|
|
53
|
+
"name": "contest",
|
|
54
|
+
"src": "src/contest_view.ts",
|
|
55
|
+
"isEntry": true,
|
|
56
|
+
"imports": [
|
|
57
|
+
"_load-AXKAVXVj.js",
|
|
58
|
+
"_each-DkfQbqzj.js",
|
|
59
|
+
"_class-C5DDKbJD.js"
|
|
60
|
+
],
|
|
61
|
+
"css": [
|
|
62
|
+
"static/gen/contest-D4Fj7kGA.css"
|
|
63
|
+
]
|
|
64
|
+
},
|
|
65
|
+
"src/filing_detail_view.ts": {
|
|
66
|
+
"file": "static/gen/filing_detail-D2ib3OM6.js",
|
|
67
|
+
"name": "filing_detail",
|
|
68
|
+
"src": "src/filing_detail_view.ts",
|
|
69
|
+
"isEntry": true,
|
|
70
|
+
"imports": [
|
|
71
|
+
"_load-AXKAVXVj.js",
|
|
72
|
+
"_index-client-CDwZ_Ixa.js"
|
|
73
|
+
],
|
|
74
|
+
"css": [
|
|
75
|
+
"static/gen/filing_detail-Ba6_iQwV.css"
|
|
76
|
+
]
|
|
77
|
+
},
|
|
2
78
|
"src/index_view.ts": {
|
|
3
|
-
"file": "static/gen/index-
|
|
79
|
+
"file": "static/gen/index-AHqus2fd.js",
|
|
4
80
|
"name": "index",
|
|
5
81
|
"src": "src/index_view.ts",
|
|
6
82
|
"isEntry": true,
|
|
83
|
+
"imports": [
|
|
84
|
+
"_load-AXKAVXVj.js",
|
|
85
|
+
"_index-client-CDwZ_Ixa.js",
|
|
86
|
+
"_each-DkfQbqzj.js",
|
|
87
|
+
"_class-C5DDKbJD.js"
|
|
88
|
+
],
|
|
7
89
|
"css": [
|
|
8
|
-
"static/gen/index-
|
|
90
|
+
"static/gen/index-jv9_YIKt.css"
|
|
9
91
|
]
|
|
10
92
|
}
|
|
11
93
|
}
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
from pydantic import BaseModel
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class Candidate(BaseModel):
    """An FEC candidate record; every field except the ID may be absent."""

    candidate_id: str
    name: str | None = None
    party_affiliation: str | None = None
    state: str | None = None
    office: str | None = None
    district: str | None = None
    incumbent_challenger_status: str | None = None
    principal_campaign_committee: str | None = None
    address_street1: str | None = None
    address_street2: str | None = None
    address_city: str | None = None
    address_state: str | None = None
    address_zip: str | None = None
    cycle: int | None = None
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class Committee(BaseModel):
    """An FEC committee record; every field except the ID may be absent."""

    committee_id: str
    name: str | None = None
    committee_type: str | None = None
    designation: str | None = None
    candidate_id: str | None = None
    party_affiliation: str | None = None
    filing_frequency: str | None = None
    address_street1: str | None = None
    address_street2: str | None = None
    address_city: str | None = None
    address_state: str | None = None
    address_zip: str | None = None
    treasurer_name: str | None = None
    cycle: int | None = None
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
class Filing(BaseModel):
    """A single FEC filing; dates are kept as strings as received."""

    filing_id: str
    cover_record_form: str | None = None
    filer_id: str | None = None
    filer_name: str | None = None
    coverage_from_date: str | None = None
    coverage_through_date: str | None = None
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
class CandidatePageData(BaseModel):
    """Payload rendered by the candidate page; `error` is set on failure."""

    candidate_id: str
    cycle: int
    candidate: Candidate | None = None
    committee: Committee | None = None
    filings: list[Filing] = []
    error: str | None = None
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
class CommitteePageData(BaseModel):
    """Payload rendered by the committee page; `error` is set on failure."""

    committee_id: str
    cycle: int
    committee: Committee | None = None
    candidate: Candidate | None = None
    filings: list[Filing] = []
    error: str | None = None
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
class ContestPageData(BaseModel):
    """Payload rendered by the contest (state/office/district) page."""

    state: str
    office: str
    district: str | None = None
    cycle: int
    contest_description: str
    candidates: list[Candidate] = []
    error: str | None = None
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
class FilingDetailPageData(BaseModel):
    """Payload rendered by the filing-detail page.

    `form_data` carries the parsed form contents as a raw mapping; its exact
    schema depends on the filing's form type.
    """

    filing_id: str
    filing: Filing | None = None
    form_data: dict | None = None
    database_name: str
    error: str | None = None
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
class IndexPageData(BaseModel):
    """Payload rendered by the index page."""

    database_name: str


# Models exposed to the TypeScript type generator (scripts/typegen-pagedata.py).
__exports__ = [CandidatePageData, CommitteePageData, ContestPageData, FilingDetailPageData, IndexPageData]
|
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
from pydantic import BaseModel
|
|
2
|
+
from datasette import Response
|
|
3
|
+
from datasette_plugin_router import Body
|
|
4
|
+
from typing import Optional, List
|
|
5
|
+
import asyncio
|
|
6
|
+
import uuid
|
|
7
|
+
|
|
8
|
+
from .router import router
|
|
9
|
+
from .state import libfec_client, export_state
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class ExportStartParams(BaseModel):
    """Request body for POST /-/api/libfec/export/start."""

    filings: Optional[List[str]] = None
    cycle: Optional[int] = None
    cover_only: bool = False
    clobber: bool = False
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class ExportResponse(BaseModel):
    """Common envelope returned by the export start/status/cancel endpoints."""

    status: str
    message: str
    export_id: Optional[str] = None
    phase: Optional[str] = None
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
@router.POST("/-/api/libfec/export/start", output=ExportResponse)
async def export_start(datasette, params: Body[ExportStartParams]):
    """Kick off a background libfec export into the first on-disk database.

    Returns 400 if an export is already in progress, 500 if no non-memory
    database is attached, otherwise 200 with the new export_id.
    """
    if export_state.running:
        return Response.json({
            "status": "error",
            "message": "Export already in progress",
            "phase": export_state.phase
        }, status=400)

    # Pick the first non-memory database as the export target; only the
    # values are needed, so avoid iterating .items() with an unused key.
    output_db = next(
        (db for db in datasette.databases.values() if not db.is_memory),
        None,
    )
    if output_db is None:
        return Response.json({
            "status": "error",
            "message": "No writable database found."
        }, status=500)

    # Start export in background task
    async def run_export():
        # Drives the export; progress is mirrored into export_state.
        await libfec_client.export_with_progress(
            output_db=output_db.path,
            filings=params.filings,
            cycle=params.cycle,
            cover_only=params.cover_only,
            clobber=params.clobber,
            export_state=export_state
        )

    export_state.export_id = f"export-{uuid.uuid4()}"
    # BUG FIX: keep a strong reference to the task. The event loop holds only
    # weak references to running tasks, so an unreferenced task may be
    # garbage-collected before the export finishes.
    export_state.task = asyncio.create_task(run_export())

    # Give it a moment to start so the response reflects a real phase
    await asyncio.sleep(0.1)

    return Response.json(
        ExportResponse(
            status="success",
            message="Export started",
            export_id=export_state.export_id,
            phase=export_state.phase
        ).model_dump()
    )
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
@router.GET("/-/api/libfec/export/status", output=ExportResponse)
async def export_status(datasette):
    """Report the current phase of the export state machine.

    Extra fields are attached depending on the phase: progress counters while
    running, totals/warnings on completion, the error message on failure.
    """
    phase = export_state.phase
    payload = {
        "status": "success",
        "message": "Export status",
        "export_id": export_state.export_id,
        "phase": phase,
    }

    if phase in ("sourcing", "downloading_bulk", "exporting"):
        # Progress counters for any in-flight phase.
        payload["completed"] = export_state.completed
        payload["total"] = export_state.total
        if phase == "downloading_bulk" and export_state.current:
            payload["current"] = export_state.current
        elif phase == "exporting" and export_state.current_filing_id:
            payload["current_filing_id"] = export_state.current_filing_id
    elif phase == "complete":
        payload["total_exported"] = export_state.total_exported
        payload["warnings"] = export_state.warnings
    elif phase == "error":
        payload["error_message"] = export_state.error_message

    return Response.json(payload)
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
@router.POST("/-/api/libfec/export/cancel", output=ExportResponse)
async def export_cancel(datasette):
    """Cancel the in-flight export, if any (400 when nothing is running)."""
    if not export_state.running:
        return Response.json({
            "status": "error",
            "message": "No export in progress"
        }, status=400)

    # Best-effort cancellation of the RPC-side export; a failure here is
    # logged but does not block reporting the export as canceled.
    rpc = export_state.rpc_client
    if rpc:
        try:
            await rpc.export_cancel()
        except Exception as e:
            print(f"Error canceling RPC export: {e}")

    response = ExportResponse(
        status="success",
        message="Export canceled",
        export_id=export_state.export_id,
        phase="canceled",
    )
    return Response.json(response.model_dump())
|
|
@@ -0,0 +1,220 @@
|
|
|
1
|
+
from pydantic import BaseModel
|
|
2
|
+
from datasette import Response
|
|
3
|
+
from typing import Optional, List, Literal
|
|
4
|
+
|
|
5
|
+
from .router import router
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class ExportRecord(BaseModel):
    """One row from the libfec_exports metadata table."""

    export_id: int
    export_uuid: str
    created_at: str
    filings_count: int
    cover_only: bool
    status: str
    error_message: Optional[str] = None
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class ExportFilingRecord(BaseModel):
    """Per-filing outcome of an export run."""

    filing_id: str
    success: bool
    message: Optional[str] = None
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class ExportInputRecord(BaseModel):
    """An input given to an export, plus the filing IDs it resolved to."""

    id: int
    input_type: str
    input_value: str
    cycle: Optional[int] = None
    office: Optional[str] = None
    state: Optional[str] = None
    district: Optional[str] = None
    filing_ids: List[str] = []
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class ExportDetailResponse(BaseModel):
    """Full detail for one export: the record, its inputs, and its filings."""

    export: ExportRecord
    inputs: List[ExportInputRecord]
    filings: List[ExportFilingRecord]
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
class ApiExportsListResponse(BaseModel):
    """Success envelope for GET /-/api/libfec/exports."""

    status: Literal['success']
    exports: List[ExportRecord]
    message: Optional[str] = None
|
|
45
|
+
|
|
46
|
+
@router.GET("/-/api/libfec/exports$", output=ApiExportsListResponse)
async def list_exports(datasette):
    """List all export operations from the metadata tables.

    Returns an empty list (not an error) when the libfec_exports table has
    not been created yet; 500 on any database failure.
    """
    db = datasette.get_database()

    # Check if the table exists
    try:
        tables = await db.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='libfec_exports'")
        if not tables.first():
            return Response.json({
                "status": "success",
                "exports": [],
                "message": "No exports yet"
            })
    except Exception as e:
        return Response.json({
            "status": "error",
            "message": f"Database error: {str(e)}"
        }, status=500)

    try:
        exports_result = await db.execute("""
            SELECT
                export_id,
                export_uuid,
                created_at,
                filings_count,
                cover_only,
                status,
                error_message
            FROM libfec_exports
            ORDER BY created_at DESC
            LIMIT 50
        """)

        exports = [
            {
                "export_id": row[0],
                "export_uuid": row[1],
                "created_at": row[2],
                "filings_count": row[3],
                # SQLite stores booleans as 0/1 integers.
                "cover_only": bool(row[4]),
                "status": row[5],
                "error_message": row[6],
            }
            for row in exports_result.rows
        ]
        # BUG FIX: use model_dump() (a dict), not model_dump_json() (a str).
        # Response.json serializes its argument, so passing the pre-serialized
        # JSON string double-encoded the body into one JSON string literal.
        # This also matches the sibling export endpoints.
        return Response.json(ApiExportsListResponse(
            status="success",
            exports=exports
        ).model_dump())

    except Exception as e:
        return Response.json({
            "status": "error",
            "message": f"Failed to fetch exports: {str(e)}"
        }, status=500)
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
@router.GET("/-/api/libfec/exports/(?P<export_id>\\d+)")
async def get_export_detail(datasette, export_id: str):
    """Get detailed information about a specific export.

    Returns the export record plus its inputs (with resolved filing IDs) and
    per-filing outcomes; 404 when the export does not exist, 500 on failure.
    The route regex guarantees export_id is all digits.
    """
    db = datasette.get_database()
    export_id_int = int(export_id)

    try:
        export_result = await db.execute("""
            SELECT
                export_id,
                export_uuid,
                created_at,
                filings_count,
                cover_only,
                status,
                error_message
            FROM libfec_exports
            WHERE export_id = ?
        """, [export_id_int])

        export_row = export_result.first()
        if not export_row:
            return Response.json({
                "status": "error",
                "message": "Export not found"
            }, status=404)

        export_columns = (
            "export_id", "export_uuid", "created_at", "filings_count",
            "cover_only", "status", "error_message",
        )
        export = dict(zip(export_columns, export_row))
        # SQLite stores booleans as 0/1 integers.
        export["cover_only"] = bool(export["cover_only"])

        # Inputs with their resolved filing IDs; the tables may not exist
        # yet, in which case the list is simply left empty.
        inputs = []
        try:
            inputs_result = await db.execute("""
                SELECT
                    i.id,
                    i.input_type,
                    i.input_value,
                    i.cycle,
                    i.office,
                    i.state,
                    i.district
                FROM libfec_export_inputs i
                WHERE i.export_id = ?
                ORDER BY i.id
            """, [export_id_int])

            for input_row in inputs_result.rows:
                resolved = await db.execute("""
                    SELECT filing_id
                    FROM libfec_export_input_filings
                    WHERE input_id = ?
                """, [input_row[0]])
                inputs.append({
                    "id": input_row[0],
                    "input_type": input_row[1],
                    "input_value": input_row[2],
                    "cycle": input_row[3],
                    "office": input_row[4],
                    "state": input_row[5],
                    "district": input_row[6],
                    "filing_ids": [f[0] for f in resolved.rows],
                })
        except Exception:
            # Table might not exist
            pass

        # Per-filing success/failure status; again tolerate a missing table.
        filings = []
        try:
            filings_result = await db.execute("""
                SELECT
                    filing_id,
                    success,
                    message
                FROM libfec_export_filings
                WHERE export_id = ?
                ORDER BY filing_id
            """, [export_id_int])

            filings = [
                {
                    "filing_id": f_row[0],
                    "success": bool(f_row[1]),
                    "message": f_row[2],
                }
                for f_row in filings_result.rows
            ]
        except Exception:
            # Table might not exist
            pass

        return Response.json({
            "status": "success",
            "export": export,
            "inputs": inputs,
            "filings": filings
        })

    except Exception as e:
        return Response.json({
            "status": "error",
            "message": f"Failed to fetch export detail: {str(e)}"
        }, status=500)
|