datasette-libfec 0.0.1a4__py3-none-any.whl → 0.0.1a6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41) hide show
  1. datasette_libfec/__init__.py +18 -2
  2. datasette_libfec/libfec_client.py +225 -11
  3. datasette_libfec/libfec_export_rpc_client.py +358 -0
  4. datasette_libfec/libfec_rpc_client.py +335 -0
  5. datasette_libfec/libfec_search_rpc_client.py +308 -0
  6. datasette_libfec/manifest.json +84 -2
  7. datasette_libfec/page_data.py +87 -0
  8. datasette_libfec/router.py +24 -0
  9. datasette_libfec/routes_export.py +128 -0
  10. datasette_libfec/routes_exports.py +222 -0
  11. datasette_libfec/routes_pages.py +341 -0
  12. datasette_libfec/routes_rss.py +416 -0
  13. datasette_libfec/routes_search.py +78 -0
  14. datasette_libfec/state.py +6 -0
  15. datasette_libfec/static/gen/candidate-BEqDafKu.css +1 -0
  16. datasette_libfec/static/gen/candidate-tqxa29G-.js +3 -0
  17. datasette_libfec/static/gen/class-C5DDKbJD.js +2 -0
  18. datasette_libfec/static/gen/committee-Bmki9iKb.css +1 -0
  19. datasette_libfec/static/gen/committee-DY1GmylW.js +2 -0
  20. datasette_libfec/static/gen/contest-BbYrzKRg.js +1 -0
  21. datasette_libfec/static/gen/contest-D4Fj7kGA.css +1 -0
  22. datasette_libfec/static/gen/each-DkfQbqzj.js +1 -0
  23. datasette_libfec/static/gen/filing_detail-Ba6_iQwV.css +1 -0
  24. datasette_libfec/static/gen/filing_detail-D2ib3OM6.js +26 -0
  25. datasette_libfec/static/gen/index-AHqus2fd.js +9 -0
  26. datasette_libfec/static/gen/index-client-CDwZ_Ixa.js +1 -0
  27. datasette_libfec/static/gen/index-jv9_YIKt.css +1 -0
  28. datasette_libfec/static/gen/load-AXKAVXVj.js +1 -0
  29. datasette_libfec/templates/libfec_base.html +12 -0
  30. {datasette_libfec-0.0.1a4.dist-info → datasette_libfec-0.0.1a6.dist-info}/METADATA +2 -2
  31. datasette_libfec-0.0.1a6.dist-info/RECORD +37 -0
  32. {datasette_libfec-0.0.1a4.dist-info → datasette_libfec-0.0.1a6.dist-info}/top_level.txt +1 -0
  33. scripts/typegen-pagedata.py +6 -0
  34. datasette_libfec/routes.py +0 -189
  35. datasette_libfec/static/gen/index-6cjSv2YC.css +0 -1
  36. datasette_libfec/static/gen/index-CaTQMY-X.js +0 -1
  37. datasette_libfec/templates/libfec.html +0 -14
  38. datasette_libfec-0.0.1a4.dist-info/RECORD +0 -14
  39. {datasette_libfec-0.0.1a4.dist-info → datasette_libfec-0.0.1a6.dist-info}/WHEEL +0 -0
  40. {datasette_libfec-0.0.1a4.dist-info → datasette_libfec-0.0.1a6.dist-info}/entry_points.txt +0 -0
  41. {datasette_libfec-0.0.1a4.dist-info → datasette_libfec-0.0.1a6.dist-info}/licenses/LICENSE +0 -0
@@ -1,11 +1,93 @@
1
1
  {
2
+ "_class-C5DDKbJD.js": {
3
+ "file": "static/gen/class-C5DDKbJD.js",
4
+ "name": "class"
5
+ },
6
+ "_each-DkfQbqzj.js": {
7
+ "file": "static/gen/each-DkfQbqzj.js",
8
+ "name": "each",
9
+ "imports": [
10
+ "_load-AXKAVXVj.js"
11
+ ]
12
+ },
13
+ "_index-client-CDwZ_Ixa.js": {
14
+ "file": "static/gen/index-client-CDwZ_Ixa.js",
15
+ "name": "index-client",
16
+ "imports": [
17
+ "_load-AXKAVXVj.js"
18
+ ]
19
+ },
20
+ "_load-AXKAVXVj.js": {
21
+ "file": "static/gen/load-AXKAVXVj.js",
22
+ "name": "load"
23
+ },
24
+ "src/candidate_view.ts": {
25
+ "file": "static/gen/candidate-tqxa29G-.js",
26
+ "name": "candidate",
27
+ "src": "src/candidate_view.ts",
28
+ "isEntry": true,
29
+ "imports": [
30
+ "_load-AXKAVXVj.js",
31
+ "_each-DkfQbqzj.js",
32
+ "_class-C5DDKbJD.js"
33
+ ],
34
+ "css": [
35
+ "static/gen/candidate-BEqDafKu.css"
36
+ ]
37
+ },
38
+ "src/committee_view.ts": {
39
+ "file": "static/gen/committee-DY1GmylW.js",
40
+ "name": "committee",
41
+ "src": "src/committee_view.ts",
42
+ "isEntry": true,
43
+ "imports": [
44
+ "_load-AXKAVXVj.js",
45
+ "_each-DkfQbqzj.js"
46
+ ],
47
+ "css": [
48
+ "static/gen/committee-Bmki9iKb.css"
49
+ ]
50
+ },
51
+ "src/contest_view.ts": {
52
+ "file": "static/gen/contest-BbYrzKRg.js",
53
+ "name": "contest",
54
+ "src": "src/contest_view.ts",
55
+ "isEntry": true,
56
+ "imports": [
57
+ "_load-AXKAVXVj.js",
58
+ "_each-DkfQbqzj.js",
59
+ "_class-C5DDKbJD.js"
60
+ ],
61
+ "css": [
62
+ "static/gen/contest-D4Fj7kGA.css"
63
+ ]
64
+ },
65
+ "src/filing_detail_view.ts": {
66
+ "file": "static/gen/filing_detail-D2ib3OM6.js",
67
+ "name": "filing_detail",
68
+ "src": "src/filing_detail_view.ts",
69
+ "isEntry": true,
70
+ "imports": [
71
+ "_load-AXKAVXVj.js",
72
+ "_index-client-CDwZ_Ixa.js"
73
+ ],
74
+ "css": [
75
+ "static/gen/filing_detail-Ba6_iQwV.css"
76
+ ]
77
+ },
2
78
  "src/index_view.ts": {
3
- "file": "static/gen/index-CaTQMY-X.js",
79
+ "file": "static/gen/index-AHqus2fd.js",
4
80
  "name": "index",
5
81
  "src": "src/index_view.ts",
6
82
  "isEntry": true,
83
+ "imports": [
84
+ "_load-AXKAVXVj.js",
85
+ "_index-client-CDwZ_Ixa.js",
86
+ "_each-DkfQbqzj.js",
87
+ "_class-C5DDKbJD.js"
88
+ ],
7
89
  "css": [
8
- "static/gen/index-6cjSv2YC.css"
90
+ "static/gen/index-jv9_YIKt.css"
9
91
  ]
10
92
  }
11
93
  }
@@ -0,0 +1,87 @@
1
+ from pydantic import BaseModel
2
+
3
+
4
class Candidate(BaseModel):
    """A candidate record keyed by candidate_id.

    All fields other than candidate_id are optional because source
    filings may be incomplete.
    """

    candidate_id: str
    name: str | None = None
    party_affiliation: str | None = None
    state: str | None = None
    office: str | None = None
    district: str | None = None
    incumbent_challenger_status: str | None = None
    principal_campaign_committee: str | None = None
    address_street1: str | None = None
    address_street2: str | None = None
    address_city: str | None = None
    address_state: str | None = None
    address_zip: str | None = None
    # presumably the two-year election cycle (e.g. 2024) — verify against callers
    cycle: int | None = None
19
+
20
+
21
class Committee(BaseModel):
    """A committee record keyed by committee_id.

    All fields other than committee_id are optional because source
    filings may be incomplete.
    """

    committee_id: str
    name: str | None = None
    committee_type: str | None = None
    designation: str | None = None
    # Associated candidate, if any
    candidate_id: str | None = None
    party_affiliation: str | None = None
    filing_frequency: str | None = None
    address_street1: str | None = None
    address_street2: str | None = None
    address_city: str | None = None
    address_state: str | None = None
    address_zip: str | None = None
    treasurer_name: str | None = None
    # presumably the two-year election cycle (e.g. 2024) — verify against callers
    cycle: int | None = None
36
+
37
+
38
class Filing(BaseModel):
    """A single filing keyed by filing_id.

    Coverage dates are kept as strings as stored in the source data.
    """

    filing_id: str
    cover_record_form: str | None = None
    filer_id: str | None = None
    filer_name: str | None = None
    coverage_from_date: str | None = None
    coverage_through_date: str | None = None
45
+
46
+
47
class CandidatePageData(BaseModel):
    """Payload rendered by the candidate page view."""

    candidate_id: str
    cycle: int
    candidate: Candidate | None = None
    committee: Committee | None = None
    # pydantic copies mutable defaults per instance, so [] is safe here
    filings: list[Filing] = []
    # Populated instead of the data fields when the lookup failed
    error: str | None = None
54
+
55
+
56
class CommitteePageData(BaseModel):
    """Payload rendered by the committee page view."""

    committee_id: str
    cycle: int
    committee: Committee | None = None
    candidate: Candidate | None = None
    # pydantic copies mutable defaults per instance, so [] is safe here
    filings: list[Filing] = []
    # Populated instead of the data fields when the lookup failed
    error: str | None = None
63
+
64
+
65
class ContestPageData(BaseModel):
    """Payload rendered by the contest (state/office/district) page view."""

    state: str
    office: str
    # District is optional — not all offices have districts
    district: str | None = None
    cycle: int
    contest_description: str
    # pydantic copies mutable defaults per instance, so [] is safe here
    candidates: list[Candidate] = []
    # Populated instead of the data fields when the lookup failed
    error: str | None = None
73
+
74
+
75
class FilingDetailPageData(BaseModel):
    """Payload rendered by the filing detail page view."""

    filing_id: str
    filing: Filing | None = None
    # Raw per-form data; schema varies by form type, hence the loose dict
    form_data: dict | None = None
    database_name: str
    # Populated instead of the data fields when the lookup failed
    error: str | None = None
81
+
82
+
83
class IndexPageData(BaseModel):
    """Payload rendered by the index page view."""

    database_name: str
85
+
86
+
87
+ __exports__ = [CandidatePageData, CommitteePageData, ContestPageData, FilingDetailPageData, IndexPageData]
@@ -0,0 +1,24 @@
1
+ from datasette import Forbidden
2
+ from datasette_plugin_router import Router
3
+ from functools import wraps
4
+
5
+ router = Router()
6
+
7
+ LIBFEC_ACCESS_NAME = "datasette_libfec_access"
8
+
9
# decorator for routes, to ensure the proper permissions are checked
def check_permission():
    """Build a decorator that enforces the datasette-libfec access permission.

    The wrapped async route handler only runs after Datasette's allowed()
    check confirms the request's actor may perform LIBFEC_ACCESS_NAME;
    otherwise Forbidden is raised.
    """
    def decorator(func):
        @wraps(func)
        async def wrapper(datasette, request, **kwargs):
            # Delegate the permission decision to Datasette
            result = await datasette.allowed(
                action=LIBFEC_ACCESS_NAME, actor=request.actor
            )
            if not result:
                raise Forbidden("Permission denied for datasette-libfec access")
            # Handlers are invoked with keyword arguments only
            return await func(datasette=datasette, request=request, **kwargs)

        return wrapper

    return decorator
24
+
@@ -0,0 +1,128 @@
1
+ from pydantic import BaseModel
2
+ from datasette import Response
3
+ from datasette_plugin_router import Body
4
+ from typing import Optional, List
5
+ import asyncio
6
+ import uuid
7
+
8
+ from .router import router, check_permission
9
+ from .state import libfec_client, export_state
10
+
11
+
12
class ExportStartParams(BaseModel):
    """Request body for POST /-/api/libfec/export/start."""

    # Explicit filing IDs to export; None lets the exporter decide
    filings: Optional[List[str]] = None
    cycle: Optional[int] = None
    # Export only cover records when True
    cover_only: bool = False
    # presumably overwrites previously exported data when True — verify
    clobber: bool = False
17
+
18
+
19
class ExportResponse(BaseModel):
    """Common response envelope for the export start/status/cancel endpoints."""

    status: str  # "success" or "error"
    message: str
    export_id: Optional[str] = None
    phase: Optional[str] = None
24
+
25
+
26
# Strong references to in-flight export tasks. asyncio.create_task() keeps
# only a weak reference to its result, so a task with no other reference can
# be garbage-collected before it finishes (see asyncio docs: "Save a
# reference to the result of this function").
_background_tasks: set = set()


@router.POST("/-/api/libfec/export/start", output=ExportResponse)
@check_permission()
async def export_start(datasette, request, params: Body[ExportStartParams]):
    """Start a background export; rejects if one is already running.

    Returns 400 when an export is in progress, 500 when no writable
    database exists, otherwise a success envelope with the new export_id.
    """
    if export_state.running:
        return Response.json({
            "status": "error",
            "message": "Export already in progress",
            "phase": export_state.phase
        }, status=400)

    # Pick the first non-memory database as the export target
    output_db = None
    for db in datasette.databases.values():
        if not db.is_memory:
            output_db = db
            break
    if output_db is None:
        return Response.json({
            "status": "error",
            "message": "No writable database found."
        }, status=500)

    # Start export in background task; progress is reported via export_state
    async def run_export():
        await libfec_client.export_with_progress(
            output_db=output_db.path,
            filings=params.filings,
            cycle=params.cycle,
            cover_only=params.cover_only,
            clobber=params.clobber,
            export_state=export_state
        )

    export_state.export_id = f"export-{uuid.uuid4()}"
    task = asyncio.create_task(run_export())
    # Hold a strong reference until the task completes, then drop it
    _background_tasks.add(task)
    task.add_done_callback(_background_tasks.discard)

    # Give it a moment to start so the first status poll sees a phase
    await asyncio.sleep(0.1)

    return Response.json(
        ExportResponse(
            status="success",
            message="Export started",
            export_id=export_state.export_id,
            phase=export_state.phase
        ).model_dump()
    )
+ )
73
+
74
+
75
@router.GET("/-/api/libfec/export/status", output=ExportResponse)
@check_permission()
async def export_status(datasette, request):
    """Report the current export's phase plus phase-specific progress fields."""
    response_data = {
        "status": "success",
        "message": "Export status",
        "export_id": export_state.export_id,
        "phase": export_state.phase,
    }

    # Add additional fields based on phase
    if export_state.phase in ("sourcing", "downloading_bulk", "exporting"):
        # In-progress phases carry a completed/total progress counter
        response_data["completed"] = export_state.completed
        response_data["total"] = export_state.total

        # Only one of these applies per phase; both are omitted when empty
        if export_state.phase == "downloading_bulk" and export_state.current:
            response_data["current"] = export_state.current
        elif export_state.phase == "exporting" and export_state.current_filing_id:
            response_data["current_filing_id"] = export_state.current_filing_id

    elif export_state.phase == "complete":
        response_data["total_exported"] = export_state.total_exported
        response_data["warnings"] = export_state.warnings

    elif export_state.phase == "error":
        response_data["error_message"] = export_state.error_message

    return Response.json(response_data)
103
+
104
+
105
@router.POST("/-/api/libfec/export/cancel", output=ExportResponse)
@check_permission()
async def export_cancel(datasette, request):
    """Cancel the export currently in progress, if any."""
    # Nothing to cancel — report an error to the caller
    if not export_state.running:
        payload = {
            "status": "error",
            "message": "No export in progress",
        }
        return Response.json(payload, status=400)

    # Forward the cancellation to the RPC backend when one is attached;
    # a failure here is logged but does not block the cancel response.
    rpc = export_state.rpc_client
    if rpc:
        try:
            await rpc.export_cancel()
        except Exception as exc:
            print(f"Error canceling RPC export: {exc}")

    body = ExportResponse(
        status="success",
        message="Export canceled",
        export_id=export_state.export_id,
        phase="canceled",
    )
    return Response.json(body.model_dump())
@@ -0,0 +1,222 @@
1
+ from pydantic import BaseModel
2
+ from datasette import Response
3
+ from typing import Optional, List, Literal
4
+
5
+ from .router import router, check_permission
6
+
7
+
8
class ExportRecord(BaseModel):
    """One row from the libfec_exports metadata table."""

    export_id: int
    export_uuid: str
    created_at: str
    filings_count: int
    cover_only: bool
    status: str
    error_message: Optional[str] = None
16
+
17
+
18
class ExportFilingRecord(BaseModel):
    """Per-filing outcome of an export (from libfec_export_filings)."""

    filing_id: str
    success: bool
    message: Optional[str] = None
22
+
23
+
24
class ExportInputRecord(BaseModel):
    """An export input (from libfec_export_inputs) plus its resolved filing IDs."""

    id: int
    input_type: str
    input_value: str
    cycle: Optional[int] = None
    office: Optional[str] = None
    state: Optional[str] = None
    district: Optional[str] = None
    # pydantic copies mutable defaults per instance, so [] is safe here
    filing_ids: List[str] = []
33
+
34
+
35
class ExportDetailResponse(BaseModel):
    """Response shape for GET /-/api/libfec/exports/<export_id>."""

    export: ExportRecord
    inputs: List[ExportInputRecord]
    filings: List[ExportFilingRecord]
39
+
40
+
41
class ApiExportsListResponse(BaseModel):
    """Response shape for GET /-/api/libfec/exports."""

    status: Literal['success']
    exports: List[ExportRecord]
    message: Optional[str] = None
45
+
46
@router.GET("/-/api/libfec/exports$", output=ApiExportsListResponse)
@check_permission()
async def list_exports(datasette, request):
    """List all export operations from the metadata tables.

    Returns the 50 most recent rows of libfec_exports, an empty list when
    the table does not exist yet, or an error envelope on database failure.
    """
    db = datasette.get_database()

    # Check if the table exists
    try:
        tables = await db.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='libfec_exports'")
        if not tables.first():
            return Response.json({
                "status": "success",
                "exports": [],
                "message": "No exports yet"
            })
    except Exception as e:
        return Response.json({
            "status": "error",
            "message": f"Database error: {str(e)}"
        }, status=500)

    try:
        exports_result = await db.execute("""
            SELECT
                export_id,
                export_uuid,
                created_at,
                filings_count,
                cover_only,
                status,
                error_message
            FROM libfec_exports
            ORDER BY created_at DESC
            LIMIT 50
        """)

        exports = []
        for row in exports_result.rows:
            exports.append({
                "export_id": row[0],
                "export_uuid": row[1],
                "created_at": row[2],
                "filings_count": row[3],
                # SQLite stores booleans as 0/1 integers
                "cover_only": bool(row[4]),
                "status": row[5],
                "error_message": row[6]
            })
        # BUG FIX: model_dump_json() returns a JSON *string*; passing it to
        # Response.json() double-encoded the body (a quoted string instead of
        # an object). Use model_dump() so a dict is serialized, matching the
        # other branches of this handler.
        return Response.json(ApiExportsListResponse(
            status="success",
            exports=exports
        ).model_dump())

    except Exception as e:
        return Response.json({
            "status": "error",
            "message": f"Failed to fetch exports: {str(e)}"
        }, status=500)
104
+
105
+
106
@router.GET("/-/api/libfec/exports/(?P<export_id>\\d+)")
@check_permission()
async def get_export_detail(datasette, request, export_id: str):
    """Get detailed information about a specific export.

    Returns the export row plus its inputs (with resolved filing IDs) and
    per-filing outcomes; 404 when the export is unknown, 500 on DB failure.
    """
    db = datasette.get_database()
    # The route regex only matches digits, so int() cannot raise here
    export_id_int = int(export_id)

    try:
        # Get export record
        export_result = await db.execute("""
            SELECT
                export_id,
                export_uuid,
                created_at,
                filings_count,
                cover_only,
                status,
                error_message
            FROM libfec_exports
            WHERE export_id = ?
        """, [export_id_int])

        export_row = export_result.first()
        if not export_row:
            return Response.json({
                "status": "error",
                "message": "Export not found"
            }, status=404)

        export = {
            "export_id": export_row[0],
            "export_uuid": export_row[1],
            "created_at": export_row[2],
            "filings_count": export_row[3],
            # SQLite stores booleans as 0/1 integers
            "cover_only": bool(export_row[4]),
            "status": export_row[5],
            "error_message": export_row[6]
        }

        # Get inputs with their resolved filing IDs
        inputs = []
        try:
            inputs_result = await db.execute("""
                SELECT
                    i.id,
                    i.input_type,
                    i.input_value,
                    i.cycle,
                    i.office,
                    i.state,
                    i.district
                FROM libfec_export_inputs i
                WHERE i.export_id = ?
                ORDER BY i.id
            """, [export_id_int])

            for row in inputs_result.rows:
                input_record = {
                    "id": row[0],
                    "input_type": row[1],
                    "input_value": row[2],
                    "cycle": row[3],
                    "office": row[4],
                    "state": row[5],
                    "district": row[6],
                    "filing_ids": []
                }

                # Get filing IDs for this input
                # NOTE(review): one query per input (N+1); fine for small
                # input counts, a JOIN would scale better
                filings_for_input = await db.execute("""
                    SELECT filing_id
                    FROM libfec_export_input_filings
                    WHERE input_id = ?
                """, [row[0]])

                input_record["filing_ids"] = [f[0] for f in filings_for_input.rows]
                inputs.append(input_record)

        except Exception:
            # Table might not exist
            pass

        # Get filings with their success/failure status
        filings = []
        try:
            filings_result = await db.execute("""
                SELECT
                    filing_id,
                    success,
                    message
                FROM libfec_export_filings
                WHERE export_id = ?
                ORDER BY filing_id
            """, [export_id_int])

            for row in filings_result.rows:
                filings.append({
                    "filing_id": row[0],
                    "success": bool(row[1]),
                    "message": row[2]
                })
        except Exception:
            # Table might not exist
            pass

        return Response.json({
            "status": "success",
            "export": export,
            "inputs": inputs,
            "filings": filings
        })

    except Exception as e:
        return Response.json({
            "status": "error",
            "message": f"Failed to fetch export detail: {str(e)}"
        }, status=500)