secondmate 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
secondmate/__init__.py ADDED
@@ -0,0 +1 @@
1
+
secondmate/cli.py ADDED
@@ -0,0 +1,26 @@
1
+ import argparse
2
+ import uvicorn
3
+ import os
4
+
5
def main():
    """CLI entry point: parse arguments and launch the SecondMate server."""
    parser = argparse.ArgumentParser(description="Run the SecondMate application.")
    parser.add_argument("--port", type=int, default=4050, help="Port to run the server on (default: 4050)")
    parser.add_argument("--host", type=str, default="0.0.0.0", help="Host to bind to (default: 0.0.0.0)")
    parser.add_argument("--reload", action="store_true", help="Enable auto-reload")

    args = parser.parse_args()

    # Point the app at the packaged static assets so the frontend is found
    # regardless of the current working directory.
    package_dir = os.path.dirname(os.path.abspath(__file__))
    static_dir = os.path.join(package_dir, "static")
    os.environ["SECONDMATE_STATIC_DIR"] = static_dir

    print(f"Starting SecondMate on http://{args.host}:{args.port}")
    # Pass the flag straight through instead of duplicating the run() call
    # in an if/else (reload=False is uvicorn's default anyway).
    uvicorn.run("secondmate.main:app", host=args.host, port=args.port, reload=args.reload)


if __name__ == "__main__":
    main()
@@ -0,0 +1,38 @@
1
+ import os
2
+ import importlib
3
+ from functools import lru_cache
4
+ from pyspark.sql import SparkSession
5
+ from fastapi import Depends, HTTPException
6
+ from secondmate.providers.spark_interface import SparkProvider
7
+ from secondmate.providers.local_spark import LocalSparkProvider
8
+
9
@lru_cache()
def get_spark_provider() -> SparkProvider:
    """Resolve the SparkProvider implementation, cached for the process lifetime.

    Reads the dotted class path from SPARK_PROVIDER_CLASS; when unset,
    falls back to a LocalSparkProvider.

    Raises:
        ImportError: if the configured class cannot be imported, instantiated,
            or does not expose a ``get_session`` method.
    """
    dotted_path = os.getenv("SPARK_PROVIDER_CLASS")

    if not dotted_path:
        return LocalSparkProvider()

    try:
        module_path, class_name = dotted_path.rsplit(".", 1)
        provider_cls = getattr(importlib.import_module(module_path), class_name)

        # Providers are assumed to be constructible without arguments and to
        # handle any configuration internally (e.g. via environment variables).
        provider = provider_cls()

        # Duck-typed check: anything with get_session() is acceptable.
        if not hasattr(provider, "get_session"):
            raise ValueError(f"Provider '{dotted_path}' must implement 'get_session' method.")

        return provider

    except (ImportError, AttributeError, ValueError) as exc:
        # Surface a single, clear error for any mis-configured provider.
        raise ImportError(f"Could not load Spark provider '{dotted_path}': {exc}") from exc
34
+
35
def get_spark_session(
    provider: SparkProvider = Depends(get_spark_provider)
) -> SparkSession:
    """FastAPI dependency that yields the active SparkSession from the provider."""
    session = provider.get_session()
    return session
secondmate/main.py ADDED
@@ -0,0 +1,264 @@
import random
import re
import sys
from contextlib import asynccontextmanager

from fastapi import APIRouter, Depends, FastAPI, HTTPException
from pyspark.sql import SparkSession

from secondmate.dependencies import get_spark_provider, get_spark_session
from secondmate.providers.local_spark import LocalSparkProvider

# Hack to make sure pyspark finds the right python executable if needed,
# or relying on environment.
# For local spark, PYSPARK_PYTHON usually defaults to sys.executable
15
+
16
@asynccontextmanager
async def lifespan(app: FastAPI):
    """App lifespan hook: on startup, seed demo Iceberg tables for local use.

    Seeding only runs when the resolved provider is a LocalSparkProvider;
    a custom/remote provider is assumed to manage its own data.
    """
    # Startup: Ensure table exists and has data
    provider = get_spark_provider()
    spark = provider.get_session()

    # Only create fake data if using LocalSparkProvider
    if isinstance(provider, LocalSparkProvider):
        # Helper: create the Iceberg table if missing, then populate it only
        # when it is empty so repeated startups do not duplicate rows.
        def create_table_if_not_exists(table_name, schema_ddl, data_records, schema_cols):
            if not spark.catalog.tableExists(table_name):
                print(f"Creating {table_name} table...")
                spark.sql(f"CREATE TABLE IF NOT EXISTS {table_name} ({schema_ddl}) USING iceberg")

            # Check if empty
            count = spark.table(table_name).count()
            if count == 0 and data_records:
                print(f"Populating {table_name} with {len(data_records)} rows...")
                df = spark.createDataFrame(data_records, schema_cols)
                df.writeTo(table_name).append()
                print(f"Initialized {table_name} with {len(data_records)} rows.")
            else:
                print(f"Table {table_name} already has {count} rows.")

        # 1. user.ipgeo — 1000 synthetic IP/geolocation rows
        create_table_if_not_exists(
            "user.ipgeo",
            "id LONG, ip STRING, city STRING, country STRING",
            [(i, f"192.168.1.{i % 255}", f"City_{i}", f"Country_{i % 10}") for i in range(1000)],
            ["id", "ip", "city", "country"]
        )

        # 2. user.sales.transactions — a few sample transactions
        from datetime import date
        spark.sql("CREATE NAMESPACE IF NOT EXISTS user.sales")
        create_table_if_not_exists(
            "user.sales.transactions",
            "tx_id LONG, amount DOUBLE, currency STRING, tx_date DATE",
            [
                (1, 100.50, "USD", date(2023, 1, 1)),
                (2, 200.00, "EUR", date(2023, 1, 2)),
                (3, 50.25, "GBP", date(2023, 1, 3))
            ],
            ["tx_id", "amount", "currency", "tx_date"]
        )

        # 3. user.finance.budget — per-department budget rows
        spark.sql("CREATE NAMESPACE IF NOT EXISTS user.finance")
        create_table_if_not_exists(
            "user.finance.budget",
            "dept_id LONG, dept_name STRING, budget_amount DOUBLE, fiscal_year INT",
            [
                (101, "Engineering", 500000.0, 2024),
                (102, "Marketing", 200000.0, 2024),
                (103, "HR", 150000.0, 2024)
            ],
            ["dept_id", "dept_name", "budget_amount", "fiscal_year"]
        )

    yield
    # Shutdown: nothing to clean up currently
77
+
78
# FastAPI application; `lifespan` seeds demo data on startup.
app = FastAPI(title="SecondMate Backend", lifespan=lifespan)
router = APIRouter()

from pydantic import BaseModel

class QueryRequest(BaseModel):
    # Raw Spark SQL text to execute.
    query: str
85
+
86
+ @router.post("/query/execute")
87
+ def execute_query(request: QueryRequest, spark: SparkSession = Depends(get_spark_session)):
88
+ """Execute a raw SQL query and return results."""
89
+ try:
90
+ df = spark.sql(request.query)
91
+ # Limit to 1000 to prevent overloading
92
+ df = df.limit(1000)
93
+
94
+ # Get schema
95
+ schema = [{"name": field.name, "type": str(field.dataType)} for field in df.schema.fields]
96
+
97
+ # Get data
98
+ data = [row.asDict() for row in df.collect()]
99
+
100
+ return {"schema": schema, "data": data}
101
+ except Exception as e:
102
+ import traceback
103
+ traceback.print_exc()
104
+ return {"error": str(e)}
105
+
106
+ @router.get("/catalogs")
107
+ def get_catalogs(spark: SparkSession = Depends(get_spark_session)):
108
+ """List available catalogs."""
109
+ # In PySpark 3.4+, spark.catalog.listCatalogs() exists but might not be standard in all setups.
110
+ # For now, we can try to inspect catalogs.
111
+ # A standard Spark setup usually has 'spark_catalog' and any defined custom catalogs.
112
+ # We can try to query `SHOW CATALOGS` via SQL
113
+ try:
114
+ df = spark.sql("SHOW CATALOGS")
115
+ catalogs = [row.catalog for row in df.collect()]
116
+ return {"catalogs": catalogs}
117
+ except Exception as e:
118
+ return {"catalogs": [], "error": str(e)}
119
+
120
+ @router.get("/catalogs/{catalog_name}/namespaces")
121
+ def get_namespaces(catalog_name: str, spark: SparkSession = Depends(get_spark_session)):
122
+ """List namespaces in a specific catalog."""
123
+ try:
124
+ # Switch to catalog to list namespaces easily, or use `SHOW NAMESPACES IN catalog`
125
+ df = spark.sql(f"SHOW NAMESPACES IN {catalog_name}")
126
+ # The column name is usually 'namespace'
127
+ namespaces = [row.namespace for row in df.collect()]
128
+ return {"namespaces": namespaces}
129
+ except Exception as e:
130
+ return {"namespaces": [], "error": str(e)}
131
+
132
+ @router.get("/catalogs/{catalog_name}/namespaces/{namespace}/tables")
133
+ def get_tables(catalog_name: str, namespace: str, spark: SparkSession = Depends(get_spark_session)):
134
+ """List tables in a specific namespace."""
135
+ try:
136
+ df = spark.sql(f"SHOW TABLES IN {catalog_name}.{namespace}")
137
+ # Columns: 'namespace', 'tableName', 'isTemporary'
138
+ tables = [row.tableName for row in df.collect()]
139
+ return {"tables": tables}
140
+ except Exception as e:
141
+ return {"tables": [], "error": str(e)}
142
+
143
+ @router.get("/info")
144
+ def get_info(spark: SparkSession = Depends(get_spark_session)):
145
+ return {
146
+ "app_name": spark.sparkContext.appName,
147
+ "spark_version": spark.version,
148
+ "master": spark.sparkContext.master,
149
+ "python_version": sys.version
150
+ }
151
+
152
+
153
+
154
+ @router.get("/health")
155
+ def health_check():
156
+ return {"status": "ok"}
157
+
158
+ @router.get("/search")
159
+ def search_catalog(q: str, spark: SparkSession = Depends(get_spark_session)):
160
+ """Search for catalogs, namespaces, and tables matching the query."""
161
+ query = q.lower()
162
+ results = []
163
+
164
+ try:
165
+ # 1. Search Catalogs
166
+ df_catalogs = spark.sql("SHOW CATALOGS")
167
+ catalogs = [row.catalog for row in df_catalogs.collect()]
168
+
169
+ for cat in catalogs:
170
+ if query in cat.lower():
171
+ results.append({"type": "catalog", "catalog": cat, "name": cat})
172
+
173
+ try:
174
+ # 2. Search Namespaces within Catalog
175
+ df_ns = spark.sql(f"SHOW NAMESPACES IN {cat}")
176
+ namespaces = [row.namespace for row in df_ns.collect()]
177
+
178
+ for ns in namespaces:
179
+ if query in ns.lower():
180
+ results.append({"type": "namespace", "catalog": cat, "namespace": ns, "name": ns})
181
+
182
+ try:
183
+ # 3. Search Tables within Namespace
184
+ df_tables = spark.sql(f"SHOW TABLES IN {cat}.{ns}")
185
+ tables = [row.tableName for row in df_tables.collect()]
186
+
187
+ for table in tables:
188
+ if query in table.lower():
189
+ results.append({
190
+ "type": "table",
191
+ "catalog": cat,
192
+ "namespace": ns,
193
+ "table": table,
194
+ "name": table
195
+ })
196
+ except Exception:
197
+ continue # Ignore individual failures
198
+ except Exception:
199
+ continue
200
+
201
+ except Exception as e:
202
+ return {"results": [], "error": str(e)}
203
+
204
+ return {"results": results}
205
+
206
# Expose every endpoint defined above under the /api prefix.
app.include_router(router, prefix="/api")
208
+
209
+ from fastapi.staticfiles import StaticFiles
210
+ from fastapi.responses import FileResponse, HTMLResponse
211
+ import os
212
+
213
# Mount static files.
# The CLI sets SECONDMATE_STATIC_DIR; fall back to the package-local folder.
static_dir = os.getenv("SECONDMATE_STATIC_DIR", os.path.join(os.path.dirname(__file__), "static"))

if os.path.exists(static_dir):
    app.mount("/assets", StaticFiles(directory=os.path.join(static_dir, "assets")), name="assets")

    # Catch-all route so client-side (SPA) routes fall back to index.html.
    @app.get("/{full_path:path}")
    async def serve_spa(full_path: str):
        from fastapi import HTTPException

        # Exclude API/docs routes from the catch-all. Compare whole path
        # segments so names like "apikey.txt" are still served as files
        # (the old startswith("api") check wrongly swallowed them).
        first_segment = full_path.split("/", 1)[0]
        if first_segment in ("api", "docs", "openapi.json"):
            raise HTTPException(status_code=404, detail="Not Found")

        # Serve an existing static file, but only after confirming the
        # resolved path stays inside static_dir — blocks ../ traversal
        # (the previous join-without-check could serve arbitrary files).
        static_root = os.path.realpath(static_dir)
        file_path = os.path.realpath(os.path.join(static_dir, full_path))
        if os.path.commonpath([static_root, file_path]) == static_root and os.path.isfile(file_path):
            return FileResponse(file_path)

        # Fallback to index.html for SPA routing
        index_path = os.path.join(static_dir, "index.html")
        if os.path.exists(index_path):
            with open(index_path, "r", encoding="utf-8") as f:
                content = f.read()

            # Determine the API base path for the frontend.
            # Priority: PROXY_PREFIX env var -> default "/api".
            # NOTE(review): JUPYTERHUB_SERVICE_PREFIX is usually /user/<name>/;
            # behind jupyter-server-proxy the full path is /user/<name>/proxy/<port>/,
            # so users should set PROXY_PREFIX to the complete proxy path.
            proxy_prefix = os.getenv("PROXY_PREFIX", "")

            # Both "/foo" and "/foo/" must yield "/foo/api".
            api_base = "/api"
            if proxy_prefix:
                api_base = f"{proxy_prefix.rstrip('/')}/api"

            # Inject the runtime config before </head> so the SPA knows
            # where to reach the API.
            injection = f'<script>window.SECONDMATE_CONFIG = {{ apiBaseUrl: "{api_base}" }};</script>'
            content = content.replace("</head>", f"{injection}</head>")

            return HTMLResponse(content=content)
        return {"error": "Frontend not built or static files missing."}
else:
    print(f"Warning: Static directory {static_dir} not found. Frontend will not be served.")
@@ -0,0 +1 @@
1
+
@@ -0,0 +1,25 @@
1
+ from pyspark.sql import SparkSession
2
+ from secondmate.providers.spark_interface import SparkProvider
3
+
4
class LocalSparkProvider(SparkProvider):
    """Spark provider backed by a lazily created local[*] session with Iceberg."""

    def __init__(self, app_name: str = "SecondMateLocal"):
        self.app_name = app_name
        self._session = None

    def get_session(self) -> SparkSession:
        """Build the local session on first use, then reuse it."""
        if self._session is not None:
            return self._session

        builder = SparkSession.builder.appName(self.app_name).master("local[*]")
        settings = {
            "spark.driver.bindAddress": "127.0.0.1",
            "spark.driver.host": "127.0.0.1",
            # Iceberg Configuration
            "spark.jars.packages": "org.apache.iceberg:iceberg-spark-runtime-4.0_2.13:1.10.0",
            "spark.sql.extensions": "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions",
            "spark.sql.catalog.user": "org.apache.iceberg.spark.SparkCatalog",
            "spark.sql.catalog.user.type": "hadoop",
            "spark.sql.catalog.user.warehouse": "warehouse",
        }
        for key, value in settings.items():
            builder = builder.config(key, value)

        self._session = builder.getOrCreate()
        return self._session
@@ -0,0 +1,7 @@
1
+ from typing import Protocol
2
+ from pyspark.sql import SparkSession
3
+
4
class SparkProvider(Protocol):
    """Structural (duck-typed) interface for objects that supply a SparkSession.

    Any class exposing a matching ``get_session`` satisfies this Protocol;
    no explicit inheritance is required.
    """

    def get_session(self) -> SparkSession:
        """Get or create a SparkSession."""
        ...
@@ -0,0 +1 @@
1
+ :root{--bg-primary: #0f172a;--bg-secondary: #1e293b;--bg-tertiary: #334155;--text-primary: #f8fafc;--text-secondary: #94a3b8;--text-muted: #64748b;--accent-primary: #38bdf8;--accent-secondary: #818cf8;--accent-hover: #0ea5e9;--border-color: #334155;--border-hover: #475569;--font-sans: "Inter", system-ui, -apple-system, sans-serif;--font-mono: "JetBrains Mono", "Fira Code", monospace}body{margin:0;padding:0;background-color:var(--bg-primary);color:var(--text-primary);font-family:var(--font-sans);-webkit-font-smoothing:antialiased;height:100vh;width:100vw;overflow:hidden}*{box-sizing:border-box}::-webkit-scrollbar{width:10px;height:10px}::-webkit-scrollbar-track{background:var(--bg-primary)}::-webkit-scrollbar-thumb{background:var(--bg-tertiary);border-radius:5px;border:2px solid var(--bg-primary)}::-webkit-scrollbar-thumb:hover{background:var(--text-muted)}._sidebar_1o808_1{display:flex;flex-direction:column;height:100%;color:var(--text-secondary);font-size:13px}._header_1o808_17{padding:12px;display:flex;justify-content:space-between;align-items:center;border-bottom:1px solid var(--border-color)}._title_1o808_33{font-weight:600;color:var(--text-primary);text-transform:uppercase;font-size:11px;letter-spacing:.05em}._iconButton_1o808_49{background:none;border:none;color:var(--text-secondary);cursor:pointer;padding:4px;border-radius:4px}._iconButton_1o808_49:hover{background-color:var(--bg-tertiary);color:var(--text-primary)}._searchBox_1o808_77{padding:8px 12px;position:relative;border-bottom:1px solid var(--border-color)}._searchIcon_1o808_89{position:absolute;left:20px;top:50%;transform:translateY(-50%);color:var(--text-muted)}._searchInput_1o808_105{width:100%;background-color:var(--bg-primary);border:1px solid var(--border-color);border-radius:4px;padding:6px 
30px;color:var(--text-primary);font-size:12px;outline:none}._clearIcon_1o808_127{position:absolute;right:20px;top:50%;transform:translateY(-50%);color:var(--text-muted);cursor:pointer}._clearIcon_1o808_127:hover{color:var(--text-primary)}._searchInput_1o808_105:focus{border-color:var(--accent-primary)}._tree_1o808_161{flex:1;overflow-y:auto;padding:8px 0}._treeItem_1o808_173{display:flex;flex-direction:column}._treeRow_1o808_183{display:flex;align-items:center;padding:4px 12px;cursor:pointer;gap:6px;-webkit-user-select:none;user-select:none}._treeRow_1o808_183:hover{background-color:var(--bg-tertiary);color:var(--text-primary)}._treeChildren_1o808_211{padding-left:16px}._icon_1o808_49{opacity:.9}._spinner_1o808_227{animation:_spin_1o808_227 1s linear infinite}@keyframes _spin_1o808_227{0%{transform:rotate(0)}to{transform:rotate(360deg)}}._tabs_1o808_255{display:flex;border-bottom:1px solid var(--border-color);background-color:var(--bg-secondary)}._tab_1o808_255{flex:1;padding:8px;text-align:center;cursor:pointer;font-size:11px;font-weight:500;color:var(--text-muted);border-bottom:2px solid transparent;transition:all .2s}._tab_1o808_255:hover{color:var(--text-primary);background-color:var(--bg-tertiary)}._activeTab_1o808_301{color:var(--accent-primary);border-bottom-color:var(--accent-primary);background-color:var(--bg-primary)}._container_14arr_1{height:100vh;display:flex;flex-direction:column;background-color:var(--bg-primary)}._sidebarPanel_14arr_15{background-color:var(--bg-secondary);border-right:1px solid var(--border-color);display:flex;flex-direction:column}._contentPanel_14arr_29{background-color:var(--bg-primary);display:flex;flex-direction:column}._resizeHandle_14arr_41{width:2px;background-color:var(--border-color);transition:background-color 
.2s;cursor:col-resize}._resizeHandle_14arr_41:hover,._resizeHandle_14arr_41:active{background-color:var(--accent-primary)}._statusBar_14arr_65{height:24px;background-color:var(--accent-primary);color:var(--bg-primary);display:flex;align-items:center;padding:0 12px;font-size:12px;font-family:var(--font-mono);justify-content:space-between;font-weight:600}._editorContainer_1tof5_1{height:100%;width:100%;overflow:hidden;background-color:var(--bg-secondary)}._gridContainer_12pjg_1{height:100%;width:100%;overflow:auto;background-color:var(--bg-primary)}._table_12pjg_15{width:100%;border-collapse:collapse;font-size:13px;color:var(--text-primary);font-family:var(--font-mono)}._table_12pjg_15 th{background-color:var(--bg-secondary);position:sticky;top:0;text-align:left;border-bottom:1px solid var(--border-color);border-right:1px solid var(--border-color);padding:8px 12px;font-weight:600;color:var(--text-secondary);-webkit-user-select:none;user-select:none}._table_12pjg_15 td{padding:6px 12px;border-bottom:1px solid var(--border-color);border-right:1px solid var(--border-color);white-space:nowrap}._table_12pjg_15 tr:hover td{background-color:var(--bg-tertiary)}._headerCell_12pjg_79{display:flex;flex-direction:column;gap:2px}._typeLabel_12pjg_91{font-size:10px;color:var(--text-muted);font-weight:400}._container_1wn1h_1{display:flex;flex-direction:column;align-items:center;justify-content:center;height:100%;width:100%;overflow:visible;position:relative;min-height:200px}._scene_1wn1h_16{position:relative;width:20%;min-width:150px;max-width:300px;aspect-ratio:4/3}._river_1wn1h_29{position:absolute;bottom:0;left:-20%;width:140%;height:20%;display:flex;align-items:flex-end;justify-content:space-around;pointer-events:none}._wave_1wn1h_44{width:20%;height:30%;background:#60a5fa;border-radius:50%;opacity:.6;animation:_waveMove_1wn1h_1 2s ease-in-out infinite 
alternate}._wave_1wn1h_44:nth-child(2n){animation-delay:1s;background:#93c5fd}._boatContainer_1wn1h_61{position:absolute;bottom:15%;left:50%;width:80%;transform:translate(-50%);animation:_bob_1wn1h_1 2s ease-in-out infinite;z-index:10}._smoke_1wn1h_74{position:absolute;top:-10%;left:50%;width:8%;height:8%;background:#cbd5e1;border-radius:50%;opacity:0;animation:_puff_1wn1h_1 2s infinite}._smoke_1wn1h_74:nth-child(2){animation-delay:.5s}._smoke_1wn1h_74:nth-child(3){animation-delay:1s}._text_1wn1h_95{margin-top:1rem;font-family:Courier New,Courier,monospace;font-weight:600;color:#1e40af;letter-spacing:1px;animation:_pulse_1wn1h_1 1.5s ease-in-out infinite;text-align:center}@keyframes _waveMove_1wn1h_1{0%{transform:translateY(0)}to{transform:translateY(-5px)}}@keyframes _bob_1wn1h_1{0%,to{transform:translate(-50%) translateY(0) rotate(0)}50%{transform:translate(-50%) translateY(-2%) rotate(1deg)}}@keyframes _puff_1wn1h_1{0%{transform:translateY(0) scale(1);opacity:.8}to{transform:translateY(-300%) scale(3);opacity:0}}@keyframes _pulse_1wn1h_1{0%,to{opacity:1}50%{opacity:.7}}._workspace_3frhl_1{display:flex;flex-direction:column;height:100%}._toolbar_3frhl_13{height:40px;border-bottom:1px solid var(--border-color);display:flex;align-items:center;padding:0 12px;gap:12px;background-color:var(--bg-secondary)}._runButton_3frhl_33{display:flex;align-items:center;gap:6px;background-color:#059669;color:#fff;border:none;padding:6px 12px;border-radius:4px;font-size:12px;font-weight:600;cursor:pointer;transition:background-color .2s}._runButton_3frhl_33:hover{background-color:#047857}._divider_3frhl_71{width:1px;height:20px;background-color:var(--border-color)}._tabTitle_3frhl_83{font-size:13px;color:var(--text-primary)}._content_3frhl_93{flex:1;position:relative}._resizeHandle_3frhl_103{height:2px;background-color:var(--border-color);transition:background-color 
.2s;cursor:row-resize}._resizeHandle_3frhl_103:hover,._resizeHandle_3frhl_103:active{background-color:var(--accent-primary)}._resultsArea_3frhl_127{display:flex;flex-direction:column;height:100%;background-color:var(--bg-primary)}._resultsHeader_3frhl_141{padding:8px 12px;border-bottom:1px solid var(--border-color);font-size:12px;font-weight:600;color:var(--text-secondary);display:flex;justify-content:space-between;background-color:var(--bg-secondary)}._meta_3frhl_163{font-weight:400;color:var(--text-muted)}._emptyState_3frhl_173{display:flex;flex-direction:column;align-items:center;justify-content:center;height:100%;padding:2rem;text-align:center;animation:_fadeInZoom_3frhl_1 .5s ease-out forwards;color:var(--text-primary)}._emptyStateHeader_3frhl_197{font-size:1.25rem;font-weight:500;color:var(--text-secondary);margin-bottom:1rem;letter-spacing:.025em}._emptyStateMain_3frhl_213{font-size:1.875rem;font-weight:700;color:var(--text-primary);max-width:42rem;line-height:1.25}@keyframes _fadeInZoom_3frhl_1{0%{opacity:0;transform:scale(.95)}to{opacity:1;transform:scale(1)}}