lorax-arg 0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lorax/buffer.py +43 -0
- lorax/cache/__init__.py +43 -0
- lorax/cache/csv_tree_graph.py +59 -0
- lorax/cache/disk.py +467 -0
- lorax/cache/file_cache.py +142 -0
- lorax/cache/file_context.py +72 -0
- lorax/cache/lru.py +90 -0
- lorax/cache/tree_graph.py +293 -0
- lorax/cli.py +312 -0
- lorax/cloud/__init__.py +0 -0
- lorax/cloud/gcs_utils.py +205 -0
- lorax/constants.py +66 -0
- lorax/context.py +80 -0
- lorax/csv/__init__.py +7 -0
- lorax/csv/config.py +250 -0
- lorax/csv/layout.py +182 -0
- lorax/csv/newick_tree.py +234 -0
- lorax/handlers.py +998 -0
- lorax/lineage.py +456 -0
- lorax/loaders/__init__.py +0 -0
- lorax/loaders/csv_loader.py +10 -0
- lorax/loaders/loader.py +31 -0
- lorax/loaders/tskit_loader.py +119 -0
- lorax/lorax_app.py +75 -0
- lorax/manager.py +58 -0
- lorax/metadata/__init__.py +0 -0
- lorax/metadata/loader.py +426 -0
- lorax/metadata/mutations.py +146 -0
- lorax/modes.py +190 -0
- lorax/pg.py +183 -0
- lorax/redis_utils.py +30 -0
- lorax/routes.py +137 -0
- lorax/session_manager.py +206 -0
- lorax/sockets/__init__.py +55 -0
- lorax/sockets/connection.py +99 -0
- lorax/sockets/debug.py +47 -0
- lorax/sockets/decorators.py +112 -0
- lorax/sockets/file_ops.py +200 -0
- lorax/sockets/lineage.py +307 -0
- lorax/sockets/metadata.py +232 -0
- lorax/sockets/mutations.py +154 -0
- lorax/sockets/node_search.py +535 -0
- lorax/sockets/tree_layout.py +117 -0
- lorax/sockets/utils.py +10 -0
- lorax/tree_graph/__init__.py +12 -0
- lorax/tree_graph/tree_graph.py +689 -0
- lorax/utils.py +124 -0
- lorax_app/__init__.py +4 -0
- lorax_app/app.py +159 -0
- lorax_app/cli.py +114 -0
- lorax_app/static/X.png +0 -0
- lorax_app/static/assets/index-BCEGlUFi.js +2361 -0
- lorax_app/static/assets/index-iKjzUpA9.css +1 -0
- lorax_app/static/assets/localBackendWorker-BaWwjSV_.js +2 -0
- lorax_app/static/assets/renderDataWorker-BKLdiU7J.js +2 -0
- lorax_app/static/gestures/gesture-flick.ogv +0 -0
- lorax_app/static/gestures/gesture-two-finger-scroll.ogv +0 -0
- lorax_app/static/index.html +14 -0
- lorax_app/static/logo.png +0 -0
- lorax_app/static/lorax-logo.png +0 -0
- lorax_app/static/vite.svg +1 -0
- lorax_arg-0.1.dist-info/METADATA +131 -0
- lorax_arg-0.1.dist-info/RECORD +66 -0
- lorax_arg-0.1.dist-info/WHEEL +5 -0
- lorax_arg-0.1.dist-info/entry_points.txt +4 -0
- lorax_arg-0.1.dist-info/top_level.txt +2 -0
lorax/modes.py
ADDED
|
@@ -0,0 +1,190 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Lorax Deployment Mode Detection and Configuration
|
|
3
|
+
|
|
4
|
+
Supports three deployment modes:
|
|
5
|
+
- local: Conda package / desktop use (single user, no Redis/GCS)
|
|
6
|
+
- development: Developer mode (single process, optional GCS)
|
|
7
|
+
- production: Cloud deployment (multi-worker, Redis + GCS required)
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
import os
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
from dataclasses import dataclass
|
|
13
|
+
from typing import Optional
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
@dataclass
class ModeConfig:
    """Configuration for a deployment mode."""
    # Mode identifier: "local", "development", or "production".
    mode: str
    # In-memory cache size settings (entry counts for the respective caches).
    ts_cache_size: int
    config_cache_size: int
    metadata_cache_size: int
    # On-disk cache toggle and its size budget in gigabytes.
    disk_cache_enabled: bool
    disk_cache_max_gb: int
    # Per-session socket cap; only enforced when enforce_connection_limits is True.
    max_sockets_per_session: int
    enforce_connection_limits: bool
    # Root directory for this mode's data (uploads, cache subdirectories).
    data_dir: Path
    # Hard service requirements checked by validate_mode_requirements().
    require_redis: bool
    require_gcs: bool
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
# Mode-specific defaults. These are the baseline values for each deployment
# mode; get_mode_config() applies environment-variable overrides on top.
MODE_CONFIGS = {
    # Conda package / desktop use: single user, everything on local disk.
    "local": ModeConfig(
        mode="local",
        ts_cache_size=5,
        config_cache_size=2,
        metadata_cache_size=10,
        disk_cache_enabled=False,  # No GCS in local mode
        disk_cache_max_gb=10,
        max_sockets_per_session=100,  # Effectively unlimited
        enforce_connection_limits=False,
        data_dir=Path.home() / ".lorax",
        require_redis=False,
        require_gcs=False,
    ),
    # Developer mode: single process, relaxed limits, optional GCS.
    "development": ModeConfig(
        mode="development",
        ts_cache_size=5,
        config_cache_size=2,
        metadata_cache_size=10,
        disk_cache_enabled=True,
        disk_cache_max_gb=10,
        max_sockets_per_session=100,  # Relaxed for testing
        enforce_connection_limits=False,
        data_dir=Path("UPLOADS"),
        require_redis=False,
        require_gcs=False,
    ),
    # Cloud deployment: multi-worker, Redis + GCS mandatory, strict limits.
    "production": ModeConfig(
        mode="production",
        ts_cache_size=2,  # Lower for memory efficiency with many workers
        config_cache_size=2,
        metadata_cache_size=5,
        disk_cache_enabled=True,
        disk_cache_max_gb=50,
        max_sockets_per_session=5,
        enforce_connection_limits=True,
        data_dir=Path("/var/lorax"),
        require_redis=True,
        require_gcs=True,
    ),
}
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def detect_mode() -> str:
    """
    Detect the deployment mode based on environment variables.

    Priority:
    1. Explicit LORAX_MODE environment variable
    2. Auto-detect based on REDIS_CLUSTER and GCS_BUCKET_NAME
    3. Default to 'local' for conda package usage
    """
    requested = os.getenv("LORAX_MODE", "").lower()
    if requested in MODE_CONFIGS:
        return requested

    # No explicit (valid) mode: infer from which backing services are configured.
    redis_configured = bool(os.getenv("REDIS_CLUSTER"))
    gcs_configured = bool(os.getenv("GCS_BUCKET_NAME") or os.getenv("BUCKET_NAME"))

    if redis_configured and gcs_configured:
        return "production"
    if gcs_configured:
        return "development"
    return "local"
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
def get_mode_config(mode: Optional[str] = None) -> ModeConfig:
    """
    Get configuration for a specific mode.

    Args:
        mode: Mode name or None to auto-detect

    Returns:
        ModeConfig with all settings for the mode (environment variables
        override the per-mode defaults for sizes and the data directory)
    """
    selected = detect_mode() if mode is None else mode
    base = MODE_CONFIGS.get(selected, MODE_CONFIGS["local"])

    def _int_env(var: str, fallback: int) -> int:
        # os.getenv returns the fallback untouched when the var is unset;
        # int() accepts either the int fallback or the env string.
        return int(os.getenv(var, fallback))

    # Build a fresh ModeConfig so the shared defaults in MODE_CONFIGS are
    # never mutated by overrides.
    return ModeConfig(
        mode=base.mode,
        ts_cache_size=_int_env("TS_CACHE_SIZE", base.ts_cache_size),
        config_cache_size=_int_env("CONFIG_CACHE_SIZE", base.config_cache_size),
        metadata_cache_size=_int_env("METADATA_CACHE_SIZE", base.metadata_cache_size),
        disk_cache_enabled=base.disk_cache_enabled,
        disk_cache_max_gb=_int_env("DISK_CACHE_MAX_GB", base.disk_cache_max_gb),
        max_sockets_per_session=_int_env("MAX_SOCKETS_PER_SESSION", base.max_sockets_per_session),
        enforce_connection_limits=base.enforce_connection_limits,
        data_dir=Path(os.getenv("LORAX_DATA_DIR", base.data_dir)),
        require_redis=base.require_redis,
        require_gcs=base.require_gcs,
    )
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def get_data_dir(mode_config: Optional[ModeConfig] = None) -> Path:
    """Get the data directory for the current mode, creating it if needed."""
    cfg = mode_config if mode_config is not None else get_mode_config()
    target = cfg.data_dir
    target.mkdir(parents=True, exist_ok=True)
    return target
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
def get_uploads_dir(mode_config: Optional[ModeConfig] = None) -> Path:
    """Get the uploads directory for the current mode, creating it if needed.

    Local mode keeps uploads under ``<data_dir>/uploads``; other modes use
    the data directory itself.
    """
    # BUG FIX: resolve the config *before* the mode check. Previously, when
    # called with mode_config=None, the auto-detected config was only seen by
    # get_data_dir(), so the "local" branch below never fired and local mode
    # silently skipped the "uploads" subdirectory.
    if mode_config is None:
        mode_config = get_mode_config()
    data_dir = get_data_dir(mode_config)
    uploads_dir = data_dir / "uploads" if mode_config.mode == "local" else data_dir
    uploads_dir.mkdir(parents=True, exist_ok=True)
    return uploads_dir
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
def get_cache_dir(mode_config: Optional[ModeConfig] = None) -> Path:
    """Get the disk cache directory for the current mode, creating it if needed."""
    if mode_config is None:
        mode_config = get_mode_config()

    # An explicit DISK_CACHE_DIR wins over all mode defaults.
    override = os.getenv("DISK_CACHE_DIR")
    if override:
        cache_dir = Path(override)
    else:
        per_mode = {
            "local": mode_config.data_dir / "cache",
            "development": Path("/tmp/lorax_cache"),
        }
        # Production (and any unknown mode) falls back to the system location.
        cache_dir = per_mode.get(mode_config.mode, Path("/var/lorax/cache"))

    cache_dir.mkdir(parents=True, exist_ok=True)
    return cache_dir
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
def validate_mode_requirements(mode_config: ModeConfig) -> list:
|
|
167
|
+
"""
|
|
168
|
+
Validate that required services are available for the mode.
|
|
169
|
+
|
|
170
|
+
Returns:
|
|
171
|
+
List of validation errors (empty if all requirements met)
|
|
172
|
+
"""
|
|
173
|
+
errors = []
|
|
174
|
+
|
|
175
|
+
if mode_config.require_redis and not os.getenv("REDIS_CLUSTER"):
|
|
176
|
+
errors.append(f"{mode_config.mode} mode requires REDIS_CLUSTER environment variable")
|
|
177
|
+
|
|
178
|
+
if mode_config.require_gcs:
|
|
179
|
+
bucket = os.getenv("GCS_BUCKET_NAME") or os.getenv("BUCKET_NAME")
|
|
180
|
+
if not bucket:
|
|
181
|
+
errors.append(f"{mode_config.mode} mode requires GCS_BUCKET_NAME environment variable")
|
|
182
|
+
|
|
183
|
+
return errors
|
|
184
|
+
|
|
185
|
+
|
|
186
|
+
# Initialize on import: detect the mode once and build its effective config so
# other modules can simply `from lorax.modes import CURRENT_MODE, CURRENT_CONFIG`.
CURRENT_MODE = detect_mode()
CURRENT_CONFIG = get_mode_config(CURRENT_MODE)

# NOTE(review): import-time print; consider the logging module so library users
# can silence it.
print(f"Lorax mode: {CURRENT_MODE}")
|
lorax/pg.py
ADDED
|
@@ -0,0 +1,183 @@
|
|
|
1
|
+
|
|
2
|
+
import argparse
import sys
import math

# tskit is optional at import time so this script can be inspected (e.g. for
# --help generation) without it installed; any actual run fails at
# tskit.load() below.
try:
    import tskit  # noqa: F401
except ImportError:  # was `except Exception as e` with an unused binding
    pass
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def clamp(v, lo, hi):
    """Clamp *v* into the inclusive range [lo, hi]."""
    if v < lo:
        return lo
    if v > hi:
        return hi
    return v
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def write_track_line(out, name, description, visibility=2, use_score=True, item_rgb=True):
    """Write a UCSC genome-browser track definition line to *out*.

    The line always carries name and visibility; description, useScore and
    itemRgb fields are appended only when requested.
    """
    pieces = [f'track name="{name}"']
    if description:
        pieces.append(f'description="{description}"')
    pieces.append(f'visibility={int(visibility)}')
    pieces.extend(["useScore=1"] if use_score else [])
    pieces.extend(['itemRgb="On"'] if item_rgb else [])
    out.write(" ".join(pieces) + "\n")
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def tree_stat(tree, stat):
    """
    Return a per-tree statistic used for score/color.

    Supported values for *stat*:
    - root_time: time of the (single) root; if multiple roots, max root time
    - total_branch_length: sum of branch lengths (in time units)
    - n_roots: number of roots (integer)

    Raises ValueError for any other *stat*.
    """
    if stat == "root_time":
        # Newer tskit exposes tree.roots; older versions only tree.root.
        if hasattr(tree, "roots"):
            roots = list(tree.roots)
        else:
            roots = [] if tree.root == tskit.NULL else [tree.root]
        # Empty forest -> 0.0, otherwise the latest root time.
        return max((tree.time(r) for r in roots), default=0.0)

    if stat == "total_branch_length":
        # Sum of (parent time - child time) over every edge in this tree;
        # clamped at zero for safety.
        length = sum(
            tree.time(parent) - tree.time(child)
            for child in tree.nodes()
            for parent in (tree.parent(child),)
            if parent != tskit.NULL
        )
        return max(0.0, length)

    if stat == "n_roots":
        return float(len(list(tree.roots)))

    raise ValueError(f"Unsupported stat: {stat}")
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def main():
    """Convert a tskit tree sequence into a BED file.

    Emits one interval per local tree (default) or per site, either as plain
    3-column BED or as 9-column BED with a score and grayscale color derived
    from a per-tree statistic (see tree_stat).
    """
    p = argparse.ArgumentParser(
        description="Create a BED file from a tskit tree sequence."
    )
    p.add_argument("input", help="Input .trees (tskit tree sequence)")
    p.add_argument("output", help="Output .bed path, or '-' for stdout")
    p.add_argument("--chrom", default="chr1", help="Chromosome/contig name for BED (default: chr1)")
    p.add_argument("--mode", choices=["trees", "sites"], default="trees",
                   help="Emit intervals per tree or per site (default: trees)")
    p.add_argument("--stat", choices=["root_time", "total_branch_length", "n_roots"],
                   default="root_time", help="Per-tree stat for score/color (trees mode)")
    p.add_argument("--ucsc-track", action="store_true",
                   help="Prepend a UCSC track line to the BED output")
    p.add_argument("--track-name", default="tskit-BED",
                   help="Name to use in UCSC track line (if --ucsc-track)")
    p.add_argument("--track-description", default="Generated by tskit_to_bed.py",
                   help="Description to use in UCSC track line")
    p.add_argument("--no-bed9", action="store_true",
                   help="Write simple 3-column BED (chrom, start, end). Default is 9-column BED with score/color.")
    p.add_argument("--precision", type=int, default=0,
                   help="Round interval coordinates to this many decimals before casting to int (default: 0)")

    args = p.parse_args()

    # Fails with NameError here if tskit could not be imported at module load.
    ts = tskit.load(args.input)

    # Choose output: '-' means stdout (and must not be closed by us).
    if args.output == "-":
        out = sys.stdout
        close_out = False
    else:
        out = open(args.output, "w", encoding="utf-8")
        close_out = True

    try:
        if args.ucsc_track:
            # Score/color columns only exist in BED9, so only advertise them then.
            write_track_line(
                out,
                name=args.track_name,
                description=args.track_description,
                visibility=2,
                use_score=not args.no_bed9,
                item_rgb=not args.no_bed9,
            )

        chrom = args.chrom

        if args.mode == "sites":
            # Emit per-site 3-col or 6/9-col BED. We keep it simple: 3-col unless bed9 is desired.
            # NOTE(review): ts.variants() decodes genotypes per site; if only
            # positions/alleles are needed, ts.sites() would be cheaper — confirm.
            for var in ts.variants():
                # tskit positions are float; BED wants ints. Round then cast.
                start = int(round(var.position, args.precision))
                end = start + 1  # a site is a single base
                if args.no_bed9:
                    out.write(f"{chrom}\t{start}\t{end}\n")
                else:
                    # Variant may not carry an `id`; getattr keeps this portable.
                    name = getattr(var, "id", None)
                    if name is None:
                        # fallback to pos + first alt allele
                        alleles = [a for a in var.alleles if a is not None]
                        name = f"pos{start}:{'/'.join(alleles[:2])}" if alleles else f"pos{start}"
                    score = 0
                    strand = "."
                    thickStart = start
                    thickEnd = end
                    rgb = "0,0,0"
                    out.write(f"{chrom}\t{start}\t{end}\t{name}\t{score}\t{strand}\t{thickStart}\t{thickEnd}\t{rgb}\n")
            return

        # trees mode
        # First pass: collect stats and intervals so score/color can be
        # normalized against the maximum stat value in the second pass.
        intervals = []
        stat_values = []
        for tree in ts.trees(tracked_samples=None):
            left = int(round(tree.interval.left, args.precision))
            right = int(round(tree.interval.right, args.precision))
            if right <= left:
                # Rounding can collapse short trees to zero width; skip them.
                continue
            val = tree_stat(tree, args.stat)
            intervals.append((left, right, tree.index, val))
            stat_values.append(val)

        if not intervals:
            return

        # Determine scaling for score/color; guard against a zero/negative max
        # so the divisions below are always safe.
        vmax = max(stat_values) if stat_values else 1.0
        if vmax <= 0:
            vmax = 1.0

        for left, right, tidx, val in intervals:
            if args.no_bed9:
                out.write(f"{chrom}\t{left}\t{right}\n")
                continue

            # name, score, strand, thickStart, thickEnd, itemRgb
            name = f"tree_{tidx}"
            # UCSC BED scores are 0..1000; scale relative to the max stat.
            score = int(round(1000.0 * (val / vmax)))
            score = clamp(score, 0, 1000)
            strand = "."
            thickStart = left
            thickEnd = right

            # Map val to grayscale 0..255
            if vmax > 0:
                g = int(round(255.0 * (val / vmax)))
            else:
                g = 0
            g = clamp(g, 0, 255)
            rgb = f"{g},{g},{g}"

            out.write(f"{chrom}\t{left}\t{right}\t{name}\t{score}\t{strand}\t{thickStart}\t{thickEnd}\t{rgb}\n")

    finally:
        # Only close files we opened ourselves — never stdout.
        if close_out:
            out.close()
|
|
180
|
+
|
|
181
|
+
|
|
182
|
+
# Standard script entry point; keeps the module importable without side effects.
if __name__ == "__main__":
    main()
|
lorax/redis_utils.py
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import os
|
|
2
|
+
from typing import Optional, Tuple
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
def get_redis_config() -> Tuple[Optional[str], bool]:
    """
    Returns (redis_url, is_cluster).

    - REDIS_CLUSTER provides the cluster endpoint URL.
    """
    endpoint = os.getenv("REDIS_CLUSTER")
    if endpoint:
        return endpoint, True
    # Unset or empty: no Redis, not a cluster.
    return endpoint, False
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def create_redis_client(
    redis_url: Optional[str],
    *,
    decode_responses: bool,
    cluster: bool,
):
    """Build an async Redis client for *redis_url*, or None when no URL is set.

    The redis package is imported lazily so the dependency is only needed
    when a URL is actually configured.
    """
    if not redis_url:
        return None

    if cluster:
        from redis.asyncio.cluster import RedisCluster

        factory = RedisCluster.from_url
    else:
        import redis.asyncio as aioredis

        factory = aioredis.from_url
    return factory(redis_url, decode_responses=decode_responses)
|
lorax/routes.py
ADDED
|
@@ -0,0 +1,137 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import json
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from typing import Optional
|
|
5
|
+
import aiofiles
|
|
6
|
+
|
|
7
|
+
from fastapi import APIRouter, Request, Response, UploadFile, File, Query
|
|
8
|
+
from fastapi.responses import JSONResponse
|
|
9
|
+
|
|
10
|
+
from lorax.context import session_manager, BUCKET_NAME
|
|
11
|
+
from lorax.modes import CURRENT_MODE
|
|
12
|
+
from lorax.constants import UPLOADS_DIR
|
|
13
|
+
from lorax.cloud.gcs_utils import upload_to_gcs
|
|
14
|
+
from lorax.handlers import (
|
|
15
|
+
handle_upload,
|
|
16
|
+
get_projects,
|
|
17
|
+
cache_status,
|
|
18
|
+
)
|
|
19
|
+
|
|
20
|
+
router = APIRouter()
# Root directory for served datasets and user uploads (path from constants).
UPLOAD_DIR = Path(UPLOADS_DIR)
UPLOAD_DIR.mkdir(exist_ok=True)  # ensure it exists at import time
|
|
23
|
+
|
|
24
|
+
@router.get("/health")
|
|
25
|
+
async def healthz():
|
|
26
|
+
redis_ok = await session_manager.health_check()
|
|
27
|
+
return {"ok": True, "redis": redis_ok}
|
|
28
|
+
|
|
29
|
+
@router.get("/fevicon.ico")
|
|
30
|
+
async def favicon():
|
|
31
|
+
return Response(content="", media_type="image/x-icon")
|
|
32
|
+
|
|
33
|
+
@router.get("/")
|
|
34
|
+
async def root():
|
|
35
|
+
return Response(content="Lorax Backend is running...", media_type="text/html")
|
|
36
|
+
|
|
37
|
+
@router.post("/init-session")
|
|
38
|
+
async def init_session(request: Request, response: Response):
|
|
39
|
+
sid, session = await session_manager.get_or_create_session(request, response)
|
|
40
|
+
print("init-session:", sid)
|
|
41
|
+
return {"sid": sid}
|
|
42
|
+
|
|
43
|
+
@router.get("/projects")
|
|
44
|
+
async def projects(request: Request, response: Response):
|
|
45
|
+
sid, session = await session_manager.get_or_create_session(request, response)
|
|
46
|
+
projects = await get_projects(UPLOAD_DIR, BUCKET_NAME, sid=sid)
|
|
47
|
+
return {"projects": projects}
|
|
48
|
+
|
|
49
|
+
@router.get("/memory_status")
|
|
50
|
+
async def memory_status():
|
|
51
|
+
print("cache-status")
|
|
52
|
+
return await cache_status()
|
|
53
|
+
|
|
54
|
+
@router.get("/{file}")
|
|
55
|
+
async def get_file(
|
|
56
|
+
request: Request,
|
|
57
|
+
response: Response,
|
|
58
|
+
file: Optional[str] = None,
|
|
59
|
+
project: Optional[str] = Query(None),
|
|
60
|
+
genomiccoordstart: Optional[int] = Query(None),
|
|
61
|
+
genomiccoordend: Optional[int] = Query(None),
|
|
62
|
+
share_sid: Optional[str] = Query(None),
|
|
63
|
+
):
|
|
64
|
+
sid, session = await session_manager.get_or_create_session(request, response)
|
|
65
|
+
if project == "Uploads" and share_sid and share_sid != sid:
|
|
66
|
+
print(f"⚠️ share_sid denied for sid={sid} target={share_sid}")
|
|
67
|
+
return JSONResponse(
|
|
68
|
+
status_code=403,
|
|
69
|
+
content={"error": "Access denied for shared upload."},
|
|
70
|
+
)
|
|
71
|
+
if file and file != "" and file != "ucgb":
|
|
72
|
+
if project == 'Uploads':
|
|
73
|
+
target_sid = share_sid if share_sid else sid
|
|
74
|
+
file_path = UPLOAD_DIR / project / target_sid / file
|
|
75
|
+
else:
|
|
76
|
+
file_path = UPLOAD_DIR / (project or "") / file
|
|
77
|
+
else:
|
|
78
|
+
file = "1kg_chr20.trees.tsz"
|
|
79
|
+
file_path = UPLOAD_DIR / (project or "1000Genomes") / file
|
|
80
|
+
try:
|
|
81
|
+
ctx = await handle_upload(str(file_path), str(UPLOAD_DIR))
|
|
82
|
+
viz_config = ctx.config
|
|
83
|
+
|
|
84
|
+
# Override initial_position if client provided genomic coordinates
|
|
85
|
+
if genomiccoordstart is not None and genomiccoordend is not None:
|
|
86
|
+
viz_config['initial_position'] = [genomiccoordstart, genomiccoordend]
|
|
87
|
+
|
|
88
|
+
# Update session with loaded file path
|
|
89
|
+
session.file_path = str(file_path)
|
|
90
|
+
await session_manager.save_session(session)
|
|
91
|
+
|
|
92
|
+
except Exception as e:
|
|
93
|
+
print(f"❌ Error loading file: {e}")
|
|
94
|
+
return {"error": str(e)}
|
|
95
|
+
|
|
96
|
+
return {
|
|
97
|
+
"sid": sid,
|
|
98
|
+
"file": file,
|
|
99
|
+
"project": project,
|
|
100
|
+
"config": viz_config,
|
|
101
|
+
"genomiccoordstart": genomiccoordstart,
|
|
102
|
+
"genomiccoordend": genomiccoordend,
|
|
103
|
+
}
|
|
104
|
+
|
|
105
|
+
@router.post("/upload")
|
|
106
|
+
async def upload(request: Request, response: Response, file: UploadFile = File(...)):
|
|
107
|
+
"""
|
|
108
|
+
Upload a file to the server. Stores locally for all modes; uploads to GCS
|
|
109
|
+
only when not running in local mode.
|
|
110
|
+
"""
|
|
111
|
+
sid, session = await session_manager.get_or_create_session(request, response)
|
|
112
|
+
|
|
113
|
+
# Local mode: flat under Uploads; Non-local: session-scoped folder
|
|
114
|
+
if CURRENT_MODE == "local":
|
|
115
|
+
user_dir = UPLOAD_DIR / "Uploads"
|
|
116
|
+
else:
|
|
117
|
+
user_dir = UPLOAD_DIR / "Uploads" / sid
|
|
118
|
+
user_dir.mkdir(parents=True, exist_ok=True)
|
|
119
|
+
|
|
120
|
+
file_path = user_dir / file.filename
|
|
121
|
+
|
|
122
|
+
try:
|
|
123
|
+
async with aiofiles.open(file_path, "wb") as f:
|
|
124
|
+
while chunk := await file.read(1024 * 1024):
|
|
125
|
+
await f.write(chunk)
|
|
126
|
+
|
|
127
|
+
# Upload to GCS asynchronously when allowed
|
|
128
|
+
if CURRENT_MODE != "local" and BUCKET_NAME:
|
|
129
|
+
await upload_to_gcs(BUCKET_NAME, file_path, sid)
|
|
130
|
+
|
|
131
|
+
return JSONResponse(
|
|
132
|
+
status_code=200,
|
|
133
|
+
content={"message": "File uploaded", "sid": sid, "owner_sid": sid, "filename": file.filename}
|
|
134
|
+
)
|
|
135
|
+
except Exception as e:
|
|
136
|
+
print("❌ Upload error:", e)
|
|
137
|
+
return JSONResponse(status_code=500, content={"error": "Upload error"})
|