orbitable 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- orbitable/__init__.py +3 -0
- orbitable/_version.py +24 -0
- orbitable/cli.py +216 -0
- orbitable/config.py +118 -0
- orbitable/generator.py +148 -0
- orbitable/ingest.py +211 -0
- orbitable/model.py +181 -0
- orbitable/reader.py +204 -0
- orbitable-0.2.0.dist-info/METADATA +218 -0
- orbitable-0.2.0.dist-info/RECORD +12 -0
- orbitable-0.2.0.dist-info/WHEEL +4 -0
- orbitable-0.2.0.dist-info/entry_points.txt +2 -0
orbitable/__init__.py
ADDED
orbitable/_version.py
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
# file generated by vcs-versioning
# don't change, don't track in version control
from __future__ import annotations

__all__ = [
    "__version__",
    "__version_tuple__",
    "version",
    "version_tuple",
    "__commit_id__",
    "commit_id",
]

# Type declarations above, concrete values (filled in at build time) below.
version: str
__version__: str
__version_tuple__: tuple[int | str, ...]
version_tuple: tuple[int | str, ...]
commit_id: str | None
__commit_id__: str | None

__version__ = version = '0.2.0'
__version_tuple__ = version_tuple = (0, 2, 0)

# None when the build was not made from a tracked commit.
__commit_id__ = commit_id = None
|
orbitable/cli.py
ADDED
|
@@ -0,0 +1,216 @@
|
|
|
1
|
+
import argparse
|
|
2
|
+
import sys
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
|
|
5
|
+
from loguru import logger
|
|
6
|
+
from sqlalchemy.orm import sessionmaker
|
|
7
|
+
|
|
8
|
+
from orbitable.config import load_config
|
|
9
|
+
from orbitable.generator import generate
|
|
10
|
+
from orbitable.ingest import ingest_file, ingest_sources
|
|
11
|
+
from orbitable.model import Base
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def _setup_logging(level: str) -> None:
    """Replace loguru's default sink with a stderr sink at *level*."""
    normalized = level.upper()
    logger.remove()
    logger.add(sys.stderr, level=normalized)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def _get_session(config):
    """Create the schema if needed and return an open (session, engine) pair."""
    engine = config.database.engine
    Base.metadata.create_all(engine)
    session_factory = sessionmaker(bind=engine)
    session = session_factory()
    return session, engine
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
CONFIG_TEMPLATE = """\
|
|
27
|
+
# Orbitable configuration
|
|
28
|
+
# See: https://github.com/jolsten/orbitable
|
|
29
|
+
|
|
30
|
+
# ----------------------------------------------
|
|
31
|
+
# Database connection
|
|
32
|
+
# ----------------------------------------------
|
|
33
|
+
[database]
|
|
34
|
+
# SQLAlchemy driver string. Examples:
|
|
35
|
+
# "sqlite" - local SQLite file (default)
|
|
36
|
+
# "mysql+pymysql" - MariaDB/MySQL via PyMySQL (install with: pip install orbitable[mysql])
|
|
37
|
+
drivername = "sqlite"
|
|
38
|
+
|
|
39
|
+
# For SQLite: path to the database file (relative or absolute)
|
|
40
|
+
# For MariaDB/MySQL: the database name
|
|
41
|
+
name = "orbitable.db"
|
|
42
|
+
|
|
43
|
+
# Uncomment for MariaDB/MySQL:
|
|
44
|
+
# host = "localhost"
|
|
45
|
+
# port = 3306
|
|
46
|
+
#
|
|
47
|
+
# Credentials are loaded separately (never put passwords here).
|
|
48
|
+
# Resolution order (highest priority first):
|
|
49
|
+
# 1. Environment variables: ORBITABLE_DATABASE__USERNAME / ORBITABLE_DATABASE__PASSWORD
|
|
50
|
+
# 2. User secrets file: ~/.config/orbitable.toml
|
|
51
|
+
# 3. System secrets file: set secrets_file below
|
|
52
|
+
#
|
|
53
|
+
# secrets_file = "/etc/orbitable/secrets.toml"
|
|
54
|
+
|
|
55
|
+
# ----------------------------------------------
|
|
56
|
+
# Ingest sources
|
|
57
|
+
# ----------------------------------------------
|
|
58
|
+
# Each [[ingest.sources]] entry defines a directory to scan for TLE/OMM files.
|
|
59
|
+
# You can have multiple entries. File format is auto-detected by extension:
|
|
60
|
+
# .tle / .txt / .3le -> Two-Line Element format
|
|
61
|
+
# .json -> Space-Track OMM JSON
|
|
62
|
+
# .csv -> OMM CSV
|
|
63
|
+
# .xml -> OMM XML
|
|
64
|
+
|
|
65
|
+
[[ingest.sources]]
|
|
66
|
+
path = "./incoming"
|
|
67
|
+
pattern = "*.tle" # glob pattern for matching files
|
|
68
|
+
|
|
69
|
+
# Add more sources as needed:
|
|
70
|
+
# [[ingest.sources]]
|
|
71
|
+
# path = "/data/spacetrack/daily"
|
|
72
|
+
# pattern = "*.json"
|
|
73
|
+
|
|
74
|
+
# ----------------------------------------------
|
|
75
|
+
# Output generation
|
|
76
|
+
# ----------------------------------------------
|
|
77
|
+
[output]
|
|
78
|
+
dir = "./output" # directory where generated files are written
|
|
79
|
+
|
|
80
|
+
# Which output formats to produce
|
|
81
|
+
[output.formats]
|
|
82
|
+
tle = true # two-line element format (.tle files)
|
|
83
|
+
omm = true # OMM CSV format (.omm files)
|
|
84
|
+
|
|
85
|
+
# Which output file types to generate
|
|
86
|
+
[output.types]
|
|
87
|
+
date_files = true # one file per date: YYYYMMDD.{tle,omm}
|
|
88
|
+
# contains the latest TLE per satellite for that date
|
|
89
|
+
object_files = true # one file per satellite: NORAD_ID.{tle,omm}
|
|
90
|
+
# contains all TLEs for that satellite, ordered by epoch
|
|
91
|
+
|
|
92
|
+
# ----------------------------------------------
|
|
93
|
+
# Logging
|
|
94
|
+
# ----------------------------------------------
|
|
95
|
+
[logging]
|
|
96
|
+
level = "INFO" # DEBUG, INFO, WARNING, ERROR, CRITICAL
|
|
97
|
+
"""
|
|
98
|
+
|
|
99
|
+
SECRETS_TEMPLATE = """\
|
|
100
|
+
# Orbitable user secrets
|
|
101
|
+
# This file stores database credentials for your user account.
|
|
102
|
+
# Keep this file private (chmod 600 on Linux/macOS).
|
|
103
|
+
#
|
|
104
|
+
# These values are overridden by environment variables:
|
|
105
|
+
# ORBITABLE_DATABASE__USERNAME
|
|
106
|
+
# ORBITABLE_DATABASE__PASSWORD
|
|
107
|
+
|
|
108
|
+
username = ""
|
|
109
|
+
password = ""
|
|
110
|
+
"""
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
def cmd_init(args: argparse.Namespace) -> None:
    """Scaffold config.toml and the user secrets file.

    Existing files are never overwritten; a notice is printed for each
    file that already exists.
    """
    config_path = args.config
    secrets_path = Path.home() / ".config" / "orbitable.toml"

    created = []

    if config_path.exists():
        print(f"Config already exists: {config_path}")
    else:
        config_path.parent.mkdir(parents=True, exist_ok=True)
        config_path.write_text(CONFIG_TEMPLATE)
        created.append(str(config_path))

    if secrets_path.exists():
        print(f"Secrets file already exists: {secrets_path}")
    else:
        secrets_path.parent.mkdir(parents=True, exist_ok=True)
        secrets_path.write_text(SECRETS_TEMPLATE)
        created.append(str(secrets_path))

    if created:
        print("Created:")
        for path in created:
            print(f" {path}")
        print("\nEdit these files to configure your database and credentials.")
    else:
        # Was `elif not created:` — the condition is always true when the
        # `if` branch is not taken, so a plain else is equivalent and clearer.
        # Also fixed the missing space in the message ("-all" -> "- all").
        print("Nothing to do - all files already exist.")
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
def cmd_ingest(args: argparse.Namespace) -> None:
    """Ingest explicit files when given, otherwise scan configured sources."""
    config = load_config(args.config)
    _setup_logging(config.logging.level)

    session, engine = _get_session(config)
    try:
        if args.files:
            total = sum(ingest_file(session, Path(f)) for f in args.files)
            logger.info(f"Ingested {total} new records from {len(args.files)} files")
        else:
            total = ingest_sources(session, config.ingest.sources)
            logger.info(f"Ingested {total} new records from configured sources")
    finally:
        session.close()
        engine.dispose()
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
def cmd_generate(args: argparse.Namespace) -> None:
    """Load config, then generate all configured output files.

    The session/engine pair is always closed and disposed, even if
    generation raises.
    """
    config = load_config(args.config)
    _setup_logging(config.logging.level)

    session, engine = _get_session(config)
    try:
        generate(session, config.output)
    finally:
        session.close()
        engine.dispose()
|
|
172
|
+
|
|
173
|
+
|
|
174
|
+
def main() -> None:
    """Entry point for the ``orbitable`` command-line interface."""
    parser = argparse.ArgumentParser(
        prog="orbitable",
        description="TLE database management tool",
    )
    parser.add_argument(
        "-c",
        "--config",
        type=Path,
        default=Path("config.toml"),
        help="Path to config.toml (default: ./config.toml)",
    )

    subparsers = parser.add_subparsers(dest="command", required=True)

    # init: scaffold the config and secrets files
    subparsers.add_parser(
        "init",
        help="Scaffold config.toml and ~/.config/orbitable.toml",
    ).set_defaults(func=cmd_init)

    # ingest: load TLE/OMM files into the database
    ingest_parser = subparsers.add_parser(
        "ingest",
        help="Ingest TLE/OMM files into the database",
    )
    ingest_parser.add_argument(
        "files",
        nargs="*",
        help="Specific files to ingest (if omitted, scans configured source dirs)",
    )
    ingest_parser.set_defaults(func=cmd_ingest)

    # generate: write output files from the database
    subparsers.add_parser(
        "generate",
        help="Generate output TLE/OMM files from the database",
    ).set_defaults(func=cmd_generate)

    parsed = parser.parse_args()
    parsed.func(parsed)
|
orbitable/config.py
ADDED
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
import tomllib
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
from typing import Optional
|
|
4
|
+
|
|
5
|
+
from pydantic import BaseModel
|
|
6
|
+
from pydantic_settings import BaseSettings, SettingsConfigDict
|
|
7
|
+
from sqlalchemy import Engine, create_engine
|
|
8
|
+
from sqlalchemy.engine import URL
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class Database(BaseModel):
    """Database connection settings.

    NOTE(review): the default ``name`` here is ":memory:" while the
    scaffolded config.toml suggests "orbitable.db" — confirm which
    default is intended.
    """

    drivername: str = "sqlite"
    username: Optional[str] = None
    password: Optional[str] = None
    host: Optional[str] = None
    port: Optional[int] = None
    name: Optional[str] = ":memory:"
    secrets_file: Optional[str] = None

    @property
    def url(self) -> URL:
        """Assemble a SQLAlchemy URL from the individual fields."""
        return URL.create(
            drivername=self.drivername,
            username=self.username,
            password=self.password,
            host=self.host,
            port=self.port,
            database=self.name,
        )

    @property
    def engine(self) -> Engine:
        """Create an Engine for this URL (a fresh engine on every access)."""
        return create_engine(self.url)
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class IngestSource(BaseModel):
    """One directory to scan for TLE/OMM files."""

    path: str  # directory to scan
    pattern: str = "*.tle"  # glob pattern matched against file names
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
class IngestConfig(BaseModel):
    """Ingest section: the configured source directories."""

    sources: list[IngestSource] = []
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class OutputFormats(BaseModel):
    """Which output formats to produce."""

    tle: bool = True  # two-line element files (.tle)
    omm: bool = True  # OMM CSV files (.omm)
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
class OutputTypes(BaseModel):
    """Which output file types to generate."""

    date_files: bool = True  # one file per date (latest TLE per object)
    object_files: bool = True  # one file per satellite (all TLEs, by epoch)
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
class OutputConfig(BaseModel):
    """Output section: destination directory plus format/type toggles."""

    dir: str = "./output"
    formats: OutputFormats = OutputFormats()
    types: OutputTypes = OutputTypes()
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
class LoggingConfig(BaseModel):
    """Logging section (level: DEBUG, INFO, WARNING, ERROR, CRITICAL)."""

    level: str = "INFO"
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
class Settings(BaseSettings):
    """Top-level application settings.

    Values come from the parsed TOML data passed to the constructor, with
    environment-variable overrides applied by pydantic-settings: prefix
    ORBITABLE_, nested fields delimited by "__"
    (e.g. ORBITABLE_DATABASE__USERNAME).
    """

    model_config = SettingsConfigDict(
        env_prefix="ORBITABLE_",
        env_nested_delimiter="__",
    )

    database: Database = Database()
    ingest: IngestConfig = IngestConfig()
    output: OutputConfig = OutputConfig()
    logging: LoggingConfig = LoggingConfig()
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def load_config(path: Path | None = None) -> Settings:
    """Load settings from TOML file with layered credential resolution.

    Resolution order (highest priority first):
    1. Environment variables (ORBITABLE_DATABASE__USERNAME, etc.)
    2. User secrets (~/.config/orbitable.toml)
    3. System secrets file (database.secrets_file in config)
    4. Values in config.toml

    Bug fix: the previous implementation merged each secrets layer only
    when the key was absent from ``db_data``, which (a) let config.toml
    values beat both secrets files and (b) merged the system secrets
    first so user secrets could never override them — both contradicting
    the documented order. Layers are now applied lowest-priority-first,
    each overriding the one below it.
    """
    toml_data: dict = {}

    if path is not None and path.exists():
        with open(path, "rb") as f:
            toml_data = tomllib.load(f)

    # Priority 4 (lowest): whatever config.toml already contains.
    db_data = toml_data.get("database", {})

    # Priority 3: system secrets file, if configured — overrides config.toml.
    secrets_file = db_data.get("secrets_file")
    if secrets_file:
        secrets_path = Path(secrets_file)
        if secrets_path.exists():
            with open(secrets_path, "rb") as f:
                secrets = tomllib.load(f)
            for key in ("username", "password"):
                if key in secrets:
                    db_data[key] = secrets[key]

    # Priority 2: user-local secrets (~/.config/orbitable.toml) — overrides both.
    user_secrets_path = Path.home() / ".config" / "orbitable.toml"
    if user_secrets_path.exists():
        with open(user_secrets_path, "rb") as f:
            user_secrets = tomllib.load(f)
        for key in ("username", "password"):
            if key in user_secrets:
                db_data[key] = user_secrets[key]

    toml_data["database"] = db_data

    # Priority 1: pydantic-settings applies env var overrides automatically.
    return Settings(**toml_data)
|
orbitable/generator.py
ADDED
|
@@ -0,0 +1,148 @@
|
|
|
1
|
+
import pathlib
|
|
2
|
+
|
|
3
|
+
from loguru import logger
|
|
4
|
+
from sgp4.api import Satrec
|
|
5
|
+
from sgp4.exporter import export_omm
|
|
6
|
+
from sqlalchemy import func, select
|
|
7
|
+
from sqlalchemy.orm import Session
|
|
8
|
+
|
|
9
|
+
from orbitable.config import OutputConfig
|
|
10
|
+
from orbitable.model import TLE
|
|
11
|
+
from orbitable.reader import TLETuple, write_omm_csv, write_tle
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def _reconstruct_satrec(tle: TLE) -> Satrec:
    """Reconstruct a Satrec object from the stored TLE line pair.

    The element-set fields on the ORM row are derived values; the raw
    lines are the authoritative source, so we re-parse them here.
    """
    return Satrec.twoline2rv(tle.line1, tle.line2)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def _safe_export_omm(sat: Satrec, name: str) -> dict | None:
    """Export an OMM dict from *sat*, or None when the export fails."""
    try:
        record = export_omm(sat, name)
    except (ValueError, AttributeError):
        return None
    return record
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def _get_object_name(tle: TLE) -> str:
    """Return the OmmMetadata object name when present, else the object_id."""
    meta = tle.omm_metadata
    if meta and meta.object_name:
        return meta.object_name
    return tle.object_id
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def generate_date_files(
    session: Session, output_dir: pathlib.Path, config: OutputConfig
) -> None:
    """Generate one file per date with the latest TLE per object.

    For every distinct epoch date in the database, writes
    ``YYYYMMDD.tle`` and/or ``YYYYMMDD.omm`` (per config.formats)
    containing, for each satellite, only its latest TLE on that date.
    """
    # Get distinct dates (UTC date truncation of epoch)
    date_query = select(func.date(TLE.epoch).label("d")).distinct()
    dates = session.execute(date_query).scalars().all()

    logger.info(f"Generating date files for {len(dates)} dates")

    for date_str in dates:
        if date_str is None:
            continue

        # Subquery: max epoch per norad_cat_id on this date
        subq = (
            select(
                TLE.norad_cat_id,
                func.max(TLE.epoch).label("max_epoch"),
            )
            .where(func.date(TLE.epoch) == date_str)
            .group_by(TLE.norad_cat_id)
            .subquery()
        )

        # Join back to get full TLE rows
        stmt = (
            select(TLE)
            .join(
                subq,
                (TLE.norad_cat_id == subq.c.norad_cat_id)
                & (TLE.epoch == subq.c.max_epoch),
            )
            .order_by(TLE.norad_cat_id)
        )
        tles = session.execute(stmt).scalars().all()

        if not tles:
            continue

        # assumes func.date() yields an ISO "YYYY-MM-DD" string (true for
        # SQLite) — TODO confirm for other dialects
        filename_date = date_str.replace("-", "")

        # TLE output
        if config.formats.tle:
            tle_path = output_dir / f"{filename_date}.tle"
            tle_tuples: list[TLETuple] = [(t.line1, t.line2) for t in tles]
            write_tle(tle_path, tle_tuples)

        # OMM output: re-parse each stored line pair and export; records
        # that fail to export are silently dropped from the file.
        if config.formats.omm:
            omm_path = output_dir / f"{filename_date}.omm"
            omm_records = []
            for tle in tles:
                sat = _reconstruct_satrec(tle)
                name = _get_object_name(tle)
                record = _safe_export_omm(sat, name)
                if record is not None:
                    omm_records.append(record)
            write_omm_csv(omm_path, omm_records)
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
def generate_object_files(
    session: Session, output_dir: pathlib.Path, config: OutputConfig
) -> None:
    """Generate one file per satellite with all its TLEs.

    For every distinct NORAD ID, writes ``NORAD_ID.tle`` and/or
    ``NORAD_ID.omm`` (per config.formats) with every stored TLE for that
    satellite ordered by epoch.
    """
    # Get distinct NORAD IDs
    norad_ids = (
        session.execute(select(TLE.norad_cat_id).distinct().order_by(TLE.norad_cat_id))
        .scalars()
        .all()
    )

    logger.info(f"Generating object files for {len(norad_ids)} satellites")

    for norad_id in norad_ids:
        if norad_id is None:
            continue

        stmt = select(TLE).where(TLE.norad_cat_id == norad_id).order_by(TLE.epoch)
        tles = session.execute(stmt).scalars().all()

        if not tles:
            continue

        # TLE output
        if config.formats.tle:
            tle_path = output_dir / f"{norad_id}.tle"
            tle_tuples: list[TLETuple] = [(t.line1, t.line2) for t in tles]
            write_tle(tle_path, tle_tuples)

        # OMM output: re-parse each stored line pair and export; records
        # that fail to export are silently dropped from the file.
        if config.formats.omm:
            omm_path = output_dir / f"{norad_id}.omm"
            omm_records = []
            for tle in tles:
                sat = _reconstruct_satrec(tle)
                name = _get_object_name(tle)
                record = _safe_export_omm(sat, name)
                if record is not None:
                    omm_records.append(record)
            write_omm_csv(omm_path, omm_records)
|
|
135
|
+
|
|
136
|
+
|
|
137
|
+
def generate(session: Session, config: OutputConfig) -> None:
    """Generate every output file enabled in *config*."""
    destination = pathlib.Path(config.dir)
    destination.mkdir(parents=True, exist_ok=True)

    if config.types.date_files:
        generate_date_files(session, destination, config)
    if config.types.object_files:
        generate_object_files(session, destination, config)

    logger.info(f"Output generation complete → {destination}")
|
orbitable/ingest.py
ADDED
|
@@ -0,0 +1,211 @@
|
|
|
1
|
+
import pathlib
|
|
2
|
+
from typing import Sequence
|
|
3
|
+
|
|
4
|
+
from loguru import logger
|
|
5
|
+
from sqlalchemy import select
|
|
6
|
+
from sqlalchemy.orm import Session
|
|
7
|
+
|
|
8
|
+
from orbitable.config import IngestSource
|
|
9
|
+
from orbitable.model import OmmMetadata, TLE, utcnow
|
|
10
|
+
from orbitable.reader import (
|
|
11
|
+
TLETuple,
|
|
12
|
+
detect_format,
|
|
13
|
+
read_omm_csv,
|
|
14
|
+
read_omm_json,
|
|
15
|
+
read_omm_xml,
|
|
16
|
+
read_tle,
|
|
17
|
+
)
|
|
18
|
+
|
|
19
|
+
CHUNK_SIZE = 5000
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def _bulk_insert_tles(session: Session, records: list[dict]) -> int:
    """Bulk insert TLE records with dialect-aware dedup.

    Duplicates of the (line1, line2) unique constraint are silently
    skipped: ON CONFLICT DO NOTHING on SQLite, INSERT IGNORE elsewhere.
    Commits after each chunk. Returns count of newly inserted rows.

    Cleanup: the original had identical ``elif dialect in ("mysql",
    "mariadb")`` and ``else`` branches, and rebuilt the statement (and
    re-ran the imports) on every chunk iteration — the statement is now
    built once before the loop.
    """
    if not records:
        return 0

    dialect = session.bind.dialect.name  # type: ignore[union-attr]
    table = TLE.__table__

    # Build the dedup-aware INSERT once; it is reused for every chunk.
    if dialect == "sqlite":
        from sqlalchemy.dialects.sqlite import insert as sqlite_insert

        stmt = sqlite_insert(table).on_conflict_do_nothing(  # type: ignore[arg-type]
            index_elements=["line1", "line2"]
        )
    else:
        # MySQL/MariaDB and any other dialect: INSERT IGNORE semantics.
        from sqlalchemy import insert

        stmt = insert(table).prefix_with("IGNORE")  # type: ignore[arg-type]

    total_inserted = 0
    for i in range(0, len(records), CHUNK_SIZE):
        chunk = records[i : i + CHUNK_SIZE]
        result = session.execute(stmt, chunk)
        total_inserted += result.rowcount
        session.commit()

    return total_inserted
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def ingest_tles(session: Session, tles: Sequence[TLETuple]) -> int:
    """Ingest TLE tuples into the database.

    Skips malformed records and duplicates. Returns count of newly inserted rows.
    """
    records = []
    for line1, line2 in tles:
        try:
            parsed = TLE.from_twoline(line1, line2)
            records.append(_tle_to_record(parsed, utcnow()))
        except Exception:
            logger.warning(f"Skipping malformed TLE: {line1[:20]}...")

    return _bulk_insert_tles(session, records)
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def _tle_to_record(tle: TLE, now=None) -> dict:
    """Convert a TLE ORM object to a dict for Core-level INSERT.

    The "id" column is omitted so the database assigns it; "created" and
    "modified" are both stamped with *now* (defaulting to the current
    UTC time).
    """
    timestamp = utcnow() if now is None else now
    record = {}
    for column in TLE.__table__.columns:
        if column.name == "id":
            continue
        record[column.name] = getattr(tle, column.name)
    record["created"] = timestamp
    record["modified"] = timestamp
    return record
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
def ingest_omm(session: Session, omm_records: list[dict]) -> int:
    """Ingest OMM records into the database.

    Extracts TLE_LINE1/TLE_LINE2 from each record, inserts into TLE table,
    then populates OmmMetadata for newly inserted rows.
    Returns count of newly inserted TLE rows.
    """
    # Build TLE records from OMM data; remember each source OMM dict keyed
    # by its line pair so metadata can be attached after insertion.
    tle_records = []
    omm_by_lines: dict[tuple[str, str], dict] = {}

    for omm in omm_records:
        line1 = omm.get("TLE_LINE1", "").strip()
        line2 = omm.get("TLE_LINE2", "").strip()
        if not line1 or not line2:
            logger.warning(
                f"OMM record missing TLE lines: {omm.get('OBJECT_NAME', 'unknown')}"
            )
            continue

        try:
            tle = TLE.from_twoline(line1, line2)
            record = _tle_to_record(tle)
            tle_records.append(record)
            omm_by_lines[(line1, line2)] = omm
        except Exception:
            logger.warning(
                f"Skipping malformed OMM record: {omm.get('OBJECT_NAME', 'unknown')}"
            )

    # Bulk insert TLE rows (duplicates are skipped at the DB level)
    inserted = _bulk_insert_tles(session, tle_records)

    if not omm_by_lines:
        return inserted

    # FK resolution: SELECT back by (line1, line2) to get IDs
    line_pairs = list(omm_by_lines.keys())
    for i in range(0, len(line_pairs), CHUNK_SIZE):
        chunk = line_pairs[i : i + CHUNK_SIZE]
        # Query TLE rows that match our line pairs and don't have metadata yet.
        # NOTE: the two independent IN clauses can match mixed (line1, line2)
        # combinations; the omm_by_lines lookup below filters those out.
        stmt = (
            select(TLE.id, TLE.line1, TLE.line2)
            .where(
                TLE.line1.in_([lp[0] for lp in chunk]),
                TLE.line2.in_([lp[1] for lp in chunk]),
            )
            .outerjoin(OmmMetadata, TLE.id == OmmMetadata.tle_id)
            .where(OmmMetadata.id.is_(None))
        )
        rows = session.execute(stmt).all()

        metadata_records = []
        for tle_id, line1, line2 in rows:
            omm = omm_by_lines.get((line1, line2))
            if omm is None:
                continue
            metadata_records.append(
                OmmMetadata(
                    tle_id=tle_id,
                    object_name=omm.get("OBJECT_NAME"),
                    object_type=omm.get("OBJECT_TYPE"),
                    country_code=omm.get("COUNTRY_CODE"),
                    rcs_size=omm.get("RCS_SIZE"),
                    launch_date=omm.get("LAUNCH_DATE"),
                    site=omm.get("SITE"),
                    decay_date=omm.get("DECAY_DATE"),
                    originator=omm.get("ORIGINATOR"),
                    gp_id=int(omm["GP_ID"]) if omm.get("GP_ID") else None,
                )
            )

        if metadata_records:
            session.add_all(metadata_records)
            session.commit()

    return inserted
|
|
168
|
+
|
|
169
|
+
|
|
170
|
+
def ingest_file(session: Session, path: pathlib.Path) -> int:
    """Detect format and ingest a single file. Returns count of newly inserted rows."""
    fmt = detect_format(path)
    logger.info(f"Ingesting {path} (format: {fmt})")

    if fmt == "tle":
        return ingest_tles(session, read_tle(path))

    # All OMM flavors share the same ingest path; only the reader differs.
    omm_readers = {
        "omm_json": read_omm_json,
        "omm_csv": read_omm_csv,
        "omm_xml": read_omm_xml,
    }
    reader = omm_readers.get(fmt)
    if reader is None:
        logger.warning(f"Unknown format for {path}, skipping")
        return 0
    return ingest_omm(session, reader(path))
|
|
190
|
+
|
|
191
|
+
|
|
192
|
+
def ingest_sources(session: Session, sources: list[IngestSource]) -> int:
    """Scan configured source directories and ingest all matching files."""
    total = 0
    for source in sources:
        root = pathlib.Path(source.path)
        if not root.is_dir():
            logger.warning(f"Source directory does not exist: {root}")
            continue

        matched = sorted(root.glob(source.pattern))
        logger.info(
            f"Found {len(matched)} files in {root} matching {source.pattern}"
        )

        for entry in matched:
            inserted = ingest_file(session, entry)
            logger.info(f" {entry.name}: {inserted} new records")
            total += inserted

    return total
|
orbitable/model.py
ADDED
|
@@ -0,0 +1,181 @@
|
|
|
1
|
+
import datetime
|
|
2
|
+
from math import degrees, pi, sqrt
|
|
3
|
+
from typing import Optional
|
|
4
|
+
|
|
5
|
+
from sgp4.alpha5 import to_alpha5
|
|
6
|
+
from sgp4.api import Satrec
|
|
7
|
+
from sgp4.conveniences import sat_epoch_datetime
|
|
8
|
+
from sqlalchemy import DateTime, ForeignKey, String, UniqueConstraint
|
|
9
|
+
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def object_id(sat: Satrec) -> str:
    """Expand the TLE international designator with its century prefix.

    Two-digit launch years below 57 are taken as 20xx, the rest as 19xx.
    Designators shorter than two characters are returned unchanged.
    """
    designator = sat.intldesg
    if len(designator) < 2:
        return designator
    century = "20" if int(designator[:2]) < 57 else "19"
    return century + designator
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def period(sat: Satrec) -> float:
    """Calculate orbital period [min].

    2*pi*sqrt(a^3/mu) with a in km and mu in km^3/s^2 yields SECONDS;
    the original returned that value directly despite this docstring
    (and the TLE.period column doc) declaring minutes. Divide by 60 so
    the returned value matches the documented unit.
    """
    a_km = sat.a * sat.radiusearthkm
    return 2 * pi * sqrt(a_km**3 / sat.mu) / 60
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def mean_motion(sat: Satrec) -> float:
    """Mean Motion [rev/day] from no_kozai [rad/min]."""
    rad_per_day = sat.no_kozai * 60 * 24
    return rad_per_day / (2 * pi)
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class Base(DeclarativeBase):
    """Declarative base shared by all ORM models in this package."""

    pass
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def utcnow():
    """Return the current time as a timezone-aware UTC datetime."""
    return datetime.datetime.now(tz=datetime.timezone.utc)
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
class Mixin:
    """Common columns shared by all tables: surrogate key + audit timestamps."""

    # Surrogate primary key.
    id: Mapped[int] = mapped_column(
        primary_key=True,
        autoincrement=True,
        index=True,
        unique=True,
    )
    # Creation timestamp (UTC, set once on INSERT).
    created: Mapped[datetime.datetime] = mapped_column(
        default=utcnow,
        index=True,
    )
    # Last-modified timestamp (UTC, refreshed on UPDATE via onupdate).
    modified: Mapped[datetime.datetime] = mapped_column(
        default=utcnow,
        onupdate=utcnow,
        index=True,
    )
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
class TLE(Base, Mixin):
    """One two-line element set, stored with fields derived from the raw lines."""

    __tablename__ = "tle"

    # The raw line pair is the dedup key for ingest.
    __table_args__ = (UniqueConstraint("line1", "line2"),)

    line1: Mapped[str] = mapped_column(String(70), index=True)
    line2: Mapped[str] = mapped_column(String(70), index=True)

    object_id: Mapped[str] = mapped_column(String(15))
    """International Designator"""
    norad_cat_id: Mapped[Optional[int]] = mapped_column(index=True)
    """NORAD Catalog Number ("Satellite Number")"""
    classification: Mapped[str] = mapped_column(String(1))
    """Classification Type"""

    # Element Set
    epoch: Mapped[datetime.datetime] = mapped_column(DateTime, index=True)
    """Epoch of Mean Keplerian elements (mandatory)"""
    mean_motion: Mapped[Optional[float]] = mapped_column(index=True)
    """Keplerian Mean Motion [rev/day]"""
    eccentricity: Mapped[float] = mapped_column(index=True)
    """Eccentricity"""
    inclination: Mapped[float] = mapped_column(index=True)
    """Inclination [deg]"""
    ra_of_asc_node: Mapped[float] = mapped_column(index=True)
    """Right Ascension of Ascending Node [deg]"""
    arg_of_pericenter: Mapped[float] = mapped_column(index=True)
    """Argument of Pericenter [deg]"""
    mean_anomaly: Mapped[float] = mapped_column(index=True)
    """Mean Anomaly [deg]"""

    # Other TLE Details
    ephemeris_type: Mapped[int]
    """Element Set Type"""
    element_set_no: Mapped[int]
    """Element Set Number (for this satellite)

    Normally incremented sequentially, but may be out of sync.
    """
    rev_at_epoch: Mapped[Optional[int]]
    """Revolution Number"""
    bstar: Mapped[Optional[float]]
    """Drag-like ballistic coefficient"""
    mean_motion_dot: Mapped[Optional[float]]
    """First Time Derivative of Mean Motion (i.e., a drag term)"""
    mean_motion_ddot: Mapped[Optional[float]]
    """Second Time Derivative of Mean Motion (i.e., a drag term)"""

    # Derived Values
    semimajor_axis: Mapped[Optional[float]]
    """Semi-major Axis [km]"""
    period: Mapped[Optional[float]]
    """Orbital Period [min]"""
    apoapsis_alt: Mapped[Optional[float]]
    """Apoapsis Altitude [km]"""
    periapsis_alt: Mapped[Optional[float]]
    """Periapsis Altitude [km]"""

    # Optional Space-Track metadata attached after OMM ingest.
    omm_metadata: Mapped[Optional["OmmMetadata"]] = relationship(
        back_populates="tle", uselist=False, lazy="select"
    )

    def __repr__(self) -> str:
        """Compact repr: alpha-5 catalog number, epoch, element set number."""
        n = to_alpha5(self.norad_cat_id)
        t = self.epoch.isoformat(sep="T", timespec="seconds")
        e = self.element_set_no
        return f"TLE(norad_cat_id={n!r}, epoch={t!r}, element_set_no={e})"

    @classmethod
    def from_twoline(cls, *lines: str) -> "TLE":
        """Build a TLE row from 2 lines (or 3, where the first is the name line).

        Raises ValueError for any other number of lines.
        """
        if len(lines) == 2:
            line1, line2 = lines
        elif len(lines) == 3:
            _, line1, line2 = lines
        else:
            msg = "Must provide an iterable of length 2 or 3"
            raise ValueError(msg)

        sat = Satrec.twoline2rv(line1, line2)
        fields = _fields_from_satrec(sat)

        return cls(line1=line1, line2=line2, **fields)
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
def _fields_from_satrec(sat: Satrec) -> dict:
    """Map a populated Satrec's orbital elements onto TLE model field names.

    Angles are converted from radians to degrees; length-like quantities
    (semi-major axis, apoapsis/periapsis altitude) are scaled from Earth
    radii to kilometers via ``sat.radiusearthkm``.
    """
    # Earth radius [km] from the Satrec's gravity model — hoisted once.
    earth_km = sat.radiusearthkm
    return {
        "object_id": object_id(sat),
        "norad_cat_id": sat.satnum,
        "classification": sat.classification,
        "epoch": sat_epoch_datetime(sat),
        "mean_motion": mean_motion(sat),
        "eccentricity": sat.ecco,
        "inclination": degrees(sat.inclo),
        "ra_of_asc_node": degrees(sat.nodeo),
        "arg_of_pericenter": degrees(sat.argpo),
        "mean_anomaly": degrees(sat.mo),
        "ephemeris_type": sat.ephtype,
        "element_set_no": sat.elnum,
        "rev_at_epoch": sat.revnum,
        "bstar": sat.bstar,
        "mean_motion_dot": sat.ndot,
        "mean_motion_ddot": sat.nddot,
        "semimajor_axis": sat.a * earth_km,
        "period": period(sat),
        "apoapsis_alt": sat.alta * earth_km,
        "periapsis_alt": sat.altp * earth_km,
    }
|
+
|
|
166
|
+
|
|
167
|
+
class OmmMetadata(Base, Mixin):
    """Optional Space-Track catalog metadata attached one-to-one to a TLE row."""

    __tablename__ = "omm_metadata"

    # One metadata row per TLE (unique FK enforces the one-to-one link).
    tle_id: Mapped[int] = mapped_column(ForeignKey("tle.id"), unique=True, index=True)
    tle: Mapped["TLE"] = relationship(back_populates="omm_metadata")

    # Catalog fields as delivered by Space-Track; dates are kept as
    # ISO "YYYY-MM-DD" strings rather than parsed into date columns.
    object_name: Mapped[Optional[str]] = mapped_column(String(100))
    object_type: Mapped[Optional[str]] = mapped_column(String(20))
    country_code: Mapped[Optional[str]] = mapped_column(String(10))
    rcs_size: Mapped[Optional[str]] = mapped_column(String(10))
    launch_date: Mapped[Optional[str]] = mapped_column(String(10))
    site: Mapped[Optional[str]] = mapped_column(String(20))
    decay_date: Mapped[Optional[str]] = mapped_column(String(10))
    originator: Mapped[Optional[str]] = mapped_column(String(20))
    gp_id: Mapped[Optional[int]] = mapped_column()
|
orbitable/reader.py
ADDED
|
@@ -0,0 +1,204 @@
|
|
|
1
|
+
import concurrent.futures
|
|
2
|
+
import csv
|
|
3
|
+
import datetime
|
|
4
|
+
import json
|
|
5
|
+
import os
|
|
6
|
+
import pathlib
|
|
7
|
+
from typing import Callable, Iterable, TypeVar, Union
|
|
8
|
+
|
|
9
|
+
from sgp4 import omm as sgp4_omm
|
|
10
|
+
|
|
11
|
+
# Anything accepted by open()/pathlib for file locations.
PathLike = Union[str, os.PathLike, pathlib.Path]
# A TLE as a (line1, line2) pair of raw strings.
TLETuple = tuple[str, str]

T = TypeVar("T")

GroupByKey = TypeVar("GroupByKey")
|
+
def group_by(
    tles: list[TLETuple], key: Callable[[TLETuple], GroupByKey]
) -> dict[GroupByKey, list[TLETuple]]:
    """Group input TLEs into buckets keyed by ``key(tle)``, preserving order."""
    grouped: dict[GroupByKey, list[TLETuple]] = {}
    for entry in tles:
        grouped.setdefault(key(entry), []).append(entry)
    return grouped
+
|
|
31
|
+
|
|
32
|
+
def unique(tles: list[T]) -> list[T]:
    """Return the input as a list with duplicates removed, preserving first-seen order."""
    # dict insertion order gives a stable de-duplication in one pass.
    return [*dict.fromkeys(tles)]
|
+
|
|
36
|
+
|
|
37
|
+
def tle_epoch(tle: TLETuple) -> float:
    """Get the epoch (float YYYYDDD.fraction) from a TLE, adjusted for Y2K."""
    # Epoch field is line 1, columns 19-32 (YYDDD.dddddddd); stray blanks
    # are treated as zero padding.
    raw = float(tle[0][18:32].replace(" ", "0"))
    # TLE Y2K convention: two-digit years >= 57 belong to the 1900s.
    century = 1900_000 if raw // 1000 >= 57 else 2000_000
    return raw + century
|
+
|
|
43
|
+
|
|
44
|
+
def tle_date(tle: TLETuple) -> str:
    """Get the calendar date (as 'YYYYMMDD' str) from a TLE's epoch."""
    yyddd = tle_epoch(tle)
    year = int(yyddd // 1000)
    day_of_year = int(yyddd % 1000)  # fractional part of the day is truncated
    moment = datetime.datetime(year, 1, 1) + datetime.timedelta(days=day_of_year - 1)
    return moment.strftime("%Y%m%d")
|
|
52
|
+
|
|
53
|
+
def tle_satnum(tle: TLETuple) -> str:
    """Extract the (Alpha-5) Satnum from a TLE, zero-padding blank columns."""
    line1, _ = tle
    # Satnum occupies line 1, columns 3-7.
    return line1[2:7].replace(" ", "0")
|
+
|
|
57
|
+
|
|
58
|
+
def read_tle(
    file: PathLike,
) -> list[TLETuple]:
    """Read a single TLE file, returning (line1, line2) pairs.

    Lines starting with "1" are held until a matching "2" line arrives;
    name lines (3LE format) and blank lines are skipped.
    """
    pairs: list[TLETuple] = []
    pending: str | None = None
    with open(file, "r") as handle:
        for raw in handle:
            stripped = raw.rstrip()
            if not stripped:
                continue
            if stripped[0] == "1":
                pending = stripped
            elif stripped[0] == "2" and pending is not None:
                pairs.append((pending, stripped))
                pending = None
    return pairs
|
+
|
|
76
|
+
|
|
77
|
+
def read_tles(files: Iterable[PathLike]) -> list[TLETuple]:
    """Read multiple TLE files concurrently, concatenating results in input order."""
    collected: list[TLETuple] = []
    with concurrent.futures.ThreadPoolExecutor() as pool:
        # executor.map preserves the order of `files` and re-raises the
        # first worker exception on iteration.
        for batch in pool.map(read_tle, files):
            collected.extend(batch)
    return collected
|
+
|
|
87
|
+
|
|
88
|
+
def write_tle(
    file_path: PathLike,
    tles: Iterable[TLETuple],
    *,
    sort: bool = False,
    deduplicate: bool = False,
) -> None:
    """Write TLE pairs to *file_path*, one line per string.

    Args:
        file_path: Destination file (overwritten).
        tles: (line1, line2) pairs to write.
        sort: Sort by satnum, then by epoch within each satellite.
        deduplicate: Drop exact duplicate pairs, keeping first occurrence.
    """
    records = list(tles)
    if deduplicate:
        records = unique(records)

    if sort:
        # Two stable sorts: epoch first, then satnum, so the file is grouped
        # by satellite with epochs ascending inside each group.
        records.sort(key=tle_epoch)
        records.sort(key=tle_satnum)

    with open(file_path, "w") as out:
        out.writelines(f"{line1}\n{line2}\n" for line1, line2 in records)
107
|
+
|
|
108
|
+
def write_tles(
    files: dict[pathlib.Path, Iterable[TLETuple]],
    *,
    deduplicate: bool = True,
    sort: bool = False,
) -> None:
    """Write each path's TLEs concurrently via :func:`write_tle`.

    Exceptions from workers are re-raised when results are collected.
    """
    with concurrent.futures.ThreadPoolExecutor() as pool:
        pending = [
            pool.submit(write_tle, path, tles, deduplicate=deduplicate, sort=sort)
            for path, tles in files.items()
        ]

    # The executor has already waited for completion on exit; result()
    # surfaces any exception a worker raised.
    for task in pending:
        task.result()
|
+
|
|
125
|
+
|
|
126
|
+
# --- OMM Format Support ---
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
def read_omm_json(file: PathLike) -> list[dict]:
    """Read Space-Track JSON format OMM data.

    Handles both single-object (bare dict) and array forms.
    """
    with open(file, "r") as handle:
        payload = json.load(handle)
    # Normalize a bare object to a one-element list.
    return [payload] if isinstance(payload, dict) else payload
|
+
|
|
140
|
+
|
|
141
|
+
def read_omm_csv(file: PathLike) -> list[dict]:
    """Read OMM CSV format using sgp4's parser."""
    with open(file, "r") as handle:
        return [record for record in sgp4_omm.parse_csv(handle)]
|
+
|
|
146
|
+
|
|
147
|
+
def read_omm_xml(file: PathLike) -> list[dict]:
    """Read OMM XML format using sgp4's parser."""
    # sgp4's XML parser wants a binary stream.
    with open(file, "rb") as handle:
        return [record for record in sgp4_omm.parse_xml(handle)]
|
+
|
|
152
|
+
|
|
153
|
+
# Standard OMM CSV field order (matches Space-Track output)
# Used by write_omm_csv as the DictWriter header; extra record keys are ignored.
OMM_CSV_FIELDS = [
    "OBJECT_NAME",
    "OBJECT_ID",
    "EPOCH",
    "MEAN_MOTION",
    "ECCENTRICITY",
    "INCLINATION",
    "RA_OF_ASC_NODE",
    "ARG_OF_PERICENTER",
    "MEAN_ANOMALY",
    "EPHEMERIS_TYPE",
    "CLASSIFICATION_TYPE",
    "NORAD_CAT_ID",
    "ELEMENT_SET_NO",
    "REV_AT_EPOCH",
    "BSTAR",
    "MEAN_MOTION_DOT",
    "MEAN_MOTION_DDOT",
]
|
+
|
|
174
|
+
|
|
175
|
+
def write_omm_csv(
    file_path: PathLike,
    records: list[dict],
) -> None:
    """Write OMM records as CSV using the standard Space-Track field order.

    Keys not in OMM_CSV_FIELDS are silently dropped; with no records the
    target file is left untouched.
    """
    if not records:
        return
    with open(file_path, "w", newline="") as handle:
        writer = csv.DictWriter(
            handle, fieldnames=OMM_CSV_FIELDS, extrasaction="ignore"
        )
        writer.writeheader()
        writer.writerows(records)
188
|
+
|
|
189
|
+
def detect_format(file: PathLike) -> str:
    """Detect file format by extension.

    Returns one of: "tle", "omm_json", "omm_csv", "omm_xml"
    """
    suffix = pathlib.Path(file).suffix.lower()
    by_extension = {
        ".json": "omm_json",
        ".csv": "omm_csv",
        ".xml": "omm_xml",
    }
    # .tle/.txt/.3le and any unrecognized extension fall back to TLE format.
    return by_extension.get(suffix, "tle")
|
|
@@ -0,0 +1,218 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: orbitable
|
|
3
|
+
Version: 0.2.0
|
|
4
|
+
Summary: Orbital element database management tool
|
|
5
|
+
Author-email: Jonathan Olsten <jolsten@gmail.com>
|
|
6
|
+
Requires-Python: >=3.11
|
|
7
|
+
Requires-Dist: loguru>=0.7.3
|
|
8
|
+
Requires-Dist: pydantic-settings>=2.9.1
|
|
9
|
+
Requires-Dist: sgp4>=2.24
|
|
10
|
+
Requires-Dist: sqlalchemy>=2.0.40
|
|
11
|
+
Provides-Extra: mysql
|
|
12
|
+
Requires-Dist: pymysql>=1.1; extra == 'mysql'
|
|
13
|
+
Description-Content-Type: text/markdown
|
|
14
|
+
|
|
15
|
+
# Orbitable
|
|
16
|
+
|
|
17
|
+
Orbital element database manager. Ingests TLE (Two-Line Element) and OMM (Orbit Mean-Elements Message) files into a database and generates organized output files by date and satellite.
|
|
18
|
+
|
|
19
|
+
## Installation
|
|
20
|
+
|
|
21
|
+
```bash
|
|
22
|
+
pip install orbitable
|
|
23
|
+
|
|
24
|
+
# For MariaDB/MySQL support:
|
|
25
|
+
pip install orbitable[mysql]
|
|
26
|
+
```
|
|
27
|
+
|
|
28
|
+
Or with [uv](https://docs.astral.sh/uv/):
|
|
29
|
+
|
|
30
|
+
```bash
|
|
31
|
+
uv add orbitable
|
|
32
|
+
```
|
|
33
|
+
|
|
34
|
+
## Quick Start
|
|
35
|
+
|
|
36
|
+
### 1. Scaffold configuration
|
|
37
|
+
|
|
38
|
+
```bash
|
|
39
|
+
orbitable init
|
|
40
|
+
```
|
|
41
|
+
|
|
42
|
+
This creates two files:
|
|
43
|
+
|
|
44
|
+
- `./config.toml` -- main configuration (database, ingest sources, output settings)
|
|
45
|
+
- `~/.config/orbitable.toml` -- user-local database credentials
|
|
46
|
+
|
|
47
|
+
Use `-c` to specify a different config path:
|
|
48
|
+
|
|
49
|
+
```bash
|
|
50
|
+
orbitable -c /etc/orbitable/config.toml init
|
|
51
|
+
```
|
|
52
|
+
|
|
53
|
+
### 2. Configure
|
|
54
|
+
|
|
55
|
+
Edit `config.toml` to set your database and ingest sources. The generated file is fully commented -- see below for a summary.
|
|
56
|
+
|
|
57
|
+
**SQLite (default):**
|
|
58
|
+
|
|
59
|
+
```toml
|
|
60
|
+
[database]
|
|
61
|
+
drivername = "sqlite"
|
|
62
|
+
name = "orbitable.db"
|
|
63
|
+
```
|
|
64
|
+
|
|
65
|
+
**MariaDB/MySQL:**
|
|
66
|
+
|
|
67
|
+
```toml
|
|
68
|
+
[database]
|
|
69
|
+
drivername = "mysql+pymysql"
|
|
70
|
+
host = "localhost"
|
|
71
|
+
port = 3306
|
|
72
|
+
name = "orbitable"
|
|
73
|
+
secrets_file = "/etc/orbitable/secrets.toml"
|
|
74
|
+
```
|
|
75
|
+
|
|
76
|
+
Then add your credentials to `~/.config/orbitable.toml`:
|
|
77
|
+
|
|
78
|
+
```toml
|
|
79
|
+
username = "myuser"
|
|
80
|
+
password = "mypassword"
|
|
81
|
+
```
|
|
82
|
+
|
|
83
|
+
### 3. Ingest TLE/OMM files
|
|
84
|
+
|
|
85
|
+
Scan configured source directories:
|
|
86
|
+
|
|
87
|
+
```bash
|
|
88
|
+
orbitable ingest
|
|
89
|
+
```
|
|
90
|
+
|
|
91
|
+
Or ingest specific files:
|
|
92
|
+
|
|
93
|
+
```bash
|
|
94
|
+
orbitable ingest /path/to/20260327.tle /path/to/20260327.json
|
|
95
|
+
```
|
|
96
|
+
|
|
97
|
+
File format is auto-detected by extension:
|
|
98
|
+
|
|
99
|
+
| Extension | Format |
|
|
100
|
+
| ------------------ | ------------------- |
|
|
101
|
+
| `.tle`, `.txt`, `.3le` | Two-Line Element |
|
|
102
|
+
| `.json` | Space-Track OMM JSON |
|
|
103
|
+
| `.csv` | OMM CSV |
|
|
104
|
+
| `.xml` | OMM XML |
|
|
105
|
+
|
|
106
|
+
Ingestion is idempotent -- duplicate records are silently skipped.
|
|
107
|
+
|
|
108
|
+
### 4. Generate output files
|
|
109
|
+
|
|
110
|
+
```bash
|
|
111
|
+
orbitable generate
|
|
112
|
+
```
|
|
113
|
+
|
|
114
|
+
This produces files in the configured output directory:
|
|
115
|
+
|
|
116
|
+
- **Date files** (`YYYYMMDD.tle` / `YYYYMMDD.omm`) -- one TLE per satellite for each date (latest epoch that day)
|
|
117
|
+
- **Object files** (`25544.tle` / `25544.omm`) -- all TLEs for a single satellite, ordered by epoch
|
|
118
|
+
|
|
119
|
+
## Automating with Cron
|
|
120
|
+
|
|
121
|
+
Orbitable is designed to run via cron rather than as a long-running service. Both `ingest` and `generate` are idempotent and safe to re-run.
|
|
122
|
+
|
|
123
|
+
**Ingest and generate every 4 hours:**
|
|
124
|
+
|
|
125
|
+
```cron
|
|
126
|
+
0 */4 * * * orbitable -c /etc/orbitable/config.toml ingest && orbitable -c /etc/orbitable/config.toml generate
|
|
127
|
+
```
|
|
128
|
+
|
|
129
|
+
**Ingest hourly, generate once daily at 03:00 UTC:**
|
|
130
|
+
|
|
131
|
+
```cron
|
|
132
|
+
0 * * * * orbitable -c /etc/orbitable/config.toml ingest
|
|
133
|
+
0 3 * * * orbitable -c /etc/orbitable/config.toml generate
|
|
134
|
+
```
|
|
135
|
+
|
|
136
|
+
**With logging to a file:**
|
|
137
|
+
|
|
138
|
+
```cron
|
|
139
|
+
0 */4 * * * orbitable -c /etc/orbitable/config.toml ingest >> /var/log/orbitable.log 2>&1 && orbitable -c /etc/orbitable/config.toml generate >> /var/log/orbitable.log 2>&1
|
|
140
|
+
```
|
|
141
|
+
|
|
142
|
+
## Credential Resolution
|
|
143
|
+
|
|
144
|
+
Database credentials are resolved in priority order:
|
|
145
|
+
|
|
146
|
+
1. **Environment variables** -- `ORBITABLE_DATABASE__USERNAME` / `ORBITABLE_DATABASE__PASSWORD`
|
|
147
|
+
2. **User secrets file** -- `~/.config/orbitable.toml`
|
|
148
|
+
3. **System secrets file** -- path set via `secrets_file` in `config.toml`
|
|
149
|
+
4. **config.toml values** -- not recommended for credentials
|
|
150
|
+
|
|
151
|
+
For cron jobs, either use the user secrets file or export environment variables in the crontab:
|
|
152
|
+
|
|
153
|
+
```cron
|
|
154
|
+
ORBITABLE_DATABASE__USERNAME=myuser
|
|
155
|
+
ORBITABLE_DATABASE__PASSWORD=mypassword
|
|
156
|
+
0 */4 * * * orbitable -c /etc/orbitable/config.toml ingest && orbitable -c /etc/orbitable/config.toml generate
|
|
157
|
+
```
|
|
158
|
+
|
|
159
|
+
## CLI Reference
|
|
160
|
+
|
|
161
|
+
```
|
|
162
|
+
orbitable [-c CONFIG] COMMAND
|
|
163
|
+
|
|
164
|
+
Commands:
|
|
165
|
+
init Scaffold config.toml and ~/.config/orbitable.toml
|
|
166
|
+
ingest Ingest TLE/OMM files into the database
|
|
167
|
+
generate Generate output TLE/OMM files from the database
|
|
168
|
+
|
|
169
|
+
Options:
|
|
170
|
+
-c, --config PATH Path to config.toml (default: ./config.toml)
|
|
171
|
+
```
|
|
172
|
+
|
|
173
|
+
## Configuration Reference
|
|
174
|
+
|
|
175
|
+
### `[database]`
|
|
176
|
+
|
|
177
|
+
| Field | Default | Description |
|
|
178
|
+
| -------------- | ------------ | ------------------------------------------ |
|
|
179
|
+
| `drivername` | `"sqlite"` | SQLAlchemy driver (`sqlite`, `mysql+pymysql`) |
|
|
180
|
+
| `name` | `":memory:"` | Database name or file path |
|
|
181
|
+
| `host` | | Database host |
|
|
182
|
+
| `port` | | Database port |
|
|
183
|
+
| `username` | | Database username (prefer secrets file) |
|
|
184
|
+
| `password` | | Database password (prefer secrets file) |
|
|
185
|
+
| `secrets_file` | | Path to a TOML file with username/password |
|
|
186
|
+
|
|
187
|
+
### `[[ingest.sources]]`
|
|
188
|
+
|
|
189
|
+
| Field | Default | Description |
|
|
190
|
+
| --------- | --------- | ---------------------------------- |
|
|
191
|
+
| `path` | | Directory to scan for files |
|
|
192
|
+
| `pattern` | `"*.tle"` | Glob pattern for matching files |
|
|
193
|
+
|
|
194
|
+
### `[output]`
|
|
195
|
+
|
|
196
|
+
| Field | Default | Description |
|
|
197
|
+
| ------- | ------------ | ------------------------------- |
|
|
198
|
+
| `dir` | `"./output"` | Output directory |
|
|
199
|
+
|
|
200
|
+
### `[output.formats]`
|
|
201
|
+
|
|
202
|
+
| Field | Default | Description |
|
|
203
|
+
| ----- | ------- | --------------------------- |
|
|
204
|
+
| `tle` | `true` | Generate .tle output files |
|
|
205
|
+
| `omm` | `true` | Generate .omm (CSV) output |
|
|
206
|
+
|
|
207
|
+
### `[output.types]`
|
|
208
|
+
|
|
209
|
+
| Field | Default | Description |
|
|
210
|
+
| -------------- | ------- | -------------------------------------------------- |
|
|
211
|
+
| `date_files` | `true` | YYYYMMDD files with latest TLE per satellite |
|
|
212
|
+
| `object_files` | `true` | Per-satellite files with all TLEs ordered by epoch |
|
|
213
|
+
|
|
214
|
+
### `[logging]`
|
|
215
|
+
|
|
216
|
+
| Field | Default | Description |
|
|
217
|
+
| ------- | -------- | ---------------------------------------------- |
|
|
218
|
+
| `level` | `"INFO"` | Log level: DEBUG, INFO, WARNING, ERROR, CRITICAL |
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
orbitable/__init__.py,sha256=YhnTJ1eJoEVD95gMKIAELlWuK3FeoHjIDyl428vpNE4,51
|
|
2
|
+
orbitable/_version.py,sha256=s9U3X54Pdr-Jlh9GP6LBaa_VRos7qs_wtrVefUHHNIA,520
|
|
3
|
+
orbitable/cli.py,sha256=Qoc_96gvVEgN9LDt2YW7HYNHlYnJLOqZBZ3hRyMA2YA,6494
|
|
4
|
+
orbitable/config.py,sha256=Gx6QhQC9KQKnb31FNSmwJM7GsD93We_N1nwZtAXhKQI,3463
|
|
5
|
+
orbitable/generator.py,sha256=JaKaVvC7xGrgMd88FlCbOR8UGoeyQJ97UcE28WraATs,4735
|
|
6
|
+
orbitable/ingest.py,sha256=I-Ig3Azq-GukoLGif7ks7a652Ih9EnGa2iWTl-xkfa0,6729
|
|
7
|
+
orbitable/model.py,sha256=XfteoYgDDmeK4wGZocVOSSgo7xzw2ya8FUQYbDNAWNY,6037
|
|
8
|
+
orbitable/reader.py,sha256=KFTznIM9euet0f4urmeo6rBL5FXQ0hKWFaf1GuOvnR0,5248
|
|
9
|
+
orbitable-0.2.0.dist-info/METADATA,sha256=ttDAhE1r3CaTILiDwxcvKf1MP7Y3ipJ-ZzmawGYB9ps,6356
|
|
10
|
+
orbitable-0.2.0.dist-info/WHEEL,sha256=QccIxa26bgl1E6uMy58deGWi-0aeIkkangHcxk2kWfw,87
|
|
11
|
+
orbitable-0.2.0.dist-info/entry_points.txt,sha256=VuRn13lX9clQ2ILIQTHCX1CZyMlsPlPE639Yqks4wsg,45
|
|
12
|
+
orbitable-0.2.0.dist-info/RECORD,,
|