ose-core 0.2.5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75) hide show
  1. ose_core-0.2.5/PKG-INFO +21 -0
  2. ose_core-0.2.5/README.md +36 -0
  3. ose_core-0.2.5/pyproject.toml +38 -0
  4. ose_core-0.2.5/setup.cfg +4 -0
  5. ose_core-0.2.5/src/ose/__init__.py +9 -0
  6. ose_core-0.2.5/src/ose/commands/CLICommandContext.py +49 -0
  7. ose_core-0.2.5/src/ose/commands/Command.py +29 -0
  8. ose_core-0.2.5/src/ose/commands/CommandContext.py +25 -0
  9. ose_core-0.2.5/src/ose/commands/ImportExternalCommand.py +87 -0
  10. ose_core-0.2.5/src/ose/commands/ReleaseCommandContext.py +60 -0
  11. ose_core-0.2.5/src/ose/commands/__init__.py +0 -0
  12. ose_core-0.2.5/src/ose/constants.py +3 -0
  13. ose_core-0.2.5/src/ose/database/Base.py +5 -0
  14. ose_core-0.2.5/src/ose/database/NextId.py +10 -0
  15. ose_core-0.2.5/src/ose/database/Release.py +44 -0
  16. ose_core-0.2.5/src/ose/database/User.py +18 -0
  17. ose_core-0.2.5/src/ose/database/__init__.py +17 -0
  18. ose_core-0.2.5/src/ose/database/migrations/README +1 -0
  19. ose_core-0.2.5/src/ose/database/migrations/alembic.ini +50 -0
  20. ose_core-0.2.5/src/ose/database/migrations/env.py +113 -0
  21. ose_core-0.2.5/src/ose/database/migrations/script.py.mako +24 -0
  22. ose_core-0.2.5/src/ose/database/migrations/versions/244abfc3e657_rename_tables.py +22 -0
  23. ose_core-0.2.5/src/ose/database/migrations/versions/487271aa555d_added_release_artifacts.py +33 -0
  24. ose_core-0.2.5/src/ose/database/migrations/versions/5a12a34c96d8_added_release_repo.py +37 -0
  25. ose_core-0.2.5/src/ose/database/migrations/versions/bbe766649a99_initial_migration.py +54 -0
  26. ose_core-0.2.5/src/ose/index/BucketStorage.py +42 -0
  27. ose_core-0.2.5/src/ose/index/ExtendedStorage.py +9 -0
  28. ose_core-0.2.5/src/ose/index/FileStorage.py +7 -0
  29. ose_core-0.2.5/src/ose/index/__init__.py +0 -0
  30. ose_core-0.2.5/src/ose/index/create_index.py +90 -0
  31. ose_core-0.2.5/src/ose/index/schema.py +14 -0
  32. ose_core-0.2.5/src/ose/model/ColumnMapping.py +302 -0
  33. ose_core-0.2.5/src/ose/model/Diff.py +77 -0
  34. ose_core-0.2.5/src/ose/model/ExcelOntology.py +1030 -0
  35. ose_core-0.2.5/src/ose/model/Plugin.py +35 -0
  36. ose_core-0.2.5/src/ose/model/Relation.py +91 -0
  37. ose_core-0.2.5/src/ose/model/ReleaseScript.py +94 -0
  38. ose_core-0.2.5/src/ose/model/RepositoryConfiguration.py +34 -0
  39. ose_core-0.2.5/src/ose/model/Result.py +49 -0
  40. ose_core-0.2.5/src/ose/model/Schema.py +91 -0
  41. ose_core-0.2.5/src/ose/model/Script.py +40 -0
  42. ose_core-0.2.5/src/ose/model/ScriptArgument.py +10 -0
  43. ose_core-0.2.5/src/ose/model/Term.py +131 -0
  44. ose_core-0.2.5/src/ose/model/TermIdentifier.py +50 -0
  45. ose_core-0.2.5/src/ose/model/__init__.py +0 -0
  46. ose_core-0.2.5/src/ose/py.typed +0 -0
  47. ose_core-0.2.5/src/ose/release/BuildReleaseStep.py +77 -0
  48. ose_core-0.2.5/src/ose/release/GithubPublishReleaseStep.py +50 -0
  49. ose_core-0.2.5/src/ose/release/HumanVerificationReleaseStep.py +24 -0
  50. ose_core-0.2.5/src/ose/release/ImportExternalReleaseStep.py +116 -0
  51. ose_core-0.2.5/src/ose/release/ImportExternalWithGitHubActionsReleaseStep.py +22 -0
  52. ose_core-0.2.5/src/ose/release/MergeReleaseStep.py +68 -0
  53. ose_core-0.2.5/src/ose/release/PreparationReleaseStep.py +46 -0
  54. ose_core-0.2.5/src/ose/release/ReleaseStep.py +205 -0
  55. ose_core-0.2.5/src/ose/release/ValidationReleaseStep.py +99 -0
  56. ose_core-0.2.5/src/ose/release/__init__.py +0 -0
  57. ose_core-0.2.5/src/ose/release/common.py +96 -0
  58. ose_core-0.2.5/src/ose/release/do_release.py +114 -0
  59. ose_core-0.2.5/src/ose/services/ConfigurationService.py +76 -0
  60. ose_core-0.2.5/src/ose/services/FileCache.py +153 -0
  61. ose_core-0.2.5/src/ose/services/LocalConfigurationService.py +188 -0
  62. ose_core-0.2.5/src/ose/services/OntoloyBuildService.py +42 -0
  63. ose_core-0.2.5/src/ose/services/PluginService.py +118 -0
  64. ose_core-0.2.5/src/ose/services/RepositoryConfigurationService.py +190 -0
  65. ose_core-0.2.5/src/ose/services/RobotOntologyBuildService.py +412 -0
  66. ose_core-0.2.5/src/ose/services/__init__.py +0 -0
  67. ose_core-0.2.5/src/ose/services/validation.py +15 -0
  68. ose_core-0.2.5/src/ose/utils/__init__.py +8 -0
  69. ose_core-0.2.5/src/ose/utils/github.py +176 -0
  70. ose_core-0.2.5/src/ose/utils/strings.py +42 -0
  71. ose_core-0.2.5/src/ose_core.egg-info/PKG-INFO +21 -0
  72. ose_core-0.2.5/src/ose_core.egg-info/SOURCES.txt +73 -0
  73. ose_core-0.2.5/src/ose_core.egg-info/dependency_links.txt +1 -0
  74. ose_core-0.2.5/src/ose_core.egg-info/requires.txt +17 -0
  75. ose_core-0.2.5/src/ose_core.egg-info/top_level.txt +1 -0
@@ -0,0 +1,21 @@
1
+ Metadata-Version: 2.4
2
+ Name: ose-core
3
+ Version: 0.2.5
4
+ Summary: Core library for OntoSpreadEd - Ontology spreadsheet editor
5
+ Requires-Python: >=3.12
6
+ Requires-Dist: pandas
7
+ Requires-Dist: openpyxl
8
+ Requires-Dist: jsonschema
9
+ Requires-Dist: daff
10
+ Requires-Dist: whoosh
11
+ Requires-Dist: networkx
12
+ Requires-Dist: pydot
13
+ Requires-Dist: py-horned-owl
14
+ Requires-Dist: ontoutils
15
+ Requires-Dist: pyyaml
16
+ Requires-Dist: dacite
17
+ Requires-Dist: aiohttp
18
+ Requires-Dist: async-lru
19
+ Provides-Extra: dev
20
+ Requires-Dist: pytest; extra == "dev"
21
+ Requires-Dist: ruff; extra == "dev"
@@ -0,0 +1,36 @@
1
+ # OSE Core
2
+
3
+ Core library for OntoSpreadEd - Ontology Spreadsheet Editor.
4
+
5
+ ## Description
6
+
7
+ This package contains the business logic, data models, and services for the OntoSpreadEd ontology spreadsheet editor. It provides:
8
+
9
+ - Data models for ontology entities (classes, properties, individuals)
10
+ - Database schema and migrations (using Alembic)
11
+ - Full-text search index (using Whoosh)
12
+ - Ontology processing and validation
13
+ - Release pipeline framework
14
+ - Plugin system infrastructure
15
+
16
+ ## Installation
17
+
18
+ ```bash
19
+ pip install ose-core
20
+ ```
21
+
22
+ ## Requirements
23
+
24
+ - Python 3.12+
25
+
26
+ ## Dependencies
27
+
28
+ Key dependencies include:
29
+ - `py-horned-owl` - OWL ontology processing
30
+ - `pandas` / `openpyxl` - Excel file handling
31
+ - `whoosh` - Full-text search
32
+ - `SQLAlchemy` - Database ORM
33
+
34
+ ## License
35
+
36
+ LGPL-3.0-or-later
@@ -0,0 +1,38 @@
1
+ [build-system]
2
+ requires = ["setuptools >= 61.0"]
3
+ build-backend = "setuptools.build_meta"
4
+
5
+ [project]
6
+ name = "ose-core"
7
+ description = "Core library for OntoSpreadEd - Ontology spreadsheet editor"
8
+ requires-python = ">= 3.12"
9
+ version = "0.2.5"
10
+
11
+ dependencies = [
12
+ "pandas",
13
+ "openpyxl",
14
+ "jsonschema",
15
+ "daff",
16
+ "whoosh",
17
+ "networkx",
18
+ "pydot",
19
+ "py-horned-owl",
20
+ "ontoutils",
21
+ "pyyaml",
22
+ "dacite",
23
+ "aiohttp",
24
+ "async-lru",
25
+ ]
26
+
27
+ [project.optional-dependencies]
28
+ dev = [
29
+ "pytest",
30
+ "ruff",
31
+ ]
32
+
33
+ [tool.setuptools.packages.find]
34
+ where = ["src"]
35
+ include = ["ose", "ose.*"]
36
+
37
+ [tool.setuptools.package-data]
38
+ "ose.database.migrations" = ["alembic.ini", "script.py.mako", "README"]
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
@@ -0,0 +1,9 @@
1
"""
OSE Core - Core library for OntoSpreadEd

This package contains the business logic, models, and services
for the OntoSpreadEd ontology spreadsheet editor.
"""

# Re-export commonly used classes
# NOTE(review): the file listing shows src/ose/model/__init__.py with 0 lines,
# so this star import currently re-exports nothing — confirm whether the model
# package is expected to define __all__ / re-exports.
from .model import * # noqa: F401, F403
@@ -0,0 +1,49 @@
1
+ import logging
2
+ import os.path
3
+ import shutil
4
+ from tempfile import TemporaryDirectory
5
+ from typing import Optional
6
+
7
+ from ose.commands.CommandContext import CommandContext
8
+
9
+
10
class CLICommandContext(CommandContext):
    """CommandContext for CLI runs, backed by a local working directory.

    When no working directory is supplied, a temporary directory is created
    and removed again in :meth:`cleanup`.
    """

    _logger = logging.getLogger(__name__)

    _working_dir: str
    _tempdir: Optional[TemporaryDirectory] = None

    def __init__(self, working_dir: Optional[str] = None):
        if working_dir is None:
            # The first positional argument of TemporaryDirectory is `suffix`;
            # use the keyword so the name is actually a recognizable prefix.
            self._tempdir = TemporaryDirectory(prefix="ose-cli-context")
            self._working_dir = self._tempdir.name
        else:
            self._working_dir = os.path.abspath(working_dir)

    def canceled(self) -> bool:
        # A CLI invocation cannot be canceled externally.
        return False

    def local_name(self, remote_name, file_ending=None) -> str:
        """Map a remote file name to a path inside the working directory.

        If `file_ending` is given it replaces the file's extension, or is
        appended when the name has none.
        """
        file_name = os.path.join(self._working_dir, remote_name)

        if file_ending is not None:
            # os.path.splitext only considers the last path component; the
            # previous rfind(".") could match a dot inside a directory name,
            # or chop the final character when no dot was present at all.
            base, _ = os.path.splitext(file_name)
            file_name = base + file_ending

        self._logger.debug(f"Local name of '{remote_name}' is '{file_name}' (ending: {file_ending})")

        return file_name

    def save_file(self, file: str, temporary: Optional[bool] = None, **kwargs):
        """Copy `file` into the working directory (no-op if already there)."""
        target_path = os.path.abspath(os.path.join(self._working_dir, file))

        if target_path != os.path.abspath(file):
            self._logger.debug(f"Saving file '{file}' to '{target_path}'")
            shutil.copy2(file, target_path)
        else:
            self._logger.debug(f"Saving file '{file}' to '{target_path}' (Already existing)")

    def cleanup(self) -> None:
        super().cleanup()

        if self._tempdir is not None:
            self._tempdir.cleanup()
@@ -0,0 +1,29 @@
1
+ import abc
2
+ from typing import Tuple
3
+
4
+ from .CommandContext import CommandContext
5
+ from ..model.Result import Result
6
+
7
+
8
class CommandCanceledException(Exception):
    """Raised by a command when its context reports cancellation."""
10
+
11
+
12
class Command(abc.ABC):
    """Base class for runnable commands bound to a :class:`CommandContext`."""

    _context: CommandContext

    def __init__(self, context: CommandContext):
        self._context = context

    @abc.abstractmethod
    def run(self, **kwargs) -> Tuple[Result, bool]:
        """Execute the command.

        Returns the accumulated :class:`Result` and a success flag.

        Previously this was a plain stub on an ABC that silently returned
        ``None``; marking it abstract makes forgetting to override it a
        construction-time error instead of a runtime unpacking failure.
        """
        ...

    def _raise_if_canceled(self):
        # Cooperative cancellation point for long-running commands.
        if self._context.canceled():
            raise CommandCanceledException("Release has been canceled!")

    def _local_name(self, remote_name, file_ending=None) -> str:
        # Convenience delegation to the context's file-name mapping.
        return self._context.local_name(remote_name, file_ending)

    def cleanup(self):
        """Hook for releasing command-held resources; default does nothing."""
        pass
@@ -0,0 +1,25 @@
1
+ import abc
2
+ from typing import Optional
3
+
4
+
5
class CommandContext(abc.ABC):
    """Environment a command executes in.

    Supplies file-name mapping, persistence of produced files, and a
    cancellation signal. Usable as a context manager; exiting triggers
    :meth:`cleanup`.
    """

    @abc.abstractmethod
    def canceled(self) -> bool:
        """Return True when the surrounding operation has been canceled."""
        ...

    @abc.abstractmethod
    def local_name(self, remote_name, file_ending=None) -> str:
        """Map a remote file name to a local path, optionally swapping the file ending."""
        ...

    @abc.abstractmethod
    def save_file(self, file: str, temporary: Optional[bool] = None, **kwargs):
        """Persist a produced file; extra keyword arguments are implementation-specific."""
        ...

    def cleanup(self) -> None:
        """Release held resources; the default implementation does nothing."""

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.cleanup()
@@ -0,0 +1,87 @@
1
+ import csv
2
+ from typing import List, Tuple, Literal
3
+
4
+ from .Command import Command
5
+ from ..model.ExcelOntology import ExcelOntology
6
+ from ..model.ReleaseScript import ReleaseScript, ReleaseScriptFile
7
+ from ..model.Result import Result
8
+ from ..services.RobotOntologyBuildService import RobotOntologyBuildService
9
+
10
+
11
class ReleaseCommand(Command):
    # Shared base for release-related commands: knows how to persist the
    # release target file through the context.

    def _store_target_artifact(self, file: ReleaseScriptFile,
                               kind: Literal["source", "intermediate", "final"] = "final",
                               downloadable: bool = True):
        # Save the locally-built target file; `target_file` carries the remote
        # path so contexts that record artifacts can store the destination.
        return self._context.save_file(self._local_name(file.target.file), target_file=file.target.file, kind=kind,
                                       downloadable=downloadable)
17
+
18
+
19
class ImportExternalCommand(ReleaseCommand):
    """Build and store the merged external-imports ontology for a release.

    Reads the configured source spreadsheets, the optional re-parenting and
    renaming CSV files, merges all imports via the ROBOT build service, and
    stores the resulting target file as a non-downloadable artifact.
    """

    _VALID_TYPES = ("class", "object property", "data property")

    def _read_term_csv(self, path: str, value_column: str,
                       result: Result) -> List[Tuple[str, str, Literal["class", "object property", "data property"]]]:
        """Parse a ROBOT-style CSV of (ID, <value_column>, TYPE) rows.

        Unknown TYPE values are reported as a warning on `result` and fall
        back to "class". Returns (id, value, type) triples.
        """
        entries: List[Tuple[str, str, Literal["class", "object property", "data property"]]] = []
        with open(self._local_name(path)) as f:
            rows = csv.DictReader(f, skipinitialspace=True)
            for row in rows:
                # Skip potential ROBOT header row duplicated below the CSV header
                if row.get("ID") == "ID":
                    continue

                term_id = row.get("ID")
                value = row.get(value_column)
                owl_type = row.get("TYPE", "class").lower()
                if owl_type not in self._VALID_TYPES:
                    # Previously the addParents loop reported renameTermFile
                    # here; report the file actually being parsed.
                    result.warning(type='unknown-owl-type',
                                   file=path,
                                   msg=f"Unknown OWL type '{owl_type}' in column 'TYPE'")
                    owl_type = "class"  # consistent fallback for both CSV kinds

                entries.append((term_id, value, owl_type))
        return entries

    def run(self, release_script: ReleaseScript, working_dir: str) -> Tuple[Result, bool]:
        builder = RobotOntologyBuildService()

        result = Result(())

        file = release_script.external
        ontology = ExcelOntology(file.target.iri)
        for s in file.sources:
            xlsx = self._local_name(s.file)
            result += ontology.add_imported_terms(s.file, xlsx)

        self._raise_if_canceled()

        # (id, new parent id, type) triples from the optional re-parenting CSV.
        new_parents: List[Tuple[str, str, Literal["class", "object property", "data property"]]] = []
        if file.addParentsFile:
            new_parents = self._read_term_csv(file.addParentsFile, "NEW PARENT ID", result)

        # (id, new label, type) triples from the optional renaming CSV.
        renamings: List[Tuple[str, str, Literal["class", "object property", "data property"]]] = []
        if file.renameTermFile is not None:
            renamings = self._read_term_csv(file.renameTermFile, "NEW LABEL", result)

        result += builder.merge_imports(
            ontology.imports(),
            self._local_name(file.target.file),
            file.target.iri,
            release_script.short_repository_name,
            working_dir,
            renamings,
            new_parents
        )

        self._raise_if_canceled()

        self._store_target_artifact(file, downloadable=False)

        return result, result.ok()
@@ -0,0 +1,60 @@
1
+ from typing import Optional, Literal
2
+
3
+ from flask_github import GitHub
4
+ from flask_sqlalchemy import SQLAlchemy
5
+ from sqlalchemy.orm import Query
6
+
7
+ from .CommandContext import CommandContext
8
+ from ..database.Release import Release, ReleaseArtifact
9
+ from ..model.ReleaseScript import ReleaseScript, ReleaseScriptFile
10
+ from ..release.common import add_artifact, local_name
11
+ from ..services.ConfigurationService import ConfigurationService
12
+
13
+
14
class ReleaseCommandContext(CommandContext):
    """CommandContext used while running a release: records produced files
    as ReleaseArtifact rows and reads cancellation state from the database."""

    _config: ConfigurationService
    _working_dir: str
    _release_id: int
    _release_script: ReleaseScript
    _gh: GitHub
    _db: SQLAlchemy
    _q: Query[Release]

    _total_items: Optional[int] = None
    _current_item: int = 1

    def __init__(self, db: SQLAlchemy, gh: GitHub, release_script: ReleaseScript, release_id: int, tmp: str,
                 config: ConfigurationService) -> None:
        self._config = config
        self._db = db
        self._gh = gh
        self._release_script = release_script
        self._release_id = release_id
        self._q = db.session.query(Release)
        self._a = db.session.query(ReleaseArtifact)
        self._working_dir = tmp

    def canceled(self) -> bool:
        # Re-query every time so an external cancel is picked up promptly.
        r: Release = self._q.get(self._release_id)
        return r.state == "canceled"

    def local_name(self, remote_name, file_ending=None) -> str:
        return local_name(self._working_dir, remote_name, file_ending)

    def store_artifact(self, local_path: str, target_path: Optional[str] = None,
                       kind: Optional[Literal["source", "intermediate", "final"]] = None,
                       downloadable: bool = True) -> None:
        """Record a produced file as a ReleaseArtifact row for this release."""
        # Final artifacts must carry a target path (see the model's check
        # constraint); without one we default to an intermediate artifact.
        kind = kind if kind is not None else ("intermediate" if target_path is None else "final")

        artifact = ReleaseArtifact(release_id=self._release_id, local_path=local_path, target_path=target_path,
                                   kind=kind, downloadable=downloadable)

        add_artifact(self._db, artifact)

    def store_target_artifact(self, file: ReleaseScriptFile,
                              kind: Literal["source", "intermediate", "final"] = "final",
                              downloadable: bool = True):
        # Fixed: previously called self._local_name, which does not exist on
        # CommandContext (it lives on Command) and raised AttributeError.
        return self.store_artifact(self.local_name(file.target.file), file.target.file, kind, downloadable)

    def save_file(self, file: str, temporary: Optional[bool] = None, **kwargs):
        """CommandContext entry point; adapts Command kwargs to store_artifact.

        Callers such as ReleaseCommand pass `target_file` and may pass `kind`
        explicitly. Previously both were forwarded verbatim, causing a
        duplicate `kind` keyword and an unexpected `target_file` argument in
        store_artifact; map/default them instead.
        """
        kwargs.setdefault("kind", "intermediate" if temporary else "final")
        if "target_file" in kwargs:
            kwargs["target_path"] = kwargs.pop("target_file")
        return self.store_artifact(file, **kwargs)
File without changes
@@ -0,0 +1,3 @@
1
+ """Core constants for OSE"""
2
+
3
+ RDFS_LABEL = "http://www.w3.org/2000/01/rdf-schema#label"
@@ -0,0 +1,5 @@
1
+ from sqlalchemy.orm import DeclarativeBase
2
+
3
+
4
class Base(DeclarativeBase):
    """Shared SQLAlchemy declarative base for all OSE database models."""
    pass
@@ -0,0 +1,10 @@
1
+ from sqlalchemy import Column, Integer, String
2
+
3
+ from .Base import Base
4
+
5
+
6
class NextId(Base):
    """Per-repository counter row holding the next id value."""
    __tablename__ = 'nextids'
    id = Column(Integer, primary_key=True)
    repo_name = Column(String(50))  # repository the counter belongs to
    # next id to hand out for this repository (presumably for new terms —
    # confirm against the code that consumes this table)
    next_id = Column(Integer)
@@ -0,0 +1,44 @@
1
+ from sqlalchemy import Integer, String, JSON, DateTime, Boolean, ForeignKey, CheckConstraint
2
+ from sqlalchemy.orm import relationship, mapped_column
3
+
4
+ from .Base import Base
5
+
6
+
7
class ReleaseArtifact(Base):
    """A file produced during a release run, linked to its Release row."""
    __tablename__ = 'release_artifacts'

    id = mapped_column(Integer(), primary_key=True)
    release_id = mapped_column(Integer(), ForeignKey('releases.id'))

    local_path = mapped_column(String())  # path of the file in the worker's working dir
    target_path = mapped_column(String(), nullable=True)  # destination path; required for 'final'
    downloadable = mapped_column(Boolean(), default=True)
    # DB-level guard: kind must be one of the three values, and 'final'
    # artifacts must carry a target_path.
    kind = mapped_column(String(), CheckConstraint(
        "kind in ('source', 'intermediate', 'final') and (kind <> 'final' or target_path is not null)"), )

    def as_dict(self):
        # Plain dict of column name -> value over all mapped columns.
        return {c.name: getattr(self, c.name) for c in self.__table__.columns}
21
+
22
+
23
class Release(Base):
    """State of a single release pipeline run."""
    __tablename__ = 'releases'

    id = mapped_column(Integer, primary_key=True)
    state = mapped_column(String(20))  # e.g. "canceled" is checked by ReleaseCommandContext
    running = mapped_column(Boolean(), default=True)
    step = mapped_column(Integer)  # current step number
    details = mapped_column(JSON(none_as_null=True))  # Dict from step nr to step info
    start = mapped_column(DateTime)
    started_by = mapped_column(String())
    end = mapped_column(DateTime)
    repo = mapped_column(String(20))
    release_script = mapped_column(JSON(none_as_null=True))  # serialized ReleaseScript
    worker_id = mapped_column(String(20))
    local_dir = mapped_column(String())

    # Artifacts produced by this release; joined-loaded so as_dict works
    # without triggering lazy loads.
    artifacts = relationship("ReleaseArtifact", lazy="joined")

    def as_dict(self):
        # Column values plus nested artifact dicts.
        val = {c.name: getattr(self, c.name) for c in self.__table__.columns}
        val["artifacts"] = [a.as_dict() for a in self.artifacts]
        return val
@@ -0,0 +1,18 @@
1
+ from typing import Any
2
+
3
+ from sqlalchemy import Column, Integer, String
4
+
5
+ from .Base import Base
6
+
7
+
8
class User(Base):
    """A GitHub-authenticated user of the editor."""
    __tablename__ = 'users'

    id = Column(Integer, primary_key=True)
    github_access_token = Column(String(255))  # access token for GitHub API calls
    github_id = Column(Integer)  # numeric GitHub account id
    github_login = Column(String(255))  # GitHub username

    def __init__(self, github_access_token, **kw: Any):
        # Token is the only mandatory field; everything else goes through
        # the declarative constructor.
        super().__init__(**kw)
        self.github_access_token = github_access_token
@@ -0,0 +1,17 @@
1
+ from os.path import join, dirname
2
+
3
+ from flask_migrate import Migrate
4
+ from flask_sqlalchemy import SQLAlchemy
5
+
6
# Shared extension instances; bound to a concrete app in init_app().
db = SQLAlchemy()
migrate = Migrate()

# Get the migrations directory path relative to this package
_migrations_dir = join(dirname(__file__), 'migrations')


def init_app(app):
    """Bind the shared SQLAlchemy and Migrate extensions to a Flask app.

    Returns the shared SQLAlchemy instance for convenience.
    """
    db.init_app(app)
    # Point Flask-Migrate at the migrations bundled inside this package.
    migrate.init_app(app, db, directory=_migrations_dir)

    return db
@@ -0,0 +1 @@
1
+ Single-database configuration for Flask.
@@ -0,0 +1,50 @@
1
+ # A generic, single database configuration.
2
+
3
+ [alembic]
4
+ # template used to generate migration files
5
+ # file_template = %%(rev)s_%%(slug)s
6
+
7
+ # set to 'true' to run the environment during
8
+ # the 'revision' command, regardless of autogenerate
9
+ # revision_environment = false
10
+
11
+
12
+ # Logging configuration
13
+ [loggers]
14
+ keys = root,sqlalchemy,alembic,flask_migrate
15
+
16
+ [handlers]
17
+ keys = console
18
+
19
+ [formatters]
20
+ keys = generic
21
+
22
+ [logger_root]
23
+ level = WARN
24
+ handlers = console
25
+ qualname =
26
+
27
+ [logger_sqlalchemy]
28
+ level = WARN
29
+ handlers =
30
+ qualname = sqlalchemy.engine
31
+
32
+ [logger_alembic]
33
+ level = INFO
34
+ handlers =
35
+ qualname = alembic
36
+
37
+ [logger_flask_migrate]
38
+ level = INFO
39
+ handlers =
40
+ qualname = flask_migrate
41
+
42
+ [handler_console]
43
+ class = StreamHandler
44
+ args = (sys.stderr,)
45
+ level = NOTSET
46
+ formatter = generic
47
+
48
+ [formatter_generic]
49
+ format = %(levelname)-5.5s [%(name)s] %(message)s
50
+ datefmt = %H:%M:%S
@@ -0,0 +1,113 @@
1
+ import logging
2
+ from logging.config import fileConfig
3
+
4
+ from flask import current_app
5
+
6
+ from alembic import context
7
+
8
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')


def get_engine():
    """Return the Flask app's SQLAlchemy engine across extension versions."""
    try:
        # this works with Flask-SQLAlchemy<3 and Alchemical
        return current_app.extensions['migrate'].db.get_engine()
    except (TypeError, AttributeError):
        # this works with Flask-SQLAlchemy>=3
        return current_app.extensions['migrate'].db.engine


def get_engine_url():
    """Return the database URL with '%' doubled for ConfigParser interpolation."""
    try:
        return get_engine().url.render_as_string(hide_password=False).replace(
            '%', '%%')
    except AttributeError:
        # older SQLAlchemy URLs have no render_as_string
        return str(get_engine().url).replace('%', '%%')


# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
config.set_main_option('sqlalchemy.url', get_engine_url())
target_db = current_app.extensions['migrate'].db

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def get_metadata():
    """Return the metadata to run migrations against (handles multi-metadata setups)."""
    if hasattr(target_db, 'metadatas'):
        return target_db.metadatas[None]
    return target_db.metadata


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url, target_metadata=get_metadata(), literal_binds=True
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """

    # this callback is used to prevent an auto-migration from being generated
    # when there are no changes to the schema
    # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
    def process_revision_directives(context, revision, directives):
        if getattr(config.cmd_opts, 'autogenerate', False):
            script = directives[0]
            if script.upgrade_ops.is_empty():
                directives[:] = []
                logger.info('No changes in schema detected.')

    conf_args = current_app.extensions['migrate'].configure_args
    if conf_args.get("process_revision_directives") is None:
        conf_args["process_revision_directives"] = process_revision_directives

    connectable = get_engine()

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=get_metadata(),
            **conf_args
        )

        with context.begin_transaction():
            context.run_migrations()


# Module-level dispatch: Alembic imports this file and expects migrations to
# run immediately, in offline or online mode depending on invocation.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
@@ -0,0 +1,24 @@
1
+ """${message}
2
+
3
+ Revision ID: ${up_revision}
4
+ Revises: ${down_revision | comma,n}
5
+ Create Date: ${create_date}
6
+
7
+ """
8
+ from alembic import op
9
+ import sqlalchemy as sa
10
+ ${imports if imports else ""}
11
+
12
+ # revision identifiers, used by Alembic.
13
+ revision = ${repr(up_revision)}
14
+ down_revision = ${repr(down_revision)}
15
+ branch_labels = ${repr(branch_labels)}
16
+ depends_on = ${repr(depends_on)}
17
+
18
+
19
+ def upgrade():
20
+ ${upgrades if upgrades else "pass"}
21
+
22
+
23
+ def downgrade():
24
+ ${downgrades if downgrades else "pass"}