logdetective-2.4.1-py3-none-any.whl → logdetective-2.6.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
logdetective/prompts.yml CHANGED
@@ -19,7 +19,6 @@ prompt_template: |
 
   {}
 
-  Analysis:
 
 snippet_prompt_template: |
   Analyse following RPM build log snippet. Describe contents accurately, without speculation or suggestions for resolution
@@ -30,7 +29,6 @@ snippet_prompt_template: |
 
   {}
 
-  Analysis:
 
 prompt_template_staged: |
   Given following log snippets, their explanation, and nothing else, explain what failure, if any, occurred during build of this package.
@@ -47,7 +45,6 @@ prompt_template_staged: |
 
   {}
 
-  Analysis:
 
 # System prompts
 # System prompts are meant to serve as general guide for model behavior,
logdetective/server/database/base.py CHANGED
@@ -1,15 +1,14 @@
 from os import getenv
-from contextlib import contextmanager
-from sqlalchemy import create_engine
-from sqlalchemy.orm import sessionmaker, declarative_base
-
+from contextlib import asynccontextmanager
+from sqlalchemy.orm import DeclarativeBase
+from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker
 from logdetective import logger
 
 
 def get_pg_url() -> str:
     """create postgresql connection string"""
     return (
-        f"postgresql+psycopg2://{getenv('POSTGRESQL_USER')}"
+        f"postgresql+asyncpg://{getenv('POSTGRESQL_USER')}"
         f":{getenv('POSTGRESQL_PASSWORD')}@{getenv('POSTGRESQL_HOST', 'postgres')}"
         f":{getenv('POSTGRESQL_PORT', '5432')}/{getenv('POSTGRESQL_DATABASE')}"
     )
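The only change to `get_pg_url()` is the driver prefix: `postgresql+psycopg2://` becomes `postgresql+asyncpg://`, so the same environment variables now select the async driver. A minimal sketch of the resulting URL, with made-up credentials, assuming logdetective 2.6.0 is installed:

    import os

    os.environ.update({
        "POSTGRESQL_USER": "logdetective",      # made-up value
        "POSTGRESQL_PASSWORD": "secret",        # made-up value
        "POSTGRESQL_DATABASE": "logdetective",  # made-up value
    })

    from logdetective.server.database.base import get_pg_url

    # postgresql+asyncpg://logdetective:secret@postgres:5432/logdetective
    print(get_pg_url())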
@@ -23,13 +22,16 @@ sqlalchemy_echo = getenv("SQLALCHEMY_ECHO", "False").lower() in (
     "y",
     "1",
 )
-engine = create_engine(get_pg_url(), echo=sqlalchemy_echo)
-SessionFactory = sessionmaker(autoflush=True, bind=engine)
-Base = declarative_base()
+engine = create_async_engine(get_pg_url(), echo=sqlalchemy_echo)
+SessionFactory = async_sessionmaker(autoflush=True, bind=engine)  # pylint: disable=invalid-name
+
+
+class Base(DeclarativeBase):
+    """Declarative base class for all ORM models."""
 
 
-@contextmanager
-def transaction(commit: bool = False):
+@asynccontextmanager
+async def transaction(commit: bool = False):
     """
     Context manager for 'framing' a db transaction.
 
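With `declarative_base()` replaced by a `DeclarativeBase` subclass, models inherit from `Base` exactly as before, and the class form gives type checkers a real base type. A short standalone sketch (the `Example` model is hypothetical, not part of logdetective):

    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

    class Base(DeclarativeBase):
        """Declarative base class for all ORM models."""

    class Example(Base):  # hypothetical model
        __tablename__ = "example"
        id: Mapped[int] = mapped_column(primary_key=True)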
@@ -39,27 +41,30 @@ def transaction(commit: bool = False):
     """
 
     session = SessionFactory()
-    try:
-        yield session
-        if commit:
-            session.commit()
-    except Exception as ex:
-        logger.warning("Exception while working with database: %s", str(ex))
-        session.rollback()
-        raise
-    finally:
-        session.close()
-
-
-def init():
+    async with session:
+        try:
+            yield session
+            if commit:
+                await session.commit()
+        except Exception as ex:
+            logger.warning("Exception while working with database: %s", str(ex))
+            await session.rollback()
+            raise
+        finally:
+            await session.close()
+
+
+async def init():
     """Init db"""
-    Base.metadata.create_all(engine)
+    async with engine.begin() as conn:
+        await conn.run_sync(Base.metadata.create_all)
     logger.debug("Database initialized")
 
 
-def destroy():
+async def destroy():
     """Destroy db"""
-    Base.metadata.drop_all(engine)
+    async with engine.begin() as conn:
+        await conn.run_sync(Base.metadata.drop_all)
     logger.warning("Database cleaned")
 
 
logdetective/server/database/models/__init__.py CHANGED
@@ -1,4 +1,3 @@
-from logdetective.server.database.base import Base
 from logdetective.server.database.models.merge_request_jobs import (
     Forge,
     GitlabMergeRequestJobs,
@@ -18,8 +17,9 @@ from logdetective.server.database.models.exceptions import (
     KojiTaskAnalysisTimeoutError,
 )
 
+# pylint: disable=undefined-all-variable
+
 __all__ = [
-    Base.__name__,
     GitlabMergeRequestJobs.__name__,
     Comments.__name__,
     Reactions.__name__,
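Since `Base` is dropped from this package's re-exports, downstream imports need its defining module, the one shown removed at the top of this file's diff:

    from logdetective.server.database.base import Base      # works in 2.6.0
    # from logdetective.server.database.models import Base  # only worked in 2.4.1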
@@ -1,6 +1,9 @@
+from __future__ import annotations
+from typing import Optional
 from datetime import datetime, timedelta, timezone
-from sqlalchemy import Column, BigInteger, DateTime, ForeignKey, Integer, String
-from sqlalchemy.orm import relationship
+from sqlalchemy import BigInteger, DateTime, ForeignKey, Integer, String, select
+from sqlalchemy.orm import Mapped, mapped_column, relationship
+
 from sqlalchemy.exc import OperationalError
 import backoff
 
@@ -21,42 +24,47 @@ class KojiTaskAnalysis(Base):
 
     __tablename__ = "koji_task_analysis"
 
-    id = Column(Integer, primary_key=True)
-    koji_instance = Column(String(255), nullable=False, index=True)
-    task_id = Column(BigInteger, nullable=False, index=True, unique=True)
-    log_file_name = Column(String(255), nullable=False, index=True)
-    request_received_at = Column(
-        DateTime,
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    koji_instance: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
+    task_id: Mapped[int] = mapped_column(BigInteger, nullable=False, index=True, unique=True)
+    log_file_name: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
+    request_received_at: Mapped[datetime] = mapped_column(
+        DateTime(timezone=True),
         nullable=False,
         index=True,
         default=datetime.now(timezone.utc),
         comment="Timestamp when the request was received",
     )
-    response_id = Column(
+    response_id: Mapped[Optional[int]] = mapped_column(
         Integer,
         ForeignKey("analyze_request_metrics.id"),
         nullable=True,
         index=False,
         comment="The id of the analyze request metrics for this task",
     )
-    response = relationship("AnalyzeRequestMetrics")
+    response: Mapped[Optional["AnalyzeRequestMetrics"]] = relationship(
+        "AnalyzeRequestMetrics",
+        back_populates="koji_tasks"
+    )
 
     @classmethod
     @backoff.on_exception(backoff.expo, OperationalError, max_tries=DB_MAX_RETRIES)
-    def create_or_restart(cls, koji_instance: str, task_id: int, log_file_name: str):
+    async def create_or_restart(
+        cls, koji_instance: str, task_id: int, log_file_name: str
+    ):
         """Create a new koji task analysis"""
-        with transaction(commit=True) as session:
+        query = select(cls).filter(
+            cls.koji_instance == koji_instance, cls.task_id == task_id
+        )
+        async with transaction(commit=True) as session:
             # Check if the task analysis already exists
-            koji_task_analysis = (
-                session.query(cls)
-                .filter_by(koji_instance=koji_instance, task_id=task_id)
-                .first()
-            )
+            query_result = await session.execute(query)
+            koji_task_analysis = query_result.first()
             if koji_task_analysis:
                 # If it does, update the request_received_at timestamp
                 koji_task_analysis.request_received_at = datetime.now(timezone.utc)
                 session.add(koji_task_analysis)
-                session.flush()
+                await session.flush()
                 return
 
             # If it doesn't, create a new one
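A point worth knowing about the Result API used in this hunk: `session.execute(select(...))` yields `Row` tuples, and `.scalars()` unwraps the mapped entity. A generic SQLAlchemy 2.0 sketch, not logdetective code, with the session assumed to be an `AsyncSession` provided by the caller:

    from sqlalchemy import select

    async def lookup(session):  # session: AsyncSession, assumed provided
        result = await session.execute(select(KojiTaskAnalysis))
        obj = result.scalars().first()  # mapped KojiTaskAnalysis instance, or None
        # result.first() would instead return a Row such as (KojiTaskAnalysis(...),)
        return obj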
@@ -65,14 +73,19 @@ class KojiTaskAnalysis(Base):
             koji_task_analysis.task_id = task_id
             koji_task_analysis.log_file_name = log_file_name
             session.add(koji_task_analysis)
-            session.flush()
+            await session.flush()
 
     @classmethod
     @backoff.on_exception(backoff.expo, OperationalError, max_tries=DB_MAX_RETRIES)
-    def add_response(cls, task_id: int, metric_id: int):
+    async def add_response(cls, task_id: int, metric_id: int):
         """Add a response to a koji task analysis"""
-        with transaction(commit=True) as session:
-            koji_task_analysis = session.query(cls).filter_by(task_id=task_id).first()
+        query = select(cls).filter(cls.task_id == task_id)
+        metrics_query = select(AnalyzeRequestMetrics).filter(
+            AnalyzeRequestMetrics.id == metric_id
+        )
+        async with transaction(commit=True) as session:
+            query_result = await session.execute(query)
+            koji_task_analysis = query_result.scalars().first()
             # Ensure that the task analysis doesn't already have a response
             if koji_task_analysis.response:
                 # This is probably due to an analysis that took so long that
@@ -81,20 +94,20 @@ class KojiTaskAnalysis(Base):
                 # returned to the consumer, so we'll just drop this extra one
                 # on the floor and keep the one saved in the database.
                 return
-
-            metric = (
-                session.query(AnalyzeRequestMetrics).filter_by(id=metric_id).first()
-            )
+            metrics_query_result = await session.execute(metrics_query)
+            metric = metrics_query_result.scalars().first()
             koji_task_analysis.response = metric
             session.add(koji_task_analysis)
-            session.flush()
+            await session.flush()
 
     @classmethod
     @backoff.on_exception(backoff.expo, OperationalError, max_tries=DB_MAX_RETRIES)
-    def get_response_by_task_id(cls, task_id: int) -> KojiStagedResponse:
+    async def get_response_by_task_id(cls, task_id: int) -> KojiStagedResponse:
         """Get a koji task analysis by task id"""
-        with transaction(commit=False) as session:
-            koji_task_analysis = session.query(cls).filter_by(task_id=task_id).first()
+        query = select(cls).filter(cls.task_id == task_id)
+        async with transaction(commit=False) as session:
+            query_result = await session.execute(query)
+            koji_task_analysis = query_result.scalars().first()
             if not koji_task_analysis:
                 raise KojiTaskNotFoundError(f"Task {task_id} not yet analyzed")
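End-to-end, the converted classmethods are awaited like any coroutine. A usage sketch with made-up task values:

    import asyncio

    async def main() -> None:
        await KojiTaskAnalysis.create_or_restart(
            koji_instance="koji.example.org",  # made-up values
            task_id=123,
            log_file_name="build.log",
        )
        # raises KojiTaskNotFoundError if no record exists for the task id
        response = await KojiTaskAnalysis.get_response_by_task_id(123)

    asyncio.run(main())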