investify-utils 2.0.0a5__py3-none-any.whl → 2.0.0a7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of investify-utils might be problematic.

@@ -4,10 +4,25 @@ Investify Utils - Shared utilities for Investify services.
  Install with optional dependencies:
  pip install investify-utils[postgres] # Sync PostgreSQL client
  pip install investify-utils[postgres-async] # Async PostgreSQL client
- pip install investify-utils[postgres-all] # Both clients
+ pip install investify-utils[kafka] # Kafka Avro producer/consumer
+ pip install investify-utils[s3] # S3 client
+ pip install investify-utils[helpers] # Timestamp/SQL utilities

  Usage:
+ # Logging (no extra required)
+ from investify_utils.logging import setup_logging
+
+ # PostgreSQL
  from investify_utils.postgres import PostgresClient, AsyncPostgresClient
+
+ # Kafka
+ from investify_utils.kafka import AvroProducer, AvroConsumer
+
+ # S3
+ from investify_utils.s3 import S3Client
+
+ # Helpers
+ from investify_utils.helpers import convert_to_pd_timestamp, create_sql_in_filter
  """

- __version__ = "2.0.0a2"
+ __version__ = "2.0.0a7"
@@ -0,0 +1,154 @@
+ """
+ Common helper utilities for Investify services.
+
+ Usage:
+     from investify_utils.helpers import convert_to_pd_timestamp, create_sql_in_filter
+ """
+
+ import datetime as dt
+ import importlib.util
+ import logging
+ import sys
+ from numbers import Integral, Number, Real
+ from typing import Literal
+
+ import numpy as np
+ import pandas as pd
+
+ logger = logging.getLogger(__name__)
+
+
+ # =============================================================================
+ # Timestamp Utilities
+ # =============================================================================
+
+
+ def convert_to_pd_timestamp(timestamp) -> pd.Timestamp | None:
+     """
+     Convert various timestamp formats to pandas Timestamp.
+
+     Args:
+         timestamp: Can be None, pd.Timestamp, number (unix), string, datetime, or np.datetime64
+
+     Returns:
+         pd.Timestamp or None
+     """
+     if timestamp is None:
+         return None
+
+     if isinstance(timestamp, pd.Timestamp):
+         return timestamp
+
+     if isinstance(timestamp, Number):
+         return pd.Timestamp.fromtimestamp(float(timestamp), tz=dt.UTC)
+
+     if isinstance(timestamp, str | dt.datetime | np.datetime64):
+         try:
+             return pd.Timestamp(timestamp, tzinfo=dt.UTC)
+         except Exception as e:
+             logger.error(repr(e))
+             return timestamp
+
+     return timestamp
+
+
+ # =============================================================================
+ # SQL Utilities
+ # =============================================================================
+
+
+ def convert_to_sql_value(value: Integral | Real | str | dt.datetime | dt.date) -> str:
+     """
+     Convert Python value to SQL literal string.
+
+     Args:
+         value: Integer, float, string, datetime, or date
+
+     Returns:
+         SQL-safe string representation
+     """
+     if isinstance(value, Integral):
+         value = int(value)
+     elif isinstance(value, Real):
+         value = float(value)
+     elif isinstance(value, str):
+         value = f"'{value}'"
+     elif isinstance(value, dt.datetime):
+         value = value.isoformat(sep=" ")
+         value = f"'{value}'"
+     elif isinstance(value, dt.date):
+         value = value.isoformat()
+         value = f"'{value}'"
+     else:
+         raise ValueError(f"Not supported type={type(value)}")
+
+     return str(value)
+
+
+ def create_sql_in_filter(
+     col_name: str,
+     values: list[Integral | Real | str | dt.datetime | dt.date],
+     not_in: bool = False,
+ ) -> str:
+     """
+     Create SQL IN or NOT IN filter clause.
+
+     Args:
+         col_name: Column name
+         values: List of values
+         not_in: Use NOT IN instead of IN
+
+     Returns:
+         SQL filter string like "col IN (1, 2, 3)"
+     """
+     operator = "NOT IN" if not_in else "IN"
+     values_str = ", ".join([convert_to_sql_value(value) for value in values])
+     return f"{col_name} {operator} ({values_str})"
+
+
+ def create_sql_logical_filter(
+     filters: list[str],
+     operator: Literal["AND", "OR"],
+     inner_bracket: bool = False,
+     outer_bracket: bool = False,
+ ) -> str:
+     """
+     Combine multiple SQL filters with AND/OR.
+
+     Args:
+         filters: List of filter strings
+         operator: "AND" or "OR"
+         inner_bracket: Wrap each filter in parentheses
+         outer_bracket: Wrap result in parentheses
+
+     Returns:
+         Combined filter string
+     """
+     operator_sep = f" {operator} "
+     if inner_bracket:
+         filters = [f"({filter})" for filter in filters]
+     return f"({operator_sep.join(filters)})" if outer_bracket else operator_sep.join(filters)
+
+
+ # =============================================================================
+ # Module Utilities
+ # =============================================================================
+
+
+ def import_module_from_path(file_path: str, module_name: str):
+     """
+     Dynamically import a Python module from a file path.
+
+     Args:
+         file_path: Path to the Python file
+         module_name: Name to register the module as
+
+     Returns:
+         Imported module object
+     """
+     spec = importlib.util.spec_from_file_location(module_name, file_path)
+     logger.info(f"Loading `{spec.name}` from `{spec.origin}`")
+     module = importlib.util.module_from_spec(spec)
+     sys.modules[module_name] = module
+     spec.loader.exec_module(module)
+     return module
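
The hunk above adds investify_utils/helpers.py (also listed in the RECORD changes below). A short usage sketch of the SQL helpers, with the return values implied by the code above; the column and symbol names are made up for illustration. Note that convert_to_sql_value quotes strings without escaping, so these helpers assume trusted values rather than untrusted user input.

    from investify_utils.helpers import create_sql_in_filter, create_sql_logical_filter

    # Strings and dates are quoted by convert_to_sql_value, numbers are not.
    f1 = create_sql_in_filter("symbol", ["VNM", "FPT"])        # symbol IN ('VNM', 'FPT')
    f2 = create_sql_in_filter("price", [1, 2.5], not_in=True)  # price NOT IN (1, 2.5)

    where = create_sql_logical_filter([f1, f2], "AND", outer_bracket=True)
    # (symbol IN ('VNM', 'FPT') AND price NOT IN (1, 2.5))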
@@ -23,6 +23,7 @@ Usage:

  import asyncio
  import logging
+ from typing import Callable

  from confluent_kafka import KafkaException, SerializingProducer
  from confluent_kafka.schema_registry import SchemaRegistryClient, record_subject_name_strategy
@@ -100,7 +101,7 @@ class AsyncAvroProducer:
          self,
          value: dict,
          key: str | None = None,
-         on_delivery: callable | None = None,
+         on_delivery: Callable | None = None,
      ) -> asyncio.Future:
          """
          Produce a message asynchronously.
@@ -23,6 +23,7 @@ Usage:

  import logging
  import threading
+ from typing import Callable

  from confluent_kafka import SerializingProducer
  from confluent_kafka.schema_registry import SchemaRegistryClient, record_subject_name_strategy
@@ -103,7 +104,7 @@ class AvroProducer:
          self,
          value: dict,
          key: str | None = None,
-         on_delivery: callable | None = None,
+         on_delivery: Callable | None = None,
      ) -> None:
          """
          Produce a message to Kafka.
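
The four hunks above touch investify_utils/kafka/async_producer.py and investify_utils/kafka/sync_producer.py: they import Callable from typing and use it, instead of the built-in callable, to annotate the on_delivery parameter. A minimal sketch of such a callback, assuming the wrappers forward on_delivery to the underlying confluent_kafka SerializingProducer, whose delivery callbacks receive (err, msg); the producer construction is left commented out because its arguments are not part of this diff.

    import logging

    logger = logging.getLogger(__name__)


    def log_delivery(err, msg) -> None:
        """Delivery callback in the (err, msg) form used by confluent-kafka."""
        if err is not None:
            logger.error("delivery failed: %s", err)
        else:
            logger.info("delivered to %s [%s] @ %s", msg.topic(), msg.partition(), msg.offset())


    # Hypothetical usage; the AvroProducer constructor is not shown in this diff:
    # producer = AvroProducer(...)
    # producer.produce(value={"symbol": "VNM", "price": 86_500}, key="VNM", on_delivery=log_delivery)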
@@ -0,0 +1,81 @@
+ """
+ Logging utilities for Investify services.
+
+ Usage:
+     from investify_utils.logging import setup_logging
+
+     setup_logging()
+     logger = logging.getLogger(__name__)
+ """
+
+ import logging
+ import os
+ from enum import IntEnum, auto
+ from logging.handlers import RotatingFileHandler
+
+ old_factory = logging.getLogRecordFactory()
+ default_logging_fmt = "%(asctime)s - %(origin)-30s - %(levelname)s - %(message)s"
+
+
+ class TextColor(IntEnum):
+     """ANSI text colors for terminal output."""
+
+     BLACK = 0
+     RED = auto()
+     GREEN = auto()
+     YELLOW = auto()
+     BLUE = auto()
+     MAGENTA = auto()
+     CYAN = auto()
+     WHITE = auto()
+
+     @staticmethod
+     def colorize(text: str, color: "TextColor") -> str:
+         """Wrap text with ANSI color codes."""
+         return f"\033[0;{30 + color}m{text}\033[0m"
+
+
+ def record_factory(*args, **kwargs):
+     """Custom log record factory that adds origin (filename:lineno)."""
+     record = old_factory(*args, **kwargs)
+     record.origin = f"{record.filename}:{record.lineno}"
+     return record
+
+
+ def setup_logging(level=logging.INFO, logging_fmt=default_logging_fmt):
+     """
+     Configure logging with origin field (filename:lineno).
+
+     Args:
+         level: Logging level (default: INFO)
+         logging_fmt: Log format string
+     """
+     logging.setLogRecordFactory(record_factory)
+     logging.basicConfig(format=logging_fmt, level=level)
+
+
+ def setup_file_logging(
+     filename: str,
+     level=logging.INFO,
+     max_megabytes: int = 1,
+     backup_count: int = 3,
+     logging_fmt: str = default_logging_fmt,
+ ):
+     """
+     Configure rotating file logging.
+
+     Args:
+         filename: Log file path
+         level: Logging level
+         max_megabytes: Max file size before rotation
+         backup_count: Number of backup files to keep
+         logging_fmt: Log format string
+     """
+     filepath, _ = os.path.split(filename)
+     if filepath and not os.path.isdir(filepath):
+         os.makedirs(filepath)
+
+     max_log_size = int(max_megabytes * 1024 * 1024)
+     handler = RotatingFileHandler(filename=filename, maxBytes=max_log_size, backupCount=backup_count)
+     logging.setLogRecordFactory(record_factory)
+     logging.basicConfig(format=logging_fmt, level=level, handlers=[handler])
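
The hunk above adds investify_utils/logging.py, which installs a custom record factory so every log line carries an origin column (filename:lineno). A small usage sketch, assuming the module is importable as shown in its docstring; the example log line is illustrative of the default format, not captured output.

    import logging

    from investify_utils.logging import TextColor, setup_file_logging, setup_logging

    # Console logging with the origin column (filename:lineno).
    setup_logging(level=logging.DEBUG)
    logger = logging.getLogger(__name__)
    logger.warning(TextColor.colorize("low disk space", TextColor.YELLOW))
    # e.g. 2024-01-01 12:00:00,000 - example.py:9      - WARNING - low disk space

    # Alternative to setup_logging (both call logging.basicConfig, so pick one):
    # rotate a file at ~5 MB, keep the default 3 backups; parent dirs are created if missing.
    # setup_file_logging("logs/app.log", level=logging.INFO, max_megabytes=5)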
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: investify-utils
- Version: 2.0.0a5
+ Version: 2.0.0a7
  Summary: Shared utilities for Investify services
  Author-Email: Investify <dev@investify.vn>
  License: MIT
@@ -21,9 +21,12 @@ Requires-Dist: pandas>=2.0; extra == "postgres-async"
  Requires-Dist: sqlalchemy>=2.0; extra == "postgres-async"
  Requires-Dist: asyncpg>=0.29; extra == "postgres-async"
  Provides-Extra: kafka
- Requires-Dist: confluent-kafka>=2.0; extra == "kafka"
+ Requires-Dist: confluent-kafka[avro,schemaregistry]>=2.0; extra == "kafka"
  Provides-Extra: s3
  Requires-Dist: boto3>=1.34; extra == "s3"
+ Provides-Extra: helpers
+ Requires-Dist: pandas>=2.0; extra == "helpers"
+ Requires-Dist: numpy>=2.0; extra == "helpers"
  Provides-Extra: dev
  Requires-Dist: pytest; extra == "dev"
  Requires-Dist: pytest-asyncio; extra == "dev"
@@ -1,15 +1,17 @@
- investify_utils-2.0.0a5.dist-info/METADATA,sha256=Qhf1xCy0cR3oxo-WSMT7EeBEeE2v3eCF-kWBvPmQpbA,3529
- investify_utils-2.0.0a5.dist-info/WHEEL,sha256=tsUv_t7BDeJeRHaSrczbGeuK-TtDpGsWi_JfpzD255I,90
- investify_utils-2.0.0a5.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
- investify_utils/__init__.py,sha256=-Gn2EAJfZ5BRlh7DMSSSvOeDK6JTJq9LWTEekieh3WY,427
+ investify_utils-2.0.0a7.dist-info/METADATA,sha256=y2y5IOpjYRfkk7nzVO9JyE2GWr2L5czWO4nz4Ex5jVc,3667
+ investify_utils-2.0.0a7.dist-info/WHEEL,sha256=tsUv_t7BDeJeRHaSrczbGeuK-TtDpGsWi_JfpzD255I,90
+ investify_utils-2.0.0a7.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
+ investify_utils/__init__.py,sha256=SaM7w3xoHoOo1ii7i4Ii_1Fq8yb_UU_I5JSNlqMcMuo,918
+ investify_utils/helpers.py,sha256=1l7nv-P8m-vHQGhjTAJMi-pkvQb8OPzGDIn1KQ499dE,4246
  investify_utils/kafka/__init__.py,sha256=9PcV_2IFn8PxbggVmoSJlLR9gKrmd_TC2ylsnW_yv1U,1242
  investify_utils/kafka/async_consumer.py,sha256=yivm8dgCACfFFbG9uxA7fsoRrWfCEtPHCyZi2qkD0Qk,6631
- investify_utils/kafka/async_producer.py,sha256=zayeh4jZD2cox-CypIKnRuVZbAC0U3xkjGw0kC6UiNM,4724
+ investify_utils/kafka/async_producer.py,sha256=HtQ5SaL5ShJf7RetO5zItfg9EBFcPj1y18i_V0p-vFg,4752
  investify_utils/kafka/sync_consumer.py,sha256=NJu9tQ5MrqL7-0Cvtt9Gmq-Qro_O4VVFP85qIKMf_ZM,6305
- investify_utils/kafka/sync_producer.py,sha256=hWB2vkPtHMlewA4nmfDOY6xEbDp3ZxvhwoxKJ1eYJNc,4470
+ investify_utils/kafka/sync_producer.py,sha256=9EyhKZNCgmBgZw50gwSfSRUTnfPnCq520Mh9MHnGVlI,4498
+ investify_utils/logging.py,sha256=SyqlpyRd0o_FuO29eB7-5-Fo0mfLRVZP_YHjXcQgjiE,2223
  investify_utils/postgres/__init__.py,sha256=j4CfUw7U58vWstmxaKQuPkLVbKkOioC4Bc7_knllL_Y,737
  investify_utils/postgres/async_client.py,sha256=M3F7-AsBJ43WWhfknnvTK9BeiYAyO0R6n-XY4DOnyFA,3168
  investify_utils/postgres/sync_client.py,sha256=1mozgrNGUUKCR2ETAr9G9dzvW8uG_TmSqcbA63tRpM8,6507
  investify_utils/s3/__init__.py,sha256=0YX-efJTP38Q5XMCyr7u-fXMjCJXkAR7dG817quTns8,399
  investify_utils/s3/sync_client.py,sha256=fj6ejhAu06BUBRe2pnceKaNGhbPM79Xf47geL0DB-i0,6771
- investify_utils-2.0.0a5.dist-info/RECORD,,
+ investify_utils-2.0.0a7.dist-info/RECORD,,