commons-metrics 0.0.19__tar.gz → 0.0.20__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29)
  1. {commons_metrics-0.0.19 → commons_metrics-0.0.20}/PKG-INFO +1 -1
  2. {commons_metrics-0.0.19 → commons_metrics-0.0.20}/commons_metrics/__init__.py +8 -2
  3. commons_metrics-0.0.20/commons_metrics/cache_manager.py +74 -0
  4. commons_metrics-0.0.20/commons_metrics/commons_repos_client.py +118 -0
  5. {commons_metrics-0.0.19 → commons_metrics-0.0.20}/commons_metrics/database.py +17 -0
  6. commons_metrics-0.0.20/commons_metrics/date_utils.py +70 -0
  7. commons_metrics-0.0.20/commons_metrics/text_simplifier.py +53 -0
  8. commons_metrics-0.0.20/commons_metrics/variable_finder.py +191 -0
  9. {commons_metrics-0.0.19 → commons_metrics-0.0.20}/commons_metrics.egg-info/PKG-INFO +1 -1
  10. {commons_metrics-0.0.19 → commons_metrics-0.0.20}/commons_metrics.egg-info/SOURCES.txt +0 -1
  11. {commons_metrics-0.0.19 → commons_metrics-0.0.20}/setup.py +1 -1
  12. commons_metrics-0.0.19/commons_metrics/cache_manager.py +0 -71
  13. commons_metrics-0.0.19/commons_metrics/commons_repos_client.py +0 -114
  14. commons_metrics-0.0.19/commons_metrics/connection_database.py +0 -18
  15. commons_metrics-0.0.19/commons_metrics/date_utils.py +0 -66
  16. commons_metrics-0.0.19/commons_metrics/text_simplifier.py +0 -48
  17. commons_metrics-0.0.19/commons_metrics/variable_finder.py +0 -187
  18. {commons_metrics-0.0.19 → commons_metrics-0.0.20}/LICENSE +0 -0
  19. {commons_metrics-0.0.19 → commons_metrics-0.0.20}/README.md +0 -0
  20. {commons_metrics-0.0.19 → commons_metrics-0.0.20}/commons_metrics/azure_devops_client.py +0 -0
  21. {commons_metrics-0.0.19 → commons_metrics-0.0.20}/commons_metrics/github_api_client.py +0 -0
  22. {commons_metrics-0.0.19 → commons_metrics-0.0.20}/commons_metrics/repositories.py +0 -0
  23. {commons_metrics-0.0.19 → commons_metrics-0.0.20}/commons_metrics/s3_file_manager.py +0 -0
  24. {commons_metrics-0.0.19 → commons_metrics-0.0.20}/commons_metrics/update_design_components.py +0 -0
  25. {commons_metrics-0.0.19 → commons_metrics-0.0.20}/commons_metrics/util.py +0 -0
  26. {commons_metrics-0.0.19 → commons_metrics-0.0.20}/commons_metrics.egg-info/dependency_links.txt +0 -0
  27. {commons_metrics-0.0.19 → commons_metrics-0.0.20}/commons_metrics.egg-info/requires.txt +0 -0
  28. {commons_metrics-0.0.19 → commons_metrics-0.0.20}/commons_metrics.egg-info/top_level.txt +0 -0
  29. {commons_metrics-0.0.19 → commons_metrics-0.0.20}/setup.cfg +0 -0
{commons_metrics-0.0.19 → commons_metrics-0.0.20}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: commons_metrics
- Version: 0.0.19
+ Version: 0.0.20
  Summary: A simple library for basic statistical calculations
  Author: Bancolombia
  Author-email: omar.david.pino@email.com
{commons_metrics-0.0.19 → commons_metrics-0.0.20}/commons_metrics/__init__.py
@@ -4,6 +4,12 @@ from .repositories import ComponentRepository
  from .update_design_components import UpdateDesignSystemComponents
  from .github_api_client import GitHubAPIClient
  from .azure_devops_client import AzureDevOpsClient
+ from .s3_file_manager import S3FileManager
+ from .cache_manager import CacheManager
+ from .commons_repos_client import CommonsReposClient
+ from .date_utils import DateUtils
+ from .text_simplifier import TextSimplifier
+ from .variable_finder import VariableFinder

- __all__ = ['Util', 'DatabaseConnection', 'ComponentRepository', 'UpdateDesignSystemComponents', 'GitHubAPIClient', 'AzureDevOpsClient']
- __version__ = '0.0.19'
+ __all__ = ['Util', 'DatabaseConnection', 'ComponentRepository', 'UpdateDesignSystemComponents', 'GitHubAPIClient', 'AzureDevOpsClient', 'S3FileManager', 'CacheManager', 'CommonsReposClient', 'DateUtils', 'TextSimplifier', 'VariableFinder']
+ __version__ = '0.0.20'
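For context, these exports make the new utilities importable from the package root; a minimal sketch of the 0.0.20 import style (the 0.0.19 equivalents were module-level functions in separate modules):

from commons_metrics import (
    CacheManager,
    CommonsReposClient,
    DateUtils,
    S3FileManager,
    TextSimplifier,
    VariableFinder,
)

# Helpers that were plain functions in 0.0.19 are now reached through these classes,
# e.g. DateUtils.parse_datetime(...) instead of a module-level parse_datetime(...).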
commons_metrics-0.0.20/commons_metrics/cache_manager.py
@@ -0,0 +1,74 @@
+ import requests
+ import urllib3
+
+ from .s3_file_manager import S3FileManager
+
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+
+
+ class CacheManager:
+     """Manager for caching data in S3 and making API requests"""
+
+     def __init__(self):
+         self.s3 = S3FileManager()
+
+     def load_cache_or_fetch(self, path: str, fetch_fn, clear_cache: bool = False):
+         """
+         Load data from a JSON cache stored in S3 using S3FileManager,
+         or fetch the data and update the cache.
+
+         Args:
+             path (str): Cache key relative to "cache/", e.g. "logs/123.json"
+             fetch_fn (Callable): Function returning JSON-serializable data
+             clear_cache (bool): If True, delete existing cache before fetching
+         Returns:
+             Any: Cached or freshly fetched data
+         """
+         key = f"cache/{path}"
+         if clear_cache:
+             try:
+                 self.s3.s3.delete_object(Bucket=self.s3.bucket, Key=key)
+             except Exception:
+                 pass
+         data = self.s3.load_json(key)
+
+         if data:
+             return data
+
+         data = fetch_fn()
+         self.s3.save_json(data, key)
+
+         return data
+
+     def get_data_from_api(self, url: str, headers: dict, auth_api) -> list:
+         """
+         Sends a GET request to the specified API and returns the JSON response if successful.
+
+         Args:
+             url (str): The API endpoint URL.
+             headers (dict): HTTP headers for the request.
+             auth_api: Authentication object for the request.
+         Returns:
+             list or dict: JSON response from the API if successful, otherwise an empty list.
+         """
+         response = requests.get(url, headers=headers, auth=auth_api, verify=False)
+         if response.ok:
+             return response.json()
+         return []
+
+     def post_data_to_api(self, url: str, headers: dict, body: dict, auth_api) -> list:
+         """
+         Sends a POST request to the specified API with the given body and returns the JSON response if successful.
+
+         Args:
+             url (str): The API endpoint URL.
+             headers (dict): HTTP headers for the request.
+             body (dict): Data to send in the POST request.
+             auth_api: Authentication object for the request.
+         Returns:
+             list or dict: JSON response from the API if successful, otherwise an empty list.
+         """
+         response = requests.post(url, headers=headers, auth=auth_api, data=body, verify=False)
+         if response.ok:
+             return response.json()
+         return []
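A minimal usage sketch for the new class-based CacheManager, assuming S3FileManager is already configured with a bucket and credentials; the fetch function and cache key below are hypothetical:

from commons_metrics import CacheManager

cache = CacheManager()

def fetch_build_logs():
    # Hypothetical fetcher; must return JSON-serializable data.
    return [{"id": 123, "status": "ok"}]

# Returns cache/logs/123.json from S3 if present; otherwise calls the fetcher
# and writes the result back to the cache before returning it.
data = cache.load_cache_or_fetch("logs/123.json", fetch_build_logs)

# Passing clear_cache=True deletes the cached object first and forces a refetch.
fresh = cache.load_cache_or_fetch("logs/123.json", fetch_build_logs, clear_cache=True)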
commons_metrics-0.0.20/commons_metrics/commons_repos_client.py
@@ -0,0 +1,118 @@
+ """
+ Module with functions shared between repository clients (GitHub, Azure DevOps)
+ """
+ import re
+ import json
+ from typing import Optional, Dict
+
+
+ class CommonsReposClient:
+     """Shared utilities for repository clients"""
+
+     @staticmethod
+     def extract_package_version_from_pubspec(content: str, package_name: str) -> Optional[Dict]:
+         """
+         Extracts a package's version from the contents of pubspec.yaml
+
+         Args:
+             content: Contents of the pubspec.yaml file
+             package_name: Name of the package to look for (e.g. 'bds_mobile')
+
+         Returns:
+             Dictionary with the full version and major version, or None
+         """
+         if not content:
+             return None
+
+         # Patterns for different version formats (possibly preceded by whitespace)
+         # IMPORTANT: must be inside the dependencies: section to avoid false positives
+         simple_patterns = [
+             # Simple format: bds_mobile: ^8.127.0 (on the same line)
+             rf'^\s*{package_name}\s*:\s*\^?([0-9]+\.[0-9]+\.[0-9]+)',
+             # Format with >= or ~
+             rf'^\s*{package_name}\s*:\s*>=([0-9]+\.[0-9]+\.[0-9]+)',
+             rf'^\s*{package_name}\s*:\s*~>([0-9]+\.[0-9]+\.[0-9]+)',
+         ]
+
+         for pattern in simple_patterns:
+             match = re.search(pattern, content, re.MULTILINE)
+             if match:
+                 version = match.group(1)
+                 major_version = version.split('.')[0]
+                 return {
+                     'full_version': version,
+                     'major_version': major_version
+                 }
+
+         # Hosted format with Artifactory (multiple variants)
+         hosted_patterns = [
+             # Pattern 1: simple hosted (single line)
+             rf'^(\s*){package_name}\s*:\s*$\s*\1\s+hosted:\s*https?://.*?$\s*\1\s+version:\s*["\']?\^?([0-9]+\.[0-9]+\.[0-9]+)',
+             # Pattern 2: hosted with name/url (multiline)
+             rf'^(\s*){package_name}\s*:\s*$\s*\1\s+hosted:\s*$.*?\s*\1\s+version:\s*["\']?\^?([0-9]+\.[0-9]+\.[0-9]+)',
+         ]
+
+         for hosted_pattern in hosted_patterns:
+             hosted_match = re.search(hosted_pattern, content, re.MULTILINE | re.DOTALL)
+             if hosted_match:
+                 # The last group is always the version
+                 version = hosted_match.groups()[-1]
+                 major_version = version.split('.')[0]
+                 return {
+                     'full_version': version,
+                     'major_version': major_version
+                 }
+
+         return None
+
+     @staticmethod
+     def extract_package_version_from_package_json(content: str, package_name: str) -> Optional[Dict]:
+         """
+         Extracts a package's version from the contents of package.json
+
+         Args:
+             content: Contents of the package.json file
+             package_name: Name of the package to look for (e.g. '@bancolombia/design-system-web')
+
+         Returns:
+             Dictionary with the full version and major version, or None
+         """
+         if not content:
+             return None
+
+         try:
+             # Try to parse as JSON
+             package_data = json.loads(content)
+
+             # Search dependencies and devDependencies
+             for dep_key in ['dependencies', 'devDependencies']:
+                 if dep_key in package_data:
+                     deps = package_data[dep_key]
+                     if package_name in deps:
+                         version_str = deps[package_name]
+
+                         # Extract the semantic version (strip ^, ~, >=, etc.)
+                         version_match = re.search(r'([0-9]+\.[0-9]+\.[0-9]+)', version_str)
+                         if version_match:
+                             version = version_match.group(1)
+                             major_version = version.split('.')[0]
+                             return {
+                                 'full_version': version,
+                                 'major_version': major_version
+                             }
+         except json.JSONDecodeError:
+             # If JSON parsing fails, fall back to regex
+             pass
+
+         # Fallback: search with a regex
+         pattern = rf'"{re.escape(package_name)}"\s*:\s*"[\^~>=<]*([0-9]+\.[0-9]+\.[0-9]+)'
+         match = re.search(pattern, content)
+         if match:
+             version = match.group(1)
+             major_version = version.split('.')[0]
+             return {
+                 'full_version': version,
+                 'major_version': major_version
+             }
+
+         return None
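A short sketch of the two static extractors; the manifest snippets and version numbers are illustrative, while the package names come from the docstrings above:

from commons_metrics import CommonsReposClient

pubspec = """
dependencies:
  bds_mobile: ^8.127.0
"""
print(CommonsReposClient.extract_package_version_from_pubspec(pubspec, "bds_mobile"))
# -> {'full_version': '8.127.0', 'major_version': '8'}

package_json = '{"dependencies": {"@bancolombia/design-system-web": "~19.2.1"}}'
print(CommonsReposClient.extract_package_version_from_package_json(
    package_json, "@bancolombia/design-system-web"))
# -> {'full_version': '19.2.1', 'major_version': '19'}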
{commons_metrics-0.0.19 → commons_metrics-0.0.20}/commons_metrics/database.py
@@ -1,4 +1,6 @@
  import psycopg2
+ from .repositories import ComponentRepository
+ from .util import Util

  class DatabaseConnection:
      """
@@ -37,3 +39,18 @@ class DatabaseConnection:
          """Rolls back pending transactions in case of error"""
          if self.connection and not self.connection.closed:
              self.connection.rollback()
+
+     def get_connection_database_from_secret(secret_name: str, logger: str, aws_region: str) -> ComponentRepository:
+         """
+         Retrieve connection database from AWS secrets manager
+         """
+         secret_json = Util.get_secret_aws(secret_name, logger, aws_region)
+         db_connection = DatabaseConnection()
+         db_connection.connect({
+             'host': secret_json["host"],
+             'port': secret_json["port"],
+             'dbname': secret_json["dbname"],
+             'username': secret_json["username"],
+             'password': secret_json["password"]
+         })
+         return ComponentRepository(db_connection)
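A hedged sketch of the relocated helper (it replaces the deleted connection_database.py module). As added it takes no self parameter, so it is shown being called on the class; the secret name, region, and logger are placeholders, and the AWS secret is assumed to expose host, port, dbname, username, and password keys:

import logging

from commons_metrics import DatabaseConnection

logger = logging.getLogger("commons_metrics")

# Placeholder secret name and region.
component_repo = DatabaseConnection.get_connection_database_from_secret(
    "metrics/db-credentials", logger, "us-east-1"
)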
commons_metrics-0.0.20/commons_metrics/date_utils.py
@@ -0,0 +1,70 @@
+ from datetime import datetime
+ from typing import List, Dict
+
+ from dateutil import parser
+
+
+ class DateUtils:
+     """Utilities for date handling and conversions"""
+
+     @staticmethod
+     def parse_datetime(date_str: str) -> datetime:
+         """
+         Converts a string in ISO 8601 format to a datetime object.
+         If the string is empty or None, it returns the current date and time with the time zone.
+
+         Args:
+             date_str(str): Date in ISO 8601 format (e.g., "2025-12-02T10:00:00+00:00").
+         Returns:
+             datetime: Datetime object corresponding to the string, or the current date if empty.
+         """
+         if not date_str:
+             return datetime.now().astimezone()
+         return parser.isoparse(date_str)
+
+     @staticmethod
+     def get_hours_difference_from_strings(start_date: str, end_date: str) -> float:
+         """
+         Calculate the difference in hours between two dates given as ISO 8601 strings.
+
+         Args:
+             start_date(str): Start date in ISO 8601 format.
+             end_date(str): End date in ISO 8601 format.
+         Returns:
+             float: Difference in hours (can be negative if end_date < start_date).
+         """
+         start = DateUtils.parse_datetime(start_date)
+         end = DateUtils.parse_datetime(end_date)
+         return DateUtils.get_hours_difference(start, end)
+
+     @staticmethod
+     def get_hours_difference(start_date: datetime, end_date: datetime) -> float:
+         """
+         Calculate the difference in hours between two datetime objects.
+
+         Args:
+             start_date(datetime): Start date.
+             end_date(datetime): End date.
+         Returns:
+             float: Difference in hours (can be negative if end_date < start_date).
+         """
+         return (end_date - start_date).total_seconds() / 3600
+
+     @staticmethod
+     def sort_by_date(list_dicts: List[Dict], date_attribute_name: str) -> List[Dict]:
+         """
+         Sorts a list of dictionaries by a specified date attribute in descending order.
+         The method uses the `parse_datetime` function to convert date strings into datetime objects
+         for accurate sorting.
+
+         Args:
+             list_dicts (List[Dict]): A list of dictionaries containing date attributes.
+             date_attribute_name (str): The key name of the date attribute in each dictionary.
+         Returns:
+             List[Dict]: A new list of dictionaries sorted by the given date attribute in descending order.
+         """
+         return sorted(
+             list_dicts,
+             key=lambda x: DateUtils.parse_datetime(x.get(date_attribute_name)),
+             reverse=True
+         )
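A brief sketch of the static date helpers; the timestamps and dictionaries are illustrative:

from commons_metrics import DateUtils

hours = DateUtils.get_hours_difference_from_strings(
    "2025-12-01T10:00:00+00:00", "2025-12-02T16:30:00+00:00"
)
# hours == 30.5

events = [
    {"created_at": "2025-12-01T10:00:00+00:00"},
    {"created_at": "2025-12-02T10:00:00+00:00"},
]
# Most recent event first.
latest_first = DateUtils.sort_by_date(events, "created_at")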
commons_metrics-0.0.20/commons_metrics/text_simplifier.py
@@ -0,0 +1,53 @@
+ import html
+ import json
+ import re
+
+
+ class TextSimplifier:
+     """Utilities for cleaning and simplifying text"""
+
+     @staticmethod
+     def compact_data(data: dict) -> str:
+         """
+         Removes keys with empty values (None, "", [], {}) from a dictionary
+         and returns a compact JSON string without unnecessary spaces.
+
+         Args:
+             data(dict): Original dictionary.
+         Returns:
+             str: Compact JSON representation of the filtered dictionary.
+         """
+         data = {k: v for k, v in data.items() if v not in (None, "", [], {})}
+         return json.dumps(data, separators=(",", ":"))
+
+     @staticmethod
+     def clear_text(text: str) -> str:
+         """
+         Cleans text by removing HTML tags, Markdown links,
+         decoding HTML characters, removing escaped quotes, and
+         normalizing spaces.
+
+         Args:
+             text (str): Original text.
+         Returns:
+             str: Cleaned and simplified text.
+         """
+         if not text:
+             return ""
+
+         # Remove HTML tags
+         text = re.sub(r'<[^>]+>', '', text)
+
+         # Replace Markdown links [Text](URL) with just the link text
+         text = re.sub(r'\[([^\]]+)\]\([^)]+\)', r'\1', text)
+
+         # Decode HTML characters (&nbsp;, &lt;, etc.)
+         text = html.unescape(text)
+
+         # Remove escaped quotes
+         text = text.replace('\\"', '"').replace("\\'", "'")
+
+         # Remove line breaks, tabs, and multiple spaces
+         text = re.sub(r'\s+', ' ', text).strip()
+
+         return text
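A brief sketch of the text helpers; the input strings are illustrative:

from commons_metrics import TextSimplifier

raw = '<p>See the [docs](https://example.com) &amp; more</p>\n'
print(TextSimplifier.clear_text(raw))
# -> 'See the docs & more'

print(TextSimplifier.compact_data({"a": 1, "b": "", "c": None, "d": []}))
# -> '{"a":1}'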
commons_metrics-0.0.20/commons_metrics/variable_finder.py
@@ -0,0 +1,191 @@
+ import re
+
+ from typing import Optional
+
+
+ class VariableFinder:
+     """Utilities for extracting and looking up variables and codes in text and JSON"""
+
+     @staticmethod
+     def extract_issue_number(pr_body: str):
+         """
+         Extracts an issue number from a pull request body text.
+         Looks for a pattern like '#123' preceded by whitespace.
+
+         Args:
+             pr_body (str): The pull request body text.
+         Returns:
+             Optional[int]: The extracted issue number as an integer, or None if not found.
+         """
+         match = re.search(r"\s+#(\d+)", pr_body or "", re.IGNORECASE)
+         return int(match.group(1)) if match else None
+
+     @staticmethod
+     def get_code(text: str) -> Optional[str]:
+         """
+         Extracts a code matching the pattern 'AW1234567' or 'NU1234567' from a string.
+         The code consists of two uppercase letters followed by seven digits.
+
+         Args:
+             text (str): The input string.
+         Returns:
+             Optional[str]: The extracted code or None if not found.
+         """
+         for tok in text.split('_'):
+             if re.fullmatch(r'[A-Z]{2}\d{7}', tok):
+                 return tok
+         return None
+
+     @staticmethod
+     def get_component_name(text: str) -> Optional[str]:
+         """
+         Extracts a component name from a string based on underscore-separated parts.
+         If the last part is 'dxp', returns the two preceding parts joined by underscore.
+         Otherwise, returns the last two parts.
+
+         Args:
+             text (str): The input string.
+         Returns:
+             Optional[str]: The component name or None if not enough parts.
+         """
+         parts = [p for p in text.strip('_').split('_') if p]
+         if len(parts) >= 2:
+             if parts[-1].lower() == "dxp":
+                 return f"{parts[-3]}_{parts[-2]}"
+             return '_'.join(parts[-2:])
+         return None
+
+     @staticmethod
+     def get_component_name_from_image(image: str, release_name: str) -> Optional[str]:
+         """
+         Extracts the component name from an image string.
+         If extraction fails, falls back to using release_name.
+
+         Args:
+             image (str): The image string (e.g., 'repo/component:tag').
+             release_name (str): The fallback release name.
+         Returns:
+             Optional[str]: The component name.
+         """
+         try:
+             tag = image.split('/')[-1]
+             repository_name = tag.split(':')[0]
+             return repository_name
+         except Exception:
+             return VariableFinder.get_component_name(release_name)
+
+     @staticmethod
+     def collect_all_variables(json_data, txt_variable_groups):
+         """
+         Collects all variables from a nested JSON structure.
+         Searches for keys named 'variables' and merges them into a single dictionary.
+
+         Args:
+             json_data (dict or list): The JSON data.
+             txt_variable_groups (str): The key name for variable groups.
+         Returns:
+             dict: A dictionary of all variables found.
+         """
+         all_variables = {}
+
+         def loop_through_json(data):
+             if isinstance(data, dict):
+                 for key, value in data.items():
+                     if key == 'variables':
+                         all_variables.update(value)
+                     elif key == txt_variable_groups:
+                         if isinstance(value, list):
+                             for group in value:
+                                 if isinstance(group, dict) and 'variables' in group:
+                                     all_variables.update(group['variables'])
+                     else:
+                         loop_through_json(value)
+             elif isinstance(data, list):
+                 for item in data:
+                     loop_through_json(item)
+
+         loop_through_json(json_data)
+         return all_variables
+
+     @staticmethod
+     def resolve_value(value: str, all_variables: dict, visited=None) -> str:
+         """
+         Resolves variable references in a string recursively.
+         Variables are referenced using the format $(VAR_NAME).
+
+         Args:
+             value (str): The string containing variable references.
+             all_variables (dict): Dictionary of variables and their values.
+             visited (set): Set of visited variables to detect cycles.
+         Returns:
+             str: The resolved string with all references replaced.
+         """
+         if visited is None:
+             visited = set()
+
+         pattern = re.compile(r'\$\(([^)]+)\)')
+         while True:
+             matches = pattern.findall(value)
+             if not matches:
+                 break
+             for match in matches:
+                 if match in visited:
+                     return f'$(CYCLE:{match})'
+                 visited.add(match)
+                 replacement = all_variables.get(match, {}).get('value', '')
+                 resolved = VariableFinder.resolve_value(replacement, all_variables, visited.copy())
+                 value = value.replace(f'$({match})', resolved)
+         return value
+
+     @staticmethod
+     def search_in_json(search_value: str, search_type: str, json_data, is_json_from_azure: bool = False) -> Optional[str]:
+         """
+         Searches for a variable in a nested JSON structure by key or value.
+         Resolves references if found.
+
+         Args:
+             search_value (str): The value to search for.
+             search_type (str): 'clave' to search by key, 'valor' to search by value.
+             json_data (dict or list): The JSON data.
+             is_json_from_azure (bool): Whether the JSON is from Azure (changes key names).
+         Returns:
+             Optional[str]: The resolved value if found, otherwise None.
+         """
+         txt_variable_groups = 'variableGroups' if is_json_from_azure else 'variable_groups'
+         search_value = search_value.lower()
+         all_variables = VariableFinder.collect_all_variables(json_data, txt_variable_groups)
+
+         result_search = all_variables.get(search_value, {}).get('value', '')
+         if result_search and '$(' not in result_search:
+             return result_search
+
+         def recursive_search(data):
+             if isinstance(data, dict):
+                 for key, value in data.items():
+                     if key in ['variables', txt_variable_groups]:
+                         if isinstance(value, dict):
+                             for var_key, var_value in value.items():
+                                 if search_type == 'clave' and search_value == var_key.lower():
+                                     return VariableFinder.resolve_value(var_value.get('value', ''), all_variables)
+                                 elif search_type == 'valor' and search_value == var_value.get('value', '').lower():
+                                     return VariableFinder.resolve_value(var_value.get('value', ''), all_variables)
+                         elif isinstance(value, list):
+                             for item in value:
+                                 if isinstance(item, dict) and 'variables' in item:
+                                     for var_key, var_value in item['variables'].items():
+                                         if search_type == 'clave' and search_value == var_key.lower():
+                                             return VariableFinder.resolve_value(var_value.get('value', ''), all_variables)
+                                         elif search_type == 'valor' and search_value == var_value.get('value', '').lower():
+                                             return VariableFinder.resolve_value(var_value.get('value', ''), all_variables)
+                     else:
+                         result = recursive_search(value)
+                         if result:
+                             return result
+             elif isinstance(data, list):
+                 for item in data:
+                     result = recursive_search(item)
+                     if result:
+                         return result
+             return None
+
+         return recursive_search(json_data)
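A sketch of the variable and code helpers; the inputs are illustrative, and the pipeline JSON mimics the 'variables' shape the class expects (each variable mapping to a dict with a 'value' key):

from commons_metrics import VariableFinder

print(VariableFinder.extract_issue_number("Fixes #123"))        # 123
print(VariableFinder.get_code("release_AW1234567_payments"))    # 'AW1234567'
print(VariableFinder.get_component_name("team_payments_api"))   # 'payments_api'

pipeline = {
    "variables": {
        "base_url": {"value": "https://example.com"},
        "endpoint": {"value": "$(base_url)/api"},
    }
}
# Search by key ('clave') and resolve $(...) references.
print(VariableFinder.search_in_json("endpoint", "clave", pipeline))
# -> 'https://example.com/api'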
{commons_metrics-0.0.19 → commons_metrics-0.0.20}/commons_metrics.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: commons_metrics
- Version: 0.0.19
+ Version: 0.0.20
  Summary: A simple library for basic statistical calculations
  Author: Bancolombia
  Author-email: omar.david.pino@email.com
{commons_metrics-0.0.19 → commons_metrics-0.0.20}/commons_metrics.egg-info/SOURCES.txt
@@ -5,7 +5,6 @@ commons_metrics/__init__.py
  commons_metrics/azure_devops_client.py
  commons_metrics/cache_manager.py
  commons_metrics/commons_repos_client.py
- commons_metrics/connection_database.py
  commons_metrics/database.py
  commons_metrics/date_utils.py
  commons_metrics/github_api_client.py
{commons_metrics-0.0.19 → commons_metrics-0.0.20}/setup.py
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages

  setup(
      name='commons_metrics',
-     version='0.0.19',
+     version='0.0.20',
      description='A simple library for basic statistical calculations',
      #long_description=open('USAGE.md').read(),
      #long_description_content_type='text/markdown',
commons_metrics-0.0.19/commons_metrics/cache_manager.py
@@ -1,71 +0,0 @@
- import requests
- import urllib3
-
- from lib.commons_metrics.commons_metrics.s3_file_manager import S3FileManager
-
- urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
-
-
- def load_cache_or_fetch(path: str, fetch_fn, clear_cache: bool = False):
-     """
-     Load data from a JSON cache stored in S3 using S3FileManager,
-     or fetch the data and update the cache.
-
-     Args:
-         path (str): Cache key relative to "cache/", e.g. "logs/123.json"
-         fetch_fn (Callable): Function returning JSON-serializable data
-         clear_cache (bool): If True, delete existing cache before fetching
-     Returns:
-         Any: Cached or freshly fetched data
-     """
-     s3 = S3FileManager()
-     key = f"cache/{path}"
-     if clear_cache:
-         try:
-             s3.s3.delete_object(Bucket=s3.bucket, Key=key)
-         except Exception:
-             pass
-     data = s3.load_json(key)
-
-     if data:
-         return data
-
-     data = fetch_fn()
-     s3.save_json(data, key)
-
-     return data
-
-
- def get_data_from_api(url: str, headers: dict, auth_api) -> list:
-     """
-     Sends a GET request to the specified API and returns the JSON response if successful.
-
-     Args:
-         url (str): The API endpoint URL.
-         headers (dict): HTTP headers for the request.
-         auth_api: Authentication object for the request.
-     Returns:
-         list or dict: JSON response from the API if successful, otherwise an empty list.
-     """
-     response = requests.get(url, headers=headers, auth=auth_api, verify=False)
-     if response.ok:
-         return response.json()
-     return []
-
-
- def post_data_to_api(url: str, headers: dict, body: dict, auth_api) -> list:
-     """
-     Sends a POST request to the specified API with the given body and returns the JSON response if successful.
-
-     Args:
-         url (str): The API endpoint URL.
-         headers (dict): HTTP headers for the request.
-         body (dict): Data to send in the POST request.
-         auth_api: Authentication object for the request.
-     Returns:
-         list or dict: JSON response from the API if successful, otherwise an empty list.
-     """
-     response = requests.post(url, headers=headers, auth=auth_api, data=body, verify=False)
-     if response.ok:
-         return response.json()
-     return []
commons_metrics-0.0.19/commons_metrics/commons_repos_client.py
@@ -1,114 +0,0 @@
- """
- Module with functions shared between repository clients (GitHub, Azure DevOps)
- """
- import re
- import json
- from typing import Optional, Dict
-
-
- def extract_package_version_from_pubspec(content: str, package_name: str) -> Optional[Dict]:
-     """
-     Extracts a package's version from the contents of pubspec.yaml
-
-     Args:
-         content: Contents of the pubspec.yaml file
-         package_name: Name of the package to look for (e.g. 'bds_mobile')
-
-     Returns:
-         Dictionary with the full version and major version, or None
-     """
-     if not content:
-         return None
-
-     # Patterns for different version formats (possibly preceded by whitespace)
-     # IMPORTANT: must be inside the dependencies: section to avoid false positives
-     simple_patterns = [
-         # Simple format: bds_mobile: ^8.127.0 (on the same line)
-         rf'^\s*{package_name}\s*:\s*\^?([0-9]+\.[0-9]+\.[0-9]+)',
-         # Format with >= or ~
-         rf'^\s*{package_name}\s*:\s*>=([0-9]+\.[0-9]+\.[0-9]+)',
-         rf'^\s*{package_name}\s*:\s*~>([0-9]+\.[0-9]+\.[0-9]+)',
-     ]
-
-     for pattern in simple_patterns:
-         match = re.search(pattern, content, re.MULTILINE)
-         if match:
-             version = match.group(1)
-             major_version = version.split('.')[0]
-             return {
-                 'full_version': version,
-                 'major_version': major_version
-             }
-
-     # Hosted format with Artifactory (multiple variants)
-     hosted_patterns = [
-         # Pattern 1: simple hosted (single line)
-         rf'^(\s*){package_name}\s*:\s*$\s*\1\s+hosted:\s*https?://.*?$\s*\1\s+version:\s*["\']?\^?([0-9]+\.[0-9]+\.[0-9]+)',
-         # Pattern 2: hosted with name/url (multiline)
-         rf'^(\s*){package_name}\s*:\s*$\s*\1\s+hosted:\s*$.*?\s*\1\s+version:\s*["\']?\^?([0-9]+\.[0-9]+\.[0-9]+)',
-     ]
-
-     for hosted_pattern in hosted_patterns:
-         hosted_match = re.search(hosted_pattern, content, re.MULTILINE | re.DOTALL)
-         if hosted_match:
-             # The last group is always the version
-             version = hosted_match.groups()[-1]
-             major_version = version.split('.')[0]
-             return {
-                 'full_version': version,
-                 'major_version': major_version
-             }
-
-     return None
-
-
- def extract_package_version_from_package_json(content: str, package_name: str) -> Optional[Dict]:
-     """
-     Extracts a package's version from the contents of package.json
-
-     Args:
-         content: Contents of the package.json file
-         package_name: Name of the package to look for (e.g. '@bancolombia/design-system-web')
-
-     Returns:
-         Dictionary with the full version and major version, or None
-     """
-     if not content:
-         return None
-
-     try:
-         # Try to parse as JSON
-         package_data = json.loads(content)
-
-         # Search dependencies and devDependencies
-         for dep_key in ['dependencies', 'devDependencies']:
-             if dep_key in package_data:
-                 deps = package_data[dep_key]
-                 if package_name in deps:
-                     version_str = deps[package_name]
-
-                     # Extract the semantic version (strip ^, ~, >=, etc.)
-                     version_match = re.search(r'([0-9]+\.[0-9]+\.[0-9]+)', version_str)
-                     if version_match:
-                         version = version_match.group(1)
-                         major_version = version.split('.')[0]
-                         return {
-                             'full_version': version,
-                             'major_version': major_version
-                         }
-     except json.JSONDecodeError:
-         # If JSON parsing fails, fall back to regex
-         pass
-
-     # Fallback: search with a regex
-     pattern = rf'"{re.escape(package_name)}"\s*:\s*"[\^~>=<]*([0-9]+\.[0-9]+\.[0-9]+)'
-     match = re.search(pattern, content)
-     if match:
-         version = match.group(1)
-         major_version = version.split('.')[0]
-         return {
-             'full_version': version,
-             'major_version': major_version
-         }
-
-     return None
commons_metrics-0.0.19/commons_metrics/connection_database.py
@@ -1,18 +0,0 @@
- from lib.commons_metrics.commons_metrics import DatabaseConnection, Util, ComponentRepository
-
-
- def get_connection_database_from_secret(secret_name: str, logger: str, aws_region: str) -> ComponentRepository:
-     """
-     Retrieve connection database from AWS secrets manager
-     """
-     secret_json = Util.get_secret_aws(secret_name, logger, aws_region)
-     db_connection = DatabaseConnection()
-     db_connection.connect({
-         'host': secret_json["host"],
-         'port': secret_json["port"],
-         'dbname': secret_json["dbname"],
-         'username': secret_json["username"],
-         'password': secret_json["password"]
-     })
-
-     return ComponentRepository(db_connection)
commons_metrics-0.0.19/commons_metrics/date_utils.py
@@ -1,66 +0,0 @@
- from datetime import datetime
- from typing import List, Dict
-
- from dateutil import parser
-
-
- def parse_datetime(date_str: str) -> datetime:
-     """
-     Converts a string in ISO 8601 format to a datetime object.
-     If the string is empty or None, it returns the current date and time with the time zone.
-
-     Args:
-         date_str(str): Date in ISO 8601 format (e.g., "2025-12-02T10:00:00+00:00").
-     Returns:
-         datetime: Datetime object corresponding to the string, or the current date if empty.
-     """
-     if not date_str:
-         return datetime.now().astimezone()
-     return parser.isoparse(date_str)
-
-
- def get_hours_difference_from_strings(start_date: str, end_date: str) -> float:
-     """
-     Calculate the difference in hours between two dates given as ISO 8601 strings.
-
-     Args:
-         start_date(str): Start date in ISO 8601 format.
-         end_date(str): End date in ISO 8601 format.
-     Returns:
-         float: Difference in hours (can be negative if end_date < start_date).
-     """
-     start = parse_datetime(start_date)
-     end = parse_datetime(end_date)
-     return get_hours_difference(start, end)
-
-
- def get_hours_difference(start_date: datetime, end_date: datetime) -> float:
-     """
-     Calculate the difference in hours between two datetime objects.
-
-     Args:
-         start_date(datetime): Start date.
-         end_date(datetime): End date.
-     Returns:
-         float: Difference in hours (can be negative if end_date < start_date).
-     """
-     return (end_date - start_date).total_seconds() / 3600
-
-
- def sort_by_date(list_dicts: List[Dict], date_attribute_name: str) -> List[Dict]:
-     """
-     Sorts a list of dictionaries by a specified date attribute in descending order.
-     The method uses the `parse_datetime` function to convert date strings into datetime objects
-     for accurate sorting.
-
-     Args:
-         list_dicts (List[Dict]): A list of dictionaries containing date attributes.
-         date_attribute_name (str): The key name of the date attribute in each dictionary.
-     Returns:
-         List[Dict]: A new list of dictionaries sorted by the given date attribute in descending order.
-     """
-     return sorted(
-         list_dicts,
-         key=lambda x: parse_datetime(x.get(date_attribute_name)),
-         reverse=True
-     )
commons_metrics-0.0.19/commons_metrics/text_simplifier.py
@@ -1,48 +0,0 @@
- import html
- import json
- import re
-
-
- def compact_data(data: dict) -> str:
-     """
-     Removes keys with empty values (None, "", [], {}) from a dictionary
-     and returns a compact JSON string without unnecessary spaces.
-
-     Args:
-         data(dict): Original dictionary.
-     Returns:
-         str: Compact JSON representation of the filtered dictionary.
-     """
-     data = {k: v for k, v in data.items() if v not in (None, "", [], {})}
-     return json.dumps(data, separators=(",", ":"))
-
- def clear_text(text: str) -> str:
-     """
-     Cleans text by removing HTML tags, Markdown links,
-     decoding HTML characters, removing escaped quotes, and
-     normalizing spaces.
-
-     Args:
-         text (str): Original text.
-     Returns:
-         str: Cleaned and simplified text.
-     """
-     if not text:
-         return ""
-
-     # Remove HTML tags
-     text = re.sub(r'<[^>]+>', '', text)
-
-     # Replace Markdown links [Text](URL) with just the link text
-     text = re.sub(r'\[([^\]]+)\]\([^)]+\)', r'\1', text)
-
-     # Decode HTML characters (&nbsp;, &lt;, etc.)
-     text = html.unescape(text)
-
-     # Remove escaped quotes
-     text = text.replace('\\"', '"').replace("\\'", "'")
-
-     # Remove line breaks, tabs, and multiple spaces
-     text = re.sub(r'\s+', ' ', text).strip()
-
-     return text
commons_metrics-0.0.19/commons_metrics/variable_finder.py
@@ -1,187 +0,0 @@
- import re
-
- from typing import Optional
-
-
- def extract_issue_number(pr_body: str):
-     """
-     Extracts an issue number from a pull request body text.
-     Looks for a pattern like '#123' preceded by whitespace.
-
-     Args:
-         pr_body (str): The pull request body text.
-     Returns:
-         Optional[int]: The extracted issue number as an integer, or None if not found.
-     """
-     match = re.search(r"\s+#(\d+)", pr_body or "", re.IGNORECASE)
-     return int(match.group(1)) if match else None
-
-
- def get_code(text: str) -> Optional[str]:
-     """
-     Extracts a code matching the pattern 'AW1234567' or 'NU1234567' from a string.
-     The code consists of two uppercase letters followed by seven digits.
-
-     Args:
-         text (str): The input string.
-     Returns:
-         Optional[str]: The extracted code or None if not found.
-     """
-     for tok in text.split('_'):
-         if re.fullmatch(r'[A-Z]{2}\d{7}', tok):
-             return tok
-     return None
-
-
- def get_component_name(text: str) -> Optional[str]:
-     """
-     Extracts a component name from a string based on underscore-separated parts.
-     If the last part is 'dxp', returns the two preceding parts joined by underscore.
-     Otherwise, returns the last two parts.
-
-     Args:
-         text (str): The input string.
-     Returns:
-         Optional[str]: The component name or None if not enough parts.
-     """
-     parts = [p for p in text.strip('_').split('_') if p]
-     if len(parts) >= 2:
-         if parts[-1].lower() == "dxp":
-             return f"{parts[-3]}_{parts[-2]}"
-         return '_'.join(parts[-2:])
-     return None
-
-
- def get_component_name_from_image(image: str, release_name: str) -> Optional[str]:
-     """
-     Extracts the component name from an image string.
-     If extraction fails, falls back to using release_name.
-
-     Args:
-         image (str): The image string (e.g., 'repo/component:tag').
-         release_name (str): The fallback release name.
-     Returns:
-         Optional[str]: The component name.
-     """
-     try:
-         tag = image.split('/')[-1]
-         repository_name = tag.split(':')[0]
-         return repository_name
-     except Exception:
-         return get_component_name(release_name)
-
-
- def collect_all_variables(json_data, txt_variable_groups):
-     """
-     Collects all variables from a nested JSON structure.
-     Searches for keys named 'variables' and merges them into a single dictionary.
-
-     Args:
-         json_data (dict or list): The JSON data.
-         txt_variable_groups (str): The key name for variable groups.
-     Returns:
-         dict: A dictionary of all variables found.
-     """
-     all_variables = {}
-
-     def loop_through_json(data):
-         if isinstance(data, dict):
-             for key, value in data.items():
-                 if key == 'variables':
-                     all_variables.update(value)
-                 elif key == txt_variable_groups:
-                     if isinstance(value, list):
-                         for group in value:
-                             if isinstance(group, dict) and 'variables' in group:
-                                 all_variables.update(group['variables'])
-                 else:
-                     loop_through_json(value)
-         elif isinstance(data, list):
-             for item in data:
-                 loop_through_json(item)
-
-     loop_through_json(json_data)
-     return all_variables
-
-
- def resolve_value(value: str, all_variables: dict, visited=None) -> str:
-     """
-     Resolves variable references in a string recursively.
-     Variables are referenced using the format $(VAR_NAME).
-
-     Args:
-         value (str): The string containing variable references.
-         all_variables (dict): Dictionary of variables and their values.
-         visited (set): Set of visited variables to detect cycles.
-     Returns:
-         str: The resolved string with all references replaced.
-     """
-     if visited is None:
-         visited = set()
-
-     pattern = re.compile(r'\$\(([^)]+)\)')
-     while True:
-         matches = pattern.findall(value)
-         if not matches:
-             break
-         for match in matches:
-             if match in visited:
-                 return f'$(CYCLE:{match})'
-             visited.add(match)
-             replacement = all_variables.get(match, {}).get('value', '')
-             resolved = resolve_value(replacement, all_variables, visited.copy())
-             value = value.replace(f'$({match})', resolved)
-     return value
-
-
- def search_in_json(search_value: str, search_type: str, json_data, is_json_from_azure: bool = False) -> Optional[str]:
-     """
-     Searches for a variable in a nested JSON structure by key or value.
-     Resolves references if found.
-
-     Args:
-         search_value (str): The value to search for.
-         search_type (str): 'clave' to search by key, 'valor' to search by value.
-         json_data (dict or list): The JSON data.
-         is_json_from_azure (bool): Whether the JSON is from Azure (changes key names).
-     Returns:
-         Optional[str]: The resolved value if found, otherwise None.
-     """
-     txt_variable_groups = 'variableGroups' if is_json_from_azure else 'variable_groups'
-     search_value = search_value.lower()
-     all_variables = collect_all_variables(json_data, txt_variable_groups)
-
-     result_search = all_variables.get(search_value, {}).get('value', '')
-     if result_search and '$(' not in result_search:
-         return result_search
-
-     def recursive_search(data):
-         if isinstance(data, dict):
-             for key, value in data.items():
-                 if key in ['variables', txt_variable_groups]:
-                     if isinstance(value, dict):
-                         for var_key, var_value in value.items():
-                             if search_type == 'clave' and search_value == var_key.lower():
-                                 return resolve_value(var_value.get('value', ''), all_variables)
-                             elif search_type == 'valor' and search_value == var_value.get('value', '').lower():
-                                 return resolve_value(var_value.get('value', ''), all_variables)
-                     elif isinstance(value, list):
-                         for item in value:
-                             if isinstance(item, dict) and 'variables' in item:
-                                 for var_key, var_value in item['variables'].items():
-                                     if search_type == 'clave' and search_value == var_key.lower():
-                                         return resolve_value(var_value.get('value', ''), all_variables)
-                                     elif search_type == 'valor' and search_value == var_value.get('value', '').lower():
-                                         return resolve_value(var_value.get('value', ''), all_variables)
-                 else:
-                     result = recursive_search(value)
-                     if result:
-                         return result
-         elif isinstance(data, list):
-             for item in data:
-                 result = recursive_search(item)
-                 if result:
-                     return result
-         return None
-
-     return recursive_search(json_data)