ipulse-shared-core-ftredge 2.56__tar.gz → 3.1.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ipulse-shared-core-ftredge might be problematic; see the registry's advisory page for details.

Files changed (38)
  1. {ipulse_shared_core_ftredge-2.56/src/ipulse_shared_core_ftredge.egg-info → ipulse_shared_core_ftredge-3.1.1}/PKG-INFO +2 -4
  2. {ipulse_shared_core_ftredge-2.56 → ipulse_shared_core_ftredge-3.1.1}/setup.py +2 -4
  3. ipulse_shared_core_ftredge-3.1.1/src/ipulse_shared_core_ftredge/__init__.py +11 -0
  4. {ipulse_shared_core_ftredge-2.56 → ipulse_shared_core_ftredge-3.1.1}/src/ipulse_shared_core_ftredge/models/__init__.py +0 -1
  5. ipulse_shared_core_ftredge-3.1.1/src/ipulse_shared_core_ftredge/models/organisation.py +71 -0
  6. ipulse_shared_core_ftredge-3.1.1/src/ipulse_shared_core_ftredge/models/resource_catalog_item.py +115 -0
  7. {ipulse_shared_core_ftredge-2.56 → ipulse_shared_core_ftredge-3.1.1}/src/ipulse_shared_core_ftredge/models/user_profile.py +3 -3
  8. ipulse_shared_core_ftredge-3.1.1/src/ipulse_shared_core_ftredge/utils/__init__.py +3 -0
  9. ipulse_shared_core_ftredge-3.1.1/src/ipulse_shared_core_ftredge/utils/utils_common.py +10 -0
  10. {ipulse_shared_core_ftredge-2.56 → ipulse_shared_core_ftredge-3.1.1/src/ipulse_shared_core_ftredge.egg-info}/PKG-INFO +2 -4
  11. {ipulse_shared_core_ftredge-2.56 → ipulse_shared_core_ftredge-3.1.1}/src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt +3 -12
  12. ipulse_shared_core_ftredge-3.1.1/src/ipulse_shared_core_ftredge.egg-info/requires.txt +4 -0
  13. ipulse_shared_core_ftredge-3.1.1/tests/test_utils_gcp.py +189 -0
  14. ipulse_shared_core_ftredge-2.56/src/ipulse_shared_core_ftredge/__init__.py +0 -15
  15. ipulse_shared_core_ftredge-2.56/src/ipulse_shared_core_ftredge/enums/__init__.py +0 -28
  16. ipulse_shared_core_ftredge-2.56/src/ipulse_shared_core_ftredge/enums/enums_common_utils.py +0 -171
  17. ipulse_shared_core_ftredge-2.56/src/ipulse_shared_core_ftredge/enums/enums_data_eng.py +0 -44
  18. ipulse_shared_core_ftredge-2.56/src/ipulse_shared_core_ftredge/enums/enums_module_fincore.py +0 -58
  19. ipulse_shared_core_ftredge-2.56/src/ipulse_shared_core_ftredge/enums/enums_modules.py +0 -33
  20. ipulse_shared_core_ftredge-2.56/src/ipulse_shared_core_ftredge/models/audit_log_firestore.py +0 -12
  21. ipulse_shared_core_ftredge-2.56/src/ipulse_shared_core_ftredge/models/organisation.py +0 -65
  22. ipulse_shared_core_ftredge-2.56/src/ipulse_shared_core_ftredge/models/pulse_enums.py +0 -196
  23. ipulse_shared_core_ftredge-2.56/src/ipulse_shared_core_ftredge/models/resource_catalog_item.py +0 -189
  24. ipulse_shared_core_ftredge-2.56/src/ipulse_shared_core_ftredge/tests/__init__.py +0 -0
  25. ipulse_shared_core_ftredge-2.56/src/ipulse_shared_core_ftredge/tests/test.py +0 -17
  26. ipulse_shared_core_ftredge-2.56/src/ipulse_shared_core_ftredge/utils_common.py +0 -543
  27. ipulse_shared_core_ftredge-2.56/src/ipulse_shared_core_ftredge/utils_gcp.py +0 -267
  28. ipulse_shared_core_ftredge-2.56/src/ipulse_shared_core_ftredge/utils_templates_and_schemas.py +0 -155
  29. ipulse_shared_core_ftredge-2.56/src/ipulse_shared_core_ftredge.egg-info/requires.txt +0 -6
  30. {ipulse_shared_core_ftredge-2.56 → ipulse_shared_core_ftredge-3.1.1}/LICENCE +0 -0
  31. {ipulse_shared_core_ftredge-2.56 → ipulse_shared_core_ftredge-3.1.1}/README.md +0 -0
  32. {ipulse_shared_core_ftredge-2.56 → ipulse_shared_core_ftredge-3.1.1}/pyproject.toml +0 -0
  33. {ipulse_shared_core_ftredge-2.56 → ipulse_shared_core_ftredge-3.1.1}/setup.cfg +0 -0
  34. {ipulse_shared_core_ftredge-2.56 → ipulse_shared_core_ftredge-3.1.1}/src/ipulse_shared_core_ftredge/models/user_auth.py +0 -0
  35. {ipulse_shared_core_ftredge-2.56 → ipulse_shared_core_ftredge-3.1.1}/src/ipulse_shared_core_ftredge/models/user_profile_update.py +0 -0
  36. {ipulse_shared_core_ftredge-2.56 → ipulse_shared_core_ftredge-3.1.1}/src/ipulse_shared_core_ftredge/models/user_status.py +0 -0
  37. {ipulse_shared_core_ftredge-2.56 → ipulse_shared_core_ftredge-3.1.1}/src/ipulse_shared_core_ftredge.egg-info/dependency_links.txt +0 -0
  38. {ipulse_shared_core_ftredge-2.56 → ipulse_shared_core_ftredge-3.1.1}/src/ipulse_shared_core_ftredge.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: ipulse_shared_core_ftredge
3
- Version: 2.56
3
+ Version: 3.1.1
4
4
  Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
5
5
  Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
6
6
  Author: Russlan Ramdowar
@@ -8,6 +8,4 @@ License-File: LICENCE
8
8
  Requires-Dist: pydantic[email]~=2.5
9
9
  Requires-Dist: python-dateutil~=2.8
10
10
  Requires-Dist: pytest~=7.1
11
- Requires-Dist: google-cloud-logging~=3.10.0
12
- Requires-Dist: google-cloud-error-reporting~=1.11.0
13
- Requires-Dist: google-cloud-bigquery~=3.24.0
11
+ Requires-Dist: ipulse_shared_base_ftredge>=2.2.2
@@ -3,7 +3,7 @@ from setuptools import setup, find_packages
3
3
 
4
4
  setup(
5
5
  name='ipulse_shared_core_ftredge',
6
- version='2.56',
6
+ version='3.1.1',
7
7
  package_dir={'': 'src'}, # Specify the source directory
8
8
  packages=find_packages(where='src'), # Look for packages in 'src'
9
9
  install_requires=[
@@ -11,9 +11,7 @@ setup(
11
11
  'pydantic[email]~=2.5',
12
12
  'python-dateutil~=2.8',
13
13
  'pytest~=7.1',
14
- 'google-cloud-logging~=3.10.0',
15
- 'google-cloud-error-reporting~=1.11.0',
16
- 'google-cloud-bigquery~=3.24.0'
14
+ 'ipulse_shared_base_ftredge>=2.2.2',
17
15
  ],
18
16
  author='Russlan Ramdowar',
19
17
  description='Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.',
@@ -0,0 +1,11 @@
1
+ # pylint: disable=missing-module-docstring
2
+ from .models import ( UserAuth, UserProfile,
3
+ UserStatus, UserProfileUpdate)
4
+
5
+ from .utils import (list_as_strings)
6
+
7
+ from .logging import (get_logger,
8
+ log_error,
9
+ log_warning,
10
+ log_info,
11
+ log_debug)
@@ -1,7 +1,6 @@
1
1
  from .user_profile import UserProfile
2
2
  from .user_status import UserStatus
3
3
  from .user_profile_update import UserProfileUpdate
4
- from .organisation import Organisation
5
4
  from .user_auth import UserAuth
6
5
 
7
6
 
@@ -0,0 +1,71 @@
1
+ # pylint: disable=missing-module-docstring
2
+ # pylint: disable=missing-function-docstring
3
+ # pylint: disable=missing-class-docstring
4
+ # pylint: disable=broad-exception-caught
5
+ # pylint: disable=line-too-long
6
+ # pylint: disable=unused-variable
7
+ # pylint: disable=broad-exception-caught
8
+ # from pydantic import BaseModel, validator, ValidationError, Field
9
+ # from typing import Set, Optional
10
+ # import uuid
11
+ # from datetime import datetime
12
+ # import dateutil.parser
13
+
14
+ # CLASS_VERSION= 1.0
15
+ # MODULE= "core"
16
+ # CLASS_REF="orgn"
17
+
18
+
19
+ # class Organisation(BaseModel):
20
+ # puid: str = Field(default_factory=f"{datetime.utcnow().strftime('%Y%m%d%H%M')}{uuid.uuid4().hex[:8]}_{MODULE}{CLASS_REF}".lower())
21
+ # name: str
22
+ # creat_date: datetime = Field(default_factory=datetime.utcnow())
23
+ # updt_date: datetime = Field(default_factory=datetime.utcnow())
24
+ # creat_by_user: Optional[str] = None
25
+ # updt_by_user: Optional[str] = None
26
+ # relations: Optional[Set[str]]=None
27
+ # description: Optional[str] = None # Updated to use Optional
28
+ # industries: Optional[Set[str]] = None # Updated to use Optional
29
+ # website: Optional[str] = None # Updated to use Optional
30
+ # org_admin_user_uids: Optional[Set[str]] = None # Updated to use Optional
31
+ # class Config:
32
+ # extra = "forbid"
33
+
34
+
35
+ # @validator('relations', pre=True, always=True)
36
+ # def validate_relations(cls, relations):
37
+ # if not set(relations).issubset(enums.organisation_relations):
38
+ # raise ValueError("Invalid relation values provided.")
39
+ # return relations
40
+
41
+
42
+ # @validator('industries', pre=True, always=True)
43
+ # def validate_industries(cls, industries):
44
+ # if industries is not None and not set(industries).issubset(enums.organisation_industries):
45
+ # raise ValueError("Invalid industry values provided.")
46
+ # return industries
47
+
48
+ # @validator('creat_date', 'updt_date', pre=True)
49
+ # def parse_date(cls, value):
50
+ # if value is None:
51
+ # return value
52
+ # if isinstance(value, datetime):
53
+ # return value
54
+ # try:
55
+ # # Assuming Firestore returns an ISO 8601 string, adjust if necessary
56
+ # print("Putting Updt or Creat date in a valid format in a Validator when creating Organisation object")
57
+ # return dateutil.parser.isoparse(value)
58
+ # except (TypeError, ValueError):
59
+ # raise ValidationError(f"Invalid datetime format inside Organisation: {value}")
60
+
61
+
62
+ # ### Description, Industries, and Website are optional for Retail Customer and mandatory for Non Retail Customer
63
+ # @validator('description', 'industries', 'website', pre=True, always=True)
64
+ # def validate_optional_fields(cls, value, values):
65
+ # if values.get('name') == 'Retail Customer' and values.get('relations') == {"retail_customer"} or values.get('relations') == ["retail_customer"]:
66
+ # if value is not None:
67
+ # raise ValueError("For 'Retail Customer' with only 'retail_customer' relations, description, industries, and website should not be provided.")
68
+ # else:
69
+ # if value is None:
70
+ # raise ValueError("For Non Retail Customer, description, industries, and website are mandatory.")
71
+ # return value
@@ -0,0 +1,115 @@
1
+ # import uuid
2
+ # from datetime import datetime
3
+ # from pydantic import BaseModel, validator, ValidationError
4
+ # from typing import Dict, Any, Set, Optional
5
+
6
+ # import dateutil.parser
7
+
8
+ # CLASS_VERSION = 1.0
9
+ # CLASS_REF = "resdes"
10
+ # MODULE = "core"
11
+
12
+ # class ResourceCatalogItem(BaseModel):
13
+
14
+ # resr_puid_or_name: str #Ex: username
15
+ # resr_path: str #Ex: ipulse-401013/cloud/firesotre/Users/{user_uid}/username
16
+ # resr_name: str #Ex: username
17
+ # resr_pulse_module: str #Ex: core
18
+ # resr_type: str
19
+ # resr_classifications: Set[str]
20
+ # resr_contents:Set[str]
21
+ # resr_original_or_processed: str
22
+ # resr_origin: str
23
+ # resr_origin_organisations_uids: Set[str]
24
+ # resr_origin_description: str
25
+ # resr_licences_types: Set[str]
26
+ # resr_description_details: str
27
+ # resr_updtbl_by_non_staff: bool
28
+ # resr_creat_by_user_uid: str
29
+ # resr_creat_date: datetime
30
+ # class_version:float = CLASS_VERSION
31
+ # resr_columns_count: int
32
+ # resr_columns: Optional[Dict[Any, Any]] = None #OPTIONAL
33
+ # resr_structure_version: Optional[str]=None # OPTIONAL
34
+ # resr_structure_updt_date: Optional[str]=None #OPTIONAL
35
+ # resr_structure_updt_by_user_uid: Optional[str]=None # OPTIONAL
36
+ # resr_tags: Optional[Dict[Any, Any]] = None #OPTIONAL
37
+ # resr_content_updt_date: Optional[str]=None #OPTIONAL
38
+ # resr_content_updt_by_user_uid: Optional[str]=None # OPTIONAL
39
+ # puid: Optional[str] = None #TO BE SETUP BY Validator
40
+ # metadata_version: Optional[float] = None #TO BE SETUP BY Validator
41
+ # metadata_creat_date: Optional[datetime] = None #TO BE SETUP BY Management Service
42
+ # metadata_creat_by: Optional[str] = None #TO BE SETUP BY Management Service
43
+ # metadata_updt_date: Optional[datetime] = None #TO BE SETUP BY Management Service
44
+ # metadata_updt_by: Optional[str] = None #TO BE SETUP BY Management Service
45
+
46
+ # @validator('puid', pre=True, always=True)
47
+ # def set_puid(cls, puid, values):
48
+ # if puid is None:
49
+ # return f"{datetime.utcnow().strftime('%Y%m%d%H%M')}{uuid.uuid4().hex[:8]}_{MODULE}{CLASS_REF}".lower()
50
+ # return puid
51
+
52
+ # @validator('metadata_version', pre=True, always=True)
53
+ # def set_metadata_version(cls, metadata_version, values):
54
+ # if metadata_version is None:
55
+ # return 1.0
56
+ # else:
57
+ # return metadata_version + 0.1
58
+
59
+
60
+ # @validator('resr_pulse_module', pre=True, always=True)
61
+ # def validate_resr_pulse_module(cls, resr_pulse_modules):
62
+ # if resr_pulse_modules not in enums.pulse_modules:
63
+ # raise ValueError("Invalid pulse_modules values provided.")
64
+ # return resr_pulse_modules
65
+
66
+ # @validator('resr_type', pre=True, always=True)
67
+ # def validate_resr_type(cls, resr_type):
68
+ # if resr_type not in enums.resource_types:
69
+ # raise ValueError("Invalid resource_types value provided.")
70
+ # return resr_type
71
+
72
+ # @validator('resr_classifications', pre=True, always=True)
73
+ # def validate_resr_classifications(cls, resr_classifications):
74
+ # if not resr_classifications.issubset(enums.resource_classifications):
75
+ # raise ValueError("Invalid resr_classifications values provided.")
76
+ # return resr_classifications
77
+
78
+ # @validator('resr_contents', pre=True, always=True)
79
+ # def validate_resr_contents(cls, resr_contents):
80
+ # if not resr_contents.issubset(enums.resource_contents):
81
+ # raise ValueError("Invalid resr_contents values provided.")
82
+ # return resr_contents
83
+
84
+ # @validator('resr_original_or_processed', pre=True, always=True)
85
+ # def validate_resr_original_or_processed(cls, resr_original_or_processed):
86
+ # if resr_original_or_processed not in enums.resource_original_or_processed:
87
+ # raise ValueError("Invalid resr_original_or_processed value provided.")
88
+ # return resr_original_or_processed
89
+
90
+ # @validator('resr_origin', pre=True, always=True)
91
+ # def validate_resr_origin(cls, resr_origin):
92
+ # if resr_origin not in enums.resource_origins:
93
+ # raise ValueError("Invalid resource_origins value provided.")
94
+ # return resr_origin
95
+
96
+
97
+ # @validator('metadata_creat_date', 'metadata_updt_date', pre=True)
98
+ # def parse_date(cls, value):
99
+ # if value is None:
100
+ # return value
101
+ # if isinstance(value, datetime):
102
+ # return value
103
+ # try:
104
+ # # Assuming Firestore returns an ISO 8601 string, adjust if necessary
105
+ # return dateutil.parser.isoparse(value)
106
+ # except (TypeError, ValueError):
107
+ # raise ValidationError(f"Invalid datetime format inside Resource Description: {value}")
108
+
109
+
110
+
111
+ # @validator('metadata_updt_date', 'metadata_updt_date', pre=True, always=True)
112
+ # def set_default_updt_date(cls, date, values):
113
+ # if date is None:
114
+ # return datetime.utcnow().isoformat()
115
+ # return date
@@ -33,9 +33,9 @@ class UserProfile(BaseModel):
33
33
  provider_id: str #User can Read only
34
34
 
35
35
  username: Optional[str] = None #User can Read and Edit
36
- dob: Optional[date] = None #User can Read and Edit
37
- first_name: Optional[str] = None #User can Read and Edit
38
- last_name: Optional[str] = None #User can Read and Edit
36
+ dob: Optional[date] = None #User can Read and Edit
37
+ first_name: Optional[str] = None #User can Read and Edit
38
+ last_name: Optional[str] = None #User can Read and Edit
39
39
  mobile: Optional[str] = None #User can Read and Edit
40
40
  class Config:
41
41
  extra = "forbid"
@@ -0,0 +1,3 @@
1
+ # pylint: disable=missing-module-docstring
2
+
3
+ from .utils_common import (list_as_strings)
@@ -0,0 +1,10 @@
1
+ # pylint: disable=missing-module-docstring
2
+ # pylint: disable=missing-function-docstring
3
+ # pylint: disable=logging-fstring-interpolation
4
+ # pylint: disable=line-too-long
5
+ # pylint: disable=missing-class-docstring
6
+ # pylint: disable=broad-exception-caught
7
+
8
+ def list_as_strings(*enums):
9
+ """Converts a list of Enum members to their string values."""
10
+ return [str(enum) for enum in enums]
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: ipulse_shared_core_ftredge
3
- Version: 2.56
3
+ Version: 3.1.1
4
4
  Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
5
5
  Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
6
6
  Author: Russlan Ramdowar
@@ -8,6 +8,4 @@ License-File: LICENCE
8
8
  Requires-Dist: pydantic[email]~=2.5
9
9
  Requires-Dist: python-dateutil~=2.8
10
10
  Requires-Dist: pytest~=7.1
11
- Requires-Dist: google-cloud-logging~=3.10.0
12
- Requires-Dist: google-cloud-error-reporting~=1.11.0
13
- Requires-Dist: google-cloud-bigquery~=3.24.0
11
+ Requires-Dist: ipulse_shared_base_ftredge>=2.2.2
@@ -3,27 +3,18 @@ README.md
3
3
  pyproject.toml
4
4
  setup.py
5
5
  src/ipulse_shared_core_ftredge/__init__.py
6
- src/ipulse_shared_core_ftredge/utils_common.py
7
- src/ipulse_shared_core_ftredge/utils_gcp.py
8
- src/ipulse_shared_core_ftredge/utils_templates_and_schemas.py
9
6
  src/ipulse_shared_core_ftredge.egg-info/PKG-INFO
10
7
  src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt
11
8
  src/ipulse_shared_core_ftredge.egg-info/dependency_links.txt
12
9
  src/ipulse_shared_core_ftredge.egg-info/requires.txt
13
10
  src/ipulse_shared_core_ftredge.egg-info/top_level.txt
14
- src/ipulse_shared_core_ftredge/enums/__init__.py
15
- src/ipulse_shared_core_ftredge/enums/enums_common_utils.py
16
- src/ipulse_shared_core_ftredge/enums/enums_data_eng.py
17
- src/ipulse_shared_core_ftredge/enums/enums_module_fincore.py
18
- src/ipulse_shared_core_ftredge/enums/enums_modules.py
19
11
  src/ipulse_shared_core_ftredge/models/__init__.py
20
- src/ipulse_shared_core_ftredge/models/audit_log_firestore.py
21
12
  src/ipulse_shared_core_ftredge/models/organisation.py
22
- src/ipulse_shared_core_ftredge/models/pulse_enums.py
23
13
  src/ipulse_shared_core_ftredge/models/resource_catalog_item.py
24
14
  src/ipulse_shared_core_ftredge/models/user_auth.py
25
15
  src/ipulse_shared_core_ftredge/models/user_profile.py
26
16
  src/ipulse_shared_core_ftredge/models/user_profile_update.py
27
17
  src/ipulse_shared_core_ftredge/models/user_status.py
28
- src/ipulse_shared_core_ftredge/tests/__init__.py
29
- src/ipulse_shared_core_ftredge/tests/test.py
18
+ src/ipulse_shared_core_ftredge/utils/__init__.py
19
+ src/ipulse_shared_core_ftredge/utils/utils_common.py
20
+ tests/test_utils_gcp.py
@@ -0,0 +1,4 @@
1
+ pydantic[email]~=2.5
2
+ python-dateutil~=2.8
3
+ pytest~=7.1
4
+ ipulse_shared_base_ftredge>=2.2.2
@@ -0,0 +1,189 @@
1
+ # pylint: disable=missing-module-docstring
2
+ # pylint: disable=import-error
3
+ # pylint: disable=missing-module-docstring
4
+ # pylint: disable=line-too-long
5
+ # pylint: disable=missing-function-docstring
6
+
7
+ import os
8
+ import json
9
+ from unittest.mock import MagicMock, patch
10
+ import pytest
11
+ from ipulse_shared_core_ftredge.utils_cloud_gcp import write_json_to_gcs
12
+
13
+
14
+ # Mocking Google Cloud Storage components for testing using pytest-mock
15
+
16
+ @pytest.fixture
17
+ def mock_blob(mocker):
18
+ mock_blob_class = mocker.patch('google.cloud.storage.Blob', autospec=True)
19
+ mock_blob = mock_blob_class.return_value
20
+ mock_blob.exists.return_value = False
21
+ return mock_blob
22
+
23
+
24
+ @pytest.fixture
25
+ def mock_bucket(mocker, mock_blob):
26
+ mock_bucket_class = mocker.patch('google.cloud.storage.Bucket', autospec=True)
27
+ mock_bucket = mock_bucket_class.return_value
28
+ mock_bucket.list_blobs.return_value = []
29
+ mock_bucket.blob.return_value = mock_blob # this avoids creating a new blob for each test, which will confuse the test results
30
+ return mock_bucket
31
+
32
+ @pytest.fixture
33
+ def mock_storage_client(mocker, mock_bucket):
34
+ mock_client_class = mocker.patch('google.cloud.storage.Client', autospec=True)
35
+ mock_client = mock_client_class.return_value
36
+ mock_client.bucket.return_value = mock_bucket
37
+ return mock_client
38
+
39
+
40
+
41
+ # --- Test Cases ---
42
+
43
+ def test_successful_gcs_upload(mock_storage_client):
44
+ test_data = {"key": "value"}
45
+ test_bucket_name = "test_bucket"
46
+ test_file_name = "test_file.json"
47
+
48
+ result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name, test_file_name)
49
+
50
+ assert result['gcs_path'] == f"gs://{test_bucket_name}/{test_file_name}"
51
+ assert result['local_path'] is None
52
+ assert result['gcs_file_already_exists'] is False
53
+ assert result['gcs_file_overwritten'] is False
54
+ assert result['gcs_file_saved_with_increment'] is False
55
+
56
+
57
+ def test_invalid_data_type(mock_storage_client):
58
+ with pytest.raises(ValueError) as exc_info:
59
+ write_json_to_gcs(mock_storage_client, 12345, "test_bucket", "test_file.json")
60
+ assert str(exc_info.value) == "Unsupported data type. Data must be a list, dict, or str."
61
+
62
+
63
+ def test_overwrite_if_exists(mock_storage_client, mock_blob):
64
+ mock_blob.exists.return_value = True # Simulate existing file
65
+ test_data = {"key": "value"}
66
+ test_bucket_name = "test_bucket"
67
+ test_file_name = "test_file.json"
68
+
69
+ result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name, test_file_name, overwrite_if_exists=True)
70
+
71
+ assert result['gcs_file_overwritten'] is True
72
+
73
+
74
+ def test_overwrite_with_substring(mock_storage_client, mock_bucket):
75
+ mock_bucket.list_blobs.return_value = [MagicMock(name='test_prefix_file1.json'), MagicMock(name='test_prefix_file2.json')]
76
+ test_data = {"key": "value"}
77
+ test_bucket_name = "test_bucket"
78
+ test_file_name = "test_file.json"
79
+ test_prefix = 'test_prefix'
80
+
81
+ result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name,
82
+ test_file_name, overwrite_if_exists=True,
83
+ file_exists_if_starts_with_prefix=test_prefix)
84
+ assert result['gcs_file_overwritten'] is True
85
+ assert result['gcs_file_exists_checked_on_name'] == test_prefix
86
+
87
+
88
+ def test_increment_if_exists(mock_storage_client, mock_blob):
89
+ mock_blob.exists.side_effect = [True, True, False] # Simulate existing files
90
+ test_data = {"key": "value"}
91
+ test_bucket_name = "test_bucket"
92
+ test_file_name = "test_file.json"
93
+ result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name, test_file_name, increment_if_exists=True)
94
+ assert result['gcs_path'] == f"gs://{test_bucket_name}/test_file_v2.json"
95
+ assert result['gcs_file_saved_with_increment'] is True
96
+
97
+
98
+ def test_overwrite_and_increment_raise_value_error(mock_storage_client):
99
+ test_data = {"key": "value"}
100
+ test_bucket_name = "test_bucket"
101
+ test_file_name = "test_file.json"
102
+ with pytest.raises(ValueError) as exc_info:
103
+ write_json_to_gcs(mock_storage_client, test_data, test_bucket_name,
104
+ test_file_name, overwrite_if_exists=True,
105
+ increment_if_exists=True)
106
+ assert str(exc_info.value) == "Both 'overwrite_if_exists' and 'increment_if_exists' cannot be True simultaneously."
107
+
108
+
109
+ @patch('os.path.exists', return_value=False) # Assume local file exists for simplicity
110
+ @patch('builtins.open', new_callable=MagicMock)
111
+ def test_local_save_after_gcs_failure(mock_open, mock_exists, mock_storage_client, mock_blob):
112
+ mock_blob.upload_from_string.side_effect = Exception("GCS Upload Failed")
113
+ test_data = {"key": "value"}
114
+ test_bucket_name = "test_bucket"
115
+ test_file_name = "test_file.json"
116
+
117
+ # Expecting an exception because GCS upload fails
118
+ with pytest.raises(Exception) as exc_info:
119
+ result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name, test_file_name, save_locally=True)
120
+
121
+ assert "GCS Upload Failed" in str(exc_info.value)
122
+ mock_open.assert_called_once_with(os.path.join("/tmp", test_file_name), 'w', encoding='utf-8')
123
+
124
+
125
+ @patch('builtins.open', new_callable=MagicMock)
126
+ def test_local_save_with_custom_path(mock_open, mock_storage_client):
127
+ local_path = "/my/custom/path"
128
+ test_data = {"key": "value"}
129
+ test_bucket_name = "test_bucket"
130
+ test_file_name = "test_file.json"
131
+
132
+ result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name,
133
+ test_file_name, local_path=local_path)
134
+
135
+ assert result['local_path'] == os.path.join(local_path, test_file_name)
136
+ mock_open.assert_called_once()
137
+
138
+ @patch('os.path.exists', side_effect=[True, True, False])
139
+ @patch('builtins.open', new_callable=MagicMock)
140
+ def test_local_save_with_increment(mock_open, mock_exists, mock_storage_client, mock_blob):
141
+ test_data = {"key": "value"}
142
+ test_bucket_name = "test_bucket"
143
+ test_file_name = "test_file.json"
144
+
145
+ result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name,
146
+ test_file_name, save_locally=True, increment_if_exists=True)
147
+
148
+ assert f"/tmp/test_file_v1.json" == result['local_path']
149
+ mock_open.assert_called_once()
150
+
151
+
152
+ @patch('builtins.open', new_callable=MagicMock)
153
+ def test_local_save_overwrite(mock_open, mock_storage_client):
154
+ test_data = {"key": "value"}
155
+ test_bucket_name = "test_bucket"
156
+ test_file_name = "test_file.json"
157
+
158
+ # Execute function
159
+ result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name,
160
+ test_file_name, save_locally=True, overwrite_if_exists=True)
161
+
162
+ # Check results
163
+ assert result['local_path'] == os.path.join("/tmp", test_file_name)
164
+ mock_open.assert_called_once_with(os.path.join("/tmp", test_file_name), 'w', encoding='utf-8')
165
+ file_handle = mock_open()
166
+
167
+
168
+ @patch('os.path.exists', return_value=True)
169
+ @patch('builtins.open', new_callable=MagicMock)
170
+ def test_local_save_skip(mock_open, mock_exists, mock_storage_client):
171
+ test_data = {"key": "value"}
172
+ test_bucket_name = "test_bucket"
173
+ test_file_name = "test_file.json"
174
+
175
+ result = write_json_to_gcs(mock_storage_client, test_data, test_bucket_name,
176
+ test_file_name, save_locally=True, overwrite_if_exists=False)
177
+
178
+ assert result['local_path'] == os.path.join("/tmp", test_file_name)
179
+ mock_open.assert_not_called()
180
+
181
+
182
+ def test_string_data_handling(mock_storage_client, mock_blob):
183
+ test_string_data = "This is a test string."
184
+ test_bucket_name = "test_bucket"
185
+ test_file_name = "test_file.json"
186
+
187
+ result = write_json_to_gcs(mock_storage_client, test_string_data, test_bucket_name, test_file_name)
188
+
189
+ assert result['gcs_path'] == f"gs://{test_bucket_name}/{test_file_name}"
@@ -1,15 +0,0 @@
1
- from .models import (Organisation, UserAuth, UserProfile,
2
- UserStatus, UserProfileUpdate, pulse_enums)
3
- from .utils_gcp import (setup_gcp_logger_and_error_report,
4
- read_csv_from_gcs, read_json_from_gcs,
5
- write_csv_to_gcs, write_json_to_gcs)
6
- from .utils_templates_and_schemas import (create_bigquery_schema_from_json,
7
- check_format_against_schema_template)
8
- from .utils_common import (ContextLog, Pipelinemon)
9
-
10
- from .enums import (TargetLogs, LogLevel, Unit, Frequency,
11
- Module, SubModule, BaseDataCategory,
12
- FinCoreCategory, FincCoreSubCategory,
13
- FinCoreRecordsCategory, ExchangeOrPublisher,
14
- SourcingPipelineType, SourcingTriggerType,
15
- DWEvent, DWEventTriggerType)
@@ -1,28 +0,0 @@
1
-
2
- # pylint: disable=missing-module-docstring
3
- # pylint: disable=missing-function-docstring
4
- # pylint: disable=missing-class-docstring
5
-
6
- from .enums_common_utils import (LogLevel,
7
- SystemsImpacted,
8
- TargetLogs,
9
- Unit,
10
- Frequency)
11
-
12
-
13
- from .enums_modules import(Module,
14
- SubModule,
15
- BaseDataCategory)
16
-
17
-
18
- from .enums_module_fincore import (FinCoreCategory,
19
- FincCoreSubCategory,
20
- FinCoreRecordsCategory,
21
- ExchangeOrPublisher)
22
-
23
-
24
-
25
- from .enums_data_eng import (SourcingTriggerType,
26
- SourcingPipelineType,
27
- DWEvent,
28
- DWEventTriggerType)