datamarket 0.9.51__py3-none-any.whl → 0.9.52__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of datamarket might be problematic.

datamarket/interfaces/azure.py (new)

@@ -0,0 +1,127 @@
+ ########################################################################################################################
+ # IMPORTS
+
+ import logging
+ from pathlib import Path
+ from typing import Any, Dict, List, Optional
+
+ from azure.storage.blob import BlobServiceClient
+ from pendulum import now
+
+ ########################################################################################################################
+ # CLASSES
+
+ logger = logging.getLogger(__name__)
+
+
+ class AzureBlobInterface:
+     def __init__(self, config):
+         self.profiles: List[Dict[str, Any]] = []
+         self.config = config
+
+         for section in getattr(self.config, "sections", lambda: [])():
+             if section.startswith("azure:"):
+                 profile_name = section.split(":", 1)[1]
+                 connection_string = self.config[section].get("connection_string")
+                 container_name = self.config[section].get("container_name")
+
+                 self.profiles.append(
+                     {
+                         "profile": profile_name,
+                         "container_name": container_name,
+                         "session": BlobServiceClient.from_connection_string(
+                             connection_string
+                         ).get_container_client(container_name),
+                     }
+                 )
+
+         if not self.profiles:
+             logger.warning("No Azure profiles found in config file")
+         self.current_profile: Optional[Dict[str, Any]] = (
+             self.profiles[0] if self.profiles else None
+         )
+
+     def switch_profile(self, profile_name: str) -> None:
+         for profile in self.profiles:
+             if profile["profile"] == profile_name:
+                 self.current_profile = profile
+                 return
+         logger.warning(f"Profile {profile_name} not found")
+
+     def upload_file(
+         self,
+         local_file,
+         remote_folder,
+         remote_file=None,
+         upload_file_info=False,
+         **kwargs,
+     ):
+         if not remote_file:
+             remote_file = Path(local_file).name
+
+         remote_path = f"{remote_folder}/{remote_file}" if remote_folder else remote_file
+
+         blob_client = self.current_profile["session"].get_blob_client(remote_path)
+         with open(local_file, "rb") as data:
+             blob_client.upload_blob(data, overwrite=True)
+
+         if upload_file_info:
+             self.upload_file_info(remote_path, **kwargs)
+
+     def upload_file_info(self, remote_path, **kwargs):
+         summary_file = remote_path.split(".")[0] + "_resumen.csv"
+         blob_client = self.current_profile["session"].get_blob_client(summary_file)
+
+         new_record = {
+             "file": remote_path,
+             "num_rows": kwargs["num_rows"],
+             "schema_version": kwargs["schema_version"],
+             "upload_date": now(tz="Europe/Madrid").to_datetime_string(),
+         }
+
+         new_record_str = "file,num_rows,schema_version,upload_date\n"
+         new_record_str += ",".join([str(v) for v in new_record.values()]) + "\n"
+
+         blob_client.upload_blob(new_record_str, overwrite=True)
+
+     def download_file(self, local_file, remote_path):
+         blob_client = self.current_profile["session"].get_blob_client(remote_path)
+         blob_data = blob_client.download_blob()
+         with open(local_file, "wb") as f:
+             blob_data.readinto(f)
+
+     def check_file_exists_and_not_empty(self, remote_file, remote_folder):
+         """
+         Checks if a blob exists in the specified folder and has a size greater than 100 bytes.
+
+         Args:
+             remote_file (str): The name of the file (blob) to check.
+             remote_folder (str): The folder (prefix) where the file is located.
+
+         Returns:
+             bool: True if the blob exists and has a size greater than 100, False otherwise.
+         """
+
+         remote_path = f"{remote_folder}/{remote_file}" if remote_folder else remote_file
+
+         try:
+             blob_client = self.current_profile["session"].get_blob_client(remote_path)
+             if blob_client.exists():
+                 properties = blob_client.get_blob_properties()
+                 if properties.size > 100: # Check if size is greater than 100 bytes
+                     logger.debug(
+                         f"Blob '{remote_path}' exists and is not empty (size: {properties.size})."
+                     )
+                     return True
+                 else:
+                     logger.debug(
+                         f"Blob '{remote_path}' exists but size ({properties.size}) is not > 100 bytes."
+                     )
+                     return False
+             else:
+                 logger.debug(f"Blob '{remote_path}' does not exist.")
+                 return False
+         except Exception as e:
+             logger.error(f"Error checking blob '{remote_path}': {e}")
+             # In case of error, assume it doesn't exist or is empty to allow upload attempt
+             return False
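
The new AzureBlobInterface discovers one profile per config section whose name starts with "azure:", reading a connection_string and container_name from each, and the first profile found becomes current_profile. A minimal usage sketch, under the assumption that the config object behaves like configparser (the profile names, paths, and connection strings below are hypothetical, not part of the package):

# Hypothetical usage sketch: profile names, paths, and connection strings are
# placeholders; only the AzureBlobInterface API itself comes from the diff above.
from configparser import ConfigParser

from datamarket.interfaces.azure import AzureBlobInterface

config = ConfigParser()
config.read_string("""\
[azure:prod]
connection_string = DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=<key>;EndpointSuffix=core.windows.net
container_name = exports

[azure:staging]
connection_string = DefaultEndpointsProtocol=https;AccountName=mystagingaccount;AccountKey=<key>;EndpointSuffix=core.windows.net
container_name = exports-staging
""")

azure = AzureBlobInterface(config)   # first [azure:*] section ("prod") becomes current_profile
azure.switch_profile("staging")      # select another profile by name

# Uploads out/listings.csv as daily/listings.csv; with upload_file_info=True it also
# writes daily/listings_resumen.csv containing num_rows, schema_version and upload_date.
azure.upload_file(
    "out/listings.csv",
    "daily",
    upload_file_info=True,
    num_rows=1200,
    schema_version=2,
)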

datamarket/interfaces/ftp.py

@@ -4,39 +4,53 @@
  import logging
  from ftplib import FTP, FTP_TLS
  from pathlib import Path
-
- from dynaconf import Dynaconf
+ from typing import Any, Dict, List, Optional

  ########################################################################################################################
  # CLASSES

  logger = logging.getLogger(__name__)

- class FTPInterface:
-     def __init__(self, config: Dynaconf):
-         if "ftp" in config:
-             self.config = config["ftp"]

-             self.ftp = self.get_ftp()
-         else:
+ class FTPInterface:
+     def __init__(self, config):
+         self.profiles: List[Dict[str, Any]] = []
+         self.config = config
+         for section in getattr(self.config, "sections", lambda: [])():
+             if section.startswith("ftp:"):
+                 profile_name = section.split(":", 1)[1]
+                 ftps = self.config[section]["ftps"].lower() == "true"
+                 ftp_conn = (
+                     FTP_TLS(self.config[section]["server"])
+                     if ftps
+                     else FTP(self.config[section]["server"])
+                 ) # noqa: S321
+                 ftp_conn.login(
+                     self.config[section]["username"], self.config[section]["password"]
+                 )
+                 self.profiles.append({"profile": profile_name, "session": ftp_conn})
+
+         if not self.profiles:
              logger.warning("no ftp section in config")

-     def get_ftp(self):
-         if self.config["ftps"]:
-             ftp_conn = FTP_TLS(self.config["server"])
+         self.current_profile: Optional[Dict[str, Any]] = (
+             self.profiles[0] if self.profiles else None
+         )
+         self.ftp = self.current_profile["session"]

-         else:
-             ftp_conn = FTP(self.config["server"])
-
-         ftp_conn.login(self.config["username"], self.config["password"])
-
-         return ftp_conn
+     def switch_profile(self, profile_name: str) -> None:
+         for profile in self.profiles:
+             if profile["profile"] == profile_name:
+                 self.current_profile = profile
+                 self.ftp = profile["session"]
+                 return
+         logger.warning(f"Profile {profile_name} not found")

      def upload_file(self, local_file, remote_folder, remote_file=None):
          if not remote_file:
              remote_file = Path(local_file).name

-         self._create_remote_dir_tree(full_path=f"{remote_folder}{remote_file}")
+         self._create_remote_dir_tree(full_path=f"/{remote_folder}{remote_file}")

          self.ftp.cwd(remote_folder)

@@ -59,4 +73,4 @@ class FTPInterface:
          except Exception as e:
              logger.warning(f"Error while creating remote directory: {e}")
              self.ftp.mkd(part)
-             self.ftp.cwd(part)
+             self.ftp.cwd(part)
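
FTPInterface gets the same multi-profile treatment: the old single [ftp] section read through Dynaconf is replaced by one [ftp:<name>] section per server (with server, username, password and ftps keys), every profile is logged in during __init__, and self.ftp always points at the current profile's connection. A rough sketch of switching between servers, again assuming a configparser-style config and hypothetical hosts:

# Hypothetical sketch: hosts and credentials are placeholders. Note that constructing
# FTPInterface connects and logs in to every [ftp:*] profile immediately.
from configparser import ConfigParser

from datamarket.interfaces.ftp import FTPInterface

config = ConfigParser()
config.read_string("""\
[ftp:vendor_a]
server = ftp.vendor-a.example
username = datamarket
password = s3cret
ftps = true

[ftp:vendor_b]
server = ftp.vendor-b.example
username = datamarket
password = s3cret
ftps = false
""")

ftp = FTPInterface(config)         # current profile defaults to "vendor_a"
ftp.upload_file("out/listings.csv", "incoming/")

ftp.switch_profile("vendor_b")     # self.ftp now points at vendor_b's session
ftp.upload_file("out/listings.csv", "incoming/")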

datamarket-0.9.52.dist-info/METADATA

@@ -1,8 +1,9 @@
- Metadata-Version: 2.3
+ Metadata-Version: 2.4
  Name: datamarket
- Version: 0.9.51
+ Version: 0.9.52
  Summary: Utilities that integrate advanced scraping knowledge into just one library.
  License: GPL-3.0-or-later
+ License-File: LICENSE
  Author: DataMarket
  Author-email: techsupport@datamarket.es
  Requires-Python: >=3.12,<3.13

datamarket-0.9.52.dist-info/RECORD

@@ -4,8 +4,9 @@ datamarket/exceptions/main.py,sha256=MP5ql6M7DoMbBf-Dg_2ohcUFdWXgzv-dXHntPPit31s
  datamarket/interfaces/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  datamarket/interfaces/alchemy.py,sha256=mQwjDqBpz1QHRV2JTCALvn5iK_ky69oE2Gw-EtRXsqQ,14664
  datamarket/interfaces/aws.py,sha256=7KLUeBxmPN7avEMPsu5HC_KHB1N7W6Anp2X8fo43mlw,2383
+ datamarket/interfaces/azure.py,sha256=4lxjL4O4nGO8aDmzY8m9x6vzgYpzQaBWwhERrTuwtqA,4936
  datamarket/interfaces/drive.py,sha256=3nhx3THr2SHNWKYwme9F2nPpvsqyEMFIxz0whF2FjHk,4840
- datamarket/interfaces/ftp.py,sha256=o0KlJxtksbop9OjCiQRzyAa2IeG_ExVXagS6apwrAQo,1881
+ datamarket/interfaces/ftp.py,sha256=t4zU3ccKo2-8R3nZvM0gqgWbLMRr21jYS5bYpX0jjWk,2772
  datamarket/interfaces/nominatim.py,sha256=HLk0FcdfbOVCF_i71l-Hlb17swL0W1a3Gg2n5OLD0tM,15507
  datamarket/interfaces/peerdb.py,sha256=cwYwvO740GyaPo9zLAwJsf3UeJDGDiYzjQVM9Q6s-_g,23652
  datamarket/interfaces/proxy.py,sha256=updoOStKd8-nQBbxWbnD9eOt6HksnYi-5dQ0rEySf5M,3152
@@ -26,7 +27,7 @@ datamarket/utils/strings/normalization.py,sha256=rj0wfJSjqcCRp-ruHqc5pylO3_TOmY5
  datamarket/utils/strings/obfuscation.py,sha256=8gMepfjPq0N4_IpKR6i2dy_9VJugQ3qJiRiRvKavB3s,5246
  datamarket/utils/typer.py,sha256=FDF3l6gh3UlAFPsHCtesnekvct2rKz0oFn3uKARBQvE,814
  datamarket/utils/types.py,sha256=vxdQZdwdXrfPR4Es52gBgol-tMRIOD6oK9cBo3rB0JQ,74
- datamarket-0.9.51.dist-info/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
- datamarket-0.9.51.dist-info/METADATA,sha256=UPV2cxDlddvKHiKY5tSt-dDkA7reLhyIX1KAIfcjxag,7326
- datamarket-0.9.51.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
- datamarket-0.9.51.dist-info/RECORD,,
+ datamarket-0.9.52.dist-info/METADATA,sha256=80X3F8UhM8-vlWqkp3_72Tmh0TzGJudtngDOt5x7Kcc,7348
+ datamarket-0.9.52.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
+ datamarket-0.9.52.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+ datamarket-0.9.52.dist-info/RECORD,,

datamarket-0.9.52.dist-info/WHEEL

@@ -1,4 +1,4 @@
  Wheel-Version: 1.0
- Generator: poetry-core 2.1.3
+ Generator: poetry-core 2.2.1
  Root-Is-Purelib: true
  Tag: py3-none-any