xlwings-utils 25.0.10__py3-none-any.whl → 25.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of xlwings-utils might be problematic.
- xlwings_utils/xlwings_utils.py +88 -46
- xlwings_utils/xlwings_utils_old.py +1064 -0
- {xlwings_utils-25.0.10.dist-info → xlwings_utils-25.1.0.dist-info}/METADATA +4 -5
- xlwings_utils-25.1.0.dist-info/RECORD +7 -0
- xlwings_utils-25.0.10.dist-info/RECORD +0 -6
- {xlwings_utils-25.0.10.dist-info → xlwings_utils-25.1.0.dist-info}/WHEEL +0 -0
- {xlwings_utils-25.0.10.dist-info → xlwings_utils-25.1.0.dist-info}/top_level.txt +0 -0
xlwings_utils/xlwings_utils.py
CHANGED
@@ -5,28 +5,33 @@
 # /_/\_\|_| \_/\_/ |_||_| |_| \__, ||___/ _____ \__,_| \__||_||_||___/
 # |___/ |_____|
 
-__version__ = "25.0…
+__version__ = "25.1.0"
 
 
-import dropbox
 from pathlib import Path
 import os
 import sys
 import math
 import base64
+import requests
+import json
 
-dbx = None
 Pythonista = sys.platform == "ios"
+
 try:
     import xlwings
 
     xlwings = True
+    import pyodide_http
 
 except ImportError:
     xlwings = False
 
 missing = object()
 
+_token = None
+missing = object()
+
 
 def dropbox_init(refresh_token=missing, app_key=missing, app_secret=missing, **kwargs):
     """
@@ -59,7 +64,10 @@ def dropbox_init(refresh_token=missing, app_key=missing, app_secret=missing, **k
     -------
     dropbox object
     """
-…
+
+    global _token
+    if xlwings:
+        pyodide_http.patch_all()  # to enable chunked mode
 
     if refresh_token is missing:
         if "DROPBOX.REFRESH_TOKEN" in os.environ:
@@ -77,18 +85,22 @@ def dropbox_init(refresh_token=missing, app_key=missing, app_secret=missing, **k
         else:
             raise ValueError("no DROPBOX.APP_SECRET found in environment.")
 
-…
+    resp = requests.post(
+        "https://api.dropbox.com/oauth2/token",
+        data={"grant_type": "refresh_token", "refresh_token": refresh_token, "client_id": app_key, "client_secret": app_secret},
+        timeout=30,
+    )
     try:
-        …
-    except …
+        resp.raise_for_status()
+    except requests.exceptions.HTTPError:
         raise ValueError("invalid dropbox credentials")
-…
+    _token = resp.json()["access_token"]
 
 
-def …
-    global …
-    if …
-        …
+def _login_dropbox():
+    global _token
+    if _token is None:
+        dropbox_init()  # use environment
 
 
 def list_dropbox(path="", recursive=False, show_files=True, show_folders=False):
@@ -122,28 +134,31 @@ def list_dropbox(path="", recursive=False, show_files=True, show_folders=False):
     If REFRESH_TOKEN, APP_KEY and APP_SECRET environment variables are specified,
     it is not necessary to call dropbox_init() prior to any dropbox function.
     """
-…
-…
-…
-…
-…
-…
-…
-…
-…
+    _login_dropbox()
+
+    API_RPC = "https://api.dropboxapi.com/2"
+    headers = {"Authorization": f"Bearer {_token}", "Content-Type": "application/json"}
+    payload = {"path": path, "recursive": recursive, "include_deleted": False}
+    r = requests.post("https://api.dropboxapi.com/2/files/list_folder", headers=headers, json=payload, timeout=30)
+    r.raise_for_status()
+    data = r.json()
+    entries = data["entries"]
+    while data.get("has_more"):
+        r = requests.post(f"{API_RPC}/files/list_folder/continue", headers=headers, json={"cursor": data["cursor"]}, timeout=30)
+        r.raise_for_status()
+        data = r.json()
+        entries.extend(data["entries"])
 
-…
-…
-…
-…
-…
-…
-…
-…
-    return out
+    result = []
+    for entry in entries:
+        if show_files and entry[".tag"] == "file":
+            result.append(entry["path_display"])
+        if show_folders and entry[".tag"] == "folder":
+            result.append(entry["path_display"] + "/")
+    return result
 
 
-def read_dropbox(dropbox_path…
+def read_dropbox(dropbox_path):
     """
     read_dropbox
 
@@ -154,9 +169,6 @@ def read_dropbox(dropbox_path, max_retries=100):
     dropbox_path : str or Pathlib.Path
         path to read from
 
-    max_retries : int
-        number of retries (default: 100)
-
     Returns
     -------
     contents of the dropbox file : bytes
@@ -165,18 +177,20 @@ def read_dropbox(dropbox_path, max_retries=100):
     ----
     If REFRESH_TOKEN, APP_KEY and APP_SECRET environment variables are specified,
     it is not necessary to call dropbox_init() prior to any dropbox function.
-
-    As reading from dropbox is very unreliable under pyodide, reading will have to be retried (by default maximum 100 times).
-    The number of retries can be found with read_dropbox.retries.
     """
 
-…
-…
-…
-…
-…
-…
-…
+    _login_dropbox()
+    headers = {"Authorization": f"Bearer {_token}", "Dropbox-API-Arg": json.dumps({"path": dropbox_path})}
+    with requests.post("https://content.dropboxapi.com/2/files/download", headers=headers, stream=True, timeout=60) as r:
+        try:
+            r.raise_for_status()
+        except requests.exceptions.HTTPError as e:
+            raise FileNotFoundError(f"file {dropbox_path} not found. Original message is {e}") from None
+        chunks = []
+        for chunk in r.iter_content(chunk_size=1024):
+            if chunk:
+                chunks.append(chunk)
+        return b"".join(chunks)
 
 
 def write_dropbox(dropbox_path, contents):
@@ -198,8 +212,36 @@ def write_dropbox(dropbox_path, contents):
     If REFRESH_TOKEN, APP_KEY and APP_SECRET environment variables are specified,
     it is not necessary to call dropbox_init() prior to any dropbox function.
     """
-…
-…
+    _login_dropbox()
+    headers = {
+        "Authorization": f"Bearer {_token}",
+        "Dropbox-API-Arg": json.dumps(
+            {
+                "path": dropbox_path,  # Where it will be saved in Dropbox
+                "mode": "overwrite",  # "add" or "overwrite"
+                "autorename": False,
+                "mute": False,
+            }
+        ),
+        "Content-Type": "application/octet-stream",
+    }
+    response = requests.post("https://content.dropboxapi.com/2/files/upload", headers=headers, data=contents)
+    return response
+
+
+def delete_from_dropbox(dropbox_path):
+    _login_dropbox()
+
+    headers = {"Authorization": f"Bearer {_token}", "Content-Type": "application/json"}
+
+    data = {
+        "path": dropbox_path  # Path in Dropbox, starting with /
+    }
+
+    response = requests.post("https://api.dropboxapi.com/2/files/delete_v2", headers=headers, data=json.dumps(data))
+    if response.status_code == 200:
+        return
+    raise FileNotFoundError(f"dropbox file {dropbox_path} not found")
 
 
 def list_local(path, recursive=False, show_files=True, show_folders=False):
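In 25.1.0 the Dropbox helpers talk to the Dropbox HTTP API directly with `requests` instead of going through the `dropbox` SDK: `dropbox_init()` exchanges the refresh token for an access token at `/oauth2/token`, and `list_dropbox`, `read_dropbox`, `write_dropbox` and `delete_from_dropbox` send Bearer-authenticated requests to the `files/*` endpoints. A minimal usage sketch follows; it assumes the `DROPBOX.*` environment variables are set and that the package re-exports these helpers at top level under the `xwu` alias used in the Capture docstring (an assumption, not something the diff shows), and the Dropbox/local paths are purely illustrative:

import xlwings_utils as xwu

# With DROPBOX.REFRESH_TOKEN, DROPBOX.APP_KEY and DROPBOX.APP_SECRET set,
# an explicit xwu.dropbox_init() call is not required.
print(xwu.list_dropbox("/reports", show_files=True, show_folders=True))

data = xwu.read_dropbox("/reports/input.xlsx")        # bytes; FileNotFoundError if absent
xwu.write_dropbox("/backup/input_copy.xlsx", data)    # uploads with mode "overwrite"
xwu.delete_from_dropbox("/backup/input_copy.xlsx")    # FileNotFoundError if the file is missing

# Local (pyodide) counterparts of the same helpers
xwu.write_local("work/input.xlsx", data)              # creates parent folders as needed
print(xwu.read_local("work/input.xlsx") == data)      # True
print(xwu.list_local("work"))

Note that `write_dropbox` returns the raw `requests.Response`, while `delete_from_dropbox` returns None on success and raises `FileNotFoundError` otherwise.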
xlwings_utils/xlwings_utils_old.py
ADDED
@@ -0,0 +1,1064 @@
# _ _ _ _ _
# __ __| |__ __(_) _ __ __ _ ___ _ _ | |_ (_)| | ___
# \ \/ /| |\ \ /\ / /| || '_ \ / _` |/ __| | | | || __|| || |/ __|
# > < | | \ V V / | || | | || (_| |\__ \ | |_| || |_ | || |\__ \
# /_/\_\|_| \_/\_/ |_||_| |_| \__, ||___/ _____ \__,_| \__||_||_||___/
# |___/ |_____|

__version__ = "25.0.10"


from pathlib import Path
import os
import sys
import math
import base64
import requests
import json
import pyodide_http

Pythonista = sys.platform == "ios"

try:
    import xlwings

    xlwings = True

except ImportError:
    xlwings = False

missing = object()

_token = None
missing = object()

def dropbox_init(refresh_token=missing, app_key=missing, app_secret=missing, **kwargs):
    """
    dropbox initialize

    This function may to be called prior to using any dropbox function
    to specify the request token, app key and app secret.
    If these are specified as DROPBOX.REFRESH_TOKEN, DROPBOX.APP_KEY and DROPBOX.APP_SECRET
    environment variables, it is not necessary to call dropbox_init().

    Parameters
    ----------
    refresh_token : str
        oauth2 refreshntoken

        if omitted: use the environment variable REFRESH_TOKEN

    app_key : str
        app key

        if omitted: use the environment variable APP_KEY


    app_secret : str
        app secret

        if omitted: use the environment variable APP_SECRET

    Returns
    -------
    dropbox object
    """

    global _token
    pyodide_http.patch_all()  # to enable chunked mode

    if refresh_token is missing:
        if "DROPBOX.REFRESH_TOKEN" in os.environ:
            refresh_token = os.environ["DROPBOX.REFRESH_TOKEN"]
        else:
            raise ValueError("no DROPBOX.REFRESH_TOKEN found in environment.")
    if app_key is missing:
        if "DROPBOX.APP_KEY" in os.environ:
            app_key = os.environ["DROPBOX.APP_KEY"]
        else:
            raise ValueError("no DROPBOX.APP_KEY found in environment.")
    if app_secret is missing:
        if "DROPBOX.APP_SECRET" in os.environ:
            app_secret = os.environ["DROPBOX.APP_SECRET"]
        else:
            raise ValueError("no DROPBOX.APP_SECRET found in environment.")

    resp = requests.post(
        "https://api.dropbox.com/oauth2/token",
        data={
            "grant_type": "refresh_token",
            "refresh_token": refresh_token,
            "client_id": app_key,
            "client_secret": app_secret,
        },
        timeout=30,
    )
    try:
        resp.raise_for_status()
    except requests.exceptions.HTTPError:
        raise ValueError("invalid dropbox credentials")
    _token=resp.json()["access_token"]


def _login_dropbox():
    global _token
    if _token is None:
        dropbox_init()  # use environment

def list_dropbox(path="", recursive=False, show_files=True, show_folders=False):
    """
    list_dropbox

    returns all dropbox files/folders in path

    Parameters
    ----------
    path : str or Pathlib.Path
        path from which to list all files (default: '')

    recursive : bool
        if True, recursively list files and folders. if False (default) no recursion

    show_files : bool
        if True (default), show file entries
        if False, do not show file entries

    show_folders : bool
        if True, show folder entries
        if False (default), do not show folder entries

    Returns
    -------
    files : list

    Note
    ----
    If REFRESH_TOKEN, APP_KEY and APP_SECRET environment variables are specified,
    it is not necessary to call dropbox_init() prior to any dropbox function.
    """
    _login_dropbox()

    API_RPC = "https://api.dropboxapi.com/2"
    headers = {"Authorization": f"Bearer {_token}", "Content-Type": "application/json"}
    payload = {"path": path, "recursive": recursive, "include_deleted": False}
    r = requests.post(
        "https://api.dropboxapi.com/2/files/list_folder", headers=headers, json=payload, timeout=30
    )
    r.raise_for_status()
    data = r.json()
    entries = data["entries"]
    while data.get("has_more"):
        r = requests.post(
            f"{API_RPC}/files/list_folder/continue",
            headers=headers,
            json={"cursor": data["cursor"]},
            timeout=30,
        )
        r.raise_for_status()
        data = r.json()
        entries.extend(data["entries"])

    result=[]
    for entry in entries:
        if show_files and entry[".tag"]=="file":
            result.append(entry["path_display"])
        if show_folders and entry[".tag"]=="folder":
            result.append(entry["path_display"]+"/")
    return result


def read_dropbox(dropbox_path, max_retries=100):
    """
    read_dropbox

    read from dopbox at given path

    Parameters
    ----------
    dropbox_path : str or Pathlib.Path
        path to read from

    max_retries : int
        number of retries (default: 100)
        this parameter is deprecated and ignored

    Returns
    -------
    contents of the dropbox file : bytes

    Note
    ----
    If REFRESH_TOKEN, APP_KEY and APP_SECRET environment variables are specified,
    it is not necessary to call dropbox_init() prior to any dropbox function.
    """

    _login_dropbox()
    headers = {
        "Authorization": f"Bearer {_token}",
        "Dropbox-API-Arg": json.dumps({"path": dropbox_path}),
    }
    with requests.post(
        "https://content.dropboxapi.com/2/files/download", headers=headers, stream=True, timeout=60
    ) as r:
        r.raise_for_status()
        chunks=[]
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:
                chunks.append(chunk)
        return b"".join(chunks)

def write_dropbox(dropbox_path, contents):
    """
    write_dropbox

    write from dopbox at given path

    Parameters
    ----------
    dropbox_path : str or Pathlib.Path
        path to write to

    contents : bytes
        contents to be written

    Note
    ----
    If REFRESH_TOKEN, APP_KEY and APP_SECRET environment variables are specified,
    it is not necessary to call dropbox_init() prior to any dropbox function.
    """
    _login_dropbox()
    headers = {
        "Authorization": f"Bearer {_token}",
        "Dropbox-API-Arg": json.dumps({
            "path": dropbox_path,  # Where it will be saved in Dropbox
            "mode": "overwrite",  # "add" or "overwrite"
            "autorename": False,
            "mute": False
        }),
        "Content-Type": "application/octet-stream"
    }
    response = requests.post("https://content.dropboxapi.com/2/files/upload", headers=headers, data=contents)
    return response

def delete_from_dropbox(dropbox_path):
    _login_dropbox()

    headers = {
        "Authorization": f"Bearer {_token}",
        "Content-Type": "application/json"
    }

    data = {
        "path": dropbox_path  # Path in Dropbox, starting with /
    }

    response = requests.post("https://api.dropboxapi.com/2/files/delete_v2", headers=headers, data=json.dumps(data))

    print(response.status_code)
    print(response.json())

def list_local(path, recursive=False, show_files=True, show_folders=False):
    """
    list_local

    returns all local files/folders in path

    Parameters
    ----------
    path : str or Pathlib.Path
        path from which to list all files (default: '')

    recursive : bool
        if True, recursively list files. if False (default) no recursion

    show_files : bool
        if True (default), show file entries
        if False, do not show file entries

    show_folders : bool
        if True, show folder entries
        if False (default), do not show folder entries

    Returns
    -------
    files, relative to path : list
    """
    path = Path(path)

    result = []
    for entry in path.iterdir():
        if entry.is_file():
            if show_files:
                result.append(str(entry))
        elif entry.is_dir():
            if show_folders:
                result.append(str(entry) + "/")
            if recursive:
                result.extend(
                    list_local(
                        entry,
                        recursive=recursive,
                        show_files=show_files,
                        show_folders=show_folders,
                    )
                )
    return result


def write_local(path, contents):
    path = Path(path)
    path.parent.mkdir(parents=True, exist_ok=True)
    with open(path, "wb") as f:
        f.write(contents)

def read_local(path):
    path = Path(path)
    with open(path, "rb") as f:
        contents = f.read()
    return contents



class block:
    """
    block is 2 dimensional data structure with 1 as lowest index (like xlwings range)

    Parameters
    ----------
    number_of_rows : int
        number of rows (dedault 1)

    number_of_columns : int
        number of columns (default 1)

    Returns
    -------
    block
    """

    def __init__(self, number_of_rows=1, number_of_columns=1):
        self.dict = {}
        self.number_of_rows = number_of_rows
        self.number_of_columns = number_of_columns
        self._highest_used_row_number = None
        self._highest_used_column_number = None

    def __eq__(self, other):
        if isinstance(other, block):
            return self.value == other.value
        return False

    @classmethod
    def from_value(cls, value, column_like=False):
        """
        makes a block from a given value

        Parameters
        ----------
        value : scalar, list of scalars, list of lists of scalars or block
            value to be used in block, possibly expanded to a list of lists of scalars

        column_like : boolean
            if value is a list of scalars, values is interpreted as a column if True, as a row otherwise

        Returns
        -------
        block : block
        """
        if isinstance(value, block):
            value = value.value
        if not isinstance(value, list):
            value = [[value]]
        if not isinstance(value[0], list):
            if column_like:
                value = [[item] for item in value]
            else:
                value = [value]
        bl = cls(len(value), 1)

        for row, row_contents in enumerate(value, 1):
            for column, item in enumerate(row_contents, 1):
                if item and not (isinstance(item, float) and math.isnan(item)):
                    bl.dict[row, column] = item
                    bl._number_of_columns = max(bl.number_of_columns, column)
        return bl

    @classmethod
    def from_range(cls, rng):
        """
        makes a block from a given range

        Parameters
        ----------
        rng : xlwings.Range
            range to be used be used in block

        Returns
        -------
        block : block
        """
        number_of_rows, number_of_columns = rng.shape
        return cls.from_value(rng.value, column_like=(number_of_columns == 1))

    @classmethod
    def from_xlrd_sheet(cls, sheet):
        """
        makes a block from a xlrd sheet

        Parameters
        ----------
        sheet : xlrd sheet
            sheet to be used be used in block

        Returns
        -------
        block : block
        """
        v = [sheet.row_values(row_idx)[0 : sheet.ncols] for row_idx in range(0, sheet.nrows)]
        return cls.from_value(v)

    @classmethod
    def from_openpyxl_sheet(cls, sheet):
        """
        makes a block from an openpyxl sheet

        Parameters
        ----------
        sheet : xlrd sheet
            sheet to be used be used in block

        Returns
        -------
        block : block
        """
        v = [[cell.value for cell in row] for row in sheet.iter_rows()]
        return cls.from_value(v)

    @classmethod
    def from_file(cls, filename):
        """
        makes a block from a file

        Parameters
        ----------
        filename : str
            file to be used be used in block

        Returns
        -------
        block : block
        """
        with open(filename, "r") as f:
            v = [[line if line else missing] for line in f.read().splitlines()]
        return cls.from_value(v)

    @classmethod
    def from_dataframe(cls, df):
        """
        makes a block from a given dataframe

        Parameters
        ----------
        df : pandas dataframe
            dataframe to be used be used in block

        Returns
        -------
        block : block
        """
        v = df.values.tolist()
        return cls.from_value(v)

    def to_openpyxl_sheet(self, sheet):
        """
        appends a block to a given openpyxl sheet

        Parameters
        ----------
        sheet: openpyxl sheet
            sheet to be used be used

        Returns
        -------
        block : block
        """
        for row in self.value:
            sheet.append(row)

    def reshape(self, number_of_rows=missing, number_of_columns=missing):
        """
        makes a new block with given dimensions

        Parameters
        ----------
        number_of_rows : int
            if given, expand or shrink to the given number of rows

        number_of_columns : int
            if given, expand or shrink to the given number of columns

        Returns
        -------
        block : block
        """
        if number_of_rows is missing:
            number_of_rows = self.number_of_rows
        if number_of_columns is missing:
            number_of_columns = self.number_of_columns
        bl = block(number_of_rows=number_of_rows, number_of_columns=number_of_columns)
        for (row, column), value in self.dict.items():
            if row <= number_of_rows and column <= number_of_columns:
                bl[row, column] = value
        return bl

    @property
    def value(self):
        return [[self.dict.get((row, column)) for column in range(1, self.number_of_columns + 1)] for row in range(1, self.number_of_rows + 1)]

    def __setitem__(self, row_column, value):
        row, column = row_column
        if row < 1 or row > self.number_of_rows:
            raise IndexError(f"row must be between 1 and {self.number_of_rows} not {row}")
        if column < 1 or column > self.number_of_columns:
            raise IndexError(f"column must be between 1 and {self.number_of_columns} not {column}")
        if value is None:
            if (row, column) in self.dict:
                del self.dict[row, column]
            self._highest_used_row_number = None  # invalidate cached value
            self._highest_used_column_number = None  # invalidate cached value
        else:
            self.dict[row, column] = value
            if self._highest_used_row_number:
                self._highest_used_row_number = max(self._highest_used_row_number, row)
            if self._highest_used_column_number:
                self._highest_used_column_number = max(self._highest_used_column_number, column)

    def __getitem__(self, row_column):
        row, column = row_column
        if row < 1 or row > self.number_of_rows:
            raise IndexError(f"row must be between 1 and {self.number_of_rows} not {row}")
        if column < 1 or column > self.number_of_columns:
            raise IndexError(f"column must be between 1 and {self.number_of_columns} not {column}")
        return self.dict.get((row, column))

    def minimized(self):
        """
        Returns
        -------
        minimized block : block
            uses highest_used_row_number and highest_used_column_number to minimize the block
        """
        return self.reshape(number_of_rows=self.highest_used_row_number, number_of_columns=self.highest_used_column_number)

    @property
    def number_of_rows(self):
        return self._number_of_rows

    @number_of_rows.setter
    def number_of_rows(self, value):
        if value < 1:
            raise ValueError(f"number_of_rows should be >=1, not {value}")
        self._highest_used_row_number = None
        self._number_of_rows = value
        for row, column in list(self.dict):
            if row > self._number_of_rows:
                del self.dict[row, column]

    @property
    def number_of_columns(self):
        return self._number_of_columns

    @number_of_columns.setter
    def number_of_columns(self, value):
        if value < 1:
            raise ValueError(f"number_of_columns should be >=1, not {value}")
        self._highest_used_column_number = None
        self._number_of_columns = value
        for row, column in list(self.dict):
            if column > self._number_of_columns:
                del self.dict[row, column]

    @property
    def highest_used_row_number(self):
        if not self._highest_used_row_number:
            if self.dict:
                self._highest_used_row_number = max(row for (row, column) in self.dict)
            else:
                self._highest_used_row_number = 1
        return self._highest_used_row_number

    @property
    def highest_used_column_number(self):
        if not self._highest_used_column_number:
            if self.dict:
                self._highest_used_column_number = max(column for (row, column) in self.dict)
            else:
                self._highest_used_column_number = 1

        return self._highest_used_column_number

    def __repr__(self):
        return f"block({self.value})"

    def _check_row(self, row, name):
        if row < 1:
            raise ValueError(f"{name}={row} < 1")
        if row > self.number_of_rows:
            raise ValueError(f"{name}={row} > number_of_rows={self.number_of_rows}")

    def _check_column(self, column, name):
        if column < 1:
            raise ValueError(f"{name}={column} < 1")
        if column > self.number_of_columns:
            raise ValueError(f"{name}={column} > number_of_columns={self.number_of_columns}")

    def transposed(self):
        """
        transpose block

        Returns
        -------
        transposed block : block
        """
        bl = block(number_of_rows=self.number_of_columns, number_of_columns=self.number_of_rows)
        for (row, column), value in self.dict.items():
            bl[column, row] = value
        return bl

    def vlookup(self, s, *, row_from=1, row_to=missing, column1=1, column2=missing, default=missing):
        """
        searches in column1 for row between row_from and row_to for s and returns the value found at (that row, column2)

        Parameters
        ----------
        s : any
            value to seach for

        row_from : int
            row to start search (default 1)

            should be between 1 and number_of_rows

        row_to : int
            row to end search (default number_of_rows)

            should be between 1 and number_of_rows

        column1 : int
            column to search in (default 1)

            should be between 1 and number_of_columns

        column2 : int
            column to return looked up value from (default column1 + 1)

            should be between 1 and number_of_columns

        default : any
            if s is not found, returns the default.

            if omitted, a ValueError exception will be raised in that case

        Returns
        -------
        block[found row number, column2] : any
        """
        if column2 is missing:
            column2 = column1 + 1
        self._check_column(column2, "column2")
        row = self.lookup_row(s, row_from=row_from, row_to=row_to, column1=column1, default=-1)
        if row == -1:
            if default is missing:
                raise ValueError(f"{s} not found]")
            else:
                return default
        else:
            return self[row, column2]

    def lookup_row(self, s, *, row_from=1, row_to=missing, column1=1, default=missing):
        """
        searches in column1 for row between row_from and row_to for s and returns that row number

        Parameters
        ----------
        s : any
            value to seach for

        row_from : int
            row to start search (default 1)

            should be between 1 and number_of_rows

        row_to : int
            row to end search (default number_of_rows)

            should be between 1 and number_of_rows

        column1 : int
            column to search in (default 1)

            should be between 1 and number_of_columns

        column2 : int
            column to return looked up value from (default column1 + 1)

        default : any
            if s is not found, returns the default.

            if omitted, a ValueError exception will be raised

        default : any
            if s is not found, returns the default.

            if omitted, a ValueError exception will be raised in that case


        Returns
        -------
        row number where block[row nunber, column1] == s : int
        """
        if row_to is missing:
            row_to = self.highest_used_row_number
        self._check_row(row_from, "row_from")
        self._check_row(row_to, "row_to")
        self._check_column(column1, "column1")

        for row in range(row_from, row_to + 1):
            if self[row, column1] == s:
                return row
        if default is missing:
            raise ValueError(f"{s} not found")
        else:
            return default

    def hlookup(self, s, *, column_from=1, column_to=missing, row1=1, row2=missing, default=missing):
        """
        searches in row1 for column between column_from and column_to for s and returns the value found at (that column, row2)

        Parameters
        ----------
        s : any
            value to seach for

        column_from : int
            column to start search (default 1)

            should be between 1 and number_of_columns

        column_to : int
            column to end search (default number_of_columns)

            should be between 1 and number_of_columns

        row1 : int
            row to search in (default 1)

            should be between 1 and number_of_rows

        row2 : int
            row to return looked up value from (default row1 + 1)

            should be between 1 and number_of_rows

        default : any
            if s is not found, returns the default.

            if omitted, a ValueError exception will be raised in that case

        Returns
        -------
        block[row, found column, row2] : any
        """
        if row2 is missing:
            row2 = row1 + 1
        self._check_row(row2, "row2")
        column = self.lookup_column(s, column_from=column_from, column_to=column_to, row1=row1, default=-1)
        if column == -1:
            if default is missing:
                raise ValueError(f"{s} not found")
            else:
                return default
        else:
            return self[row2, column]

    def lookup_column(self, s, *, column_from=1, column_to=missing, row1=1, default=missing):
        """
        searches in row1 for column between column_from and column_to for s and returns that column number

        Parameters
        ----------
        s : any
            value to seach for

        column_from : int
            column to start search (default 1)

            should be between 1 and number_of_columns

        column_to : int
            column to end search (default number_of_columns)

            should be between 1 and number_of_columns

        row1 : int
            row to search in (default 1)

            should be between 1 and number_of_rows

        row2 : int
            row to return looked up value from (default row1 + 1)

        default : any
            if s is not found, returns the default.

            if omitted, a ValueError exception will be raised in that case

        Returns
        -------
        column number where block[row1, column number] == s : int
        """
        if column_to is missing:
            column_to = self.highest_used_column_number
        self._check_column(column_from, "column_from")
        self._check_column(column_to, "column_to")
        self._check_row(row1, "row1")

        for column in range(column_from, column_to + 1):
            if self[row1, column] == s:
                return column
        if default is missing:
            raise ValueError(f"{s} not found")
        else:
            return default

    def lookup(self, s, *, row_from=1, row_to=missing, column1=1, column2=missing, default=missing):
        """
        searches in column1 for row between row_from and row_to for s and returns the value found at (that row, column2)

        Parameters
        ----------
        s : any
            value to seach for

        row_from : int
            row to start search (default 1)

            should be between 1 and number_of_rows

        row_to : int
            row to end search (default number_of_rows)

            should be between 1 and number_of_rows

        column1 : int
            column to search in (default 1)

            should be between 1 and number_of_columns

        column2 : int
            column to return looked up value from (default column1 + 1)

            should be between 1 and number_of_columns

        default : any
            if s is not found, returns the default.

            if omitted, a ValueError exception will be raised in that case

        Returns
        -------
        block[found row number, column2] : any

        Note
        ----
        This is exactly the same as vlookup.
        """
        return self.vlookup(s, row_from=row_from, row_to=row_to, column1=column1, column2=column2, default=default)

    def decode_to_files(self):
        """
        decode the block with encoded file(s) to individual pyoidide file(s)

        Returns
        -------
        count : int
            number of files decoded

        Note
        ----
        if the block does not contain an encode file, the method just returns 0
        """
        count = 0
        for column in range(1, self.number_of_columns + 1):
            row = 1
            bl = self.minimized()
            while row <= self.number_of_rows:
                if self[row, column] and self[row, column].startswith("<file=") and self[row, column].endswith(">"):
                    filename = self[row, column][6:-1]
                    collect = []
                    row += 1
                    while bl[row, column] != "</file>":
                        if bl[row, column]:
                            collect.append(bl[row, column])
                        row += 1
                    decoded = base64.b64decode("".join(collect))
                    open(filename, "wb").write(decoded)
                    count += 1
                row += 1
        return count

    @classmethod
    def encode_file(cls, file):
        """
        make a block with the given pyodide file encoded

        Parameters
        ----------
        file : file name (str)
            file to be encoded

        Returns
        -------
        block with encoded file : block (minimized)
        """

        bl = cls(number_of_rows=100000, number_of_columns=1)

        n = 5000  # block size
        row = 1
        bl[row, 1] = f"<file={file}>"
        row += 1
        b64 = base64.b64encode(open(file, "rb").read()).decode("utf-8")
        while b64:
            b64_n = b64[:n]
            bl[row, 1] = b64_n
            row += 1
            b64 = b64[n:]
        bl[row, 1] = f"</file>"
        row += 1
        return bl.minimized()


class Capture:
    """
    specifies how to capture stdout

    Parameters
    ----------
    enabled : bool
        if True (default), all stdout output is captured

        if False, stdout output is printed

    include_print : bool
        if False (default), nothing will be printed if enabled is True

        if True, output will be printed (and captured if enabled is True)

    Note
    ----
    Use this like ::

        capture = xwu.Capture()
    """

    _instance = None

    def __new__(cls, *args, **kwargs):
        # singleton
        if cls._instance is None:
            cls._instance = super(Capture, cls).__new__(cls)
        return cls._instance

    def __init__(self, enabled=missing, include_print=missing):
        if hasattr(self, "stdout"):
            if enabled is not missing:
                self.enabled = enabled
            if include_print is not missing:
                self.include_print = include_print
            return
        self.stdout = sys.stdout
        self._buffer = []
        self.enabled = True if enabled is missing else enabled
        self.include_print = False if include_print is missing else include_print

    def __call__(self, enabled=missing, include_print=missing):
        return self.__class__(enabled, include_print)

    def __enter__(self):
        self.enabled = True

    def __exit__(self, exc_type, exc_value, tb):
        self.enabled = False

    def write(self, data):
        self._buffer.append(data)
        if self._include_print:
            self.stdout.write(data)

    def flush(self):
        if self._include_print:
            self.stdout.flush()
        self._buffer.append("\n")

    @property
    def enabled(self):
        return sys.out == self

    @enabled.setter
    def enabled(self, value):
        if value:
            sys.stdout = self
        else:
            sys.stdout = self.stdout

    @property
    def value(self):
        result = self.value_keep
        self.clear()
        return result

    @property
    def value_keep(self):
        result = [[line] for line in self.str_keep.splitlines()]
        return result

    @property
    def str(self):
        result = self.str_keep
        self._buffer.clear()
        return result

    @property
    def str_keep(self):
        result = "".join(self._buffer)
        return result

    def clear(self):
        self._buffer.clear()

    @property
    def include_print(self):
        return self._include_print

    @include_print.setter
    def include_print(self, value):
        self._include_print = value


def trigger_macro(sheet):
    """
    triggers the macro on sheet

    Parameters
    ----------
    sheet : sheet
        sheet to use

    """

    sheet["A1"].value = "=NOW()"


if __name__ == "__main__":
    ...
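Besides the Dropbox and local-file helpers, this snapshot carries the `block` container (a two-dimensional value store with 1-based indices, modelled on an xlwings range) and the `Capture` stdout redirector. A short, hedged sketch of both, under the same `xwu` import assumption as above; the sample values are invented for illustration:

import xlwings_utils as xwu

bl = xwu.block.from_value([["name", "price"], ["apple", 1.2], ["pear", 1.5]])
print(bl.vlookup("pear"))          # searches column 1, returns column 2 -> 1.5
print(bl.lookup_row("apple"))      # -> 2
bl = bl.reshape(number_of_rows=4)  # grow the block before writing outside its current shape
bl[4, 1] = "plum"                  # 1-based indexing, like an xlwings range
bl[4, 2] = 1.1
print(bl.minimized().value)        # [['name', 'price'], ['apple', 1.2], ['pear', 1.5], ['plum', 1.1]]

capture = xwu.Capture()            # singleton; redirects stdout while enabled
print("hello")                     # captured instead of printed
text = capture.str                 # "hello\n"; reading .str also clears the buffer
capture.enabled = False            # restore the real stdout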
{xlwings_utils-25.0.10.dist-info → xlwings_utils-25.1.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: xlwings_utils
-Version: 25.0.10
+Version: 25.1.0
 Summary: xlwings_utils
 Author-email: Ruud van der Ham <rt.van.der.ham@gmail.com>
 Project-URL: Homepage, https://github.com/salabim/xlwings_utils
@@ -9,7 +9,6 @@ Classifier: Development Status :: 5 - Production/Stable
 Classifier: Programming Language :: Python :: 3 :: Only
 Requires-Python: >=3.9
 Description-Content-Type: text/markdown
-Requires-Dist: dropbox
 
 <img src="https://www.salabim.org/xlwings_utils_logo2.png">
 
@@ -19,7 +18,7 @@ This module provides some useful functions to be used in xlwings (lite).
 
 ## Installation
 
-Just add `xlwings-utils`…
+Just add `xlwings-utils` to the *requirements.txt* tab.
 
 In the script, add
 
@@ -66,10 +65,10 @@ The file `dropbox setup.py` can also be found in the repo of xlwings_lite .
 Then, it is possible to list all files in a specified folder using the list_dropbox function.
 It is also possible to get at all folders and to access all underlying folders.
 
-The `read_dropbox` function can be used to read the contents (bytes) of a Dropbox file. As reading from Dropbox under pyodide is rather unreliable, xlwings_utils automatically retries several times (by default 100 times). The actual number of retries can be found with `read_dropbox.retries`.
-
 The function `write_dropbox` can be used to write contents (bytes) to a Dropbox file.
 
+The function `delete_from_dropbox` can be used to delete a Dropbox file.
+
 The functions `list_local`, `read_local` and `write_local` offer similar functionality for the local file system (on pyodide).
 
 So, a way to access a file on the system's drive (mapped to Dropbox) as a local file is:
xlwings_utils-25.1.0.dist-info/RECORD
ADDED
@@ -0,0 +1,7 @@
+xlwings_utils/__init__.py,sha256=FdaRztevSu5akGL7KBUBRzqwLMRTdvVUuS2Kfp2f1Uc,68
+xlwings_utils/xlwings_utils.py,sha256=E2to_M9F_23uk1h8PLW3wDO5aFRVETrtxs1loQF1ZLM,30256
+xlwings_utils/xlwings_utils_old.py,sha256=l1BowCMx6v6OqSaSKcTcNwwqpbU9EI987s9COcIBFjI,30452
+xlwings_utils-25.1.0.dist-info/METADATA,sha256=IcNOW_IKJ8EISXzenOl4yyr6eSUuIEwnWpXmOB6FM8I,12240
+xlwings_utils-25.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+xlwings_utils-25.1.0.dist-info/top_level.txt,sha256=kf5SEv0gZiRObPhUoYcc1O_iX_wwTOPeUIYvzyYeAM4,14
+xlwings_utils-25.1.0.dist-info/RECORD,,
xlwings_utils-25.0.10.dist-info/RECORD
DELETED
@@ -1,6 +0,0 @@
-xlwings_utils/__init__.py,sha256=FdaRztevSu5akGL7KBUBRzqwLMRTdvVUuS2Kfp2f1Uc,68
-xlwings_utils/xlwings_utils.py,sha256=d4qgsb-65sSLifCallOuAtSHsYhBZx5rCfC5hsvCAgw,28891
-xlwings_utils-25.0.10.dist-info/METADATA,sha256=8n4lu4bgA1E-6sI0sWCY3Z8aaoq-UH-IarF7nQXdgkg,12527
-xlwings_utils-25.0.10.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-xlwings_utils-25.0.10.dist-info/top_level.txt,sha256=kf5SEv0gZiRObPhUoYcc1O_iX_wwTOPeUIYvzyYeAM4,14
-xlwings_utils-25.0.10.dist-info/RECORD,,
{xlwings_utils-25.0.10.dist-info → xlwings_utils-25.1.0.dist-info}/WHEEL
File without changes

{xlwings_utils-25.0.10.dist-info → xlwings_utils-25.1.0.dist-info}/top_level.txt
File without changes