ellipsis 3.1.1__tar.gz → 3.3.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ellipsis-3.1.1 → ellipsis-3.3.0}/PKG-INFO +1 -5
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/__init__.py +4 -2
- ellipsis-3.3.0/ellipsis/account/__init__.py +4 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/account/root.py +18 -1
- ellipsis-3.3.0/ellipsis/apiManager.py +183 -0
- ellipsis-3.3.0/ellipsis/compute/__init__.py +1 -0
- ellipsis-3.3.0/ellipsis/compute/root.py +180 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/__init__.py +4 -1
- ellipsis-3.3.0/ellipsis/path/bookmark/__init__.py +2 -0
- ellipsis-3.3.0/ellipsis/path/bookmark/root.py +39 -0
- ellipsis-3.3.0/ellipsis/path/file/__init__.py +2 -0
- ellipsis-3.3.0/ellipsis/path/file/root.py +155 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/folder/__init__.py +2 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/folder/root.py +14 -0
- ellipsis-3.3.0/ellipsis/path/pointCloud/__init__.py +5 -0
- {ellipsis-3.1.1/ellipsis/path/vector → ellipsis-3.3.0/ellipsis/path/pointCloud}/root.py +2 -12
- ellipsis-3.3.0/ellipsis/path/pointCloud/timestamp/__init__.py +13 -0
- ellipsis-3.3.0/ellipsis/path/pointCloud/timestamp/file/__init__.py +2 -0
- ellipsis-3.3.0/ellipsis/path/pointCloud/timestamp/file/root.py +96 -0
- ellipsis-3.3.0/ellipsis/path/pointCloud/timestamp/order/__init__.py +2 -0
- ellipsis-3.3.0/ellipsis/path/pointCloud/timestamp/order/root.py +32 -0
- ellipsis-3.3.0/ellipsis/path/pointCloud/timestamp/root.py +178 -0
- ellipsis-3.3.0/ellipsis/path/pointCloud/timestamp/util.py +76 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/raster/root.py +3 -2
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/raster/style/root.py +8 -6
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/raster/timestamp/__init__.py +3 -4
- ellipsis-3.3.0/ellipsis/path/raster/timestamp/file/__init__.py +2 -0
- ellipsis-3.3.0/ellipsis/path/raster/timestamp/file/root.py +100 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/raster/timestamp/order/root.py +6 -8
- ellipsis-3.3.0/ellipsis/path/raster/timestamp/root.py +460 -0
- ellipsis-3.3.0/ellipsis/path/raster/timestamp/util.py +76 -0
- ellipsis-3.3.0/ellipsis/path/root.py +218 -0
- ellipsis-3.3.0/ellipsis/path/setUpTask/__init__.py +2 -0
- ellipsis-3.3.0/ellipsis/path/setUpTask/root.py +50 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/usage/__init__.py +1 -1
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/usage/root.py +10 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/vector/__init__.py +1 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/vector/featureProperty/root.py +0 -1
- ellipsis-3.3.0/ellipsis/path/vector/root.py +45 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/vector/style/root.py +6 -4
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/vector/timestamp/__init__.py +1 -1
- ellipsis-3.3.0/ellipsis/path/vector/timestamp/feature/root.py +184 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/vector/timestamp/feature/series/root.py +45 -29
- ellipsis-3.3.0/ellipsis/path/vector/timestamp/file/__init__.py +2 -0
- ellipsis-3.3.0/ellipsis/path/vector/timestamp/file/root.py +115 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/vector/timestamp/order/root.py +6 -4
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/vector/timestamp/root.py +95 -40
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/sanitize.py +50 -1
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/util/__init__.py +3 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/util/root.py +159 -20
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis.egg-info/PKG-INFO +1 -5
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis.egg-info/SOURCES.txt +20 -6
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis.egg-info/requires.txt +6 -2
- {ellipsis-3.1.1 → ellipsis-3.3.0}/setup.py +8 -4
- ellipsis-3.3.0/test/test.py +651 -0
- ellipsis-3.1.1/ellipsis/account/__init__.py +0 -4
- ellipsis-3.1.1/ellipsis/apiManager.py +0 -129
- ellipsis-3.1.1/ellipsis/path/file/__init__.py +0 -2
- ellipsis-3.1.1/ellipsis/path/file/root.py +0 -25
- ellipsis-3.1.1/ellipsis/path/raster/timestamp/root.py +0 -322
- ellipsis-3.1.1/ellipsis/path/raster/timestamp/upload/__init__.py +0 -2
- ellipsis-3.1.1/ellipsis/path/raster/timestamp/upload/root.py +0 -72
- ellipsis-3.1.1/ellipsis/path/root.py +0 -312
- ellipsis-3.1.1/ellipsis/path/vector/timestamp/feature/root.py +0 -271
- ellipsis-3.1.1/ellipsis/path/vector/timestamp/upload/__init__.py +0 -2
- ellipsis-3.1.1/ellipsis/path/vector/timestamp/upload/root.py +0 -49
- ellipsis-3.1.1/ellipsis/view/__init__.py +0 -3
- ellipsis-3.1.1/ellipsis/view/root.py +0 -59
- ellipsis-3.1.1/test/test.py +0 -422
- {ellipsis-3.1.1 → ellipsis-3.3.0}/LICENSE +0 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/README.md +0 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/account/accessToken/__init__.py +0 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/account/accessToken/root.py +0 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/hashtag/__init__.py +0 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/hashtag/root.py +0 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/invite/__init__.py +0 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/invite/root.py +0 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/member/__init__.py +0 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/member/root.py +0 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/raster/__init__.py +0 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/raster/style/__init__.py +0 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/raster/timestamp/order/__init__.py +0 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/vector/featureProperty/__init__.py +0 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/vector/style/__init__.py +0 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/vector/timestamp/feature/__init__.py +0 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/vector/timestamp/feature/message/__init__.py +0 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/vector/timestamp/feature/message/root.py +0 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/vector/timestamp/feature/series/__init__.py +0 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/vector/timestamp/order/__init__.py +0 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/user/__init__.py +0 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/user/root.py +0 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis.egg-info/dependency_links.txt +0 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis.egg-info/top_level.txt +0 -0
- {ellipsis-3.1.1 → ellipsis-3.3.0}/setup.cfg +0 -0
{ellipsis-3.1.1 → ellipsis-3.3.0}/PKG-INFO

@@ -1,12 +1,10 @@
 Metadata-Version: 2.1
 Name: ellipsis
-Version: 3.1.1
+Version: 3.3.0
 Summary: Package to interact with the Ellipsis API
 Home-page: https://github.com/ellipsis-drive-internal/python-package
 Author: Daniel van der Maas
 Author-email: daniel@ellipsis-drive.com
-License: UNKNOWN
-Platform: UNKNOWN
 Classifier: Programming Language :: Python :: 3
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Operating System :: OS Independent

@@ -50,5 +48,3 @@ Another example
 info = el.path.get(folderId, token)
 layers = el.path.listPath(folderId, pathType='layer', token = token, listAll = True)
 folders = el.path.listPath(folderId, pathType='folder', token = token, listAll = True)
-
-
{ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/account/root.py

@@ -1,6 +1,7 @@
 from ellipsis import apiManager
 from ellipsis import sanitize
 from ellipsis.util.root import recurse
+import requests
 
 def logIn(username, password, validFor = None):
 
@@ -10,12 +11,28 @@ def logIn(username, password, validFor = None):
 
     json = {'username': username, 'password': password, 'validFor': validFor}
 
+    r = apiManager.call(requests.post,'/account/login', body=json, token=None, crash=False)
+    if r.status_code == 400:
+        x = r.json()
+        if x['message'] == "No password configured.":
+            raise ValueError("You cannot login with your Google credentials in the Python module. You need to configure an Ellipsis Drive specific password. You can do this on https://app.ellipsis-drive.com/account-settings/security")
+    if r.status_code != 200:
+        raise ValueError(r.text)
 
-    r =
+    r = r.json()
     token = r['token']
 
     return(token)
 
+
+def getInfo(token):
+    token = sanitize.validString('token', token, True)
+
+    r = apiManager.get( '/account', body={}, token=token)
+
+
+    return r
+
 def listRoot(rootName, token, pathTypes= None, pageStart = None, listAll = True):
     token = sanitize.validString('token', token, True)
     rootName = sanitize.validString('rootName', rootName, True)
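For reference, a minimal sketch of how the reworked login flow and the new getInfo helper fit together; the username and password are placeholders, and the functions are imported from their defining module because the re-exports in ellipsis/account/__init__.py are not shown in this excerpt:

from ellipsis.account.root import logIn, getInfo

# Exchange Ellipsis Drive credentials (placeholders) for a bearer token.
token = logIn('my-username', 'my-password')

# Fetch the account document for the authenticated user.
info = getInfo(token)
print(info)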
ellipsis-3.3.0/ellipsis/apiManager.py

@@ -0,0 +1,183 @@
+import requests
+import json
+import urllib
+import os
+import time
+from requests_toolbelt import MultipartEncoder
+
+baseUrl = 'https://api.ellipsis-drive.com/v3'
+
+
+def filterNone(body, toString= False):
+    if type(body) == type(None):
+        return body
+
+    params = {}
+    for k in body.keys():
+        if type(body[k]) != type(None):
+            if toString:
+                if str(type(body[k])) != str(type('x')):
+                    params[k] = json.dumps(body[k])
+                else:
+                    params[k] = body[k]
+
+            else:
+                params[k] = body[k]
+    return params
+
+def get(url, body = None, token = None, crash = True, parseJson = True):
+    if body == None:
+        body = {'token': token}
+    else:
+        body['token'] = token
+    body = filterNone(body)
+
+    for k in body.keys():
+        if type(body[k]) != type('x') :
+            body[k] = json.dumps(body[k])
+
+
+    body = urllib.parse.urlencode(body)
+
+    url = url + '?' + body
+
+    r = call( method = requests.get, url = url, body = None, token = token, crash = crash, parseJson = parseJson )
+
+
+    return r
+
+
+def post(url, body, token=None):
+    r = call(method=requests.post, url=url, body=body, token=token)
+    return r
+
+
+def put(url, body, token=None):
+    r = call(method=requests.put, url=url, body=body, token=token)
+    return r
+
+
+def patch(url, body, token=None):
+    r = call(method=requests.patch, url=url, body=body, token=token)
+    return r
+
+
+def delete(url, body, token = None):
+    r = call( method = requests.delete, url = url, body = body, token = token )
+    return r
+
+RETRIES = 10
+WAIT = 2
+def call(method, url, body = None, token = None, crash = True, parseJson = True):
+    tried = 0
+    while tried <RETRIES:
+        try:
+            r = actualCall(method, url, body, token)
+            break
+        except Exception as ex:
+            print('time out detected, retrying request')
+            print('url', url)
+            print('error',ex)
+            time.sleep(WAIT)
+            tried = tried +1
+    if tried >= RETRIES:
+        raise ValueError('Could not reach server')
+    if crash:
+        if r.status_code != 200:
+            raise ValueError(r.text)
+
+        if parseJson:
+            try:
+                r = r.json()
+            except:
+                r = r.text
+
+        return r
+    else:
+        return r
+
+TIMEOUTTIME = 20
+def actualCall(method, url, body, token):
+    body = filterNone(body)
+    if type(body) != type(None) and type(body) != type({}):
+        raise ValueError(
+            'body of an API call must be of type dict or noneType')
+
+    if type(token) != type(None) and type(token) != type('x'):
+        raise ValueError('Token must be of type string or noneType')
+
+    if token == None:
+        r = method(baseUrl + url, json=body, timeout=TIMEOUTTIME)
+    else:
+        if not 'Bearer' in token:
+            token = 'Bearer ' + token
+        r = method(baseUrl + url, json=body, headers={"Authorization": token}, timeout=TIMEOUTTIME)
+
+    return r
+
+def upload(url, filePath, body, token, key = 'data', memfile= None):
+    body['debug'] = True
+    body = filterNone(body, toString=True)
+
+    seperator = os.path.sep
+    fileName = filePath.split(seperator)[len(filePath.split(seperator))-1 ]
+
+    if str(type(memfile)) == str(type(None)):
+        conn_file = open(filePath, 'rb')
+    else:
+        conn_file = memfile
+
+    payload = MultipartEncoder(fields = {**body, key: (fileName, conn_file, 'application/octet-stream')})
+    if not 'Bearer' in token:
+        token = 'Bearer ' + token
+
+    r = requests.post(baseUrl + url, headers = {"Authorization":token, "Content-Type": payload.content_type}, data=payload, verify=False)
+
+    if str(type(memfile)) == str(type(None)):
+        conn_file.close()
+
+    if r.status_code != 200:
+        raise ValueError(r.text)
+    return r.json()
+
+def download(url, filePath=None, token = None, memfile = None):
+    if type(token) == type(None):
+        with requests.get(baseUrl + url, stream=True) as r:
+            r.raise_for_status()
+            if str(type(memfile)) == str(type(None)):
+                with open(filePath, 'wb') as f:
+                    for chunk in r.iter_content(chunk_size=8192):
+                        # If you have chunk encoded response uncomment if
+                        # and set chunk_size parameter to None.
+                        #if chunk:
+                        f.write(chunk)
+            else:
+                for chunk in r.iter_content(chunk_size=8192):
+                    # If you have chunk encoded response uncomment if
+                    # and set chunk_size parameter to None.
+                    #if chunk:
+                    memfile.write(chunk)
+
+
+
+    else:
+        token = 'Bearer ' + token
+        with requests.get(baseUrl + url, stream=True, headers={"Authorization": token}) as r:
+            r.raise_for_status()
+            if str(type(memfile)) == str(type(None)):
+                with open(filePath, 'wb') as f:
+                    for chunk in r.iter_content(chunk_size=8192):
+                        # If you have chunk encoded response uncomment if
+                        # and set chunk_size parameter to None.
+                        #if chunk:
+                        f.write(chunk)
+            else:
+                for chunk in r.iter_content(chunk_size=8192):
+                    # If you have chunk encoded response uncomment if
+                    # and set chunk_size parameter to None.
+                    #if chunk:
+                    memfile.write(chunk)
+
+
+
+
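The rewritten apiManager centralizes all HTTP traffic: the verb helpers (get, post, put, patch, delete) delegate to call(), which retries a failed request up to RETRIES times with a WAIT-second pause and, with crash left at its default, raises on any non-200 response and parses the JSON body. A small sketch of how other modules in this diff use it; the token and credentials are placeholders:

import requests
from ellipsis import apiManager

# High-level helper: raises on non-200 and returns the parsed JSON body.
account = apiManager.get('/account', body={}, token='my-token')  # placeholder token

# Lower-level call with crash=False, as account/root.py does for /account/login:
# the raw requests.Response is returned so status codes can be inspected manually.
r = apiManager.call(requests.post, '/account/login',
                    body={'username': 'u', 'password': 'p'},  # placeholder credentials
                    token=None, crash=False)
print(r.status_code)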
ellipsis-3.3.0/ellipsis/compute/__init__.py

@@ -0,0 +1 @@
+from ellipsis.compute.root import createCompute, listComputes, execute, terminateCompute, terminateAll, addToLayer
ellipsis-3.3.0/ellipsis/compute/root.py

@@ -0,0 +1,180 @@
+import dill
+import base64
+import time
+
+from ellipsis.util.root import recurse
+from ellipsis import sanitize
+from ellipsis.account import getInfo
+from ellipsis import apiManager
+from ellipsis.path.raster.timestamp.file import add as addFile
+from ellipsis.path.raster.timestamp import activate
+from io import BytesIO
+
+def createCompute(layers, token, files = None, nodes=None, interpreter='python3.12', requirements= [], awaitTillStarted = True, largeResult = False):
+    layers = sanitize.validDictArray('layers', layers, True)
+    files = sanitize.validUuidArray('files', files, False)
+    token = sanitize.validString('token', token, True)
+    nodes = sanitize.validInt('nodes', nodes, False)
+    interpreter = sanitize.validString('interpreter', interpreter, True)
+    requirements = sanitize.validStringArray('requirements', requirements, False)
+    largeResult = sanitize.validBool('largeResult', largeResult, True)
+    if type(nodes) == type(None):
+        info = getInfo(token=token)
+        nodes = info['plan']['maxComputeNodes']
+        if nodes == 0:
+            raise ValueError('You have no compute nodes in your plan. Please update your subscription')
+
+    requirements = "\n".join(requirements)
+
+    body = {'layers':layers, 'files':files, 'interpreter':interpreter, 'nodes':nodes, 'requirements':requirements, 'largeResult': largeResult}
+    r = apiManager.post('/compute', body, token)
+
+    computeId = r['id']
+    while awaitTillStarted:
+        print('waiting')
+        res = listComputes(token=token)['result']
+        r = [x for x in res if x['id'] == computeId][0]
+        if r['status'] == 'available':
+            break
+        if r['status'] == 'errored':
+            raise ValueError(r['message'])
+
+        time.sleep(1)
+
+    return {'id':computeId}
+
+
+def execute(computeId, f, token, awaitTillCompleted=True, writeToLayer = None):
+    computeId = sanitize.validUuid('computeId', computeId, True)
+    token = sanitize.validString('token', token, True)
+    writeToLayer = sanitize.validObject('writeToLayer', writeToLayer, False)
+    awaitTillCompleted = sanitize.validBool('awaitTillCompleted', awaitTillCompleted, False)
+
+    if str(type(f)) != "<class 'function'>":
+        raise ValueError('parameter f must be a function')
+    if type(writeToLayer) != type(None):
+        if not 'file' in writeToLayer:
+            writeToLayer['file'] = {'format':'tif'}
+    f_bytes = dill.dumps(f)
+    f_string = base64.b64encode( f_bytes )
+    f_string = str(f_string)[2: -1]
+    body = { 'file':f_string, 'writeToLayer':writeToLayer}
+    apiManager.post('/compute/' + computeId + '/execute', body, token)
+
+    while awaitTillCompleted:
+        res = listComputes(token=token)['result']
+        r = [x for x in res if x['id'] == computeId][0]
+        print('waiting')
+        if r['status'] == 'completed':
+            break
+        if r['status'] == 'errored':
+            raise ValueError(str(r['message']))
+        time.sleep(1)
+
+    for x in r['result']:
+        if x['type'] == 'exception':
+            raise x['value']
+
+
+    values = [ '/compute/' + computeId + '/file/'+ x['value'] if x['type'] == 'file' else x['value'] for x in r['result']]
+    return values
+
+def parseResults(r):
+    results = []
+    for x in r:
+        x = base64.b64decode(x)
+        x = dill.loads(x)
+        results = results + x
+
+    return results
+
+def terminateCompute(computeId, token, awaitTillTerminated = True):
+    computeId = sanitize.validUuid('computeId', computeId, True)
+    token = sanitize.validString('token', token, True)
+    sanitize.validBool('awaitTillTerminated',awaitTillTerminated, True)
+
+
+    r = apiManager.post('/compute/' + computeId + '/terminate', {}, token)
+
+    while awaitTillTerminated:
+        res = listComputes(token=token)['result']
+        z = [x for x in res if x['id'] == computeId][0]
+        if z['status'] == 'stopped':
+            break
+        time.sleep(1)
+
+    return r
+
+def terminateAll(token, awaitTillTerminated = True ):
+    token = sanitize.validString('token', token, True)
+    sanitize.validBool('awaitTillTerminated',awaitTillTerminated, True)
+
+    res = listComputes(token = token)['result']
+
+    for x in res:
+        if x['status'] != 'stopped' and x['status'] != 'errored' and x['status'] != 'stopping':
+
+            apiManager.post('/compute/' + x['id'] + '/terminate', {}, token)
+
+    while awaitTillTerminated:
+        res = listComputes(token=token)['result']
+        z = [x for x in res if x['id'] == x['id']][0]
+        if z['status'] == 'stopped':
+            break
+        time.sleep(1)
+
+
+
+def getComputeInfo(computeId, token):
+    res = listComputes(token=token)['result']
+    r = [x for x in res if x['id'] == computeId]
+    if len(r) ==0:
+        raise ValueError('No compute found for given id')
+    return r[0]
+
+def listComputes(token, pageStart = None, listAll = True):
+    token = sanitize.validString('token', token, True)
+
+
+    body = { 'pageStart':pageStart }
+
+
+    def f(body):
+        return apiManager.get('/compute', body, token)
+
+    r = recurse(f, body, listAll)
+    for i in range(len(r['result'])):
+        if 'result' in r['result'][i]:
+            r['result'][i]['result'] = parseResults(r['result'][i]['result'])
+    return r
+
+def addToLayer(response, pathId, timestampId, token):
+    pathId = sanitize.validUuid('pathId', pathId, True)
+    timestampId = sanitize.validUuid('timestampId', timestampId, True)
+    token = sanitize.validString('token', token, True)
+
+    for url in response:
+        print('fetching file ' + url.split('/')[-1])
+        memfile = BytesIO()
+        memfile = downloadFile(url, token, memfile=memfile)
+        print('read file ' + url.split('/')[-1])
+        print('adding file ' + url.split('/')[-1])
+        addFile(pathId = pathId, timestampId=timestampId, token = token, fileFormat='tif', memFile=memfile, name= url.split('/')[-1] + '.tif' )
+        print('file ' + url.split('/')[-1] + ' added to layer')
+    activate(pathId=pathId, timestampId=timestampId, token=token)
+    print('layer can now be found at ' + apiManager.baseUrl + '/drive/me?pathId=' + pathId )
+
+
+
+def downloadFile(url, token, filePath = None, memfile = None):
+
+    url = sanitize.validString('url', url, True)
+    token = sanitize.validString('token', token, True)
+    filePath = sanitize.validString('filePath', filePath, False)
+    if memfile == None and filePath == None:
+        raise ValueError('Either memfile or filePath is required')
+
+    apiManager.download(url = url, filePath=filePath, memfile=memfile, token = token)
+
+    if memfile != None:
+        return memfile
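A minimal sketch of the new compute workflow, assuming a valid token and a plan with at least one compute node; the required shape of the layers entries is not documented in this diff, so an empty list is used purely as a placeholder:

from ellipsis.compute import createCompute, execute, terminateCompute

token = 'my-token'  # placeholder, e.g. obtained via ellipsis.account.root.logIn

# Start a compute cluster; createCompute polls until its status is 'available'.
compute = createCompute(layers=[], token=token, nodes=1)  # layers shape is an assumption

# Any module-level function can be shipped; execute serializes it with dill + base64.
def job():
    return 42

# execute polls until the run is 'completed' and returns the collected result values
# (results of type 'file' come back as download URLs under /compute/<id>/file/...).
results = execute(compute['id'], job, token)

terminateCompute(compute['id'], token)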
{ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/__init__.py

@@ -1,10 +1,13 @@
-from ellipsis.path.root import
+from ellipsis.path.root import search, favorite, unfavorite, editPublicAccess, delete, editMetadata, move, rename, trash, recover, get
 from ellipsis.path import hashtag
 from ellipsis.path import invite
 from ellipsis.path import member
 from ellipsis.path import raster
 from ellipsis.path import vector
+from ellipsis.path import pointCloud
 from ellipsis.path import usage
 from ellipsis.path import folder
 from ellipsis.path import file
+from ellipsis.path import bookmark
+from ellipsis.path import setUpTask
 
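With these re-exports in place, the new sub-packages become reachable from the top-level namespace used in the README examples; a short sketch (attribute access only, the bookmark functions themselves appear in the hunk below):

import ellipsis as el

el.path.bookmark     # bookmark paths (get / add / edit)
el.path.pointCloud   # point-cloud layers and their timestamps
el.path.setUpTask    # setup tasks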
ellipsis-3.3.0/ellipsis/path/bookmark/root.py

@@ -0,0 +1,39 @@
+from ellipsis import apiManager
+from ellipsis import sanitize
+
+
+def get(pathId, token=None):
+    token = sanitize.validString('token', token, False)
+    pathId = sanitize.validUuid('pathId', pathId, True)
+
+    r = apiManager.get('/path/' + pathId + '/bookmark', {}, token)
+
+    return r
+
+
+
+def add(name, bookmark, token, parentId = None, publicAccess = None, metadata=None):
+    token = sanitize.validString('token', token, True)
+    bookmark = sanitize.validObject('bookmark', bookmark, True)
+    name = sanitize.validString('pathId', name, False)
+    metadata = sanitize.validObject('metadata', metadata, False)
+    publicAccess = sanitize.validObject('publicAccess', publicAccess, False)
+    parentId = sanitize.validUuid('parentId', parentId, False)
+
+    r = apiManager.post('/path/bookmark', {'name':name, 'bookmark':bookmark , 'parentId':parentId, 'publicAccess':publicAccess, 'metadata':metadata}, token)
+
+    return r
+
+
+def edit(pathId, token, layers=None, dems=None):
+    layers = sanitize.validObject('layers', layers, False)
+    dems = sanitize.validObject('dems', dems, False)
+    pathId = sanitize.validUuid('pathId', pathId, True)
+
+    r = apiManager.patch('/path/' + pathId + '/bookmark', {'layers':layers, 'dems':dems}, token)
+
+    return r
+
+
+
+
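A hedged usage sketch for the new bookmark endpoints; the structure of the bookmark object itself is not documented in this diff (edit() suggests it can contain 'layers' and 'dems'), so an empty dict and the ids below are placeholders only:

from ellipsis.path.bookmark.root import add, get

token = 'my-token'  # placeholder

# Create a bookmark path; 'bookmark' must be a dict (validated by sanitize.validObject).
r = add(name='my bookmark', bookmark={}, token=token)  # bookmark contents are a placeholder

# Read a bookmark back by its path id (placeholder id).
bm = get(pathId='00000000-0000-0000-0000-000000000000', token=token)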
ellipsis-3.3.0/ellipsis/path/file/root.py

@@ -0,0 +1,155 @@
+from ellipsis import apiManager
+from ellipsis import sanitize
+
+import os
+import pickle
+from io import BytesIO
+import json
+import pandas as pd
+
+
+def add( token, filePath=None, memFile=None, parentId = None, publicAccess =None, metadata=None,name=None):
+    token = sanitize.validString('token', token, True)
+    parentId = sanitize.validUuid('parentId', parentId, False)
+    publicAccess = sanitize.validObject('publicAccess', publicAccess, False)
+    metadata = sanitize.validObject('metadata', metadata, False)
+    filePath = sanitize.validString('filePath', filePath, False)
+    name = sanitize.validString('name', name, False)
+    if type(memFile) == type(None) and type(filePath) == type(None):
+        raise ValueError('You need to specify either a filePath or a memFile')
+    if type(memFile) != type(None) and type(name) == type(None):
+        raise ValueError('Parameter name is required when using a memory file')
+
+    seperator = os.path.sep
+    fileName = filePath.split(seperator)[len(filePath.split(seperator))-1 ]
+    if len(fileName) > 64:
+        fileName = fileName[0:63]
+    body = {'name':fileName, 'publicAccess':publicAccess, 'metadata':metadata, 'parentId':parentId}
+    if type(memFile) == type(None):
+        r = apiManager.upload('/path/file' , filePath, body, token, key = 'data')
+    else:
+        r = apiManager.upload('/path/file' , filePath , name, body, token, memfile = memFile)
+
+    return r
+
+
+def download(pathId, filePath, token=None):
+    pathId = sanitize.validUuid('pathId', pathId, True)
+    token = sanitize.validString('token', token, False)
+    filePath = sanitize.validString('filePath', filePath, True)
+
+
+    apiManager.download('/path/' + pathId + '/file/data', filePath, token)
+
+
+
+def addCsv(df, name, token, parentId = None, publicAccess =None, metadata=None):
+    token = sanitize.validString('token', token, True)
+    name = sanitize.validString('name', name, True)
+    df = sanitize.validPandas('df', df, True)
+
+    parentId = sanitize.validUuid('parentId', parentId, False)
+    publicAccess = sanitize.validObject('publicAccess', publicAccess, False)
+    metadata = sanitize.validObject('metadata', metadata, False)
+
+    memfile = BytesIO()
+    df.to_csv(memfile)
+
+
+    body = {'name':name, 'publicAccess':publicAccess, 'metadata':metadata, 'parentId':parentId}
+    r = apiManager.upload('/path/file' , name, body, token, key='data', memfile = memfile)
+
+
+    return r
+
+def getCsv(pathId, token=None):
+    pathId = sanitize.validUuid('pathId', pathId, True)
+    token = sanitize.validString('token', token, False)
+
+
+    r = apiManager.get('/path/' + pathId + '/file/data', {}, token)
+
+
+    memfile = BytesIO(bytes(r, 'utf-8'))
+    try:
+        df = pd.read_csv(memfile)
+    except:
+        raise ValueError('Read file is not a valid CSV')
+    return df
+
+
+
+def addJson(d, name, token, parentId = None, publicAccess =None, metadata=None):
+    token = sanitize.validString('token', token, True)
+    name = sanitize.validString('name', name, True)
+    d = sanitize.validObject('d', d, True)
+
+    parentId = sanitize.validUuid('parentId', parentId, False)
+    publicAccess = sanitize.validObject('publicAccess', publicAccess, False)
+    metadata = sanitize.validObject('metadata', metadata, False)
+
+    memfile = BytesIO()
+    d = json.dumps(d)
+    memfile.write(bytes(d, 'utf-8'))
+
+
+    body = {'name':name, 'publicAccess':publicAccess, 'metadata':metadata, 'parentId':parentId}
+    r = apiManager.upload('/path/file' , name, body, token, key='data', memfile = memfile)
+
+
+    return r
+
+
+def getJson(pathId, token=None):
+    pathId = sanitize.validUuid('pathId', pathId, True)
+    token = sanitize.validString('token', token, False)
+
+    memfile = BytesIO()
+    apiManager.download('/path/' + pathId + '/file/data', '', token, memfile)
+    memfile.seek(0)
+    try:
+        x = json.load(memfile)
+    except:
+        raise ValueError('Read file not a valid pickle file')
+
+
+    return x
+
+
+
+def addPickle(x, name, token, parentId = None, publicAccess =None, metadata=None):
+    token = sanitize.validString('token', token, True)
+    name = sanitize.validString('name', name, True)
+    parentId = sanitize.validUuid('parentId', parentId, False)
+    publicAccess = sanitize.validObject('publicAccess', publicAccess, False)
+    metadata = sanitize.validObject('metadata', metadata, False)
+
+    memfile = BytesIO()
+    pickle.dump(x, memfile)
+
+    body = {'name':name, 'publicAccess':publicAccess, 'metadata':metadata, 'parentId':parentId}
+    r = apiManager.upload('/path/file' , name, body, token, key='data', memfile = memfile)
+
+    return r
+
+
+def getPickle(pathId, token=None):
+    pathId = sanitize.validUuid('pathId', pathId, True)
+    token = sanitize.validString('token', token, False)
+
+    memfile = BytesIO()
+    apiManager.download('/path/' + pathId + '/file/data', '', token, memfile)
+    memfile.seek(0)
+    try:
+        x = pickle.load(memfile)
+    except:
+        raise ValueError('Read file not a valid pickle file')
+
+
+    return x
+
+
+
+
+
+
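A sketch of the new CSV helpers, assuming the upload response contains the id of the newly created file path (the response shape is not shown in this diff); the token is a placeholder:

import pandas as pd
from ellipsis.path.file.root import addCsv, getCsv

token = 'my-token'  # placeholder
df = pd.DataFrame({'a': [1, 2], 'b': [3, 4]})

# Serialize the frame to an in-memory CSV and upload it as a file path.
r = addCsv(df, 'example.csv', token)

# Download and parse it again; the 'id' key in the response is an assumption.
df_back = getCsv(r['id'], token)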
{ellipsis-3.1.1 → ellipsis-3.3.0}/ellipsis/path/folder/root.py

@@ -36,3 +36,17 @@ def add( name, token, parentId = None, publicAccess =None, metadata=None):
 
     return apiManager.post('/path/folder', body, token)
 
+def traverse(pathId, location, pathType, token):
+    pathId = sanitize.validUuid('pathId', pathId, True)
+    location = sanitize.validStringArray('location', location, True)
+    pathType = sanitize.validString('pathType', pathType, True)
+    token = sanitize.validString('token', token, False)
+
+    body = {'location': location, 'type':pathType }
+
+    return apiManager.get('/path/' + pathId + '/folder/traverse', body, token)
+
+
+
+
+
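A sketch of the new traverse helper, which appears to resolve a path by a list of folder names relative to a starting folder (the endpoint's exact semantics are not documented in this diff); all ids, names, and the token below are placeholders:

from ellipsis.path.folder.root import traverse

result = traverse(
    pathId='00000000-0000-0000-0000-000000000000',  # placeholder id of the starting folder
    location=['projects', '2023'],                  # placeholder folder names to walk, in order
    pathType='folder',                              # type of the path expected at the end
    token='my-token',                               # placeholder token
)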