ellipsis 3.1.45.tar.gz → 3.1.46.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ellipsis might be problematic.
- {ellipsis-3.1.45 → ellipsis-3.1.46}/PKG-INFO +1 -1
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/__init__.py +1 -2
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/__init__.py +1 -0
- ellipsis-3.1.46/ellipsis/path/bookmark/__init__.py +2 -0
- ellipsis-3.1.46/ellipsis/path/bookmark/root.py +39 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/pointCloud/timestamp/file/root.py +20 -5
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/raster/timestamp/__init__.py +1 -3
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/raster/timestamp/file/root.py +22 -8
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/raster/timestamp/root.py +41 -91
- ellipsis-3.1.46/ellipsis/path/vector/timestamp/feature/root.py +184 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/vector/timestamp/file/root.py +24 -8
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/vector/timestamp/order/root.py +3 -2
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/vector/timestamp/root.py +10 -11
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/util/__init__.py +1 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/util/root.py +25 -10
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis.egg-info/PKG-INFO +1 -1
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis.egg-info/SOURCES.txt +2 -2
- {ellipsis-3.1.45 → ellipsis-3.1.46}/setup.py +1 -1
- {ellipsis-3.1.45 → ellipsis-3.1.46}/test/test.py +67 -28
- ellipsis-3.1.45/ellipsis/path/vector/timestamp/feature/root.py +0 -259
- ellipsis-3.1.45/ellipsis/view/__init__.py +0 -3
- ellipsis-3.1.45/ellipsis/view/root.py +0 -59
- {ellipsis-3.1.45 → ellipsis-3.1.46}/LICENSE +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/README.md +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/account/__init__.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/account/accessToken/__init__.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/account/accessToken/root.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/account/root.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/apiManager.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/file/__init__.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/file/root.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/folder/__init__.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/folder/root.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/hashtag/__init__.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/hashtag/root.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/invite/__init__.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/invite/root.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/member/__init__.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/member/root.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/pointCloud/__init__.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/pointCloud/root.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/pointCloud/timestamp/__init__.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/pointCloud/timestamp/file/__init__.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/pointCloud/timestamp/order/__init__.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/pointCloud/timestamp/order/root.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/pointCloud/timestamp/root.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/pointCloud/timestamp/util.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/raster/__init__.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/raster/root.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/raster/style/__init__.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/raster/style/root.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/raster/timestamp/file/__init__.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/raster/timestamp/order/__init__.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/raster/timestamp/order/root.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/raster/timestamp/util.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/root.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/usage/__init__.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/usage/root.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/vector/__init__.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/vector/featureProperty/__init__.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/vector/featureProperty/root.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/vector/root.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/vector/style/__init__.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/vector/style/root.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/vector/timestamp/__init__.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/vector/timestamp/feature/__init__.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/vector/timestamp/feature/message/__init__.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/vector/timestamp/feature/message/root.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/vector/timestamp/feature/series/__init__.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/vector/timestamp/feature/series/root.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/vector/timestamp/file/__init__.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/vector/timestamp/order/__init__.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/sanitize.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/user/__init__.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/user/root.py +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis.egg-info/dependency_links.txt +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis.egg-info/requires.txt +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis.egg-info/top_level.txt +0 -0
- {ellipsis-3.1.45 → ellipsis-3.1.46}/setup.cfg +0 -0
ellipsis-3.1.46/ellipsis/path/bookmark/root.py (new file)

@@ -0,0 +1,39 @@
+from ellipsis import apiManager
+from ellipsis import sanitize
+
+
+def get(pathId, token=None):
+    token = sanitize.validString('token', token, False)
+    pathId = sanitize.validUuid('pathId', pathId, True)
+
+    r = apiManager.get('/path/' + pathId + '/bookmark', {}, token)
+
+    return r
+
+
+
+def add(name, bookmark, token, parentId = None, publicAccess = None, metadata=None):
+    token = sanitize.validString('token', token, True)
+    bookmark = sanitize.validObject('bookmark', bookmark, True)
+    name = sanitize.validString('pathId', name, False)
+    metadata = sanitize.validObject('metadata', metadata, False)
+    publicAccess = sanitize.validObject('publicAccess', publicAccess, False)
+    parentId = sanitize.validUuid('parentId', parentId, False)
+
+    r = apiManager.post('/path/bookmark', {'name':name, 'bookmark':bookmark , 'parentId':parentId, 'publicAccess':publicAccess, 'metadata':metadata}, token)
+
+    return r
+
+
+def edit(pathId, token, layers=None, dems=None):
+    layers = sanitize.validObject('layers', layers, False)
+    dems = sanitize.validObject('dems', dems, False)
+    pathId = sanitize.validUuid('pathId', pathId, True)
+
+    r = apiManager.patch('/path/' + pathId + '/bookmark', {'layers':layers, 'dems':dems}, token)
+
+    return r
+
+
+
+
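For context (not part of the released diff), a minimal usage sketch of the new bookmark module; the import path via the root submodule, the token, the IDs, and the shape of the bookmark/layers objects are assumptions, since the diff only shows the endpoint wrappers:

# Hedged usage sketch for ellipsis/path/bookmark/root.py (placeholder values throughout).
from ellipsis.path.bookmark import root as bookmark

token = 'YOUR_TOKEN'                             # assumed: a valid ellipsis access token
pathId = '00000000-0000-0000-0000-000000000000'  # assumed: an existing path UUID

# List bookmarks on a path (token optional for public paths).
bookmarks = bookmark.get(pathId=pathId, token=token)

# Create a bookmark; the expected keys of the 'bookmark' dict are not shown in the diff.
created = bookmark.add(name='my view', bookmark={}, token=token, parentId=pathId)

# Patch the layers/dems stored on a bookmark path; list contents are assumed.
bookmark.edit(pathId=pathId, token=token, layers=[], dems=[])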
{ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/pointCloud/timestamp/file/root.py

@@ -6,19 +6,34 @@ import os
 import numpy as np
 import geopandas as gpd
 
-def add(pathId, timestampId, filePath,
+def add(pathId, timestampId, token, fileFormat, filePath = None, memFile =None, name=None, epsg = None):
     token = sanitize.validString('token', token, True)
     pathId = sanitize.validUuid('pathId', pathId, True)
     timestampId = sanitize.validUuid('timestampId', timestampId, True)
-    filePath = sanitize.validString('filePath', filePath,
+    filePath = sanitize.validString('filePath', filePath, False)
     epsg = sanitize.validInt('epsg', epsg, True)
     fileFormat = sanitize.validString('fileFormat', fileFormat, True)
+    name = sanitize.validString('name', name, False)
+
+    if type(memFile) == type(None) and type(filePath) == type(None):
+        raise ValueError('You need to specify either a filePath or a memFile')
+
+    if type(memFile) != type(None) and type(name) == type(None):
+        raise ValueError('Parameter name is required when using a memory file')
+
+    if type(name ) == type(None):
+        seperator = os.path.sep
+        fileName = filePath.split(seperator)[len(filePath.split(seperator))-1 ]
+    else:
+        fileName = name
 
-    seperator = os.path.sep
-    fileName = filePath.split(seperator)[len(filePath.split(seperator))-1 ]
 
     body = {'name':fileName, 'epsg':epsg, 'format':fileFormat}
-
+    if type(memFile) == type(None):
+        r = apiManager.upload('/path/' + pathId + '/pointCloud/timestamp/' + timestampId + '/file' , filePath, body, token)
+    else:
+        r = apiManager.upload('/path/' + pathId + '/pointCloud/timestamp/' + timestampId + '/file' , name, body, token, memfile=memFile)
+
     return r
 
 def get(pathId, timestampId, token, pageStart= None, listAll = True):
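Illustrative usage of the reworked signature (a sketch, not taken from the package): fileFormat and token are now required up front, and either filePath or memFile must be supplied, with name mandatory for in-memory uploads. The values, the 'las' format string, and the file-like type of memFile are assumptions:

import io
from ellipsis.path.pointCloud.timestamp.file import root as pointCloudFile

# Upload from disk; the file name sent to the API is derived from filePath.
r = pointCloudFile.add(pathId='path-uuid', timestampId='timestamp-uuid', token='YOUR_TOKEN',
                       fileFormat='las', filePath='/data/cloud.las', epsg=4326)

# Upload from memory; name is required because no filePath is given.
buf = io.BytesIO(open('/data/cloud.las', 'rb').read())  # assumed: memFile takes a file-like object
r = pointCloudFile.add(pathId='path-uuid', timestampId='timestamp-uuid', token='YOUR_TOKEN',
                       fileFormat='las', memFile=buf, name='cloud.las', epsg=4326)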
{ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/raster/timestamp/__init__.py

@@ -6,13 +6,11 @@ from ellipsis.path.raster.timestamp.root import deactivate
 from ellipsis.path.raster.timestamp.root import getBounds
 from ellipsis.path.raster.timestamp.root import analyse
 from ellipsis.path.raster.timestamp.root import getRaster
-from ellipsis.path.raster.timestamp.root import getDownsampledRaster
 from ellipsis.path.raster.timestamp.root import trash
 from ellipsis.path.raster.timestamp.root import recover
 from ellipsis.path.raster.timestamp.root import getSampledRaster
-from ellipsis.path.raster.timestamp.root import getValuesAlongLine
 from ellipsis.path.raster.timestamp.root import contour
-
+from ellipsis.path.raster.timestamp.root import getLocationInfo
 from ellipsis.path.raster.timestamp import file
 from ellipsis.path.raster.timestamp import order
 
{ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/raster/timestamp/file/root.py

@@ -6,21 +6,35 @@ import os
 import numpy as np
 import geopandas as gpd
 
-def add(pathId, timestampId,
+def add(pathId, timestampId, token, fileFormat, filePath=None, memFile = None, epsg = None, noDataValue = None, mosaicPriority = None, name = None):
     token = sanitize.validString('token', token, True)
     pathId = sanitize.validUuid('pathId', pathId, True)
     timestampId = sanitize.validUuid('timestampId', timestampId, True)
-    filePath = sanitize.validString('filePath', filePath,
-
+    filePath = sanitize.validString('filePath', filePath, False)
+    name = sanitize.validString('name', name, False)
+    noDataValue = sanitize.validFloat('noDataValue', noDataValue, False)
     epsg = sanitize.validInt('epsg', epsg, False)
     fileFormat = sanitize.validString('fileFormat', fileFormat, True)
     mosaicPriority = sanitize.validString('mosaicPriority', mosaicPriority, False)
-
-
-
-
+
+    if type(memFile) == type(None) and type(filePath) == type(None):
+        raise ValueError('You need to specify either a filePath or a memFile')
+
+    if type(memFile) != type(None) and type(name) == type(None):
+        raise ValueError('Parameter name is required when using a memory file')
+
+    if type(name ) == type(None):
+        seperator = os.path.sep
+        fileName = filePath.split(seperator)[len(filePath.split(seperator))-1 ]
+    else:
+        fileName = name
+
     body = {'name':fileName, 'epsg':epsg, 'noDataValue': noDataValue, 'format':fileFormat, 'mosaicPriority':mosaicPriority}
-
+    if type(memFile) == type(None):
+        r = apiManager.upload('/path/' + pathId + '/raster/timestamp/' + timestampId + '/file' , filePath, body, token)
+    else:
+        r = apiManager.upload('/path/' + pathId + '/raster/timestamp/' + timestampId + '/file' , name, body, token, memfile = memFile)
+
     return r
 
 def get(pathId, timestampId, token, pageStart= None, listAll = True):
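A comparable sketch for raster uploads (illustrative only, placeholder values; the allowed mosaicPriority strings are not shown in the diff): fileFormat is required, while noDataValue, mosaicPriority and the new name/memFile parameters stay optional:

from ellipsis.path.raster.timestamp.file import root as rasterFile

# Upload a GeoTIFF from disk; epsg is optional here (validated with required=False).
r = rasterFile.add(pathId='path-uuid', timestampId='timestamp-uuid', token='YOUR_TOKEN',
                   fileFormat='tif', filePath='/data/scene.tif',
                   noDataValue=0.0, mosaicPriority=None)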
{ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/raster/timestamp/root.py

@@ -23,42 +23,41 @@ import requests
 from skimage.measure import find_contours
 from shapely.geometry import Point, LineString
 
-def
-    return getSampledRaster(pathId, timestampId, extent, width, height, epsg, style, token)
-
-def getSampledRaster(pathId, timestampId, extent, width, height, epsg=4326, style = None, token = None):
-    bounds = extent
+def getSampledRaster(pathId, timestampId, extent, width, height, epsg=3857, token = None):
     token = sanitize.validString('token', token, False)
     pathId = sanitize.validUuid('pathId', pathId, True)
     timestampId = sanitize.validUuid('timestampId', timestampId, True)
-
-    style = sanitize.validObject('style', style, False)
+    extent = sanitize.validBounds('extent', extent, True)
     epsg = sanitize.validInt('epsg', epsg, True)
-    body = {'pathId':pathId, 'timestampId':timestampId, 'extent':bounds, 'width':width, 'height':height, 'style':style, 'epsg':epsg}
 
-
-
+    res = getActualExtent(extent['xMin'], extent['xMax'], extent['yMin'], extent['yMax'], 'EPSG:' + str(epsg))
+
+    if res['status'] == '400':
+        raise ValueError('Invalid epsg and extent combination')
+
+    extentWeb = res['message']
+
+
+
+    body = {'pathId':pathId, 'timestampId':timestampId, 'extent':extentWeb, 'width':width, 'height':height, 'applyStyle':False}
+
 
     r = apiManager.get('/path/' + pathId + '/raster/timestamp/' + timestampId + '/rasterByExtent', body, token, crash = True, parseJson = False)
 
 
-
-        r = tifffile.imread(BytesIO(r.content))
-    else:
-        r = np.array(Image.open(BytesIO(r.content)))
-    #tif also has bands in last channel
-    r = np.transpose(r, [2,0,1])
+    r = tifffile.imread(BytesIO(r.content))
 
-
-    yMin = bounds['yMin']
-    xMax = bounds['xMax']
-    yMax = bounds['yMax']
+    r = np.transpose(r, [2,0,1])
 
 
 
-    trans = rasterio.transform.from_bounds(xMin, yMin, xMax, yMax, r.shape[2], r.shape[1])
 
-
+    if epsg != 3857:
+        return reprojectRaster(r=r, sourceExtent=extentWeb, targetExtent=extent, targetWidth=r.shape[2],
+                               targetHeight=r.shape[1], sourceEpsg=3857, targetEpsg=epsg, interpolation='nearest')
+    else:
+        trans = rasterio.transform.from_bounds( extent['xMin'], extent['yMin'], extent['xMax'], extent['yMax'], r.shape[2], r.shape[1])
+        return {'raster': r, 'transform':trans, 'extent': extent, 'crs':"EPSG:" + str(epsg) }
 
 
 def contour(pathId, timestampId, extent, interval = None, intervals = None, epsg = 4326, bandNumber = 1, token = None):
@@ -93,9 +92,11 @@ def contour(pathId, timestampId, extent, interval = None, intervals = None, epsg
     if type(intervals) == type(None):
         minVal = np.min(raster[bandNumber-1, raster[-1,:,:] == 1])
         maxVal = np.max(raster[bandNumber-1, raster[-1,:,:] == 1])
+
+        if type(interval) == type(None):
+            interval = (maxVal - minVal) /10
 
-
-        cont = minVal + interval
+        cont = minVal
 
         conts = []
         while cont < maxVal:
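As a hedged illustration of the changed default (placeholder IDs and extent, not from the package docs): when neither interval nor intervals is passed, the contour levels now start at the band minimum and step by a tenth of the band range:

from ellipsis.path.raster import timestamp as rasterTimestamp

extent = {'xMin': 4.0, 'xMax': 5.0, 'yMin': 51.0, 'yMax': 52.0}  # placeholder WGS84 extent

# interval=None and intervals=None -> interval = (maxVal - minVal) / 10, starting at minVal.
contours = rasterTimestamp.contour(pathId='path-uuid', timestampId='timestamp-uuid',
                                   extent=extent, bandNumber=1, token='YOUR_TOKEN')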
@@ -114,7 +115,6 @@ def contour(pathId, timestampId, extent, interval = None, intervals = None, epsg
         lines = find_contours(raster[bandNumber-1,:,:], mask=raster[-1,:,:] == 1, level = cont)
 
         newLines = []
-        line = lines[0]
         for line in lines:
             newLine = LineString([ Point( xMinWeb + x[0] * Lx , yMinWeb + x[1] * Ly ) for x in line])
             newLines = newLines + [newLine]
@@ -130,69 +130,25 @@ def contour(pathId, timestampId, extent, interval = None, intervals = None, epsg
     return sh
 
 
-def
+def getLocationInfo(pathId, timestampId, locations, epsg = 4326, token= None):
     pathId = sanitize.validUuid('pathId', pathId, True)
     timestampId = sanitize.validUuid('timestampId', timestampId, True)
     token = sanitize.validString('token', token, False)
-    line = sanitize.validShapely('line', line, True)
     epsg = sanitize.validInt('epsg', epsg, True)
 
-    if
-
+    if epsg != 4326:
+        points = [Point(l) for l in locations]
+        sh = gpd.GeoDataFrame({'geometry':points})
+        sh.crs = 'EPSG:' + str(epsg)
+        sh.to_crs('EPSG:4326')
+        locations = [ l for l in zip(sh.bounds['minx'], sh.bounds['miny'] )]
+    body = {'locations':locations}
 
-
-    temp.crs = 'EPSG:' + str(epsg)
-    temp = temp.to_crs('EPSG:3857')
-    line = temp['geometry'].values[0]
-    line = list(line.coords)
-
-    x_of_line = [p[0] for p in line]
-    y_of_line = [p[1] for p in line]
-
-    #the first action is to cacluate a bounding box for the raster we need to retrieve
-    xMin = min(x_of_line)
-    xMax = max(x_of_line)
-    yMin = min(y_of_line)
-    yMax = max(y_of_line)
-
-    d = (xMax - xMin) * 0.1
-    xMax = xMax + d
-    xMin = xMin -d
-    d = (yMax - yMin) * 0.1
-    yMax = yMax + d
-    yMin = yMin -d
-
-
-
-    #now we retrieve the needed raster we use epsg = 4326 but we can use other coordinates as well
-    extent = {'xMin': xMin, 'xMax':xMax, 'yMin':yMin, 'yMax':yMax}
-
-    size = 1000
-    r = getSampledRaster(pathId = pathId, timestampId = timestampId, extent = extent, width = size, height = size, epsg=3857, token = token)
-    raster = r['raster']
-
-    memfile = MemoryFile()
-    dataset = memfile.open( driver='GTiff', dtype='float32', height=size, width=size, count = raster.shape[0], crs= r['crs'], transform=r['transform'])
-    dataset.write(raster)
-
-
-
-    values = list(dataset.sample(line))
-
-    memfile.close()
-    return values
-
-def getLocationInfo(pathId, timestampId, locations, epsg = 4326, token= None):
-    pathId = sanitize.validUuid('pathId', pathId, True)
-    timestampId = sanitize.validUuid('timestampId', timestampId, True)
-    token = sanitize.validString('token', token, False)
-    epsg = sanitize.validInt('epsg', epsg, True)
-    body = {'locations':locations,'epsg':epsg}
-    r = apiManager.post('/path/' + pathId + '/raster/timestamp/' + timestampId + '/location', body, token)
+    r = apiManager.get('/path/' + pathId + '/raster/timestamp/' + timestampId + '/location', body, token)
     return r
 
 
-def getRaster(pathId, timestampId, extent, token = None, showProgress = True, epsg =
+def getRaster(pathId, timestampId, extent, token = None, showProgress = True, epsg = 3857):
     bounds = extent
 
     token = sanitize.validString('token', token, False)
@@ -200,12 +156,7 @@ def getRaster(pathId, timestampId, extent, token = None, showProgress = True, ep
     timestampId = sanitize.validUuid('timestampId', timestampId, True)
     bounds = sanitize.validBounds('bounds', bounds, True)
     showProgress = sanitize.validBool('showProgress', showProgress, True)
-
-
-    if type(threads)!= type(None):
-        Warning('The parameter threads is deprecated')
-    if type(style)!= type(None):
-        Warning('The parameter style is deprecated')
+
 
     xMin = bounds['xMin']
     yMin = bounds['yMin']
@@ -241,7 +192,6 @@ def getRaster(pathId, timestampId, extent, token = None, showProgress = True, ep
 
     zoom = t['zoom']
 
-    body = {"applyStyle":False}
 
     LEN = 2.003751e+07
 
@@ -294,7 +244,6 @@ def getRaster(pathId, timestampId, extent, token = None, showProgress = True, ep
 
     def fetch(tileX, tileY):
         if tarred:
-
             cuts = cutOfTilesPerZoom[zoom]
 
             zones = [{'name': 'zone0', 'offset':0, 'start':cuts['start'][0] , 'end':cuts['end'][0]}, ]
@@ -316,8 +265,8 @@ def getRaster(pathId, timestampId, extent, token = None, showProgress = True, ep
             frac_w = int(w/2**offset)
 
             url = apiManager.baseUrl + '/path/' + pathId + '/raster/timestamp/' + timestampId + '/tarTile/' + str(zoom_c) + '/' + str(tileX_c) + '/' + str(tileY_c)
-
-
+            url = url + '?applyStyle=false'
+
             if str(type(token)) == str(type(None)):
                 r = requests.get(url)
             else:
@@ -327,6 +276,7 @@ def getRaster(pathId, timestampId, extent, token = None, showProgress = True, ep
                 r = requests.get(url, headers={"Authorization": token})
 
         else:
+            body = {'applyStyle':False}
            r = apiManager.get('/path/' + pathId + '/raster/timestamp/' + timestampId + '/tile/' + str(zoom) + '/' + str(tileX) + '/' + str(tileY), body, token, False)
 
         if r.status_code == 403:
@@ -395,10 +345,10 @@ def getRaster(pathId, timestampId, extent, token = None, showProgress = True, ep
     r_total = r_total[:,min_y_index:max_y_index,min_x_index:max_x_index]
 
     mercatorExtent = {'xMin' : xMinWeb, 'yMin': yMinWeb, 'xMax': xMaxWeb, 'yMax': yMaxWeb}
-    if
+    if epsg == 3857:
         trans = rasterio.transform.from_bounds(xMinWeb, yMinWeb, xMaxWeb, yMaxWeb, r_total.shape[2], r_total.shape[1])
 
-        return {'raster': r_total, 'transform':trans, 'extent':mercatorExtent, '
+        return {'raster': r_total, 'transform':trans, 'extent':mercatorExtent, 'crs': 'EPSG:' + str(3857)}
     else:
         return reprojectRaster(r = r_total, sourceExtent = mercatorExtent, targetExtent = extent, targetWidth=r_total.shape[2], targetHeight=r_total.shape[1], sourceEpsg = 3857, targetEpsg= epsg, interpolation = 'nearest')
 
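To summarise the behavioural changes above, a rough usage sketch (placeholder IDs, token and coordinates; not from the package docs): getSampledRaster no longer takes a style and always fetches unstyled data, getRaster defaults to EPSG:3857, and getLocationInfo now issues a GET with the locations converted to WGS84 client side:

from ellipsis.path.raster import timestamp as rasterTimestamp

extent = {'xMin': 4.0, 'xMax': 5.0, 'yMin': 51.0, 'yMax': 52.0}  # placeholder WGS84 extent

# Sample a raster into a fixed-size array; non-3857 epsg codes are reprojected client side.
res = rasterTimestamp.getSampledRaster(pathId='path-uuid', timestampId='timestamp-uuid',
                                       extent=extent, width=512, height=512,
                                       epsg=4326, token='YOUR_TOKEN')
raster = res['raster']        # numpy array shaped (bands, height, width)
transform = res['transform']  # affine transform matching res['extent'] and res['crs']

# Query raster values at individual coordinates; getValuesAlongLine was removed in this release.
info = rasterTimestamp.getLocationInfo(pathId='path-uuid', timestampId='timestamp-uuid',
                                       locations=[(4.5, 51.5), (4.6, 51.6)],
                                       epsg=4326, token='YOUR_TOKEN')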
ellipsis-3.1.46/ellipsis/path/vector/timestamp/feature/root.py (new file)

@@ -0,0 +1,184 @@
+from ellipsis import apiManager
+from ellipsis import sanitize
+from ellipsis.util import chunks
+from ellipsis.util import loadingBar
+from ellipsis.util.root import stringToDate
+from ellipsis.util.root import recurse
+
+import numpy as np
+import json
+import geopandas as gpd
+
+
+
+def add(pathId, timestampId, features, token, showProgress = True, cores = 1):
+    pathId = sanitize.validUuid('pathId', pathId, True)
+    timestampId = sanitize.validUuid('timestampId', timestampId, True)
+    token = sanitize.validString('token', token, True)
+    showProgress = sanitize.validBool('showProgress', showProgress, True)
+    cores = sanitize.validInt('cores', cores, True)
+    features = sanitize.validGeopandas('features', features, True, cores = cores)
+
+    features_json = features.to_json(na='drop')
+    features_json = json.loads(features_json)
+    features_json = np.array(features_json['features'])
+
+    #check if first time
+    firstTime = apiManager.get('/path/' + pathId, None, token)
+    if not 'vector' in firstTime.keys():
+        raise ValueError('Can only add features if path is of type vector')
+    firstTime = len(firstTime['vector']['properties']) ==0
+
+    if firstTime:
+        print('no properties known for this timestamp adding them automatically')
+        columns = features.columns
+        columns = [c for c in columns if c != 'geometry']
+        for c in columns:
+            if 'int' in str(features.dtypes[c]) or 'Int' in str(features.dtypes[c]):
+                propertyType = 'integer'
+            elif 'float' in str(features.dtypes[c]) or 'Float' in str(features.dtypes[c]):
+                propertyType = 'float'
+            elif 'bool' in str(features.dtypes[c]):
+                propertyType = 'boolean'
+            elif 'datetime' in str(features.dtypes[c]):
+                propertyType = 'datetime'
+            else:
+                propertyType = 'string'
+
+
+            ###date
+            body = {'name': c , 'type': propertyType , 'required': False, 'private': False}
+            apiManager.post('/path/' + pathId + '/vector/property', body, token)
+    indices = chunks(np.arange(features.shape[0]))
+
+
+    addedIds = []
+    for i in np.arange(len(indices)):
+        indices_sub = indices[i]
+        features_sub = features_json[indices_sub]
+
+
+        featuresBody = [{'feature': features_sub[i] } for i in np.arange(len(indices_sub))]
+        body = {"features":featuresBody}
+        r = apiManager.post('/path/' + pathId + '/vector/timestamp/' + timestampId + '/feature', body, token)
+        addedIds = addedIds + r
+        if showProgress:
+            loadingBar(i*3000 + len(indices_sub),features.shape[0])
+        i = i+1
+
+    return(addedIds)
+
+
+
+def edit(pathId, timestampId, featureIds, token, features, showProgress = True, cores = 1):
+    pathId = sanitize.validUuid('pathId', pathId, True)
+    timestampId = sanitize.validUuid('timestampId', timestampId, True)
+    token = sanitize.validString('token', token, True)
+    featureIds = sanitize.validUuidArray('featureIds', featureIds, True)
+    showProgress = sanitize.validBool('showProgress', showProgress, True)
+    cores = sanitize.validInt('cores', cores, True)
+    features = sanitize.validGeopandas('features', features, False, cores=cores)
+
+
+
+    if type(features) != type(None) and features.shape[0] != len(featureIds):
+        raise ValueError('featureIds must be of same length as the features geopandas dataframe')
+
+
+    indices = chunks(np.arange(len(featureIds)),1000)
+    i=0
+    for i in np.arange(len(indices)):
+        indices_sub = indices[i]
+        featureIds_sub = list( np.array(featureIds)[indices_sub])
+
+        if type(features) != type(None):
+            features_sub = features.iloc[indices_sub]
+            features_sub =features_sub.to_json(na='drop')
+            features_sub = json.loads(features_sub)
+
+
+        changes = [{'featureId':x[0] , 'newProperties':x[1]['properties'], 'newGeometry':x[1]['geometry']} for x in zip(featureIds_sub, features_sub['features'])]
+
+        body = {'changes':changes}
+        r = apiManager.patch('/path/' + pathId + '/vector/timestamp/' + timestampId + '/feature', body, token)
+
+        if len(indices) > 1 and showProgress:
+            loadingBar(i*1000 + len(indices_sub),len(featureIds))
+
+    return r
+
+
+def trash(pathId, timestampId, featureIds, token, showProgress = True):
+    pathId = sanitize.validUuid('pathId', pathId, True)
+    timestampId = sanitize.validUuid('timestampId', timestampId, True)
+    token = sanitize.validString('token', token, True)
+    featureIds = sanitize.validUuidArray('featureIds', featureIds, True)
+    showProgress = sanitize.validBool('showProgress', showProgress, True)
+
+    indices = chunks(np.arange(len(featureIds)),1000)
+    i=0
+    for i in np.arange(len(indices)):
+        indices_sub = indices[i]
+        featureIds_sub = list(np.array(featureIds)[indices_sub])
+
+        body = {'featureIds': featureIds_sub, 'trashed': True}
+        r = apiManager.put('/path/' + pathId + '/vector/timestamp/' + timestampId + '/feature/trashed', body, token)
+
+        if len(indices) > 1 and showProgress:
+            loadingBar(i*1000 + len(indices_sub),len(featureIds))
+
+    return r
+
+
+def recover(pathId, timestampId, featureIds, token, showProgress = True):
+    pathId = sanitize.validUuid('pathId', pathId, True)
+    timestampId = sanitize.validUuid('timestampId', timestampId, True)
+    token = sanitize.validString('token', token, True)
+    featureIds = sanitize.validUuidArray('featureIds', featureIds, True)
+    showProgress = sanitize.validBool('showProgress', showProgress, True)
+
+    indices = chunks(np.arange(len(featureIds)),1000)
+    i=0
+    for i in np.arange(len(indices)):
+        indices_sub = indices[i]
+        featureIds_sub = list(np.array(featureIds)[indices_sub])
+
+        body = {'featureIds': featureIds_sub, 'trashed': False}
+        r = apiManager.put('/path/' + pathId + '/vector/timestamp/' + timestampId + '/feature/trashed', body, token)
+
+        if len(indices) > 1 and showProgress:
+            loadingBar(i*1000 + len(indices_sub),len(featureIds))
+
+    return r
+
+
+
+def versions(pathId, timestampId, featureId, token = None, pageStart = None, listAll = True):
+    pathId = sanitize.validUuid('pathId', pathId, True)
+    timestampId = sanitize.validUuid('timestampId', timestampId, True)
+    token = sanitize.validString('token', token, False)
+    featureId = sanitize.validUuid('featureId', featureId, True)
+    pageStart = sanitize.validUuid('pageStart', pageStart, False)
+    listAll = sanitize.validBool('listAll', listAll, True)
+
+    body = {'returnType':'all'}
+    def f(body):
+        return apiManager.get('/path/' + pathId + '/vector/timestamp/' + timestampId + '/feature/' + featureId + '/version', body, token)
+
+    r = recurse(f, body, listAll)
+
+    features = [ x['feature'] for x in r['result']]
+    dates = [stringToDate(x['date']) for x in r['result'] ]
+    usernames = [x['user']['username'] for x in r['result'] ]
+    userIds = [x['user']['id'] for x in r['result'] ]
+
+    sh = gpd.GeoDataFrame.from_features(features)
+    sh['username'] = usernames
+    sh['userId'] = userIds
+    sh['dates'] = dates
+
+    sh.crs = {'init': 'epsg:4326'}
+    r['result'] = sh
+    return(r)
+
+
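For orientation, a hedged usage sketch of the new feature module (all IDs, the token and the example columns are placeholders; importing via the root submodule is an assumption, since the package __init__ re-exports are not shown in this diff):

import geopandas as gpd
from shapely.geometry import Point
from ellipsis.path.vector.timestamp.feature import root as vectorFeature

# Placeholder GeoDataFrame; on a first upload, non-geometry columns are registered as layer properties.
gdf = gpd.GeoDataFrame({'name': ['a', 'b'], 'value': [1, 2],
                        'geometry': [Point(4.5, 51.5), Point(4.6, 51.6)]}, crs='EPSG:4326')

addedIds = vectorFeature.add(pathId='path-uuid', timestampId='timestamp-uuid',
                             features=gdf, token='YOUR_TOKEN')

# trash/recover flip the 'trashed' flag in chunks of 1000 feature ids per request.
vectorFeature.trash(pathId='path-uuid', timestampId='timestamp-uuid',
                    featureIds=addedIds, token='YOUR_TOKEN')
vectorFeature.recover(pathId='path-uuid', timestampId='timestamp-uuid',
                      featureIds=addedIds, token='YOUR_TOKEN')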
{ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/vector/timestamp/file/root.py

@@ -5,27 +5,43 @@ from ellipsis.util.root import recurse
 import numpy as np
 import geopandas as gpd
 
-def add(pathId, timestampId, filePath,
+def add(pathId, timestampId, token, fileFormat, filePath=None, memFile = None, name = None, epsg = None, dateColumns = None, datePatterns = None, method= 'simplify', fastUpload = True):
     token = sanitize.validString('token', token, True)
     pathId = sanitize.validUuid('pathId', pathId, True)
     timestampId = sanitize.validUuid('timestampId', timestampId, True)
-    filePath = sanitize.validString('filePath', filePath,
+    filePath = sanitize.validString('filePath', filePath, False)
     epsg = sanitize.validInt('epsg', epsg, False)
-
-    fileFormat = sanitize.validString('fileFormat', fileFormat, True)
+    fileFormat = sanitize.validString('fileFormat', fileFormat, True)
     dateColumns = sanitize.validStringArray('dateColumns', dateColumns, False)
     datePatterns = sanitize.validStringArray('datePatterns', datePatterns, False)
+    name = sanitize.validString('name', name, False)
     fastUpload = sanitize.validBool('fastUpload', fastUpload, True)
     if fastUpload:
         fastUpload='true'
     else:
         fastUpload = 'false'
 
-
-
-
+
+    if type(memFile) == type(None) and type(filePath) == type(None):
+        raise ValueError('You need to specify either a filePath or a memFile')
+
+    if type(memFile) != type(None) and type(name) == type(None):
+        raise ValueError('Parameter name is required when using a memory file')
+
+    if type(name ) == type(None):
+        seperator = os.path.sep
+        fileName = filePath.split(seperator)[len(filePath.split(seperator))-1 ]
+    else:
+        fileName = name
+
+
     body = {'name':fileName, 'epsg':epsg, 'format':fileFormat, 'dateColumns': dateColumns, 'datePatterns':datePatterns, 'fastUpload':fastUpload}
-
+    if type(memFile) == type(None):
+        r = apiManager.upload('/path/' + pathId + '/vector/timestamp/' + timestampId + '/file' , filePath, body, token)
+    else:
+        r = apiManager.upload('/path/' + pathId + '/vector/timestamp/' + timestampId + '/file', name, body, token,
+                              memfile=memFile)
+
     return r
 
 def get(pathId, timestampId, token, pageStart = None, listAll = True):
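A hedged sketch of the extended vector file upload (placeholder values; the expected date pattern string format is not shown in the diff): dateColumns/datePatterns still describe date parsing, while filePath or memFile plus name now select the upload source:

from ellipsis.path.vector.timestamp.file import root as vectorFile

# Upload a GeoJSON from disk and have one column parsed as a date.
r = vectorFile.add(pathId='path-uuid', timestampId='timestamp-uuid', token='YOUR_TOKEN',
                   fileFormat='geojson', filePath='/data/parcels.geojson',
                   dateColumns=['observed'], datePatterns=['YYYY-MM-DD'])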
{ellipsis-3.1.45 → ellipsis-3.1.46}/ellipsis/path/vector/timestamp/order/root.py

@@ -6,16 +6,17 @@ def get(token):
     r = apiManager.get('/path/vector/timestamp/order', None, token)
     return r
 
-def add(pathId, timestampId, token, extent = None, fileFormat = 'geojson'):
+def add(pathId, timestampId, token, extent = None, fileFormat = 'geojson', epsg = 4326):
 
     token = sanitize.validString('token', token, True)
     pathId = sanitize.validUuid('pathId', pathId, True)
     timestampId = sanitize.validUuid('timestampId', timestampId, True)
     extent = sanitize.validBounds('extent', extent, False)
     fileFormat = sanitize.validString('fileFormat', fileFormat, True)
+    epsg = sanitize.validInt('epsg', epsg, True)
 
 
-    body = { 'extent':extent, 'format' :fileFormat}
+    body = { 'extent':extent, 'format' :fileFormat, 'epsg':epsg}
 
     r = apiManager.post('/path/' + pathId + '/vector/timestamp/' + timestampId + '/order', body, token)
 
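Finally, an illustrative call for the extended order endpoint (placeholder IDs, token and extent): the new epsg argument is validated as an integer and forwarded in the request body alongside extent and format:

from ellipsis.path.vector.timestamp.order import root as vectorOrder

r = vectorOrder.add(pathId='path-uuid', timestampId='timestamp-uuid', token='YOUR_TOKEN',
                    extent={'xMin': 4.0, 'xMax': 5.0, 'yMin': 51.0, 'yMax': 52.0},
                    fileFormat='geojson', epsg=28992)  # epsg defaults to 4326 when omitted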