earthengine-api 1.5.13rc0__py3-none-any.whl → 1.7.4__py3-none-any.whl
This diff shows the changes between publicly available package versions as they appear in their respective registries. It is provided for informational purposes only.
Potentially problematic release.
This version of earthengine-api might be problematic.
- {earthengine_api-1.5.13rc0.dist-info → earthengine_api-1.7.4.dist-info}/METADATA +3 -3
- earthengine_api-1.7.4.dist-info/RECORD +109 -0
- {earthengine_api-1.5.13rc0.dist-info → earthengine_api-1.7.4.dist-info}/WHEEL +1 -1
- ee/__init__.py +29 -28
- ee/_arg_types.py +7 -6
- ee/_cloud_api_utils.py +95 -78
- ee/_helpers.py +17 -13
- ee/_state.py +105 -0
- ee/_utils.py +2 -1
- ee/apifunction.py +21 -19
- ee/apitestcase.py +33 -38
- ee/batch.py +87 -77
- ee/blob.py +10 -12
- ee/classifier.py +57 -59
- ee/cli/commands.py +178 -114
- ee/cli/eecli.py +1 -1
- ee/cli/utils.py +61 -42
- ee/clusterer.py +39 -41
- ee/collection.py +64 -54
- ee/computedobject.py +19 -16
- ee/confusionmatrix.py +9 -9
- ee/customfunction.py +13 -12
- ee/data.py +220 -322
- ee/daterange.py +10 -10
- ee/deprecation.py +21 -13
- ee/deserializer.py +25 -20
- ee/dictionary.py +11 -11
- ee/ee_array.py +22 -20
- ee/ee_date.py +23 -23
- ee/ee_list.py +15 -16
- ee/ee_number.py +11 -21
- ee/ee_string.py +24 -32
- ee/ee_types.py +4 -4
- ee/element.py +15 -15
- ee/encodable.py +7 -4
- ee/errormargin.py +4 -4
- ee/feature.py +68 -71
- ee/featurecollection.py +41 -40
- ee/filter.py +90 -92
- ee/function.py +8 -8
- ee/geometry.py +95 -93
- ee/image.py +238 -236
- ee/image_converter.py +4 -4
- ee/imagecollection.py +30 -27
- ee/join.py +13 -15
- ee/kernel.py +55 -57
- ee/mapclient.py +9 -9
- ee/model.py +29 -31
- ee/oauth.py +76 -63
- ee/pixeltype.py +6 -6
- ee/projection.py +5 -4
- ee/reducer.py +41 -41
- ee/serializer.py +14 -14
- ee/table_converter.py +7 -6
- ee/terrain.py +7 -9
- ee/tests/_cloud_api_utils_test.py +21 -6
- ee/tests/_helpers_test.py +57 -4
- ee/tests/_state_test.py +49 -0
- ee/tests/algorithms.json +85 -2
- ee/tests/apifunction_test.py +5 -5
- ee/tests/batch_test.py +135 -57
- ee/tests/blob_test.py +5 -5
- ee/tests/classifier_test.py +3 -3
- ee/tests/clusterer_test.py +3 -3
- ee/tests/collection_test.py +48 -13
- ee/tests/confusionmatrix_test.py +3 -3
- ee/tests/data_test.py +484 -55
- ee/tests/daterange_test.py +4 -4
- ee/tests/deprecation_test.py +6 -4
- ee/tests/deserializer_test.py +64 -5
- ee/tests/dictionary_test.py +12 -12
- ee/tests/ee_array_test.py +3 -3
- ee/tests/ee_date_test.py +4 -4
- ee/tests/ee_list_test.py +3 -3
- ee/tests/ee_number_test.py +75 -30
- ee/tests/ee_string_test.py +11 -3
- ee/tests/ee_test.py +40 -22
- ee/tests/element_test.py +2 -2
- ee/tests/errormargin_test.py +1 -1
- ee/tests/feature_test.py +10 -10
- ee/tests/featurecollection_test.py +3 -3
- ee/tests/filter_test.py +4 -4
- ee/tests/function_test.py +5 -5
- ee/tests/geometry_point_test.py +3 -3
- ee/tests/geometry_test.py +93 -52
- ee/tests/image_converter_test.py +1 -3
- ee/tests/image_test.py +3 -3
- ee/tests/imagecollection_test.py +3 -3
- ee/tests/join_test.py +3 -3
- ee/tests/kernel_test.py +7 -3
- ee/tests/model_test.py +17 -5
- ee/tests/oauth_test.py +189 -7
- ee/tests/pixeltype_test.py +6 -7
- ee/tests/projection_test.py +5 -6
- ee/tests/reducer_test.py +16 -3
- ee/tests/serializer_test.py +39 -12
- ee/tests/table_converter_test.py +51 -7
- ee/tests/terrain_test.py +11 -3
- earthengine_api-1.5.13rc0.dist-info/RECORD +0 -107
- {earthengine_api-1.5.13rc0.dist-info → earthengine_api-1.7.4.dist-info}/entry_points.txt +0 -0
- {earthengine_api-1.5.13rc0.dist-info → earthengine_api-1.7.4.dist-info}/licenses/LICENSE +0 -0
- {earthengine_api-1.5.13rc0.dist-info → earthengine_api-1.7.4.dist-info}/top_level.txt +0 -0
ee/cli/utils.py
CHANGED
```diff
@@ -6,6 +6,7 @@ the classes for configuration and runtime context management.
 """
 
 import collections
+from collections.abc import Iterable
 import datetime
 import json
 import os
@@ -13,7 +14,7 @@ import re
 import tempfile
 import threading
 import time
-from typing import Any, AnyStr
+from typing import Any, AnyStr
 import urllib.parse
 
 from google.cloud import storage
@@ -31,7 +32,7 @@ DEFAULT_EE_CONFIG_FILE_RELATIVE = os.path.join(
 DEFAULT_EE_CONFIG_FILE = os.path.join(
     HOMEDIR, DEFAULT_EE_CONFIG_FILE_RELATIVE)
 
-CONFIG_PARAMS:
+CONFIG_PARAMS: dict[str, str | list[str] | None] = {
     'account': None,
     'cloud_api_key': None,
     'private_key': None,
@@ -39,12 +40,6 @@ CONFIG_PARAMS: Dict[str, Union[str, List[str], None]] = {
     'url': 'https://earthengine.googleapis.com',
 }
 
-TASK_FINISHED_STATES: Tuple[str, str, str] = (
-    ee.batch.Task.State.COMPLETED,
-    ee.batch.Task.State.FAILED,
-    ee.batch.Task.State.CANCELLED,
-)
-
 
 class CommandLineConfig:
   """Holds the configuration parameters used by the EE command line interface.
```
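Note: the annotation changes in this file (and below in `ee/clusterer.py`) replace `typing.Dict`/`List`/`Tuple`/`Union`/`Optional` with built-in generics and PEP 604 unions. A minimal illustration of the two styles (hypothetical constant, not code from the package):

```python
from typing import Dict, List, Union

# Old style, as used in 1.5.13rc0:
OLD_CONFIG: Dict[str, Union[str, List[str], None]] = {'account': None}

# New style, as used in 1.7.4. Built-in generics need Python 3.9+, and the
# "X | None" union syntax needs 3.10+ (or `from __future__ import annotations`).
NEW_CONFIG: dict[str, str | list[str] | None] = {'account': None}
```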
```diff
@@ -150,9 +145,12 @@ class CommandLineConfig:
 
 
 def _split_gcs_path(path):
-
+  # This only catches some troubles. For complete details on naming, see:
+  # https://cloud.google.com/storage/docs/buckets
+  # https://cloud.google.com/storage/docs/objects#naming
+  m = re.search('gs://([a-z0-9][a-z0-9-_.]*)/(.*)', path, re.IGNORECASE)
   if not m:
-    raise ValueError('
+    raise ValueError(f"'{path}' is not a valid GCS path")
 
   return m.groups()
```
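For reference, the regex added to `_split_gcs_path` separates the bucket from the object key; a quick standalone check with an illustrative path:

```python
import re

# Same pattern as the new _split_gcs_path; the path below is illustrative.
m = re.search('gs://([a-z0-9][a-z0-9-_.]*)/(.*)',
              'gs://my-bucket/some/dir/file.tif', re.IGNORECASE)
print(m.groups())  # ('my-bucket', 'some/dir/file.tif')
```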
```diff
@@ -182,7 +180,7 @@ class GcsHelper:
       raise ValueError('Size of files in \'{}\' exceeds allowed size: '
                        '{} > {}.'.format(path, total_bytes, max_bytes))
     if total_bytes == 0:
-      raise ValueError(
+      raise ValueError(f"No files found at '{path}'.")
 
   def download_dir_to_temp(self, path: str) -> str:
     """Downloads recursively the contents at a GCS path to a temp directory."""
@@ -202,7 +200,7 @@ class GcsHelper:
       if not os.path.exists(dir_path):
         os.makedirs(dir_path)
 
-      if output_path
+      if not output_path.endswith('/'):
         blob.download_to_filename(output_path)
 
     return temp_dir
@@ -211,7 +209,7 @@ class GcsHelper:
     """Uploads a directory to cloud storage."""
    canonical_path = _canonicalize_dir_path(source_path)
 
-    files =
+    files = []
    for dirpath, _, filenames in os.walk(canonical_path):
      files += [os.path.join(dirpath, f) for f in filenames]
 
@@ -229,7 +227,7 @@ def is_gcs_path(path: str) -> bool:
 
 
 def query_yes_no(msg: str) -> bool:
-  print('
+  print(f'{msg} (y/n)')
   while True:
     confirm = input().lower()
     if confirm == 'y':
```
```diff
@@ -247,23 +245,31 @@ def truncate(string: str, length: int) -> str:
   return string
 
 
+def _task_id_to_operation_name(task_id: str) -> str:
+  """Converts a task ID to an operation name."""
+  # pylint: disable=protected-access
+  return ee._cloud_api_utils.convert_task_id_to_operation_name(
+      ee.data._get_state().cloud_api_user_project, task_id
+  )
+  # pylint: enable=protected-access
+
+
 def wait_for_task(
     task_id: str, timeout: float, log_progress: bool = True
 ) -> None:
   """Waits for the specified task to finish, or a timeout to occur."""
   start = time.time()
-  elapsed
+  elapsed: float
   last_check = 0
   while True:
     elapsed = time.time() - start
-    status = ee.data.
-    state = status['state']
-    if
-      error_message = status.get('
-      print('Task
-            % (task_id, state, elapsed))
+    status = ee.data.getOperation(_task_id_to_operation_name(task_id))
+    state = status['metadata']['state']
+    if status.get('done', False):
+      error_message = status.get('error', {}).get('message')
+      print(f'Task {task_id} ended at state: {state} after {elapsed:.2f} seconds')
       if error_message:
-        raise ee.ee_exception.EEException('Error:
+        raise ee.ee_exception.EEException(f'Error: {error_message}')
       return
     if log_progress and elapsed - last_check >= 30:
       print('[{:%H:%M:%S}] Current state for task {}: {}'
@@ -274,11 +280,13 @@ def wait_for_task(
       time.sleep(min(10, remaining))
     else:
       break
-  print(
+  print(
+      f'Wait for task {task_id} timed out after {elapsed:.2f} seconds'
+  )
 
 
 def wait_for_tasks(
-    task_id_list:
+    task_id_list: list[str], timeout: float, log_progress: bool = False
 ) -> None:
   """For each task specified in task_id_list, wait for that task or timeout."""
 
```
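The waiting logic now resolves task IDs to long-running Operation names and polls `ee.data.getOperation` instead of the legacy task-status call. A rough standalone sketch of that polling loop, mirroring the private helpers above (these are internal APIs and may change; the task ID is a placeholder, and `ee.Initialize()` is assumed to have valid credentials):

```python
import time
import ee

ee.Initialize()  # assumes credentials and a Cloud project are configured

task_id = 'ABCDEF1234567890'  # placeholder export task ID
# pylint: disable=protected-access
operation_name = ee._cloud_api_utils.convert_task_id_to_operation_name(
    ee.data._get_state().cloud_api_user_project, task_id
)
# pylint: enable=protected-access

while True:
  op = ee.data.getOperation(operation_name)
  if op.get('done', False):
    print('Final state:', op['metadata']['state'])
    break
  time.sleep(10)
```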
```diff
@@ -296,20 +304,30 @@ def wait_for_tasks(
   for thread in threads:
     thread.join()
 
-
+  get_state = lambda task_id: ee.data.getOperation(
+      _task_id_to_operation_name(task_id)
+  )['metadata']['state']
+  status_list = [get_state(task_id) for task_id in task_id_list]
   status_counts = collections.defaultdict(int)
   for status in status_list:
-    status_counts[status
-
-
+    status_counts[status] += 1
+  succeeded = status_counts['SUCCEEDED']
+  failed = status_counts['FAILED']
+  cancelled = status_counts['CANCELLED']
+  num_incomplete = (
+      len(status_list)
+      - succeeded
+      - failed
+      - cancelled
+  )
   print('Finished waiting for tasks.\n Status summary:')
-  print('
-  print('
-  print('
-  print('
+  print(f'  {succeeded} tasks completed successfully.')
+  print(f'  {failed} tasks failed.')
+  print(f'  {cancelled} tasks cancelled.')
+  print(f'  {num_incomplete} tasks are still incomplete (timed-out)')
 
 
-def expand_gcs_wildcards(source_files:
+def expand_gcs_wildcards(source_files: list[str]) -> Iterable[str]:
   """Implements glob-like '*' wildcard completion for cloud storage objects.
 
   Args:
```
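The status summary tallies terminal operation states with a `defaultdict`; anything not counted as `SUCCEEDED`, `FAILED`, or `CANCELLED` is reported as still incomplete. The counting logic in isolation, with an illustrative state list:

```python
import collections

status_list = ['SUCCEEDED', 'FAILED', 'SUCCEEDED', 'RUNNING', 'CANCELLED']

status_counts = collections.defaultdict(int)
for status in status_list:
  status_counts[status] += 1

succeeded = status_counts['SUCCEEDED']
failed = status_counts['FAILED']
cancelled = status_counts['CANCELLED']
num_incomplete = len(status_list) - succeeded - failed - cancelled
print(succeeded, failed, cancelled, num_incomplete)  # 2 1 1 1
```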
```diff
@@ -338,7 +356,7 @@ def expand_gcs_wildcards(source_files: List[str]) -> Iterable[str]:
       bucket, rest = bucket_match.group(1, 2)
     else:
       raise ee.ee_exception.EEException(
-          'Badly formatted source file or bucket:
+          f'Badly formatted source file or bucket: {source}')
     prefix = rest[:rest.find('*')]  # Everything before the first wildcard
 
     bucket_files = _gcs_ls(bucket, prefix)
@@ -382,27 +400,28 @@ def _gcs_ls(bucket: str, prefix: str = '') -> Iterable[str]:
   try:
     response, content = http.request(url, method=method)
   except httplib2.HttpLib2Error as e:
-    raise ee.ee_exception.EEException('Unexpected HTTP error:
+    raise ee.ee_exception.EEException(f'Unexpected HTTP error: {e}') from e
 
   if response.status < 100 or response.status >= 300:
-    raise ee.ee_exception.EEException(
-
-
+    raise ee.ee_exception.EEException(
+        f'Error retrieving bucket {bucket}; '
+        f'Server returned HTTP code: {response.status}'
+    )
 
   json_content = json.loads(content)
   if 'error' in json_content:
     json_error = json_content['error']['message']
-    raise ee.ee_exception.EEException(
-
+    raise ee.ee_exception.EEException(
+        f'Error retrieving bucket {bucket}: {json_error}')
 
   if 'items' not in json_content:
     raise ee.ee_exception.EEException(
-        'Cannot find items list in the response from GCS:
+        f'Cannot find items list in the response from GCS: {json_content}')
   objects = json_content['items']
   object_names = [str(gc_object['name']) for gc_object in objects]
 
   for name in object_names:
-    yield 'gs
+    yield f'gs://{bucket}/{name}'
 
   # GCS indicates no more results
   if 'nextPageToken' not in json_content:
```
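The rewritten error paths in `_gcs_ls` switch to f-strings and add explicit exception chaining (`raise ... from e`), which keeps the underlying error attached to the wrapper. A generic sketch of the pattern (hypothetical exception types, not Earth Engine code):

```python
class WrappedError(Exception):
  pass


def fetch():
  raise OSError('connection reset')


try:
  try:
    fetch()
  except OSError as e:
    # "from e" links the wrapper to the original error, so tracebacks show both.
    raise WrappedError(f'Unexpected HTTP error: {e}') from e
except WrappedError as err:
  print(err)            # Unexpected HTTP error: connection reset
  print(err.__cause__)  # connection reset
```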
ee/clusterer.py
CHANGED
```diff
@@ -1,8 +1,6 @@
 """A wrapper for Clusterers."""
 from __future__ import annotations
 
-from typing import Optional
-
 from ee import _arg_types
 from ee import apifunction
 from ee import computedobject
@@ -72,7 +70,7 @@ class Clusterer(computedobject.ComputedObject):
 
   # TODO: The Optional is suspect. The return from schema is
   # never None, but the .getInfo() of the result can be None.
-  def schema(self) ->
+  def schema(self) -> ee_list.List | None:
     """Returns the names of the inputs used by this Clusterer.
 
     Or None if this Clusterer has not had any training data added yet.
```
```diff
@@ -84,10 +82,10 @@ class Clusterer(computedobject.ComputedObject):
       self,
       features: _arg_types.FeatureCollection,
       # pylint: disable-next=invalid-name
-      inputProperties:
-      subsampling:
+      inputProperties: _arg_types.List | None = None,
+      subsampling: _arg_types.Number | None = None,
       # pylint: disable-next=invalid-name
-      subsamplingSeed:
+      subsamplingSeed: _arg_types.Integer | None = None,
   ) -> Clusterer:
     """Returns a trained Clusterer.
 
@@ -117,15 +115,15 @@ class Clusterer(computedobject.ComputedObject):
   @staticmethod
   def wekaCascadeKMeans(
       # pylint: disable=invalid-name
-      minClusters:
-      maxClusters:
+      minClusters: _arg_types.Integer | None = None,
+      maxClusters: _arg_types.Integer | None = None,
       # pylint: disable-next=invalid-name
-      restarts:
-      manual:
-      init:
+      restarts: _arg_types.Integer | None = None,
+      manual: _arg_types.Bool | None = None,
+      init: _arg_types.Bool | None = None,
       # pylint: disable=invalid-name
-      distanceFunction:
-      maxIterations:
+      distanceFunction: _arg_types.String | None = None,
+      maxIterations: _arg_types.Integer | None = None,
       # pylint: disable-next=invalid-name
   ) -> Clusterer:
     """Returns a weka Cascade K-Means Clusterer.
@@ -161,9 +159,9 @@ class Clusterer(computedobject.ComputedObject):
 
   @staticmethod
   def wekaCobweb(
-      acuity:
-      cutoff:
-      seed:
+      acuity: _arg_types.Number | None = None,
+      cutoff: _arg_types.Number | None = None,
+      seed: _arg_types.Integer | None = None,
   ) -> Clusterer:
     """Returns a weka Cobweb Clusterer.
 
@@ -186,22 +184,22 @@ class Clusterer(computedobject.ComputedObject):
   @staticmethod
   def wekaKMeans(
       nClusters: _arg_types.Integer,  # pylint: disable=invalid-name
-      init:
-      canopies:
+      init: _arg_types.Integer | None = None,
+      canopies: _arg_types.Bool | None = None,
       # pylint: disable=invalid-name
-      maxCandidates:
-      periodicPruning:
-      minDensity:
+      maxCandidates: _arg_types.Integer | None = None,
+      periodicPruning: _arg_types.Integer | None = None,
+      minDensity: _arg_types.Integer | None = None,
       # pylint: enable=invalid-name
-      t1:
-      t2:
+      t1: _arg_types.Number | None = None,
+      t2: _arg_types.Number | None = None,
       # pylint: disable=invalid-name
-      distanceFunction:
-      maxIterations:
-      preserveOrder:
+      distanceFunction: _arg_types.String | None = None,
+      maxIterations: _arg_types.Integer | None = None,
+      preserveOrder: _arg_types.Bool | None = None,
       # pylint: enable=invalid-name
-      fast:
-      seed:
+      fast: _arg_types.Bool | None = None,
+      seed: _arg_types.Integer | None = None,
   ) -> Clusterer:
     """Returns a weka K-Means Clusterer.
 
@@ -260,12 +258,12 @@ class Clusterer(computedobject.ComputedObject):
   @staticmethod
   def wekaLVQ(
       # pylint: disable=invalid-name
-      numClusters:
-      learningRate:
+      numClusters: _arg_types.Integer | None = None,
+      learningRate: _arg_types.Number | None = None,
       # pylint: enable=invalid-name
-      epochs:
+      epochs: _arg_types.Integer | None = None,
       # pylint: disable-next=invalid-name
-      normalizeInput:
+      normalizeInput: _arg_types.Bool | None = None,
   ) -> Clusterer:
     """Returns a weka Learning Vector Quantization (LVQ) Clusterer.
 
@@ -290,16 +288,16 @@ class Clusterer(computedobject.ComputedObject):
   @staticmethod
   def wekaXMeans(
       # pylint: disable=invalid-name
-      minClusters:
-      maxClusters:
-      maxIterations:
-      maxKMeans:
-      maxForChildren:
-      useKD:
-      cutoffFactor:
-      distanceFunction:
+      minClusters: _arg_types.Integer | None = None,
+      maxClusters: _arg_types.Integer | None = None,
+      maxIterations: _arg_types.Integer | None = None,
+      maxKMeans: _arg_types.Integer | None = None,
+      maxForChildren: _arg_types.Integer | None = None,
+      useKD: _arg_types.Bool | None = None,
+      cutoffFactor: _arg_types.Number | None = None,
+      distanceFunction: _arg_types.String | None = None,
       # pylint: enable=invalid-name
-      seed:
+      seed: _arg_types.Integer | None = None,
   ) -> Clusterer:
     """Returns a weka X-Means Clusterer.
 
```
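The `ee/clusterer.py` changes above are type-annotation updates (`Optional[X]` to `X | None`), so existing call sites should be unaffected. A minimal hedged usage sketch, assuming `ee.Initialize()` has been called and using placeholder asset and band names:

```python
import ee

ee.Initialize()  # assumes credentials and a Cloud project are configured

# Placeholder inputs; substitute an image and bands you actually have access to.
image = ee.Image('COPERNICUS/S2_SR_HARMONIZED/20220606T101559_20220606T102400_T32TQM')
bands = ['B2', 'B3', 'B4', 'B8']

# Sample the image and train a weka K-Means clusterer on the samples.
training = image.select(bands).sample(scale=10, numPixels=5000)
clusterer = ee.Clusterer.wekaKMeans(nClusters=5).train(training)

# Apply the trained clusterer; the result is a single-band image of cluster IDs.
clustered = image.select(bands).cluster(clusterer)
```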