kaggle 1.6.0a3__tar.gz → 1.6.0a5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. {kaggle-1.6.0a3/kaggle.egg-info → kaggle-1.6.0a5}/PKG-INFO +1 -1
  2. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/api/kaggle_api_extended.py +449 -102
  3. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/cli.py +41 -8
  4. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/models/kaggle_models_extended.py +31 -0
  5. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/models/model_new_instance_request.py +1 -1
  6. {kaggle-1.6.0a3 → kaggle-1.6.0a5/kaggle.egg-info}/PKG-INFO +1 -1
  7. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/setup.py +1 -1
  8. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/LICENSE +0 -0
  9. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/MANIFEST.in +0 -0
  10. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/README.md +0 -0
  11. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/__init__.py +0 -0
  12. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/api/__init__.py +0 -0
  13. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/api/kaggle_api.py +0 -0
  14. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/api_client.py +0 -0
  15. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/configuration.py +0 -0
  16. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/models/__init__.py +0 -0
  17. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/models/api_blob_type.py +0 -0
  18. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/models/collaborator.py +0 -0
  19. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/models/create_inbox_file_request.py +0 -0
  20. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/models/dataset_column.py +0 -0
  21. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/models/dataset_new_request.py +0 -0
  22. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/models/dataset_new_version_request.py +0 -0
  23. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/models/dataset_update_settings_request.py +0 -0
  24. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/models/error.py +0 -0
  25. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/models/kernel_push_request.py +0 -0
  26. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/models/license.py +0 -0
  27. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/models/model_instance_new_version_request.py +0 -0
  28. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/models/model_instance_update_request.py +0 -0
  29. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/models/model_new_request.py +0 -0
  30. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/models/model_update_request.py +0 -0
  31. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/models/result.py +0 -0
  32. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/models/start_blob_upload_request.py +0 -0
  33. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/models/start_blob_upload_response.py +0 -0
  34. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/models/upload_file.py +0 -0
  35. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/rest.py +0 -0
  36. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/test/__init__.py +0 -0
  37. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle/test/test_authenticate.py +0 -0
  38. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle.egg-info/SOURCES.txt +0 -0
  39. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle.egg-info/dependency_links.txt +0 -0
  40. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle.egg-info/entry_points.txt +0 -0
  41. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle.egg-info/requires.txt +0 -0
  42. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/kaggle.egg-info/top_level.txt +0 -0
  43. {kaggle-1.6.0a3 → kaggle-1.6.0a5}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: kaggle
3
- Version: 1.6.0a3
3
+ Version: 1.6.0a5
4
4
  Summary: Kaggle API
5
5
  Home-page: https://github.com/Kaggle/kaggle-api
6
6
  Author: Kaggle
@@ -38,9 +38,11 @@ import io
38
38
  import json
39
39
  import os
40
40
  from os.path import expanduser
41
+ from random import random
41
42
  import sys
42
43
  import shutil
43
44
  import tarfile
45
+ import time
44
46
  import zipfile
45
47
  import tempfile
46
48
  from ..api_client import ApiClient
@@ -66,6 +68,7 @@ from ..models.kaggle_models_extended import Metadata
66
68
  from ..models.kaggle_models_extended import Model
67
69
  from ..models.kaggle_models_extended import ModelNewResponse
68
70
  from ..models.kaggle_models_extended import ModelDeleteResponse
71
+ from ..models.kaggle_models_extended import ResumableUploadResult
69
72
  from ..models.kaggle_models_extended import Submission
70
73
  from ..models.kaggle_models_extended import SubmitResult
71
74
  from ..models.kernel_push_request import KernelPushRequest
@@ -76,9 +79,11 @@ from ..models.model_instance_new_version_request import ModelInstanceNewVersionR
76
79
  from ..models.model_update_request import ModelUpdateRequest
77
80
  from ..models.model_instance_update_request import ModelInstanceUpdateRequest
78
81
  from ..models.start_blob_upload_request import StartBlobUploadRequest
82
+ from ..models.start_blob_upload_response import StartBlobUploadResponse
79
83
  from ..models.upload_file import UploadFile
80
84
  import requests
81
85
  from requests.adapters import HTTPAdapter
86
+ import requests.packages.urllib3.exceptions as urllib3_exceptions
82
87
  from requests.packages.urllib3.util.retry import Retry
83
88
  from ..rest import ApiException
84
89
  import six
@@ -107,13 +112,167 @@ class DirectoryArchive(object):
107
112
  self.path = shutil.make_archive(os.path.join(self._temp_dir, dir_name),
108
113
  self._format, self._fullpath)
109
114
  _, self.name = os.path.split(self.path)
115
+ return self
110
116
 
111
117
  def __exit__(self, *args):
112
118
  shutil.rmtree(self._temp_dir)
113
119
 
114
120
 
121
+ class ResumableUploadContext(object):
122
+
123
+ def __init__(self, no_resume=False):
124
+ self.no_resume = no_resume
125
+ self._temp_dir = os.path.join(tempfile.gettempdir(), '.kaggle/uploads')
126
+ self._file_uploads = []
127
+
128
+ def __enter__(self):
129
+ if self.no_resume:
130
+ return
131
+ self._create_temp_dir()
132
+ return self
133
+
134
+ def __exit__(self, exc_type, exc_value, exc_traceback):
135
+ if self.no_resume:
136
+ return
137
+ if exc_type is not None:
138
+ # Don't delete the upload file info when there is an error
139
+ # to give it a chance to retry/resume on the next invocation.
140
+ return
141
+ for file_upload in self._file_uploads:
142
+ file_upload.cleanup()
143
+
144
+ def get_upload_info_file_path(self, path):
145
+ return os.path.join(
146
+ self._temp_dir,
147
+ '%s.json' % path.replace(os.path.sep, '_').replace(':', '_'))
148
+
149
+ def new_resumable_file_upload(self, path, start_blob_upload_request):
150
+ file_upload = ResumableFileUpload(path, start_blob_upload_request,
151
+ self)
152
+ self._file_uploads.append(file_upload)
153
+ file_upload.load()
154
+ return file_upload
155
+
156
+ def _create_temp_dir(self):
157
+ try:
158
+ os.makedirs(self._temp_dir)
159
+ except FileExistsError:
160
+ pass
161
+
162
+
163
+ class ResumableFileUpload(object):
164
+ # Reference: https://cloud.google.com/storage/docs/resumable-uploads
165
+ # A resumable upload must be completed within a week of being initiated
166
+ RESUMABLE_UPLOAD_EXPIRY_SECONDS = 6 * 24 * 3600
167
+
168
+ def __init__(self, path, start_blob_upload_request, context):
169
+ self.path = path
170
+ self.start_blob_upload_request = start_blob_upload_request
171
+ self.context = context
172
+ self.timestamp = int(time.time())
173
+ self.start_blob_upload_response = None
174
+ self.can_resume = False
175
+ self.upload_complete = False
176
+ if self.context.no_resume:
177
+ return
178
+ self._upload_info_file_path = self.context.get_upload_info_file_path(
179
+ path)
180
+
181
+ def get_token(self):
182
+ if self.upload_complete:
183
+ return self.start_blob_upload_response.token
184
+ return None
185
+
186
+ def load(self):
187
+ if self.context.no_resume:
188
+ return
189
+ self._load_previous_if_any()
190
+
191
+ def _load_previous_if_any(self):
192
+ if not os.path.exists(self._upload_info_file_path):
193
+ return False
194
+
195
+ try:
196
+ with io.open(self._upload_info_file_path, 'r') as f:
197
+ previous = ResumableFileUpload.from_dict(
198
+ json.load(f), self.context)
199
+ if self._is_previous_valid(previous):
200
+ self.start_blob_upload_response = previous.start_blob_upload_response
201
+ self.timestamp = previous.timestamp
202
+ self.can_resume = True
203
+ except Exception as e:
204
+ print('Error while trying to load upload info:', e)
205
+
206
+ def _is_previous_valid(self, previous):
207
+ return previous.path == self.path and \
208
+ previous.start_blob_upload_request == self.start_blob_upload_request and \
209
+ previous.timestamp > time.time() - ResumableFileUpload.RESUMABLE_UPLOAD_EXPIRY_SECONDS
210
+
211
+ def upload_initiated(self, start_blob_upload_response):
212
+ if self.context.no_resume:
213
+ return
214
+
215
+ self.start_blob_upload_response = start_blob_upload_response
216
+ with io.open(self._upload_info_file_path, 'w') as f:
217
+ json.dump(self.to_dict(), f, indent=True)
218
+
219
+ def upload_completed(self):
220
+ if self.context.no_resume:
221
+ return
222
+
223
+ self.upload_complete = True
224
+ self._save()
225
+
226
+ def _save(self):
227
+ with io.open(self._upload_info_file_path, 'w') as f:
228
+ json.dump(self.to_dict(), f, indent=True)
229
+
230
+ def cleanup(self):
231
+ if self.context.no_resume:
232
+ return
233
+
234
+ try:
235
+ os.remove(self._upload_info_file_path)
236
+ except OSError:
237
+ pass
238
+
239
+ def to_dict(self):
240
+ return {
241
+ 'path':
242
+ self.path,
243
+ 'start_blob_upload_request':
244
+ self.start_blob_upload_request.to_dict(),
245
+ 'timestamp':
246
+ self.timestamp,
247
+ 'start_blob_upload_response':
248
+ self.start_blob_upload_response.to_dict()
249
+ if self.start_blob_upload_response is not None else None,
250
+ 'upload_complete':
251
+ self.upload_complete,
252
+ }
253
+
254
+ def from_dict(other, context):
255
+ new = ResumableFileUpload(
256
+ other['path'],
257
+ StartBlobUploadRequest(**other['start_blob_upload_request']),
258
+ context)
259
+ new.timestamp = other.get('timestamp')
260
+ start_blob_upload_response = other.get('start_blob_upload_response')
261
+ if start_blob_upload_response is not None:
262
+ new.start_blob_upload_response = StartBlobUploadResponse(
263
+ **start_blob_upload_response)
264
+ new.upload_complete = other.get('upload_complete') or False
265
+ return new
266
+
267
+ def to_str(self):
268
+ return str(self.to_dict())
269
+
270
+ def __repr__(self):
271
+ return self.to_str()
272
+
273
+
115
274
  class KaggleApi(KaggleApi):
116
- __version__ = '1.6.0a3'
275
+ __version__ = '1.6.0a5'
117
276
 
118
277
  CONFIG_NAME_PROXY = 'proxy'
119
278
  CONFIG_NAME_COMPETITION = 'competition'
@@ -129,6 +288,7 @@ class KaggleApi(KaggleApi):
129
288
  MODEL_METADATA_FILE = 'model-metadata.json'
130
289
  MODEL_INSTANCE_METADATA_FILE = 'model-instance-metadata.json'
131
290
  MAX_NUM_INBOX_FILES_TO_UPLOAD = 1000
291
+ MAX_UPLOAD_RESUME_ATTEMPTS = 10
132
292
 
133
293
  config_dir = os.environ.get('KAGGLE_CONFIG_DIR') or os.path.join(
134
294
  expanduser('~'), '.kaggle')
@@ -180,6 +340,46 @@ class KaggleApi(KaggleApi):
180
340
  reload(sys)
181
341
  sys.setdefaultencoding('latin1')
182
342
 
343
+ def _is_retriable(self, e):
344
+ return issubclass(type(e), ConnectionError) or \
345
+ issubclass(type(e), urllib3_exceptions.ConnectionError) or \
346
+ issubclass(type(e), urllib3_exceptions.ConnectTimeoutError) or \
347
+ issubclass(type(e), urllib3_exceptions.ProtocolError) or \
348
+ issubclass(type(e), requests.exceptions.ConnectionError) or \
349
+ issubclass(type(e), requests.exceptions.ConnectTimeout)
350
+
351
+ def _calculate_backoff_delay(self, attempt, initial_delay_millis,
352
+ retry_multiplier, randomness_factor):
353
+ delay_ms = initial_delay_millis * (retry_multiplier**attempt)
354
+ random_wait_ms = int(random() - 0.5) * 2 * delay_ms * randomness_factor
355
+ total_delay = (delay_ms + random_wait_ms) / 1000.0
356
+ return total_delay
357
+
358
+ def with_retry(self,
359
+ func,
360
+ max_retries=10,
361
+ initial_delay_millis=500,
362
+ retry_multiplier=1.7,
363
+ randomness_factor=0.5):
364
+
365
+ def retriable_func(*args):
366
+ for i in range(1, max_retries + 1):
367
+ try:
368
+ return func(*args)
369
+ except Exception as e:
370
+ if self._is_retriable(e) and i < max_retries:
371
+ total_delay = self._calculate_backoff_delay(
372
+ i, initial_delay_millis, retry_multiplier,
373
+ randomness_factor)
374
+ print(
375
+ 'Request failed: %s. Will retry in %2.1f seconds' %
376
+ (e, total_delay))
377
+ time.sleep(total_delay)
378
+ continue
379
+ raise
380
+
381
+ return retriable_func
382
+
183
383
  ## Authentication
184
384
 
185
385
  def authenticate(self):
@@ -567,10 +767,11 @@ class KaggleApi(KaggleApi):
567
767
  upload_result_token = upload_result['token']
568
768
  else:
569
769
  # New submissions path!
570
- success = self.upload_complete(file_name,
571
- url_result['createUrl'], quiet)
572
- if not success:
573
- # Actual error is printed during upload_complete. Not
770
+ upload_status = self.upload_complete(file_name,
771
+ url_result['createUrl'],
772
+ quiet)
773
+ if upload_status != ResumableUploadResult.COMPLETE:
774
+ # Actual error is printed during upload_complete. Not
574
775
  # ideal but changing would not be backwards compatible
575
776
  return "Could not submit to competition"
576
777
 
@@ -1301,7 +1502,7 @@ class KaggleApi(KaggleApi):
1301
1502
  force=force,
1302
1503
  quiet=quiet)
1303
1504
 
1304
- def _upload_blob(self, path, quiet, blob_type):
1505
+ def _upload_blob(self, path, quiet, blob_type, upload_context):
1305
1506
  """ upload a file
1306
1507
 
1307
1508
  Parameters
@@ -1309,23 +1510,45 @@ class KaggleApi(KaggleApi):
1309
1510
  path: the complete path to upload
1310
1511
  quiet: suppress verbose output (default is False)
1311
1512
  blob_type (ApiBlobType): To which entity the file/blob refers
1513
+ upload_context (ResumableUploadContext): Context for resumable uploads
1312
1514
  """
1313
1515
  file_name = os.path.basename(path)
1314
1516
  content_length = os.path.getsize(path)
1315
1517
  last_modified_epoch_seconds = int(os.path.getmtime(path))
1316
1518
 
1317
- request = StartBlobUploadRequest(
1519
+ start_blob_upload_request = StartBlobUploadRequest(
1318
1520
  blob_type,
1319
1521
  file_name,
1320
1522
  content_length,
1321
1523
  last_modified_epoch_seconds=last_modified_epoch_seconds)
1322
- response = self.process_response(
1323
- self.upload_file_with_http_info(request))
1324
- success = self.upload_complete(path, response.create_url, quiet)
1325
1524
 
1326
- if success:
1327
- return response.token
1328
- return None
1525
+ file_upload = upload_context.new_resumable_file_upload(
1526
+ path, start_blob_upload_request)
1527
+
1528
+ for i in range(0, self.MAX_UPLOAD_RESUME_ATTEMPTS):
1529
+ if file_upload.upload_complete:
1530
+ return file_upload
1531
+
1532
+ if not file_upload.can_resume:
1533
+ # Initiate upload on Kaggle backend to get the url and token.
1534
+ start_blob_upload_response = self.process_response(
1535
+ self.with_retry(self.upload_file_with_http_info)(
1536
+ file_upload.start_blob_upload_request))
1537
+ file_upload.upload_initiated(start_blob_upload_response)
1538
+
1539
+ upload_result = self.upload_complete(
1540
+ path,
1541
+ file_upload.start_blob_upload_response.create_url,
1542
+ quiet,
1543
+ resume=file_upload.can_resume)
1544
+ if upload_result == ResumableUploadResult.INCOMPLETE:
1545
+ continue # Continue (i.e., retry/resume) only if the upload is incomplete.
1546
+
1547
+ if upload_result == ResumableUploadResult.COMPLETE:
1548
+ file_upload.upload_completed()
1549
+ break
1550
+
1551
+ return file_upload.get_token()
1329
1552
 
1330
1553
  def dataset_create_version(self,
1331
1554
  folder,
@@ -1377,30 +1600,33 @@ class KaggleApi(KaggleApi):
1377
1600
  convert_to_csv=convert_to_csv,
1378
1601
  category_ids=keywords,
1379
1602
  delete_old_versions=delete_old_versions)
1380
- self.upload_files(request, resources, folder, ApiBlobType.DATASET,
1381
- quiet, dir_mode)
1382
1603
 
1383
- if id_no:
1384
- result = DatasetNewVersionResponse(
1385
- self.process_response(
1386
- self.datasets_create_version_by_id_with_http_info(
1387
- id_no, request)))
1388
- else:
1389
- if ref == self.config_values[
1390
- self.CONFIG_NAME_USER] + '/INSERT_SLUG_HERE':
1391
- raise ValueError(
1392
- 'Default slug detected, please change values before '
1393
- 'uploading')
1394
- self.validate_dataset_string(ref)
1395
- ref_list = ref.split('/')
1396
- owner_slug = ref_list[0]
1397
- dataset_slug = ref_list[1]
1398
- result = DatasetNewVersionResponse(
1399
- self.process_response(
1400
- self.datasets_create_version_with_http_info(
1401
- owner_slug, dataset_slug, request)))
1604
+ with ResumableUploadContext() as upload_context:
1605
+ self.upload_files(request, resources, folder, ApiBlobType.DATASET,
1606
+ upload_context, quiet, dir_mode)
1402
1607
 
1403
- return result
1608
+ if id_no:
1609
+ result = DatasetNewVersionResponse(
1610
+ self.process_response(
1611
+ self.with_retry(
1612
+ self.datasets_create_version_by_id_with_http_info)(
1613
+ id_no, request)))
1614
+ else:
1615
+ if ref == self.config_values[
1616
+ self.CONFIG_NAME_USER] + '/INSERT_SLUG_HERE':
1617
+ raise ValueError(
1618
+ 'Default slug detected, please change values before '
1619
+ 'uploading')
1620
+ self.validate_dataset_string(ref)
1621
+ ref_list = ref.split('/')
1622
+ owner_slug = ref_list[0]
1623
+ dataset_slug = ref_list[1]
1624
+ result = DatasetNewVersionResponse(
1625
+ self.process_response(
1626
+ self.datasets_create_version_with_http_info(
1627
+ owner_slug, dataset_slug, request)))
1628
+
1629
+ return result
1404
1630
 
1405
1631
  def dataset_create_version_cli(self,
1406
1632
  folder,
@@ -1543,11 +1769,14 @@ class KaggleApi(KaggleApi):
1543
1769
  is_private=not public,
1544
1770
  convert_to_csv=convert_to_csv,
1545
1771
  category_ids=keywords)
1546
- self.upload_files(request, resources, folder, ApiBlobType.DATASET,
1547
- quiet, dir_mode)
1548
- result = DatasetNewResponse(
1549
- self.process_response(
1550
- self.datasets_create_new_with_http_info(request)))
1772
+
1773
+ with ResumableUploadContext() as upload_context:
1774
+ self.upload_files(request, resources, folder, ApiBlobType.DATASET,
1775
+ upload_context, quiet, dir_mode)
1776
+ result = DatasetNewResponse(
1777
+ self.process_response(
1778
+ self.with_retry(
1779
+ self.datasets_create_new_with_http_info)(request)))
1551
1780
 
1552
1781
  return result
1553
1782
 
@@ -1606,7 +1835,7 @@ class KaggleApi(KaggleApi):
1606
1835
  size_read = 0
1607
1836
  open_mode = 'wb'
1608
1837
  remote_date = datetime.strptime(response.headers['Last-Modified'],
1609
- '%a, %d %b %Y %X %Z')
1838
+ '%a, %d %b %Y %H:%M:%S %Z')
1610
1839
  remote_date_timestamp = time.mktime(remote_date.timetuple())
1611
1840
 
1612
1841
  if not quiet:
@@ -2484,32 +2713,35 @@ class KaggleApi(KaggleApi):
2484
2713
  else:
2485
2714
  print('Model creation error: ' + result.error)
2486
2715
 
2487
- def model_delete(self, model):
2716
+ def model_delete(self, model, yes):
2488
2717
  """ call to delete a model from the API
2489
2718
  Parameters
2490
2719
  ==========
2491
2720
  model: the string identified of the model
2492
2721
  should be in format [owner]/[model-name]
2722
+ yes: automatic confirmation
2493
2723
  """
2494
2724
  owner_slug, model_slug = self.split_model_string(model)
2495
2725
 
2496
- if not self.confirmation():
2497
- print('Deletion cancelled')
2498
- exit(0)
2726
+ if not yes:
2727
+ if not self.confirmation():
2728
+ print('Deletion cancelled')
2729
+ exit(0)
2499
2730
 
2500
2731
  res = ModelDeleteResponse(
2501
2732
  self.process_response(
2502
2733
  self.delete_model_with_http_info(owner_slug, model_slug)))
2503
2734
  return res
2504
2735
 
2505
- def model_delete_cli(self, model):
2736
+ def model_delete_cli(self, model, yes):
2506
2737
  """ wrapper for client for model_delete
2507
2738
  Parameters
2508
2739
  ==========
2509
2740
  model: the string identified of the model
2510
2741
  should be in format [owner]/[model-name]
2742
+ yes: automatic confirmation
2511
2743
  """
2512
- result = self.model_delete(model)
2744
+ result = self.model_delete(model, yes)
2513
2745
 
2514
2746
  if result.hasError:
2515
2747
  print('Model deletion error: ' + result.error)
@@ -2753,14 +2985,18 @@ class KaggleApi(KaggleApi):
2753
2985
  fine_tunable=fine_tunable,
2754
2986
  training_data=training_data,
2755
2987
  files=[])
2756
- self.upload_files(request, None, folder, ApiBlobType.MODEL, quiet,
2757
- dir_mode)
2758
- result = ModelNewResponse(
2759
- self.process_response(
2760
- self.models_create_instance_with_http_info(
2761
- owner_slug, model_slug, request)))
2762
2988
 
2763
- return result
2989
+ with ResumableUploadContext() as upload_context:
2990
+ self.upload_files(request, None, folder, ApiBlobType.MODEL,
2991
+ upload_context, quiet, dir_mode)
2992
+ result = ModelNewResponse(
2993
+ self.process_response(
2994
+ self.with_retry(
2995
+ self.models_create_instance_with_http_info)(owner_slug,
2996
+ model_slug,
2997
+ request)))
2998
+
2999
+ return result
2764
3000
 
2765
3001
  def model_instance_create_cli(self, folder, quiet=False, dir_mode='skip'):
2766
3002
  """ client wrapper for creating a new model instance
@@ -2779,21 +3015,23 @@ class KaggleApi(KaggleApi):
2779
3015
  else:
2780
3016
  print('Model instance creation error: ' + result.error)
2781
3017
 
2782
- def model_instance_delete(self, model_instance):
3018
+ def model_instance_delete(self, model_instance, yes):
2783
3019
  """ call to delete a model instance from the API
2784
3020
  Parameters
2785
3021
  ==========
2786
3022
  model_instance: the string identified of the model instance
2787
3023
  should be in format [owner]/[model-name]/[framework]/[instance-slug]
3024
+ yes: automatic confirmation
2788
3025
  """
2789
3026
  if model_instance is None:
2790
3027
  raise ValueError('A model instance must be specified')
2791
3028
  owner_slug, model_slug, framework, instance_slug = self.split_model_instance_string(
2792
3029
  model_instance)
2793
3030
 
2794
- if not self.confirmation():
2795
- print('Deletion cancelled')
2796
- exit(0)
3031
+ if not yes:
3032
+ if not self.confirmation():
3033
+ print('Deletion cancelled')
3034
+ exit(0)
2797
3035
 
2798
3036
  res = ModelDeleteResponse(
2799
3037
  self.process_response(
@@ -2801,14 +3039,15 @@ class KaggleApi(KaggleApi):
2801
3039
  owner_slug, model_slug, framework, instance_slug)))
2802
3040
  return res
2803
3041
 
2804
- def model_instance_delete_cli(self, model_instance):
3042
+ def model_instance_delete_cli(self, model_instance, yes):
2805
3043
  """ wrapper for client for model_instance_delete
2806
3044
  Parameters
2807
3045
  ==========
2808
3046
  model_instance: the string identified of the model instance
2809
3047
  should be in format [owner]/[model-name]/[framework]/[instance-slug]
3048
+ yes: automatic confirmation
2810
3049
  """
2811
- result = self.model_instance_delete(model_instance)
3050
+ result = self.model_instance_delete(model_instance, yes)
2812
3051
 
2813
3052
  if result.hasError:
2814
3053
  print('Model instance deletion error: ' + result.error)
@@ -2928,15 +3167,18 @@ class KaggleApi(KaggleApi):
2928
3167
 
2929
3168
  request = ModelInstanceNewVersionRequest(version_notes=version_notes,
2930
3169
  files=[])
2931
- self.upload_files(request, None, folder, ApiBlobType.MODEL, quiet,
2932
- dir_mode)
2933
- result = ModelNewResponse(
2934
- self.process_response(
2935
- self.models_create_instance_version_with_http_info(
2936
- owner_slug, model_slug, framework, instance_slug,
2937
- request)))
2938
3170
 
2939
- return result
3171
+ with ResumableUploadContext() as upload_context:
3172
+ self.upload_files(request, None, folder, ApiBlobType.MODEL,
3173
+ upload_context, quiet, dir_mode)
3174
+ result = ModelNewResponse(
3175
+ self.process_response(
3176
+ self.with_retry(
3177
+ self.models_create_instance_version_with_http_info)(
3178
+ owner_slug, model_slug, framework, instance_slug,
3179
+ request)))
3180
+
3181
+ return result
2940
3182
 
2941
3183
  def model_instance_version_create_cli(self,
2942
3184
  model_instance,
@@ -3053,12 +3295,13 @@ class KaggleApi(KaggleApi):
3053
3295
  force=force,
3054
3296
  quiet=quiet)
3055
3297
 
3056
- def model_instance_version_delete(self, model_instance_version):
3298
+ def model_instance_version_delete(self, model_instance_version, yes):
3057
3299
  """ call to delete a model instance version from the API
3058
3300
  Parameters
3059
3301
  ==========
3060
3302
  model_instance_version: the string identified of the model instance version
3061
3303
  should be in format [owner]/[model-name]/[framework]/[instance-slug]/[version-number]
3304
+ yes: automatic confirmation
3062
3305
  """
3063
3306
  if model_instance_version is None:
3064
3307
  raise ValueError('A model instance version must be specified')
@@ -3071,9 +3314,10 @@ class KaggleApi(KaggleApi):
3071
3314
  instance_slug = urls[3]
3072
3315
  version_number = urls[4]
3073
3316
 
3074
- if not self.confirmation():
3075
- print('Deletion cancelled')
3076
- exit(0)
3317
+ if not yes:
3318
+ if not self.confirmation():
3319
+ print('Deletion cancelled')
3320
+ exit(0)
3077
3321
 
3078
3322
  res = ModelDeleteResponse(
3079
3323
  self.process_response(
@@ -3082,46 +3326,61 @@ class KaggleApi(KaggleApi):
3082
3326
  version_number)))
3083
3327
  return res
3084
3328
 
3085
- def model_instance_version_delete_cli(self, model_instance_version):
3329
+ def model_instance_version_delete_cli(self, model_instance_version, yes):
3086
3330
  """ wrapper for client for model_instance_version_delete
3087
3331
  Parameters
3088
3332
  ==========
3089
3333
  model_instance_version: the string identified of the model instance version
3090
3334
  should be in format [owner]/[model-name]/[framework]/[instance-slug]/[version-number]
3335
+ yes: automatic confirmation
3091
3336
  """
3092
- result = self.model_instance_version_delete(model_instance_version)
3337
+ result = self.model_instance_version_delete(model_instance_version,
3338
+ yes)
3093
3339
 
3094
3340
  if result.hasError:
3095
3341
  print('Model instance version deletion error: ' + result.error)
3096
3342
  else:
3097
3343
  print('The model instance version was deleted.')
3098
3344
 
3099
- def files_upload_cli(self, local_paths, inbox_path=None):
3345
+ def files_upload_cli(self, local_paths, inbox_path, no_resume,
3346
+ no_compress):
3100
3347
  if len(local_paths) > self.MAX_NUM_INBOX_FILES_TO_UPLOAD:
3101
- print('Cannot upload more than',
3102
- self.MAX_NUM_INBOX_FILES_TO_UPLOAD, 'files!')
3348
+ print('Cannot upload more than %d files!' %
3349
+ self.MAX_NUM_INBOX_FILES_TO_UPLOAD)
3103
3350
  return
3104
3351
 
3105
- for local_path in local_paths:
3106
- self.file_upload_cli(local_path, inbox_path)
3352
+ files_to_create = []
3353
+ with ResumableUploadContext(no_resume) as upload_context:
3354
+ for local_path in local_paths:
3355
+ (upload_file,
3356
+ file_name) = self.file_upload_cli(local_path, inbox_path,
3357
+ no_compress, upload_context)
3358
+ if upload_file is None:
3359
+ continue
3360
+
3361
+ create_inbox_file_request = CreateInboxFileRequest(
3362
+ virtual_directory=inbox_path,
3363
+ blob_file_token=upload_file.token)
3364
+ files_to_create.append((create_inbox_file_request, file_name))
3365
+
3366
+ for (create_inbox_file_request, file_name) in files_to_create:
3367
+ self.process_response(
3368
+ self.with_retry(
3369
+ self.create_inbox_file)(create_inbox_file_request))
3370
+ print('Inbox file created:', file_name)
3107
3371
 
3108
- def file_upload_cli(self, local_path, inbox_path=None):
3372
+ def file_upload_cli(self, local_path, inbox_path, no_compress,
3373
+ upload_context):
3109
3374
  full_path = os.path.abspath(local_path)
3110
3375
  parent_path = os.path.dirname(full_path)
3111
3376
  file_or_folder_name = os.path.basename(full_path)
3377
+ dir_mode = 'tar' if no_compress else 'zip'
3112
3378
 
3113
3379
  upload_file = self._upload_file_or_folder(parent_path,
3114
3380
  file_or_folder_name,
3115
- ApiBlobType.INBOX, 'zip')
3116
- if upload_file is None:
3117
- return
3118
-
3119
- inbox_path = inbox_path or ''
3120
- create_inbox_file_request = CreateInboxFileRequest(
3121
- virtual_directory=inbox_path, blob_file_token=upload_file.token)
3122
- self.process_response(
3123
- self.create_inbox_file(create_inbox_file_request))
3124
- print('Upload complete:', file_or_folder_name)
3381
+ ApiBlobType.INBOX,
3382
+ upload_context, dir_mode)
3383
+ return (upload_file, file_or_folder_name)
3125
3384
 
3126
3385
  def print_obj(self, obj, indent=2):
3127
3386
  pretty = json.dumps(obj, indent=indent)
@@ -3138,7 +3397,7 @@ class KaggleApi(KaggleApi):
3138
3397
  """
3139
3398
  try:
3140
3399
  remote_date = datetime.strptime(response.headers['Last-Modified'],
3141
- '%a, %d %b %Y %X %Z')
3400
+ '%a, %d %b %Y %H:%M:%S %Z')
3142
3401
  file_exists = os.path.isfile(outfile)
3143
3402
  if file_exists:
3144
3403
  local_date = datetime.fromtimestamp(os.path.getmtime(outfile))
@@ -3298,6 +3557,7 @@ class KaggleApi(KaggleApi):
3298
3557
  resources,
3299
3558
  folder,
3300
3559
  blob_type,
3560
+ upload_context,
3301
3561
  quiet=False,
3302
3562
  dir_mode='skip'):
3303
3563
  """ upload files in a folder
@@ -3307,6 +3567,7 @@ class KaggleApi(KaggleApi):
3307
3567
  resources: the files to upload
3308
3568
  folder: the folder to upload from
3309
3569
  blob_type (ApiBlobType): To which entity the file/blob refers
3570
+ upload_context (ResumableUploadContext): Context for resumable uploads
3310
3571
  quiet: suppress verbose output (default is False)
3311
3572
  """
3312
3573
  for file_name in os.listdir(folder):
@@ -3317,7 +3578,8 @@ class KaggleApi(KaggleApi):
3317
3578
  ]):
3318
3579
  continue
3319
3580
  upload_file = self._upload_file_or_folder(folder, file_name,
3320
- blob_type, dir_mode,
3581
+ blob_type,
3582
+ upload_context, dir_mode,
3321
3583
  quiet, resources)
3322
3584
  if upload_file is not None:
3323
3585
  request.files.append(upload_file)
@@ -3326,20 +3588,21 @@ class KaggleApi(KaggleApi):
3326
3588
  parent_path,
3327
3589
  file_or_folder_name,
3328
3590
  blob_type,
3591
+ upload_context,
3329
3592
  dir_mode,
3330
3593
  quiet=False,
3331
3594
  resources=None):
3332
3595
  full_path = os.path.join(parent_path, file_or_folder_name)
3333
3596
  if os.path.isfile(full_path):
3334
3597
  return self._upload_file(file_or_folder_name, full_path, blob_type,
3335
- quiet, resources)
3598
+ upload_context, quiet, resources)
3336
3599
 
3337
3600
  elif os.path.isdir(full_path):
3338
3601
  if dir_mode in ['zip', 'tar']:
3339
- archive = DirectoryArchive(full_path, dir_mode)
3340
- with archive:
3602
+ with DirectoryArchive(full_path, dir_mode) as archive:
3341
3603
  return self._upload_file(archive.name, archive.path,
3342
- blob_type, quiet, resources)
3604
+ blob_type, upload_context, quiet,
3605
+ resources)
3343
3606
  elif not quiet:
3344
3607
  print("Skipping folder: " + file_or_folder_name +
3345
3608
  "; use '--dir-mode' to upload folders")
@@ -3348,13 +3611,15 @@ class KaggleApi(KaggleApi):
3348
3611
  print('Skipping: ' + file_or_folder_name)
3349
3612
  return None
3350
3613
 
3351
- def _upload_file(self, file_name, full_path, blob_type, quiet, resources):
3614
+ def _upload_file(self, file_name, full_path, blob_type, upload_context,
3615
+ quiet, resources):
3352
3616
  """ Helper function to upload a single file
3353
3617
  Parameters
3354
3618
  ==========
3355
3619
  file_name: name of the file to upload
3356
3620
  full_path: path to the file to upload
3357
3621
  blob_type (ApiBlobType): To which entity the file/blob refers
3622
+ upload_context (ResumableUploadContext): Context for resumable uploads
3358
3623
  quiet: suppress verbose output
3359
3624
  resources: optional file metadata
3360
3625
  :return: None - upload unsuccessful; instance of UploadFile - upload successful
@@ -3364,7 +3629,7 @@ class KaggleApi(KaggleApi):
3364
3629
  print('Starting upload for file ' + file_name)
3365
3630
 
3366
3631
  content_length = os.path.getsize(full_path)
3367
- token = self._upload_blob(full_path, quiet, blob_type)
3632
+ token = self._upload_blob(full_path, quiet, blob_type, upload_context)
3368
3633
  if token is None:
3369
3634
  if not quiet:
3370
3635
  print('Upload unsuccessful: ' + file_name)
@@ -3422,7 +3687,7 @@ class KaggleApi(KaggleApi):
3422
3687
  processed_column.type = original_type
3423
3688
  return processed_column
3424
3689
 
3425
- def upload_complete(self, path, url, quiet):
3690
+ def upload_complete(self, path, url, quiet, resume=False):
3426
3691
  """ function to complete an upload to retrieve a path from a url
3427
3692
  Parameters
3428
3693
  ==========
@@ -3431,25 +3696,107 @@ class KaggleApi(KaggleApi):
3431
3696
  quiet: suppress verbose output (default is False)
3432
3697
  """
3433
3698
  file_size = os.path.getsize(path)
3699
+ resumable_upload_result = ResumableUploadResult.Incomplete()
3700
+
3434
3701
  try:
3435
- with tqdm(total=file_size,
3702
+ if resume:
3703
+ resumable_upload_result = self._resume_upload(
3704
+ url, file_size, quiet)
3705
+ if resumable_upload_result.result != ResumableUploadResult.INCOMPLETE:
3706
+ return resumable_upload_result.result
3707
+
3708
+ start_at = resumable_upload_result.start_at
3709
+ upload_size = file_size - start_at
3710
+
3711
+ with tqdm(total=upload_size,
3436
3712
  unit='B',
3437
3713
  unit_scale=True,
3438
3714
  unit_divisor=1024,
3439
3715
  disable=quiet) as progress_bar:
3440
3716
  with io.open(path, 'rb', buffering=0) as fp:
3441
- reader = TqdmBufferedReader(fp, progress_bar)
3442
3717
  session = requests.Session()
3718
+ if start_at > 0:
3719
+ fp.seek(start_at)
3720
+ session.headers.update({
3721
+ 'Content-Length':
3722
+ '%d' % upload_size,
3723
+ 'Content-Range':
3724
+ 'bytes %d-%d/%d' %
3725
+ (start_at, file_size - 1, file_size)
3726
+ })
3727
+ reader = TqdmBufferedReader(fp, progress_bar)
3443
3728
  retries = Retry(total=10, backoff_factor=0.5)
3444
3729
  adapter = HTTPAdapter(max_retries=retries)
3445
3730
  session.mount('http://', adapter)
3446
3731
  session.mount('https://', adapter)
3447
3732
  response = session.put(url, data=reader)
3733
+ if self._is_upload_successful(response):
3734
+ return ResumableUploadResult.COMPLETE
3735
+ if response.status_code == 503:
3736
+ return ResumableUploadResult.INCOMPLETE
3737
+ # Server returned a non-resumable error so give up.
3738
+ return ResumableUploadResult.FAILED
3448
3739
  except Exception as error:
3449
3740
  print(error)
3450
- return False
3741
+ # There is probably some weird bug in our code so try to resume the upload
3742
+ # in case it works on the next try.
3743
+ return ResumableUploadResult.INCOMPLETE
3744
+
3745
+ def _resume_upload(self, url, content_length, quiet):
3746
+ # Documentation: https://developers.google.com/drive/api/guides/manage-uploads#resume-upload
3747
+ session = requests.Session()
3748
+ session.headers.update({
3749
+ 'Content-Length': '0',
3750
+ 'Content-Range': 'bytes */%d' % content_length,
3751
+ })
3752
+
3753
+ response = session.put(url)
3754
+
3755
+ if self._is_upload_successful(response):
3756
+ return ResumableUploadResult.Complete()
3757
+ if response.status_code == 404:
3758
+ # Upload expired so need to start from scratch.
3759
+ if not query:
3760
+ print('Upload of %s expired. Please try again.' % path)
3761
+ return ResumableUploadResult.Failed()
3762
+ if response.status_code == 308: # Resume Incomplete
3763
+ bytes_uploaded = self._get_bytes_already_uploaded(response, quiet)
3764
+ if bytes_uploaded is None:
3765
+ # There is an error with the Range header so need to start from scratch.
3766
+ return ResumableUploadResult.Failed()
3767
+ result = ResumableUploadResult.Incomplete(bytes_uploaded)
3768
+ if not quiet:
3769
+ print('Already uploaded %d bytes. Will resume upload at %d.' %
3770
+ (result.bytes_uploaded, result.start_at))
3771
+ return result
3772
+ else:
3773
+ if not quiet:
3774
+ print('Server returned %d. Please try again.' %
3775
+ response.status_code)
3776
+ return ResumableUploadResult.Failed()
3777
+
3778
+ def _is_upload_successful(self, response):
3451
3779
  return response.status_code == 200 or response.status_code == 201
3452
3780
 
3781
+ def _get_bytes_already_uploaded(self, response, quiet):
3782
+ range_val = response.headers.get('Range')
3783
+ if range_val is None:
3784
+ return 0 # This means server hasn't received anything before.
3785
+ items = range_val.split('-') # Example: bytes=0-1000 => ['0', '1000']
3786
+ if len(items) != 2:
3787
+ if not quiet:
3788
+ print('Invalid Range header format: %s. Will try again.' %
3789
+ range_val)
3790
+ return None # Shouldn't happen, something's wrong with Range header format.
3791
+ bytes_uploaded_str = items[-1] # Example: ['0', '1000'] => '1000'
3792
+ try:
3793
+ return int(bytes_uploaded_str) # Example: '1000' => 1000
3794
+ except ValueError:
3795
+ if not quiet:
3796
+ print('Invalid Range header format: %s. Will try again.' %
3797
+ range_val)
3798
+ return None # Shouldn't happen, something's wrong with Range header format.
3799
+
3453
3800
  def validate_dataset_string(self, dataset):
3454
3801
  """ determine if a dataset string is valid, meaning it is in the format
3455
3802
  of {username}/{dataset-slug} or {username}/{dataset-slug}/{version-number}.
@@ -957,6 +957,11 @@ def parse_models(subparsers):
957
957
  help=Help.command_models_delete)
958
958
  parser_models_delete_optional = parser_models_delete._action_groups.pop()
959
959
  parser_models_delete_optional.add_argument('model', help=Help.param_model)
960
+ parser_models_delete_optional.add_argument('-y',
961
+ '--yes',
962
+ dest='yes',
963
+ action='store_true',
964
+ help=Help.param_yes)
960
965
  parser_models_delete._action_groups.append(parser_models_delete_optional)
961
966
  parser_models_delete.set_defaults(func=api.model_delete_cli)
962
967
 
@@ -1066,6 +1071,11 @@ def parse_model_instances(subparsers):
1066
1071
  )
1067
1072
  parser_model_instances_delete_optional.add_argument(
1068
1073
  'model_instance', help=Help.param_model_instance)
1074
+ parser_model_instances_delete_optional.add_argument('-y',
1075
+ '--yes',
1076
+ dest='yes',
1077
+ action='store_true',
1078
+ help=Help.param_yes)
1069
1079
  parser_model_instances_delete._action_groups.append(
1070
1080
  parser_model_instances_delete_optional)
1071
1081
  parser_model_instances_delete.set_defaults(
@@ -1184,6 +1194,8 @@ def parse_model_instance_versions(subparsers):
1184
1194
  )
1185
1195
  parser_model_instance_versions_delete_optional.add_argument(
1186
1196
  'model_instance_version', help=Help.param_model_instance_version)
1197
+ parser_model_instance_versions_delete_optional.add_argument(
1198
+ '-y', '--yes', dest='yes', action='store_true', help=Help.param_yes)
1187
1199
  parser_model_instance_versions_delete._action_groups.append(
1188
1200
  parser_model_instance_versions_delete_optional)
1189
1201
  parser_model_instance_versions_delete.set_defaults(
@@ -1214,12 +1226,27 @@ def parse_files(subparsers):
1214
1226
  '--inbox-path',
1215
1227
  dest='inbox_path',
1216
1228
  required=False,
1229
+ default='',
1217
1230
  help=Help.param_files_upload_inbox_path)
1218
1231
  parser_files_upload_optional.add_argument(
1219
1232
  'local_paths',
1220
1233
  metavar='local-path',
1221
1234
  nargs='+',
1222
1235
  help=Help.param_files_upload_local_paths)
1236
+ parser_files_upload_optional.add_argument(
1237
+ '--no-resume',
1238
+ dest='no_resume',
1239
+ action='store_true',
1240
+ required=False,
1241
+ default=False,
1242
+ help=Help.param_files_upload_no_resume)
1243
+ parser_files_upload_optional.add_argument(
1244
+ '--no-compress',
1245
+ dest='no_compress',
1246
+ action='store_true',
1247
+ required=False,
1248
+ default=False,
1249
+ help=Help.param_files_upload_no_compress)
1223
1250
  parser_files_upload._action_groups.append(parser_files_upload_optional)
1224
1251
  parser_files_upload.set_defaults(func=api.files_upload_cli)
1225
1252
 
@@ -1299,12 +1326,13 @@ class Help(object):
1299
1326
 
1300
1327
  kaggle = 'Use one of:\ncompetitions {' + ', '.join(
1301
1328
  competitions_choices) + '}\ndatasets {' + ', '.join(
1302
- datasets_choices) + '}\nmodels {' + ', '.join(
1303
- models_choices) + '}\nmodels instances {' + ', '.join(
1304
- model_instances_choices
1305
- ) + '}\nmodels instances versions {' + ', '.join(
1306
- model_instance_versions_choices
1307
- ) + '}\nconfig {' + ', '.join(config_choices) + '}'
1329
+ datasets_choices) + '}\nkernels {' + ', '.join(
1330
+ kernels_choices) + '}\nmodels {' + ', '.join(
1331
+ models_choices) + '}\nmodels instances {' + ', '.join(
1332
+ model_instances_choices
1333
+ ) + '}\nmodels instances versions {' + ', '.join(
1334
+ model_instance_versions_choices
1335
+ ) + '}\nconfig {' + ', '.join(config_choices) + '}'
1308
1336
 
1309
1337
  group_competitions = 'Commands related to Kaggle competitions'
1310
1338
  group_datasets = 'Commands related to Kaggle datasets'
@@ -1395,6 +1423,9 @@ class Help(object):
1395
1423
  'Unzip the downloaded file. Will delete the zip file when completed.')
1396
1424
  param_untar = (
1397
1425
  'Untar the downloaded file. Will delete the tar file when completed.')
1426
+ param_yes = (
1427
+ 'Sets any confirmation values to "yes" automatically. Users will not be asked to confirm.'
1428
+ )
1398
1429
 
1399
1430
  # Competitions params
1400
1431
  param_competition = (
@@ -1554,8 +1585,10 @@ class Help(object):
1554
1585
  param_files_upload_inbox_path = 'Virtual path on the server where the uploaded files will be stored'
1555
1586
  param_files_upload_local_paths = (
1556
1587
  'List of local filesystem paths. Each path creates a separate file on the server. '
1557
- 'Directories are uploaded as zip archives (e.g., a directory called "data" will be uploaded as "data.zip")'
1558
- )
1588
+ 'Directories are uploaded as zip archives by default (e.g., a directory called '
1589
+ '"data" will be uploaded as "data.zip")')
1590
+ param_files_upload_no_compress = 'Whether to compress directories (zip) or not (tar)'
1591
+ param_files_upload_no_resume = 'Whether to skip resumable uploads.'
1559
1592
 
1560
1593
  # Config params
1561
1594
  param_config_name = ('Name of the configuration parameter\n(one of '
@@ -32,6 +32,7 @@
32
32
 
33
33
  # coding=utf-8
34
34
  import os
35
+ import time
35
36
  from datetime import datetime
36
37
 
37
38
 
@@ -243,3 +244,33 @@ def parse(string):
243
244
  except:
244
245
  pass
245
246
  return string
247
+
248
+
249
class ResumableUploadResult(object):
    """Outcome of one attempt at a resumable upload.

    Carries one of the three status codes below plus, for INCOMPLETE results,
    how many bytes the server already holds and the offset to resume from.
    """

    # Upload was complete, i.e., all bytes were received by the server.
    COMPLETE = 1

    # There was a non-transient error during the upload or the upload expired.
    # The upload cannot be resumed so it should be restarted from scratch
    # (i.e., call /api/v1/files/upload to initiate the upload and get the
    # create/upload url and token).
    FAILED = 2

    # Upload was interrupted due to some (transient) failure but it can be
    # safely resumed.
    INCOMPLETE = 3

    def __init__(self, result, bytes_uploaded=None):
        self.result = result
        # Last byte offset the server acknowledged, or None if unknown.
        self.bytes_uploaded = bytes_uploaded
        # Offset to resume from: the byte after the last acknowledged one.
        self.start_at = 0 if bytes_uploaded is None else bytes_uploaded + 1

    # Bug fix: these factory helpers were plain functions without
    # @staticmethod; they worked only when accessed on the class and raised
    # TypeError when called on an instance.
    @staticmethod
    def Complete():
        return ResumableUploadResult(ResumableUploadResult.COMPLETE)

    @staticmethod
    def Failed():
        return ResumableUploadResult(ResumableUploadResult.FAILED)

    @staticmethod
    def Incomplete(bytes_uploaded=None):
        return ResumableUploadResult(ResumableUploadResult.INCOMPLETE,
                                     bytes_uploaded)
@@ -144,7 +144,7 @@ class ModelNewInstanceRequest(object):
144
144
  """
145
145
  if framework is None:
146
146
  raise ValueError("Invalid value for `framework`, must not be `None`") # noqa: E501
147
- allowed_values = ["tensorFlow1", "tensorFlow2", "tfLite", "tfJs", "pyTorch", "jax", "coral", "scikitLearn", "mxnet", "onnx"] # noqa: E501
147
+ allowed_values = ["tensorFlow1", "tensorFlow2", "tfLite", "tfJs", "pyTorch", "jax", "coral", "scikitLearn", "mxnet", "onnx", "keras"] # noqa: E501
148
148
  if framework not in allowed_values:
149
149
  raise ValueError(
150
150
  "Invalid value for `framework` ({0}), must be one of {1}" # noqa: E501
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: kaggle
3
- Version: 1.6.0a3
3
+ Version: 1.6.0a5
4
4
  Summary: Kaggle API
5
5
  Home-page: https://github.com/Kaggle/kaggle-api
6
6
  Author: Kaggle
@@ -19,7 +19,7 @@ from setuptools import setup, find_packages
19
19
 
20
20
  setup(
21
21
  name='kaggle',
22
- version='1.6.0a3',
22
+ version='1.6.0a5',
23
23
  description='Kaggle API',
24
24
  long_description=
25
25
  ('Official API for https://www.kaggle.com, accessible using a command line '
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes