kaggle-1.7.4.5-py3-none-any.whl → kaggle-1.8.0-py3-none-any.whl
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- kaggle/__init__.py +10 -6
- kaggle/api/kaggle_api.py +574 -598
- kaggle/api/kaggle_api_extended.py +5251 -4769
- kaggle/cli.py +1335 -1585
- kaggle/models/api_blob_type.py +3 -3
- kaggle/models/dataset_column.py +165 -174
- kaggle/models/dataset_new_request.py +83 -41
- kaggle/models/dataset_new_version_request.py +32 -25
- kaggle/models/dataset_update_settings_request.py +35 -27
- kaggle/models/kaggle_models_extended.py +169 -172
- kaggle/models/kernel_push_request.py +66 -49
- kaggle/models/model_instance_new_version_request.py +10 -18
- kaggle/models/model_instance_update_request.py +103 -34
- kaggle/models/model_new_instance_request.py +138 -41
- kaggle/models/model_new_request.py +35 -27
- kaggle/models/model_update_request.py +32 -25
- kaggle/models/start_blob_upload_request.py +192 -195
- kaggle/models/start_blob_upload_response.py +98 -98
- kaggle/models/upload_file.py +114 -120
- kaggle/test/test_authenticate.py +23 -23
- {kaggle-1.7.4.5.dist-info → kaggle-1.8.0.dist-info}/METADATA +11 -15
- kaggle-1.8.0.dist-info/RECORD +148 -0
- kagglesdk/__init__.py +5 -1
- kagglesdk/benchmarks/services/__init__.py +0 -0
- kagglesdk/benchmarks/services/benchmarks_api_service.py +19 -0
- kagglesdk/benchmarks/types/__init__.py +0 -0
- kagglesdk/benchmarks/types/benchmark_types.py +307 -0
- kagglesdk/benchmarks/types/benchmarks_api_service.py +243 -0
- kagglesdk/blobs/services/blob_api_service.py +1 -1
- kagglesdk/blobs/types/blob_api_service.py +2 -2
- kagglesdk/common/services/__init__.py +0 -0
- kagglesdk/common/services/operations_service.py +46 -0
- kagglesdk/common/types/file_download.py +1 -1
- kagglesdk/common/types/http_redirect.py +1 -1
- kagglesdk/common/types/operations.py +194 -0
- kagglesdk/common/types/operations_service.py +48 -0
- kagglesdk/community/__init__.py +0 -0
- kagglesdk/community/types/__init__.py +0 -0
- kagglesdk/community/types/content_enums.py +44 -0
- kagglesdk/community/types/organization.py +410 -0
- kagglesdk/competitions/services/competition_api_service.py +49 -12
- kagglesdk/competitions/types/competition.py +14 -0
- kagglesdk/competitions/types/competition_api_service.py +1639 -1275
- kagglesdk/competitions/types/search_competitions.py +28 -0
- kagglesdk/datasets/databundles/__init__.py +0 -0
- kagglesdk/datasets/databundles/types/__init__.py +0 -0
- kagglesdk/datasets/databundles/types/databundle_api_types.py +540 -0
- kagglesdk/datasets/services/dataset_api_service.py +39 -14
- kagglesdk/datasets/types/dataset_api_service.py +554 -300
- kagglesdk/datasets/types/dataset_enums.py +21 -0
- kagglesdk/datasets/types/dataset_service.py +145 -0
- kagglesdk/datasets/types/dataset_types.py +74 -74
- kagglesdk/datasets/types/search_datasets.py +6 -0
- kagglesdk/discussions/__init__.py +0 -0
- kagglesdk/discussions/types/__init__.py +0 -0
- kagglesdk/discussions/types/search_discussions.py +43 -0
- kagglesdk/discussions/types/writeup_enums.py +11 -0
- kagglesdk/education/services/education_api_service.py +1 -1
- kagglesdk/education/types/education_api_service.py +1 -1
- kagglesdk/kaggle_client.py +46 -23
- kagglesdk/kaggle_creds.py +148 -0
- kagglesdk/kaggle_env.py +89 -25
- kagglesdk/kaggle_http_client.py +216 -306
- kagglesdk/kaggle_oauth.py +200 -0
- kagglesdk/kaggle_object.py +286 -293
- kagglesdk/kernels/services/kernels_api_service.py +46 -9
- kagglesdk/kernels/types/kernels_api_service.py +635 -159
- kagglesdk/kernels/types/kernels_enums.py +6 -0
- kagglesdk/kernels/types/search_kernels.py +6 -0
- kagglesdk/licenses/__init__.py +0 -0
- kagglesdk/licenses/types/__init__.py +0 -0
- kagglesdk/licenses/types/licenses_types.py +182 -0
- kagglesdk/models/services/model_api_service.py +41 -17
- kagglesdk/models/types/model_api_service.py +987 -637
- kagglesdk/models/types/model_enums.py +8 -0
- kagglesdk/models/types/model_service.py +71 -71
- kagglesdk/models/types/model_types.py +1057 -5
- kagglesdk/models/types/search_models.py +8 -0
- kagglesdk/search/__init__.py +0 -0
- kagglesdk/search/services/__init__.py +0 -0
- kagglesdk/search/services/search_api_service.py +19 -0
- kagglesdk/search/types/__init__.py +0 -0
- kagglesdk/search/types/search_api_service.py +2435 -0
- kagglesdk/search/types/search_content_shared.py +50 -0
- kagglesdk/search/types/search_enums.py +45 -0
- kagglesdk/search/types/search_service.py +303 -0
- kagglesdk/security/services/iam_service.py +31 -0
- kagglesdk/security/services/oauth_service.py +27 -1
- kagglesdk/security/types/authentication.py +63 -63
- kagglesdk/security/types/iam_service.py +496 -0
- kagglesdk/security/types/oauth_service.py +797 -10
- kagglesdk/security/types/roles.py +8 -0
- kagglesdk/security/types/security_types.py +159 -0
- kagglesdk/test/__init__.py +0 -0
- kagglesdk/test/test_client.py +20 -22
- kagglesdk/users/services/account_service.py +13 -1
- kagglesdk/users/services/group_api_service.py +31 -0
- kagglesdk/users/types/account_service.py +169 -28
- kagglesdk/users/types/group_api_service.py +315 -0
- kagglesdk/users/types/group_types.py +165 -0
- kagglesdk/users/types/groups_enum.py +8 -0
- kagglesdk/users/types/progression_service.py +9 -0
- kagglesdk/users/types/search_users.py +23 -0
- kagglesdk/users/types/user_avatar.py +226 -0
- kaggle/configuration.py +0 -206
- kaggle-1.7.4.5.dist-info/RECORD +0 -98
- {kaggle-1.7.4.5.dist-info → kaggle-1.8.0.dist-info}/WHEEL +0 -0
- {kaggle-1.7.4.5.dist-info → kaggle-1.8.0.dist-info}/entry_points.txt +0 -0
- {kaggle-1.7.4.5.dist-info → kaggle-1.8.0.dist-info}/licenses/LICENSE.txt +0 -0
- {kaggle/test → kagglesdk/benchmarks}/__init__.py +0 -0
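For readers who want to reproduce a listing like the one above, wheels are ordinary zip archives, so their file manifests can be compared with the standard library alone. A minimal sketch, assuming both wheels have already been downloaded into the working directory (the local filenames below are assumptions, not part of this diff):

    import zipfile

    OLD_WHEEL = "kaggle-1.7.4.5-py3-none-any.whl"  # assumed local filename
    NEW_WHEEL = "kaggle-1.8.0-py3-none-any.whl"    # assumed local filename

    def manifest(path):
        """Return the set of file names contained in a wheel (a zip archive)."""
        with zipfile.ZipFile(path) as wheel:
            return set(wheel.namelist())

    old_files = manifest(OLD_WHEEL)
    new_files = manifest(NEW_WHEEL)

    # Files that exist in only one of the two releases.
    for name in sorted(new_files - old_files):
        print("added:  ", name)
    for name in sorted(old_files - new_files):
        print("removed:", name)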
kaggle/models/api_blob_type.py
CHANGED

@@ -1,4 +1,4 @@
 class ApiBlobType(object):
+    DATASET = "dataset"
+    MODEL = "model"
+    INBOX = "inbox"
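The three constants are plain strings, so code that needs to validate a blob type can use a simple membership test. A minimal, standalone illustration (the VALID_BLOB_TYPES helper below is an assumption for the example, not part of the package):

    class ApiBlobType(object):
        DATASET = "dataset"
        MODEL = "model"
        INBOX = "inbox"

    # Illustrative helper: collect the allowed values for a quick membership check.
    VALID_BLOB_TYPES = {ApiBlobType.DATASET, ApiBlobType.MODEL, ApiBlobType.INBOX}

    assert ApiBlobType.DATASET in VALID_BLOB_TYPES
    assert "notebook" not in VALID_BLOB_TYPES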
kaggle/models/dataset_column.py
CHANGED

@@ -23,215 +23,206 @@ import six

 class DatasetColumn(object):
+    """
     Attributes:
       column_types (dict): The key is attribute name
         and the value is attribute type.
       attribute_map (dict): The key is attribute name
         and the value is json key in definition.
     """

+    column_types = {"order": "float", "name": "str", "type": "str", "original_type": "str", "description": "str"}

+    attribute_map = {
+        "order": "order",
+        "name": "name",
+        "type": "type",
+        "original_type": "originalType",
+        "description": "description",
+    }

+    def __init__(self, order=None, name=None, type=None, original_type=None, description=None):  # noqa: E501
+        """DatasetColumn - a model defined in Swagger"""  # noqa: E501

+        self._order = None
+        self._name = None
+        self._type = None
+        self._original_type = None
+        self._description = None
+        self.discriminator = None

+        if order is not None:
+            self.order = order
+        if name is not None:
+            self.name = name
+        if type is not None:
+            self.type = type
+        if original_type is not None:
+            self.original_type = original_type
+        if description is not None:
+            self.description = description

+    @property
+    def order(self):
+        """Gets the order of this DatasetColumn. # noqa: E501.

+        The order that the column comes in, 0-based. (The first column is 0,
+        second is 1, etc.) # noqa: E501

+        :return: The order of this DatasetColumn. # noqa: E501
+        :rtype: float
+        """
+        return self._order

+    @order.setter
+    def order(self, order):
+        """Sets the order of this DatasetColumn.

+        The order that the column comes in, 0-based. (The first column is 0,
+        second is 1, etc.) # noqa: E501

+        :param order: The order of this DatasetColumn. # noqa: E501
+        :type: float
+        """

+        self._order = order

+    @property
+    def name(self):
+        """Gets the name of this DatasetColumn. # noqa: E501.

+        The column name # noqa: E501

+        :return: The name of this DatasetColumn. # noqa: E501
+        :rtype: str
+        """
+        return self._name

+    @name.setter
+    def name(self, name):
+        """Sets the name of this DatasetColumn.

+        The column name # noqa: E501

+        :param name: The name of this DatasetColumn. # noqa: E501
+        :type: str
+        """

+        self._name = name

+    @property
+    def type(self):
+        """Gets the type of this DatasetColumn. # noqa: E501.

+        The type of all of the fields in the column. Please see the data
+        types on
+        https://github.com/Kaggle/kaggle-api/wiki/Dataset-Metadata
+        # noqa: E501

+        :return: The type of this DatasetColumn. # noqa: E501
+        :rtype: str
+        """
+        return self._type

+    @type.setter
+    def type(self, type):
+        """Sets the type of this DatasetColumn.

+        The type of all of the fields in the column. Please see the data
+        types on
+        https://github.com/Kaggle/kaggle-api/wiki/Dataset-Metadata
+        # noqa: E501

+        :param type: The type of this DatasetColumn. # noqa: E501
+        :type: str
+        """

+        self._type = type

+    @property
+    def original_type(self):
+        """Gets the original_type of this DatasetColumn. # noqa: E501.

+        Used to store the original type of the column, which will be converted to Kaggle's types. For example, an `originalType` of `\"integer\"` would convert to a `type` of `\"numeric\"` # noqa: E501

+        :return: The original_type of this DatasetColumn. # noqa: E501
+        :rtype: str
+        """
+        return self._original_type

+    @original_type.setter
+    def original_type(self, original_type):
+        """Sets the original_type of this DatasetColumn.

+        Used to store the original type of the column, which will be converted to Kaggle's types. For example, an `originalType` of `\"integer\"` would convert to a `type` of `\"numeric\"` # noqa: E501

+        :param original_type: The original_type of this DatasetColumn. # noqa: E501
+        :type: str
+        """

+        self._original_type = original_type

+    @property
+    def description(self):
+        """Gets the description of this DatasetColumn. # noqa: E501.

+        The description of the column # noqa: E501

+        :return: The description of this DatasetColumn. # noqa: E501
+        :rtype: str
+        """
+        return self._description

+    @description.setter
+    def description(self, description):
+        """Sets the description of this DatasetColumn.

+        The description of the column # noqa: E501

+        :param description: The description of this DatasetColumn. # noqa:
+          E501
+        :type: str
+        """

+        self._description = description

+    def to_dict(self):
+        """Returns the model properties as a dict."""
+        result = {}

+        for attr, _ in six.iteritems(self.column_types):
+            value = getattr(self, attr)
+            if isinstance(value, list):
+                result[attr] = list(map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value))
+            elif hasattr(value, "to_dict"):
+                result[attr] = value.to_dict()
+            elif isinstance(value, dict):
+                result[attr] = dict(
+                    map(
+                        lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item,
+                        value.items(),
+                    )
+                )
+            else:
+                result[attr] = value

+        return result

+    def to_str(self):
+        """Returns the string representation of the model."""
+        return pprint.pformat(self.to_dict())

+    def __repr__(self):
+        """For `print` and `pprint`"""
+        return self.to_str()

+    def __eq__(self, other):
+        """Returns true if both objects are equal."""
+        if not isinstance(other, DatasetColumn):
+            return False

+        return self.__dict__ == other.__dict__

+    def __ne__(self, other):
+        """Returns true if both objects are not equal."""
+        return not self == other
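Because the hunk above contains the entire reformatted class, its behavior is easy to exercise. A minimal sketch (it assumes the kaggle package is installed and importable, which may in turn expect API credentials to be configured; the column values are made up):

    from kaggle.models.dataset_column import DatasetColumn

    # Describe one column of a dataset, as in dataset metadata.
    column = DatasetColumn(order=0, name="passenger_id", type="numeric", original_type="integer")

    # to_dict() iterates column_types, so the keys come back in snake_case;
    # attribute_map holds the camelCase JSON names (e.g. "originalType") used in the definition.
    print(column.to_dict())
    # {'order': 0, 'name': 'passenger_id', 'type': 'numeric', 'original_type': 'integer', 'description': None}

    # __eq__ compares the underlying attribute dicts.
    print(column == DatasetColumn(order=0, name="passenger_id", type="numeric", original_type="integer"))  # True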
kaggle/models/dataset_new_request.py
CHANGED

@@ -32,33 +32,46 @@ class DatasetNewRequest(object):
     attribute_map (dict): The key is attribute name
       and the value is json key in definition.
     """
+
     project_types = {
+        "title": "str",
+        "slug": "str",
+        "owner_slug": "str",
+        "license_name": "str",
+        "subtitle": "str",
+        "description": "str",
+        "files": "list[UploadFile]",
+        "is_private": "bool",
+        "convert_to_csv": "bool",
+        "category_ids": "list[str]",
     }

     attribute_map = {
+        "title": "title",
+        "slug": "slug",
+        "owner_slug": "ownerSlug",
+        "license_name": "licenseName",
+        "subtitle": "subtitle",
+        "description": "description",
+        "files": "files",
+        "is_private": "isPrivate",
+        "convert_to_csv": "convertToCsv",
+        "category_ids": "categoryIds",
     }

+    def __init__(
+        self,
+        title=None,
+        slug=None,
+        owner_slug=None,
+        license_name="unknown",
+        subtitle=None,
+        description="",
+        files=None,
+        is_private=True,
+        convert_to_csv=True,
+        category_ids=None,
+    ):  # noqa: E501

         self._title = None
         self._slug = None

@@ -187,22 +200,54 @@ class DatasetNewRequest(object):
         # noqa: E501
         :type: str
         """
-        allowed_values = [
+        allowed_values = [
+            "CC0-1.0",
+            "CC-BY-SA-4.0",
+            "GPL-2.0",
+            "ODbL-1.0",
+            "CC-BY-NC-SA-4.0",
+            "unknown",
+            "DbCL-1.0",
+            "CC-BY-SA-3.0",
+            "copyright-authors",
+            "other",
+            "reddit-api",
+            "world-bank",
+            "CC-BY-4.0",
+            "CC-BY-NC-4.0",
+            "PDDL",
+            "CC-BY-3.0",
+            "CC-BY-3.0-IGO",
+            "US-Government-Works",
+            "CC-BY-NC-SA-3.0-IGO",
+            "CDLA-Permissive-1.0",
+            "CDLA-Sharing-1.0",
+            "CC-BY-ND-4.0",
+            "CC-BY-NC-ND-4.0",
+            "ODC-BY-1.0",
+            "LGPL-3.0",
+            "AGPL-3.0",
+            "FDL-1.3",
+            "EU-ODP-Legal-Notice",
+            "apache-2.0",
+            "GPL-3.0",
+        ]  # noqa: E501
         if license_name not in allowed_values:
             raise ValueError(
-                "Invalid value for `license_name` ({0}), must be one of {1}"  # noqa: E501
+                "Invalid value for `license_name` ({0}), must be one of {1}".format(  # noqa: E501
+                    license_name, allowed_values
+                )
             )
         else:
             license_name = license_name.lower()
+            if license_name[0 - 1] == "cc":
+                license_name = "cc"
+            elif license_name[0 - 3] == "gpl":
+                license_name = "gpl"
+            elif license_name[0 - 3] == "odb":
+                license_name = "odb"
             else:
+                license_name = "other"
         self._license_name = license_name

     @property

@@ -363,18 +408,16 @@ class DatasetNewRequest(object):
         for attr, _ in six.iteritems(self.project_types):
             value = getattr(self, attr)
             if isinstance(value, list):
-                result[attr] = list(map(
-                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
-                    value
-                ))
+                result[attr] = list(map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value))
             elif hasattr(value, "to_dict"):
                 result[attr] = value.to_dict()
             elif isinstance(value, dict):
-                result[attr] = dict(
+                result[attr] = dict(
+                    map(
+                        lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item,
+                        value.items(),
+                    )
+                )
             else:
                 result[attr] = value

@@ -398,4 +441,3 @@ class DatasetNewRequest(object):
     def __ne__(self, other):
         """Returns true if both objects are not equal."""
         return not self == other
-
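One detail worth noting in the hunk above: license_name[0 - 1] and license_name[0 - 3] are single-character lookups (indices -1 and -3), not prefix slices, so as written the "cc"/"gpl"/"odb" comparisons can never match and the chain falls through to "other". A quick standalone check of those expressions:

    # Indices copied from the hunk above: 0 - 1 == -1 and 0 - 3 == -3,
    # so each lookup yields one character rather than a prefix.
    license_name = "CC0-1.0".lower()   # one of the allowed_values shown above

    print(license_name[0 - 1])  # '0'  -> never equal to "cc"
    print(license_name[0 - 3])  # '1'  -> never equal to "gpl" or "odb"
    print(license_name[0:2])    # 'cc' -> the prefix a slice would have produced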
kaggle/models/dataset_new_version_request.py
CHANGED

@@ -33,27 +33,37 @@ class DatasetNewVersionRequest(object):
     attribute_map (dict): The key is attribute name
       and the value is json key in definition.
     """
+
     project_types = {
+        "version_notes": "str",
+        "subtitle": "str",
+        "description": "str",
+        "files": "list[UploadFile]",
+        "convert_to_csv": "bool",
+        "category_ids": "list[str]",
+        "delete_old_versions": "bool",
     }

     attribute_map = {
+        "version_notes": "versionNotes",
+        "subtitle": "subtitle",
+        "description": "description",
+        "files": "files",
+        "convert_to_csv": "convertToCsv",
+        "category_ids": "categoryIds",
+        "delete_old_versions": "deleteOldVersions",
     }

+    def __init__(
+        self,
+        version_notes=None,
+        subtitle=None,
+        description=None,
+        files=None,
+        convert_to_csv=True,
+        category_ids=None,
+        delete_old_versions=False,
+    ):  # noqa: E501

         self._version_notes = None
         self._subtitle = None

@@ -274,18 +284,16 @@ class DatasetNewVersionRequest(object):
         for attr, _ in six.iteritems(self.project_types):
             value = getattr(self, attr)
             if isinstance(value, list):
-                result[attr] = list(map(
-                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
-                    value
-                ))
+                result[attr] = list(map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value))
             elif hasattr(value, "to_dict"):
                 result[attr] = value.to_dict()
             elif isinstance(value, dict):
-                result[attr] = dict(
+                result[attr] = dict(
+                    map(
+                        lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item,
+                        value.items(),
+                    )
+                )
             else:
                 result[attr] = value

@@ -309,4 +317,3 @@ class DatasetNewVersionRequest(object):
     def __ne__(self, other):
         """Returns true if both objects are not equal."""
         return not self == other
-
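As in DatasetNewRequest, the rewritten attribute_map spells out how the Python attribute names translate to the camelCase JSON keys in the definition. A small sketch of that mapping using only the dictionary shown above (the request values are invented for illustration):

    # snake_case attribute names -> camelCase JSON keys, copied from the hunk above.
    attribute_map = {
        "version_notes": "versionNotes",
        "subtitle": "subtitle",
        "description": "description",
        "files": "files",
        "convert_to_csv": "convertToCsv",
        "category_ids": "categoryIds",
        "delete_old_versions": "deleteOldVersions",
    }

    # Hypothetical request payload using the constructor defaults shown above
    # (convert_to_csv=True, delete_old_versions=False).
    request = {
        "version_notes": "Add 2024 data",
        "convert_to_csv": True,
        "delete_old_versions": False,
    }

    json_body = {attribute_map[attr]: value for attr, value in request.items()}
    print(json_body)
    # {'versionNotes': 'Add 2024 data', 'convertToCsv': True, 'deleteOldVersions': False}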