udata 9.1.2.dev30355__py2.py3-none-any.whl → 9.1.2.dev30382__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of udata might be problematic.

Files changed (425)
  1. tasks/__init__.py +109 -107
  2. tasks/helpers.py +18 -18
  3. udata/__init__.py +4 -4
  4. udata/admin/views.py +5 -5
  5. udata/api/__init__.py +135 -124
  6. udata/api/commands.py +45 -37
  7. udata/api/errors.py +5 -4
  8. udata/api/fields.py +23 -21
  9. udata/api/oauth2.py +55 -74
  10. udata/api/parsers.py +15 -15
  11. udata/api/signals.py +1 -1
  12. udata/api_fields.py +137 -89
  13. udata/app.py +56 -54
  14. udata/assets.py +5 -5
  15. udata/auth/__init__.py +37 -26
  16. udata/auth/forms.py +23 -15
  17. udata/auth/helpers.py +1 -1
  18. udata/auth/mails.py +3 -3
  19. udata/auth/password_validation.py +19 -15
  20. udata/auth/views.py +94 -68
  21. udata/commands/__init__.py +71 -69
  22. udata/commands/cache.py +7 -7
  23. udata/commands/db.py +201 -140
  24. udata/commands/dcat.py +36 -30
  25. udata/commands/fixtures.py +100 -84
  26. udata/commands/images.py +21 -20
  27. udata/commands/info.py +17 -20
  28. udata/commands/init.py +10 -10
  29. udata/commands/purge.py +12 -13
  30. udata/commands/serve.py +41 -29
  31. udata/commands/static.py +16 -18
  32. udata/commands/test.py +20 -20
  33. udata/commands/tests/fixtures.py +26 -24
  34. udata/commands/worker.py +31 -33
  35. udata/core/__init__.py +12 -12
  36. udata/core/activity/__init__.py +0 -1
  37. udata/core/activity/api.py +59 -49
  38. udata/core/activity/models.py +28 -26
  39. udata/core/activity/signals.py +1 -1
  40. udata/core/activity/tasks.py +16 -10
  41. udata/core/badges/api.py +6 -6
  42. udata/core/badges/commands.py +14 -13
  43. udata/core/badges/fields.py +8 -5
  44. udata/core/badges/forms.py +7 -4
  45. udata/core/badges/models.py +16 -31
  46. udata/core/badges/permissions.py +1 -3
  47. udata/core/badges/signals.py +2 -2
  48. udata/core/badges/tasks.py +3 -2
  49. udata/core/badges/tests/test_commands.py +10 -10
  50. udata/core/badges/tests/test_model.py +24 -31
  51. udata/core/contact_point/api.py +19 -18
  52. udata/core/contact_point/api_fields.py +21 -14
  53. udata/core/contact_point/factories.py +2 -2
  54. udata/core/contact_point/forms.py +7 -6
  55. udata/core/contact_point/models.py +3 -5
  56. udata/core/dataservices/api.py +26 -21
  57. udata/core/dataservices/factories.py +13 -11
  58. udata/core/dataservices/models.py +35 -40
  59. udata/core/dataservices/permissions.py +4 -4
  60. udata/core/dataservices/rdf.py +40 -17
  61. udata/core/dataservices/tasks.py +4 -3
  62. udata/core/dataset/actions.py +10 -10
  63. udata/core/dataset/activities.py +21 -23
  64. udata/core/dataset/api.py +321 -298
  65. udata/core/dataset/api_fields.py +443 -271
  66. udata/core/dataset/apiv2.py +305 -229
  67. udata/core/dataset/commands.py +38 -36
  68. udata/core/dataset/constants.py +61 -54
  69. udata/core/dataset/csv.py +70 -74
  70. udata/core/dataset/events.py +39 -32
  71. udata/core/dataset/exceptions.py +8 -4
  72. udata/core/dataset/factories.py +57 -65
  73. udata/core/dataset/forms.py +87 -63
  74. udata/core/dataset/models.py +336 -280
  75. udata/core/dataset/permissions.py +9 -6
  76. udata/core/dataset/preview.py +15 -17
  77. udata/core/dataset/rdf.py +156 -122
  78. udata/core/dataset/search.py +92 -77
  79. udata/core/dataset/signals.py +1 -1
  80. udata/core/dataset/tasks.py +63 -54
  81. udata/core/discussions/actions.py +5 -5
  82. udata/core/discussions/api.py +124 -120
  83. udata/core/discussions/factories.py +2 -2
  84. udata/core/discussions/forms.py +9 -7
  85. udata/core/discussions/metrics.py +1 -3
  86. udata/core/discussions/models.py +25 -24
  87. udata/core/discussions/notifications.py +18 -14
  88. udata/core/discussions/permissions.py +3 -3
  89. udata/core/discussions/signals.py +4 -4
  90. udata/core/discussions/tasks.py +24 -28
  91. udata/core/followers/api.py +32 -33
  92. udata/core/followers/models.py +9 -9
  93. udata/core/followers/signals.py +3 -3
  94. udata/core/jobs/actions.py +7 -7
  95. udata/core/jobs/api.py +99 -92
  96. udata/core/jobs/commands.py +48 -49
  97. udata/core/jobs/forms.py +11 -11
  98. udata/core/jobs/models.py +6 -6
  99. udata/core/metrics/__init__.py +2 -2
  100. udata/core/metrics/commands.py +34 -30
  101. udata/core/metrics/models.py +2 -4
  102. udata/core/metrics/signals.py +1 -1
  103. udata/core/metrics/tasks.py +3 -3
  104. udata/core/organization/activities.py +12 -15
  105. udata/core/organization/api.py +167 -174
  106. udata/core/organization/api_fields.py +183 -124
  107. udata/core/organization/apiv2.py +32 -32
  108. udata/core/organization/commands.py +20 -22
  109. udata/core/organization/constants.py +11 -11
  110. udata/core/organization/csv.py +17 -15
  111. udata/core/organization/factories.py +8 -11
  112. udata/core/organization/forms.py +32 -26
  113. udata/core/organization/metrics.py +2 -1
  114. udata/core/organization/models.py +87 -67
  115. udata/core/organization/notifications.py +18 -14
  116. udata/core/organization/permissions.py +10 -11
  117. udata/core/organization/rdf.py +14 -14
  118. udata/core/organization/search.py +30 -28
  119. udata/core/organization/signals.py +7 -7
  120. udata/core/organization/tasks.py +42 -61
  121. udata/core/owned.py +38 -27
  122. udata/core/post/api.py +82 -81
  123. udata/core/post/constants.py +8 -5
  124. udata/core/post/factories.py +4 -4
  125. udata/core/post/forms.py +13 -14
  126. udata/core/post/models.py +20 -22
  127. udata/core/post/tests/test_api.py +30 -32
  128. udata/core/reports/api.py +8 -7
  129. udata/core/reports/constants.py +1 -3
  130. udata/core/reports/models.py +10 -10
  131. udata/core/reuse/activities.py +15 -19
  132. udata/core/reuse/api.py +123 -126
  133. udata/core/reuse/api_fields.py +120 -85
  134. udata/core/reuse/apiv2.py +11 -10
  135. udata/core/reuse/constants.py +23 -23
  136. udata/core/reuse/csv.py +18 -18
  137. udata/core/reuse/factories.py +5 -9
  138. udata/core/reuse/forms.py +24 -21
  139. udata/core/reuse/models.py +55 -51
  140. udata/core/reuse/permissions.py +2 -2
  141. udata/core/reuse/search.py +49 -46
  142. udata/core/reuse/signals.py +1 -1
  143. udata/core/reuse/tasks.py +4 -5
  144. udata/core/site/api.py +47 -50
  145. udata/core/site/factories.py +2 -2
  146. udata/core/site/forms.py +4 -5
  147. udata/core/site/models.py +94 -63
  148. udata/core/site/rdf.py +14 -14
  149. udata/core/spam/api.py +16 -9
  150. udata/core/spam/constants.py +4 -4
  151. udata/core/spam/fields.py +13 -7
  152. udata/core/spam/models.py +27 -20
  153. udata/core/spam/signals.py +1 -1
  154. udata/core/spam/tests/test_spam.py +6 -5
  155. udata/core/spatial/api.py +72 -80
  156. udata/core/spatial/api_fields.py +73 -58
  157. udata/core/spatial/commands.py +67 -64
  158. udata/core/spatial/constants.py +3 -3
  159. udata/core/spatial/factories.py +37 -54
  160. udata/core/spatial/forms.py +27 -26
  161. udata/core/spatial/geoids.py +17 -17
  162. udata/core/spatial/models.py +43 -47
  163. udata/core/spatial/tasks.py +2 -1
  164. udata/core/spatial/tests/test_api.py +115 -130
  165. udata/core/spatial/tests/test_fields.py +74 -77
  166. udata/core/spatial/tests/test_geoid.py +22 -22
  167. udata/core/spatial/tests/test_models.py +5 -7
  168. udata/core/spatial/translations.py +16 -16
  169. udata/core/storages/__init__.py +16 -18
  170. udata/core/storages/api.py +66 -64
  171. udata/core/storages/tasks.py +7 -7
  172. udata/core/storages/utils.py +15 -15
  173. udata/core/storages/views.py +5 -6
  174. udata/core/tags/api.py +17 -14
  175. udata/core/tags/csv.py +4 -4
  176. udata/core/tags/models.py +8 -5
  177. udata/core/tags/tasks.py +11 -13
  178. udata/core/tags/views.py +4 -4
  179. udata/core/topic/api.py +84 -73
  180. udata/core/topic/apiv2.py +157 -127
  181. udata/core/topic/factories.py +3 -4
  182. udata/core/topic/forms.py +12 -14
  183. udata/core/topic/models.py +14 -19
  184. udata/core/topic/parsers.py +26 -26
  185. udata/core/user/activities.py +30 -29
  186. udata/core/user/api.py +151 -152
  187. udata/core/user/api_fields.py +132 -100
  188. udata/core/user/apiv2.py +7 -7
  189. udata/core/user/commands.py +38 -38
  190. udata/core/user/factories.py +8 -9
  191. udata/core/user/forms.py +14 -11
  192. udata/core/user/metrics.py +2 -2
  193. udata/core/user/models.py +68 -69
  194. udata/core/user/permissions.py +4 -5
  195. udata/core/user/rdf.py +7 -8
  196. udata/core/user/tasks.py +2 -2
  197. udata/core/user/tests/test_user_model.py +24 -16
  198. udata/db/tasks.py +2 -1
  199. udata/entrypoints.py +35 -31
  200. udata/errors.py +2 -1
  201. udata/event/values.py +6 -6
  202. udata/factories.py +2 -2
  203. udata/features/identicon/api.py +5 -6
  204. udata/features/identicon/backends.py +48 -55
  205. udata/features/identicon/tests/test_backends.py +4 -5
  206. udata/features/notifications/__init__.py +0 -1
  207. udata/features/notifications/actions.py +9 -9
  208. udata/features/notifications/api.py +17 -13
  209. udata/features/territories/__init__.py +12 -10
  210. udata/features/territories/api.py +14 -15
  211. udata/features/territories/models.py +23 -28
  212. udata/features/transfer/actions.py +8 -11
  213. udata/features/transfer/api.py +84 -77
  214. udata/features/transfer/factories.py +2 -1
  215. udata/features/transfer/models.py +11 -12
  216. udata/features/transfer/notifications.py +19 -15
  217. udata/features/transfer/permissions.py +5 -5
  218. udata/forms/__init__.py +5 -2
  219. udata/forms/fields.py +164 -172
  220. udata/forms/validators.py +19 -22
  221. udata/forms/widgets.py +9 -13
  222. udata/frontend/__init__.py +31 -26
  223. udata/frontend/csv.py +68 -58
  224. udata/frontend/markdown.py +40 -44
  225. udata/harvest/actions.py +89 -77
  226. udata/harvest/api.py +294 -238
  227. udata/harvest/backends/__init__.py +4 -4
  228. udata/harvest/backends/base.py +128 -111
  229. udata/harvest/backends/dcat.py +80 -66
  230. udata/harvest/commands.py +56 -60
  231. udata/harvest/csv.py +8 -8
  232. udata/harvest/exceptions.py +6 -3
  233. udata/harvest/filters.py +24 -23
  234. udata/harvest/forms.py +27 -28
  235. udata/harvest/models.py +88 -80
  236. udata/harvest/notifications.py +15 -10
  237. udata/harvest/signals.py +13 -13
  238. udata/harvest/tasks.py +11 -10
  239. udata/harvest/tests/factories.py +23 -24
  240. udata/harvest/tests/test_actions.py +136 -166
  241. udata/harvest/tests/test_api.py +220 -214
  242. udata/harvest/tests/test_base_backend.py +117 -112
  243. udata/harvest/tests/test_dcat_backend.py +380 -308
  244. udata/harvest/tests/test_filters.py +33 -22
  245. udata/harvest/tests/test_models.py +11 -14
  246. udata/harvest/tests/test_notifications.py +6 -7
  247. udata/harvest/tests/test_tasks.py +7 -6
  248. udata/i18n.py +237 -78
  249. udata/linkchecker/backends.py +5 -11
  250. udata/linkchecker/checker.py +23 -22
  251. udata/linkchecker/commands.py +4 -6
  252. udata/linkchecker/models.py +6 -6
  253. udata/linkchecker/tasks.py +18 -20
  254. udata/mail.py +21 -21
  255. udata/migrations/2020-07-24-remove-s-from-scope-oauth.py +9 -8
  256. udata/migrations/2020-08-24-add-fs-filename.py +9 -8
  257. udata/migrations/2020-09-28-update-reuses-datasets-metrics.py +5 -4
  258. udata/migrations/2020-10-16-migrate-ods-resources.py +9 -10
  259. udata/migrations/2021-04-08-update-schema-with-new-structure.py +8 -7
  260. udata/migrations/2021-05-27-fix-default-schema-name.py +7 -6
  261. udata/migrations/2021-07-05-remove-unused-badges.py +17 -15
  262. udata/migrations/2021-07-07-update-schema-for-community-resources.py +7 -6
  263. udata/migrations/2021-08-17-follow-integrity.py +5 -4
  264. udata/migrations/2021-08-17-harvest-integrity.py +13 -12
  265. udata/migrations/2021-08-17-oauth2client-integrity.py +5 -4
  266. udata/migrations/2021-08-17-transfer-integrity.py +5 -4
  267. udata/migrations/2021-08-17-users-integrity.py +9 -8
  268. udata/migrations/2021-12-14-reuse-topics.py +7 -6
  269. udata/migrations/2022-04-21-improve-extension-detection.py +8 -7
  270. udata/migrations/2022-09-22-clean-inactive-harvest-datasets.py +16 -14
  271. udata/migrations/2022-10-10-add-fs_uniquifier-to-user-model.py +6 -6
  272. udata/migrations/2022-10-10-migrate-harvest-extras.py +36 -26
  273. udata/migrations/2023-02-08-rename-internal-dates.py +46 -28
  274. udata/migrations/2024-01-29-fix-reuse-and-dataset-with-private-None.py +10 -8
  275. udata/migrations/2024-03-22-migrate-activity-kwargs-to-extras.py +6 -4
  276. udata/migrations/2024-06-11-fix-reuse-datasets-references.py +7 -6
  277. udata/migrations/__init__.py +123 -105
  278. udata/models/__init__.py +4 -4
  279. udata/mongo/__init__.py +13 -11
  280. udata/mongo/badges_field.py +3 -2
  281. udata/mongo/datetime_fields.py +13 -12
  282. udata/mongo/document.py +17 -16
  283. udata/mongo/engine.py +15 -16
  284. udata/mongo/errors.py +2 -1
  285. udata/mongo/extras_fields.py +30 -20
  286. udata/mongo/queryset.py +12 -12
  287. udata/mongo/slug_fields.py +38 -28
  288. udata/mongo/taglist_field.py +1 -2
  289. udata/mongo/url_field.py +5 -5
  290. udata/mongo/uuid_fields.py +4 -3
  291. udata/notifications/__init__.py +1 -1
  292. udata/notifications/mattermost.py +10 -9
  293. udata/rdf.py +167 -188
  294. udata/routing.py +40 -45
  295. udata/search/__init__.py +18 -19
  296. udata/search/adapter.py +17 -16
  297. udata/search/commands.py +44 -51
  298. udata/search/fields.py +13 -20
  299. udata/search/query.py +23 -18
  300. udata/search/result.py +9 -10
  301. udata/sentry.py +21 -19
  302. udata/settings.py +262 -198
  303. udata/sitemap.py +8 -6
  304. udata/static/chunks/{11.e9b9ca1f3e03d4020377.js → 11.52e531c19f8de80c00cf.js} +3 -3
  305. udata/static/chunks/{11.e9b9ca1f3e03d4020377.js.map → 11.52e531c19f8de80c00cf.js.map} +1 -1
  306. udata/static/chunks/{13.038c0d9aa0dfa0181c4b.js → 13.c3343a7f1070061c0e10.js} +2 -2
  307. udata/static/chunks/{13.038c0d9aa0dfa0181c4b.js.map → 13.c3343a7f1070061c0e10.js.map} +1 -1
  308. udata/static/chunks/{16.0baa2b64a74a2dcde25c.js → 16.8fa42440ad75ca172e6d.js} +2 -2
  309. udata/static/chunks/{16.0baa2b64a74a2dcde25c.js.map → 16.8fa42440ad75ca172e6d.js.map} +1 -1
  310. udata/static/chunks/{19.350a9f150b074b4ecefa.js → 19.9c6c8412729cd6d59cfa.js} +3 -3
  311. udata/static/chunks/{19.350a9f150b074b4ecefa.js.map → 19.9c6c8412729cd6d59cfa.js.map} +1 -1
  312. udata/static/chunks/{5.6ebbce2b9b3e696d3da5.js → 5.71d15c2e4f21feee2a9a.js} +3 -3
  313. udata/static/chunks/{5.6ebbce2b9b3e696d3da5.js.map → 5.71d15c2e4f21feee2a9a.js.map} +1 -1
  314. udata/static/chunks/{6.d8a5f7b017bcbd083641.js → 6.9139dc098b8ea640b890.js} +3 -3
  315. udata/static/chunks/{6.d8a5f7b017bcbd083641.js.map → 6.9139dc098b8ea640b890.js.map} +1 -1
  316. udata/static/common.js +1 -1
  317. udata/static/common.js.map +1 -1
  318. udata/storage/s3.py +20 -13
  319. udata/tags.py +4 -5
  320. udata/tasks.py +43 -42
  321. udata/tests/__init__.py +9 -6
  322. udata/tests/api/__init__.py +5 -6
  323. udata/tests/api/test_auth_api.py +395 -321
  324. udata/tests/api/test_base_api.py +31 -33
  325. udata/tests/api/test_contact_points.py +7 -9
  326. udata/tests/api/test_dataservices_api.py +211 -158
  327. udata/tests/api/test_datasets_api.py +823 -812
  328. udata/tests/api/test_follow_api.py +13 -15
  329. udata/tests/api/test_me_api.py +95 -112
  330. udata/tests/api/test_organizations_api.py +301 -339
  331. udata/tests/api/test_reports_api.py +35 -25
  332. udata/tests/api/test_reuses_api.py +134 -139
  333. udata/tests/api/test_swagger.py +5 -5
  334. udata/tests/api/test_tags_api.py +18 -25
  335. udata/tests/api/test_topics_api.py +94 -94
  336. udata/tests/api/test_transfer_api.py +53 -48
  337. udata/tests/api/test_user_api.py +128 -141
  338. udata/tests/apiv2/test_datasets.py +290 -198
  339. udata/tests/apiv2/test_me_api.py +10 -11
  340. udata/tests/apiv2/test_organizations.py +56 -74
  341. udata/tests/apiv2/test_swagger.py +5 -5
  342. udata/tests/apiv2/test_topics.py +69 -87
  343. udata/tests/cli/test_cli_base.py +8 -8
  344. udata/tests/cli/test_db_cli.py +21 -19
  345. udata/tests/dataservice/test_dataservice_tasks.py +8 -12
  346. udata/tests/dataset/test_csv_adapter.py +44 -35
  347. udata/tests/dataset/test_dataset_actions.py +2 -3
  348. udata/tests/dataset/test_dataset_commands.py +7 -8
  349. udata/tests/dataset/test_dataset_events.py +36 -29
  350. udata/tests/dataset/test_dataset_model.py +224 -217
  351. udata/tests/dataset/test_dataset_rdf.py +142 -131
  352. udata/tests/dataset/test_dataset_tasks.py +15 -15
  353. udata/tests/dataset/test_resource_preview.py +10 -13
  354. udata/tests/features/territories/__init__.py +9 -13
  355. udata/tests/features/territories/test_territories_api.py +71 -91
  356. udata/tests/forms/test_basic_fields.py +7 -7
  357. udata/tests/forms/test_current_user_field.py +39 -66
  358. udata/tests/forms/test_daterange_field.py +31 -39
  359. udata/tests/forms/test_dict_field.py +28 -26
  360. udata/tests/forms/test_extras_fields.py +102 -76
  361. udata/tests/forms/test_form_field.py +8 -8
  362. udata/tests/forms/test_image_field.py +33 -26
  363. udata/tests/forms/test_model_field.py +134 -123
  364. udata/tests/forms/test_model_list_field.py +7 -7
  365. udata/tests/forms/test_nested_model_list_field.py +117 -79
  366. udata/tests/forms/test_publish_as_field.py +36 -65
  367. udata/tests/forms/test_reference_field.py +34 -53
  368. udata/tests/forms/test_user_forms.py +23 -21
  369. udata/tests/forms/test_uuid_field.py +6 -10
  370. udata/tests/frontend/__init__.py +9 -6
  371. udata/tests/frontend/test_auth.py +7 -6
  372. udata/tests/frontend/test_csv.py +81 -96
  373. udata/tests/frontend/test_hooks.py +43 -43
  374. udata/tests/frontend/test_markdown.py +211 -191
  375. udata/tests/helpers.py +32 -37
  376. udata/tests/models.py +2 -2
  377. udata/tests/organization/test_csv_adapter.py +21 -16
  378. udata/tests/organization/test_notifications.py +11 -18
  379. udata/tests/organization/test_organization_model.py +13 -13
  380. udata/tests/organization/test_organization_rdf.py +29 -22
  381. udata/tests/organization/test_organization_tasks.py +16 -17
  382. udata/tests/plugin.py +76 -73
  383. udata/tests/reuse/test_reuse_model.py +21 -21
  384. udata/tests/reuse/test_reuse_task.py +11 -13
  385. udata/tests/search/__init__.py +11 -12
  386. udata/tests/search/test_adapter.py +60 -70
  387. udata/tests/search/test_query.py +16 -16
  388. udata/tests/search/test_results.py +10 -7
  389. udata/tests/site/test_site_api.py +11 -16
  390. udata/tests/site/test_site_metrics.py +20 -30
  391. udata/tests/site/test_site_model.py +4 -5
  392. udata/tests/site/test_site_rdf.py +94 -78
  393. udata/tests/test_activity.py +17 -17
  394. udata/tests/test_discussions.py +292 -299
  395. udata/tests/test_i18n.py +37 -40
  396. udata/tests/test_linkchecker.py +91 -85
  397. udata/tests/test_mail.py +13 -17
  398. udata/tests/test_migrations.py +219 -180
  399. udata/tests/test_model.py +164 -157
  400. udata/tests/test_notifications.py +17 -17
  401. udata/tests/test_owned.py +14 -14
  402. udata/tests/test_rdf.py +25 -23
  403. udata/tests/test_routing.py +89 -93
  404. udata/tests/test_storages.py +137 -128
  405. udata/tests/test_tags.py +44 -46
  406. udata/tests/test_topics.py +7 -7
  407. udata/tests/test_transfer.py +42 -49
  408. udata/tests/test_uris.py +160 -161
  409. udata/tests/test_utils.py +79 -71
  410. udata/tests/user/test_user_rdf.py +5 -9
  411. udata/tests/workers/test_jobs_commands.py +57 -58
  412. udata/tests/workers/test_tasks_routing.py +23 -29
  413. udata/tests/workers/test_workers_api.py +125 -131
  414. udata/tests/workers/test_workers_helpers.py +6 -6
  415. udata/tracking.py +4 -6
  416. udata/uris.py +45 -46
  417. udata/utils.py +68 -66
  418. udata/wsgi.py +1 -1
  419. {udata-9.1.2.dev30355.dist-info → udata-9.1.2.dev30382.dist-info}/METADATA +3 -2
  420. udata-9.1.2.dev30382.dist-info/RECORD +704 -0
  421. udata-9.1.2.dev30355.dist-info/RECORD +0 -704
  422. {udata-9.1.2.dev30355.dist-info → udata-9.1.2.dev30382.dist-info}/LICENSE +0 -0
  423. {udata-9.1.2.dev30355.dist-info → udata-9.1.2.dev30382.dist-info}/WHEEL +0 -0
  424. {udata-9.1.2.dev30355.dist-info → udata-9.1.2.dev30382.dist-info}/entry_points.txt +0 -0
  425. {udata-9.1.2.dev30355.dist-info → udata-9.1.2.dev30382.dist-info}/top_level.txt +0 -0
udata/tests/api/test_datasets_api.py
@@ -5,31 +5,44 @@ from uuid import uuid4
 
 import pytest
 import pytz
-from flask import url_for
 import requests_mock
+from flask import url_for
 
 from udata.api import fields
 from udata.app import cache
 from udata.core import storages
 from udata.core.badges.factories import badge_factory
-from udata.core.dataset.api_fields import (dataset_harvest_fields,
-                                           resource_harvest_fields)
-from udata.core.dataset.factories import (CommunityResourceFactory,
-                                          DatasetFactory, LicenseFactory,
-                                          ResourceFactory, ResourceSchemaMockData,
-                                          HiddenDatasetFactory)
-from udata.core.dataset.models import (HarvestDatasetMetadata,
-                                       HarvestResourceMetadata, ResourceMixin)
+from udata.core.dataset.api_fields import (
+    dataset_harvest_fields,
+    resource_harvest_fields,
+)
+from udata.core.dataset.constants import (
+    LEGACY_FREQUENCIES,
+    RESOURCE_TYPES,
+    UPDATE_FREQUENCIES,
+)
+from udata.core.dataset.factories import (
+    CommunityResourceFactory,
+    DatasetFactory,
+    HiddenDatasetFactory,
+    LicenseFactory,
+    ResourceFactory,
+    ResourceSchemaMockData,
+)
+from udata.core.dataset.models import (
+    HarvestDatasetMetadata,
+    HarvestResourceMetadata,
+    ResourceMixin,
+)
 from udata.core.organization.factories import OrganizationFactory
 from udata.core.spatial.factories import SpatialCoverageFactory
 from udata.core.topic.factories import TopicFactory
 from udata.core.user.factories import AdminFactory, UserFactory
 from udata.i18n import gettext as _
 from udata.models import CommunityResource, Dataset, Follow, Member, db
-from udata.core.dataset.constants import LEGACY_FREQUENCIES, RESOURCE_TYPES, UPDATE_FREQUENCIES
 from udata.tags import MAX_TAG_LENGTH, MIN_TAG_LENGTH
 from udata.tests.features.territories import create_geozones_fixtures
-from udata.tests.helpers import assert200, assert404, assert204
+from udata.tests.helpers import assert200, assert204, assert404
 from udata.utils import faker, unique_string
 
 from . import APITestCase
@@ -38,9 +51,11 @@ SAMPLE_GEOM = {
     "type": "MultiPolygon",
     "coordinates": [
         [[[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]]],  # noqa
-        [[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]],  # noqa
-         [[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]]
-    ]
+        [
+            [[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]],  # noqa
+            [[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]],
+        ],
+    ],
 }
 
 
@@ -48,247 +63,268 @@ class DatasetAPITest(APITestCase):
     modules = []
 
     def test_dataset_api_list(self):
-        '''It should fetch a dataset list from the API'''
+        """It should fetch a dataset list from the API"""
         datasets = [DatasetFactory() for i in range(2)]
 
-        response = self.get(url_for('api.datasets'))
+        response = self.get(url_for("api.datasets"))
         self.assert200(response)
-        self.assertEqual(len(response.json['data']), len(datasets))
-        self.assertTrue('quality' in response.json['data'][0])
+        self.assertEqual(len(response.json["data"]), len(datasets))
+        self.assertTrue("quality" in response.json["data"][0])
 
     def test_dataset_api_full_text_search(self):
-        '''Should proceed to full text search on datasets'''
+        """Should proceed to full text search on datasets"""
         [DatasetFactory() for i in range(2)]
         DatasetFactory(title="some spécial integer")
         DatasetFactory(title="some spécial float")
         dataset = DatasetFactory(title="some spécial chars")
 
         # with accent
-        response = self.get(url_for('api.datasets', q='some spécial chars'))
+        response = self.get(url_for("api.datasets", q="some spécial chars"))
         self.assert200(response)
-        self.assertEqual(len(response.json['data']), 1)
-        self.assertEqual(response.json['data'][0]['id'], str(dataset.id))
+        self.assertEqual(len(response.json["data"]), 1)
+        self.assertEqual(response.json["data"][0]["id"], str(dataset.id))
 
         # with accent
-        response = self.get(url_for('api.datasets', q='spécial'))
+        response = self.get(url_for("api.datasets", q="spécial"))
         self.assert200(response)
-        self.assertEqual(len(response.json['data']), 3)
+        self.assertEqual(len(response.json["data"]), 3)
 
         # without accent
-        response = self.get(url_for('api.datasets', q='special'))
+        response = self.get(url_for("api.datasets", q="special"))
         self.assert200(response)
-        self.assertEqual(len(response.json['data']), 3)
+        self.assertEqual(len(response.json["data"]), 3)
 
     def test_dataset_api_sorting(self):
-        '''Should sort datasets results from the API'''
+        """Should sort datasets results from the API"""
         self.login()
         [DatasetFactory() for i in range(2)]
 
         to_follow = DatasetFactory(title="dataset to follow")
 
-        response = self.post(url_for('api.dataset_followers', id=to_follow.id))
+        response = self.post(url_for("api.dataset_followers", id=to_follow.id))
         self.assert201(response)
 
         to_follow.count_followers()
-        self.assertEqual(to_follow.get_metrics()['followers'], 1)
+        self.assertEqual(to_follow.get_metrics()["followers"], 1)
 
         # without accent
-        response = self.get(url_for('api.datasets', sort='-followers'))
+        response = self.get(url_for("api.datasets", sort="-followers"))
         self.assert200(response)
-        self.assertEqual(len(response.json['data']), 3)
-        self.assertEqual(response.json['data'][0]['id'], str(to_follow.id))
+        self.assertEqual(len(response.json["data"]), 3)
+        self.assertEqual(response.json["data"][0]["id"], str(to_follow.id))
 
     def test_dataset_api_sorting_created(self):
         self.login()
         first = DatasetFactory(title="first created dataset")
         second = DatasetFactory(title="second created dataset")
-        response = self.get(url_for('api.datasets', sort='created'))
+        response = self.get(url_for("api.datasets", sort="created"))
         self.assert200(response)
-        self.assertEqual(response.json['data'][0]['id'], str(first.id))
+        self.assertEqual(response.json["data"][0]["id"], str(first.id))
 
-        response = self.get(url_for('api.datasets', sort='-created'))
+        response = self.get(url_for("api.datasets", sort="-created"))
         self.assert200(response)
-        self.assertEqual(response.json['data'][0]['id'], str(second.id))
+        self.assertEqual(response.json["data"][0]["id"], str(second.id))
 
         second.title = "second updated dataset"
         second.save()
-        response = self.get(url_for('api.datasets', sort='-last_update'))
+        response = self.get(url_for("api.datasets", sort="-last_update"))
         self.assert200(response)
-        self.assertEqual(response.json['data'][0]['id'], str(second.id))
+        self.assertEqual(response.json["data"][0]["id"], str(second.id))
 
-        response = self.get(url_for('api.datasets', sort='last_update'))
+        response = self.get(url_for("api.datasets", sort="last_update"))
         self.assert200(response)
-        self.assertEqual(response.json['data'][0]['id'], str(first.id))
+        self.assertEqual(response.json["data"][0]["id"], str(first.id))
 
     def test_dataset_api_default_sorting(self):
         # Default sort should be -created
         self.login()
         [DatasetFactory(title="some created dataset") for i in range(10)]
         last = DatasetFactory(title="last created dataset")
-        response = self.get(url_for('api.datasets'))
+        response = self.get(url_for("api.datasets"))
         self.assert200(response)
-        self.assertEqual(response.json['data'][0]['id'], str(last.id))
+        self.assertEqual(response.json["data"][0]["id"], str(last.id))
 
     def test_dataset_api_list_with_filters(self):
-        '''Should filters datasets results based on query filters'''
+        """Should filters datasets results based on query filters"""
         owner = UserFactory()
         org = OrganizationFactory()
 
         [DatasetFactory() for i in range(2)]
 
-        tag_dataset = DatasetFactory(tags=['my-tag', 'other'])
-        license_dataset = DatasetFactory(license=LicenseFactory(id='cc-by'))
-        format_dataset = DatasetFactory(resources=[ResourceFactory(format='my-format')])
+        tag_dataset = DatasetFactory(tags=["my-tag", "other"])
+        license_dataset = DatasetFactory(license=LicenseFactory(id="cc-by"))
+        format_dataset = DatasetFactory(resources=[ResourceFactory(format="my-format")])
         featured_dataset = DatasetFactory(featured=True)
         topic_dataset = DatasetFactory()
         topic = TopicFactory(datasets=[topic_dataset])
 
         paca, _, _ = create_geozones_fixtures()
         geozone_dataset = DatasetFactory(spatial=SpatialCoverageFactory(zones=[paca.id]))
-        granularity_dataset = DatasetFactory(
-            spatial=SpatialCoverageFactory(granularity='country')
-        )
+        granularity_dataset = DatasetFactory(spatial=SpatialCoverageFactory(granularity="country"))
 
-        temporal_coverage = db.DateRange(start='2022-05-03', end='2022-05-04')
+        temporal_coverage = db.DateRange(start="2022-05-03", end="2022-05-04")
         temporal_coverage_dataset = DatasetFactory(temporal_coverage=temporal_coverage)
 
         owner_dataset = DatasetFactory(owner=owner)
         org_dataset = DatasetFactory(organization=org)
 
-        schema_dataset = DatasetFactory(resources=[
-            ResourceFactory(schema={'name': 'my-schema', 'url': 'https://example.org', 'version': '1.0.0'})
-        ])
-        schema_version2_dataset = DatasetFactory(resources=[
-            ResourceFactory(schema={'name': 'other-schema', 'url': 'https://example.org', 'version': '2.0.0'})
-        ])
+        schema_dataset = DatasetFactory(
+            resources=[
+                ResourceFactory(
+                    schema={"name": "my-schema", "url": "https://example.org", "version": "1.0.0"}
+                )
+            ]
+        )
+        schema_version2_dataset = DatasetFactory(
+            resources=[
+                ResourceFactory(
+                    schema={
+                        "name": "other-schema",
+                        "url": "https://example.org",
+                        "version": "2.0.0",
+                    }
+                )
+            ]
+        )
 
         # filter on tag
-        response = self.get(url_for('api.datasets', tag='my-tag'))
+        response = self.get(url_for("api.datasets", tag="my-tag"))
         self.assert200(response)
-        self.assertEqual(len(response.json['data']), 1)
-        self.assertEqual(response.json['data'][0]['id'], str(tag_dataset.id))
+        self.assertEqual(len(response.json["data"]), 1)
+        self.assertEqual(response.json["data"][0]["id"], str(tag_dataset.id))
 
         # filter on format
-        response = self.get(url_for('api.datasets', format='my-format'))
+        response = self.get(url_for("api.datasets", format="my-format"))
         self.assert200(response)
-        self.assertEqual(len(response.json['data']), 1)
-        self.assertEqual(response.json['data'][0]['id'], str(format_dataset.id))
+        self.assertEqual(len(response.json["data"]), 1)
+        self.assertEqual(response.json["data"][0]["id"], str(format_dataset.id))
 
         # filter on featured
-        response = self.get(url_for('api.datasets', featured='true'))
+        response = self.get(url_for("api.datasets", featured="true"))
         self.assert200(response)
-        self.assertEqual(len(response.json['data']), 1)
-        self.assertEqual(response.json['data'][0]['id'], str(featured_dataset.id))
+        self.assertEqual(len(response.json["data"]), 1)
+        self.assertEqual(response.json["data"][0]["id"], str(featured_dataset.id))
 
         # filter on license
-        response = self.get(url_for('api.datasets', license='cc-by'))
+        response = self.get(url_for("api.datasets", license="cc-by"))
         self.assert200(response)
-        self.assertEqual(len(response.json['data']), 1)
-        self.assertEqual(response.json['data'][0]['id'], str(license_dataset.id))
+        self.assertEqual(len(response.json["data"]), 1)
+        self.assertEqual(response.json["data"][0]["id"], str(license_dataset.id))
 
         # filter on geozone
-        response = self.get(url_for('api.datasets', geozone=paca.id))
+        response = self.get(url_for("api.datasets", geozone=paca.id))
         self.assert200(response)
-        self.assertEqual(len(response.json['data']), 1)
-        self.assertEqual(response.json['data'][0]['id'], str(geozone_dataset.id))
+        self.assertEqual(len(response.json["data"]), 1)
+        self.assertEqual(response.json["data"][0]["id"], str(geozone_dataset.id))
 
         # filter on granularity
-        response = self.get(url_for('api.datasets', granularity='country'))
+        response = self.get(url_for("api.datasets", granularity="country"))
         self.assert200(response)
-        self.assertEqual(len(response.json['data']), 1)
-        self.assertEqual(response.json['data'][0]['id'], str(granularity_dataset.id))
+        self.assertEqual(len(response.json["data"]), 1)
+        self.assertEqual(response.json["data"][0]["id"], str(granularity_dataset.id))
 
         # filter on temporal_coverage
-        response = self.get(url_for('api.datasets', temporal_coverage='2022-05-03-2022-05-04'))
+        response = self.get(url_for("api.datasets", temporal_coverage="2022-05-03-2022-05-04"))
         self.assert200(response)
-        self.assertEqual(len(response.json['data']), 1)
-        self.assertEqual(response.json['data'][0]['id'], str(temporal_coverage_dataset.id))
+        self.assertEqual(len(response.json["data"]), 1)
+        self.assertEqual(response.json["data"][0]["id"], str(temporal_coverage_dataset.id))
 
         # filter on owner
-        response = self.get(url_for('api.datasets', owner=owner.id))
+        response = self.get(url_for("api.datasets", owner=owner.id))
         self.assert200(response)
-        self.assertEqual(len(response.json['data']), 1)
-        self.assertEqual(response.json['data'][0]['id'], str(owner_dataset.id))
+        self.assertEqual(len(response.json["data"]), 1)
+        self.assertEqual(response.json["data"][0]["id"], str(owner_dataset.id))
 
-        response = self.get(url_for('api.datasets', owner='owner-id'))
+        response = self.get(url_for("api.datasets", owner="owner-id"))
         self.assert400(response)
 
         # filter on organization
-        response = self.get(url_for('api.datasets', organization=org.id))
+        response = self.get(url_for("api.datasets", organization=org.id))
         self.assert200(response)
-        self.assertEqual(len(response.json['data']), 1)
-        self.assertEqual(response.json['data'][0]['id'], str(org_dataset.id))
+        self.assertEqual(len(response.json["data"]), 1)
+        self.assertEqual(response.json["data"][0]["id"], str(org_dataset.id))
 
-        response = self.get(url_for('api.datasets', organization='org-id'))
+        response = self.get(url_for("api.datasets", organization="org-id"))
         self.assert400(response)
 
         # filter on schema
-        response = self.get(url_for('api.datasets', schema='my-schema'))
+        response = self.get(url_for("api.datasets", schema="my-schema"))
         self.assert200(response)
-        self.assertEqual(len(response.json['data']), 1)
-        self.assertEqual(response.json['data'][0]['id'], str(schema_dataset.id))
+        self.assertEqual(len(response.json["data"]), 1)
+        self.assertEqual(response.json["data"][0]["id"], str(schema_dataset.id))
 
         # filter on schema version
-        response = self.get(url_for('api.datasets', schema_version='2.0.0'))
+        response = self.get(url_for("api.datasets", schema_version="2.0.0"))
         self.assert200(response)
-        self.assertEqual(len(response.json['data']), 1)
-        self.assertEqual(response.json['data'][0]['id'], str(schema_version2_dataset.id))
+        self.assertEqual(len(response.json["data"]), 1)
+        self.assertEqual(response.json["data"][0]["id"], str(schema_version2_dataset.id))
 
         # filter on topic
-        response = self.get(url_for('api.datasets', topic=topic.id))
+        response = self.get(url_for("api.datasets", topic=topic.id))
         self.assert200(response)
-        self.assertEqual(len(response.json['data']), 1)
-        self.assertEqual(response.json['data'][0]['id'], str(topic_dataset.id))
+        self.assertEqual(len(response.json["data"]), 1)
+        self.assertEqual(response.json["data"][0]["id"], str(topic_dataset.id))
 
         # filter on non existing topic
-        response = self.get(url_for('api.datasets', topic=topic_dataset.id))
+        response = self.get(url_for("api.datasets", topic=topic_dataset.id))
         self.assert200(response)
-        self.assertTrue(len(response.json['data']) > 0)
+        self.assertTrue(len(response.json["data"]) > 0)
 
         # filter on non id for topic
-        response = self.get(url_for('api.datasets', topic='xxx'))
+        response = self.get(url_for("api.datasets", topic="xxx"))
         self.assert400(response)
 
     def test_dataset_api_get(self):
-        '''It should fetch a dataset from the API'''
+        """It should fetch a dataset from the API"""
         resources = [ResourceFactory() for _ in range(2)]
         dataset = DatasetFactory(resources=resources)
-        response = self.get(url_for('api.dataset', dataset=dataset))
+        response = self.get(url_for("api.dataset", dataset=dataset))
         self.assert200(response)
         data = json.loads(response.data)
-        self.assertEqual(len(data['resources']), len(resources))
-        self.assertTrue('quality' in data)
-        self.assertTrue('internal' in data)
+        self.assertEqual(len(data["resources"]), len(resources))
+        self.assertTrue("quality" in data)
+        self.assertTrue("internal" in data)
         # Reloads dataset from mongoDB to get mongoDB's date's milliseconds reset.
         dataset.reload()
-        self.assertEqual(data['internal']['created_at_internal'], fields.ISODateTime().format(dataset.created_at_internal))
-        self.assertEqual(data['internal']['last_modified_internal'], fields.ISODateTime().format(dataset.last_modified_internal))
+        self.assertEqual(
+            data["internal"]["created_at_internal"],
+            fields.ISODateTime().format(dataset.created_at_internal),
+        )
+        self.assertEqual(
+            data["internal"]["last_modified_internal"],
+            fields.ISODateTime().format(dataset.last_modified_internal),
+        )
 
-        self.assertTrue('internal' in data['resources'][0])
-        self.assertEqual(data['resources'][0]['internal']['created_at_internal'], fields.ISODateTime().format(dataset.resources[0].created_at_internal))
-        self.assertEqual(data['resources'][0]['internal']['last_modified_internal'], fields.ISODateTime().format(dataset.resources[0].last_modified_internal))
+        self.assertTrue("internal" in data["resources"][0])
+        self.assertEqual(
+            data["resources"][0]["internal"]["created_at_internal"],
+            fields.ISODateTime().format(dataset.resources[0].created_at_internal),
+        )
+        self.assertEqual(
+            data["resources"][0]["internal"]["last_modified_internal"],
+            fields.ISODateTime().format(dataset.resources[0].last_modified_internal),
+        )
 
     def test_dataset_api_get_deleted(self):
-        '''It should not fetch a deleted dataset from the API and raise 410'''
+        """It should not fetch a deleted dataset from the API and raise 410"""
         dataset = DatasetFactory(deleted=datetime.utcnow())
 
-        response = self.get(url_for('api.dataset', dataset=dataset))
+        response = self.get(url_for("api.dataset", dataset=dataset))
         self.assert410(response)
 
     def test_dataset_api_get_deleted_but_authorized(self):
-        '''It should a deleted dataset from the API if user is authorized'''
+        """It should a deleted dataset from the API if user is authorized"""
         self.login()
-        dataset = DatasetFactory(owner=self.user,
-                                 deleted=datetime.utcnow())
+        dataset = DatasetFactory(owner=self.user, deleted=datetime.utcnow())
 
-        response = self.get(url_for('api.dataset', dataset=dataset))
+        response = self.get(url_for("api.dataset", dataset=dataset))
         self.assert200(response)
 
     def test_dataset_api_create(self):
-        '''It should create a dataset from the API'''
+        """It should create a dataset from the API"""
         data = DatasetFactory.as_dict()
         self.login()
-        response = self.post(url_for('api.datasets'), data)
+        response = self.post(url_for("api.datasets"), data)
         self.assert201(response)
         self.assertEqual(Dataset.objects.count(), 1)
 
@@ -297,14 +333,14 @@ class DatasetAPITest(APITestCase):
         self.assertIsNone(dataset.organization)
 
     def test_dataset_api_create_as_org(self):
-        '''It should create a dataset as organization from the API'''
+        """It should create a dataset as organization from the API"""
         self.login()
         data = DatasetFactory.as_dict()
-        member = Member(user=self.user, role='editor')
+        member = Member(user=self.user, role="editor")
         org = OrganizationFactory(members=[member])
-        data['organization'] = str(org.id)
+        data["organization"] = str(org.id)
 
-        response = self.post(url_for('api.datasets'), data)
+        response = self.post(url_for("api.datasets"), data)
         self.assert201(response)
         self.assertEqual(Dataset.objects.count(), 1)
 
@@ -320,79 +356,79 @@ class DatasetAPITest(APITestCase):
         self.login()
         data = DatasetFactory.as_dict()
         org = OrganizationFactory()
-        data['organization'] = str(org.id)
-        response = self.post(url_for('api.datasets'), data)
+        data["organization"] = str(org.id)
+        response = self.post(url_for("api.datasets"), data)
         self.assert400(response)
         self.assertEqual(Dataset.objects.count(), 0)
 
     def test_dataset_api_create_tags(self):
-        '''It should create a dataset from the API with tags'''
+        """It should create a dataset from the API with tags"""
         data = DatasetFactory.as_dict()
-        data['tags'] = [unique_string(16) for _ in range(3)]
+        data["tags"] = [unique_string(16) for _ in range(3)]
         with self.api_user():
-            response = self.post(url_for('api.datasets'), data)
+            response = self.post(url_for("api.datasets"), data)
         self.assert201(response)
         self.assertEqual(Dataset.objects.count(), 1)
         dataset = Dataset.objects.first()
-        self.assertEqual(dataset.tags, sorted(data['tags']))
+        self.assertEqual(dataset.tags, sorted(data["tags"]))
 
     def test_dataset_api_fail_to_create_too_short_tags(self):
-        '''It should fail to create a dataset from the API because
-        the tag is too short'''
+        """It should fail to create a dataset from the API because
+        the tag is too short"""
         data = DatasetFactory.as_dict()
-        data['tags'] = [unique_string(MIN_TAG_LENGTH - 1)]
+        data["tags"] = [unique_string(MIN_TAG_LENGTH - 1)]
         with self.api_user():
-            response = self.post(url_for('api.datasets'), data)
+            response = self.post(url_for("api.datasets"), data)
         self.assertStatus(response, 400)
 
     def test_dataset_api_fail_to_create_too_long_tags(self):
-        '''Should fail creating a dataset with a tag long'''
+        """Should fail creating a dataset with a tag long"""
         data = DatasetFactory.as_dict()
-        data['tags'] = [unique_string(MAX_TAG_LENGTH + 1)]
+        data["tags"] = [unique_string(MAX_TAG_LENGTH + 1)]
         with self.api_user():
-            response = self.post(url_for('api.datasets'), data)
+            response = self.post(url_for("api.datasets"), data)
         self.assertStatus(response, 400)
 
     def test_dataset_api_create_and_slugify_tags(self):
-        '''It should create a dataset from the API and slugify the tags'''
+        """It should create a dataset from the API and slugify the tags"""
         data = DatasetFactory.as_dict()
-        data['tags'] = [' Aaa bBB $$ $$-µ ']
+        data["tags"] = [" Aaa bBB $$ $$-µ "]
         with self.api_user():
-            response = self.post(url_for('api.datasets'), data)
+            response = self.post(url_for("api.datasets"), data)
         self.assert201(response)
         self.assertEqual(Dataset.objects.count(), 1)
         dataset = Dataset.objects.first()
-        self.assertEqual(dataset.tags, ['aaa-bbb-u'])
+        self.assertEqual(dataset.tags, ["aaa-bbb-u"])
 
     def test_dataset_api_create_with_extras(self):
-        '''It should create a dataset with extras from the API'''
+        """It should create a dataset with extras from the API"""
         data = DatasetFactory.as_dict()
-        data['extras'] = {
-            'integer': 42,
-            'float': 42.0,
-            'string': 'value',
-            'dict': {
-                'foo': 'bar',
-            }
+        data["extras"] = {
+            "integer": 42,
+            "float": 42.0,
+            "string": "value",
+            "dict": {
+                "foo": "bar",
+            },
         }
         with self.api_user():
-            response = self.post(url_for('api.datasets'), data)
+            response = self.post(url_for("api.datasets"), data)
         self.assert201(response)
         self.assertEqual(Dataset.objects.count(), 1)
 
         dataset = Dataset.objects.first()
-        self.assertEqual(dataset.extras['integer'], 42)
-        self.assertEqual(dataset.extras['float'], 42.0)
-        self.assertEqual(dataset.extras['string'], 'value')
-        self.assertEqual(dataset.extras['dict']['foo'], 'bar')
+        self.assertEqual(dataset.extras["integer"], 42)
+        self.assertEqual(dataset.extras["float"], 42.0)
+        self.assertEqual(dataset.extras["string"], "value")
+        self.assertEqual(dataset.extras["dict"]["foo"], "bar")
 
     def test_dataset_api_create_with_resources(self):
-        '''It should create a dataset with resources from the API'''
+        """It should create a dataset with resources from the API"""
         data = DatasetFactory.as_dict()
-        data['resources'] = [ResourceFactory.as_dict() for _ in range(3)]
+        data["resources"] = [ResourceFactory.as_dict() for _ in range(3)]
 
         with self.api_user():
-            response = self.post(url_for('api.datasets'), data)
+            response = self.post(url_for("api.datasets"), data)
         self.assert201(response)
         self.assertEqual(Dataset.objects.count(), 1)
 
@@ -404,21 +440,19 @@ class DatasetAPITest(APITestCase):
         should fail
         """
         data = DatasetFactory.as_dict()
-        data['resources'] = {
-            k: v for k, v in enumerate([
-                ResourceFactory.as_dict() for _ in range(3)
-            ])
+        data["resources"] = {
+            k: v for k, v in enumerate([ResourceFactory.as_dict() for _ in range(3)])
         }
         with self.api_user():
-            response = self.post(url_for('api.datasets'), data)
+            response = self.post(url_for("api.datasets"), data)
         self.assert400(response)
 
     def test_dataset_api_create_with_geom(self):
-        '''It should create a dataset with resources from the API'''
+        """It should create a dataset with resources from the API"""
        data = DatasetFactory.as_dict()
-        data['spatial'] = {'geom': SAMPLE_GEOM}
+        data["spatial"] = {"geom": SAMPLE_GEOM}
         with self.api_user():
-            response = self.post(url_for('api.datasets'), data)
+            response = self.post(url_for("api.datasets"), data)
         self.assert201(response)
         self.assertEqual(Dataset.objects.count(), 1)
 
@@ -426,36 +460,35 @@ class DatasetAPITest(APITestCase):
         self.assertEqual(dataset.spatial.geom, SAMPLE_GEOM)
 
     def test_dataset_api_create_with_legacy_frequency(self):
-        '''It should create a dataset from the API with a legacy frequency'''
+        """It should create a dataset from the API with a legacy frequency"""
         self.login()
 
         for oldFreq, newFreq in LEGACY_FREQUENCIES.items():
             data = DatasetFactory.as_dict()
-            data['frequency'] = oldFreq
-            response = self.post(url_for('api.datasets'), data)
+            data["frequency"] = oldFreq
+            response = self.post(url_for("api.datasets"), data)
             self.assert201(response)
-            self.assertEqual(response.json['frequency'], newFreq)
+            self.assertEqual(response.json["frequency"], newFreq)
 
     def test_dataset_api_update(self):
-        '''It should update a dataset from the API'''
+        """It should update a dataset from the API"""
         user = self.login()
         dataset = DatasetFactory(owner=user)
         data = dataset.to_dict()
-        data['description'] = 'new description'
-        response = self.put(url_for('api.dataset', dataset=dataset), data)
+        data["description"] = "new description"
+        response = self.put(url_for("api.dataset", dataset=dataset), data)
         self.assert200(response)
         self.assertEqual(Dataset.objects.count(), 1)
-        self.assertEqual(Dataset.objects.first().description,
-                         'new description')
+        self.assertEqual(Dataset.objects.first().description, "new description")
 
     def test_dataset_api_update_with_resources(self):
-        '''It should update a dataset from the API with resources parameters'''
+        """It should update a dataset from the API with resources parameters"""
         user = self.login()
         dataset = DatasetFactory(owner=user)
         initial_length = len(dataset.resources)
         data = dataset.to_dict()
-        data['resources'].append(ResourceFactory.as_dict())
-        response = self.put(url_for('api.dataset', dataset=dataset), data)
+        data["resources"].append(ResourceFactory.as_dict())
+        response = self.put(url_for("api.dataset", dataset=dataset), data)
         self.assert200(response)
         self.assertEqual(Dataset.objects.count(), 1)
 
@@ -466,135 +499,132 @@ class DatasetAPITest(APITestCase):
         user = self.login()
         dataset = HiddenDatasetFactory(owner=user)
         data = dataset.to_dict()
-        data['description'] = 'new description'
-        del data['private']
+        data["description"] = "new description"
+        del data["private"]
 
-        response = self.put(url_for('api.dataset', dataset=dataset), data)
+        response = self.put(url_for("api.dataset", dataset=dataset), data)
         self.assert200(response)
         dataset.reload()
-        self.assertEqual(dataset.description, 'new description')
+        self.assertEqual(dataset.description, "new description")
         self.assertEqual(dataset.private, True)
-
-        data['private'] = None
-        response = self.put(url_for('api.dataset', dataset=dataset), data)
+
+        data["private"] = None
+        response = self.put(url_for("api.dataset", dataset=dataset), data)
         self.assert200(response)
         dataset.reload()
         self.assertEqual(dataset.private, False)
 
-        data['private'] = True
-        response = self.put(url_for('api.dataset', dataset=dataset), data)
+        data["private"] = True
+        response = self.put(url_for("api.dataset", dataset=dataset), data)
         self.assert200(response)
         dataset.reload()
         self.assertEqual(dataset.private, True)
 
     def test_dataset_api_update_new_resource_with_extras(self):
-        '''It should update a dataset with a new resource with extras'''
+        """It should update a dataset with a new resource with extras"""
         user = self.login()
         dataset = DatasetFactory(owner=user)
         data = dataset.to_dict()
         resource_data = ResourceFactory.as_dict()
-        resource_data['extras'] = {'extra:id': 'id'}
-        data['resources'].append(resource_data)
-        response = self.put(url_for('api.dataset', dataset=dataset), data)
+        resource_data["extras"] = {"extra:id": "id"}
+        data["resources"].append(resource_data)
+        response = self.put(url_for("api.dataset", dataset=dataset), data)
         self.assert200(response)
         dataset.reload()
-        resource = next((
-            r for r in dataset.resources if r.title == resource_data['title']
-        ))
-        self.assertEqual(resource.extras, {'extra:id': 'id'})
+        resource = next((r for r in dataset.resources if r.title == resource_data["title"]))
+        self.assertEqual(resource.extras, {"extra:id": "id"})
 
     def test_dataset_api_update_existing_resource_with_extras(self):
-        '''It should update a dataset's existing resource with extras'''
+        """It should update a dataset's existing resource with extras"""
         user = self.login()
         dataset = DatasetFactory(owner=user, nb_resources=1)
         data = dataset.to_dict()
-        data['resources'][0]['extras'] = {'extra:id': 'id'}
-        response = self.put(url_for('api.dataset', dataset=dataset), data)
+        data["resources"][0]["extras"] = {"extra:id": "id"}
+        response = self.put(url_for("api.dataset", dataset=dataset), data)
         self.assert200(response)
         dataset.reload()
         resource = dataset.resources[0]
-        self.assertEqual(resource.extras, {'extra:id': 'id'})
+        self.assertEqual(resource.extras, {"extra:id": "id"})
 
     def test_dataset_api_update_without_resources(self):
-        '''It should update a dataset from the API without resources'''
+        """It should update a dataset from the API without resources"""
         user = self.login()
-        dataset = DatasetFactory(owner=user,
-                                 resources=ResourceFactory.build_batch(3))
+        dataset = DatasetFactory(owner=user, resources=ResourceFactory.build_batch(3))
         initial_length = len(dataset.resources)
         data = dataset.to_dict()
-        del data['resources']
-        data['description'] = faker.sentence()
-        response = self.put(url_for('api.dataset', dataset=dataset), data)
+        del data["resources"]
+        data["description"] = faker.sentence()
+        response = self.put(url_for("api.dataset", dataset=dataset), data)
         self.assert200(response)
         self.assertEqual(Dataset.objects.count(), 1)
 
         dataset.reload()
-        self.assertEqual(dataset.description, data['description'])
+        self.assertEqual(dataset.description, data["description"])
         self.assertEqual(len(dataset.resources), initial_length)
 
     def test_dataset_api_update_with_extras(self):
-        '''It should update a dataset from the API with extras parameters'''
+        """It should update a dataset from the API with extras parameters"""
         user = self.login()
         dataset = DatasetFactory(owner=user)
         data = dataset.to_dict()
-        data['extras'] = {
-            'integer': 42,
-            'float': 42.0,
-            'string': 'value',
+        data["extras"] = {
+            "integer": 42,
+            "float": 42.0,
+            "string": "value",
         }
-        response = self.put(url_for('api.dataset', dataset=dataset), data)
+        response = self.put(url_for("api.dataset", dataset=dataset), data)
         self.assert200(response)
         self.assertEqual(Dataset.objects.count(), 1)
 
         dataset = Dataset.objects.first()
-        self.assertEqual(dataset.extras['integer'], 42)
-        self.assertEqual(dataset.extras['float'], 42.0)
-        self.assertEqual(dataset.extras['string'], 'value')
+        self.assertEqual(dataset.extras["integer"], 42)
+        self.assertEqual(dataset.extras["float"], 42.0)
+        self.assertEqual(dataset.extras["string"], "value")
 
     def test_dataset_api_update_with_no_extras(self):
-        '''It should update a dataset from the API with no extras
+        """It should update a dataset from the API with no extras
 
         In that case the extras parameters are kept.
-        '''
+        """
         data = DatasetFactory.as_dict()
-        data['extras'] = {
-            'integer': 42,
-            'float': 42.0,
-            'string': 'value',
+        data["extras"] = {
+            "integer": 42,
+            "float": 42.0,
+            "string": "value",
         }
         with self.api_user():
-            response = self.post(url_for('api.datasets'), data)
+            response = self.post(url_for("api.datasets"), data)
 
         dataset = Dataset.objects.first()
         data = dataset.to_dict()
-        del data['extras']
-        response = self.put(url_for('api.dataset', dataset=dataset), data)
+        del data["extras"]
+        response = self.put(url_for("api.dataset", dataset=dataset), data)
         self.assert200(response)
         self.assertEqual(Dataset.objects.count(), 1)
 
         dataset = Dataset.objects.first()
-        self.assertEqual(dataset.extras['integer'], 42)
-        self.assertEqual(dataset.extras['float'], 42.0)
-        self.assertEqual(dataset.extras['string'], 'value')
+        self.assertEqual(dataset.extras["integer"], 42)
+        self.assertEqual(dataset.extras["float"], 42.0)
+        self.assertEqual(dataset.extras["string"], "value")
 
     def test_dataset_api_update_with_empty_extras(self):
-        '''It should update a dataset from the API with empty extras
+        """It should update a dataset from the API with empty extras
 
         In that case the extras parameters are set to an empty dict.
-        '''
+        """
         data = DatasetFactory.as_dict()
-        data['extras'] = {
-            'integer': 42,
-            'float': 42.0,
-            'string': 'value',
+        data["extras"] = {
+            "integer": 42,
+            "float": 42.0,
+            "string": "value",
         }
         with self.api_user():
-            response = self.post(url_for('api.datasets'), data)
+            response = self.post(url_for("api.datasets"), data)
 
         dataset = Dataset.objects.first()
         data = dataset.to_dict()
-        data['extras'] = {}
-        response = self.put(url_for('api.dataset', dataset=dataset), data)
+        data["extras"] = {}
+        response = self.put(url_for("api.dataset", dataset=dataset), data)
         self.assert200(response)
         self.assertEqual(Dataset.objects.count(), 1)
 
@@ -602,259 +632,282 @@ class DatasetAPITest(APITestCase):
602
632
  self.assertEqual(dataset.extras, {})
603
633
 
604
634
  def test_dataset_api_update_deleted(self):
605
- '''It should not update a deleted dataset from the API and raise 401'''
635
+ """It should not update a deleted dataset from the API and raise 401"""
606
636
  user = self.login()
607
637
  dataset = DatasetFactory(owner=user, deleted=datetime.utcnow())
608
638
  data = dataset.to_dict()
609
- data['description'] = 'new description'
610
- response = self.put(url_for('api.dataset', dataset=dataset), data)
639
+ data["description"] = "new description"
640
+ response = self.put(url_for("api.dataset", dataset=dataset), data)
611
641
  self.assert410(response)
612
642
  self.assertEqual(Dataset.objects.count(), 1)
613
- self.assertEqual(Dataset.objects.first().description,
614
- dataset.description)
643
+ self.assertEqual(Dataset.objects.first().description, dataset.description)
615
644
 
616
645
  def test_dataset_api_update_contact_point(self):
617
- '''It should update a dataset from the API'''
646
+ """It should update a dataset from the API"""
618
647
  self.login()
619
648
 
620
649
  # Org and contact point creation
621
- member = Member(user=self.user, role='admin')
650
+ member = Member(user=self.user, role="admin")
622
651
  org = OrganizationFactory(members=[member])
623
652
  contact_point_data = {
624
- 'email': 'mooneywayne@cobb-cochran.com',
625
- 'name': 'Martin Schultz',
626
- 'organization': str(org.id)
653
+ "email": "mooneywayne@cobb-cochran.com",
654
+ "name": "Martin Schultz",
655
+ "organization": str(org.id),
627
656
  }
628
- response = self.post(url_for('api.contact_points'), contact_point_data)
657
+ response = self.post(url_for("api.contact_points"), contact_point_data)
629
658
  self.assert201(response)
630
659
 
631
- response = self.get(url_for('api.org_contact_points', org=org))
660
+ response = self.get(url_for("api.org_contact_points", org=org))
632
661
  assert200(response)
633
- contact_point_id = response.json['data'][0]['id']
662
+ contact_point_id = response.json["data"][0]["id"]
634
663
 
635
664
  # Dataset creation
636
665
  dataset = DatasetFactory(organization=org)
637
666
  data = DatasetFactory.as_dict()
638
667
 
639
- data['contact_point'] = contact_point_id
640
- response = self.put(url_for('api.dataset', dataset=dataset), data)
668
+ data["contact_point"] = contact_point_id
669
+ response = self.put(url_for("api.dataset", dataset=dataset), data)
641
670
  self.assert200(response)
642
671
 
643
672
  dataset = Dataset.objects.first()
644
- self.assertEqual(dataset.contact_point.name, contact_point_data['name'])
673
+ self.assertEqual(dataset.contact_point.name, contact_point_data["name"])
645
674
 
646
- data['contact_point'] = None
647
- response = self.put(url_for('api.dataset', dataset=dataset), data)
675
+ data["contact_point"] = None
676
+ response = self.put(url_for("api.dataset", dataset=dataset), data)
648
677
  self.assert200(response)
649
678
 
650
679
  dataset.reload()
651
680
  self.assertEqual(dataset.contact_point, None)
652
681
 
653
682
  def test_dataset_api_update_contact_point_error(self):
654
- '''It should update a dataset from the API'''
683
+ """It should update a dataset from the API"""
655
684
  self.login()
656
685
 
657
686
  # Org and contact point creation
658
- member = Member(user=self.user, role='admin')
687
+ member = Member(user=self.user, role="admin")
659
688
  org = OrganizationFactory(members=[member])
660
689
  contact_point_data = {
661
- 'email': 'mooneywayne@cobb-cochran.com',
662
- 'name': 'Martin Schultz',
663
- 'organization': str(org.id)
690
+ "email": "mooneywayne@cobb-cochran.com",
691
+ "name": "Martin Schultz",
692
+ "organization": str(org.id),
664
693
  }
665
- response = self.post(url_for('api.contact_points'), contact_point_data)
694
+ response = self.post(url_for("api.contact_points"), contact_point_data)
666
695
  self.assert201(response)
667
696
 
668
- response = self.get(url_for('api.org_contact_points', org=org))
697
+ response = self.get(url_for("api.org_contact_points", org=org))
669
698
  assert200(response)
670
- contact_point_id = response.json['data'][0]['id']
699
+ contact_point_id = response.json["data"][0]["id"]
671
700
 
672
701
  # Dataset creation
673
702
  dataset = DatasetFactory(owner=self.user)
674
703
  data = DatasetFactory.as_dict()
675
704
 
676
- data['contact_point'] = contact_point_id
677
- response = self.put(url_for('api.dataset', dataset=dataset), data)
705
+ data["contact_point"] = contact_point_id
706
+ response = self.put(url_for("api.dataset", dataset=dataset), data)
678
707
  self.assert400(response)
679
- self.assertEqual(response.json['errors']['contact_point'][0], _('Wrong contact point id or contact point ownership mismatch'))
708
+ self.assertEqual(
709
+ response.json["errors"]["contact_point"][0],
710
+ _("Wrong contact point id or contact point ownership mismatch"),
711
+ )
680
712
 
681
713
  def test_dataset_api_delete(self):
682
- '''It should delete a dataset from the API'''
714
+ """It should delete a dataset from the API"""
683
715
  user = self.login()
684
716
  dataset = DatasetFactory(owner=user, nb_resources=1)
685
- response = self.delete(url_for('api.dataset', dataset=dataset))
717
+ response = self.delete(url_for("api.dataset", dataset=dataset))
686
718
 
687
719
  self.assertStatus(response, 204)
688
720
  self.assertEqual(Dataset.objects.count(), 1)
689
721
  self.assertIsNotNone(Dataset.objects[0].deleted)
690
722
 
691
- response = self.get(url_for('api.datasets'))
723
+ response = self.get(url_for("api.datasets"))
692
724
  self.assert200(response)
693
- self.assertEqual(len(response.json['data']), 0)
725
+ self.assertEqual(len(response.json["data"]), 0)
694
726
 
695
727
  def test_dataset_api_delete_deleted(self):
696
- '''It should delete a deleted dataset from the API and raise 410'''
728
+ """It should delete a deleted dataset from the API and raise 410"""
697
729
  user = self.login()
698
730
  dataset = DatasetFactory(owner=user, deleted=datetime.utcnow(), nb_resources=1)
699
- response = self.delete(url_for('api.dataset', dataset=dataset))
731
+ response = self.delete(url_for("api.dataset", dataset=dataset))
700
732
 
701
733
  self.assert410(response)
702
734
 
703
735
  def test_dataset_api_feature(self):
704
- '''It should mark the dataset featured on POST'''
736
+ """It should mark the dataset featured on POST"""
705
737
  self.login(AdminFactory())
706
738
  dataset = DatasetFactory(featured=False)
707
739
 
708
- response = self.post(url_for('api.dataset_featured', dataset=dataset))
740
+ response = self.post(url_for("api.dataset_featured", dataset=dataset))
709
741
  self.assert200(response)
710
742
 
711
743
  dataset.reload()
712
744
  self.assertTrue(dataset.featured)
713
745
 
714
746
  def test_dataset_api_feature_already(self):
715
- '''It shouldn't do anything to feature an already featured dataset'''
747
+ """It shouldn't do anything to feature an already featured dataset"""
716
748
  self.login(AdminFactory())
717
749
  dataset = DatasetFactory(featured=True)
718
750
 
719
- response = self.post(url_for('api.dataset_featured', dataset=dataset))
751
+ response = self.post(url_for("api.dataset_featured", dataset=dataset))
720
752
  self.assert200(response)
721
753
 
722
754
  dataset.reload()
723
755
  self.assertTrue(dataset.featured)
724
756
 
725
757
  def test_dataset_api_unfeature(self):
726
- '''It should unmark the dataset featured on POST'''
758
+ """It should unmark the dataset featured on POST"""
727
759
  self.login(AdminFactory())
728
760
  dataset = DatasetFactory(featured=True)
729
761
 
730
- response = self.delete(url_for('api.dataset_featured',
731
- dataset=dataset))
762
+ response = self.delete(url_for("api.dataset_featured", dataset=dataset))
732
763
  self.assert200(response)
733
764
 
734
765
  dataset.reload()
735
766
  self.assertFalse(dataset.featured)
736
767
 
737
768
  def test_dataset_api_unfeature_already(self):
738
- '''It shouldn't do anything to unfeature a not featured dataset'''
769
+ """It shouldn't do anything to unfeature a not featured dataset"""
739
770
  self.login(AdminFactory())
740
771
  dataset = DatasetFactory(featured=False)
741
772
 
742
- response = self.delete(url_for('api.dataset_featured',
743
- dataset=dataset))
773
+ response = self.delete(url_for("api.dataset_featured", dataset=dataset))
744
774
  self.assert200(response)
745
775
 
746
776
  dataset.reload()
747
777
  self.assertFalse(dataset.featured)
748
778
 
749
- @pytest.mark.options(SCHEMA_CATALOG_URL='https://example.com/schemas')
750
- @requests_mock.Mocker(kw='rmock')
779
+ @pytest.mark.options(SCHEMA_CATALOG_URL="https://example.com/schemas")
780
+ @requests_mock.Mocker(kw="rmock")
751
781
  def test_dataset_new_resource_with_schema(self, rmock):
752
- '''Tests api validation to prevent schema creation with a name and a url'''
753
- rmock.get('https://example.com/schemas', json=ResourceSchemaMockData.get_mock_data())
782
+ """Tests api validation to prevent schema creation with a name and a url"""
783
+ rmock.get("https://example.com/schemas", json=ResourceSchemaMockData.get_mock_data())
754
784
 
755
785
  user = self.login()
756
786
  dataset = DatasetFactory(owner=user)
757
787
  data = dataset.to_dict()
758
788
  resource_data = ResourceFactory.as_dict()
759
789
 
760
- resource_data['schema'] = {'url': 'test'}
761
- data['resources'].append(resource_data)
762
- response = self.put(url_for('api.dataset', dataset=dataset), data)
790
+ resource_data["schema"] = {"url": "test"}
791
+ data["resources"].append(resource_data)
792
+ response = self.put(url_for("api.dataset", dataset=dataset), data)
763
793
  self.assert400(response)
764
- assert response.json['errors']['resources'][0]['schema']['url'] == [_('Invalid URL "{url}"').format(url="test")]
794
+ assert response.json["errors"]["resources"][0]["schema"]["url"] == [
795
+ _('Invalid URL "{url}"').format(url="test")
796
+ ]
765
797
 
766
- resource_data['schema'] = {'name': 'unknown-schema'}
767
- data['resources'].append(resource_data)
768
- response = self.put(url_for('api.dataset', dataset=dataset), data)
798
+ resource_data["schema"] = {"name": "unknown-schema"}
799
+ data["resources"].append(resource_data)
800
+ response = self.put(url_for("api.dataset", dataset=dataset), data)
769
801
  self.assert400(response)
770
- assert response.json['errors']['resources'][0]['schema']['name'] == [_('Schema name "{schema}" is not an allowed value. Allowed values: {values}').format(schema='unknown-schema', values='etalab/schema-irve-statique, 139bercy/format-commande-publique')]
802
+ assert response.json["errors"]["resources"][0]["schema"]["name"] == [
803
+ _('Schema name "{schema}" is not an allowed value. Allowed values: {values}').format(
804
+ schema="unknown-schema",
805
+ values="etalab/schema-irve-statique, 139bercy/format-commande-publique",
806
+ )
807
+ ]
771
808
 
772
- resource_data['schema'] = {'name': 'etalab/schema-irve'}
773
- data['resources'].append(resource_data)
774
- response = self.put(url_for('api.dataset', dataset=dataset), data)
809
+ resource_data["schema"] = {"name": "etalab/schema-irve"}
810
+ data["resources"].append(resource_data)
811
+ response = self.put(url_for("api.dataset", dataset=dataset), data)
775
812
  self.assert400(response)
776
- assert response.json['errors']['resources'][0]['schema']['name'] == [_('Schema name "{schema}" is not an allowed value. Allowed values: {values}').format(schema='etalab/schema-irve', values='etalab/schema-irve-statique, 139bercy/format-commande-publique')]
813
+ assert response.json["errors"]["resources"][0]["schema"]["name"] == [
814
+ _('Schema name "{schema}" is not an allowed value. Allowed values: {values}').format(
815
+ schema="etalab/schema-irve",
816
+ values="etalab/schema-irve-statique, 139bercy/format-commande-publique",
817
+ )
818
+ ]
777
819
 
778
- resource_data['schema'] = {'name': 'etalab/schema-irve-statique', 'version': '42.0.0'}
779
- data['resources'].append(resource_data)
780
- response = self.put(url_for('api.dataset', dataset=dataset), data)
820
+ resource_data["schema"] = {"name": "etalab/schema-irve-statique", "version": "42.0.0"}
821
+ data["resources"].append(resource_data)
822
+ response = self.put(url_for("api.dataset", dataset=dataset), data)
781
823
  self.assert400(response)
782
- assert response.json['errors']['resources'][0]['schema']['version'] == [_('Version "{version}" is not an allowed value for the schema "{name}". Allowed versions: {values}').format(version='42.0.0', name='etalab/schema-irve-statique', values='2.2.0, 2.2.1, latest')]
824
+ assert response.json["errors"]["resources"][0]["schema"]["version"] == [
825
+ _(
826
+ 'Version "{version}" is not an allowed value for the schema "{name}". Allowed versions: {values}'
827
+ ).format(
828
+ version="42.0.0", name="etalab/schema-irve-statique", values="2.2.0, 2.2.1, latest"
829
+ )
830
+ ]
783
831
 
784
- resource_data['schema'] = {'url': 'http://example.com', 'name': 'etalab/schema-irve-statique'}
785
- data['resources'].append(resource_data)
786
- response = self.put(url_for('api.dataset', dataset=dataset), data)
832
+ resource_data["schema"] = {
833
+ "url": "http://example.com",
834
+ "name": "etalab/schema-irve-statique",
835
+ }
836
+ data["resources"].append(resource_data)
837
+ response = self.put(url_for("api.dataset", dataset=dataset), data)
787
838
  self.assert200(response)
788
839
  dataset.reload()
789
- assert dataset.resources[0].schema['url'] == 'http://example.com'
790
- assert dataset.resources[0].schema['name'] == 'etalab/schema-irve-statique'
791
- assert dataset.resources[0].schema['version'] == None
840
+ assert dataset.resources[0].schema["url"] == "http://example.com"
841
+ assert dataset.resources[0].schema["name"] == "etalab/schema-irve-statique"
842
+ assert dataset.resources[0].schema["version"] == None
792
843
 
793
- resource_data['schema'] = {'name': 'etalab/schema-irve-statique'}
794
- data['resources'].append(resource_data)
795
- response = self.put(url_for('api.dataset', dataset=dataset), data)
844
+ resource_data["schema"] = {"name": "etalab/schema-irve-statique"}
845
+ data["resources"].append(resource_data)
846
+ response = self.put(url_for("api.dataset", dataset=dataset), data)
796
847
  self.assert200(response)
797
848
 
798
849
  dataset.reload()
799
- assert dataset.resources[0].schema['name'] == 'etalab/schema-irve-statique'
800
- assert dataset.resources[0].schema['url'] == None
801
- assert dataset.resources[0].schema['version'] == None
850
+ assert dataset.resources[0].schema["name"] == "etalab/schema-irve-statique"
851
+ assert dataset.resources[0].schema["url"] == None
852
+ assert dataset.resources[0].schema["version"] == None
802
853
 
803
- resource_data['schema'] = {'name': 'etalab/schema-irve-statique', 'version': '2.2.0'}
804
- data['resources'].append(resource_data)
805
- response = self.put(url_for('api.dataset', dataset=dataset), data)
854
+ resource_data["schema"] = {"name": "etalab/schema-irve-statique", "version": "2.2.0"}
855
+ data["resources"].append(resource_data)
856
+ response = self.put(url_for("api.dataset", dataset=dataset), data)
806
857
  self.assert200(response)
807
858
 
808
859
  dataset.reload()
809
- assert dataset.resources[0].schema['name'] == 'etalab/schema-irve-statique'
810
- assert dataset.resources[0].schema['url'] == None
811
- assert dataset.resources[0].schema['version'] == '2.2.0'
860
+ assert dataset.resources[0].schema["name"] == "etalab/schema-irve-statique"
861
+ assert dataset.resources[0].schema["url"] == None
862
+ assert dataset.resources[0].schema["version"] == "2.2.0"
812
863
 
813
- resource_data['schema'] = {'url': 'https://schema.data.gouv.fr/schemas/etalab/schema-irve-statique/2.2.1/schema-statique.json'}
814
- data['resources'].append(resource_data)
815
- response = self.put(url_for('api.dataset', dataset=dataset), data)
864
+ resource_data["schema"] = {
865
+ "url": "https://schema.data.gouv.fr/schemas/etalab/schema-irve-statique/2.2.1/schema-statique.json"
866
+ }
867
+ data["resources"].append(resource_data)
868
+ response = self.put(url_for("api.dataset", dataset=dataset), data)
816
869
  self.assert200(response)
817
870
 
818
871
  dataset.reload()
819
- assert dataset.resources[0].schema['name'] == 'etalab/schema-irve-statique'
820
- assert dataset.resources[0].schema['url'] == None
821
- assert dataset.resources[0].schema['version'] == '2.2.1'
872
+ assert dataset.resources[0].schema["name"] == "etalab/schema-irve-statique"
873
+ assert dataset.resources[0].schema["url"] == None
874
+ assert dataset.resources[0].schema["version"] == "2.2.1"
822
875
 
823
876
  # Putting `None` as the schema argument does not remove the schema
824
877
  # Not sure if it's the correct behaviour, but it's the normal behaviour on the API v1… :-(
825
878
  # I think it should be: if the key 'schema' is missing, the old value is kept; if the key is present
826
879
  # but `None`, update it inside the DB as `None`.
827
880
  data = response.json
828
- data['resources'][0]['schema'] = None
829
- response = self.put(url_for('api.dataset', dataset=dataset), data)
881
+ data["resources"][0]["schema"] = None
882
+ response = self.put(url_for("api.dataset", dataset=dataset), data)
830
883
  self.assert200(response)
831
884
 
832
885
  dataset.reload()
833
- assert dataset.resources[0].schema['name'] == 'etalab/schema-irve-statique'
834
- assert dataset.resources[0].schema['url'] == None
835
- assert dataset.resources[0].schema['version'] == '2.2.1'
886
+ assert dataset.resources[0].schema["name"] == "etalab/schema-irve-statique"
887
+ assert dataset.resources[0].schema["url"] == None
888
+ assert dataset.resources[0].schema["version"] == "2.2.1"
836
889
 
837
890
  # Putting `None` as the schema name and version removes the schema
838
891
  # This is a workaround for the None-on-schema behaviour explained above.
839
892
  data = response.json
840
- data['resources'][0]['schema']['name'] = None
841
- data['resources'][0]['schema']['version'] = None
893
+ data["resources"][0]["schema"]["name"] = None
894
+ data["resources"][0]["schema"]["version"] = None
842
895
 
843
- response = self.put(url_for('api.dataset', dataset=dataset), data)
896
+ response = self.put(url_for("api.dataset", dataset=dataset), data)
844
897
  self.assert200(response)
845
898
 
846
899
  dataset.reload()
847
- assert dataset.resources[0].schema['name'] == None
848
- assert dataset.resources[0].schema['url'] == None
849
- assert dataset.resources[0].schema['version'] == None
900
+ assert dataset.resources[0].schema["name"] == None
901
+ assert dataset.resources[0].schema["url"] == None
902
+ assert dataset.resources[0].schema["version"] == None
850
903
 
851
904
 
852
905
  class DatasetBadgeAPITest(APITestCase):
853
906
  @classmethod
854
907
  def setUpClass(cls):
855
908
  # Register at least two badges
856
- Dataset.__badges__['test-1'] = 'Test 1'
857
- Dataset.__badges__['test-2'] = 'Test 2'
909
+ Dataset.__badges__["test-1"] = "Test 1"
910
+ Dataset.__badges__["test-2"] = "Test 2"
858
911
 
859
912
  cls.factory = badge_factory(Dataset)
860
913
 
@@ -863,7 +916,7 @@ class DatasetBadgeAPITest(APITestCase):
863
916
  self.dataset = DatasetFactory(owner=UserFactory())
864
917
 
865
918
  def test_list(self):
866
- response = self.get(url_for('api.available_dataset_badges'))
919
+ response = self.get(url_for("api.available_dataset_badges"))
867
920
  self.assertStatus(response, 200)
868
921
  self.assertEqual(len(response.json), len(Dataset.__badges__))
869
922
  for kind, label in Dataset.__badges__.items():
@@ -872,18 +925,15 @@ class DatasetBadgeAPITest(APITestCase):
872
925
 
873
926
  def test_create(self):
874
927
  data = self.factory.as_dict()
875
- response = self.post(
876
- url_for('api.dataset_badges', dataset=self.dataset), data)
928
+ response = self.post(url_for("api.dataset_badges", dataset=self.dataset), data)
877
929
  self.assert201(response)
878
930
  self.dataset.reload()
879
931
  self.assertEqual(len(self.dataset.badges), 1)
880
932
 
881
933
  def test_create_same(self):
882
934
  data = self.factory.as_dict()
883
- self.post(
884
- url_for('api.dataset_badges', dataset=self.dataset), data)
885
- response = self.post(
886
- url_for('api.dataset_badges', dataset=self.dataset), data)
935
+ self.post(url_for("api.dataset_badges", dataset=self.dataset), data)
936
+ response = self.post(url_for("api.dataset_badges", dataset=self.dataset), data)
887
937
  self.assertStatus(response, 200)
888
938
  self.dataset.reload()
889
939
  self.assertEqual(len(self.dataset.badges), 1)
@@ -894,9 +944,8 @@ class DatasetBadgeAPITest(APITestCase):
894
944
  kinds_keys = list(Dataset.__badges__)
895
945
  self.dataset.add_badge(kinds_keys[0])
896
946
  data = self.factory.as_dict()
897
- data['kind'] = kinds_keys[1]
898
- response = self.post(
899
- url_for('api.dataset_badges', dataset=self.dataset), data)
947
+ data["kind"] = kinds_keys[1]
948
+ response = self.post(url_for("api.dataset_badges", dataset=self.dataset), data)
900
949
  self.assert201(response)
901
950
  self.dataset.reload()
902
951
  self.assertEqual(len(self.dataset.badges), 2)
@@ -905,16 +954,16 @@ class DatasetBadgeAPITest(APITestCase):
905
954
  badge = self.factory()
906
955
  self.dataset.add_badge(badge.kind)
907
956
  response = self.delete(
908
- url_for('api.dataset_badge', dataset=self.dataset,
909
- badge_kind=str(badge.kind)))
957
+ url_for("api.dataset_badge", dataset=self.dataset, badge_kind=str(badge.kind))
958
+ )
910
959
  self.assertStatus(response, 204)
911
960
  self.dataset.reload()
912
961
  self.assertEqual(len(self.dataset.badges), 0)
913
962
 
914
963
  def test_delete_404(self):
915
964
  response = self.delete(
916
- url_for('api.dataset_badge', dataset=self.dataset,
917
- badge_kind=str(self.factory().kind)))
965
+ url_for("api.dataset_badge", dataset=self.dataset, badge_kind=str(self.factory().kind))
966
+ )
918
967
  self.assert404(response)
919
968
 
920
969
 
@@ -926,145 +975,145 @@ class DatasetResourceAPITest(APITestCase):
926
975
  self.dataset = DatasetFactory(owner=self.user)
927
976
 
928
977
  def test_get(self):
929
- '''It should fetch a resource from the API'''
978
+ """It should fetch a resource from the API"""
930
979
  resource = ResourceFactory()
931
980
  dataset = DatasetFactory(resources=[resource])
932
- response = self.get(url_for('api.resource', dataset=dataset,
933
- rid=resource.id))
981
+ response = self.get(url_for("api.resource", dataset=dataset, rid=resource.id))
934
982
  self.assert200(response)
935
983
  data = json.loads(response.data)
936
- assert data['title'] == resource.title
937
- assert data['latest'] == resource.latest
938
- assert data['url'] == resource.url
984
+ assert data["title"] == resource.title
985
+ assert data["latest"] == resource.latest
986
+ assert data["url"] == resource.url
939
987
 
940
988
  def test_create(self):
941
989
  data = ResourceFactory.as_dict()
942
- data['extras'] = {'extra:id': 'id'}
943
- data['filetype'] = 'remote'
944
- response = self.post(url_for('api.resources',
945
- dataset=self.dataset), data)
990
+ data["extras"] = {"extra:id": "id"}
991
+ data["filetype"] = "remote"
992
+ response = self.post(url_for("api.resources", dataset=self.dataset), data)
946
993
  self.assert201(response)
947
994
  self.dataset.reload()
948
995
  self.assertEqual(len(self.dataset.resources), 1)
949
- self.assertEqual(self.dataset.resources[0].extras, {'extra:id': 'id'})
996
+ self.assertEqual(self.dataset.resources[0].extras, {"extra:id": "id"})
950
997
 
951
998
  def test_unallowed_create_filetype_file(self):
952
999
  data = ResourceFactory.as_dict()
953
- data['filetype'] = 'file' # to be explicit
954
- response = self.post(url_for('api.resources',
955
- dataset=self.dataset), data)
1000
+ data["filetype"] = "file" # to be explicit
1001
+ response = self.post(url_for("api.resources", dataset=self.dataset), data)
956
1002
  # should fail because the POST endpoint only supports URL setting for remote resources
957
1003
  self.assert400(response)
958
1004
 
959
1005
  def test_create_normalize_format(self):
960
- _format = ' FORMAT '
1006
+ _format = " FORMAT "
961
1007
  data = ResourceFactory.as_dict()
962
- data['filetype'] = 'remote'
963
- data['format'] = _format
964
- response = self.post(url_for('api.resources',
965
- dataset=self.dataset), data)
1008
+ data["filetype"] = "remote"
1009
+ data["format"] = _format
1010
+ response = self.post(url_for("api.resources", dataset=self.dataset), data)
966
1011
  self.assert201(response)
967
1012
  self.dataset.reload()
968
- self.assertEqual(self.dataset.resources[0].format,
969
- _format.strip().lower())
1013
+ self.assertEqual(self.dataset.resources[0].format, _format.strip().lower())
970
1014
 
971
1015
  def test_create_2nd(self):
972
1016
  self.dataset.resources.append(ResourceFactory())
973
1017
  self.dataset.save()
974
1018
 
975
1019
  data = ResourceFactory.as_dict()
976
- data['filetype'] = 'remote'
977
- response = self.post(url_for('api.resources',
978
- dataset=self.dataset), data)
1020
+ data["filetype"] = "remote"
1021
+ response = self.post(url_for("api.resources", dataset=self.dataset), data)
979
1022
  self.assert201(response)
980
1023
  self.dataset.reload()
981
1024
  self.assertEqual(len(self.dataset.resources), 2)
982
1025
 
983
1026
  def test_create_with_file(self):
984
- '''It should create a resource from the API with a file'''
1027
+ """It should create a resource from the API with a file"""
985
1028
  user = self.login()
986
- org = OrganizationFactory(members=[
987
- Member(user=user, role='admin')
988
- ])
1029
+ org = OrganizationFactory(members=[Member(user=user, role="admin")])
989
1030
  dataset = DatasetFactory(organization=org)
990
1031
  response = self.post(
991
- url_for('api.upload_new_dataset_resource', dataset=dataset),
992
- {'file': (BytesIO(b'aaa'), 'test.txt')}, json=False)
1032
+ url_for("api.upload_new_dataset_resource", dataset=dataset),
1033
+ {"file": (BytesIO(b"aaa"), "test.txt")},
1034
+ json=False,
1035
+ )
993
1036
  self.assert201(response)
994
1037
  data = json.loads(response.data)
995
- self.assertEqual(data['title'], 'test.txt')
996
- response = self.put(
997
- url_for('api.resource', dataset=dataset, rid=data['id']), data)
1038
+ self.assertEqual(data["title"], "test.txt")
1039
+ response = self.put(url_for("api.resource", dataset=dataset, rid=data["id"]), data)
998
1040
  self.assert200(response)
999
1041
  dataset.reload()
1000
1042
  self.assertEqual(len(dataset.resources), 1)
1001
- self.assertTrue(dataset.resources[0].url.endswith('test.txt'))
1043
+ self.assertTrue(dataset.resources[0].url.endswith("test.txt"))
1002
1044
 
1003
1045
  def test_create_with_file_chunks(self):
1004
- '''It should create a resource from the API with a chunked file'''
1046
+ """It should create a resource from the API with a chunked file"""
1005
1047
  user = self.login()
1006
- org = OrganizationFactory(members=[
1007
- Member(user=user, role='admin')
1008
- ])
1048
+ org = OrganizationFactory(members=[Member(user=user, role="admin")])
1009
1049
  dataset = DatasetFactory(organization=org)
1010
1050
 
1011
1051
  uuid = str(uuid4())
1012
1052
  parts = 4
1013
- url = url_for('api.upload_new_dataset_resource', dataset=dataset)
1053
+ url = url_for("api.upload_new_dataset_resource", dataset=dataset)
1014
1054
 
1015
1055
  for i in range(parts):
1016
- response = self.post(url, {
1017
- 'file': (BytesIO(b'a'), 'blob'),
1018
- 'uuid': uuid,
1019
- 'filename': 'test.txt',
1020
- 'partindex': i,
1021
- 'partbyteoffset': 0,
1022
- 'totalfilesize': parts,
1023
- 'totalparts': parts,
1024
- 'chunksize': 1,
1025
- }, json=False)
1056
+ response = self.post(
1057
+ url,
1058
+ {
1059
+ "file": (BytesIO(b"a"), "blob"),
1060
+ "uuid": uuid,
1061
+ "filename": "test.txt",
1062
+ "partindex": i,
1063
+ "partbyteoffset": 0,
1064
+ "totalfilesize": parts,
1065
+ "totalparts": parts,
1066
+ "chunksize": 1,
1067
+ },
1068
+ json=False,
1069
+ )
1026
1070
 
1027
1071
  self.assert200(response)
1028
- assert response.json['success']
1029
- assert 'filename' not in response.json
1030
- assert 'url' not in response.json
1031
- assert 'size' not in response.json
1032
- assert 'sha1' not in response.json
1033
- assert 'url' not in response.json
1034
-
1035
- response = self.post(url, {
1036
- 'uuid': uuid,
1037
- 'filename': 'test.txt',
1038
- 'totalfilesize': parts,
1039
- 'totalparts': parts,
1040
- }, json=False)
1072
+ assert response.json["success"]
1073
+ assert "filename" not in response.json
1074
+ assert "url" not in response.json
1075
+ assert "size" not in response.json
1076
+ assert "sha1" not in response.json
1077
+ assert "url" not in response.json
1078
+
1079
+ response = self.post(
1080
+ url,
1081
+ {
1082
+ "uuid": uuid,
1083
+ "filename": "test.txt",
1084
+ "totalfilesize": parts,
1085
+ "totalparts": parts,
1086
+ },
1087
+ json=False,
1088
+ )
1041
1089
  self.assert201(response)
1042
1090
  data = json.loads(response.data)
1043
- self.assertEqual(data['title'], 'test.txt')
1091
+ self.assertEqual(data["title"], "test.txt")
1044
1092
 
1045
1093
  def test_reorder(self):
1046
1094
  # Register an extra field in order to test
1047
1095
  # https://github.com/opendatateam/udata/issues/1794
1048
- ResourceMixin.extras.register('my:register', db.BooleanField)
1096
+ ResourceMixin.extras.register("my:register", db.BooleanField)
1049
1097
  self.dataset.resources = ResourceFactory.build_batch(3)
1050
1098
  self.dataset.resources[0].extras = {
1051
- 'my:register': True,
1099
+ "my:register": True,
1052
1100
  }
1053
1101
  self.dataset.save()
1054
1102
  self.dataset.reload() # Otherwise `last_modified` date is inaccurate.
1055
1103
  initial_last_modified = self.dataset.last_modified
1056
1104
 
1057
1105
  initial_order = [r.id for r in self.dataset.resources]
1058
- expected_order = [{'id': str(id)} for id in reversed(initial_order)]
1106
+ expected_order = [{"id": str(id)} for id in reversed(initial_order)]
1059
1107
 
1060
- response = self.put(url_for('api.resources', dataset=self.dataset),
1061
- expected_order)
1108
+ response = self.put(url_for("api.resources", dataset=self.dataset), expected_order)
1062
1109
  self.assertStatus(response, 200)
1063
- self.assertEqual([str(r['id']) for r in response.json],
1064
- [str(r['id']) for r in expected_order])
1110
+ self.assertEqual(
1111
+ [str(r["id"]) for r in response.json], [str(r["id"]) for r in expected_order]
1112
+ )
1065
1113
  self.dataset.reload()
1066
- self.assertEqual([str(r.id) for r in self.dataset.resources],
1067
- [str(r['id']) for r in expected_order])
1114
+ self.assertEqual(
1115
+ [str(r.id) for r in self.dataset.resources], [str(r["id"]) for r in expected_order]
1116
+ )
1068
1117
  self.assertEqual(self.dataset.last_modified, initial_last_modified)
1069
1118
 
1070
1119
  def test_update_local(self):
@@ -1072,159 +1121,157 @@ class DatasetResourceAPITest(APITestCase):
1072
1121
  self.dataset.resources.append(resource)
1073
1122
  self.dataset.save()
1074
1123
  data = {
1075
- 'title': faker.sentence(),
1076
- 'description': faker.text(),
1077
- 'url': faker.url(),
1078
- 'extras': {
1079
- 'extra:id': 'id',
1080
- }
1124
+ "title": faker.sentence(),
1125
+ "description": faker.text(),
1126
+ "url": faker.url(),
1127
+ "extras": {
1128
+ "extra:id": "id",
1129
+ },
1081
1130
  }
1082
- response = self.put(url_for('api.resource',
1083
- dataset=self.dataset,
1084
- rid=str(resource.id)), data)
1131
+ response = self.put(
1132
+ url_for("api.resource", dataset=self.dataset, rid=str(resource.id)), data
1133
+ )
1085
1134
  self.assert200(response)
1086
1135
  self.dataset.reload()
1087
1136
  self.assertEqual(len(self.dataset.resources), 1)
1088
1137
  updated = self.dataset.resources[0]
1089
- self.assertEqual(updated.title, data['title'])
1090
- self.assertEqual(updated.description, data['description'])
1138
+ self.assertEqual(updated.title, data["title"])
1139
+ self.assertEqual(updated.description, data["description"])
1091
1140
  # Url should NOT have been updated as it is a hosted resource
1092
- self.assertNotEqual(updated.url, data['url'])
1093
- self.assertEqual(updated.extras, {'extra:id': 'id'})
1141
+ self.assertNotEqual(updated.url, data["url"])
1142
+ self.assertEqual(updated.extras, {"extra:id": "id"})
1094
1143
 
1095
1144
  def test_update_remote(self):
1096
1145
  resource = ResourceFactory()
1097
- resource.filetype = 'remote'
1146
+ resource.filetype = "remote"
1098
1147
  self.dataset.resources.append(resource)
1099
1148
  self.dataset.save()
1100
1149
  data = {
1101
- 'title': faker.sentence(),
1102
- 'description': faker.text(),
1103
- 'url': faker.url(),
1104
- 'extras': {
1105
- 'extra:id': 'id',
1106
- }
1150
+ "title": faker.sentence(),
1151
+ "description": faker.text(),
1152
+ "url": faker.url(),
1153
+ "extras": {
1154
+ "extra:id": "id",
1155
+ },
1107
1156
  }
1108
- response = self.put(url_for('api.resource',
1109
- dataset=self.dataset,
1110
- rid=str(resource.id)), data)
1157
+ response = self.put(
1158
+ url_for("api.resource", dataset=self.dataset, rid=str(resource.id)), data
1159
+ )
1111
1160
  self.assert200(response)
1112
1161
  self.dataset.reload()
1113
1162
  self.assertEqual(len(self.dataset.resources), 1)
1114
1163
  updated = self.dataset.resources[0]
1115
- self.assertEqual(updated.title, data['title'])
1116
- self.assertEqual(updated.description, data['description'])
1164
+ self.assertEqual(updated.title, data["title"])
1165
+ self.assertEqual(updated.description, data["description"])
1117
1166
  # Url should have been updated as it is a remote resource
1118
- self.assertEqual(updated.url, data['url'])
1119
- self.assertEqual(updated.extras, {'extra:id': 'id'})
1167
+ self.assertEqual(updated.url, data["url"])
1168
+ self.assertEqual(updated.extras, {"extra:id": "id"})
1120
1169
 
1121
1170
  def test_bulk_update(self):
1122
1171
  resources = ResourceFactory.build_batch(2)
1123
1172
  self.dataset.resources.extend(resources)
1124
1173
  self.dataset.save()
1125
1174
  ids = [r.id for r in self.dataset.resources]
1126
- data = [{
1127
- 'id': str(id),
1128
- 'title': faker.sentence(),
1129
- 'description': faker.text(),
1130
- } for id in ids]
1131
- data.append({
1132
- 'title': faker.sentence(),
1133
- 'description': faker.text(),
1134
- 'url': faker.url(),
1135
- })
1136
- response = self.put(url_for('api.resources', dataset=self.dataset), data)
1175
+ data = [
1176
+ {
1177
+ "id": str(id),
1178
+ "title": faker.sentence(),
1179
+ "description": faker.text(),
1180
+ }
1181
+ for id in ids
1182
+ ]
1183
+ data.append(
1184
+ {
1185
+ "title": faker.sentence(),
1186
+ "description": faker.text(),
1187
+ "url": faker.url(),
1188
+ }
1189
+ )
1190
+ response = self.put(url_for("api.resources", dataset=self.dataset), data)
1137
1191
  self.assert200(response)
1138
1192
  self.dataset.reload()
1139
1193
  self.assertEqual(len(self.dataset.resources), 3)
1140
1194
  for idx, id in enumerate(ids):
1141
1195
  resource = self.dataset.resources[idx]
1142
1196
  rdata = data[idx]
1143
- self.assertEqual(str(resource.id), rdata['id'])
1144
- self.assertEqual(resource.title, rdata['title'])
1145
- self.assertEqual(resource.description, rdata['description'])
1197
+ self.assertEqual(str(resource.id), rdata["id"])
1198
+ self.assertEqual(resource.title, rdata["title"])
1199
+ self.assertEqual(resource.description, rdata["description"])
1146
1200
  self.assertIsNotNone(resource.url)
1147
1201
 
1148
1202
  def test_update_404(self):
1149
1203
  data = {
1150
- 'title': faker.sentence(),
1151
- 'description': faker.text(),
1152
- 'url': faker.url(),
1204
+ "title": faker.sentence(),
1205
+ "description": faker.text(),
1206
+ "url": faker.url(),
1153
1207
  }
1154
- response = self.put(url_for('api.resource',
1155
- dataset=self.dataset,
1156
- rid=str(ResourceFactory().id)), data)
1208
+ response = self.put(
1209
+ url_for("api.resource", dataset=self.dataset, rid=str(ResourceFactory().id)), data
1210
+ )
1157
1211
  self.assert404(response)
1158
1212
 
1159
1213
  def test_update_with_file(self):
1160
- '''It should update a resource from the API with a file'''
1214
+ """It should update a resource from the API with a file"""
1161
1215
  user = self.login()
1162
1216
  resource = ResourceFactory()
1163
- org = OrganizationFactory(members=[
1164
- Member(user=user, role='admin')
1165
- ])
1217
+ org = OrganizationFactory(members=[Member(user=user, role="admin")])
1166
1218
  dataset = DatasetFactory(resources=[resource], organization=org)
1167
1219
  response = self.post(
1168
- url_for('api.upload_dataset_resource',
1169
- dataset=dataset, rid=resource.id),
1170
- {'file': (BytesIO(b'aaa'), 'test.txt')}, json=False)
1220
+ url_for("api.upload_dataset_resource", dataset=dataset, rid=resource.id),
1221
+ {"file": (BytesIO(b"aaa"), "test.txt")},
1222
+ json=False,
1223
+ )
1171
1224
  self.assert200(response)
1172
1225
  data = json.loads(response.data)
1173
- self.assertEqual(data['title'], 'test.txt')
1174
- response = self.put(
1175
- url_for('api.resource', dataset=dataset, rid=data['id']), data)
1226
+ self.assertEqual(data["title"], "test.txt")
1227
+ response = self.put(url_for("api.resource", dataset=dataset, rid=data["id"]), data)
1176
1228
  self.assert200(response)
1177
1229
  dataset.reload()
1178
1230
  self.assertEqual(len(dataset.resources), 1)
1179
- self.assertTrue(dataset.resources[0].url.endswith('test.txt'))
1231
+ self.assertTrue(dataset.resources[0].url.endswith("test.txt"))
1180
1232
 
1181
1233
  def test_file_update_old_file_deletion(self):
1182
- '''It should update a resource's file and delete the old one'''
1234
+ """It should update a resource's file and delete the old one"""
1183
1235
  resource = ResourceFactory()
1184
1236
  self.dataset.resources.append(resource)
1185
1237
  self.dataset.save()
1186
1238
 
1187
1239
  upload_response = self.post(
1188
- url_for(
1189
- 'api.upload_dataset_resource',
1190
- dataset=self.dataset,
1191
- rid=str(resource.id)
1192
- ), {'file': (BytesIO(b'aaa'), 'test.txt')}, json=False)
1240
+ url_for("api.upload_dataset_resource", dataset=self.dataset, rid=str(resource.id)),
1241
+ {"file": (BytesIO(b"aaa"), "test.txt")},
1242
+ json=False,
1243
+ )
1193
1244
 
1194
1245
  data = json.loads(upload_response.data)
1195
- self.assertEqual(data['title'], 'test.txt')
1246
+ self.assertEqual(data["title"], "test.txt")
1196
1247
 
1197
1248
  upload_response = self.post(
1198
- url_for(
1199
- 'api.upload_dataset_resource',
1200
- dataset=self.dataset,
1201
- rid=str(resource.id)
1202
- ), {'file': (BytesIO(b'aaa'), 'test_update.txt')}, json=False)
1249
+ url_for("api.upload_dataset_resource", dataset=self.dataset, rid=str(resource.id)),
1250
+ {"file": (BytesIO(b"aaa"), "test_update.txt")},
1251
+ json=False,
1252
+ )
1203
1253
 
1204
1254
  data = json.loads(upload_response.data)
1205
- self.assertEqual(data['title'], 'test-update.txt')
1255
+ self.assertEqual(data["title"], "test-update.txt")
1206
1256
 
1207
1257
  resource_strorage = list(storages.resources.list_files())
1208
1258
  self.assertEqual(len(resource_strorage), 1)
1209
- self.assertEqual(resource_strorage[0][-15:], 'test-update.txt')
1259
+ self.assertEqual(resource_strorage[0][-15:], "test-update.txt")
1210
1260
 
1211
1261
  def test_delete(self):
1212
1262
  resource = ResourceFactory()
1213
1263
  self.dataset.resources.append(resource)
1214
1264
  self.dataset.save()
1215
1265
  upload_response = self.post(
1216
- url_for(
1217
- 'api.upload_dataset_resource',
1218
- dataset=self.dataset,
1219
- rid=str(resource.id)
1220
- ), {'file': (BytesIO(b'aaa'), 'test.txt')}, json=False)
1266
+ url_for("api.upload_dataset_resource", dataset=self.dataset, rid=str(resource.id)),
1267
+ {"file": (BytesIO(b"aaa"), "test.txt")},
1268
+ json=False,
1269
+ )
1221
1270
 
1222
1271
  data = json.loads(upload_response.data)
1223
- self.assertEqual(data['title'], 'test.txt')
1272
+ self.assertEqual(data["title"], "test.txt")
1224
1273
 
1225
- response = self.delete(url_for('api.resource',
1226
- dataset=self.dataset,
1227
- rid=str(resource.id)))
1274
+ response = self.delete(url_for("api.resource", dataset=self.dataset, rid=str(resource.id)))
1228
1275
 
1229
1276
  self.assertStatus(response, 204)
1230
1277
  self.dataset.reload()
@@ -1232,21 +1279,21 @@ class DatasetResourceAPITest(APITestCase):
1232
1279
  self.assertEqual(list(storages.resources.list_files()), [])
1233
1280
 
1234
1281
  def test_delete_404(self):
1235
- response = self.delete(url_for('api.resource',
1236
- dataset=self.dataset,
1237
- rid=str(ResourceFactory().id)))
1282
+ response = self.delete(
1283
+ url_for("api.resource", dataset=self.dataset, rid=str(ResourceFactory().id))
1284
+ )
1238
1285
  self.assert404(response)
1239
1286
 
1240
1287
  def test_follow_dataset(self):
1241
- '''It should follow a dataset on POST'''
1288
+ """It should follow a dataset on POST"""
1242
1289
  user = self.login()
1243
1290
  to_follow = DatasetFactory()
1244
1291
 
1245
- response = self.post(url_for('api.dataset_followers', id=to_follow.id))
1292
+ response = self.post(url_for("api.dataset_followers", id=to_follow.id))
1246
1293
  self.assert201(response)
1247
1294
 
1248
1295
  to_follow.count_followers()
1249
- self.assertEqual(to_follow.get_metrics()['followers'], 1)
1296
+ self.assertEqual(to_follow.get_metrics()["followers"], 1)
1250
1297
 
1251
1298
  self.assertEqual(Follow.objects.following(to_follow).count(), 0)
1252
1299
  self.assertEqual(Follow.objects.followers(to_follow).count(), 1)
@@ -1256,18 +1303,17 @@ class DatasetResourceAPITest(APITestCase):
1256
1303
  self.assertEqual(Follow.objects.followers(user).count(), 0)
1257
1304
 
1258
1305
  def test_unfollow_dataset(self):
1259
- '''It should unfollow the dataset on DELETE'''
1306
+ """It should unfollow the dataset on DELETE"""
1260
1307
  user = self.login()
1261
1308
  to_follow = DatasetFactory()
1262
1309
  Follow.objects.create(follower=user, following=to_follow)
1263
1310
 
1264
- response = self.delete(url_for('api.dataset_followers',
1265
- id=to_follow.id))
1311
+ response = self.delete(url_for("api.dataset_followers", id=to_follow.id))
1266
1312
  self.assert200(response)
1267
1313
 
1268
1314
  nb_followers = Follow.objects.followers(to_follow).count()
1269
1315
 
1270
- self.assertEqual(response.json['followers'], nb_followers)
1316
+ self.assertEqual(response.json["followers"], nb_followers)
1271
1317
 
1272
1318
  self.assertEqual(Follow.objects.following(to_follow).count(), 0)
1273
1319
  self.assertEqual(nb_followers, 0)
@@ -1275,198 +1321,187 @@ class DatasetResourceAPITest(APITestCase):
1275
1321
  self.assertEqual(Follow.objects.followers(user).count(), 0)
1276
1322
 
1277
1323
  def test_suggest_formats_api(self):
1278
- '''It should suggest formats'''
1279
- DatasetFactory(resources=[
1280
- ResourceFactory(format=f)
1281
- for f in (faker.word(), faker.word(), 'kml', 'kml-1')
1282
- ])
1324
+ """It should suggest formats"""
1325
+ DatasetFactory(
1326
+ resources=[
1327
+ ResourceFactory(format=f) for f in (faker.word(), faker.word(), "kml", "kml-1")
1328
+ ]
1329
+ )
1283
1330
 
1284
- response = self.get(url_for('api.suggest_formats'),
1285
- qs={'q': 'km', 'size': '5'})
1331
+ response = self.get(url_for("api.suggest_formats"), qs={"q": "km", "size": "5"})
1286
1332
  self.assert200(response)
1287
1333
 
1288
1334
  self.assertLessEqual(len(response.json), 5)
1289
1335
  self.assertGreater(len(response.json), 1)
1290
1336
 
1291
1337
  for suggestion in response.json:
1292
- self.assertIn('text', suggestion)
1293
- self.assertIn('km', suggestion['text'])
1338
+ self.assertIn("text", suggestion)
1339
+ self.assertIn("km", suggestion["text"])
1294
1340
 
1295
1341
  def test_suggest_format_api_no_match(self):
1296
- '''It should not provide format suggestion if no match'''
1297
- DatasetFactory(resources=[
1298
- ResourceFactory(format=faker.word()) for _ in range(3)
1299
- ])
1342
+ """It should not provide format suggestion if no match"""
1343
+ DatasetFactory(resources=[ResourceFactory(format=faker.word()) for _ in range(3)])
1300
1344
 
1301
- response = self.get(url_for('api.suggest_formats'),
1302
- qs={'q': 'test', 'size': '5'})
1345
+ response = self.get(url_for("api.suggest_formats"), qs={"q": "test", "size": "5"})
1303
1346
  self.assert200(response)
1304
1347
  self.assertEqual(len(response.json), 0)
1305
1348
 
1306
1349
  def test_suggest_format_api_empty(self):
1307
- '''It should not provide format suggestion if no data'''
1308
- response = self.get(url_for('api.suggest_formats'),
1309
- qs={'q': 'test', 'size': '5'})
1350
+ """It should not provide format suggestion if no data"""
1351
+ response = self.get(url_for("api.suggest_formats"), qs={"q": "test", "size": "5"})
1310
1352
  self.assert200(response)
1311
1353
  self.assertEqual(len(response.json), 0)
1312
1354
 
1313
1355
  def test_suggest_mime_api(self):
1314
- '''It should suggest mime types'''
1315
- DatasetFactory(resources=[
1316
- ResourceFactory(mime=f) for f in (
1317
- faker.mime_type(category=None),
1318
- faker.mime_type(category=None),
1319
- 'application/json',
1320
- 'application/json-1'
1321
- )
1322
- ])
1356
+ """It should suggest mime types"""
1357
+ DatasetFactory(
1358
+ resources=[
1359
+ ResourceFactory(mime=f)
1360
+ for f in (
1361
+ faker.mime_type(category=None),
1362
+ faker.mime_type(category=None),
1363
+ "application/json",
1364
+ "application/json-1",
1365
+ )
1366
+ ]
1367
+ )
1323
1368
 
1324
- response = self.get(url_for('api.suggest_mime'),
1325
- qs={'q': 'js', 'size': '5'})
1369
+ response = self.get(url_for("api.suggest_mime"), qs={"q": "js", "size": "5"})
1326
1370
  self.assert200(response)
1327
1371
  self.assertLessEqual(len(response.json), 5)
1328
1372
 
1329
1373
  for suggestion in response.json:
1330
- self.assertIn('text', suggestion)
1374
+ self.assertIn("text", suggestion)
1331
1375
 
1332
1376
  def test_suggest_mime_api_plus(self):
1333
- '''It should suggest mime types'''
1334
- DatasetFactory(resources=[ResourceFactory(mime='application/xhtml+xml')])
1377
+ """It should suggest mime types"""
1378
+ DatasetFactory(resources=[ResourceFactory(mime="application/xhtml+xml")])
1335
1379
 
1336
- response = self.get(url_for('api.suggest_mime'),
1337
- qs={'q': 'xml', 'size': '5'})
1380
+ response = self.get(url_for("api.suggest_mime"), qs={"q": "xml", "size": "5"})
1338
1381
  self.assert200(response)
1339
1382
 
1340
1383
  self.assertEqual(len(response.json), 5)
1341
1384
 
1342
1385
  def test_suggest_mime_api_no_match(self):
1343
- '''It should not provide format suggestion if no match'''
1344
- DatasetFactory(resources=[
1345
- ResourceFactory(mime=faker.word()) for _ in range(3)
1346
- ])
1386
+ """It should not provide format suggestion if no match"""
1387
+ DatasetFactory(resources=[ResourceFactory(mime=faker.word()) for _ in range(3)])
1347
1388
 
1348
- response = self.get(url_for('api.suggest_mime'),
1349
- qs={'q': 'test', 'size': '5'})
1389
+ response = self.get(url_for("api.suggest_mime"), qs={"q": "test", "size": "5"})
1350
1390
  self.assert200(response)
1351
1391
  self.assertEqual(len(response.json), 0)
1352
1392
 
1353
1393
  def test_suggest_mime_api_empty(self):
1354
- '''It should not provide mime suggestion if no data'''
1355
- response = self.get(url_for('api.suggest_mime'),
1356
- qs={'q': 'test', 'size': '5'})
1394
+ """It should not provide mime suggestion if no data"""
1395
+ response = self.get(url_for("api.suggest_mime"), qs={"q": "test", "size": "5"})
1357
1396
  self.assert200(response)
1358
1397
  self.assertEqual(len(response.json), 0)
1359
1398
 
1360
1399
  def test_suggest_datasets_api(self):
1361
- '''It should suggest datasets'''
1400
+ """It should suggest datasets"""
1362
1401
  for i in range(3):
1363
1402
  DatasetFactory(
1364
- title='title-test-{0}'.format(i) if i % 2 else faker.word(),
1403
+ title="title-test-{0}".format(i) if i % 2 else faker.word(),
1365
1404
  visible=True,
1366
- metrics={"followers": i})
1405
+ metrics={"followers": i},
1406
+ )
1367
1407
  max_follower_dataset = DatasetFactory(
1368
- title='title-test-4',
1369
- visible=True,
1370
- metrics={"followers": 10}
1408
+ title="title-test-4", visible=True, metrics={"followers": 10}
1371
1409
  )
1372
1410
 
1373
- response = self.get(url_for('api.suggest_datasets'),
1374
- qs={'q': 'title-test', 'size': '5'})
1411
+ response = self.get(url_for("api.suggest_datasets"), qs={"q": "title-test", "size": "5"})
1375
1412
  self.assert200(response)
1376
1413
 
1377
1414
  self.assertLessEqual(len(response.json), 5)
1378
1415
  self.assertGreater(len(response.json), 1)
1379
1416
  for suggestion in response.json:
1380
- self.assertIn('id', suggestion)
1381
- self.assertIn('title', suggestion)
1382
- self.assertIn('slug', suggestion)
1383
- self.assertIn('image_url', suggestion)
1384
- self.assertIn('title-test', suggestion['title'])
1385
- self.assertEqual(response.json[0]['id'], str(max_follower_dataset.id))
1417
+ self.assertIn("id", suggestion)
1418
+ self.assertIn("title", suggestion)
1419
+ self.assertIn("slug", suggestion)
1420
+ self.assertIn("image_url", suggestion)
1421
+ self.assertIn("title-test", suggestion["title"])
1422
+ self.assertEqual(response.json[0]["id"], str(max_follower_dataset.id))
1386
1423
 
1387
1424
  def test_suggest_datasets_acronym_api(self):
1388
- '''It should suggest datasets from their acronyms'''
1425
+ """It should suggest datasets from their acronyms"""
1389
1426
  for i in range(4):
1390
1427
  DatasetFactory(
1391
1428
  # Ensure title does not contains 'acronym-tes'
1392
1429
  title=faker.unique_string(),
1393
- acronym='acronym-test-{0}'.format(i) if i % 2 else None,
1394
- visible=True)
1430
+ acronym="acronym-test-{0}".format(i) if i % 2 else None,
1431
+ visible=True,
1432
+ )
1395
1433
 
1396
- response = self.get(url_for('api.suggest_datasets'),
1397
- qs={'q': 'acronym-test', 'size': '5'})
1434
+ response = self.get(url_for("api.suggest_datasets"), qs={"q": "acronym-test", "size": "5"})
1398
1435
  self.assert200(response)
1399
1436
 
1400
1437
  self.assertLessEqual(len(response.json), 5)
1401
1438
  self.assertGreater(len(response.json), 1)
1402
1439
 
1403
1440
  for suggestion in response.json:
1404
- self.assertIn('id', suggestion)
1405
- self.assertIn('title', suggestion)
1406
- self.assertIn('slug', suggestion)
1407
- self.assertIn('image_url', suggestion)
1408
- self.assertNotIn('tes', suggestion['title'])
1409
- self.assertIn('acronym-test', suggestion['acronym'])
1441
+ self.assertIn("id", suggestion)
1442
+ self.assertIn("title", suggestion)
1443
+ self.assertIn("slug", suggestion)
1444
+ self.assertIn("image_url", suggestion)
+ self.assertNotIn("tes", suggestion["title"])
+ self.assertIn("acronym-test", suggestion["acronym"])

  def test_suggest_datasets_api_unicode(self):
- '''It should suggest datasets with special characters'''
+ """It should suggest datasets with special characters"""
  for i in range(4):
  DatasetFactory(
- title='title-testé-{0}'.format(i) if i % 2 else faker.word(),
- resources=[ResourceFactory()])
+ title="title-testé-{0}".format(i) if i % 2 else faker.word(),
+ resources=[ResourceFactory()],
+ )

- response = self.get(url_for('api.suggest_datasets'),
- qs={'q': 'title-testé', 'size': '5'})
+ response = self.get(url_for("api.suggest_datasets"), qs={"q": "title-testé", "size": "5"})
  self.assert200(response)

  self.assertLessEqual(len(response.json), 5)
  self.assertGreater(len(response.json), 1)

  for suggestion in response.json:
- self.assertIn('id', suggestion)
- self.assertIn('title', suggestion)
- self.assertIn('slug', suggestion)
- self.assertIn('image_url', suggestion)
- self.assertIn('title-testé', suggestion['title'])
+ self.assertIn("id", suggestion)
+ self.assertIn("title", suggestion)
+ self.assertIn("slug", suggestion)
+ self.assertIn("image_url", suggestion)
+ self.assertIn("title-testé", suggestion["title"])

  def test_suggest_datasets_api_no_match(self):
- '''It should not provide dataset suggestion if no match'''
+ """It should not provide dataset suggestion if no match"""
  for i in range(3):
  DatasetFactory(resources=[ResourceFactory()])

- response = self.get(url_for('api.suggest_datasets'),
- qs={'q': 'xxxxxx', 'size': '5'})
+ response = self.get(url_for("api.suggest_datasets"), qs={"q": "xxxxxx", "size": "5"})
  self.assert200(response)
  self.assertEqual(len(response.json), 0)

  def test_suggest_datasets_api_empty(self):
- '''It should not provide dataset suggestion if no data'''
- response = self.get(url_for('api.suggest_datasets'),
- qs={'q': 'xxxxxx', 'size': '5'})
+ """It should not provide dataset suggestion if no data"""
+ response = self.get(url_for("api.suggest_datasets"), qs={"q": "xxxxxx", "size": "5"})
  self.assert200(response)
  self.assertEqual(len(response.json), 0)


  class DatasetReferencesAPITest(APITestCase):
  def test_dataset_licenses_list(self):
- '''It should fetch the dataset licenses list from the API'''
+ """It should fetch the dataset licenses list from the API"""
  licenses = LicenseFactory.create_batch(4)

- response = self.get(url_for('api.licenses'))
+ response = self.get(url_for("api.licenses"))
  self.assert200(response)
  self.assertEqual(len(response.json), len(licenses))

  def test_dataset_frequencies_list(self):
- '''It should fetch the dataset frequencies list from the API'''
- response = self.get(url_for('api.dataset_frequencies'))
+ """It should fetch the dataset frequencies list from the API"""
+ response = self.get(url_for("api.dataset_frequencies"))
  self.assert200(response)
  self.assertEqual(len(response.json), len(UPDATE_FREQUENCIES))

  def test_dataset_allowed_resources_extensions(self):
- '''It should fetch the resources allowed extensions list from the API'''
- extensions = ['csv', 'json', 'xml']
- self.app.config['ALLOWED_RESOURCES_EXTENSIONS'] = extensions
- response = self.get(url_for('api.allowed_extensions'))
+ """It should fetch the resources allowed extensions list from the API"""
+ extensions = ["csv", "json", "xml"]
+ self.app.config["ALLOWED_RESOURCES_EXTENSIONS"] = extensions
+ response = self.get(url_for("api.allowed_extensions"))
  self.assert200(response)
  self.assertEqual(response.json, extensions)

@@ -1475,20 +1510,19 @@ class DatasetArchivedAPITest(APITestCase):
  modules = []

  def test_dataset_api_search_archived(self):
- '''It should search datasets from the API, excluding archived ones'''
+ """It should search datasets from the API, excluding archived ones"""
  DatasetFactory(archived=None)
  dataset = DatasetFactory(archived=datetime.utcnow())

- response = self.get(url_for('api.datasets', q=''))
+ response = self.get(url_for("api.datasets", q=""))
  self.assert200(response)
- self.assertEqual(len(response.json['data']), 1)
- self.assertNotIn(str(dataset.id),
- [r['id'] for r in response.json['data']])
+ self.assertEqual(len(response.json["data"]), 1)
+ self.assertNotIn(str(dataset.id), [r["id"] for r in response.json["data"]])

  def test_dataset_api_get_archived(self):
- '''It should fetch an archived dataset from the API and return 200'''
+ """It should fetch an archived dataset from the API and return 200"""
  dataset = DatasetFactory(archived=datetime.utcnow())
- response = self.get(url_for('api.dataset', dataset=dataset))
+ response = self.get(url_for("api.dataset", dataset=dataset))
  self.assert200(response)


@@ -1496,66 +1530,66 @@ class CommunityResourceAPITest(APITestCase):
  modules = []

  def test_community_resource_api_get(self):
- '''It should fetch a community resource from the API'''
+ """It should fetch a community resource from the API"""
  community_resource = CommunityResourceFactory()

- response = self.get(url_for('api.community_resource',
- community=community_resource))
+ response = self.get(url_for("api.community_resource", community=community_resource))
  self.assert200(response)
  data = json.loads(response.data)
- self.assertEqual(data['id'], str(community_resource.id))
+ self.assertEqual(data["id"], str(community_resource.id))

  def test_resources_api_list(self):
- '''It should list community resources from the API'''
+ """It should list community resources from the API"""
  community_resources = [CommunityResourceFactory() for _ in range(40)]
- response = self.get(url_for('api.community_resources'))
+ response = self.get(url_for("api.community_resources"))
  self.assert200(response)
- resources = json.loads(response.data)['data']
+ resources = json.loads(response.data)["data"]

- response = self.get(url_for('api.community_resources', page=2))
+ response = self.get(url_for("api.community_resources", page=2))
  self.assert200(response)
- resources += json.loads(response.data)['data']
+ resources += json.loads(response.data)["data"]

  self.assertEqual(len(resources), len(community_resources))
  # Assert we don't have duplicates
- self.assertEqual(len(set(res['id'] for res in resources)), len(community_resources))
+ self.assertEqual(len(set(res["id"] for res in resources)), len(community_resources))

  def test_community_resource_api_get_from_string_id(self):
- '''It should fetch a community resource from the API'''
+ """It should fetch a community resource from the API"""
  community_resource = CommunityResourceFactory()

- response = self.get(url_for('api.community_resource',
- community=str(community_resource.id)))
+ response = self.get(url_for("api.community_resource", community=str(community_resource.id)))
  self.assert200(response)
  data = json.loads(response.data)
- self.assertEqual(data['id'], str(community_resource.id))
+ self.assertEqual(data["id"], str(community_resource.id))

  def test_community_resource_api_create_dataset_binding(self):
- '''It should create a community resource linked to the right dataset'''
+ """It should create a community resource linked to the right dataset"""
  dataset = DatasetFactory()
  self.login()
  response = self.post(
- url_for('api.upload_new_community_resource', dataset=dataset),
- {'file': (BytesIO(b'aaa'), 'test.txt')}, json=False)
+ url_for("api.upload_new_community_resource", dataset=dataset),
+ {"file": (BytesIO(b"aaa"), "test.txt")},
+ json=False,
+ )
  self.assert201(response)
  self.assertEqual(CommunityResource.objects.count(), 1)
  community_resource = CommunityResource.objects.first()
  self.assertEqual(community_resource.dataset, dataset)

  def test_community_resource_api_create(self):
- '''It should create a community resource from the API'''
+ """It should create a community resource from the API"""
  dataset = DatasetFactory()
  self.login()
  response = self.post(
- url_for('api.upload_new_community_resource', dataset=dataset),
- {'file': (BytesIO(b'aaa'), 'test.txt')}, json=False)
+ url_for("api.upload_new_community_resource", dataset=dataset),
+ {"file": (BytesIO(b"aaa"), "test.txt")},
+ json=False,
+ )
  self.assert201(response)
  data = json.loads(response.data)
- resource_id = data['id']
- self.assertEqual(data['title'], 'test.txt')
- response = self.put(
- url_for('api.community_resource', community=resource_id),
- data)
+ resource_id = data["id"]
+ self.assertEqual(data["title"], "test.txt")
+ response = self.put(url_for("api.community_resource", community=resource_id), data)
  self.assertStatus(response, 200)
  self.assertEqual(CommunityResource.objects.count(), 1)
  community_resource = CommunityResource.objects.first()
@@ -1563,23 +1597,21 @@ class CommunityResourceAPITest(APITestCase):
  self.assertIsNone(community_resource.organization)

  def test_community_resource_api_create_as_org(self):
- '''It should create a community resource as org from the API'''
+ """It should create a community resource as org from the API"""
  dataset = DatasetFactory()
  user = self.login()
- org = OrganizationFactory(members=[
- Member(user=user, role='admin')
- ])
+ org = OrganizationFactory(members=[Member(user=user, role="admin")])
  response = self.post(
- url_for('api.upload_new_community_resource', dataset=dataset),
- {'file': (BytesIO(b'aaa'), 'test.txt')}, json=False)
+ url_for("api.upload_new_community_resource", dataset=dataset),
+ {"file": (BytesIO(b"aaa"), "test.txt")},
+ json=False,
+ )
  self.assert201(response)
  data = json.loads(response.data)
- self.assertEqual(data['title'], 'test.txt')
- resource_id = data['id']
- data['organization'] = str(org.id)
- response = self.put(
- url_for('api.community_resource', community=resource_id),
- data)
+ self.assertEqual(data["title"], "test.txt")
+ resource_id = data["id"]
+ data["organization"] = str(org.id)
+ response = self.put(url_for("api.community_resource", community=resource_id), data)
  self.assertStatus(response, 200)
  self.assertEqual(CommunityResource.objects.count(), 1)
  community_resource = CommunityResource.objects.first()
@@ -1587,99 +1619,87 @@ class CommunityResourceAPITest(APITestCase):
  self.assertIsNone(community_resource.owner)

  def test_community_resource_api_update(self):
- '''It should update a community resource from the API'''
+ """It should update a community resource from the API"""
  user = self.login()
  community_resource = CommunityResourceFactory(owner=user)
  data = community_resource.to_dict()
- data['description'] = 'new description'
- response = self.put(url_for('api.community_resource',
- community=community_resource),
- data)
+ data["description"] = "new description"
+ response = self.put(url_for("api.community_resource", community=community_resource), data)
  self.assert200(response)
  self.assertEqual(CommunityResource.objects.count(), 1)
- self.assertEqual(CommunityResource.objects.first().description,
- 'new description')
+ self.assertEqual(CommunityResource.objects.first().description, "new description")

  def test_community_resource_api_update_w_previous_owner(self):
- '''Should update a community resource and keep the original author'''
+ """Should update a community resource and keep the original author"""
  owner = UserFactory()
  community_resource = CommunityResourceFactory(owner=owner)
  self.login(AdminFactory())
  data = community_resource.to_dict()
- data['description'] = 'new description'
- response = self.put(url_for('api.community_resource',
- community=community_resource),
- data)
+ data["description"] = "new description"
+ response = self.put(url_for("api.community_resource", community=community_resource), data)
  self.assert200(response)
- self.assertEqual(CommunityResource.objects.first().owner,
- owner)
+ self.assertEqual(CommunityResource.objects.first().owner, owner)

  def test_community_resource_api_update_with_file(self):
- '''It should update a community resource file from the API'''
+ """It should update a community resource file from the API"""
  dataset = DatasetFactory()
  user = self.login()
- community_resource = CommunityResourceFactory(dataset=dataset,
- owner=user)
+ community_resource = CommunityResourceFactory(dataset=dataset, owner=user)
  response = self.post(
- url_for('api.upload_community_resource',
- community=community_resource),
- {'file': (BytesIO(b'aaa'), 'test.txt')}, json=False)
+ url_for("api.upload_community_resource", community=community_resource),
+ {"file": (BytesIO(b"aaa"), "test.txt")},
+ json=False,
+ )
  self.assert200(response)
  data = json.loads(response.data)
- self.assertEqual(data['id'], str(community_resource.id))
- self.assertEqual(data['title'], 'test.txt')
- data['description'] = 'new description'
- response = self.put(url_for('api.community_resource',
- community=community_resource),
- data)
+ self.assertEqual(data["id"], str(community_resource.id))
+ self.assertEqual(data["title"], "test.txt")
+ data["description"] = "new description"
+ response = self.put(url_for("api.community_resource", community=community_resource), data)
  self.assert200(response)
  self.assertEqual(CommunityResource.objects.count(), 1)
- self.assertEqual(CommunityResource.objects.first().description,
- 'new description')
- self.assertTrue(
- CommunityResource.objects.first().url.endswith('test.txt'))
+ self.assertEqual(CommunityResource.objects.first().description, "new description")
+ self.assertTrue(CommunityResource.objects.first().url.endswith("test.txt"))

  def test_community_resource_file_update_old_file_deletion(self):
- '''It should update a community resource's file and delete the old one'''
+ """It should update a community resource's file and delete the old one"""
  dataset = DatasetFactory()
  user = self.login()
- community_resource = CommunityResourceFactory(dataset=dataset,
- owner=user)
+ community_resource = CommunityResourceFactory(dataset=dataset, owner=user)
  response = self.post(
- url_for('api.upload_community_resource',
- community=community_resource),
- {'file': (BytesIO(b'aaa'), 'test.txt')}, json=False)
+ url_for("api.upload_community_resource", community=community_resource),
+ {"file": (BytesIO(b"aaa"), "test.txt")},
+ json=False,
+ )
  self.assert200(response)
  data = json.loads(response.data)
- self.assertEqual(data['id'], str(community_resource.id))
- self.assertEqual(data['title'], 'test.txt')
+ self.assertEqual(data["id"], str(community_resource.id))
+ self.assertEqual(data["title"], "test.txt")

  response = self.post(
- url_for('api.upload_community_resource',
- community=community_resource),
- {'file': (BytesIO(b'aaa'), 'test_update.txt')}, json=False)
+ url_for("api.upload_community_resource", community=community_resource),
+ {"file": (BytesIO(b"aaa"), "test_update.txt")},
+ json=False,
+ )
  self.assert200(response)
  data = json.loads(response.data)
- self.assertEqual(data['id'], str(community_resource.id))
- self.assertEqual(data['title'], 'test-update.txt')
+ self.assertEqual(data["id"], str(community_resource.id))
+ self.assertEqual(data["title"], "test-update.txt")

  self.assertEqual(len(list(storages.resources.list_files())), 1)

  def test_community_resource_api_create_remote(self):
- '''It should create a remote community resource from the API'''
+ """It should create a remote community resource from the API"""
  user = self.login()
  dataset = DatasetFactory()
  attrs = CommunityResourceFactory.as_dict()
- attrs['filetype'] = 'remote'
- attrs['dataset'] = str(dataset.id)
- response = self.post(
- url_for('api.community_resources'),
- attrs
- )
+ attrs["filetype"] = "remote"
+ attrs["dataset"] = str(dataset.id)
+ response = self.post(url_for("api.community_resources"), attrs)
  self.assert201(response)
  data = json.loads(response.data)
- self.assertEqual(data['title'], attrs['title'])
- self.assertEqual(data['url'], attrs['url'])
+ self.assertEqual(data["title"], attrs["title"])
+ self.assertEqual(data["url"], attrs["url"])
  self.assertEqual(CommunityResource.objects.count(), 1)
  community_resource = CommunityResource.objects.first()
  self.assertEqual(community_resource.dataset, dataset)
@@ -1687,54 +1707,45 @@ class CommunityResourceAPITest(APITestCase):
  self.assertIsNone(community_resource.organization)

  def test_community_resource_api_unallowed_create_filetype_file(self):
- '''It should create a remote community resource from the API'''
+ """It should create a remote community resource from the API"""
  self.login()
  dataset = DatasetFactory()
  attrs = CommunityResourceFactory.as_dict()
- attrs['filetype'] = 'file' # to be explicit
- attrs['dataset'] = str(dataset.id)
- response = self.post(
- url_for('api.community_resources'),
- attrs
- )
+ attrs["filetype"] = "file" # to be explicit
+ attrs["dataset"] = str(dataset.id)
+ response = self.post(url_for("api.community_resources"), attrs)
  # should fail because the POST endpoint only supports URL setting
  # for remote community resources
  self.assert400(response)

  def test_community_resource_api_create_remote_needs_dataset(self):
- '''
+ """
  It should prevent remote community resource creation without dataset
  from the API
- '''
+ """
  self.login()
  attrs = CommunityResourceFactory.as_dict()
- attrs['filetype'] = 'remote'
- response = self.post(
- url_for('api.community_resources'),
- attrs
- )
+ attrs["filetype"] = "remote"
+ response = self.post(url_for("api.community_resources"), attrs)
  self.assertStatus(response, 400)
  data = json.loads(response.data)
- self.assertIn('errors', data)
- self.assertIn('dataset', data['errors'])
+ self.assertIn("errors", data)
+ self.assertIn("dataset", data["errors"])
  self.assertEqual(CommunityResource.objects.count(), 0)

  def test_community_resource_api_create_remote_needs_real_dataset(self):
- '''
+ """
  It should prevent remote community resource creation without a valid
  dataset identifier
- '''
+ """
  self.login()
  attrs = CommunityResourceFactory.as_dict()
- attrs['dataset'] = 'xxx'
- response = self.post(
- url_for('api.community_resources'),
- attrs
- )
+ attrs["dataset"] = "xxx"
+ response = self.post(url_for("api.community_resources"), attrs)
  self.assertStatus(response, 400)
  data = json.loads(response.data)
- self.assertIn('errors', data)
- self.assertIn('dataset', data['errors'])
+ self.assertIn("errors", data)
+ self.assertIn("dataset", data["errors"])
  self.assertEqual(CommunityResource.objects.count(), 0)

  def test_community_resource_api_delete(self):
@@ -1742,21 +1753,21 @@ class CommunityResourceAPITest(APITestCase):
  self.login()

  response = self.post(
- url_for('api.upload_new_community_resource', dataset=dataset),
- {'file': (BytesIO(b'aaa'), 'test.txt')}, json=False)
+ url_for("api.upload_new_community_resource", dataset=dataset),
+ {"file": (BytesIO(b"aaa"), "test.txt")},
+ json=False,
+ )
  self.assert201(response)

  data = json.loads(response.data)
- resource_id = data['id']
- self.assertEqual(data['title'], 'test.txt')
+ resource_id = data["id"]
+ self.assertEqual(data["title"], "test.txt")

- response = self.put(
- url_for('api.community_resource', community=resource_id),
- data)
+ response = self.put(url_for("api.community_resource", community=resource_id), data)
  self.assertStatus(response, 200)
  self.assertEqual(CommunityResource.objects.count(), 1)

- response = self.delete(url_for('api.community_resource', community=resource_id))
+ response = self.delete(url_for("api.community_resource", community=resource_id))
  self.assertStatus(response, 204)

  self.assertEqual(CommunityResource.objects.count(), 0)
@@ -1764,134 +1775,136 @@ class CommunityResourceAPITest(APITestCase):


  class ResourcesTypesAPITest(APITestCase):
-
  def test_resource_types_list(self):
- '''It should fetch the resource types list from the API'''
- response = self.get(url_for('api.resource_types'))
+ """It should fetch the resource types list from the API"""
+ response = self.get(url_for("api.resource_types"))
  self.assert200(response)
  self.assertEqual(len(response.json), len(RESOURCE_TYPES))


- @pytest.mark.usefixtures('clean_db')
+ @pytest.mark.usefixtures("clean_db")
  class DatasetSchemasAPITest:
  modules = []

  def test_dataset_schemas_api_list(self, api, rmock, app):
  # Can't use @pytest.mark.options otherwise a request will be
  # made before setting up rmock at module load, resulting in a 404
- app.config['SCHEMA_CATALOG_URL'] = 'https://example.com/schemas'
+ app.config["SCHEMA_CATALOG_URL"] = "https://example.com/schemas"

- rmock.get('https://example.com/schemas', json=ResourceSchemaMockData.get_mock_data())
- response = api.get(url_for('api.schemas'))
+ rmock.get("https://example.com/schemas", json=ResourceSchemaMockData.get_mock_data())
+ response = api.get(url_for("api.schemas"))

  assert200(response)
- assert response.json == ResourceSchemaMockData.get_expected_assignable_schemas_from_mock_data()
+ assert (
+ response.json == ResourceSchemaMockData.get_expected_assignable_schemas_from_mock_data()
+ )

  @pytest.mark.options(SCHEMA_CATALOG_URL=None)
  def test_dataset_schemas_api_list_no_catalog_url(self, api):
- response = api.get(url_for('api.schemas'))
+ response = api.get(url_for("api.schemas"))

  assert200(response)
  assert response.json == []

- @pytest.mark.options(SCHEMA_CATALOG_URL='https://example.com/notfound')
+ @pytest.mark.options(SCHEMA_CATALOG_URL="https://example.com/notfound")
  def test_dataset_schemas_api_list_not_found(self, api, rmock):
- rmock.get('https://example.com/notfound', status_code=404)
- response = api.get(url_for('api.schemas'))
+ rmock.get("https://example.com/notfound", status_code=404)
+ response = api.get(url_for("api.schemas"))
  assert404(response)

- @pytest.mark.options(SCHEMA_CATALOG_URL='https://example.com/schemas')
+ @pytest.mark.options(SCHEMA_CATALOG_URL="https://example.com/schemas")
  def test_dataset_schemas_api_list_error_no_cache(self, api, rmock):
- rmock.get('https://example.com/schemas', status_code=500)
+ rmock.get("https://example.com/schemas", status_code=500)

- response = api.get(url_for('api.schemas'))
+ response = api.get(url_for("api.schemas"))
  assert response.status_code == 503

- @pytest.mark.options(SCHEMA_CATALOG_URL='https://example.com/schemas')
+ @pytest.mark.options(SCHEMA_CATALOG_URL="https://example.com/schemas")
  def test_dataset_schemas_api_list_error_w_cache(self, api, rmock, mocker):
- cache_mock_set = mocker.patch.object(cache, 'set')
- mocker.patch.object(cache, 'get', return_value=ResourceSchemaMockData.get_mock_data()['schemas'])
+ cache_mock_set = mocker.patch.object(cache, "set")
+ mocker.patch.object(
+ cache, "get", return_value=ResourceSchemaMockData.get_mock_data()["schemas"]
+ )

  # Fill cache
- rmock.get('https://example.com/schemas', json=ResourceSchemaMockData.get_mock_data())
- response = api.get(url_for('api.schemas'))
+ rmock.get("https://example.com/schemas", json=ResourceSchemaMockData.get_mock_data())
+ response = api.get(url_for("api.schemas"))
  assert200(response)
- assert response.json == ResourceSchemaMockData.get_expected_assignable_schemas_from_mock_data()
+ assert (
+ response.json == ResourceSchemaMockData.get_expected_assignable_schemas_from_mock_data()
+ )
  assert cache_mock_set.called

  # Endpoint becomes unavailable
- rmock.get('https://example.com/schemas', status_code=500)
+ rmock.get("https://example.com/schemas", status_code=500)

  # Long term cache is used
- response = api.get(url_for('api.schemas'))
+ response = api.get(url_for("api.schemas"))
  assert200(response)
- assert response.json == ResourceSchemaMockData.get_expected_assignable_schemas_from_mock_data()
+ assert (
+ response.json == ResourceSchemaMockData.get_expected_assignable_schemas_from_mock_data()
+ )


- @pytest.mark.usefixtures('clean_db')
+ @pytest.mark.usefixtures("clean_db")
  class HarvestMetadataAPITest:
-
  modules = []

  # api fields should be updated before app is created
- dataset_harvest_fields['dynamic_field'] = fields.String(description='', allow_null=True)
- resource_harvest_fields['dynamic_field'] = fields.String(description='', allow_null=True)
+ dataset_harvest_fields["dynamic_field"] = fields.String(description="", allow_null=True)
+ resource_harvest_fields["dynamic_field"] = fields.String(description="", allow_null=True)

  def test_dataset_with_harvest_metadata(self, api):
  date = datetime(2022, 2, 22, tzinfo=pytz.UTC)
  harvest_metadata = HarvestDatasetMetadata(
- backend='DCAT',
+ backend="DCAT",
  created_at=date,
  modified_at=date,
- source_id='source_id',
- remote_id='remote_id',
- domain='domain.gouv.fr',
+ source_id="source_id",
+ remote_id="remote_id",
+ domain="domain.gouv.fr",
  last_update=date,
- remote_url='http://domain.gouv.fr/dataset/remote_url',
- uri='http://domain.gouv.fr/dataset/uri',
- dct_identifier='http://domain.gouv.fr/dataset/identifier',
+ remote_url="http://domain.gouv.fr/dataset/remote_url",
+ uri="http://domain.gouv.fr/dataset/uri",
+ dct_identifier="http://domain.gouv.fr/dataset/identifier",
  archived_at=date,
- archived='not-on-remote'
+ archived="not-on-remote",
  )
  dataset = DatasetFactory(harvest=harvest_metadata)

- response = api.get(url_for('api.dataset', dataset=dataset))
+ response = api.get(url_for("api.dataset", dataset=dataset))
  assert200(response)
- assert response.json['harvest'] == {
- 'backend': 'DCAT',
- 'created_at': date.isoformat(),
- 'modified_at': date.isoformat(),
- 'source_id': 'source_id',
- 'remote_id': 'remote_id',
- 'domain': 'domain.gouv.fr',
- 'last_update': date.isoformat(),
- 'remote_url': 'http://domain.gouv.fr/dataset/remote_url',
- 'uri': 'http://domain.gouv.fr/dataset/uri',
- 'dct_identifier': 'http://domain.gouv.fr/dataset/identifier',
- 'archived_at': date.isoformat(),
- 'archived': 'not-on-remote'
+ assert response.json["harvest"] == {
+ "backend": "DCAT",
+ "created_at": date.isoformat(),
+ "modified_at": date.isoformat(),
+ "source_id": "source_id",
+ "remote_id": "remote_id",
+ "domain": "domain.gouv.fr",
+ "last_update": date.isoformat(),
+ "remote_url": "http://domain.gouv.fr/dataset/remote_url",
+ "uri": "http://domain.gouv.fr/dataset/uri",
+ "dct_identifier": "http://domain.gouv.fr/dataset/identifier",
+ "archived_at": date.isoformat(),
+ "archived": "not-on-remote",
  }

  def test_dataset_dynamic_harvest_metadata_without_api_field(self, api):
- harvest_metadata = HarvestDatasetMetadata(
- dynamic_field_but_no_api_field_defined='DCAT'
- )
+ harvest_metadata = HarvestDatasetMetadata(dynamic_field_but_no_api_field_defined="DCAT")
  dataset = DatasetFactory(harvest=harvest_metadata)

- response = api.get(url_for('api.dataset', dataset=dataset))
+ response = api.get(url_for("api.dataset", dataset=dataset))
  assert200(response)
- assert response.json['harvest'] == {}
+ assert response.json["harvest"] == {}

  def test_dataset_dynamic_harvest_metadata_with_api_field(self, api):
- harvest_metadata = HarvestDatasetMetadata(
- dynamic_field='dynamic_value'
- )
+ harvest_metadata = HarvestDatasetMetadata(dynamic_field="dynamic_value")
  dataset = DatasetFactory(harvest=harvest_metadata)

- response = api.get(url_for('api.dataset', dataset=dataset))
+ response = api.get(url_for("api.dataset", dataset=dataset))
  assert200(response)
- assert response.json['harvest'] == {
- 'dynamic_field': 'dynamic_value',
+ assert response.json["harvest"] == {
+ "dynamic_field": "dynamic_value",
  }

  def test_dataset_with_resource_harvest_metadata(self, api):
@@ -1900,38 +1913,36 @@ class HarvestMetadataAPITest:
  harvest_metadata = HarvestResourceMetadata(
  created_at=date,
  modified_at=date,
- uri='http://domain.gouv.fr/dataset/uri',
+ uri="http://domain.gouv.fr/dataset/uri",
  )
  dataset = DatasetFactory(resources=[ResourceFactory(harvest=harvest_metadata)])

- response = api.get(url_for('api.dataset', dataset=dataset))
+ response = api.get(url_for("api.dataset", dataset=dataset))
  assert200(response)
- assert response.json['resources'][0]['harvest'] == {
- 'created_at': date.isoformat(),
- 'modified_at': date.isoformat(),
- 'uri': 'http://domain.gouv.fr/dataset/uri',
+ assert response.json["resources"][0]["harvest"] == {
+ "created_at": date.isoformat(),
+ "modified_at": date.isoformat(),
+ "uri": "http://domain.gouv.fr/dataset/uri",
  }

  def test_resource_dynamic_harvest_metadata_without_api_field(self, api):
  harvest_metadata = HarvestResourceMetadata(
- dynamic_field_but_no_api_field_defined='dynamic_value'
+ dynamic_field_but_no_api_field_defined="dynamic_value"
  )
  dataset = DatasetFactory(resources=[ResourceFactory(harvest=harvest_metadata)])

- response = api.get(url_for('api.dataset', dataset=dataset))
+ response = api.get(url_for("api.dataset", dataset=dataset))
  assert200(response)
- assert response.json['resources'][0]['harvest'] == {}
+ assert response.json["resources"][0]["harvest"] == {}

  def test_resource_dynamic_harvest_metadata_with_api_field(self, api):
- harvest_metadata = HarvestResourceMetadata(
- dynamic_field='dynamic_value'
- )
+ harvest_metadata = HarvestResourceMetadata(dynamic_field="dynamic_value")
  dataset = DatasetFactory(resources=[ResourceFactory(harvest=harvest_metadata)])

- response = api.get(url_for('api.dataset', dataset=dataset))
+ response = api.get(url_for("api.dataset", dataset=dataset))
  assert200(response)
- assert response.json['resources'][0]['harvest'] == {
- 'dynamic_field': 'dynamic_value',
+ assert response.json["resources"][0]["harvest"] == {
+ "dynamic_field": "dynamic_value",
  }

  def test_dataset_with_harvest_computed_dates(self, api):
@@ -1943,10 +1954,10 @@ class HarvestMetadataAPITest:
  )
  dataset = DatasetFactory(harvest=harvest_metadata)

- response = api.get(url_for('api.dataset', dataset=dataset))
+ response = api.get(url_for("api.dataset", dataset=dataset))
  assert200(response)
- assert response.json['created_at'] == creation_date.isoformat()
- assert response.json['last_modified'] == modification_date.isoformat()
+ assert response.json["created_at"] == creation_date.isoformat()
+ assert response.json["last_modified"] == modification_date.isoformat()

  resource_harvest_metadata = HarvestResourceMetadata(
  created_at=creation_date,
@@ -1954,7 +1965,7 @@ class HarvestMetadataAPITest:
  )
  dataset = DatasetFactory(resources=[ResourceFactory(harvest=resource_harvest_metadata)])

- response = api.get(url_for('api.dataset', dataset=dataset))
+ response = api.get(url_for("api.dataset", dataset=dataset))
  assert200(response)
- assert response.json['resources'][0]['created_at'] == creation_date.isoformat()
- assert response.json['resources'][0]['last_modified'] == modification_date.isoformat()
+ assert response.json["resources"][0]["created_at"] == creation_date.isoformat()
+ assert response.json["resources"][0]["last_modified"] == modification_date.isoformat()