@data-fair/catalog-data-fair 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +4 -0
- package/index.ts +36 -0
- package/lib/capabilities.ts +18 -0
- package/lib/download.ts +226 -0
- package/lib/imports.ts +58 -0
- package/lib/prepare.ts +23 -0
- package/lib/resources/thumbnail.svg +19 -0
- package/package.json +54 -0
- package/types/catalogConfig/.type/index.d.ts +20 -0
- package/types/catalogConfig/.type/index.js +53 -0
- package/types/catalogConfig/.type/validate.js +102 -0
- package/types/catalogConfig/index.ts +1 -0
- package/types/catalogConfig/schema.json +30 -0
- package/types/datafairSchemas/.type/index.d.ts +231 -0
- package/types/datafairSchemas/.type/index.js +7 -0
- package/types/datafairSchemas/index.ts +1 -0
- package/types/datafairSchemas/schema.json +378 -0
- package/types/importConfig/.type/index.d.ts +107 -0
- package/types/importConfig/.type/index.js +273 -0
- package/types/importConfig/.type/validate.js +806 -0
- package/types/importConfig/index.ts +1 -0
- package/types/importConfig/schema.json +252 -0
- package/types/index.ts +3 -0
package/README.md
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
1
|
+
# <img alt="Data FAIR logo" src="https://cdn.jsdelivr.net/gh/data-fair/data-fair@master/ui/public/assets/logo.svg" width="40"> @data-fair/catalog-data-fair
|
|
2
|
+
|
|
3
|
+
A simple Data Fair plugin for the Data Fair catalogs service.
|
|
4
|
+
This plugin can be used as a template for creating new plugins.
|
package/index.ts
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import type CatalogPlugin from '@data-fair/types-catalogs'
|
|
2
|
+
import { importConfigSchema, configSchema, assertConfigValid, type DataFairConfig } from '#types'
|
|
3
|
+
import { type DataFairCapabilities, capabilities } from './lib/capabilities.ts'
|
|
4
|
+
|
|
5
|
+
// Since the plugin is very frequently imported, each function is imported on demand,
|
|
6
|
+
// instead of loading the entire plugin.
|
|
7
|
+
// This file should not contain any code, but only constants and dynamic imports of functions.
|
|
8
|
+
|
|
9
|
+
const plugin: CatalogPlugin<DataFairConfig, DataFairCapabilities> = {
|
|
10
|
+
async prepare (context) {
|
|
11
|
+
const prepare = (await import('./lib/prepare.ts')).default
|
|
12
|
+
return prepare(context)
|
|
13
|
+
},
|
|
14
|
+
|
|
15
|
+
async listResources (context) {
|
|
16
|
+
const { listResources } = await import('./lib/imports.ts')
|
|
17
|
+
return listResources(context)
|
|
18
|
+
},
|
|
19
|
+
|
|
20
|
+
async getResource (context) {
|
|
21
|
+
const { getResource } = await import('./lib/download.ts')
|
|
22
|
+
return getResource(context)
|
|
23
|
+
},
|
|
24
|
+
|
|
25
|
+
metadata: {
|
|
26
|
+
title: 'Catalog Data Fair',
|
|
27
|
+
description: 'Data Fair plugin for Data Fair Catalog',
|
|
28
|
+
thumbnailPath: './lib/resources/thumbnail.svg',
|
|
29
|
+
capabilities
|
|
30
|
+
},
|
|
31
|
+
|
|
32
|
+
importConfigSchema,
|
|
33
|
+
configSchema,
|
|
34
|
+
assertConfigValid
|
|
35
|
+
}
|
|
36
|
+
export default plugin
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import type { Capability } from '@data-fair/types-catalogs'
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* The list of capabilities of the plugin.
|
|
5
|
+
* These capabilities define the actions that can be performed with the plugin.
|
|
6
|
+
* We add "as const" to ensure the type is a list of literal type, not a list of strings.
|
|
7
|
+
* This allows TypeScript to check if the plugin has the required funcitons for each capability.
|
|
8
|
+
*/
|
|
9
|
+
export const capabilities = [
|
|
10
|
+
'import',
|
|
11
|
+
'search',
|
|
12
|
+
'pagination',
|
|
13
|
+
'importConfig',
|
|
14
|
+
'thumbnail',
|
|
15
|
+
] satisfies Capability[]
|
|
16
|
+
|
|
17
|
+
export type DataFairCapabilities = typeof capabilities
|
|
18
|
+
export default capabilities
|
package/lib/download.ts
ADDED
|
@@ -0,0 +1,226 @@
|
|
|
1
|
+
import type { DataFairConfig, DataFairDataset, ImportConfig } from '#types'
|
|
2
|
+
import type { CatalogPlugin, GetResourceContext, Resource } from '@data-fair/types-catalogs'
|
|
3
|
+
import axios from '@data-fair/lib-node/axios.js'
|
|
4
|
+
import * as fs from 'fs'
|
|
5
|
+
import { join } from 'path'
|
|
6
|
+
import { Transform } from 'stream'
|
|
7
|
+
|
|
8
|
+
/**
|
|
9
|
+
* Retrieves a resource by first fetching its metadata and then downloading the actual resource.
|
|
10
|
+
* The downloaded file path is added to the dataset metadata before returning.
|
|
11
|
+
*
|
|
12
|
+
* @param context - The context containing configuration and parameters required to fetch and download the resource.
|
|
13
|
+
* @returns A promise that resolves to the dataset metadata with the downloaded file path included.
|
|
14
|
+
*/
|
|
15
|
+
export const getResource = async (context: GetResourceContext<DataFairConfig>): ReturnType<CatalogPlugin['getResource']> => {
|
|
16
|
+
context.log.step('Import de la ressource')
|
|
17
|
+
|
|
18
|
+
const resource = await getMetaData(context)
|
|
19
|
+
resource.filePath = await downloadResource(context, resource)
|
|
20
|
+
|
|
21
|
+
return resource
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
/**
|
|
25
|
+
* Returns the DataFair Resource with all its metadatas
|
|
26
|
+
* @param catalogConfig the DataFair configuration [ex: { url: 'https://example.com' }]
|
|
27
|
+
* @param resourceId the dataset Id to fetch fields from
|
|
28
|
+
* @returns the Resource corresponding to the id by this configuration
|
|
29
|
+
*/
|
|
30
|
+
const getMetaData = async ({ catalogConfig, resourceId, log }: GetResourceContext<DataFairConfig>): Promise<Resource> => {
|
|
31
|
+
let dataset: DataFairDataset
|
|
32
|
+
try {
|
|
33
|
+
const url = `${catalogConfig.url}/data-fair/api/v1/datasets/${resourceId}`
|
|
34
|
+
const res = (await axios.get(url))
|
|
35
|
+
if (res.status !== 200) {
|
|
36
|
+
throw new Error(`HTTP error : ${res.status}, ${res.data}`)
|
|
37
|
+
}
|
|
38
|
+
dataset = res.data
|
|
39
|
+
log.info('Import des métadonnées de la ressource', { url })
|
|
40
|
+
} catch (e) {
|
|
41
|
+
console.error('Error while fetching metadatas', e)
|
|
42
|
+
throw new Error(`Erreur lors de la récuperation de la resource DataFair. ${e instanceof Error ? e.message : e}`)
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
const resource: Resource = {
|
|
46
|
+
id: resourceId,
|
|
47
|
+
title: dataset.title,
|
|
48
|
+
description: dataset.description,
|
|
49
|
+
format: 'csv',
|
|
50
|
+
origin: `${catalogConfig.url}/datasets/${resourceId}`,
|
|
51
|
+
frequency: dataset.frequency,
|
|
52
|
+
image: dataset.image,
|
|
53
|
+
keywords: dataset.keywords,
|
|
54
|
+
size: dataset.file?.size,
|
|
55
|
+
schema: dataset.schema,
|
|
56
|
+
filePath: '',
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
if (dataset.license) {
|
|
60
|
+
resource.license = {
|
|
61
|
+
title: dataset.license.title ?? '',
|
|
62
|
+
href: dataset.license.href ?? '',
|
|
63
|
+
}
|
|
64
|
+
}
|
|
65
|
+
return resource
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
/**
|
|
69
|
+
* Download a specified from a Data Fair service.
|
|
70
|
+
* If the resource has a distant file and no import configuration will download the distant file, otherwise the data will be fetch by set of rows.
|
|
71
|
+
* @param context - the download context, contains the download configuration, the resource Id
|
|
72
|
+
* @param res - the metadatas about the resource.
|
|
73
|
+
* @returns A promise resolving to the file path of the downloaded CSV.
|
|
74
|
+
*/
|
|
75
|
+
const downloadResource = async (context: GetResourceContext<DataFairConfig>, res: Resource): Promise<string> => {
|
|
76
|
+
const filePath = join(context.tmpDir, `${context.resourceId}.csv`)
|
|
77
|
+
try {
|
|
78
|
+
if (res.size && res.size > 0 && context.importConfig.fields?.length === 0 && context.importConfig.filters?.length === 0) {
|
|
79
|
+
await downloadResourceFile(filePath, context)
|
|
80
|
+
} else {
|
|
81
|
+
await downloadResourceLines(filePath, context)
|
|
82
|
+
}
|
|
83
|
+
return filePath
|
|
84
|
+
} catch (error) {
|
|
85
|
+
console.error('Error while downloading the file', error)
|
|
86
|
+
context.log.error(`Erreur pendant le téléchargement du fichier : ${error instanceof Error ? error.message : error}`)
|
|
87
|
+
throw new Error(`Erreur pendant le téléchargement du fichier: ${error instanceof Error ? error.message : String(error)}`)
|
|
88
|
+
}
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
/**
|
|
92
|
+
* Downloads the rows of a dataset thanks to its full file and saves them as a CSV file in a given file path. The configuration of the importConfig is not applicable.
|
|
93
|
+
* @param filePath - The path to the temporary file where the CSV will be saved.
|
|
94
|
+
* @param catalogConfig - The DataFair configuration object.
|
|
95
|
+
* @param resourceId - The Id of the dataset to download.
|
|
96
|
+
* @param log - The log utilitary to display messages
|
|
97
|
+
* @returns A promise that resolves when the file is successfully downloaded and saved.
|
|
98
|
+
* @throws If there is an error writing the file or fetching the dataset.
|
|
99
|
+
*/
|
|
100
|
+
const downloadResourceFile = async (filePath: string, { catalogConfig, resourceId, log }: GetResourceContext<DataFairConfig>): Promise<void> => {
|
|
101
|
+
const url = `${catalogConfig.url}/data-fair/api/v1/datasets/${resourceId}/full`
|
|
102
|
+
log.info('Import des données de la ressource', url)
|
|
103
|
+
|
|
104
|
+
const fileStream = fs.createWriteStream(filePath)
|
|
105
|
+
|
|
106
|
+
const response = await axios.get(url, { responseType: 'stream' })
|
|
107
|
+
|
|
108
|
+
if (response.status !== 200) {
|
|
109
|
+
throw new Error(`Error while fetching data: HTTP ${response.statusText}`)
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
return new Promise<void>((resolve, reject) => {
|
|
113
|
+
response.data.pipe(fileStream)
|
|
114
|
+
fileStream.on('finish', () => {
|
|
115
|
+
resolve()
|
|
116
|
+
})
|
|
117
|
+
response.data.on('error', (err: any) => {
|
|
118
|
+
fs.unlink(filePath, () => { }) // Delete the file in case of error
|
|
119
|
+
reject(err)
|
|
120
|
+
})
|
|
121
|
+
fileStream.on('error', (err) => {
|
|
122
|
+
fs.unlink(filePath, () => { }) // Delete the file in case of error
|
|
123
|
+
reject(err)
|
|
124
|
+
})
|
|
125
|
+
})
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
/**
|
|
129
|
+
* Downloads the rows of a dataset matching the given filters and saves them as a CSV file in a given file path.
|
|
130
|
+
* @param filePath - The path to the temporary file where the CSV will be saved.
|
|
131
|
+
* @param catalogConfig - The DataFair configuration object.
|
|
132
|
+
* @param resourceId - The Id of the dataset to download.
|
|
133
|
+
* @param importConfig - The import configuration, including filters to apply.
|
|
134
|
+
* @param log - The log utilitary to display messages
|
|
135
|
+
* @returns A promise that resolves when the file is successfully downloaded and saved.
|
|
136
|
+
* @throws If there is an error writing the file or fetching the dataset.
|
|
137
|
+
*/
|
|
138
|
+
const downloadResourceLines = async (destFile: string, { catalogConfig, resourceId, importConfig, log }: GetResourceContext<DataFairConfig> & { importConfig: ImportConfig }) => {
|
|
139
|
+
let url: string | null = `${catalogConfig.url}/data-fair/api/v1/datasets/${resourceId}/lines?format=csv&size=3000`
|
|
140
|
+
|
|
141
|
+
if (importConfig.fields) {
|
|
142
|
+
url += '&select=' + importConfig.fields.map(field => field.key).join(',')
|
|
143
|
+
}
|
|
144
|
+
|
|
145
|
+
if (importConfig.filters) {
|
|
146
|
+
importConfig.filters.forEach((filter) => {
|
|
147
|
+
switch (filter.type) {
|
|
148
|
+
case 'in':
|
|
149
|
+
case 'nin':
|
|
150
|
+
url += `&${filter.field.key}_${filter.type}="${filter.values?.join('","')}"`
|
|
151
|
+
break
|
|
152
|
+
case 'starts':
|
|
153
|
+
case 'gte':
|
|
154
|
+
case 'lte':
|
|
155
|
+
url += `&${filter.field.key}_${filter.type}=${filter.value}`
|
|
156
|
+
break
|
|
157
|
+
default:
|
|
158
|
+
break
|
|
159
|
+
}
|
|
160
|
+
})
|
|
161
|
+
}
|
|
162
|
+
|
|
163
|
+
log.info('Import des données de la ressource', url)
|
|
164
|
+
const writer = fs.createWriteStream(destFile)
|
|
165
|
+
let isFirstChunk = true
|
|
166
|
+
|
|
167
|
+
while (url) {
|
|
168
|
+
console.log(url)
|
|
169
|
+
const response = await axios.get(url, { responseType: 'stream' })
|
|
170
|
+
if (response.status !== 200) {
|
|
171
|
+
throw new Error(`Error while fetching data: HTTP ${response.statusText}`)
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
await new Promise<void>((resolve, reject) => {
|
|
175
|
+
let stream = response.data
|
|
176
|
+
if (!isFirstChunk) {
|
|
177
|
+
let skippedHeader = false
|
|
178
|
+
stream = response.data.pipe(new Transform({
|
|
179
|
+
transform (chunk, _encoding, callback) {
|
|
180
|
+
if (!skippedHeader) {
|
|
181
|
+
const headerEndIndex = chunk.indexOf('\n')
|
|
182
|
+
if (headerEndIndex !== -1) {
|
|
183
|
+
chunk = chunk.slice(headerEndIndex + 1)
|
|
184
|
+
skippedHeader = true
|
|
185
|
+
}
|
|
186
|
+
}
|
|
187
|
+
this.push(chunk)
|
|
188
|
+
callback()
|
|
189
|
+
}
|
|
190
|
+
}))
|
|
191
|
+
}
|
|
192
|
+
stream.pipe(writer, { end: false })
|
|
193
|
+
stream.on('end', () => {
|
|
194
|
+
const linkHeader = response.headers.link
|
|
195
|
+
url = extractNextPageUrl(linkHeader)
|
|
196
|
+
isFirstChunk = false
|
|
197
|
+
resolve()
|
|
198
|
+
})
|
|
199
|
+
stream.on('error', (error: any) => {
|
|
200
|
+
writer.close()
|
|
201
|
+
console.error(`Error while fetching lines at ${url}`, error)
|
|
202
|
+
reject(error)
|
|
203
|
+
})
|
|
204
|
+
})
|
|
205
|
+
}
|
|
206
|
+
writer.end()
|
|
207
|
+
}
|
|
208
|
+
|
|
209
|
+
/**
|
|
210
|
+
* Extract the next url (to fetch the next page) from the headers.
|
|
211
|
+
* @param linkHeader the header where the `next` url should be
|
|
212
|
+
* @returns the url if exists, null otherwise
|
|
213
|
+
*/
|
|
214
|
+
const extractNextPageUrl = (linkHeader: string | undefined): string | null => {
|
|
215
|
+
if (!linkHeader) return null
|
|
216
|
+
const links = linkHeader.split(',')
|
|
217
|
+
for (const link of links) {
|
|
218
|
+
const [urlPart, relPart] = link.split(';')
|
|
219
|
+
const url = urlPart.trim().slice(1, -1) // Remove < and >
|
|
220
|
+
const rel = relPart.trim()
|
|
221
|
+
if (rel === 'rel=next') {
|
|
222
|
+
return url
|
|
223
|
+
}
|
|
224
|
+
}
|
|
225
|
+
return null
|
|
226
|
+
}
|
package/lib/imports.ts
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
import type { DataFairCatalog, DataFairDataset, DataFairConfig } from '#types'
|
|
2
|
+
import axios from '@data-fair/lib-node/axios.js'
|
|
3
|
+
import type { CatalogPlugin, ListResourcesContext } from '@data-fair/types-catalogs'
|
|
4
|
+
import type { DataFairCapabilities } from './capabilities.ts'
|
|
5
|
+
|
|
6
|
+
type ResourceList = Awaited<ReturnType<CatalogPlugin['listResources']>>['results']
|
|
7
|
+
|
|
8
|
+
/**
|
|
9
|
+
* Transform an ODS catalog into a Data Fair catalog
|
|
10
|
+
* @param dataFairDataset the dataset to transform
|
|
11
|
+
* @returns an object containing the count of resources, the transformed resources, and an empty path array
|
|
12
|
+
*/
|
|
13
|
+
const prepareCatalog = (dataFairDatasets: DataFairDataset[]): ResourceList => {
|
|
14
|
+
const catalog: ResourceList = []
|
|
15
|
+
|
|
16
|
+
for (const dataFairDataset of dataFairDatasets) {
|
|
17
|
+
catalog.push({
|
|
18
|
+
id: dataFairDataset.id,
|
|
19
|
+
title: dataFairDataset.title,
|
|
20
|
+
format: 'csv',
|
|
21
|
+
size: dataFairDataset.file?.size ?? dataFairDataset.storage?.size ?? dataFairDataset.originalFile?.size,
|
|
22
|
+
type: 'resource',
|
|
23
|
+
} as ResourceList[number])
|
|
24
|
+
}
|
|
25
|
+
return catalog
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
/**
|
|
29
|
+
* Returns the catalog [list of dataset] from a Data Fair service
|
|
30
|
+
* @param config the Data Fair configuration
|
|
31
|
+
* @returns the list of Resources available on this catalog
|
|
32
|
+
*/
|
|
33
|
+
export const listResources = async (config: ListResourcesContext<DataFairConfig, DataFairCapabilities>): ReturnType<CatalogPlugin<DataFairConfig>['listResources']> => {
|
|
34
|
+
const dataFairParams: Record<string, any> = {}
|
|
35
|
+
if (config.params?.q) dataFairParams.q = config.params.q
|
|
36
|
+
if (config.params?.size) dataFairParams.size = config.params.size
|
|
37
|
+
if (config.params?.page) dataFairParams.page = config.params.page
|
|
38
|
+
|
|
39
|
+
let data: DataFairCatalog
|
|
40
|
+
const url = `${config.catalogConfig.url}/data-fair/api/v1/catalog/datasets`
|
|
41
|
+
try {
|
|
42
|
+
const res = (await axios.get(url, { params: dataFairParams }))
|
|
43
|
+
if (res.status !== 200) {
|
|
44
|
+
throw new Error(`HTTP error : ${res.status}, ${res.data}`)
|
|
45
|
+
}
|
|
46
|
+
data = res.data
|
|
47
|
+
} catch (e) {
|
|
48
|
+
console.error(`Error fetching datasets from ${url} : ${e}`)
|
|
49
|
+
throw new Error(`Erreur lors de la récuperation de la resource Data Fair (${e instanceof Error ? e.message : ''})`)
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
const catalog = prepareCatalog(data.results)
|
|
53
|
+
return {
|
|
54
|
+
count: data.count,
|
|
55
|
+
results: catalog,
|
|
56
|
+
path: []
|
|
57
|
+
}
|
|
58
|
+
}
|
package/lib/prepare.ts
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
import type { PrepareContext } from '@data-fair/types-catalogs'
|
|
2
|
+
import type { DataFairCapabilities } from './capabilities.ts'
|
|
3
|
+
import type { DataFairConfig } from '#types'
|
|
4
|
+
import axios from '@data-fair/lib-node/axios.js'
|
|
5
|
+
|
|
6
|
+
export default async ({ catalogConfig, capabilities, secrets }: PrepareContext<DataFairConfig, DataFairCapabilities>) => {
|
|
7
|
+
// To remove when catalog/datasets allows q parameters
|
|
8
|
+
capabilities = capabilities.filter(c => c !== 'search')
|
|
9
|
+
|
|
10
|
+
// test the url
|
|
11
|
+
try {
|
|
12
|
+
await axios.get(catalogConfig.url + '/data-fair/api/v1/catalog/datasets?size=1&select=id')
|
|
13
|
+
} catch (e) {
|
|
14
|
+
console.error('Erreur URL pendant la configuration : ', e instanceof Error ? e.message : e)
|
|
15
|
+
throw new Error('Configuration invalide, vérifiez l\'URL')
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
return {
|
|
19
|
+
catalogConfig,
|
|
20
|
+
capabilities,
|
|
21
|
+
secrets
|
|
22
|
+
}
|
|
23
|
+
}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
<svg viewBox="-5 0 110 100" width="200" height="200" class="logo" version="1.1" xmlns="http://www.w3.org/2000/svg">
|
|
2
|
+
<defs>
|
|
3
|
+
<filter id="dropshadow">
|
|
4
|
+
<feGaussianBlur in="SourceAlpha" stdDeviation="2"/>
|
|
5
|
+
<feOffset dx="2" dy="2" result="offsetblur"/>
|
|
6
|
+
<feMerge>
|
|
7
|
+
<feMergeNode/>
|
|
8
|
+
<feMergeNode in="SourceGraphic"/>
|
|
9
|
+
</feMerge>
|
|
10
|
+
</filter>
|
|
11
|
+
</defs>
|
|
12
|
+
<g>
|
|
13
|
+
<!--<path d="M35,5 L65,5 L31,92 A4,4 0 0 1 28,95 L2,95 A3,3 0 0 1 1,92 Z" fill="#90caf9" filter="url(#dropshadow)"></path>
|
|
14
|
+
<path d="M65,5 L35,5 L69,92 A4,4 0 0 0 72,95 L98,95 A3,3 0 0 0 99,92 Z" fill="#0288d1" filter="url(#dropshadow)"></path>-->
|
|
15
|
+
|
|
16
|
+
<path d="M48,4 L71,52 L0,100 Z" fill="#0288d1" filter="url(#dropshadow)"/>
|
|
17
|
+
<path d="M50,0 L25,50 L100,100 Z" fill="#90caf9" filter="url(#dropshadow)"/>
|
|
18
|
+
</g>
|
|
19
|
+
</svg>
|
package/package.json
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@data-fair/catalog-data-fair",
|
|
3
|
+
"description": "A simple Data Fair plugin for the Data Fair catalogs service.",
|
|
4
|
+
"version": "0.1.0",
|
|
5
|
+
"main": "index.ts",
|
|
6
|
+
"type": "module",
|
|
7
|
+
"scripts": {
|
|
8
|
+
"build-types": "df-build-types ./",
|
|
9
|
+
"check-types": "tsc",
|
|
10
|
+
"lint": "eslint .",
|
|
11
|
+
"prepare": "husky || true",
|
|
12
|
+
"test-base": "NODE_ENV=test EVENTS_LOG_LEVEL=alert node --disable-warning=ExperimentalWarning --test-force-exit --test-concurrency=1 --test",
|
|
13
|
+
"test-only": "npm run test-base -- --test-only test-it/*.ts",
|
|
14
|
+
"test": "npm run test-base test-it/*.ts",
|
|
15
|
+
"quality": "npm run lint && npm run build-types && npm run check-types && npm run test && npm audit --omit=dev --audit-level=critical",
|
|
16
|
+
"prepublishOnly": "npm run build-types"
|
|
17
|
+
},
|
|
18
|
+
"files": [
|
|
19
|
+
"./lib/**",
|
|
20
|
+
"./types/**",
|
|
21
|
+
"index.ts"
|
|
22
|
+
],
|
|
23
|
+
"imports": {
|
|
24
|
+
"#types": "./types/index.ts",
|
|
25
|
+
"#type/*": "./types/*"
|
|
26
|
+
},
|
|
27
|
+
"keywords": [
|
|
28
|
+
"data-fair-catalogs-plugin"
|
|
29
|
+
],
|
|
30
|
+
"license": "MIT",
|
|
31
|
+
"dependencies": {
|
|
32
|
+
"@data-fair/lib-node": "^2.8.2",
|
|
33
|
+
"@data-fair/lib-utils": "^1.6.0",
|
|
34
|
+
"prom-client": "^15.1.3"
|
|
35
|
+
},
|
|
36
|
+
"devDependencies": {
|
|
37
|
+
"@commitlint/cli": "^19.8.0",
|
|
38
|
+
"@commitlint/config-conventional": "^19.8.0",
|
|
39
|
+
"@data-fair/lib-types-builder": "^1.8.0",
|
|
40
|
+
"@data-fair/types-catalogs": "^0.3.0",
|
|
41
|
+
"@types/nock": "^10.0.3",
|
|
42
|
+
"chalk": "^5.4.1",
|
|
43
|
+
"dayjs": "^1.11.13",
|
|
44
|
+
"draftlog": "^1.0.13",
|
|
45
|
+
"eslint": "^9.25.1",
|
|
46
|
+
"husky": "^9.1.7",
|
|
47
|
+
"neostandard": "^0.12.1",
|
|
48
|
+
"nock": "^14.0.5",
|
|
49
|
+
"typescript": "^5.8.3"
|
|
50
|
+
},
|
|
51
|
+
"relativeDependencies": {
|
|
52
|
+
"@data-fair/types-catalogs": "../../catalogs/types-catalogs"
|
|
53
|
+
}
|
|
54
|
+
}
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
|
|
2
|
+
export const schemaExports: string[]
|
|
3
|
+
|
|
4
|
+
// see https://github.com/bcherny/json-schema-to-typescript/issues/439 if some types are not exported
|
|
5
|
+
/**
|
|
6
|
+
* The url of the catalog
|
|
7
|
+
*/
|
|
8
|
+
export type URL = string;
|
|
9
|
+
|
|
10
|
+
export type DataFairConfig = {
|
|
11
|
+
url: URL;
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
export declare function validate(data: any): data is DataFairConfig
|
|
16
|
+
export declare function assertValid(data: any, options?: import('@data-fair/lib-validation').AssertValidOptions): asserts data is DataFairConfig
|
|
17
|
+
export declare function returnValid(data: any, options?: import('@data-fair/lib-validation').AssertValidOptions): DataFairConfig
|
|
18
|
+
|
|
19
|
+
export declare const schema: any
|
|
20
|
+
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
/* eslint-disable */
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
import validate from './validate.js'
|
|
5
|
+
import { assertValid as assertValidGeneric } from '@data-fair/lib-validation'
|
|
6
|
+
|
|
7
|
+
export const schemaExports = [
|
|
8
|
+
"types",
|
|
9
|
+
"validate",
|
|
10
|
+
"schema"
|
|
11
|
+
]
|
|
12
|
+
|
|
13
|
+
export { validate } from './validate.js'
|
|
14
|
+
export function assertValid(data, options) {
|
|
15
|
+
assertValidGeneric(validate, data, options)
|
|
16
|
+
}
|
|
17
|
+
export function returnValid(data, options) {
|
|
18
|
+
assertValid(data, options)
|
|
19
|
+
return data
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
export const schema = {
|
|
23
|
+
"$id": "https://github.com/data-fair/catalog-data-fair/catalog-config",
|
|
24
|
+
"x-exports": [
|
|
25
|
+
"types",
|
|
26
|
+
"validate",
|
|
27
|
+
"schema"
|
|
28
|
+
],
|
|
29
|
+
"title": "DataFairConfig",
|
|
30
|
+
"type": "object",
|
|
31
|
+
"additionalProperties": false,
|
|
32
|
+
"required": [
|
|
33
|
+
"url"
|
|
34
|
+
],
|
|
35
|
+
"properties": {
|
|
36
|
+
"url": {
|
|
37
|
+
"type": "string",
|
|
38
|
+
"title": "URL",
|
|
39
|
+
"description": "The url of the catalog",
|
|
40
|
+
"x-i18n-description": {
|
|
41
|
+
"fr": "L'URL du site où le catalogue est publié. *L'URL de l'API utilisée sera* *https*[]()*://example.com***/data-fair/api/v1**"
|
|
42
|
+
},
|
|
43
|
+
"pattern": "^https?://.*[^/]$",
|
|
44
|
+
"errorMessage": "The URL must start with http:// or https:// and must not end with `/`.",
|
|
45
|
+
"x-i18n-errorMessage": {
|
|
46
|
+
"fr": "L'URL doit commencer par http:// ou https:// et ne pas se terminer par `/`."
|
|
47
|
+
},
|
|
48
|
+
"examples": [
|
|
49
|
+
"https://example.com"
|
|
50
|
+
]
|
|
51
|
+
}
|
|
52
|
+
}
|
|
53
|
+
}
|
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
/* eslint-disable */
|
|
2
|
+
// @ts-nocheck
|
|
3
|
+
|
|
4
|
+
"use strict";
|
|
5
|
+
export const validate = validate14;
|
|
6
|
+
export default validate14;
|
|
7
|
+
const schema16 = {"$id":"https://github.com/data-fair/catalog-data-fair/catalog-config","x-exports":["types","validate","schema"],"title":"DataFairConfig","type":"object","additionalProperties":false,"required":["url"],"properties":{"url":{"type":"string","title":"URL","description":"The url of the catalog","x-i18n-description":{"fr":"L'URL du site où le catalogue est publié. *L'URL de l'API utilisée sera* *https*[]()*://example.com***/data-fair/api/v1**"},"pattern":"^https?://.*[^/]$","errorMessage":"The URL must start with http:// or https:// and must not end with `/`.","x-i18n-errorMessage":{"fr":"L'URL doit commencer par http:// ou https:// et ne pas se terminer par `/`."},"examples":["https://example.com"]}}};
|
|
8
|
+
const pattern0 = new RegExp("^https?://.*[^/]$", "u");
|
|
9
|
+
|
|
10
|
+
function validate14(data, {instancePath="", parentData, parentDataProperty, rootData=data}={}){
|
|
11
|
+
/*# sourceURL="https://github.com/data-fair/catalog-data-fair/catalog-config" */;
|
|
12
|
+
let vErrors = null;
|
|
13
|
+
let errors = 0;
|
|
14
|
+
if(data && typeof data == "object" && !Array.isArray(data)){
|
|
15
|
+
if(data.url === undefined){
|
|
16
|
+
const err0 = {instancePath,schemaPath:"#/required",keyword:"required",params:{missingProperty: "url"},message:"must have required property '"+"url"+"'"};
|
|
17
|
+
if(vErrors === null){
|
|
18
|
+
vErrors = [err0];
|
|
19
|
+
}
|
|
20
|
+
else {
|
|
21
|
+
vErrors.push(err0);
|
|
22
|
+
}
|
|
23
|
+
errors++;
|
|
24
|
+
}
|
|
25
|
+
for(const key0 in data){
|
|
26
|
+
if(!(key0 === "url")){
|
|
27
|
+
const err1 = {instancePath,schemaPath:"#/additionalProperties",keyword:"additionalProperties",params:{additionalProperty: key0},message:"must NOT have additional properties"};
|
|
28
|
+
if(vErrors === null){
|
|
29
|
+
vErrors = [err1];
|
|
30
|
+
}
|
|
31
|
+
else {
|
|
32
|
+
vErrors.push(err1);
|
|
33
|
+
}
|
|
34
|
+
errors++;
|
|
35
|
+
}
|
|
36
|
+
}
|
|
37
|
+
if(data.url !== undefined){
|
|
38
|
+
let data0 = data.url;
|
|
39
|
+
if(typeof data0 === "string"){
|
|
40
|
+
if(!pattern0.test(data0)){
|
|
41
|
+
const err2 = {instancePath:instancePath+"/url",schemaPath:"#/properties/url/pattern",keyword:"pattern",params:{pattern: "^https?://.*[^/]$"},message:"must match pattern \""+"^https?://.*[^/]$"+"\""};
|
|
42
|
+
if(vErrors === null){
|
|
43
|
+
vErrors = [err2];
|
|
44
|
+
}
|
|
45
|
+
else {
|
|
46
|
+
vErrors.push(err2);
|
|
47
|
+
}
|
|
48
|
+
errors++;
|
|
49
|
+
}
|
|
50
|
+
}
|
|
51
|
+
else {
|
|
52
|
+
const err3 = {instancePath:instancePath+"/url",schemaPath:"#/properties/url/type",keyword:"type",params:{type: "string"},message:"must be string"};
|
|
53
|
+
if(vErrors === null){
|
|
54
|
+
vErrors = [err3];
|
|
55
|
+
}
|
|
56
|
+
else {
|
|
57
|
+
vErrors.push(err3);
|
|
58
|
+
}
|
|
59
|
+
errors++;
|
|
60
|
+
}
|
|
61
|
+
if(errors > 0){
|
|
62
|
+
const emErrs0 = [];
|
|
63
|
+
for(const err4 of vErrors){
|
|
64
|
+
if(((((err4.keyword !== "errorMessage") && (!err4.emUsed)) && ((err4.instancePath === instancePath+"/url") || ((err4.instancePath.indexOf(instancePath+"/url") === 0) && (err4.instancePath[instancePath+"/url".length] === "/")))) && (err4.schemaPath.indexOf("#/properties/url") === 0)) && (err4.schemaPath["#/properties/url".length] === "/")){
|
|
65
|
+
emErrs0.push(err4);
|
|
66
|
+
err4.emUsed = true;
|
|
67
|
+
}
|
|
68
|
+
}
|
|
69
|
+
if(emErrs0.length){
|
|
70
|
+
const err5 = {instancePath:instancePath+"/url",schemaPath:"#/properties/url/errorMessage",keyword:"errorMessage",params:{errors: emErrs0},message:"The URL must start with http:// or https:// and must not end with `/`."};
|
|
71
|
+
if(vErrors === null){
|
|
72
|
+
vErrors = [err5];
|
|
73
|
+
}
|
|
74
|
+
else {
|
|
75
|
+
vErrors.push(err5);
|
|
76
|
+
}
|
|
77
|
+
errors++;
|
|
78
|
+
}
|
|
79
|
+
const emErrs1 = [];
|
|
80
|
+
for(const err6 of vErrors){
|
|
81
|
+
if(!err6.emUsed){
|
|
82
|
+
emErrs1.push(err6);
|
|
83
|
+
}
|
|
84
|
+
}
|
|
85
|
+
vErrors = emErrs1;
|
|
86
|
+
errors = emErrs1.length;
|
|
87
|
+
}
|
|
88
|
+
}
|
|
89
|
+
}
|
|
90
|
+
else {
|
|
91
|
+
const err7 = {instancePath,schemaPath:"#/type",keyword:"type",params:{type: "object"},message:"must be object"};
|
|
92
|
+
if(vErrors === null){
|
|
93
|
+
vErrors = [err7];
|
|
94
|
+
}
|
|
95
|
+
else {
|
|
96
|
+
vErrors.push(err7);
|
|
97
|
+
}
|
|
98
|
+
errors++;
|
|
99
|
+
}
|
|
100
|
+
validate14.errors = vErrors;
|
|
101
|
+
return errors === 0;
|
|
102
|
+
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export * from './.type/index.js'
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
{
|
|
2
|
+
"$id": "https://github.com/data-fair/catalog-data-fair/catalog-config",
|
|
3
|
+
"x-exports": [
|
|
4
|
+
"types",
|
|
5
|
+
"validate",
|
|
6
|
+
"schema"
|
|
7
|
+
],
|
|
8
|
+
"title": "DataFairConfig",
|
|
9
|
+
"type": "object",
|
|
10
|
+
"additionalProperties": false,
|
|
11
|
+
"required": [
|
|
12
|
+
"url"
|
|
13
|
+
],
|
|
14
|
+
"properties": {
|
|
15
|
+
"url": {
|
|
16
|
+
"type": "string",
|
|
17
|
+
"title": "URL",
|
|
18
|
+
"description": "The url of the catalog",
|
|
19
|
+
"x-i18n-description": {
|
|
20
|
+
"fr": "L'URL du site où le catalogue est publié. *L'URL de l'API utilisée sera* *https*[]()*://example.com***/data-fair/api/v1**"
|
|
21
|
+
},
|
|
22
|
+
"pattern": "^https?://.*[^/]$",
|
|
23
|
+
"errorMessage": "The URL must start with http:// or https:// and must not end with `/`.",
|
|
24
|
+
"x-i18n-errorMessage": {
|
|
25
|
+
"fr": "L'URL doit commencer par http:// ou https:// et ne pas se terminer par `/`."
|
|
26
|
+
},
|
|
27
|
+
"examples": ["https://example.com"]
|
|
28
|
+
}
|
|
29
|
+
}
|
|
30
|
+
}
|