maplib 0.15.11__cp39-abi3-manylinux_2_24_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
maplib/.gitignore ADDED
@@ -0,0 +1 @@
1
+ *.so
maplib/__init__.py ADDED
@@ -0,0 +1,58 @@
1
+ # r'''
2
+ # # Overview
3
+ #
4
+ # '''
5
+
6
+ __all__ = [
7
+ "Mapping",
8
+ "a",
9
+ "Triple",
10
+ "SolutionMappings",
11
+ "IndexingOptions",
12
+ "ValidationReport",
13
+ "Instance",
14
+ "Template",
15
+ "Argument",
16
+ "Parameter",
17
+ "Variable",
18
+ "RDFType",
19
+ "XSD",
20
+ "IRI",
21
+ "Literal",
22
+ "Prefix",
23
+ "BlankNode",
24
+ "explore",
25
+ "add_triples"]
26
+
27
+ import pathlib
28
+ from .maplib import *
29
+ from .add_triples import add_triples
30
+
31
+ if (pathlib.Path(__file__).parent.resolve() / "graph_explorer").exists():
32
+ from .graph_explorer import explore
33
+ else:
34
+ async def explore(
35
+ m: "Mapping",
36
+ host: str = "localhost",
37
+ port: int = 8000,
38
+ bind: str = "localhost",
39
+ popup=True,
40
+ fts=True,
41
+ ):
42
+ """Starts a graph explorer session.
43
+ To run from Jupyter Notebook use:
44
+ >>> from maplib import explore
45
+ >>>
46
+ >>> await explore(m)
47
+
48
+ This will block further execution of the notebook until you stop the cell.
49
+
50
+ :param m: The Mapping to explore
51
+ :param host: The hostname that we will point the browser to.
52
+ :param port: The port where the graph explorer webserver listens on.
53
+ :param bind: Bind to the following host / ip.
54
+ :param popup: Pop up the browser window.
55
+ :param fts: Enable full text search indexing
56
+ """
57
+ print("Contact Data Treehouse to try!")
58
+
maplib/__init__.pyi ADDED
@@ -0,0 +1,768 @@
1
+ from pathlib import Path
2
+ from typing import Union, List, Dict, Optional, Callable, Tuple, Literal as LiteralType
3
+ from polars import DataFrame
4
+ from datetime import datetime, date
5
+
6
+
7
+ class RDFType:
8
+ """
9
+ The type of a column containing a RDF variable.
10
+ For instance, xsd:string is RDFType.Literal("http://www.w3.org/2001/XMLSchema#string")
11
+ """
12
+
13
+ IRI: Callable[[], "RDFType"]
14
+ BlankNode: Callable[[], "RDFType"]
15
+ Literal: Callable[[Union[str, "IRI"]], "RDFType"]
16
+ Multi: Callable[[List["RDFType"]], "RDFType"]
17
+ Nested: Callable[["RDFType"], "RDFType"]
18
+ Unknown: Callable[[], "RDFType"]
19
+
20
+ class SolutionMappings:
21
+ """
22
+ Detailed information about the solution mappings and the types of the variables.
23
+ """
24
+
25
+ mappings: DataFrame
26
+ rdf_types: Dict[str, RDFType]
27
+
28
+ class Variable:
29
+ """
30
+ A variable in a template.
31
+ """
32
+
33
+ name: str
34
+
35
+ def __init__(self, name: str):
36
+ """
37
+ Create a new variable.
38
+ :param name: The name of the variable.
39
+ """
40
+ ...
41
+
42
+ class IRI:
43
+ iri: str
44
+ """
45
+ An IRI.
46
+ """
47
+
48
+ def __init__(self, iri: str):
49
+ """
50
+ Create a new IRI
51
+ :param iri: IRI (without < and >).
52
+ """
53
+
54
+ class BlankNode:
55
+ """
56
+ A Blank Node.
57
+ """
58
+
59
+ name: str
60
+
61
+ def __init__(self, name: str):
62
+ """
63
+ Create a new Blank Node
64
+ :param name: Name of blank node (without _:).
65
+ """
66
+
67
+ class Prefix:
68
+ """
69
+ A prefix that can be used to ergonomically build iris.
70
+ """
71
+
72
+ def __init__(self, prefix, iri):
73
+ """
74
+ Create a new prefix.
75
+ :param prefix: The name of the prefix
76
+ :param iri: The prefix IRI.
77
+ """
78
+
79
+ def suf(self, suffix: str) -> IRI:
80
+ """
81
+ Create an IRI by appending the suffix.
82
+ :param suffix: The suffix to append.
83
+ :return:
84
+ """
85
+
86
+ class Literal:
87
+ """
88
+ An RDF literal.
89
+ """
90
+
91
+ value: str
92
+ datatype: Optional[IRI]
93
+ language: Optional[str]
94
+
95
+ def __init__(self, value: str, data_type: IRI = None, language: str = None):
96
+ """
97
+ Create a new RDF Literal
98
+ :param value: The lexical representation of the value.
99
+ :param data_type: The data type of the value (an IRI).
100
+ :param language: The language tag of the value.
101
+ """
102
+
103
+ def to_native(self) -> Union[int, float, bool, str, datetime, date]:
104
+ """
105
+
106
+ :return:
107
+ """
108
+
109
+ class Parameter:
110
+ variable: Variable
111
+ optional: bool
112
+ allow_blank: bool
113
+ rdf_type: Optional[RDFType]
114
+ default_value: Optional[Union[Literal, IRI, BlankNode]]
115
+ """
116
+ Parameters for template signatures.
117
+ """
118
+
119
+ def __init__(
120
+ self,
121
+ variable: Variable,
122
+ optional: Optional[bool] = False,
123
+ allow_blank: Optional[bool] = True,
124
+ rdf_type: Optional[RDFType] = None,
125
+ default_value: Optional[Union[Literal, IRI, BlankNode]] = None,
126
+ ):
127
+ """
128
+ Create a new parameter for a Template.
129
+ :param variable: The variable.
130
+ :param optional: Can the variable be unbound?
131
+ :param allow_blank: Can the variable be bound to a blank node?
132
+ :param rdf_type: The type of the variable. Can be nested.
133
+ :param default_value: Default value when no value provided.
134
+ """
135
+
136
+ class Argument:
137
+ def __init__(
138
+ self, term: Union[Variable, IRI, Literal], list_expand: Optional[bool] = False
139
+ ):
140
+ """
141
+ An argument for a template instance.
142
+ :param term: The term.
143
+ :param list_expand: Should the argument be expanded? Used with the list_expander argument of instance.
144
+ """
145
+
146
+ class Instance:
147
+ def __init__(
148
+ self,
149
+ iri: IRI,
150
+ arguments: List[Union[Argument, Variable, IRI, Literal, BlankNode, None]],
151
+ list_expander: Optional[LiteralType["cross", "zipMin", "zipMax"]] = None,
152
+ ):
153
+ """
154
+ A template instance.
155
+ :param iri: The IRI of the template to be instantiated.
156
+ :param arguments: The arguments for template instantiation.
157
+ :param list_expander: (How) should we do list expansion?
158
+ """
159
+
160
+ class Template:
161
+ iri: str
162
+ parameters: List[Parameter]
163
+ instances: List[Instance]
164
+ """
165
+ An OTTR Template.
166
+ Note that accessing parameters- or instances-fields returns copies.
167
+ To change these fields, you must assign new lists of parameters or instances.
168
+ """
169
+
170
+ def __init__(
171
+ self,
172
+ iri: IRI,
173
+ parameters: List[Union[Parameter, Variable]],
174
+ instances: List[Instance],
175
+ ):
176
+ """
177
+ Create a new OTTR Template
178
+ :param iri: The IRI of the template
179
+ :param parameters:
180
+ :param instances:
181
+ """
182
+
183
+ def instance(
184
+ self,
185
+ arguments: List[Union[Argument, Variable, IRI, Literal, None]],
186
+ list_expander: LiteralType["cross", "zipMin", "zipMax"] = None,
187
+ ) -> Instance:
188
+ """
189
+
190
+ :param arguments: The arguments to the template.
191
+ :param list_expander: (How) should we list-expand?
192
+ :return:
193
+ """
194
+
195
+ def Triple(
196
+ subject: Union["Argument", IRI, Variable, BlankNode],
197
+ predicate: Union["Argument", IRI, Variable, BlankNode],
198
+ object: Union["Argument", IRI, Variable, Literal, BlankNode],
199
+ list_expander: Optional[LiteralType["cross", "zipMin", "zipMax"]] = None,
200
+ ):
201
+ """
202
+ An OTTR Triple Pattern used for creating templates.
203
+ This is the basis pattern which all template instances are rewritten into.
204
+ Equivalent to:
205
+
206
+ >>> ottr = Prefix("ottr", "http://ns.ottr.xyz/0.4/")
207
+ ... Instance(ottr.suf("Triple"), [subject, predicate, object], list_expander)
208
+
209
+ :param subject:
210
+ :param predicate:
211
+ :param object:
212
+ :param list_expander:
213
+ :return:
214
+ """
215
+
216
+ class XSD:
217
+ """
218
+ The xsd namespace, for convenience.
219
+ """
220
+
221
+ boolean: IRI
222
+ byte: IRI
223
+ date: IRI
224
+ dateTime: IRI
225
+ dateTimeStamp: IRI
226
+ decimal: IRI
227
+ double: IRI
228
+ duration: IRI
229
+ float: IRI
230
+ int_: IRI
231
+ integer: IRI
232
+ language: IRI
233
+ long: IRI
234
+ short: IRI
235
+ string: IRI
236
+
237
+ def __init__(self):
238
+ """
239
+ Create the xsd namespace helper.
240
+ """
241
+
242
+ def a() -> IRI:
243
+ """
244
+ :return: IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")
245
+ """
246
+
247
+ # END COMMON WITH CHRONTEXT
248
+
249
+ class IndexingOptions:
250
+ """
251
+ Options for indexing
252
+ """
253
+
254
+ def __init__(
255
+ self,
256
+ object_sort_all: bool = None,
257
+ object_sort_some: List["IRI"] = None,
258
+ fts_path: str = None,
259
+ ):
260
+ """
261
+ Defaults to indexing on subjects and objects for select types (e.g. rdf:type and rdfs:label)
262
+
263
+ :param object_sort_all: Enable object-indexing for all suitable predicates (doubles memory requirement).
264
+ :param object_sort_some: Enable object-indexing for a selected list of predicates.
265
+ :param fts_path: Enable full text search, stored at the path
266
+ """
267
+
268
+ ParametersType = Dict[str, Tuple[DataFrame, Dict[str, RDFType]]]
269
+
270
+ class ValidationReport:
271
+ """
272
+ SHACL Validation report.
273
+ Only constructed by maplib.
274
+ """
275
+
276
+ conforms: bool
277
+ shape_targets: DataFrame
278
+ performance: DataFrame
279
+
280
+ def results(
281
+ self,
282
+ native_dataframe: bool = False,
283
+ include_datatypes: bool = False,
284
+ streaming: bool = False,
285
+ ) -> Optional[Union[DataFrame, SolutionMappings]]:
286
+ """
287
+ Return the results of the validation report, if they exist.
288
+
289
+ :param native_dataframe: Return columns with maplib-native formatting. Useful for round-trips.
290
+ :param include_datatypes: Return datatypes of the results DataFrame (returns SolutionMappings instead of DataFrame).
291
+ :param streaming: Use the Polars streaming functionality.
292
+ :return: The SHACL validation report, as a DataFrame
293
+ """
294
+
295
+ def details(
296
+ self,
297
+ native_dataframe: bool = False,
298
+ include_datatypes: bool = False,
299
+ streaming: bool = False,
300
+ ) -> Optional[DataFrame]:
301
+ """
302
+ Returns the details of the validation report.
303
+ Only available if validation was called with include_details=True.
304
+
305
+ :param native_dataframe: Return columns with maplib-native formatting. Useful for round-trips.
306
+ :param include_datatypes: Return datatypes of the results DataFrame (returns SolutionMappings instead of DataFrame).
307
+ :param streaming: Use the Polars streaming functionality.
308
+ :return: Details of the SHACL validation report, as a DataFrame
309
+ """
310
+
311
+ def graph(self, indexing=None) -> "Mapping":
312
+ """
313
+ Creates a new mapping object where the base graph is the validation report with results.
314
+ Includes the details of the validation report in the new graph if they exist.
315
+
316
+ :param indexing: Should the constructed graph be indexed?
317
+ If not specified it is inherited from the mapping where validate was called.
318
+ :return:
319
+ """
320
+
321
+ class Mapping:
322
+ """
323
+ A mapping session allowing:
324
+
325
+ * Iterative mapping using OTTR templates
326
+ * Interactive SPARQL querying and enrichment
327
+ * SHACL validation
328
+
329
+ Usage:
330
+
331
+ >>> from maplib import Mapping
332
+ ... doc = '''
333
+ ... @prefix ex:<http://example.net/ns#>.
334
+ ... ex:ExampleTemplate [?MyValue] :: {
335
+ ... ottr:Triple(ex:myObject, ex:hasValue, ?MyValue)
336
+ ... } .'''
337
+ ... m = Mapping(doc)
338
+
339
+ :param documents: a stOTTR document or a list of these
340
+ :param indexing_options: options for indexing
341
+ """
342
+
343
+ def __init__(
344
+ self,
345
+ documents: Union[str, List[str]] = None,
346
+ indexing_options: "IndexingOptions" = None,
347
+ ) -> "Mapping": ...
348
+ def add_template(self, template: Union["Template", str]):
349
+ """
350
+ Add a template to the mapping. Overwrites any existing template with the same IRI.
351
+ :param template: The template to add, as a stOTTR string or as a programmatically constructed Template.
352
+ :return:
353
+ """
354
+
355
+ def expand(
356
+ self,
357
+ template: Union[str, "Template", IRI],
358
+ df: DataFrame = None,
359
+ graph: str = None,
360
+ types: Dict[str, RDFType] = None,
361
+ validate_iris: bool = True,
362
+ delay_index: bool = True,
363
+ ) -> None:
364
+ """
365
+ Expand a template using a DataFrame
366
+ Usage:
367
+
368
+ >>> m.expand("ex:ExampleTemplate", df)
369
+
370
+ If the template has no arguments, the df argument is not necessary.
371
+
372
+ :param template: Template, IRI, IRI string or prefixed template name.
373
+ :param df: DataFrame where the columns have the same names as the template arguments
374
+ :param graph: The IRI of the graph to add triples to.
375
+ :param types: The types of the columns.
376
+ :param validate_iris: Validate any IRI-columns.
377
+ :param delay_index: Delay index construction - reduces write amplification when doing many expansions
378
+ """
379
+
380
+ def expand_triples(
381
+ self,
382
+ df: DataFrame = None,
383
+ verb: str = None,
384
+ graph: str = None,
385
+ types: Dict[str, RDFType] = None,
386
+ validate_iris: bool = True,
387
+ delay_index: bool = True,
388
+ ) -> None:
389
+ """
390
+ Expand a template using a DataFrame with columns subject, object and verb
391
+ The verb column can also be supplied as a string if it is the same for all rows.
392
+ Usage:
393
+
394
+ >>> m.expand_triples(df)
395
+
396
+ If the template has no arguments, the df argument is not necessary.
397
+
398
+ :param df: DataFrame where the columns are named subject and object. May also contain a verb-column.
399
+ :param verb: The uri of the verb.
400
+ :param graph: The IRI of the graph to add triples to.
401
+ :param types: The types of the columns.
402
+ :param validate_iris: Validate any IRI-columns.
403
+ :param delay_index: Delay index construction - reduces write amplification when doing many expansions
404
+ """
405
+
406
+ def expand_default(
407
+ self,
408
+ df: DataFrame,
409
+ primary_key_column: str,
410
+ dry_run: bool = False,
411
+ graph: str = None,
412
+ types: Dict[str, RDFType] = None,
413
+ validate_iris: bool = True,
414
+ delay_index: bool = True,
415
+ ) -> str:
416
+ """
417
+ Create a default template and expand it based on a dataframe.
418
+ Usage:
419
+
420
+ >>> template_string = m.expand_default(df, "myKeyCol")
421
+ ... print(template_string)
422
+
423
+ :param df: DataFrame where the columns have the same names as the template arguments
424
+ :param primary_key_column: This column will be the subject of all triples in the generated template.
425
+ :param dry_run: Do not expand the template, only return the string.
426
+ :param graph: The IRI of the graph to add triples to.
427
+ :param types: The types of the columns.
428
+ :param validate_iris: Validate any IRI-columns.
429
+ :param delay_index: Delay index construction - reduces write amplification when doing many expansions
430
+ :return: The generated template
431
+ """
432
+
433
+ def query(
434
+ self,
435
+ query: str,
436
+ parameters: ParametersType = None,
437
+ include_datatypes: bool = False,
438
+ native_dataframe: bool = False,
439
+ graph: str = None,
440
+ streaming: bool = False,
441
+ return_json: bool = False,
442
+ include_transient: bool = True,
443
+ ) -> Union[
444
+ DataFrame, SolutionMappings, List[Union[DataFrame, SolutionMappings, str]], None
445
+ ]:
446
+ """
447
+ Query the contained knowledge graph using SPARQL
448
+ Currently, SELECT, CONSTRUCT and INSERT are supported.
449
+ Usage:
450
+
451
+ >>> df = mapping.query('''
452
+ ... PREFIX ex:<http://example.net/ns#>
453
+ ... SELECT ?obj1 ?obj2 WHERE {
454
+ ... ?obj1 ex:hasObj ?obj2
455
+ ... }''')
456
+ ... print(df)
457
+
458
+ :param query: The SPARQL query string
459
+ :param parameters: PVALUES Parameters, a DataFrame containing the value bindings in the custom PVALUES construction.
460
+ :param native_dataframe: Return columns with maplib-native formatting. Useful for round-trips.
461
+ :param include_datatypes: Datatypes are not returned by default, set to true to return a dict with the solution mappings and the datatypes.
462
+ :param graph: The IRI of the graph to query.
463
+ :param streaming: Use Polars streaming
464
+ :param return_json: Return JSON string.
465
+ :param include_transient: Include transient triples when querying.
466
+ :return: DataFrame (Select), list of DataFrames (Construct) containing results, or None for Insert-queries
467
+
468
+ """
469
+
470
+ def insert(
471
+ self,
472
+ query: str,
473
+ parameters: ParametersType = None,
474
+ include_datatypes: bool = False,
475
+ native_dataframe: bool = False,
476
+ transient: bool = False,
477
+ streaming: bool = False,
478
+ source_graph: str = None,
479
+ target_graph: str = None,
480
+ delay_index: bool = True,
481
+ include_transient: bool = True,
482
+ ):
483
+ """
484
+ Insert the results of a Construct query in the graph.
485
+ Useful for being able to use the same query for inspecting what will be inserted and actually inserting.
486
+ Usage:
487
+
488
+ >>> m = Mapping(doc)
489
+ ... # Omitted
490
+ ... hpizzas = '''
491
+ ... PREFIX pizza:<https://github.com/magbak/maplib/pizza#>
492
+ ... PREFIX ing:<https://github.com/magbak/maplib/pizza/ingredients#>
493
+ ... CONSTRUCT { ?p a pizza:HeterodoxPizza }
494
+ ... WHERE {
495
+ ... ?p a pizza:Pizza .
496
+ ... ?p pizza:hasIngredient ing:Pineapple .
497
+ ... }'''
498
+ ... m.insert(hpizzas)
499
+
500
+ :param query: The SPARQL Insert query string
501
+ :param parameters: PVALUES Parameters, a DataFrame containing the value bindings in the custom PVALUES construction.
502
+ :param native_dataframe: Return columns with maplib-native formatting. Useful for round-trips.
503
+ :param include_datatypes: Datatypes are not returned by default, set to true to return a dict with the solution mappings and the datatypes.
504
+ :param transient: Should the inserted triples be transient?
505
+ :param source_graph: The IRI of the source graph to execute the construct query.
506
+ :param target_graph: The IRI of the target graph to insert into.
507
+ :param streaming: Use Polars streaming
508
+ :param delay_index: Delay indexing, use when making multiple inserts of the same predicate.
509
+ :param include_transient: Include transient triples when querying (but see "transient" above).
510
+ :return: None
511
+ """
512
+
513
+ def validate(
514
+ self,
515
+ shape_graph: str,
516
+ include_details: bool = False,
517
+ include_conforms: bool = False,
518
+ include_shape_graph: bool = True,
519
+ streaming: bool = False,
520
+ max_shape_results: int = None,
521
+ result_storage: str = None,
522
+ only_shapes: List[str] = None,
523
+ deactivate_shapes: List[str] = None,
524
+ dry_run: bool = False,
525
+ ) -> ValidationReport:
526
+ """
527
+ Validate the contained knowledge graph using SHACL
528
+ Assumes that the contained knowledge graph also contains SHACL Shapes.
529
+
530
+ :param shape_graph: The IRI of the Shape Graph.
531
+ :param include_details: Include details of SHACL evaluation alongside the report. Currently uses a lot of memory.
532
+ :param include_conforms: Include those results that conformed. Also applies to details.
533
+ :param include_shape_graph: Include the shape graph in the report, useful when creating the graph from the report.
534
+ :param include_datatypes: Return the datatypes of the validation report (and details).
535
+ :param streaming: Use Polars streaming
536
+ :param max_shape_results: Maximum number of results per shape. Reduces the size of the result set.
537
+ :param result_storage: Where to store validation results. Can reduce memory use for large result sets.
538
+ :param only_shapes: Validate only these shapes, None means all shapes are validated (must be IRI, cannot be used with deactivate_shapes).
539
+ :param deactivate_shapes: Disable validation of these shapes (must be IRI, cannot be used with only_shapes).
540
+ :param dry_run: Only find targets of shapes, but do not validate them.
541
+ :return: Validation report containing a report (report.df) and whether the graph conforms (report.conforms)
542
+ """
543
+
544
+ def read_triples(
545
+ self,
546
+ file_path: Union[str, Path],
547
+ format: LiteralType["ntriples", "turtle", "rdf/xml", "xml", "rdfxml"] = None,
548
+ base_iri: str = None,
549
+ transient: bool = False,
550
+ parallel: bool = False,
551
+ checked: bool = True,
552
+ graph: str = None,
553
+ replace_graph: bool = False,
554
+ ) -> None:
555
+ """
556
+ Reads triples from a file path.
557
+ You can specify the format, or it will be derived using file extension, e.g. filename.ttl or filename.nt.
558
+ Specify transient if you only want the triples to be available for further querying and validation,
559
+ but not persisted using write-methods.
560
+
561
+ Usage:
562
+
563
+ >>> m.read_triples("my_triples.ttl")
564
+
565
+ :param file_path: The path of the file containing triples
566
+ :param format: One of "ntriples", "turtle", "rdf/xml", otherwise it is inferred from the file extension.
567
+ :param base_iri: Base iri
568
+ :param transient: Should these triples be included when writing the graph to the file system?
569
+ :param parallel: Parse triples in parallel, currently only NTriples. Assumes all prefixes are in the beginning of the document.
570
+ :param checked: Check IRIs etc.
571
+ :param graph: The IRI of the graph to read the triples into, if None, it will be the default graph.
572
+ :param replace_graph: Replace the graph with these triples? Will replace the default graph if no graph is specified.
573
+ """
574
+
575
+ def read_triples_string(
576
+ self,
577
+ s: str,
578
+ format: LiteralType["ntriples", "turtle", "rdf/xml", "xml", "rdfxml"],
579
+ base_iri: str = None,
580
+ transient: bool = False,
581
+ parallel: bool = False,
582
+ checked: bool = True,
583
+ graph: str = None,
584
+ replace_graph: bool = False,
585
+ ) -> None:
586
+ """
587
+ Reads triples from a string.
588
+ Specify transient if you only want the triples to be available for further querying and validation,
589
+ but not persisted using write-methods.
590
+
591
+ Usage:
592
+
593
+ >>> m.read_triples(my_ntriples_string, format="ntriples")
594
+
595
+ :param s: String containing serialized triples.
596
+ :param format: One of "ntriples", "turtle", "rdf/xml".
597
+ :param base_iri: Base iri
598
+ :param transient: Should these triples be included when writing the graph to the file system?
599
+ :param parallel: Parse triples in parallel, currently only NTriples. Assumes all prefixes are in the beginning of the document.
600
+ :param checked: Check IRIs etc.
601
+ :param graph: The IRI of the graph to read the triples into.
602
+ :param replace_graph: Replace the graph with these triples? Will replace the default graph if no graph is specified.
603
+ """
604
+
605
+ def write_ntriples(self, file_path: Union[str, Path], graph: str = None) -> None:
606
+ """
607
+ DEPRECATED: use write_triples with format="ntriples"
608
+ Write the non-transient triples to the file path specified in the NTriples format.
609
+
610
+ Usage:
611
+
612
+ >>> m.write_ntriples("my_triples.nt")
613
+
614
+ :param file_path: The path of the file containing triples
615
+ :param graph: The IRI of the graph to write.
616
+ """
617
+
618
+ def write_triples(
619
+ self,
620
+ file_path: Union[str, Path],
621
+ format: LiteralType["ntriples", "turtle", "rdf/xml"] = None,
622
+ graph: str = None,
623
+ ) -> None:
624
+ """
625
+ Write the non-transient triples to the specified file path in the chosen format.
626
+
627
+ Usage:
628
+
629
+ >>> m.write_triples("my_triples.nt", format="ntriples")
630
+
631
+ :param file_path: The path of the file containing triples
632
+ :param format: One of "ntriples", "turtle", "rdf/xml".
633
+ :param graph: The IRI of the graph to write.
634
+ """
635
+
636
+ def write_ntriples_string(self, graph: str = None) -> str:
637
+ """
638
+ DEPRECATED: use write_triples_string with format="ntriples"
639
+ Write the non-transient triples to a string in memory.
640
+
641
+ Usage:
642
+
643
+ >>> s = m.write_ntriples_string()
644
+
645
+ :param graph: The IRI of the graph to write.
646
+ :return: Triples in mapping in the NTriples format (potentially a large string)
647
+ """
648
+
649
+ def write_triples_string(
650
+ self, format: LiteralType["ntriples", "turtle", "rdf/xml"] = None, graph: str = None
651
+ ) -> str:
652
+ """
653
+ Write the non-transient triples to a string in memory.
654
+ Supersedes the deprecated write_ntriples_string method.
655
+
656
+ Usage:
657
+
658
+ >>> s = m.write_triples_string(format="turtle")
659
+
660
+ :param format: One of "ntriples", "turtle", "rdf/xml".
661
+ :param graph: The IRI of the graph to write.
662
+ :return: Triples in the mapping, serialized in the specified format (potentially a large string)
663
+ """
664
+
665
+ def write_native_parquet(
666
+ self, folder_path: Union[str, Path], graph: str = None
667
+ ) -> None:
668
+ """
669
+ Write non-transient triples using the internal native Parquet format.
670
+
671
+ Usage:
672
+
673
+ >>> m.write_native_parquet("output_folder")
674
+
675
+ :param folder_path: The path of the folder to write triples in the native format.
676
+ :param graph: The IRI of the graph to write.
677
+ """
678
+
679
+ def create_sprout(self):
680
+ """
681
+ A sprout is a simplified way of dealing with multiple graphs.
682
+ See also `Mapping.insert_sprout` and `Mapping.detach_sprout`
683
+
684
+ :return:
685
+ """
686
+
687
+ def insert_sprout(
688
+ self,
689
+ query: str,
690
+ parameters: ParametersType = None,
691
+ include_datatypes: bool = False,
692
+ native_dataframe: bool = False,
693
+ transient: bool = False,
694
+ streaming: bool = False,
695
+ source_graph: str = None,
696
+ target_graph: str = None,
697
+ delay_index: bool = True,
698
+ include_transient: bool = True,
699
+ ):
700
+ """
701
+ Insert the results of a Construct query in a sprouted graph, which is created if no sprout is active.
702
+ Sprouts are a simplified way of dealing with multiple graphs.
703
+ Useful for being able to use the same query for inspecting what will be inserted and actually inserting.
704
+ See also `Mapping.detach_sprout`
705
+
706
+ Usage:
707
+
708
+ >>> m = Mapping(doc)
709
+ ... m.create_sprout()
710
+ ... # Omitted
711
+ ... hpizzas = '''
712
+ ... PREFIX pizza:<https://github.com/magbak/maplib/pizza#>
713
+ ... PREFIX ing:<https://github.com/magbak/maplib/pizza/ingredients#>
714
+ ... CONSTRUCT { ?p a pizza:HeterodoxPizza }
715
+ ... WHERE {
716
+ ... ?p a pizza:Pizza .
717
+ ... ?p pizza:hasIngredient ing:Pineapple .
718
+ ... }'''
719
+ ... m.insert_sprout(hpizzas)
720
+
721
+ :param query: The SPARQL Insert query string
722
+ :param parameters: PVALUES Parameters, a DataFrame containing the value bindings in the custom PVALUES construction.
723
+ :param native_dataframe: Return columns with maplib-native formatting. Useful for round-trips.
724
+ :param include_datatypes: Datatypes are not returned by default, set to true to return a dict with the solution mappings and the datatypes.
725
+ :param transient: Should the inserted triples be included in exports?
726
+ :param source_graph: The IRI of the source graph to execute the construct query.
727
+ :param target_graph: The IRI of the target graph to insert into.
728
+ :param streaming: Use Polars streaming
729
+ :param delay_index: Delay indexing, use when making multiple inserts of the same predicate to improve performance.
730
+ :param include_transient: Include transient triples when querying (see also "transient" above).
731
+ :return: None
732
+ """
733
+
734
+ def detach_sprout(self) -> "Mapping":
735
+ """
736
+ Detaches and returns the sprout from the mapping.
737
+
738
+ :return: The sprout as its own Mapping.
739
+ """
740
+
741
+ def get_predicate_iris(
742
+ self, graph: str = None, include_transient: bool = False
743
+ ) -> List["IRI"]:
744
+ """
745
+ :param graph: The graph to get the predicate iris from.
746
+ :param include_transient: Should we include predicates only between transient triples?
747
+ :return: The IRIs of the predicates currently in the given graph.
748
+ """
749
+
750
+ def get_predicate(
751
+ self, iri: "IRI", graph: str = None, include_transient: bool = False
752
+ ) -> List["SolutionMappings"]:
753
+ """
754
+ :param iri: The predicate IRI
755
+ :param graph: The graph to get the predicate from.
756
+ :param include_transient: Should we include transient triples?
757
+ :return: A list of the underlying tables that store a given predicate.
758
+ """
759
+
760
+ def create_index(
761
+ self, options: "IndexingOptions" = None, all: bool = True, graph: str = None
762
+ ):
763
+ """
764
+ :param options: Indexing options
765
+ :param all: Apply to all existing and new graphs
766
+ :param graph: The graph where indexes should be added
767
+ :return:
768
+ """
maplib/add_triples.py ADDED
@@ -0,0 +1,29 @@
1
+ from maplib.maplib import Mapping, Template, IRI, Triple, Variable
2
+
3
+
4
+ def add_triples(
5
+ source: Mapping, target: Mapping, source_graph: str = None, target_graph: str = None
6
+ ):
7
+ """(Zero) copy the triples from one Mapping into another.
8
+
9
+ :param source: The source mapping
10
+ :param target: The target mapping
11
+ :param source_graph: The named graph in the source mapping to copy from. None means default graph.
12
+ :param target_graph: The named graph in the target mapping to copy into. None means default graph.
13
+ """
14
+ for p in source.get_predicate_iris(source_graph):
15
+ subject = Variable("subject")
16
+ object = Variable("object")
17
+ template = Template(
18
+ iri=IRI("urn:maplib:tmp"),
19
+ parameters=[subject, object],
20
+ instances=[Triple(subject, p, object)],
21
+ )
22
+ sms = source.get_predicate(p, source_graph)
23
+ for sm in sms:
24
+ target.expand(
25
+ template,
26
+ sm.mappings,
27
+ types=sm.rdf_types,
28
+ graph=target_graph,
29
+ )
maplib/maplib.abi3.so ADDED
Binary file
maplib/py.typed ADDED
File without changes
@@ -0,0 +1,206 @@
1
+ Metadata-Version: 2.3
2
+ Name: maplib
3
+ Version: 0.15.11
4
+ Classifier: Development Status :: 4 - Beta
5
+ Classifier: License :: OSI Approved :: Apache Software License
6
+ Classifier: Programming Language :: Python :: 3 :: Only
7
+ Classifier: Programming Language :: Python :: 3.9
8
+ Classifier: Programming Language :: Python :: 3.10
9
+ Classifier: Programming Language :: Python :: 3.11
10
+ Classifier: Programming Language :: Rust
11
+ Classifier: Topic :: Database :: Database Engines/Servers
12
+ Classifier: Topic :: Scientific/Engineering
13
+ Requires-Dist: polars >=0.20.13
14
+ Requires-Dist: pyarrow >=7.0.0
15
+ License-File: LICENSE
16
+ Summary: Dataframe-based interactive knowledge graph construction
17
+ Keywords: rdf,graph,dataframe,sparql,ottr
18
+ Author-email: Magnus Bakken <magnus@data-treehouse.com>
19
+ Requires-Python: >=3.9
20
+ Description-Content-Type: text/markdown; charset=UTF-8; variant=GFM
21
+ Project-URL: Homepage, https://github.com/DataTreehouse/maplib
22
+ Project-URL: Documentation, https://datatreehouse.github.io/maplib/maplib/maplib.html
23
+ Project-URL: Repository, https://github.com/DataTreehouse/maplib
24
+ Project-URL: Changelog, https://github.com/DataTreehouse/maplib/releases
25
+
26
+ ## maplib: High-performance RDF knowledge graph construction, SHACL validation and SPARQL-based enrichment in Python
27
+ maplib is a knowledge graph construction library for building RDF knowledge graphs using template expansion ([OTTR](https://ottr.xyz/) Templates). Maplib features SPARQL- and SHACL-engines that are available as the graph is being constructed, allowing enrichment and validation. It can construct and validate knowledge graphs with millions of nodes in seconds.
28
+
29
+ maplib allows you to leverage your existing skills with Pandas or Polars to extract and wrangle data from existing databases and spreadsheets, before applying simple templates to them to build a knowledge graph.
30
+
31
+ Template expansion is typically zero-copy and nearly instantaneous, and the built-in SPARQL and SHACL engines means you can query, inspect, enrich and validate the knowledge graph immediately.
32
+
33
+ maplib is written in Rust, it is built on [Apache Arrow](https://arrow.apache.org/) using [Pola.rs](https://www.pola.rs/) and uses libraries from [Oxigraph](https://github.com/oxigraph/oxigraph) for handling linked data as well as parsing SPARQL queries.
34
+
35
+ ## Installing
36
+ The package is published on [PyPi](https://pypi.org/project/maplib/) and the API documented [here](https://datatreehouse.github.io/maplib/maplib.html):
37
+ ```shell
38
+ pip install maplib
39
+ ```
40
+ Please send us a message, e.g. on LinkedIn (search for Data Treehouse) or on our [webpage](https://www.data-treehouse.com/contact-8) if you want to try out SHACL.
41
+
42
+ ## Mapping
43
+ We can easily map DataFrames to RDF-graphs using the Python library. Below is a reproduction of the example in the paper [1]. Assume that we have a DataFrame given by:
44
+
45
+ ```python
46
+ import polars as pl
47
+ pl.Config.set_fmt_str_lengths(150)
48
+
49
+ pi = "https://github.com/DataTreehouse/maplib/pizza#"
50
+ df = pl.DataFrame({
51
+ "p":[pi + "Hawaiian", pi + "Grandiosa"],
52
+ "c":[pi + "CAN", pi + "NOR"],
53
+ "ings": [[pi + "Pineapple", pi + "Ham"],
54
+ [pi + "Pepper", pi + "Meat"]]
55
+ })
56
+ print(df)
57
+ ```
58
+ That is, our DataFrame is:
59
+
60
+ | p | c | ings |
61
+ |-------------------------------|--------------------------------|------------------------------------------|
62
+ | str | str | list[str] |
63
+ | "https://.../pizza#Hawaiian" | "https://.../maplib/pizza#CAN" | [".../pizza#Pineapple", ".../pizza#Ham"] |
64
+ | "https://.../pizza#Grandiosa" | "https://.../maplib/pizza#NOR" | [".../pizza#Pepper", ".../pizza#Meat"] |
65
+
66
+ Then we can define an OTTR template, and create our knowledge graph by expanding this template with our DataFrame as input:
67
+ ```python
68
+ from maplib import Mapping, Prefix, Template, Argument, Parameter, Variable, RDFType, Triple, a
69
+ pi = Prefix("pi", pi)
70
+
71
+ p_var = Variable("p")
72
+ c_var = Variable("c")
73
+ ings_var = Variable("ings")
74
+
75
+ template = Template(
76
+ iri= pi.suf("PizzaTemplate"),
77
+ parameters= [
78
+ Parameter(variable=p_var, rdf_type=RDFType.IRI()),
79
+ Parameter(variable=c_var, rdf_type=RDFType.IRI()),
80
+ Parameter(variable=ings_var, rdf_type=RDFType.Nested(RDFType.IRI()))
81
+ ],
82
+ instances= [
83
+ Triple(p_var, a(), pi.suf("Pizza")),
84
+ Triple(p_var, pi.suf("fromCountry"), c_var),
85
+ Triple(
86
+ p_var,
87
+ pi.suf("hasIngredient"),
88
+ Argument(term=ings_var, list_expand=True),
89
+ list_expander="cross")
90
+ ]
91
+ )
92
+
93
+ m = Mapping()
94
+ m.expand(template, df)
95
+ hpizzas = """
96
+ PREFIX pi:<https://github.com/DataTreehouse/maplib/pizza#>
97
+ CONSTRUCT { ?p a pi:HeterodoxPizza }
98
+ WHERE {
99
+ ?p a pi:Pizza .
100
+ ?p pi:hasIngredient pi:Pineapple .
101
+ }"""
102
+ m.insert(hpizzas)
103
+ return m
104
+ ```
105
+
106
+ We can immediately query the mapped knowledge graph:
107
+
108
+ ```python
109
+ m.query("""
110
+ PREFIX pi:<https://github.com/DataTreehouse/maplib/pizza#>
111
+ SELECT ?p ?i WHERE {
112
+ ?p a pi:Pizza .
113
+ ?p pi:hasIngredient ?i .
114
+ }
115
+ """)
116
+ ```
117
+
118
+ The query gives the following result (a DataFrame):
119
+
120
+ | p | i |
121
+ |---------------------------------|---------------------------------------|
122
+ | str | str |
123
+ | "<https://.../pizza#Grandiosa>" | "<https://.../pizza#Meat>" |
124
+ | "<https://.../pizza#Grandiosa>" | "<https://.../pizza#Pepper>" |
125
+ | "<https://.../pizza#Hawaiian>" | "<https://.../pizza#Pineapple>" |
126
+ | "<https://.../pizza#Hawaiian>" | "<https://.../pizza#Ham>" |
127
+
128
+ Next, we are able to perform a construct query, which creates new triples but does not insert them.
129
+
130
+ ```python
131
+ hpizzas = """
132
+ PREFIX pi:<https://github.com/DataTreehouse/maplib/pizza#>
133
+ CONSTRUCT { ?p a pi:UnorthodoxPizza }
134
+ WHERE {
135
+ ?p a pi:Pizza .
136
+ ?p pi:hasIngredient pi:Pineapple .
137
+ }"""
138
+ res = m.query(hpizzas)
139
+ res[0]
140
+ ```
141
+
142
+ The resulting triples are given below:
143
+
144
+ | subject | verb | object |
145
+ |--------------------------------|--------------------------------------|---------------------------------------|
146
+ | str | str | str |
147
+ | "<https://.../pizza#Hawaiian>" | "<http://.../22-rdf-syntax-ns#type>" | "<https://.../pizza#UnorthodoxPizza>" |
148
+
149
+ If we are happy with the output of this construct-query, we can insert it in the mapping state. Afterwards we check that the triple is added with a query.
150
+
151
+ ```python
152
+ m.insert(hpizzas)
153
+ m.query("""
154
+ PREFIX pi:<https://github.com/DataTreehouse/maplib/pizza#>
155
+
156
+ SELECT ?p WHERE {
157
+ ?p a pi:UnorthodoxPizza
158
+ }
159
+ """)
160
+ ```
161
+
162
+ Indeed, we have added the triple:
163
+
164
+ | p |
165
+ |------------------------------------------------------------|
166
+ | str |
167
+ | "<https://github.com/DataTreehouse/maplib/pizza#Hawaiian>" |
168
+
169
+ ## API
170
+ The [API](https://datatreehouse.github.io/maplib/maplib.html) is simple, and contains only one class and a few methods for:
171
+ - expanding templates
172
+ - querying with SPARQL
173
+ - validating with SHACL
174
+ - importing triples (Turtle, RDF/XML, NTriples)
175
+ - writing triples (Turtle, RDF/XML, NTriples)
176
+ - creating a new Mapping object (sprout) based on queries over the current Mapping object.
177
+
178
+ The API is documented [HERE](https://datatreehouse.github.io/maplib/maplib.html)
179
+
180
+ ## Roadmap of features and optimizations
181
+ Spring 2025
182
+ - Datalog reasoning support
183
+ - Reduced memory footprint
184
+ - Further SPARQL optimizations
185
+ - JSON-LD support
186
+
187
+ Fall 2025
188
+ - SHACL rules support
189
+ - Improved TTL serialization (prettier and faster)
190
+ +++
191
+
192
+ The roadmap is subject to change, particularly in response to user and customer requests.
193
+
194
+ ## References
195
+ There is an associated paper [1] with associated benchmarks showing superior performance and scalability that can be found [here](https://ieeexplore.ieee.org/document/10106242). OTTR is described in [2].
196
+
197
+ [1] M. Bakken, "maplib: Interactive, literal RDF model mapping for industry," in IEEE Access, doi: 10.1109/ACCESS.2023.3269093.
198
+
199
+ [2] M. G. Skjæveland, D. P. Lupp, L. H. Karlsen, and J. W. Klüwer, “Ottr: Formal templates for pattern-based ontology engineering.” in WOP (Book),
200
+ 2021, pp. 349–377.
201
+
202
+ ## Licensing
203
+ All code produced since August 1st. 2023 is copyrighted to [Data Treehouse AS](https://www.data-treehouse.com/) with an Apache 2.0 license unless otherwise noted.
204
+
205
+ All code which was produced before August 1st. 2023 copyrighted to [Prediktor AS](https://www.prediktor.com/) with an Apache 2.0 license unless otherwise noted, and has been financed by [The Research Council of Norway](https://www.forskningsradet.no/en/) (grant no. 316656) and [Prediktor AS](https://www.prediktor.com/) as part of a PhD Degree. The code at this state is archived in the repository at [https://github.com/magbak/maplib](https://github.com/magbak/maplib).
206
+
@@ -0,0 +1,10 @@
1
+ maplib-0.15.11.dist-info/METADATA,sha256=F0jVNMeBpo166-vMIY-MHeXMP9a7p2d8d12tkojw9tc,9058
2
+ maplib-0.15.11.dist-info/WHEEL,sha256=CJlf0CwwxWB9-lL4mEcOQbzkizXEbn7t6nPcVV_BBq8,106
3
+ maplib-0.15.11.dist-info/licenses/LICENSE,sha256=jAD7dAxI84Df1hzrP67O-yOMrEfOrhLQcI8qEY6KRu8,11459
4
+ maplib/add_triples.py,sha256=DRT-FWrYiSnj8uue7i50ed32LOAcmx8_XPnDD22xU4A,1074
5
+ maplib/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
+ maplib/.gitignore,sha256=XJr_ApsGWXGOqPaqA-Z7RsnZMR07sG0hbTPurJVGFAw,5
7
+ maplib/__init__.py,sha256=8wOl80QJmT6y9MwPrllV9WSjGURcgaN_CRrtiod1lI8,1405
8
+ maplib/__init__.pyi,sha256=IkfExJJiGL1CDssp1QqA2cdFGJRWCU0vXxOiNxL0RsA,27079
9
+ maplib/maplib.abi3.so,sha256=vy4t2zq20I0DSM7ArLdfQIJ6ckSo57FjYzdWON5hp00,67783184
10
+ maplib-0.15.11.dist-info/RECORD,,
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: maturin (1.7.4)
3
+ Root-Is-Purelib: false
4
+ Tag: cp39-abi3-manylinux_2_24_x86_64
@@ -0,0 +1,202 @@
1
+ Apache License
2
+ Version 2.0, January 2004
3
+ http://www.apache.org/licenses/
4
+
5
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6
+
7
+ 1. Definitions.
8
+
9
+ "License" shall mean the terms and conditions for use, reproduction,
10
+ and distribution as defined by Sections 1 through 9 of this document.
11
+
12
+ "Licensor" shall mean the copyright owner or entity authorized by
13
+ the copyright owner that is granting the License.
14
+
15
+ "Legal Entity" shall mean the union of the acting entity and all
16
+ other entities that control, are controlled by, or are under common
17
+ control with that entity. For the purposes of this definition,
18
+ "control" means (i) the power, direct or indirect, to cause the
19
+ direction or management of such entity, whether by contract or
20
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
21
+ outstanding shares, or (iii) beneficial ownership of such entity.
22
+
23
+ "You" (or "Your") shall mean an individual or Legal Entity
24
+ exercising permissions granted by this License.
25
+
26
+ "Source" form shall mean the preferred form for making modifications,
27
+ including but not limited to software source code, documentation
28
+ source, and configuration files.
29
+
30
+ "Object" form shall mean any form resulting from mechanical
31
+ transformation or translation of a Source form, including but
32
+ not limited to compiled object code, generated documentation,
33
+ and conversions to other media types.
34
+
35
+ "Work" shall mean the work of authorship, whether in Source or
36
+ Object form, made available under the License, as indicated by a
37
+ copyright notice that is included in or attached to the work
38
+ (an example is provided in the Appendix below).
39
+
40
+ "Derivative Works" shall mean any work, whether in Source or Object
41
+ form, that is based on (or derived from) the Work and for which the
42
+ editorial revisions, annotations, elaborations, or other modifications
43
+ represent, as a whole, an original work of authorship. For the purposes
44
+ of this License, Derivative Works shall not include works that remain
45
+ separable from, or merely link (or bind by name) to the interfaces of,
46
+ the Work and Derivative Works thereof.
47
+
48
+ "Contribution" shall mean any work of authorship, including
49
+ the original version of the Work and any modifications or additions
50
+ to that Work or Derivative Works thereof, that is intentionally
51
+ submitted to Licensor for inclusion in the Work by the copyright owner
52
+ or by an individual or Legal Entity authorized to submit on behalf of
53
+ the copyright owner. For the purposes of this definition, "submitted"
54
+ means any form of electronic, verbal, or written communication sent
55
+ to the Licensor or its representatives, including but not limited to
56
+ communication on electronic mailing lists, source code control systems,
57
+ and issue tracking systems that are managed by, or on behalf of, the
58
+ Licensor for the purpose of discussing and improving the Work, but
59
+ excluding communication that is conspicuously marked or otherwise
60
+ designated in writing by the copyright owner as "Not a Contribution."
61
+
62
+ "Contributor" shall mean Licensor and any individual or Legal Entity
63
+ on behalf of whom a Contribution has been received by Licensor and
64
+ subsequently incorporated within the Work.
65
+
66
+ 2. Grant of Copyright License. Subject to the terms and conditions of
67
+ this License, each Contributor hereby grants to You a perpetual,
68
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69
+ copyright license to reproduce, prepare Derivative Works of,
70
+ publicly display, publicly perform, sublicense, and distribute the
71
+ Work and such Derivative Works in Source or Object form.
72
+
73
+ 3. Grant of Patent License. Subject to the terms and conditions of
74
+ this License, each Contributor hereby grants to You a perpetual,
75
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76
+ (except as stated in this section) patent license to make, have made,
77
+ use, offer to sell, sell, import, and otherwise transfer the Work,
78
+ where such license applies only to those patent claims licensable
79
+ by such Contributor that are necessarily infringed by their
80
+ Contribution(s) alone or by combination of their Contribution(s)
81
+ with the Work to which such Contribution(s) was submitted. If You
82
+ institute patent litigation against any entity (including a
83
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
84
+ or a Contribution incorporated within the Work constitutes direct
85
+ or contributory patent infringement, then any patent licenses
86
+ granted to You under this License for that Work shall terminate
87
+ as of the date such litigation is filed.
88
+
89
+ 4. Redistribution. You may reproduce and distribute copies of the
90
+ Work or Derivative Works thereof in any medium, with or without
91
+ modifications, and in Source or Object form, provided that You
92
+ meet the following conditions:
93
+
94
+ (a) You must give any other recipients of the Work or
95
+ Derivative Works a copy of this License; and
96
+
97
+ (b) You must cause any modified files to carry prominent notices
98
+ stating that You changed the files; and
99
+
100
+ (c) You must retain, in the Source form of any Derivative Works
101
+ that You distribute, all copyright, patent, trademark, and
102
+ attribution notices from the Source form of the Work,
103
+ excluding those notices that do not pertain to any part of
104
+ the Derivative Works; and
105
+
106
+ (d) If the Work includes a "NOTICE" text file as part of its
107
+ distribution, then any Derivative Works that You distribute must
108
+ include a readable copy of the attribution notices contained
109
+ within such NOTICE file, excluding those notices that do not
110
+ pertain to any part of the Derivative Works, in at least one
111
+ of the following places: within a NOTICE text file distributed
112
+ as part of the Derivative Works; within the Source form or
113
+ documentation, if provided along with the Derivative Works; or,
114
+ within a display generated by the Derivative Works, if and
115
+ wherever such third-party notices normally appear. The contents
116
+ of the NOTICE file are for informational purposes only and
117
+ do not modify the License. You may add Your own attribution
118
+ notices within Derivative Works that You distribute, alongside
119
+ or as an addendum to the NOTICE text from the Work, provided
120
+ that such additional attribution notices cannot be construed
121
+ as modifying the License.
122
+
123
+ You may add Your own copyright statement to Your modifications and
124
+ may provide additional or different license terms and conditions
125
+ for use, reproduction, or distribution of Your modifications, or
126
+ for any such Derivative Works as a whole, provided Your use,
127
+ reproduction, and distribution of the Work otherwise complies with
128
+ the conditions stated in this License.
129
+
130
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
131
+ any Contribution intentionally submitted for inclusion in the Work
132
+ by You to the Licensor shall be under the terms and conditions of
133
+ this License, without any additional terms or conditions.
134
+ Notwithstanding the above, nothing herein shall supersede or modify
135
+ the terms of any separate license agreement you may have executed
136
+ with Licensor regarding such Contributions.
137
+
138
+ 6. Trademarks. This License does not grant permission to use the trade
139
+ names, trademarks, service marks, or product names of the Licensor,
140
+ except as required for reasonable and customary use in describing the
141
+ origin of the Work and reproducing the content of the NOTICE file.
142
+
143
+ 7. Disclaimer of Warranty. Unless required by applicable law or
144
+ agreed to in writing, Licensor provides the Work (and each
145
+ Contributor provides its Contributions) on an "AS IS" BASIS,
146
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147
+ implied, including, without limitation, any warranties or conditions
148
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149
+ PARTICULAR PURPOSE. You are solely responsible for determining the
150
+ appropriateness of using or redistributing the Work and assume any
151
+ risks associated with Your exercise of permissions under this License.
152
+
153
+ 8. Limitation of Liability. In no event and under no legal theory,
154
+ whether in tort (including negligence), contract, or otherwise,
155
+ unless required by applicable law (such as deliberate and grossly
156
+ negligent acts) or agreed to in writing, shall any Contributor be
157
+ liable to You for damages, including any direct, indirect, special,
158
+ incidental, or consequential damages of any character arising as a
159
+ result of this License or out of the use or inability to use the
160
+ Work (including but not limited to damages for loss of goodwill,
161
+ work stoppage, computer failure or malfunction, or any and all
162
+ other commercial damages or losses), even if such Contributor
163
+ has been advised of the possibility of such damages.
164
+
165
+ 9. Accepting Warranty or Additional Liability. While redistributing
166
+ the Work or Derivative Works thereof, You may choose to offer,
167
+ and charge a fee for, acceptance of support, warranty, indemnity,
168
+ or other liability obligations and/or rights consistent with this
169
+ License. However, in accepting such obligations, You may act only
170
+ on Your own behalf and on Your sole responsibility, not on behalf
171
+ of any other Contributor, and only if You agree to indemnify,
172
+ defend, and hold each Contributor harmless for any liability
173
+ incurred by, or claims asserted against, such Contributor by reason
174
+ of your accepting any such warranty or additional liability.
175
+
176
+ END OF TERMS AND CONDITIONS
177
+
178
+ APPENDIX: How to apply the Apache License to your work.
179
+
180
+ To apply the Apache License to your work, attach the following
181
+ boilerplate notice, with the fields enclosed by brackets "[]"
182
+ replaced with your own identifying information. (Don't include
183
+ the brackets!) The text should be enclosed in the appropriate
184
+ comment syntax for the file format. We also recommend that a
185
+ file or class name and description of purpose be included on the
186
+ same "printed page" as the copyright notice for easier
187
+ identification within third-party archives.
188
+
189
+ Copyright code / files produced 2022 - July 31.2023 Prediktor AS
190
+ Copyright code / files produced on or after August 1. 2023 Data Treehouse AS
191
+
192
+ Licensed under the Apache License, Version 2.0 (the "License");
193
+ you may not use this file except in compliance with the License.
194
+ You may obtain a copy of the License at
195
+
196
+ http://www.apache.org/licenses/LICENSE-2.0
197
+
198
+ Unless required by applicable law or agreed to in writing, software
199
+ distributed under the License is distributed on an "AS IS" BASIS,
200
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201
+ See the License for the specific language governing permissions and
202
+ limitations under the License.