maplib-0.17.8-cp313-cp313-macosx_11_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
maplib/.gitignore ADDED
@@ -0,0 +1 @@
1
+ *.so
maplib/__init__.py ADDED
@@ -0,0 +1,60 @@
1
+ # r'''
2
+ # # Overview
3
+ #
4
+ # '''
5
+
6
+ __all__ = [
7
+ "Model",
8
+ "a",
9
+ "Triple",
10
+ "SolutionModels",
11
+ "IndexingOptions",
12
+ "ValidationReport",
13
+ "Instance",
14
+ "Template",
15
+ "Argument",
16
+ "Parameter",
17
+ "Variable",
18
+ "RDFType",
19
+ "XSD",
20
+ "IRI",
21
+ "Literal",
22
+ "Prefix",
23
+ "BlankNode",
24
+ "explore",
25
+ "add_triples",
26
+ "MaplibException",
27
+ ]
28
+
29
+ import pathlib
30
+ from .maplib import *
31
+ from .adding_triples import add_triples
32
+
33
+ if (pathlib.Path(__file__).parent.resolve() / "graph_explorer").exists():
34
+ from .graph_explorer import explore
35
+ else:
36
+
37
+ def explore(
38
+ m: "Model",
39
+ host: str = "localhost",
40
+ port: int = 8000,
41
+ bind: str = "localhost",
42
+ popup=True,
43
+ fts=True,
44
+ ):
45
+ """Starts a graph explorer session.
46
+ To run from Jupyter Notebook use:
47
+ >>> from maplib import explore
48
+ >>>
49
+ >>> server = explore(m)
50
+ You can later stop the server with
51
+ >>> server.stop()
52
+
53
+ :param m: The Model to explore
54
+ :param host: The hostname that we will point the browser to.
55
+ :param port: The port where the graph explorer webserver listens on.
56
+ :param bind: Bind to the following host / ip.
57
+ :param popup: Pop up the browser window.
58
+ :param fts: Enable full text search indexing
59
+ """
60
+ print("Contact Data Treehouse to try!")
maplib/__init__.pyi ADDED
@@ -0,0 +1,817 @@
1
+ from pathlib import Path
2
+ from typing import Union, List, Dict, Optional, Callable, Tuple, Literal as LiteralType
3
+ from polars import DataFrame
4
+ from datetime import datetime, date
5
+
6
+ class RDFType:
7
+ """
8
+ The type of a column containing a RDF variable.
9
+ For instance, xsd:string is RDFType.Literal("http://www.w3.org/2001/XMLSchema#string")
10
+ """
11
+
12
+ IRI: Callable[[], "RDFType"]
13
+ BlankNode: Callable[[], "RDFType"]
14
+ Literal: Callable[[Union[str, "IRI"]], "RDFType"]
15
+ Multi: Callable[[List["RDFType"]], "RDFType"]
16
+ Nested: Callable[["RDFType"], "RDFType"]
17
+ Unknown: Callable[[], "RDFType"]
18
+
19
+ class SolutionMappings:
20
+ """
21
+ Detailed information about the solution mappings and the types of the variables.
22
+ """
23
+
24
+ mappings: DataFrame
25
+ rdf_types: Dict[str, RDFType]
26
+
27
+ class Variable:
28
+ """
29
+ A variable in a template.
30
+ """
31
+
32
+ name: str
33
+
34
+ def __init__(self, name: str):
35
+ """
36
+ Create a new variable.
37
+ :param name: The name of the variable.
38
+ """
39
+ ...
40
+
41
+ class IRI:
42
+ iri: str
43
+ """
44
+ An IRI.
45
+ """
46
+
47
+ def __init__(self, iri: str):
48
+ """
49
+ Create a new IRI
50
+ :param iri: IRI (without < and >).
51
+ """
52
+
53
+ class BlankNode:
54
+ """
55
+ A Blank Node.
56
+ """
57
+
58
+ name: str
59
+
60
+ def __init__(self, name: str):
61
+ """
62
+ Create a new Blank Node
63
+ :param name: Name of blank node (without _:).
64
+ """
65
+
66
+ class Prefix:
67
+ """
68
+ A prefix that can be used to ergonomically build IRIs.
69
+ """
70
+
71
+ def __init__(self, iri: str, prefix_name: Optional[str] = None):
72
+ """
73
+ Create a new prefix.
74
+ :param iri: The prefix IRI.
75
+ :param prefix_name: The name of the prefix
76
+ """
77
+
78
+ def suf(self, suffix: str) -> IRI:
79
+ """
80
+ Create an IRI by appending the suffix.
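+
+ Usage (a minimal sketch; the prefix IRI is illustrative):
+
+ >>> ex = Prefix("http://example.net/ns#")
+ >>> ex.suf("MyObject")  # IRI("http://example.net/ns#MyObject")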
81
+ :param suffix: The suffix to append.
82
+ :return: The resulting IRI.
83
+ """
84
+
85
+ class Literal:
86
+ """
87
+ An RDF literal.
88
+ """
89
+
90
+ value: str
91
+ datatype: Optional[IRI]
92
+ language: Optional[str]
93
+
94
+ def __init__(self, value: str, data_type: IRI = None, language: str = None):
95
+ """
96
+ Create a new RDF Literal
97
+ :param value: The lexical representation of the value.
98
+ :param data_type: The data type of the value (an IRI).
99
+ :param language: The language tag of the value.
100
+ """
101
+
102
+ def to_native(self) -> Union[int, float, bool, str, datetime, date]:
103
+ """
104
+
105
+ :return: The value converted to the corresponding native Python type (int, float, bool, str, datetime or date).
106
+ """
107
+
108
+ class Parameter:
109
+ variable: Variable
110
+ optional: bool
111
+ allow_blank: bool
112
+ rdf_type: Optional[RDFType]
113
+ default_value: Optional[Union[Literal, IRI, BlankNode]]
114
+ """
115
+ Parameters for template signatures.
116
+ """
117
+
118
+ def __init__(
119
+ self,
120
+ variable: Variable,
121
+ optional: Optional[bool] = False,
122
+ allow_blank: Optional[bool] = True,
123
+ rdf_type: Optional[RDFType] = None,
124
+ default_value: Optional[Union[Literal, IRI, BlankNode]] = None,
125
+ ):
126
+ """
127
+ Create a new parameter for a Template.
128
+ :param variable: The variable.
129
+ :param optional: Can the variable be unbound?
130
+ :param allow_blank: Can the variable be bound to a blank node?
131
+ :param rdf_type: The type of the variable. Can be nested.
132
+ :param default_value: Default value when no value provided.
133
+ """
134
+
135
+ class Argument:
136
+ def __init__(
137
+ self, term: Union[Variable, IRI, Literal], list_expand: Optional[bool] = False
138
+ ):
139
+ """
140
+ An argument for a template instance.
141
+ :param term: The term.
142
+ :param list_expand: Should the argument be expanded? Used with the list_expander argument of instance.
143
+ """
144
+
145
+ class Instance:
146
+ def __init__(
147
+ self,
148
+ iri: IRI,
149
+ arguments: List[Union[Argument, Variable, IRI, Literal, BlankNode, None]],
150
+ list_expander: Optional[LiteralType["cross", "zipMin", "zipMax"]] = None,
151
+ ):
152
+ """
153
+ A template instance.
154
+ :param iri: The IRI of the template to be instantiated.
155
+ :param arguments: The arguments for template instantiation.
156
+ :param list_expander: (How) should we do list expansion?
157
+ """
158
+
159
+ class Template:
160
+ iri: str
161
+ parameters: List[Parameter]
162
+ instances: List[Instance]
163
+ """
164
+ An OTTR Template.
165
+ Note that accessing the parameters or instances fields returns copies.
166
+ To change these fields, you must assign new lists of parameters or instances.
167
+ """
168
+
169
+ def __init__(
170
+ self,
171
+ iri: IRI,
172
+ parameters: List[Union[Parameter, Variable]],
173
+ instances: List[Instance],
174
+ ):
175
+ """
176
+ Create a new OTTR Template
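+
+ Usage (a minimal sketch; the names are illustrative):
+
+ >>> ex = Prefix("http://example.net/ns#")
+ >>> v = Variable("v")
+ >>> my_template = Template(
+ ...     iri=ex.suf("MyTemplate"),
+ ...     parameters=[Parameter(variable=v, rdf_type=RDFType.IRI())],
+ ...     instances=[Triple(v, a(), ex.suf("Thing"))],
+ ... )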
177
+ :param iri: The IRI of the template
178
+ :param parameters: The parameters of the template signature.
179
+ :param instances: The instances that make up the body of the template.
180
+ """
181
+
182
+ def instance(
183
+ self,
184
+ arguments: List[Union[Argument, Variable, IRI, Literal, None]],
185
+ list_expander: LiteralType["cross", "zipMin", "zipMax"] = None,
186
+ ) -> Instance:
187
+ """
188
+ Create an instance of this template.
189
+ :param arguments: The arguments to the template.
190
+ :param list_expander: (How) should we list-expand?
191
+ :return: The instance.
192
+ """
193
+
194
+ def Triple(
195
+ subject: Union["Argument", IRI, Variable, BlankNode],
196
+ predicate: Union["Argument", IRI, Variable, BlankNode],
197
+ object: Union["Argument", IRI, Variable, Literal, BlankNode],
198
+ list_expander: Optional[LiteralType["cross", "zipMin", "zipMax"]] = None,
199
+ ):
200
+ """
201
+ An OTTR Triple Pattern used for creating templates.
202
+ This is the basis pattern which all template instances are rewritten into.
203
+ Equivalent to:
204
+
205
+ >>> ottr = Prefix("http://ns.ottr.xyz/0.4/")
206
+ ... Instance(ottr.suf("Triple"), subject, predicate, object, list_expander)
207
+
208
+ :param subject:
209
+ :param predicate:
210
+ :param object:
211
+ :param list_expander:
212
+ :return:
213
+ """
214
+
215
+ class XSD:
216
+ """
217
+ The xsd namespace, for convenience.
218
+ """
219
+
220
+ boolean: IRI
221
+ byte: IRI
222
+ date: IRI
223
+ dateTime: IRI
224
+ dateTimeStamp: IRI
225
+ decimal: IRI
226
+ double: IRI
227
+ duration: IRI
228
+ float: IRI
229
+ int_: IRI
230
+ integer: IRI
231
+ language: IRI
232
+ long: IRI
233
+ short: IRI
234
+ string: IRI
235
+
236
+ def __init__(self):
237
+ """
238
+ Create the xsd namespace helper.
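+
+ Usage (a minimal sketch; the literal value is illustrative):
+
+ >>> xsd = XSD()
+ >>> lit = Literal("123", data_type=xsd.int_)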
239
+ """
240
+
241
+ def a() -> IRI:
242
+ """
243
+ :return: IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")
244
+ """
245
+
246
+ # END COMMON WITH CHRONTEXT
247
+
248
+ class IndexingOptions:
249
+ """
250
+ Options for indexing
251
+ """
252
+
253
+ def __init__(
254
+ self,
255
+ object_sort_all: bool = None,
256
+ object_sort_some: List["IRI"] = None,
257
+ fts_path: str = None,
258
+ ):
259
+ """
260
+ Defaults to indexing on subjects and objects for select types (e.g. rdf:type and rdfs:label)
261
+
262
+ :param object_sort_all: Enable object-indexing for all suitable predicates (doubles memory requirement).
263
+ :param object_sort_some: Enable object-indexing for a selected list of predicates.
264
+ :param fts_path: Enable full text search, stored at the path
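+
+ A minimal sketch (the chosen predicate is illustrative):
+
+ >>> opts = IndexingOptions(object_sort_some=[IRI("http://www.w3.org/2000/01/rdf-schema#label")])
+ >>> m = Model(indexing_options=opts)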
265
+ """
266
+
267
+ ParametersType = Dict[str, Tuple[DataFrame, Dict[str, RDFType]]]
268
+
269
+ class ValidationReport:
270
+ """
271
+ SHACL Validation report.
272
+ Only constructed by maplib.
273
+ """
274
+
275
+ conforms: bool
276
+ "Whether or not the validation report conforms to the shapes"
277
+
278
+ shape_targets: DataFrame
279
+ "A DataFrame containing the counts of the targets of each shape and constraint"
280
+
281
+ performance: DataFrame
282
+ "Performance statistics for the validation process"
283
+
284
+ def results(
285
+ self,
286
+ native_dataframe: bool = False,
287
+ include_datatypes: bool = False,
288
+ streaming: bool = False,
289
+ ) -> Optional[Union[DataFrame, "SolutionMappings"]]:
290
+ """
291
+ Return the results of the validation report, if they exist.
292
+
293
+ :param native_dataframe: Return columns with maplib-native formatting. Useful for round-trips.
294
+ :param include_datatypes: Return datatypes of the results DataFrame (returns SolutionMappings instead of DataFrame).
295
+ :param streaming: Use the Polars streaming functionality.
296
+ :return: The SHACL validation report, as a DataFrame
297
+ """
298
+
299
+ def details(
300
+ self,
301
+ native_dataframe: bool = False,
302
+ include_datatypes: bool = False,
303
+ streaming: bool = False,
304
+ ) -> Optional[DataFrame]:
305
+ """
306
+ Returns the details of the validation report.
307
+ Only available if validation was called with include_details=True.
308
+
309
+ :param native_dataframe: Return columns with maplib-native formatting. Useful for round-trips.
310
+ :param include_datatypes: Return datatypes of the results DataFrame (returns SolutionMappings instead of DataFrame).
311
+ :param streaming: Use the Polars streaming functionality.
312
+ :return: Details of the SHACL validation report, as a DataFrame
313
+ """
314
+
315
+ def graph(self) -> "Mapping":
316
+ """
317
+ Creates a new Model whose base graph is the validation report with results.
318
+ Includes the details of the validation report in the new graph if they exist.
319
+
320
+ :return: A new Model containing the validation report.
321
+ """
322
+
323
+ class Model:
324
+ """
325
+ A mapping session allowing:
326
+
327
+ * Iterative mapping using OTTR templates
328
+ * Interactive SPARQL querying and enrichment
329
+ * SHACL validation
330
+
331
+ Usage:
332
+
333
+ >>> from maplib import Model
334
+ ... doc = '''
335
+ ... @prefix ex:<http://example.net/ns#>.
336
+ ... ex:ExampleTemplate [?MyValue] :: {
337
+ ... ottr:Triple(ex:myObject, ex:hasValue, ?MyValue)
338
+ ... } .'''
339
+ ... m = Model()
340
+ ... m.add_template(doc)
341
+
342
343
+ :param indexing_options: options for indexing
344
+ """
345
+
346
+ def __init__(
347
+ self,
348
+ indexing_options: "IndexingOptions" = None,
349
+ ) -> "Model": ...
350
+ def add_template(self, template: Union["Template", str]):
351
+ """
352
+ Add a template to the model. Overwrites any existing template with the same IRI.
353
+ :param template: The template to add, as a stOTTR string or as a programmatically constructed Template.
354
+ :return:
355
+ """
356
+
357
+ def map(
358
+ self,
359
+ template: Union[str, "Template", IRI],
360
+ df: DataFrame = None,
361
+ graph: str = None,
362
+ types: Dict[str, RDFType] = None,
363
+ validate_iris: bool = True,
364
+ ) -> None:
365
+ """
366
+ Map a template using a DataFrame
367
+ Usage:
368
+
369
+ >>> m.map("ex:ExampleTemplate", df)
370
+
371
+ If the template has no arguments, the df argument is not necessary.
372
+
373
+ :param template: Template, IRI, IRI string or prefixed template name.
374
+ :param df: DataFrame where the columns have the same names as the template arguments
375
+ :param graph: The IRI of the graph to add triples to.
376
+ :param types: The types of the columns.
377
+ :param validate_iris: Validate any IRI-columns.
378
+ """
379
+
380
+ def map_triples(
381
+ self,
382
+ df: DataFrame = None,
383
+ predicate: str = None,
384
+ graph: str = None,
385
+ types: Dict[str, RDFType] = None,
386
+ validate_iris: bool = True,
387
+ ) -> None:
388
+ """
389
+ Map triples using a DataFrame with columns subject, object and (optionally) predicate.
390
+ The predicate column can also be supplied as a string if it is the same for all rows.
391
+ Usage:
392
+
393
+ >>> m.map_triples(df)
394
+
395
396
+
397
+ :param df: DataFrame with columns named subject and object. May also contain a predicate column.
398
+ :param predicate: The IRI of the predicate, used when the DataFrame has no predicate column.
399
+ :param graph: The IRI of the graph to add triples to.
400
+ :param types: The types of the columns.
401
+ :param validate_iris: Validate any IRI-columns.
402
+ """
403
+
404
+ def map_default(
405
+ self,
406
+ df: DataFrame,
407
+ primary_key_column: str,
408
+ dry_run: bool = False,
409
+ graph: str = None,
410
+ types: Dict[str, RDFType] = None,
411
+ validate_iris: bool = True,
412
+ ) -> str:
413
+ """
414
+ Create a default template and map it based on a dataframe.
415
+ Usage:
416
+
417
+ >>> template_string = m.map_default(df, "myKeyCol")
418
+ ... print(template_string)
419
+
420
+ :param df: DataFrame where the columns have the same names as the template arguments
421
+ :param primary_key_column: This column will be the subject of all triples in the generated template.
422
+ :param dry_run: Do not map the template, only return the string.
423
+ :param graph: The IRI of the graph to add triples to.
424
+ :param types: The types of the columns.
425
+ :param validate_iris: Validate any IRI-columns.
426
+ :return: The generated template as a stOTTR string.
427
+ """
428
+
429
+ def query(
430
+ self,
431
+ query: str,
432
+ parameters: ParametersType = None,
433
+ include_datatypes: bool = False,
434
+ native_dataframe: bool = False,
435
+ graph: str = None,
436
+ streaming: bool = False,
437
+ return_json: bool = False,
438
+ include_transient: bool = True,
439
+ ) -> Union[
440
+ DataFrame, SolutionMappings, List[Union[DataFrame, SolutionMappings, str]], None
441
+ ]:
442
+ """
443
+ Query the contained knowledge graph using SPARQL
444
+ Currently, SELECT, CONSTRUCT and INSERT are supported.
445
+ Usage:
446
+
447
+ >>> df = model.query('''
448
+ ... PREFIX ex:<http://example.net/ns#>
449
+ ... SELECT ?obj1 ?obj2 WHERE {
450
+ ... ?obj1 ex:hasObj ?obj2
451
+ ... }''')
452
+ ... print(df)
453
+
454
+ :param query: The SPARQL query string
455
+ :param parameters: PVALUES Parameters, a DataFrame containing the value bindings in the custom PVALUES construction.
456
+ :param native_dataframe: Return columns with maplib-native formatting. Useful for round-trips.
457
+ :param include_datatypes: Datatypes are not returned by default, set to true to return a dict with the solution mappings and the datatypes.
458
+ :param graph: The IRI of the graph to query.
459
+ :param streaming: Use Polars streaming
460
+ :param return_json: Return JSON string.
461
+ :param include_transient: Include transient triples when querying.
462
+ :return: DataFrame (Select), list of DataFrames (Construct) containing results, or None for Insert-queries
463
+
464
+ """
465
+
466
+ def update(
467
+ self,
468
+ update: str,
469
+ parameters: ParametersType = None,
470
+ streaming: bool = False,
471
+ include_transient: bool = True,
472
+ ):
473
+ """
474
+ Run a SPARQL Update against the graph.
475
+ Useful for modifying the graph in place, e.g. with INSERT/DELETE WHERE.
476
+ Usage:
477
+
478
+ >>> m = Model()
479
+ ... # Omitted
480
+ ... update_pizzas = '''
481
+ ... ...'''
482
+ ... m.update(update_pizzas)
483
+
484
+ :param update: The SPARQL Update string
485
+ :param parameters: PVALUES Parameters, a DataFrame containing the value bindings in the custom PVALUES construction.
486
+ :param streaming: Use Polars streaming
487
+ :param include_transient: Include transient triples when evaluating the update.
488
+ :return: None
489
+ """
490
+
491
+ def insert(
492
+ self,
493
+ query: str,
494
+ parameters: ParametersType = None,
495
+ include_datatypes: bool = False,
496
+ native_dataframe: bool = False,
497
+ transient: bool = False,
498
+ streaming: bool = False,
499
+ source_graph: str = None,
500
+ target_graph: str = None,
501
+ include_transient: bool = True,
502
+ ):
503
+ """
504
+ Insert the results of a Construct query in the graph.
505
+ Useful for being able to use the same query for inspecting what will be inserted and actually inserting.
506
+ Usage:
507
+
508
+ >>> m = Model()
509
+ ... # Omitted
510
+ ... hpizzas = '''
511
+ ... PREFIX pizza:<https://github.com/magbak/maplib/pizza#>
512
+ ... PREFIX ing:<https://github.com/magbak/maplib/pizza/ingredients#>
513
+ ... CONSTRUCT { ?p a pizza:HeterodoxPizza }
514
+ ... WHERE {
515
+ ... ?p a pizza:Pizza .
516
+ ... ?p pizza:hasIngredient ing:Pineapple .
517
+ ... }'''
518
+ ... m.insert(hpizzas)
519
+
520
+ :param query: The SPARQL Insert query string
521
+ :param parameters: PVALUES Parameters, a DataFrame containing the value bindings in the custom PVALUES construction.
522
+ :param native_dataframe: Return columns with maplib-native formatting. Useful for round-trips.
523
+ :param include_datatypes: Datatypes are not returned by default, set to true to return a dict with the solution mappings and the datatypes.
524
+ :param transient: Should the inserted triples be transient?
525
+ :param source_graph: The IRI of the source graph to execute the construct query.
526
+ :param target_graph: The IRI of the target graph to insert into.
527
+ :param streaming: Use Polars streaming
528
+ :param include_transient: Include transient triples when querying (but see "transient" above).
529
+ :return: None
530
+ """
531
+
532
+ def validate(
533
+ self,
534
+ shape_graph: str,
535
+ include_details: bool = False,
536
+ include_conforms: bool = False,
537
+ include_shape_graph: bool = True,
538
+ streaming: bool = False,
539
+ max_shape_constraint_results: int = None,
540
+ only_shapes: List[str] = None,
541
+ deactivate_shapes: List[str] = None,
542
+ dry_run: bool = False,
543
+ ) -> ValidationReport:
544
+ """
545
+ Validate the contained knowledge graph using SHACL
546
+ Assumes that the contained knowledge graph also contains SHACL Shapes.
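+
+ Usage (a minimal sketch; the shape graph IRI and file name are illustrative):
+
+ >>> SHAPE_GRAPH = "urn:shapes"
+ >>> m.read("my_shapes.ttl", graph=SHAPE_GRAPH)
+ >>> report = m.validate(SHAPE_GRAPH)
+ >>> print(report.conforms)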
547
+
548
+ :param shape_graph: The IRI of the Shape Graph.
549
+ :param include_details: Include details of SHACL evaluation alongside the report. Currently uses a lot of memory.
550
+ :param include_conforms: Include those results that conformed. Also applies to details.
551
+ :param include_shape_graph: Include the shape graph in the report, useful when creating the graph from the report.
552
553
+ :param streaming: Use Polars streaming
554
+ :param max_shape_constraint_results: Maximum number of results per shape and constraint. Reduces the size of the result set.
555
+ :param only_shapes: Validate only these shapes, None means all shapes are validated (must be IRI, cannot be used with deactivate_shapes).
556
+ :param deactivate_shapes: Disable validation of these shapes (must be IRI, cannot be used with only_shapes).
557
+ :param dry_run: Only find targets of shapes, but do not validate them.
558
+ :return: Validation report with the results (report.results()) and whether the graph conforms (report.conforms)
559
+ """
560
+
561
+ def read(
562
+ self,
563
+ file_path: Union[str, Path],
564
+ format: LiteralType["ntriples", "turtle", "rdf/xml", "xml", "rdfxml"] = None,
565
+ base_iri: str = None,
566
+ transient: bool = False,
567
+ parallel: bool = None,
568
+ checked: bool = True,
569
+ graph: str = None,
570
+ replace_graph: bool = False,
571
+ ) -> None:
572
+ """
573
+ Reads triples from a file path.
574
+ You can specify the format, or it will be derived using file extension, e.g. filename.ttl or filename.nt.
575
+ Specify transient if you only want the triples to be available for further querying and validation,
576
+ but not persisted using write-methods.
577
+
578
+ Usage:
579
+
580
+ >>> m.read("my_triples.ttl")
581
+
582
+ :param file_path: The path of the file containing triples
583
+ :param format: One of "ntriples", "turtle", "rdf/xml", otherwise it is inferred from the file extension.
584
+ :param base_iri: Base iri
585
+ :param transient: Should these triples be transient, i.e. not persisted by the write-methods?
586
+ :param parallel: Parse triples in parallel, currently only for NTriples and Turtle. Assumes all prefixes are at the beginning of the document. Defaults to true only for NTriples.
587
+ :param checked: Check IRIs etc.
588
+ :param graph: The IRI of the graph to read the triples into, if None, it will be the default graph.
589
+ :param replace_graph: Replace the graph with these triples? Will replace the default graph if no graph is specified.
590
+ """
591
+
592
+ def reads(
593
+ self,
594
+ s: str,
595
+ format: LiteralType["ntriples", "turtle", "rdf/xml", "xml", "rdfxml"],
596
+ base_iri: str = None,
597
+ transient: bool = False,
598
+ parallel: bool = None,
599
+ checked: bool = True,
600
+ graph: str = None,
601
+ replace_graph: bool = False,
602
+ ) -> None:
603
+ """
604
+ Reads triples from a string.
605
+ Specify transient if you only want the triples to be available for further querying and validation,
606
+ but not persisted using write-methods.
607
+
608
+ Usage:
609
+
610
+ >>> m.reads(my_ntriples_string, format="ntriples")
611
+
612
+ :param s: String containing serialized triples.
613
+ :param format: One of "ntriples", "turtle", "rdf/xml".
614
+ :param base_iri: Base iri
615
+ :param transient: Should these triples be transient, i.e. not persisted by the write-methods?
616
+ :param parallel: Parse triples in parallel, currently only for NTriples and Turtle. Assumes all prefixes are at the beginning of the document. Defaults to true for NTriples.
617
+ :param checked: Check IRIs etc.
618
+ :param graph: The IRI of the graph to read the triples into.
619
+ :param replace_graph: Replace the graph with these triples? Will replace the default graph if no graph is specified.
620
+ """
621
+
622
+ def write_cim_xml(
623
+ self,
624
+ file_path: Union[str, Path],
625
+ profile_graph: str,
626
+ model_iri: str = None,
627
+ version: str = None,
628
+ description: str = None,
629
+ created: str = None,
630
+ scenario_time: str = None,
631
+ modeling_authority_set: str = None,
632
+ prefixes: Dict[str, str] = None,
633
+ graph: str = None,
634
+ ) -> None:
635
+ """
636
+ Write the legacy CIM XML format.
637
+
638
+ >>> PROFILE_GRAPH = "urn:graph:profiles"
639
+ >>> m = Model()
640
+ >>> m.read(model_path, base_iri=publicID, format="rdf/xml")
641
+ >>> m.read("61970-600-2_Equipment-AP-Voc-RDFS2020_v3-0-0.rdf", graph=PROFILE_GRAPH, format="rdf/xml")
642
+ >>> m.read("61970-600-2_Operation-AP-Voc-RDFS2020_v3-0-0.rdf", graph=PROFILE_GRAPH, format="rdf/xml")
643
+ >>> m.write_cim_xml(
644
+ >>> "model.xml",
645
+ >>> profile_graph=PROFILE_GRAPH,
646
+ >>> description = "MyModel",
647
+ >>> created = "2023-09-14T20:27:41",
648
+ >>> scenario_time = "2023-09-14T02:44:43",
649
+ >>> modeling_authority_set="www.westernpower.co.uk",
650
+ >>> version="22",
651
+ >>> )
652
+
653
+ :param file_path: The path of the file containing triples
654
+ :param profile_graph: The IRI of the graph containing the ontology of the CIM profile to write.
655
+ :param model_iri: The IRI of the md:FullModel (model_iri a md:FullModel). Generated if not provided.
656
+ :param version: model_iri md:Model.version version .
657
+ :param description: model_iri md:Model.description description .
658
+ :param created: model_iri md:Model.created created .
659
+ :param scenario_time: model_iri md:Model.scenarioTime scenario_time .
660
+ :param modeling_authority_set: model_iri md:Model.modelingAuthoritySet modeling_authority_set .
661
+ :param prefixes: Prefixes to be used in XML export.
662
+ :param graph: The graph to write, defaults to the default graph.
663
+ """
664
+
665
+ def write(
666
+ self,
667
+ file_path: Union[str, Path],
668
+ format: LiteralType["ntriples", "turtle", "rdf/xml"] = None,
669
+ graph: str = None,
670
+ ) -> None:
671
+ """
672
+ Write the non-transient triples to the specified file path in the given format.
673
+
674
+ Usage:
675
+
676
+ >>> m.write("my_triples.nt", format="ntriples")
677
+
678
+ :param file_path: The path of the file containing triples
679
+ :param format: One of "ntriples", "turtle", "rdf/xml".
680
+ :param graph: The IRI of the graph to write.
681
+ """
682
+
683
+ def writes(
684
+ self, format: LiteralType["ntriples", "turtle", "rdf/xml"] = None, graph: str = None
685
+ ) -> str:
686
+ """
687
+ Supersedes the deprecated write_ntriples_string; use format="ntriples" for the previous behaviour.
688
+ Write the non-transient triples to a string in memory.
689
+
690
+ Usage:
691
+
692
+ >>> s = m.writes(format="turtle")
693
+
694
+ :param format: One of "ntriples", "turtle", "rdf/xml".
695
+ :param graph: The IRI of the graph to write.
696
+ :return: The triples in the graph, serialized in the given format (potentially a large string)
697
+ """
698
+
699
+ def write_native_parquet(
700
+ self, folder_path: Union[str, Path], graph: str = None
701
+ ) -> None:
702
+ """
703
+ Write non-transient triples using the internal native Parquet format.
704
+
705
+ Usage:
706
+
707
+ >>> m.write_native_parquet("output_folder")
708
+
709
+ :param folder_path: The path of the folder to write triples in the native format.
710
+ :param graph: The IRI of the graph to write.
711
+ """
712
+
713
+ def create_sprout(self):
714
+ """
715
+ Creates a sprout: a simplified way of dealing with multiple graphs.
716
+ See also `Model.insert_sprout` and `Model.detach_sprout`
717
+
718
+ :return:
719
+ """
720
+
721
+ def insert_sprout(
722
+ self,
723
+ query: str,
724
+ parameters: ParametersType = None,
725
+ include_datatypes: bool = False,
726
+ native_dataframe: bool = False,
727
+ transient: bool = False,
728
+ streaming: bool = False,
729
+ source_graph: str = None,
730
+ target_graph: str = None,
731
+ include_transient: bool = True,
732
+ ):
733
+ """
734
+ Insert the results of a Construct query in a sprouted graph, which is created if no sprout is active.
735
+ Sprouts are a simplified way of dealing with multiple graphs.
736
+ Useful for being able to use the same query for inspecting what will be inserted and actually inserting.
737
+ See also `Model.detach_sprout`
738
+
739
+ Usage:
740
+
741
+ >>> m = Model()
742
+ ... m.add_template(doc)
743
+ ... m.create_sprout()
744
+ ... # Omitted
745
+ ... hpizzas = '''
746
+ ... PREFIX pizza:<https://github.com/magbak/maplib/pizza#>
747
+ ... PREFIX ing:<https://github.com/magbak/maplib/pizza/ingredients#>
748
+ ... CONSTRUCT { ?p a pizza:HeterodoxPizza }
749
+ ... WHERE {
750
+ ... ?p a pizza:Pizza .
751
+ ... ?p pizza:hasIngredient ing:Pineapple .
752
+ ... }'''
753
+ ... m.insert_sprout(hpizzas)
754
+
755
+ :param query: The SPARQL Insert query string
756
+ :param parameters: PVALUES Parameters, a DataFrame containing the value bindings in the custom PVALUES construction.
757
+ :param native_dataframe: Return columns with maplib-native formatting. Useful for round-trips.
758
+ :param include_datatypes: Datatypes are not returned by default, set to true to return a dict with the solution mappings and the datatypes.
759
+ :param transient: Should the inserted triples be transient (i.e. excluded from exports)?
760
+ :param source_graph: The IRI of the source graph to execute the construct query.
761
+ :param target_graph: The IRI of the target graph to insert into.
762
+ :param streaming: Use Polars streaming
763
+ :param include_transient: Include transient triples when querying (see also "transient" above).
764
+ :return: None
765
+ """
766
+
767
+ def detach_sprout(self) -> "Model":
768
+ """
769
+ Detaches and returns the sprout from the model.
770
+
771
+ :return: The sprout as its own Model.
772
+ """
773
+
774
+ def get_predicate_iris(
775
+ self, graph: str = None, include_transient: bool = False
776
+ ) -> List["IRI"]:
777
+ """
778
+ :param graph: The graph to get the predicate iris from.
779
+ :param include_transient: Should we include predicates only between transient triples?
780
+ :return: The IRIs of the predicates currently in the given graph.
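+
+ A minimal sketch (m is an existing Model):
+
+ >>> for p in m.get_predicate_iris():
+ ...     print(p.iri)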
781
+ """
782
+
783
+ def get_predicate(
784
+ self, iri: "IRI", graph: str = None, include_transient: bool = False
785
+ ) -> List["SolutionMappings"]:
786
+ """
787
+ :param iri: The predicate IRI
788
+ :param graph: The graph to get the predicate from.
789
+ :param include_transient: Should we include transient triples?
790
+ :return: A list of the underlying tables that store a given predicate.
791
+ """
792
+
793
+ def create_index(
794
+ self, options: "IndexingOptions" = None, all: bool = True, graph: str = None
795
+ ):
796
+ """
797
+ :param options: Indexing options
798
+ :param all: Apply to all existing and new graphs
799
+ :param graph: The graph where indexes should be added
800
+ :return:
801
+ """
802
+
803
+ def infer(
804
+ self,
805
+ ruleset: Union[str, List[str]],
806
+ include_datatypes: bool = False,
807
+ native_dataframe: bool = False,
808
+ ) -> Optional[Dict[str, DataFrame]]:
809
+ """
810
+ Run the inference rules that are provided
811
+ :param ruleset: The Datalog ruleset (a string).
812
+ :param native_dataframe: Return columns with maplib-native formatting. Useful for round-trips.
813
+ :param include_datatypes: Datatypes are not returned by default, set to true to return a dict with the solution mappings and the datatypes.
814
+ :return: The inferred N-Tuples.
815
+ """
816
+
817
+ class MaplibException(Exception): ...
maplib/adding_triples.py ADDED
@@ -0,0 +1,29 @@
1
+ from maplib.maplib import Model, Template, IRI, Triple, Variable
2
+
3
+
4
+ def add_triples(
5
+ source: Model, target: Model, source_graph: str = None, target_graph: str = None
6
+ ):
7
+ """(Zero) copy the triples from one Model into another.
8
+
9
+ :param source: The source mapping
10
+ :param target: The target mapping
11
+ :param source_graph: The named graph in the source mapping to copy from. None means default graph.
12
+ :param target_graph: The named graph in the target mapping to copy into. None means default graph.
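+
+ A minimal sketch (m1 and m2 are existing Model instances):
+
+ >>> add_triples(m1, m2)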
13
+ """
14
+ for p in source.get_predicate_iris(source_graph):
15
+ subject = Variable("subject")
16
+ object = Variable("object")
17
+ template = Template(
18
+ iri=IRI("urn:maplib:tmp"),
19
+ parameters=[subject, object],
20
+ instances=[Triple(subject, p, object)],
21
+ )
22
+ sms = source.get_predicate(p, source_graph)
23
+ for sm in sms:
24
+ target.map(
25
+ template,
26
+ sm.mappings,
27
+ types=sm.rdf_types,
28
+ graph=target_graph,
29
+ )
maplib/maplib.cpython-313-darwin.so ADDED
Binary file
maplib/py.typed ADDED
File without changes
maplib-0.17.8.dist-info/METADATA ADDED
@@ -0,0 +1,208 @@
1
+ Metadata-Version: 2.3
2
+ Name: maplib
3
+ Version: 0.17.8
4
+ Classifier: Development Status :: 4 - Beta
5
+ Classifier: License :: OSI Approved :: Apache Software License
6
+ Classifier: Programming Language :: Python :: 3 :: Only
7
+ Classifier: Programming Language :: Python :: 3.9
8
+ Classifier: Programming Language :: Python :: 3.10
9
+ Classifier: Programming Language :: Python :: 3.11
10
+ Classifier: Programming Language :: Rust
11
+ Classifier: Topic :: Database :: Database Engines/Servers
12
+ Classifier: Topic :: Scientific/Engineering
13
+ Requires-Dist: polars >=0.20.13
14
+ Requires-Dist: pyarrow >=7.0.0
15
+ Requires-Dist: fastapi[standard] >=0.115 ; extra == 'explorer'
16
+ Provides-Extra: explorer
17
+ License-File: LICENSE
18
+ Summary: Dataframe-based interactive knowledge graph construction
19
+ Keywords: rdf,graph,dataframe,sparql,ottr
20
+ Author-email: Magnus Bakken <magnus@data-treehouse.com>
21
+ Requires-Python: >=3.9
22
+ Description-Content-Type: text/markdown; charset=UTF-8; variant=GFM
23
+ Project-URL: Homepage, https://github.com/DataTreehouse/maplib
24
+ Project-URL: Documentation, https://datatreehouse.github.io/maplib/maplib/maplib.html
25
+ Project-URL: Repository, https://github.com/DataTreehouse/maplib
26
+ Project-URL: Changelog, https://github.com/DataTreehouse/maplib/releases
27
+
28
+ ## maplib: High-performance RDF knowledge graph construction, SHACL validation and SPARQL-based enrichment in Python
29
+ maplib is a knowledge graph construction library for building RDF knowledge graphs using template expansion ([OTTR](https://ottr.xyz/) Templates). maplib features SPARQL and SHACL engines that are available as the graph is being constructed, allowing enrichment and validation. It can construct and validate knowledge graphs with millions of nodes in seconds.
30
+
31
+ maplib allows you to leverage your existing skills with Pandas or Polars to extract and wrangle data from existing databases and spreadsheets, before applying simple templates to them to build a knowledge graph.
32
+
33
+ Template expansion is typically zero-copy and nearly instantaneous, and the built-in SPARQL and SHACL engines mean you can query, inspect, enrich and validate the knowledge graph immediately.
34
+
35
+ maplib is written in Rust. It is built on [Apache Arrow](https://arrow.apache.org/) using [Pola.rs](https://www.pola.rs/), and uses libraries from [Oxigraph](https://github.com/oxigraph/oxigraph) for handling linked data as well as parsing SPARQL queries.
36
+
37
+ ## Installing
38
+ The package is published on [PyPI](https://pypi.org/project/maplib/) and the API is documented [here](https://datatreehouse.github.io/maplib/maplib.html):
39
+ ```shell
40
+ pip install maplib
41
+ ```
42
+ Please send us a message, e.g. on LinkedIn (search for Data Treehouse) or on our [webpage](https://www.data-treehouse.com/contact-8) if you want to try out SHACL.
43
+
44
+ ## Model
45
+ We can easily map DataFrames to RDF-graphs using the Python library. Below is a reproduction of the example in the paper [1]. Assume that we have a DataFrame given by:
46
+
47
+ ```python
48
+ import polars as pl
49
+ pl.Config.set_fmt_str_lengths(150)
50
+
51
+ pi = "https://github.com/DataTreehouse/maplib/pizza#"
52
+ df = pl.DataFrame({
53
+ "p":[pi + "Hawaiian", pi + "Grandiosa"],
54
+ "c":[pi + "CAN", pi + "NOR"],
55
+ "ings": [[pi + "Pineapple", pi + "Ham"],
56
+ [pi + "Pepper", pi + "Meat"]]
57
+ })
58
+ print(df)
59
+ ```
60
+ That is, our DataFrame is:
61
+
62
+ | p | c | ings |
63
+ |-------------------------------|--------------------------------|------------------------------------------|
64
+ | str | str | list[str] |
65
+ | "https://.../pizza#Hawaiian" | "https://.../maplib/pizza#CAN" | [".../pizza#Pineapple", ".../pizza#Ham"] |
66
+ | "https://.../pizza#Grandiosa" | "https://.../maplib/pizza#NOR" | [".../pizza#Pepper", ".../pizza#Meat"] |
67
+
68
+ Then we can define an OTTR template, and create our knowledge graph by expanding this template with our DataFrame as input:
69
+ ```python
70
+ from maplib import Model, Prefix, Template, Argument, Parameter, Variable, RDFType, Triple, a
71
+ pi = Prefix(pi)
72
+
73
+ p_var = Variable("p")
74
+ c_var = Variable("c")
75
+ ings_var = Variable("ings")
76
+
77
+ template = Template(
78
+ iri= pi.suf("PizzaTemplate"),
79
+ parameters= [
80
+ Parameter(variable=p_var, rdf_type=RDFType.IRI()),
81
+ Parameter(variable=c_var, rdf_type=RDFType.IRI()),
82
+ Parameter(variable=ings_var, rdf_type=RDFType.Nested(RDFType.IRI()))
83
+ ],
84
+ instances= [
85
+ Triple(p_var, a(), pi.suf("Pizza")),
86
+ Triple(p_var, pi.suf("fromCountry"), c_var),
87
+ Triple(
88
+ p_var,
89
+ pi.suf("hasIngredient"),
90
+ Argument(term=ings_var, list_expand=True),
91
+ list_expander="cross")
92
+ ]
93
+ )
94
+
95
+ m = Model()
96
+ m.map(template, df)
97
+ hpizzas = """
98
+ PREFIX pi:<https://github.com/DataTreehouse/maplib/pizza#>
99
+ CONSTRUCT { ?p a pi:HeterodoxPizza }
100
+ WHERE {
101
+ ?p a pi:Pizza .
102
+ ?p pi:hasIngredient pi:Pineapple .
103
+ }"""
104
+ m.insert(hpizzas)
105
+ return m
106
+ ```
107
+
108
+ We can immediately query the mapped knowledge graph:
109
+
110
+ ```python
111
+ m.query("""
112
+ PREFIX pi:<https://github.com/DataTreehouse/maplib/pizza#>
113
+ SELECT ?p ?i WHERE {
114
+ ?p a pi:Pizza .
115
+ ?p pi:hasIngredient ?i .
116
+ }
117
+ """)
118
+ ```
119
+
120
+ The query gives the following result (a DataFrame):
121
+
122
+ | p | i |
123
+ |---------------------------------|---------------------------------------|
124
+ | str | str |
125
+ | "<https://.../pizza#Grandiosa>" | "<https://.../pizza#Meat>" |
126
+ | "<https://.../pizza#Grandiosa>" | "<https://.../pizza#Pepper>" |
127
+ | "<https://.../pizza#Hawaiian>" | "<https://.../pizza#Pineapple>" |
128
+ | "<https://.../pizza#Hawaiian>" | "<https://.../pizza#Ham>" |
129
+
130
+ Next, we are able to perform a construct query, which creates new triples but does not insert them.
131
+
132
+ ```python
133
+ hpizzas = """
134
+ PREFIX pi:<https://github.com/DataTreehouse/maplib/pizza#>
135
+ CONSTRUCT { ?p a pi:UnorthodoxPizza }
136
+ WHERE {
137
+ ?p a pi:Pizza .
138
+ ?p pi:hasIngredient pi:Pineapple .
139
+ }"""
140
+ res = m.query(hpizzas)
141
+ res[0]
142
+ ```
143
+
144
+ The resulting triples are given below:
145
+
146
+ | subject | verb | object |
147
+ |--------------------------------|--------------------------------------|---------------------------------------|
148
+ | str | str | str |
149
+ | "<https://.../pizza#Hawaiian>" | "<http://.../22-rdf-syntax-ns#type>" | "<https://.../pizza#UnorthodoxPizza>" |
150
+
151
+ If we are happy with the output of this construct query, we can insert it into the model. Afterwards, we check that the triple was added with a query.
152
+
153
+ ```python
154
+ m.insert(hpizzas)
155
+ m.query("""
156
+ PREFIX pi:<https://github.com/DataTreehouse/maplib/pizza#>
157
+
158
+ SELECT ?p WHERE {
159
+ ?p a pi:UnorthodoxPizza
160
+ }
161
+ """)
162
+ ```
163
+
164
+ Indeed, we have added the triple:
165
+
166
+ | p |
167
+ |------------------------------------------------------------|
168
+ | str |
169
+ | "<https://github.com/DataTreehouse/maplib/pizza#Hawaiian>" |
170
+
171
+ ## API
172
+ The [API](https://datatreehouse.github.io/maplib/maplib.html) is simple, centered on the Model class, with methods for:
173
+ - expanding templates
174
+ - querying with SPARQL
175
+ - validating with SHACL
176
+ - importing triples (Turtle, RDF/XML, NTriples)
177
+ - writing triples (Turtle, RDF/XML, NTriples)
178
+ - creating a new Model object (sprout) based on queries over the current Model object.
179
+
180
+ The API is documented [HERE](https://datatreehouse.github.io/maplib/maplib.html)
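+
+ As a quick, illustrative sketch of the import/export and validation methods (the file names and shape graph IRI below are made up for the example):
+
+ ```python
+ from maplib import Model
+
+ m = Model()
+ m.read("my_triples.ttl")                     # import Turtle
+ m.read("my_shapes.ttl", graph="urn:shapes")  # import SHACL shapes into a named graph
+ report = m.validate("urn:shapes")            # validate against the shape graph
+ print(report.conforms)
+ m.write("my_triples.nt", format="ntriples")  # export as NTriples
+ ```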
181
+
182
+ ## Roadmap of features and optimizations
183
+ Spring 2025
184
+ - Datalog reasoning support ✅
185
+ - Reduced memory footprint ✅
186
+ - Further SPARQL optimizations
187
+ - JSON-LD support
188
+
189
+ Fall 2025
190
+ - SHACL rules support
191
+ - Improved TTL serialization (prettier and faster)
192
+ +++
193
+
194
+ The roadmap is subject to change, particularly based on user and customer requests.
195
+
196
+ ## References
197
+ There is an associated paper [1], with benchmarks showing superior performance and scalability, which can be found [here](https://ieeexplore.ieee.org/document/10106242). OTTR is described in [2].
198
+
199
+ [1] M. Bakken, "maplib: Interactive, literal RDF model mapping for industry," in IEEE Access, doi: 10.1109/ACCESS.2023.3269093.
200
+
201
+ [2] M. G. Skjæveland, D. P. Lupp, L. H. Karlsen, and J. W. Klüwer, “Ottr: Formal templates for pattern-based ontology engineering.” in WOP (Book),
202
+ 2021, pp. 349–377.
203
+
204
+ ## Licensing
205
+ All code produced since August 1st, 2023 is copyrighted to [Data Treehouse AS](https://www.data-treehouse.com/) with an Apache 2.0 license unless otherwise noted.
206
+
207
+ All code produced before August 1st, 2023 is copyrighted to [Prediktor AS](https://www.prediktor.com/) with an Apache 2.0 license unless otherwise noted, and has been financed by [The Research Council of Norway](https://www.forskningsradet.no/en/) (grant no. 316656) and [Prediktor AS](https://www.prediktor.com/) as part of a PhD degree. The code as of that date is archived in the repository at [https://github.com/magbak/maplib](https://github.com/magbak/maplib).
208
+
maplib-0.17.8.dist-info/RECORD ADDED
@@ -0,0 +1,10 @@
1
+ maplib-0.17.8.dist-info/METADATA,sha256=kQiWBrIVhDQgdHx2O-k9liy3I6F2tD_fWFItDmUAeYw,9130
2
+ maplib-0.17.8.dist-info/WHEEL,sha256=K0DEsw3hOaNwV0qMlVqQb_hWCuEqGXxG-3jni7ncFq0,104
3
+ maplib-0.17.8.dist-info/licenses/LICENSE,sha256=jAD7dAxI84Df1hzrP67O-yOMrEfOrhLQcI8qEY6KRu8,11459
4
+ maplib/adding_triples.py,sha256=-VttVnmnafz_pS40f6qrm3iMenJbYtKXz8fddlQ2U6s,1063
5
+ maplib/__init__.pyi,sha256=Vrys9gHGdeyZb7HtlJ9xZ5l8iJezfk786izZ7wBd2lA,28868
6
+ maplib/__init__.py,sha256=6lzPNY800rYkBJzZT3ZVv-NXQkfT41mfizTG03kR6Sk,1383
7
+ maplib/.gitignore,sha256=XJr_ApsGWXGOqPaqA-Z7RsnZMR07sG0hbTPurJVGFAw,5
8
+ maplib/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
9
+ maplib/maplib.cpython-313-darwin.so,sha256=djiZG5bHFGvBaN7dZyO4f81Gmag6NOA9rkJyn_Je0gw,69266608
10
+ maplib-0.17.8.dist-info/RECORD,,
maplib-0.17.8.dist-info/WHEEL ADDED
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: maturin (1.7.4)
3
+ Root-Is-Purelib: false
4
+ Tag: cp313-cp313-macosx_11_0_arm64
maplib-0.17.8.dist-info/licenses/LICENSE ADDED
@@ -0,0 +1,202 @@
1
+ Apache License
2
+ Version 2.0, January 2004
3
+ http://www.apache.org/licenses/
4
+
5
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6
+
7
+ 1. Definitions.
8
+
9
+ "License" shall mean the terms and conditions for use, reproduction,
10
+ and distribution as defined by Sections 1 through 9 of this document.
11
+
12
+ "Licensor" shall mean the copyright owner or entity authorized by
13
+ the copyright owner that is granting the License.
14
+
15
+ "Legal Entity" shall mean the union of the acting entity and all
16
+ other entities that control, are controlled by, or are under common
17
+ control with that entity. For the purposes of this definition,
18
+ "control" means (i) the power, direct or indirect, to cause the
19
+ direction or management of such entity, whether by contract or
20
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
21
+ outstanding shares, or (iii) beneficial ownership of such entity.
22
+
23
+ "You" (or "Your") shall mean an individual or Legal Entity
24
+ exercising permissions granted by this License.
25
+
26
+ "Source" form shall mean the preferred form for making modifications,
27
+ including but not limited to software source code, documentation
28
+ source, and configuration files.
29
+
30
+ "Object" form shall mean any form resulting from mechanical
31
+ transformation or translation of a Source form, including but
32
+ not limited to compiled object code, generated documentation,
33
+ and conversions to other media types.
34
+
35
+ "Work" shall mean the work of authorship, whether in Source or
36
+ Object form, made available under the License, as indicated by a
37
+ copyright notice that is included in or attached to the work
38
+ (an example is provided in the Appendix below).
39
+
40
+ "Derivative Works" shall mean any work, whether in Source or Object
41
+ form, that is based on (or derived from) the Work and for which the
42
+ editorial revisions, annotations, elaborations, or other modifications
43
+ represent, as a whole, an original work of authorship. For the purposes
44
+ of this License, Derivative Works shall not include works that remain
45
+ separable from, or merely link (or bind by name) to the interfaces of,
46
+ the Work and Derivative Works thereof.
47
+
48
+ "Contribution" shall mean any work of authorship, including
49
+ the original version of the Work and any modifications or additions
50
+ to that Work or Derivative Works thereof, that is intentionally
51
+ submitted to Licensor for inclusion in the Work by the copyright owner
52
+ or by an individual or Legal Entity authorized to submit on behalf of
53
+ the copyright owner. For the purposes of this definition, "submitted"
54
+ means any form of electronic, verbal, or written communication sent
55
+ to the Licensor or its representatives, including but not limited to
56
+ communication on electronic mailing lists, source code control systems,
57
+ and issue tracking systems that are managed by, or on behalf of, the
58
+ Licensor for the purpose of discussing and improving the Work, but
59
+ excluding communication that is conspicuously marked or otherwise
60
+ designated in writing by the copyright owner as "Not a Contribution."
61
+
62
+ "Contributor" shall mean Licensor and any individual or Legal Entity
63
+ on behalf of whom a Contribution has been received by Licensor and
64
+ subsequently incorporated within the Work.
65
+
66
+ 2. Grant of Copyright License. Subject to the terms and conditions of
67
+ this License, each Contributor hereby grants to You a perpetual,
68
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69
+ copyright license to reproduce, prepare Derivative Works of,
70
+ publicly display, publicly perform, sublicense, and distribute the
71
+ Work and such Derivative Works in Source or Object form.
72
+
73
+ 3. Grant of Patent License. Subject to the terms and conditions of
74
+ this License, each Contributor hereby grants to You a perpetual,
75
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76
+ (except as stated in this section) patent license to make, have made,
77
+ use, offer to sell, sell, import, and otherwise transfer the Work,
78
+ where such license applies only to those patent claims licensable
79
+ by such Contributor that are necessarily infringed by their
80
+ Contribution(s) alone or by combination of their Contribution(s)
81
+ with the Work to which such Contribution(s) was submitted. If You
82
+ institute patent litigation against any entity (including a
83
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
84
+ or a Contribution incorporated within the Work constitutes direct
85
+ or contributory patent infringement, then any patent licenses
86
+ granted to You under this License for that Work shall terminate
87
+ as of the date such litigation is filed.
88
+
89
+ 4. Redistribution. You may reproduce and distribute copies of the
90
+ Work or Derivative Works thereof in any medium, with or without
91
+ modifications, and in Source or Object form, provided that You
92
+ meet the following conditions:
93
+
94
+ (a) You must give any other recipients of the Work or
95
+ Derivative Works a copy of this License; and
96
+
97
+ (b) You must cause any modified files to carry prominent notices
98
+ stating that You changed the files; and
99
+
100
+ (c) You must retain, in the Source form of any Derivative Works
101
+ that You distribute, all copyright, patent, trademark, and
102
+ attribution notices from the Source form of the Work,
103
+ excluding those notices that do not pertain to any part of
104
+ the Derivative Works; and
105
+
106
+ (d) If the Work includes a "NOTICE" text file as part of its
107
+ distribution, then any Derivative Works that You distribute must
108
+ include a readable copy of the attribution notices contained
109
+ within such NOTICE file, excluding those notices that do not
110
+ pertain to any part of the Derivative Works, in at least one
111
+ of the following places: within a NOTICE text file distributed
112
+ as part of the Derivative Works; within the Source form or
113
+ documentation, if provided along with the Derivative Works; or,
114
+ within a display generated by the Derivative Works, if and
115
+ wherever such third-party notices normally appear. The contents
116
+ of the NOTICE file are for informational purposes only and
117
+ do not modify the License. You may add Your own attribution
118
+ notices within Derivative Works that You distribute, alongside
119
+ or as an addendum to the NOTICE text from the Work, provided
120
+ that such additional attribution notices cannot be construed
121
+ as modifying the License.
122
+
123
+ You may add Your own copyright statement to Your modifications and
124
+ may provide additional or different license terms and conditions
125
+ for use, reproduction, or distribution of Your modifications, or
126
+ for any such Derivative Works as a whole, provided Your use,
127
+ reproduction, and distribution of the Work otherwise complies with
128
+ the conditions stated in this License.
129
+
130
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
131
+ any Contribution intentionally submitted for inclusion in the Work
132
+ by You to the Licensor shall be under the terms and conditions of
133
+ this License, without any additional terms or conditions.
134
+ Notwithstanding the above, nothing herein shall supersede or modify
135
+ the terms of any separate license agreement you may have executed
136
+ with Licensor regarding such Contributions.
137
+
138
+ 6. Trademarks. This License does not grant permission to use the trade
139
+ names, trademarks, service marks, or product names of the Licensor,
140
+ except as required for reasonable and customary use in describing the
141
+ origin of the Work and reproducing the content of the NOTICE file.
142
+
143
+ 7. Disclaimer of Warranty. Unless required by applicable law or
144
+ agreed to in writing, Licensor provides the Work (and each
145
+ Contributor provides its Contributions) on an "AS IS" BASIS,
146
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147
+ implied, including, without limitation, any warranties or conditions
148
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149
+ PARTICULAR PURPOSE. You are solely responsible for determining the
150
+ appropriateness of using or redistributing the Work and assume any
151
+ risks associated with Your exercise of permissions under this License.
152
+
153
+ 8. Limitation of Liability. In no event and under no legal theory,
154
+ whether in tort (including negligence), contract, or otherwise,
155
+ unless required by applicable law (such as deliberate and grossly
156
+ negligent acts) or agreed to in writing, shall any Contributor be
157
+ liable to You for damages, including any direct, indirect, special,
158
+ incidental, or consequential damages of any character arising as a
159
+ result of this License or out of the use or inability to use the
160
+ Work (including but not limited to damages for loss of goodwill,
161
+ work stoppage, computer failure or malfunction, or any and all
162
+ other commercial damages or losses), even if such Contributor
163
+ has been advised of the possibility of such damages.
164
+
165
+ 9. Accepting Warranty or Additional Liability. While redistributing
166
+ the Work or Derivative Works thereof, You may choose to offer,
167
+ and charge a fee for, acceptance of support, warranty, indemnity,
168
+ or other liability obligations and/or rights consistent with this
169
+ License. However, in accepting such obligations, You may act only
170
+ on Your own behalf and on Your sole responsibility, not on behalf
171
+ of any other Contributor, and only if You agree to indemnify,
172
+ defend, and hold each Contributor harmless for any liability
173
+ incurred by, or claims asserted against, such Contributor by reason
174
+ of your accepting any such warranty or additional liability.
175
+
176
+ END OF TERMS AND CONDITIONS
177
+
178
+ APPENDIX: How to apply the Apache License to your work.
179
+
180
+ To apply the Apache License to your work, attach the following
181
+ boilerplate notice, with the fields enclosed by brackets "[]"
182
+ replaced with your own identifying information. (Don't include
183
+ the brackets!) The text should be enclosed in the appropriate
184
+ comment syntax for the file format. We also recommend that a
185
+ file or class name and description of purpose be included on the
186
+ same "printed page" as the copyright notice for easier
187
+ identification within third-party archives.
188
+
189
+ Copyright code / files produced 2022 - July 31.2023 Prediktor AS
190
+ Copyright code / files produced on or after August 1. 2023 Data Treehouse AS
191
+
192
+ Licensed under the Apache License, Version 2.0 (the "License");
193
+ you may not use this file except in compliance with the License.
194
+ You may obtain a copy of the License at
195
+
196
+ http://www.apache.org/licenses/LICENSE-2.0
197
+
198
+ Unless required by applicable law or agreed to in writing, software
199
+ distributed under the License is distributed on an "AS IS" BASIS,
200
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201
+ See the License for the specific language governing permissions and
202
+ limitations under the License.