iolanta-2.1.10-py3-none-any.whl → iolanta-2.1.11-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
iolanta/mcp/cli.py CHANGED
@@ -1,4 +1,3 @@
- from pathlib import Path
  from typing import Annotated
  
  from fastmcp import FastMCP
@@ -14,22 +13,7 @@ def render_uri(
      as_format: Annotated[str, 'Format to render as. Examples: `labeled-triple-set`, `mermaid`'],
  ) -> str:
      """Render a URI."""
-     result = render_and_return(uri, as_format)
-     return str(result)
- 
- 
- @mcp.prompt(description="How to author Linked Data with Iolanta")
- def ld_authoring_rules() -> str:
-     """How to author Linked Data with Iolanta."""
-     rules_path = Path(__file__).parent / 'prompts' / 'rules.md'
-     return rules_path.read_text()
- 
- 
- @mcp.prompt(description="How to author nanopublication assertions with Iolanta")
- def nanopublication_assertion_authoring_rules() -> str:
-     """How to author nanopublication assertions with Iolanta."""
-     rules_path = Path(__file__).parent / 'prompts' / 'nanopublication_assertion_authoring_rules.md'
-     return rules_path.read_text()
+     return str(render_and_return(uri, as_format))
  
  
  def app():
iolanta/parse_quads.py CHANGED
@@ -1,6 +1,5 @@
  import dataclasses
  import hashlib
- from types import MappingProxyType
  from typing import Iterable, Optional
  from urllib.parse import unquote
  
@@ -11,11 +10,7 @@ from rdflib.term import Node
  from iolanta.errors import UnresolvedIRI
  from iolanta.models import Quad
  from iolanta.namespaces import IOLANTA, META
- 
- NORMALIZE_TERMS_MAP = MappingProxyType({
-     URIRef(_url := 'http://www.w3.org/2002/07/owl'): URIRef(f'{_url}#'),
-     URIRef(_url := 'http://www.w3.org/2000/01/rdf-schema'): URIRef(f'{_url}#'),
- })
+ from iolanta.sparqlspace.redirects import apply_redirect
  
  
  def parse_term( # noqa: C901
@@ -35,8 +30,8 @@ def parse_term( # noqa: C901
      if term_type == 'literal':
          language = term.get('language')
  
-         if datatype := term.get('datatype'):
-             datatype = URIRef(datatype)
+         datatype_raw = term.get('datatype')
+         datatype = URIRef(datatype_raw) if datatype_raw else None
  
          if language and datatype:
              datatype = None
@@ -89,7 +84,7 @@ def _parse_quads_per_subgraph(
      )
  
  
- def parse_quads(
+ def parse_quads( # noqa: WPS210
      quads_document,
      graph: URIRef,
      blank_node_prefix: str = '',
@@ -132,12 +127,20 @@ def parse_quads(
      )
  
      for quad in quads: # noqa: WPS526
-         yield quad.replace(
-             subgraph_names | NORMALIZE_TERMS_MAP | {
-                 # To enable nanopub rendering
-                 URIRef('http://purl.org/nanopub/temp/np/'): graph,
-             },
-         ).normalize()
+         # Build replacement map with subgraph names and nanopub temp namespace
+         replacement_map = subgraph_names | {
+             # To enable nanopub rendering
+             URIRef('http://purl.org/nanopub/temp/np/'): graph,
+         }
+ 
+         # Apply redirects to all URIRefs in the replacement map
+         normalized_replacement_map = {
+             apply_redirect(key) if isinstance(key, URIRef) else key:
+                 apply_redirect(value_node) if isinstance(value_node, URIRef) else value_node # noqa: WPS110
+             for key, value_node in replacement_map.items()
+         }
+ 
+         yield quad.replace(normalized_replacement_map).normalize()
  
  
  def raise_if_term_is_qname(term_value: str):
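The rewritten loop first merges `subgraph_names` with the nanopub temp-namespace entry, then pushes every `URIRef` key and value through `apply_redirect`. A standalone sketch of that normalization step; the input URIs and the `subgraph_names` value are hypothetical, only `apply_redirect` and `URIRef` come from the code above:

```python
from rdflib import URIRef

from iolanta.sparqlspace.redirects import apply_redirect

# Hypothetical stand-ins for the values parse_quads receives.
graph = URIRef('https://w3id.org/np/example')
subgraph_names = {URIRef('https://lexvo.org/id/iso639-3/eng'): graph}

replacement_map = subgraph_names | {
    URIRef('http://purl.org/nanopub/temp/np/'): graph,
}

# Rewrite every URIRef key and value through the REDIRECTS table.
normalized_replacement_map = {
    apply_redirect(key) if isinstance(key, URIRef) else key:
        apply_redirect(value) if isinstance(value, URIRef) else value
    for key, value in replacement_map.items()
}

# The https://lexvo.org/id/... key is now http://lexvo.org/data/...
print(normalized_replacement_map)
```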
iolanta/sparqlspace/processor.py CHANGED
@@ -1,17 +1,14 @@
+ # noqa: WPS201, WPS202, WPS402
  import dataclasses
  import datetime
- import re
- import time
  from pathlib import Path
  from threading import Lock
- from types import MappingProxyType
  from typing import Any, Iterable, Mapping
  
  import diskcache
  import funcy
  import loguru
  import platformdirs
- import reasonable
  import requests
  import yaml_ld
  from nanopub import NanopubClient
@@ -35,55 +32,24 @@ from iolanta.namespaces import ( # noqa: WPS235
      DCTERMS,
      FOAF,
      IOLANTA,
-     LOCAL,
      META,
      OWL,
-     PROV,
      RDF,
      RDFS,
      VANN,
  )
- from iolanta.parse_quads import NORMALIZE_TERMS_MAP, parse_quads
+ from iolanta.parse_quads import parse_quads
+ from iolanta.sparqlspace.redirects import apply_redirect
  
  REASONING_ENABLED = True
  OWL_REASONING_ENABLED = False
  
  INFERENCE_DIR = Path(__file__).parent / 'inference'
- INDICES = [
+ INDICES = [ # noqa: WPS407
      URIRef('https://iolanta.tech/visualizations/index.yaml'),
  ]
  
  
- REDIRECTS = MappingProxyType({
-     # FIXME This is presently hardcoded; we need to
-     # - either find a way to resolve these URLs automatically,
-     # - or create a repository of those redirects online.
-     'http://purl.org/vocab/vann/': URIRef(
-         'https://vocab.org/vann/vann-vocab-20100607.rdf',
-     ),
-     URIRef(DC): URIRef(DCTERMS),
-     URIRef(RDF): URIRef(RDF),
-     URIRef(RDFS): URIRef(RDFS),
-     URIRef(OWL): URIRef(OWL),
- 
-     # Redirect FOAF namespace to GitHub mirror
-     URIRef('https?://xmlns.com/foaf/0.1/.+'): URIRef(
-         'https://raw.githubusercontent.com/foaf/foaf/refs/heads/master/xmlns.com/htdocs/foaf/0.1/index.rdf',
-     ),
-     URIRef('https://www.nanopub.org/nschema'): URIRef(
-         'https://www.nanopub.net/nschema#',
-     ),
-     URIRef('https://nanopub.org/nschema'): URIRef(
-         'https://nanopub.net/nschema#',
-     ),
-     URIRef(PROV): URIRef('https://www.w3.org/ns/prov-o'),
- 
-     # Convert lexvo.org/id URLs to lexvo.org/data URLs
-     r'https://lexvo\.org/id/(.+)': r'http://lexvo.org/data/\1',
-     r'https://www\.lexinfo\.net/(.+)': r'http://www.lexinfo.net/\1',
- })
- 
- 
  @diskcache.Cache(
      directory=str(
          platformdirs.user_cache_path(
@@ -116,20 +82,20 @@ def find_retractions_for(nanopublication: URIRef) -> set[URIRef]:
  def _extract_from_mapping( # noqa: WPS213
      algebra: Mapping[str, Any],
  ) -> Iterable[URIRef | Variable]:
-     match algebra.name:
+     match algebra.name: # noqa: WPS242
          case 'SelectQuery' | 'AskQuery' | 'Project' | 'Distinct' | 'Slice':
-             yield from extract_mentioned_urls(algebra['p'])
+             yield from extract_mentioned_urls(algebra['p']) # noqa: WPS226
  
          case 'BGP':
              yield from [ # noqa: WPS353, WPS221
                  term
                  for triple in algebra['triples']
                  for term in triple
-                 if isinstance(term, URIRef)
+                 if isinstance(term, (URIRef, Variable))
              ]
  
          case 'Filter' | 'UnaryNot' | 'OrderCondition':
-             yield from extract_mentioned_urls(algebra['expr']) # noqa: WPS204
+             yield from extract_mentioned_urls(algebra['expr']) # noqa: WPS204, WPS226
  
          case built_in if built_in.startswith('Builtin_'):
              yield from extract_mentioned_urls(algebra['arg'])
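`_extract_from_mapping` pattern-matches on rdflib's SPARQL algebra tree, whose nodes are `CompValue` mappings exposing a `.name` plus dict-style children such as `p`, `expr`, and `triples`. A quick sketch of what that tree looks like for a trivial query, assuming only rdflib (the query text is illustrative; `SelectQuery`, `Project`, and `BGP` are rdflib's own node names):

```python
from rdflib.plugins.sparql import prepareQuery

query = prepareQuery(
    'SELECT ?person WHERE { ?person a <http://xmlns.com/foaf/0.1/Person> }',
)

algebra = query.algebra
print(algebra.name)            # SelectQuery
print(algebra['p'].name)       # Project
print(algebra['p']['p'].name)  # BGP
print(algebra['p']['p']['triples'])
# One Variable and two URIRefs per triple; these are exactly the terms
# the BGP branch above yields now that Variable instances are included.
```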
@@ -208,7 +174,10 @@ def normalize_term(term: Node) -> Node:
      * A dirty hack;
      * Based on hard code.
      """
-     return NORMALIZE_TERMS_MAP.get(term, term)
+     if isinstance(term, URIRef):
+         return apply_redirect(term)
+ 
+     return term
  
  
  def resolve_variables(
@@ -258,7 +227,7 @@ class Skipped:
  LoadResult = Loaded | Skipped
  
  
- def _extract_nanopublication_uris(
+ def _extract_nanopublication_uris( # noqa: WPS231
      algebra: CompValue,
  ) -> Iterable[URIRef]:
      """Extract nanopublications to get retracting information for."""
@@ -293,34 +262,6 @@ def _extract_nanopublication_uris(
      )
  
  
- def apply_redirect(source: URIRef) -> URIRef: # noqa: WPS210
-     """
-     Rewrite the URL using regex patterns and group substitutions.
- 
-     For each pattern in REDIRECTS:
-     - If the pattern matches the source URI
-     - Replace the source with the destination, substituting any regex groups
-     """
-     source_str = str(source)
- 
-     for pattern, destination in REDIRECTS.items():
-         pattern_str = str(pattern)
-         destination_str = str(destination)
- 
-         match = re.match(pattern_str, source_str)
-         if match:
-             # Replace any group references in the destination
-             # (like \1, \2, etc.)
-             redirected_uri = re.sub(
-                 pattern_str,
-                 destination_str,
-                 source_str,
-             )
-             return URIRef(redirected_uri)
- 
-     return source
- 
- 
  def extract_triples(algebra: CompValue) -> Iterable[tuple[Node, Node, Node]]:
      """Extract triples from a SPARQL query algebra instance."""
      if isinstance(algebra, CompValue):
@@ -419,7 +360,7 @@ class GlobalSPARQLProcessor(Processor): # noqa: WPS338, WPS214
  
          self.graph._indices_loaded = True
  
-     def query( # noqa: WPS211, WPS210, WPS231, C901
+     def query( # noqa: WPS211, WPS210, WPS231, WPS213, C901
          self,
          strOrQuery,
          initBindings=None,
@@ -471,8 +412,9 @@ class GlobalSPARQLProcessor(Processor): # noqa: WPS338, WPS214
  
          # Run inference if there's new data since last inference run
          # (after URLs are loaded so inference can use the loaded data)
-         if self.graph.last_not_inferred_source is not None:
-             self.logger.debug(f'Running inference, last_not_inferred_source: {self.graph.last_not_inferred_source}')
+         if self.graph.last_not_inferred_source is not None: # noqa: WPS504
+             last_source = self.graph.last_not_inferred_source
+             self.logger.debug(f'Running inference, last_not_inferred_source: {last_source}') # noqa: WPS237
              self._run_inference()
          else:
              self.logger.debug('Skipping inference, last_not_inferred_source is None')
@@ -493,7 +435,7 @@ class GlobalSPARQLProcessor(Processor): # noqa: WPS338, WPS214
  
          for row in bindings:
              break
-         for _, maybe_iri in row.items():
+         for _, maybe_iri in row.items(): # noqa: WPS427
              if (
                  isinstance(maybe_iri, URIRef)
                  and isinstance(self.load(maybe_iri), Loaded)
@@ -584,7 +526,7 @@ class GlobalSPARQLProcessor(Processor): # noqa: WPS338, WPS214
          # FIXME This is definitely inefficient. However, python-yaml-ld caches
          # the document, so the performance overhead is not super high.
          try:
-             _resolved_source = yaml_ld.load_document(source)['documentUrl']
+             resolved_source = yaml_ld.load_document(source)['documentUrl']
          except NotFound as not_found:
              self.logger.info(f'{not_found.path} | 404 Not Found')
              namespaces = [RDF, RDFS, OWL, FOAF, DC, VANN]
@@ -627,15 +569,15 @@ class GlobalSPARQLProcessor(Processor): # noqa: WPS338, WPS214
  
              return Loaded()
  
-         if _resolved_source:
-             _resolved_source_uri_ref = URIRef(_resolved_source)
-             if _resolved_source_uri_ref != URIRef(source):
+         if resolved_source:
+             resolved_source_uri_ref = URIRef(resolved_source)
+             if resolved_source_uri_ref != URIRef(source):
                  self.graph.add((
                      source_uri,
                      IOLANTA['redirects-to'],
-                     _resolved_source_uri_ref,
+                     resolved_source_uri_ref,
                  ))
-             source = _resolved_source
+             source = resolved_source
  
          self._mark_as_loaded(source_uri)
@@ -700,7 +642,7 @@ class GlobalSPARQLProcessor(Processor): # noqa: WPS338, WPS214
  
          return term
  
-     def _run_inference(self): # noqa: WPS231
+     def _run_inference(self): # noqa: WPS231, WPS220, WPS210
          """
          Run inference queries from the inference directory.
  
@@ -720,21 +662,21 @@ class GlobalSPARQLProcessor(Processor): # noqa: WPS338, WPS214
  
              # Read and execute the CONSTRUCT query
              query_text = inference_file.read_text()
-             result = self.graph.query(query_text)
+             query_result = self.graph.query(query_text) # noqa: WPS110
  
              # CONSTRUCT queries return a SPARQLResult with a graph attribute
-             result_graph = result.get('graph') if isinstance(result, dict) else result.graph
+             result_graph = query_result.get('graph') if isinstance(query_result, dict) else query_result.graph
              self.logger.debug(f'Inference {filename}: result_graph is {result_graph}, type: {type(result_graph)}')
-             if result_graph is not None:
+             if result_graph is not None: # noqa: WPS504
                  inferred_quads = [
-                     (s, p, o, inference_graph)
-                     for s, p, o in result_graph
+                     (s, p, o, inference_graph) # noqa: WPS111
+                     for s, p, o in result_graph # noqa: WPS111
                  ]
                  self.logger.debug(f'Inference {filename}: generated {len(inferred_quads)} quads')
  
                  if inferred_quads:
-                     self.graph.addN(inferred_quads)
-                     self.logger.info(
+                     self.graph.addN(inferred_quads) # noqa: WPS220
+                     self.logger.info( # noqa: WPS220
                          'Inference {filename}: added {count} triples',
                          filename=filename,
                          count=len(inferred_quads),
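The inference loop leans on standard rdflib behavior: `Graph.query` on a CONSTRUCT query returns a `Result` whose `.graph` attribute holds the constructed triples, which are then re-tagged with a named graph and bulk-inserted via `addN`. A minimal sketch of that round trip, assuming only rdflib; the query and graph identifiers here are illustrative, not the package's actual inference files:

```python
from rdflib import Dataset, Graph, Literal, Namespace, URIRef

FOAF = Namespace('http://xmlns.com/foaf/0.1/')

source = Graph()
source.add((
    URIRef('https://orcid.org/0009-0001-8740-4213'),
    FOAF.name,
    Literal('Anatoly Scherbakov'),
))

# A CONSTRUCT query returns a Result whose .graph holds the new triples.
query_result = source.query('''
    CONSTRUCT { ?person <http://www.w3.org/2000/01/rdf-schema#label> ?name }
    WHERE { ?person <http://xmlns.com/foaf/0.1/name> ?name }
''')

# Re-tag each constructed triple with a named inference graph and
# bulk-insert, mirroring what _run_inference does with addN().
dataset = Dataset()
inference_graph = URIRef('https://iolanta.tech/inference/example')
dataset.addN(
    (s, p, o, inference_graph)
    for s, p, o in query_result.graph
)

for quad in dataset.quads((None, None, None, None)):
    print(quad)
```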
iolanta/sparqlspace/redirects.py ADDED
@@ -0,0 +1,79 @@
+ import re
+ from types import MappingProxyType
+ 
+ from rdflib import URIRef
+ 
+ from iolanta.namespaces import (
+     DC,
+     DCTERMS,
+     FOAF,
+     OWL,
+     PROV,
+     RDF,
+     RDFS,
+     VANN,
+ )
+ 
+ REDIRECTS = MappingProxyType({
+     # FIXME This is presently hardcoded; we need to
+     # - either find a way to resolve these URLs automatically,
+     # - or create a repository of those redirects online.
+     'http://purl.org/vocab/vann/': URIRef(
+         'https://vocab.org/vann/vann-vocab-20100607.rdf',
+     ),
+     URIRef(str(DC)): URIRef(str(DCTERMS)),
+     URIRef(str(RDF)): URIRef(str(RDF)),
+     URIRef(str(RDFS)): URIRef(str(RDFS)),
+     URIRef(str(OWL)): URIRef(str(OWL)),
+ 
+     # Add # fragment to OWL and RDFS namespace URIs
+     # (fixes bug reported at https://stackoverflow.com/q/78934864/1245471)
+     URIRef('http://www.w3.org/2002/07/owl'): URIRef('http://www.w3.org/2002/07/owl#'),
+     URIRef('http://www.w3.org/2000/01/rdf-schema'): URIRef('http://www.w3.org/2000/01/rdf-schema#'),
+ 
+     # Redirect FOAF namespace to GitHub mirror
+     URIRef('https?://xmlns.com/foaf/0.1/.+'): URIRef(
+         'https://raw.githubusercontent.com/foaf/foaf/refs/heads/master/xmlns.com/htdocs/foaf/0.1/index.rdf',
+     ),
+     URIRef('https://www.nanopub.org/nschema'): URIRef(
+         'https://www.nanopub.net/nschema#',
+     ),
+     URIRef('https://nanopub.org/nschema'): URIRef(
+         'https://nanopub.net/nschema#',
+     ),
+ 
+     # Convert lexvo.org/id URLs to lexvo.org/data URLs
+     r'http://lexvo\.org/id/(.+)': r'http://lexvo.org/data/\1',
+     r'https://lexvo\.org/id/(.+)': r'http://lexvo.org/data/\1',
+     r'https://www\.lexinfo\.net/(.+)': r'http://www.lexinfo.net/\1',
+     # Convert Wikidata https:// to http:// (Wikidata JSON-LD uses http:// URIs)
+     r'https://www\.wikidata\.org/entity/(.+)': r'http://www.wikidata.org/entity/\1',
+ })
+ 
+ 
+ def apply_redirect(source: URIRef) -> URIRef: # noqa: WPS210
+     """
+     Rewrite the URL using regex patterns and group substitutions.
+ 
+     For each pattern in REDIRECTS:
+     - If the pattern matches the source URI
+     - Replace the source with the destination, substituting any regex groups
+     """
+     source_str = str(source)
+ 
+     for pattern, destination in REDIRECTS.items():
+         pattern_str = str(pattern)
+         destination_str = str(destination)
+ 
+         match = re.match(pattern_str, source_str)
+         if match:
+             # Replace any group references in the destination
+             # (like \1, \2, etc.)
+             redirected_uri = re.sub(
+                 pattern_str,
+                 destination_str,
+                 source_str,
+             )
+             return URIRef(redirected_uri)
+ 
+     return source
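Since `apply_redirect` tries patterns with `re.match` in table order, its behavior follows directly from `REDIRECTS`. A quick sanity check, assuming only rdflib and the new module; expected outputs are shown as comments:

```python
from rdflib import URIRef

from iolanta.sparqlspace.redirects import apply_redirect

# Regex redirect with a captured group: lexvo /id/ pages map to /data/.
print(apply_redirect(URIRef('https://lexvo.org/id/iso639-3/eng')))
# http://lexvo.org/data/iso639-3/eng

# Plain-prefix redirect: the OWL namespace gains its # fragment.
print(apply_redirect(URIRef('http://www.w3.org/2002/07/owl')))
# http://www.w3.org/2002/07/owl#

# URIs that match no pattern pass through unchanged.
print(apply_redirect(URIRef('https://iolanta.tech/')))
# https://iolanta.tech/
```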
iolanta-2.1.10.dist-info/METADATA → iolanta-2.1.11.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: iolanta
- Version: 2.1.10
+ Version: 2.1.11
  Summary: Semantic Web browser
  License: MIT
  Author: Anatoly Scherbakov
iolanta-2.1.10.dist-info/RECORD → iolanta-2.1.11.dist-info/RECORD RENAMED
@@ -115,9 +115,7 @@ iolanta/labeled_triple_set/data/labeled_triple_set.yamlld,sha256=P3oAPSPsirpbcRX
  iolanta/labeled_triple_set/labeled_triple_set.py,sha256=o4IgvTvPd0mzBtpgHYd4n1xpujYdAvWBr6gIYwp5vnA,4061
  iolanta/labeled_triple_set/sparql/triples.sparql,sha256=VsCmYN5AX7jSIiFm-SqLcRcOvUVj8yyZI4PSzKROtQw,82
  iolanta/mcp/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- iolanta/mcp/cli.py,sha256=wmtJgeTijIoDe0C-3uGekVfYlhj2jqL30p_u_NuJzR0,1116
- iolanta/mcp/prompts/nanopublication_assertion_authoring_rules.md,sha256=Z5YL9YwcLHCqA4zQK2ziSQYgyR4SlhUO9LV_zHGf0JQ,2643
- iolanta/mcp/prompts/rules.md,sha256=LddpoNfUACfvWBNJ_ArAyJfP2zlQstlXS2QA6GCl9QI,4651
+ iolanta/mcp/cli.py,sha256=wlDandYo_VoMIg9oDGPJsIsMmiPo4j4-RBytIKWDtEI,484
  iolanta/mermaid/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  iolanta/mermaid/facet.py,sha256=8mLOBrzlY84jiWhtJNY5BkXPDpRhL2OB1LUwwwNS1X0,4065
  iolanta/mermaid/mermaid.yamlld,sha256=G_8lqNfs6S7pz9koDC8xAaic4EaHsYnFLgexDVyMuCU,552
@@ -128,7 +126,7 @@ iolanta/mermaid/sparql/subgraphs.sparql,sha256=VuoOYr_ZtKXXRrBpAEJek0mBRzR9EV-Kn
  iolanta/models.py,sha256=2VrJGQE1YXbbVB1K5McCXe2CLAlzOUhA8FvbRI10nCc,3131
  iolanta/namespaces.py,sha256=S4fSjWrL33jylItDf6y2_CIJ4B-RQXDhBsZkB-SV9mw,1107
  iolanta/node_to_qname.py,sha256=a82_qpgT87cbekY_76tTkl4Z-6Rz6am4UGIQChUf9Y0,794
- iolanta/parse_quads.py,sha256=X-3hQAFzRD9U8KCuZMQTVOAapJR4OHkPoRbgJYiVbnk,4539
+ iolanta/parse_quads.py,sha256=ZYohKUh4WN3emq5xr6Sgf5gIw3_NFoUgYTZ3DOL-rQY,4876
  iolanta/plugin.py,sha256=MSxpuOIx93AgBahfS8bYh31MEgcwtUSQhj4Js7fgdSI,1096
  iolanta/query_result.py,sha256=VLLBkewUEymtzfB0jeIeRE3Np6pAgo959RPgNsEmiq8,1545
  iolanta/reformat_blank_nodes.py,sha256=MAVcXusUioKzAoTEHAMume5Gt9vBEpxJGrngqFzmkJI,712
@@ -141,12 +139,13 @@ iolanta/sparqlspace/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSu
  iolanta/sparqlspace/cli.py,sha256=pb6q03lzrU8OaZ4A3QAQEmaFYcQ_-sHUrPhRs6GE88w,1590
  iolanta/sparqlspace/inference/wikidata-prop-label.sparql,sha256=JYLAs28Z3a77cMcv44aZplwwrdqB-yshZn1dDZmRFAU,250
  iolanta/sparqlspace/inference/wikidata-statement-label.sparql,sha256=_Dp9jKCpCp2pLk0uacNUhUvvQ2Hov-WiMFprtuYTRyY,759
- iolanta/sparqlspace/processor.py,sha256=IsN6jc9HAE1B2TgZ-Wkq05WZMPmAvqajxu4peGGQwqo,25554
+ iolanta/sparqlspace/processor.py,sha256=Iwh2ISniYkQnVQ5ALYtGI8PXdIKNOHjvwCf8azHETZo,23929
+ iolanta/sparqlspace/redirects.py,sha256=ZYLb8rsjk9JG-mT5OQAzELer7okev34Le2VnACwpYzM,2657
  iolanta/sparqlspace/sparqlspace.py,sha256=Y8_ZPXwuGEXbEes6XQjaQWA2Zv9y8SWxMPDFdqVBGFo,796
  iolanta/widgets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  iolanta/widgets/description.py,sha256=98Qd3FwT9r8sYqKjl9ZEptaVX9jJ2ULWf0uy3j52p5o,800
  iolanta/widgets/mixin.py,sha256=nDRCOc-gizCf1a5DAcYs4hW8eZEd6pHBPFsfm0ncv7E,251
- iolanta-2.1.10.dist-info/METADATA,sha256=6WI92t1Tf07VRH37C_S7Q-iB_PrDEyfxfik2ESuxBJI,2317
- iolanta-2.1.10.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
- iolanta-2.1.10.dist-info/entry_points.txt,sha256=Vu0W4D6H74HsTICvD8CDB1wYs6XNSyu55EZVXMo4H84,1718
- iolanta-2.1.10.dist-info/RECORD,,
+ iolanta-2.1.11.dist-info/METADATA,sha256=rq_HcxuelqK4s0REGfDy9_MaFfAfK5Lt5XTf_iWcc2s,2317
+ iolanta-2.1.11.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
+ iolanta-2.1.11.dist-info/entry_points.txt,sha256=Vu0W4D6H74HsTICvD8CDB1wYs6XNSyu55EZVXMo4H84,1718
+ iolanta-2.1.11.dist-info/RECORD,,
iolanta/mcp/prompts/nanopublication_assertion_authoring_rules.md DELETED
@@ -1,63 +0,0 @@
- # How to author Nanopublication Assertions with Iolanta
- 
- ## What are Nanopublications?
- 
- Nanopublications are a special type of Linked Data that contain structured knowledge statements with three main components:
- 
- 1. **Assertion** - The core knowledge claim or statement
- 2. **Provenance** - Information about how the assertion was derived (sources, methods, contributors)
- 3. **Publication Info** - Metadata about the nanopublication itself (author, creation date, etc.)
- 
- Nanopublications are cryptographically signed and published in the decentralized **Nanopublication Registry**, making them:
- - Irrevocably attributed to the author
- - Protected from tampering
- - Referenceable by unique IDs
- - Machine readable and reusable
- - Decentralized and persistent
- 
- ## Assertion-Only Workflow
- 
- **NP00.** Nanopublication assertion graphs must also satisfy the general rules for Linked Data authoring and workflow. That is provided in the MCP prompt named `ld_authoring_rules`.
- 
- **NP01.** We focus only on writing the **assertion graph** of the nanopublication.
- 
- **NP02.** Follow the standard YAML-LD authoring rules (R00-R23) for creating the assertion.
- 
- **NP03.** The assertion should express a single, clear knowledge claim that can stand alone.
- 
- **NP04.** Use proper Linked Data vocabularies and resolvable URIs for all entities and relationships.
- 
- **NP05.** After the assertion graph is ready, follow this workflow:
- 
- ```bash
- # Expand the YAML-LD to JSON-LD
- pyld expand assertion.yamlld > expanded.jsonld
- 
- # Create nanopublication from the assertion
- np create from-assertion expanded.jsonld > nanopublication.trig
- 
- # Publish the nanopublication (when ready)
- np publish nanopublication.trig
- ```
- 
- **NP06.** The `pyld expand` command converts YAML-LD to expanded JSON-LD format.
- 
- **NP07.** The `np create from-assertion` command automatically generates the provenance and publication info components.
- 
- **NP08.** The `np publish` command cryptographically signs and publishes the nanopublication to the registry.
- 
- **NP09.** Use the Iolanta MCP `render_uri` tool to validate the assertion before proceeding with the workflow.
- 
- **NP10.** Save Mermaid visualizations of the assertion for documentation purposes.
- 
- ## Best Practices for Assertions
- 
- **NP11.** Keep assertions focused on a single, verifiable claim.
- 
- **NP12.** Use canonical URIs from established knowledge bases (DBpedia, Wikidata, etc.).
- 
- **NP13.** Include sufficient context and metadata to make the assertion meaningful.
- 
- **NP14.** Ensure the assertion can be understood independently of external context.
- 
- **NP15.** Use standard vocabularies and well-established ontologies for relationships.
iolanta/mcp/prompts/rules.md DELETED
@@ -1,83 +0,0 @@
- # How to author Linked Data with Iolanta
- 
- **R00.** Follow this YAML-LD authoring workflow:
- - Draft YAML-LD from user text
- - Use the Iolanta MCP `render_uri` tool with `as_format: labeled-triple-set` to validate and get feedback
- - Address the feedback, correct the YAML-LD document appropriately
- - **After each change to the YAML-LD file, re-run the validation to check for new feedback**
- 
- **R01.** Acceptance Criteria:
- 
- - The document fits the original statement the user wanted to express;
- - No negative feedback is received.
- 
- **R02.** Use YAML-LD format, which is JSON-LD in YAML syntax, for writing Linked Data.
- 
- **R03.** Always quote the @ character in YAML since it's reserved. Use `"@id":` instead of `@id:`.
- 
- **R04.** Prefer YAML-LD Convenience Context which maps @-keywords to $-keywords that don't need quoting: `"@type"` → `$type`, `"@id"` → `$id`, `"@graph"` → `$graph`.
- 
- **R05.** Use the dollar-convenience context with `@import` syntax instead of array syntax. This provides cleaner, more readable YAML-LD documents.
- 
- Example:
- ```yaml
- "@context":
-   "@import": "https://json-ld.org/contexts/dollar-convenience.jsonld"
- 
-   schema: "https://schema.org/"
-   wd: "https://www.wikidata.org/entity/"
- 
-   author:
-     "@id": "https://schema.org/author"
-     "@type": "@id"
- ```
- 
- Instead of:
- ```yaml
- "@context":
-   - "https://json-ld.org/contexts/dollar-convenience.jsonld"
-   - schema: "https://schema.org/"
-   - wd: "https://www.wikidata.org/entity/"
-   - author:
-       "@id": "https://schema.org/author"
-       "@type": "@id"
- ```
- 
- **R06.** Reduce quoting when not required by YAML syntax rules.
- 
- **R07.** Do not use mock URLs like `https://example.org`. Use resolvable URLs that preferably point to Linked Data.
- 
- **R08.** Use URIs that convey meaning and are renderable with Linked Data visualization tools. Search for appropriate URIs from sources like DBPedia or Wikidata.
- 
- **R09.** Use the Iolanta MCP `render_uri` tool with `as_format: mermaid` to generate Mermaid graph visualizations of Linked Data. If the user asks, you can save them to `.mmd` files for preview and documentation purposes.
- 
- **R10.** For language tags, use YAML-LD syntax: `rdfs:label: { $value: "text", $language: "lang" }` instead of Turtle syntax `"text"@lang`.
- 
- **R11.** Do not attach labels to external URIs that are expected to return Linked Data. Iolanta will fetch those URIs and render labels from the fetched data.
- 
- **R12.** Use `"@type": "@id"` in the context to coerce properties to IRIs instead of using `$id` wrappers in the document body.
- 
- **R13.** For software packages, use `schema:SoftwareApplication` as the main type rather than `codemeta:SoftwareSourceCode`.
- 
- **R14.** Use Wikidata entities for programming languages (e.g., `https://www.wikidata.org/entity/Q28865` for Python) instead of string literals.
- 
- **R15.** Use proper ORCID URIs for authors (e.g., `https://orcid.org/0009-0001-8740-4213`) and coerce them to IRIs in the context.
- 
- **R16.** For tools that provide both library and CLI functionality, classify as `schema:Tool` with `schema:applicationSubCategory: Command-line tool`.
- 
- **R17.** Use real, resolvable repository URLs (e.g., `https://github.com/iolanta-tech/python-yaml-ld`) instead of placeholder URLs.
- 
- **R18.** Include comprehensive metadata: name, description, author, license, programming language, version, repository links, and application category.
- 
- **R19.** Use standard vocabularies: schema.org, RDFS, RDF, DCTerms, FOAF, and CodeMeta when appropriate.
- 
- **R20.** Validate Linked Data using the Iolanta MCP `render_uri` tool with `as_format: labeled-triple-set` to check for URL-as-literal issues and proper IRI handling.
- 
- **R21.** Do not add `rdfs:label` to external URIs that are expected to return Linked Data. If a URI does not exist or cannot be resolved, do not mask this fact by adding labels. Instead, use a different, existing URI or document the issue with a comment.
- 
- **R22.** Define URI coercion in the context using `"@type": "@id"` rather than using `$id` wrappers in the document body. This keeps the document body clean and readable while ensuring proper URI handling.
- 
- **R23.** When defining local shortcuts for URIs in the context, use dashed-case (e.g., `appears-in`, `named-after`) instead of camelCase (e.g., `appearsIn`, `namedAfter`). This improves readability and follows common YAML conventions.
- 
- **R24.** Do not rely upon `owl:sameAs` or `schema:sameAs` to express identity relationships. This necessitates OWL inference at the side of the reader, which is performance-taxing and tends to create conflicts. Instead, use direct URIs for entities without relying on sameAs statements for identity.
- 