docling_core-0.0.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of docling-core might be problematic.
- docling_core/__init__.py +6 -0
- docling_core/py.typed +0 -0
- docling_core/resources/schemas/doc/ANN.json +171 -0
- docling_core/resources/schemas/doc/DOC.json +300 -0
- docling_core/resources/schemas/doc/OCR-output.json +166 -0
- docling_core/resources/schemas/doc/RAW.json +158 -0
- docling_core/resources/schemas/generated/ccs_document_schema.json +1071 -0
- docling_core/resources/schemas/generated/minimal_document_schema_flat.json +1129 -0
- docling_core/resources/schemas/search/search_doc_mapping.json +104 -0
- docling_core/resources/schemas/search/search_doc_mapping_v2.json +256 -0
- docling_core/search/__init__.py +6 -0
- docling_core/search/json_schema_to_search_mapper.py +406 -0
- docling_core/search/mapping.py +29 -0
- docling_core/search/meta.py +93 -0
- docling_core/search/package.py +56 -0
- docling_core/types/__init__.py +25 -0
- docling_core/types/base.py +248 -0
- docling_core/types/doc/__init__.py +6 -0
- docling_core/types/doc/base.py +199 -0
- docling_core/types/doc/doc_ann.py +76 -0
- docling_core/types/doc/doc_ocr.py +83 -0
- docling_core/types/doc/doc_raw.py +187 -0
- docling_core/types/doc/document.py +393 -0
- docling_core/types/gen/__init__.py +6 -0
- docling_core/types/gen/generic.py +33 -0
- docling_core/types/nlp/__init__.py +6 -0
- docling_core/types/nlp/qa.py +74 -0
- docling_core/types/nlp/qa_labels.py +118 -0
- docling_core/types/rec/__init__.py +6 -0
- docling_core/types/rec/attribute.py +55 -0
- docling_core/types/rec/base.py +90 -0
- docling_core/types/rec/predicate.py +133 -0
- docling_core/types/rec/record.py +95 -0
- docling_core/types/rec/statement.py +41 -0
- docling_core/types/rec/subject.py +77 -0
- docling_core/utils/__init__.py +6 -0
- docling_core/utils/alias.py +27 -0
- docling_core/utils/ds_generate_docs.py +144 -0
- docling_core/utils/ds_generate_jsonschema.py +62 -0
- docling_core/utils/validate.py +86 -0
- docling_core/utils/validators.py +100 -0
- docling_core-0.0.1.dist-info/LICENSE +21 -0
- docling_core-0.0.1.dist-info/METADATA +133 -0
- docling_core-0.0.1.dist-info/RECORD +46 -0
- docling_core-0.0.1.dist-info/WHEEL +4 -0
- docling_core-0.0.1.dist-info/entry_points.txt +5 -0
@@ -0,0 +1,144 @@ docling_core/utils/ds_generate_docs.py
+#
+# Copyright IBM Corp. 2024 - 2024
+# SPDX-License-Identifier: MIT
+#
+
+"""Generate documentation of Docling types in HTML and Markdown.
+
+Example:
+    python docling_core/utils/ds_generate_docs.py /tmp/docling_core_files
+"""
+import argparse
+import glob
+import json
+import os
+from argparse import BooleanOptionalAction
+from pathlib import Path
+from shutil import rmtree
+from typing import Final
+
+from json_schema_for_humans.generate import generate_from_filename
+from json_schema_for_humans.generation_configuration import GenerationConfiguration
+
+from docling_core.utils.ds_generate_jsonschema import generate_json_schema
+
+MODELS: Final = ["Document", "Record", "Generic"]
+
+
+def _prepare_directory(folder: str, clean: bool = False) -> None:
+    """Create a directory or empty its content if it already exists.
+
+    Args:
+        folder: The name of the directory.
+        clean: Whether any existing content in the directory should be removed.
+    """
+    if os.path.isdir(folder):
+        if clean:
+            for path in Path(folder).glob("**/*"):
+                if path.is_file():
+                    path.unlink()
+                elif path.is_dir():
+                    rmtree(path)
+    else:
+        os.makedirs(folder, exist_ok=True)
+
+
+def generate_collection_jsonschema(folder: str):
+    """Generate the JSON schema of Docling collections and export them to a folder.
+
+    Args:
+        folder: The name of the directory.
+    """
+    for item in MODELS:
+        json_schema = generate_json_schema(item)
+        with open(
+            os.path.join(folder, f"{item}.json"), mode="w", encoding="utf8"
+        ) as json_file:
+            json.dump(json_schema, json_file, ensure_ascii=False, indent=2)
+
+
+def generate_collection_html(folder: str):
+    """Generate HTML pages documenting the data model of Docling collections.
+
+    The JSON schema files need to be in a folder and the generated HTML pages will be
+    written in the same folder.
+
+    Args:
+        folder: The name of the directory.
+    """
+    config = GenerationConfiguration(
+        template_name="js_offline",
+        expand_buttons=True,
+        link_to_reused_ref=False,
+        with_footer=False,
+    )
+
+    for doc_json in glob.glob(os.path.join(folder, "*.json")):
+        doc_html = doc_json.removesuffix(".json") + ".html"
+        generate_from_filename(doc_json, doc_html, config=config)
+
+
+def generate_collection_markdown(folder: str):
+    """Generate Markdown pages documenting the data model of Docling collections.
+
+    The JSON schema files need to be in a folder and the generated markdown pages will
+    be written in the same folder.
+
+    Args:
+        folder: The name of the directory.
+    """
+    config = GenerationConfiguration(
+        template_name="md_nested",
+        expand_buttons=True,
+        link_to_reused_ref=False,
+        with_footer=False,
+        show_toc=False,
+    )
+
+    for doc_json in glob.glob(os.path.join(folder, "*.json")):
+        doc_html = doc_json.removesuffix(".json") + ".md"
+        generate_from_filename(doc_json, doc_html, config=config)
+
+
+def main() -> None:
+    """Generate the JSON Schema of Docling collections and export documentation."""
+    argparser = argparse.ArgumentParser()
+    argparser.add_argument(
+        "directory",
+        help=(
+            "Directory to generate files. If it exists, any existing content will be"
+            " removed."
+        ),
+    )
+    argparser.add_argument(
+        "--clean",
+        help="Whether any existing content in directory should be removed.",
+        action=BooleanOptionalAction,
+        dest="clean",
+        default=False,
+        required=False,
+    )
+    argparser.add_argument(
+        "--template",
+        action="store",
+        default="markdown",
+        choices=["html", "markdown"],
+        type=str,
+        required=False,
+        dest="template",
+        help="Documentation template.",
+    )
+    args = argparser.parse_args()
+
+    _prepare_directory(args.directory, args.clean)
+
+    generate_collection_jsonschema(args.directory)
+
+    if args.template == "html":
+        generate_collection_html(args.directory)
+    elif args.template == "markdown":
+        generate_collection_markdown(args.directory)
+
+
+if __name__ == "__main__":
+    main()
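
For orientation, a minimal sketch of driving the helpers above from Python rather than the command line (assuming docling-core and its dependencies are installed; `/tmp/docling_core_docs` is an assumed scratch directory, not a path used by the package):

```python
# Roughly what the default CLI run does:
#   python docling_core/utils/ds_generate_docs.py /tmp/docling_core_docs
import os

from docling_core.utils.ds_generate_docs import (
    generate_collection_jsonschema,
    generate_collection_markdown,
)

folder = "/tmp/docling_core_docs"  # assumed scratch location
os.makedirs(folder, exist_ok=True)
generate_collection_jsonschema(folder)  # writes Document.json, Record.json, Generic.json
generate_collection_markdown(folder)    # renders one .md page per schema file
```
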
@@ -0,0 +1,62 @@ docling_core/utils/ds_generate_jsonschema.py
+#
+# Copyright IBM Corp. 2024 - 2024
+# SPDX-License-Identifier: MIT
+#
+
+"""Generate the JSON Schema of pydantic models and export them to files.
+
+Example:
+    python docling_core/utils/ds_generate_jsonschema.py doc.base.TableCell
+
+"""
+import argparse
+import json
+from typing import Any, Union
+
+from pydantic import BaseModel
+
+
+def _import_class(class_reference: str) -> Any:
+    components = class_reference.split(".")
+    module_ref = ".".join(components[:-1])
+    class_name = components[-1]
+    mod = __import__(module_ref, fromlist=[class_name])
+    class_type = getattr(mod, class_name)
+
+    return class_type
+
+
+def generate_json_schema(class_reference: str) -> Union[dict, None]:
+    """Generate a jsonable dict of a model's schema from DS data types.
+
+    Args:
+        class_reference: The reference to a class in 'docling_core.types'.
+
+    Returns:
+        A jsonable dict of the model's schema.
+    """
+    if not class_reference.startswith("docling_core.types."):
+        class_reference = "docling_core.types." + class_reference
+    class_type = _import_class(class_reference)
+    if issubclass(class_type, BaseModel):
+        return class_type.model_json_schema()
+    else:
+        return None
+
+
+def main() -> None:
+    """Print the JSON Schema of a model."""
+    argparser = argparse.ArgumentParser()
+    argparser.add_argument(
+        "class_ref", help="Class reference, e.g., doc.base.TableCell"
+    )
+    args = argparser.parse_args()
+
+    json_schema = generate_json_schema(args.class_ref)
+    print(
+        json.dumps(json_schema, ensure_ascii=False, indent=2).encode("utf-8").decode()
+    )
+
+
+if __name__ == "__main__":
+    main()
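
A short sketch of calling the same helper from Python instead of the CLI (assuming docling-core is installed); the `doc.base.TableCell` reference mirrors the docstring example above:

```python
import json

from docling_core.utils.ds_generate_jsonschema import generate_json_schema

# The helper prefixes "docling_core.types." automatically, so a short
# reference such as "doc.base.TableCell" resolves to the full model path.
schema = generate_json_schema("doc.base.TableCell")
if schema is not None:  # None is returned for classes that are not pydantic models
    print(json.dumps(schema, ensure_ascii=False, indent=2))
```
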
@@ -0,0 +1,86 @@ docling_core/utils/validate.py
+#
+# Copyright IBM Corp. 2024 - 2024
+# SPDX-License-Identifier: MIT
+#
+
+"""Validation of Document-related files against their data schemas."""
+import argparse
+import json
+import logging
+
+from docling_core.utils.validators import (
+    validate_ann_schema,
+    validate_ocr_schema,
+    validate_raw_schema,
+)
+
+logger = logging.getLogger("docling-core")
+
+
+def parse_arguments():
+    """Parse the arguments from the command line."""
+    argparser = argparse.ArgumentParser(description="validate example-file with schema")
+
+    argparser.add_argument(
+        "-f", "--format", required=True, help="format of the file [RAW, ANN, OCR]"
+    )
+
+    argparser.add_argument(
+        "-i", "--input-file", required=True, help="JSON filename to be validated"
+    )
+
+    pargs = argparser.parse_args()
+
+    return pargs.format, pargs.input_file
+
+
+def run():
+    """Run the validation of a file containing a Document."""
+    file_format, input_file = parse_arguments()
+
+    with open(input_file, "r") as fd:
+        file_ = json.load(fd)
+
+    result = (False, "Empty result")
+
+    if file_format == "RAW":
+        result = validate_raw_schema(file_)
+
+    elif file_format == "ANN":
+        result = validate_ann_schema(file_)
+
+    elif file_format == "OCR":
+        result = validate_ocr_schema(file_)
+
+    else:
+        logger.error("format of the file needs to be `RAW`, `ANN` or `OCR`")
+
+    if result[0]:
+        logger.info("Done!")
+    else:
+        logger.error("invalid schema: {}".format(result[1]))
+
+
+def main():
+    """Set up the environment and run the validation of a Document."""
+    logger.setLevel(logging.DEBUG)
+
+    # create console handler and set level to debug
+    ch = logging.StreamHandler()
+    ch.setLevel(logging.DEBUG)
+
+    # create formatter
+    formatter = logging.Formatter("[%(asctime)s] [%(levelname)s] %(message)s")
+
+    # add formatter to ch
+    ch.setFormatter(formatter)
+
+    # add ch to logger
+    # logger.addHandler(ch)
+
+    logging.basicConfig(handlers=[ch])
+    run()
+
+
+if __name__ == "__main__":
+    main()
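
The same checks can also be run without the CLI wrapper; a minimal sketch, assuming `sample_raw.json` is a hypothetical file in the RAW format:

```python
import json

from docling_core.utils.validators import validate_raw_schema

with open("sample_raw.json", encoding="utf-8") as fd:  # hypothetical input file
    data = json.load(fd)

is_valid, message = validate_raw_schema(data)  # returns (True, "All good!") on success
print(is_valid, message)
```
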
@@ -0,0 +1,100 @@ docling_core/utils/validators.py
+#
+# Copyright IBM Corp. 2024 - 2024
+# SPDX-License-Identifier: MIT
+#
+
+"""Module for custom type validators."""
+import json
+import logging
+from datetime import datetime
+from importlib import resources
+from typing import Hashable, TypeVar
+
+import jsonschema
+from pydantic_core import PydanticCustomError
+
+logger = logging.getLogger("docling-core")
+
+T = TypeVar("T", bound=Hashable)
+
+
+def validate_schema(file_: dict, schema: dict) -> tuple[bool, str]:
+    """Check whether the workflow is properly formatted JSON and contains valid keys.
+
+    Where possible, this also checks a few basic dependencies between properties, but
+    this functionality is limited.
+    """
+    try:
+        jsonschema.validate(file_, schema)
+        return (True, "All good!")
+
+    except jsonschema.ValidationError as err:
+        return (False, err.message)
+
+
+def validate_raw_schema(file_: dict) -> tuple[bool, str]:
+    """Validate a RAW file."""
+    logger.debug("validate RAW schema ... ")
+
+    schema_txt = (
+        resources.files("docling_core")
+        .joinpath("resources/schemas/doc/RAW.json")
+        .read_text("utf-8")
+    )
+    schema = json.loads(schema_txt)
+
+    return validate_schema(file_, schema)
+
+
+def validate_ann_schema(file_: dict) -> tuple[bool, str]:
+    """Validate an annotated (ANN) file."""
+    logger.debug("validate ANN schema ... ")
+
+    schema_txt = (
+        resources.files("docling_core")
+        .joinpath("resources/schemas/doc/ANN.json")
+        .read_text("utf-8")
+    )
+    schema = json.loads(schema_txt)
+
+    return validate_schema(file_, schema)
+
+
+def validate_ocr_schema(file_: dict) -> tuple[bool, str]:
+    """Validate an OCR file."""
+    logger.debug("validate OCR schema ... ")
+
+    schema_txt = (
+        resources.files("docling_core")
+        .joinpath("resources/schemas/doc/OCR-output.json")
+        .read_text("utf-8")
+    )
+    schema = json.loads(schema_txt)
+
+    return validate_schema(file_, schema)
+
+
+def validate_unique_list(v: list[T]) -> list[T]:
+    """Validate that a list has unique values.
+
+    Validator for list types, since pydantic V2 does not support the `unique_items`
+    parameter from V1. More information on
+    https://github.com/pydantic/pydantic-core/pull/820#issuecomment-1670475909
+
+    Args:
+        v: any list of hashable types
+
+    Returns:
+        The list, after checking for unique items.
+    """
+    if len(v) != len(set(v)):
+        raise PydanticCustomError("unique_list", "List must be unique")
+    return v
+
+
+def validate_datetime(v, handler):
+    """Validate that a value is a datetime or a non-numeric string."""
+    if type(v) is datetime or (type(v) is str and not v.isnumeric()):
+        return handler(v)
+    else:
+        raise ValueError("Value type must be a datetime or a non-numeric string")
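
For context, a hedged sketch of how such validators are typically attached to fields in pydantic v2 via `Annotated` types; the model and field names below are illustrative and not taken from docling-core:

```python
from datetime import datetime
from typing import Annotated

from pydantic import AfterValidator, BaseModel, WrapValidator

from docling_core.utils.validators import validate_datetime, validate_unique_list

# Reusable annotated types: the list validator runs after standard parsing,
# while the datetime validator wraps it (matching the (value, handler) signature).
UniqueStrList = Annotated[list[str], AfterValidator(validate_unique_list)]
StrictDatetime = Annotated[datetime, WrapValidator(validate_datetime)]


class Example(BaseModel):  # hypothetical model, for illustration only
    tags: UniqueStrList
    created: StrictDatetime


Example(tags=["a", "b"], created="2024-07-01T00:00:00")  # validates
# Example(tags=["a", "a"], created="2024")  # both fields would fail validation
```
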
@@ -0,0 +1,21 @@ docling_core-0.0.1.dist-info/LICENSE
+MIT License
+
+Copyright (c) 2024 International Business Machines
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
@@ -0,0 +1,133 @@ docling_core-0.0.1.dist-info/METADATA
+Metadata-Version: 2.1
+Name: docling-core
+Version: 0.0.1
+Summary: A python library to define and validate data types in Docling.
+Home-page: https://ds4sd.github.io/
+License: MIT
+Keywords: docling,discovery,etl,information retrieval,analytics,database,database schema,schema,JSON
+Author: Cesar Berrospi Ramis
+Author-email: ceb@zurich.ibm.com
+Maintainer: Cesar Berrospi Ramis
+Maintainer-email: ceb@zurich.ibm.com
+Requires-Python: >=3.9,<4.0
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Science/Research
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Natural Language :: English
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Topic :: Database
+Classifier: Topic :: Scientific/Engineering :: Information Analysis
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Typing :: Typed
+Requires-Dist: json-schema-for-humans (>=1.0.0,<2.0.0)
+Requires-Dist: jsonref (>=1.1.0,<2.0.0)
+Requires-Dist: jsonschema (>=4.16.0,<5.0.0)
+Requires-Dist: poetry (>=1.8.3,<2.0.0)
+Requires-Dist: pydantic (>=2.6.0,<3.0.0)
+Requires-Dist: pyproject-toml (>=0.0.10,<0.0.11)
+Project-URL: Repository, https://github.com/DS4SD/docling-core
+Description-Content-Type: text/markdown
+
+[](https://pypi.org/project/docling-core/)
+
+[](https://python-poetry.org/)
+[](https://github.com/psf/black)
+[](https://pycqa.github.io/isort/)
+[](https://mypy-lang.org/)
+[](https://pydantic.dev)
+[](https://github.com/pre-commit/pre-commit)
+[](https://opensource.org/licenses/MIT)
+
+# Docling Core
+
+Docling Core is a library that defines the data types in [Docling](https://ds4sd.github.io), leveraging pydantic models.
+
+## Installation
+
+Using [Poetry](https://python-poetry.org), create and activate a virtual environment.
+
+```
+poetry shell
+```
+
+Install the defined dependencies of the project.
+
+```
+poetry install
+```
+
+Test the installation by running the pytest suite.
+
+```
+poetry run pytest test
+```
+
+## Basic Usage
+
+- You can validate your JSON objects using the pydantic class definition.
+
+  ```py
+  from docling_core.types import Document
+
+  data_dict = {...}  # here the object you want to validate, as a dictionary
+  Document.model_validate(data_dict)
+
+  data_str = "..."  # here the object as a JSON string
+  Document.model_validate_json(data_str)
+  ```
+
+- You can generate the JSON schema of a model with the script `ds_generate_jsonschema`.
+
+  ```sh
+  # for the `Document` type
+  ds_generate_jsonschema Document
+
+  # for the `Record` type
+  ds_generate_jsonschema Record
+  ```
+
+## Documentation
+
+Docling supports 3 main data types:
+
+- **Document** for publications like books, articles, reports, or patents. When Docling converts an unstructured PDF document, the generated JSON follows this schema.
+  The Document type also models the metadata that may be attached to the converted document.
+  Check [Document](docs/Document.md) for the full JSON schema.
+- **Record** for structured database records, centered on an entity or _subject_ that is provided with a list of attributes.
+  Related to records, the statements can represent annotations on text by Natural Language Processing (NLP) tools.
+  Check [Record](docs/Record.md) for the full JSON schema.
+- **Generic** for any data representation, ensuring minimal configuration and maximum flexibility.
+  Check [Generic](docs/Generic.md) for the full JSON schema.
+
+The data schemas are defined using [pydantic](https://pydantic-docs.helpmanual.io/) models, which provide built-in processes to support the creation of data that adhere to those models.
+
+## Contributing
+
+Please read [Contributing to Docling Core](./CONTRIBUTING.md) for details.
+
+## References
+
+If you use `Docling Core` in your projects, please consider citing the following:
+
+```bib
+@software{Docling,
+  author = {Deep Search Team},
+  month = {7},
+  title = {{Docling}},
+  url = {https://github.com/DS4SD/docling},
+  version = {main},
+  year = {2024}
+}
+```
+
+## License
+
+The `Docling Core` codebase is under MIT license.
+For individual model usage, please refer to the model licenses found in the original packages.
+
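
Complementing the README's Basic Usage section above, a minimal sketch of exporting a model's JSON schema directly from the pydantic classes (assuming docling-core is installed; any of the three root models described in the Documentation section can be used in place of `Document`):

```python
import json

from docling_core.types import Document

# Export the JSON schema of the Document model; this is the same schema that
# the ds_generate_jsonschema script prints and the documentation pages render.
print(json.dumps(Document.model_json_schema(), indent=2))
```
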
@@ -0,0 +1,46 @@ docling_core-0.0.1.dist-info/RECORD
+docling_core/__init__.py,sha256=D0afxif-BMUrgx2cYk1cwxiwATRYaGXsIMk_z4nw1Vs,90
+docling_core/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+docling_core/resources/schemas/doc/ANN.json,sha256=04U5j-PU9m5w7IagJ_rHcAx7qUtLkUuaWZO9GuYHnTA,4202
+docling_core/resources/schemas/doc/DOC.json,sha256=9tVKpCqDGGq3074Nn5qlUCdTN-5k1Q0ri_scJblwnLE,6686
+docling_core/resources/schemas/doc/OCR-output.json,sha256=56A3g5dbUEzUYwEqhQhnoNi-Jgm8n8YMp3U5F23cZVM,3080
+docling_core/resources/schemas/doc/RAW.json,sha256=yN7bQacrV8d6a60eQKFNlxfHlA3OQuJsPnN8UJ6WCPM,4034
+docling_core/resources/schemas/generated/ccs_document_schema.json,sha256=G4yy3dkdJXDfcrj_qWhMOJ9DPEIKaYHMYK8Sk8aVwy0,22484
+docling_core/resources/schemas/generated/minimal_document_schema_flat.json,sha256=XP0gdvKdXhs-owtzlXOy7icvqmf6aFRATo1a2E1SqyI,29982
+docling_core/resources/schemas/search/search_doc_mapping.json,sha256=1QNzBJt_ZIokKbO-5kBjvoBeoTTY4EuHnIZvsOPtSD0,2015
+docling_core/resources/schemas/search/search_doc_mapping_v2.json,sha256=ZIr4tqhfX4S9QHG2Ei08QbzJbspPTf5duSwbdhzdUWA,6009
+docling_core/search/__init__.py,sha256=RucCUQjDlTZ7VfgbfnKDRBL-A-_Lcc2JWWeiVWHtoOY,147
+docling_core/search/json_schema_to_search_mapper.py,sha256=9crSFuSbcXrJej7j1rYWK6b0x37cHDmPF6va5j3gknA,12933
+docling_core/search/mapping.py,sha256=6rqG7LgYSeWmooKNEcRa5gFDLp1ZdzPqDGlwTA5gpOk,724
+docling_core/search/meta.py,sha256=wSurrsqdP1N3gQKx027fVdzVmc33a7Y6rPl-FClQvtA,3318
+docling_core/search/package.py,sha256=Q0_FAWFt71_g0ifcFkCuXEpVAgpVFiT9mOdzq1fqeDM,1824
+docling_core/types/__init__.py,sha256=6mrAEKRW85uHJwNQBufwjPcMWCjm3oocA6MaO4_NLgg,805
+docling_core/types/base.py,sha256=fNtfQ20NKa_RBNBWbq0DfO8o0zC1Cec8UAMu0Znsltk,8170
+docling_core/types/doc/__init__.py,sha256=Pzj_8rft6SJTVTCHgXRwHtuZjL6LK_6dcBWjikL9biY,125
+docling_core/types/doc/base.py,sha256=-j4vVs3JZuaUjm0fHIkLU9TD_4IZXQuGouLrddEAwPw,5508
+docling_core/types/doc/doc_ann.py,sha256=8pV2efUglw19jxl4_oqB__mSxjWvtGIcllyCdqA-b2s,1196
+docling_core/types/doc/doc_ocr.py,sha256=6PC0C-OczF-MyfgRxEI1xs3PWgNOzi7i2yEQbTqZz0I,1387
+docling_core/types/doc/doc_raw.py,sha256=Y69G6IiauNDaoT-5el4xo1ypWpnBJQ75akGGkCMTZSc,3888
+docling_core/types/doc/document.py,sha256=cMduCiFkPVCmXQehvNkXqXtDiXJJtB72o7_LZXz_S6I,12549
+docling_core/types/gen/__init__.py,sha256=C6TuCfvpSnSL5XDOFMcYHUY2-i08vvfOGRcdu6Af0pI,124
+docling_core/types/gen/generic.py,sha256=l4CZ4_Lb8ONG36WNJWbKX5hGKvTh_yU-hXp5hsm7uVU,844
+docling_core/types/nlp/__init__.py,sha256=hGcztAeVK7xkRBqRRvc4zbY4PGeJ0r0QrEsetnSx9nI,119
+docling_core/types/nlp/qa.py,sha256=TyZjubqkEoREv0YzmuLKlq4WW_TnJNj7BoBY1_r2a1E,2731
+docling_core/types/nlp/qa_labels.py,sha256=YLW2SYM9M1riktCUYctsg83Msb988NV2I754w4ibWzA,5880
+docling_core/types/rec/__init__.py,sha256=JVcjGAc7FsIryBmlE1syiOJYWhh3hpJIpp2o7VrX_vE,123
+docling_core/types/rec/attribute.py,sha256=PzPdaPhP5NWbFo8rYOoBl3Vfyx4zJUxN6ZpXl8UY7FM,1551
+docling_core/types/rec/base.py,sha256=jhTfInNGyB9NUw7o33PElrFGL80TqhU8MLcLZNZYj3E,3222
+docling_core/types/rec/predicate.py,sha256=4iDwXl9c4jzHTDIlRNE88yvDzKA9_od0xjPUUUP5IjI,3959
+docling_core/types/rec/record.py,sha256=r1QgPepwH3YjmMHlwwmeK00ZHEJnAsvyOMeXFY_D9_Q,2750
+docling_core/types/rec/statement.py,sha256=BXkuKBz0BL7eiowL_aaYxsz_WBLfR4hfgiqTby4TRnk,920
+docling_core/types/rec/subject.py,sha256=wX9qsihwDbR7ZNSzY3vQymxi0eN1nxxsonrhSZzsMhA,2565
+docling_core/utils/__init__.py,sha256=VauNNpWRHG0_ISKrsy5-gTxicrdQZSau6qMfuMl3iqk,120
+docling_core/utils/alias.py,sha256=B6Lqvss8CbaNARHLR4qSmNh9OkB6LvqTpxfsFmkLAFo,874
+docling_core/utils/ds_generate_docs.py,sha256=0xGBagdC_PGjyeHXYZo90VnVrSTMZgHb0SYhFa6X7bQ,4248
+docling_core/utils/ds_generate_jsonschema.py,sha256=EhNQutqWJFWuN-yl9UUPFZ7DJTvGqg54qBIvUMHTHdA,1647
+docling_core/utils/validate.py,sha256=3FmnxnKTDZC5J9OGxCL3U3DGRl0t0bBV1NcySXswdas,2031
+docling_core/utils/validators.py,sha256=fBdyWX4PvFh7o_d25ZTs4iwmeo75QTbrxsvXv2kXkTg,2777
+docling_core-0.0.1.dist-info/LICENSE,sha256=2M9-6EoQ1sxFztTOkXGAtwUDJvnWaAHdB9BYWVwGkIw,1087
+docling_core-0.0.1.dist-info/METADATA,sha256=WcF2o7nPSZFydFZOCxd8tPnEYS53c940KzWPxlRem_U,5174
+docling_core-0.0.1.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+docling_core-0.0.1.dist-info/entry_points.txt,sha256=XHhtJEkdUuLxXSNxLdFIzx_siQ3z2UFQEKp-P8VYAE4,189
+docling_core-0.0.1.dist-info/RECORD,,
|