dt-extensions-sdk 1.2.2__py3-none-any.whl → 1.2.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. {dt_extensions_sdk-1.2.2.dist-info → dt_extensions_sdk-1.2.4.dist-info}/METADATA +1 -1
  2. dt_extensions_sdk-1.2.4.dist-info/RECORD +34 -0
  3. {dt_extensions_sdk-1.2.2.dist-info → dt_extensions_sdk-1.2.4.dist-info}/WHEEL +1 -1
  4. {dt_extensions_sdk-1.2.2.dist-info → dt_extensions_sdk-1.2.4.dist-info}/licenses/LICENSE.txt +9 -9
  5. dynatrace_extension/__about__.py +5 -5
  6. dynatrace_extension/__init__.py +27 -27
  7. dynatrace_extension/cli/__init__.py +5 -5
  8. dynatrace_extension/cli/create/__init__.py +1 -1
  9. dynatrace_extension/cli/create/create.py +76 -76
  10. dynatrace_extension/cli/create/extension_template/.gitignore.template +160 -160
  11. dynatrace_extension/cli/create/extension_template/README.md.template +33 -33
  12. dynatrace_extension/cli/create/extension_template/activation.json.template +15 -15
  13. dynatrace_extension/cli/create/extension_template/extension/activationSchema.json.template +118 -118
  14. dynatrace_extension/cli/create/extension_template/extension/extension.yaml.template +17 -17
  15. dynatrace_extension/cli/create/extension_template/extension_name/__main__.py.template +43 -43
  16. dynatrace_extension/cli/create/extension_template/setup.py.template +28 -28
  17. dynatrace_extension/cli/main.py +437 -437
  18. dynatrace_extension/cli/schema.py +129 -129
  19. dynatrace_extension/sdk/__init__.py +3 -3
  20. dynatrace_extension/sdk/activation.py +43 -43
  21. dynatrace_extension/sdk/callback.py +145 -135
  22. dynatrace_extension/sdk/communication.py +483 -483
  23. dynatrace_extension/sdk/event.py +19 -19
  24. dynatrace_extension/sdk/extension.py +1070 -1069
  25. dynatrace_extension/sdk/helper.py +191 -191
  26. dynatrace_extension/sdk/metric.py +118 -118
  27. dynatrace_extension/sdk/runtime.py +67 -67
  28. dynatrace_extension/sdk/snapshot.py +198 -198
  29. dynatrace_extension/sdk/vendor/mureq/LICENSE +13 -13
  30. dynatrace_extension/sdk/vendor/mureq/mureq.py +448 -448
  31. dt_extensions_sdk-1.2.2.dist-info/RECORD +0 -34
  32. {dt_extensions_sdk-1.2.2.dist-info → dt_extensions_sdk-1.2.4.dist-info}/entry_points.txt +0 -0
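Most entries above show every line removed and re-added; the released version string is presumably carried by dynatrace_extension/__about__.py. A quick way to confirm which release is actually installed, using only the standard library (the distribution name is taken from the wheel filename above):

```python
from importlib.metadata import version

# Prints "1.2.4" once the new wheel is installed.
print(version("dt-extensions-sdk"))
```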
dynatrace_extension/sdk/runtime.py
@@ -1,67 +1,67 @@
- # SPDX-FileCopyrightText: 2023-present Dynatrace LLC
- #
- # SPDX-License-Identifier: MIT
-
- import logging
- from typing import ClassVar, List, NamedTuple
-
-
- class DefaultLogLevel(NamedTuple):
- string_value: str
- int_value: int
-
-
- class RuntimeProperties:
- _default_log_level = DefaultLogLevel("info", logging.INFO)
- _log_level_converter: ClassVar = {"debug": logging.DEBUG, "info": logging.INFO}
-
- def __init__(self, json_response: dict):
- """
- This is the response from EEC when a status (heartbeat) is sent
- Example:
- {'extconfig': 'b2520a74-88e8-3e03-bc01-e1116fec4a98', 'userconfig': '1645918226657', 'debugmode': '0', 'runtime': {}, 'tasks': []}
- """
- self.extconfig: str = json_response.get("extconfig", "")
- self.userconfig: str = json_response.get("userconfig", "")
- self.debugmode: bool = json_response.get("debugmode", "0") == "1"
- self.runtime: dict = json_response.get("runtime", {})
- self.tasks: List[str] = json_response.get("tasks", [])
-
- @classmethod
- def set_default_log_level(cls, value: str):
- RuntimeProperties._default_log_level = DefaultLogLevel(value, RuntimeProperties._to_log_level(value))
-
- @classmethod
- def _to_log_level(cls, value: str) -> int:
- """
- The method convert LogLevel string value into Python log level (loggin package).
- loggin.INFO is a default.
- :param value: string log lever
- :return: Python log level
- """
- return RuntimeProperties._log_level_converter.get(value, RuntimeProperties._default_log_level.int_value)
-
- def log_level(self, extension_name: str) -> int:
- """
- The method check python.debuglevel (lower priority)
- and python.debuglevel.extension_name (higher priority) string debug flags.
- loggin.INFO is a default.
- :param extension_name: extension name
- :return: log level for Python log system (loggin)
- """
- value = self.runtime.get("debuglevel", RuntimeProperties._default_log_level.string_value)
- value = self.runtime.get(f"debuglevel.{extension_name}", value)
- return RuntimeProperties._to_log_level(value)
-
- def get_api_log_level(self, extension_name: str) -> int:
- """
- The method check python.debuglevel.api (lower priority)
- python.debuglevel.extension_name.api (higher priority) string debug flags.
- loggin.INFO is a default.
- :param extension_name: extension name
- :return: log level for Python log system (loggin)
- """
- value = self.runtime.get("debuglevel.api", RuntimeProperties._default_log_level.string_value)
- value = self.runtime.get(f"debuglevel.{extension_name}.api", value)
- return RuntimeProperties._to_log_level(value)
- pass
+ # SPDX-FileCopyrightText: 2023-present Dynatrace LLC
+ #
+ # SPDX-License-Identifier: MIT
+
+ import logging
+ from typing import ClassVar, List, NamedTuple
+
+
+ class DefaultLogLevel(NamedTuple):
+ string_value: str
+ int_value: int
+
+
+ class RuntimeProperties:
+ _default_log_level = DefaultLogLevel("info", logging.INFO)
+ _log_level_converter: ClassVar = {"debug": logging.DEBUG, "info": logging.INFO}
+
+ def __init__(self, json_response: dict):
+ """
+ This is the response from EEC when a status (heartbeat) is sent
+ Example:
+ {'extconfig': 'b2520a74-88e8-3e03-bc01-e1116fec4a98', 'userconfig': '1645918226657', 'debugmode': '0', 'runtime': {}, 'tasks': []}
+ """
+ self.extconfig: str = json_response.get("extconfig", "")
+ self.userconfig: str = json_response.get("userconfig", "")
+ self.debugmode: bool = json_response.get("debugmode", "0") == "1"
+ self.runtime: dict = json_response.get("runtime", {})
+ self.tasks: List[str] = json_response.get("tasks", [])
+
+ @classmethod
+ def set_default_log_level(cls, value: str):
+ RuntimeProperties._default_log_level = DefaultLogLevel(value, RuntimeProperties._to_log_level(value))
+
+ @classmethod
+ def _to_log_level(cls, value: str) -> int:
+ """
+ The method convert LogLevel string value into Python log level (loggin package).
+ loggin.INFO is a default.
+ :param value: string log lever
+ :return: Python log level
+ """
+ return RuntimeProperties._log_level_converter.get(value, RuntimeProperties._default_log_level.int_value)
+
+ def log_level(self, extension_name: str) -> int:
+ """
+ The method check python.debuglevel (lower priority)
+ and python.debuglevel.extension_name (higher priority) string debug flags.
+ loggin.INFO is a default.
+ :param extension_name: extension name
+ :return: log level for Python log system (loggin)
+ """
+ value = self.runtime.get("debuglevel", RuntimeProperties._default_log_level.string_value)
+ value = self.runtime.get(f"debuglevel.{extension_name}", value)
+ return RuntimeProperties._to_log_level(value)
+
+ def get_api_log_level(self, extension_name: str) -> int:
+ """
+ The method check python.debuglevel.api (lower priority)
+ python.debuglevel.extension_name.api (higher priority) string debug flags.
+ loggin.INFO is a default.
+ :param extension_name: extension name
+ :return: log level for Python log system (loggin)
+ """
+ value = self.runtime.get("debuglevel.api", RuntimeProperties._default_log_level.string_value)
+ value = self.runtime.get(f"debuglevel.{extension_name}.api", value)
+ return RuntimeProperties._to_log_level(value)
+ pass
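The runtime.py hunk above is a line-for-line re-add with no functional change. As a rough illustration of what RuntimeProperties does with the EEC heartbeat response, the sketch below feeds it a hand-written response dict; the extension name "my_extension" and the runtime keys used here are illustrative placeholders, not values defined by the SDK.

```python
import logging

from dynatrace_extension.sdk.runtime import RuntimeProperties

# Hand-written EEC-style heartbeat response; "my_extension" is a placeholder name.
response = {
    "extconfig": "b2520a74-88e8-3e03-bc01-e1116fec4a98",
    "userconfig": "1645918226657",
    "debugmode": "1",
    "runtime": {"debuglevel": "debug", "debuglevel.my_extension.api": "info"},
    "tasks": [],
}

props = RuntimeProperties(response)
assert props.debugmode is True                                  # "1" -> True
assert props.log_level("my_extension") == logging.DEBUG         # resolved from "debuglevel"
assert props.get_api_log_level("my_extension") == logging.INFO  # resolved from "debuglevel.my_extension.api"
```

As the code above shows, the per-extension keys (debuglevel.&lt;name&gt;, debuglevel.&lt;name&gt;.api) take priority over the generic debuglevel / debuglevel.api keys, and unrecognized strings fall back to the default log level.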
dynatrace_extension/sdk/snapshot.py
@@ -1,198 +1,198 @@
- from __future__ import annotations
-
- import json
- import os
- from dataclasses import dataclass
- from pathlib import Path
-
- PREFIX_HOST = "HOST"
- PREFIX_PG = "PROCESS_GROUP"
- PREFIX_PGI = "PROCESS_GROUP_INSTANCE"
-
-
- @dataclass
- class EntryProperties:
- technologies: list[str]
- pg_technologies: list[str]
-
- @staticmethod
- def from_json(json_data: dict) -> EntryProperties:
- technologies = json_data.get("Technologies", "unknown-technologies").split(",")
- pg_technologies = json_data.get("pgTechnologies", "unknown-pg-technologies").split(",")
- return EntryProperties(technologies, pg_technologies)
-
-
- @dataclass
- class PortBinding:
- ip: str
- port: int
-
- @staticmethod
- def from_string(data: str) -> PortBinding:
- ip, port = data.split("_")
- return PortBinding(ip, int(port))
-
-
- @dataclass
- class ProcessProperties:
- cmd_line: str | None
- exe_path: str | None
- parent_pid: int | None
- work_dir: str | None
- listening_ports: list[int]
- port_bindings: list[PortBinding]
- docker_mount: str | None
- docker_container_id: str | None
- listening_internal_ports: str | None
-
- @staticmethod
- def from_json(json_data: dict) -> ProcessProperties:
- cmd_line = json_data.get("CmdLine", "unknown-cmd-line")
- exe_path = json_data.get("ExePath", "unknown-exe-path")
- parent_pid = int(json_data.get("ParentPid", "-1"))
- work_dir = json_data.get("WorkDir", "unknown-work-dir")
- listening_ports = [int(p) for p in json_data.get("ListeningPorts", "").split(" ") if p != ""]
- port_bindings = [PortBinding.from_string(p) for p in json_data.get("PortBindings", "").split(";") if p != ""]
- docker_mount = json_data.get("DockerMount", "unknown-docker-mount")
- docker_container_id = json_data.get("DockerContainerId", "unknown-docker-container-id")
- listening_internal_ports = json_data.get("ListeningInternalPorts", "unknown-listening-internal-ports")
- return ProcessProperties(
- cmd_line,
- exe_path,
- parent_pid,
- work_dir,
- listening_ports,
- port_bindings,
- docker_mount,
- docker_container_id,
- listening_internal_ports,
- )
-
-
- @dataclass
- class Process:
- pid: int
- process_name: str
- properties: ProcessProperties
-
- @staticmethod
- def from_json(json_data: dict) -> Process:
- pid = int(json_data.get("pid", "-1"))
- process_name = json_data.get("process_name", "unknown-process-name")
- all_properties = {}
- for p in json_data.get("properties", []):
- all_properties.update(p)
- properties = ProcessProperties.from_json(all_properties)
- return Process(pid, process_name, properties)
-
-
- @dataclass
- class Entry:
- group_id: str
- node_id: str
- group_instance_id: str
- process_type: int
- group_name: str
- processes: list[Process]
- properties: EntryProperties
-
- @staticmethod
- def from_json(json_data: dict) -> Entry:
- group_id = json_data.get("group_id", "0X0000000000000000")
- group_id = f"{PREFIX_PG}-{group_id[-16:]}"
-
- node_id = json_data.get("node_id", "0X0000000000000000")
-
- group_instance_id = json_data.get("group_instance_id", "0X0000000000000000")
- group_instance_id = f"{PREFIX_PGI}-{group_instance_id[-16:]}"
-
- process_type = int(json_data.get("process_type", "0"))
- group_name = json_data.get("group_name", "unknown-group-name")
- processes = [Process.from_json(p) for p in json_data.get("processes", [])]
-
- # The structure here was never thought out, so we have to check for both keys and merge them into one object
- properties_list = json_data.get("properties", [])
- technologies = [p for p in properties_list if "Technologies" in p]
- if technologies:
- technologies = technologies[0]["Technologies"].split(",")
-
- pg_technologies = [p for p in properties_list if "pgTechnologies" in p]
- if pg_technologies:
- pg_technologies = pg_technologies[0]["pgTechnologies"].split(",")
- properties = EntryProperties(technologies or [], pg_technologies or [])
-
- return Entry(group_id, node_id, group_instance_id, process_type, group_name, processes, properties)
-
-
- @dataclass
- class Snapshot:
- host_id: str
- entries: list[Entry]
-
- @staticmethod
- def parse_from_file(snapshot_file: Path | str | None = None) -> Snapshot:
- """Returns a process snapshot object like EF1.0 used to do"""
-
- if snapshot_file is None:
- snapshot_file = find_log_dir() / "plugin" / "oneagent_latest_snapshot.log"
-
- with open(snapshot_file) as f:
- snapshot_json = json.load(f)
-
- host_id = snapshot_json.get("host_id", "0X0000000000000000")
- host_id = f"{PREFIX_HOST}-{host_id[-16:]}"
- entries = [Entry.from_json(e) for e in snapshot_json.get("entries", [])]
- return Snapshot(host_id, entries)
-
- # Returns list of Process groups matching a technology. Use to simulate activation
- def get_process_groups_by_technology(self, technology: str) -> list[Process]:
- pgs = []
- for entry in self.entries:
- if technology in entry.properties.technologies:
- pgs.extend(entry.processes)
-
- return pgs
-
-
- def find_config_directory() -> Path:
- """
- Attempt to find the OneAgent config directory.
- Note, the user can never modify these directories
- Windows -> https://docs.dynatrace.com/docs/shortlink/oneagent-disk-requirements-windows#oneagent-files-aging-mechanism
- Linux -> https://docs.dynatrace.com/docs/shortlink/oneagent-disk-requirements-linux#sizes
- """
- config_dir_base = os.path.expandvars("%PROGRAMDATA%") if os.name == "nt" else "/var/lib"
- config_dir = Path(config_dir_base) / "dynatrace" / "oneagent" / "agent" / "config"
- if config_dir.exists():
- return config_dir
- file_path = Path(__file__).resolve()
-
- while file_path.parent != file_path:
- file_path = file_path.parent
- if file_path.name == "agent":
- return file_path / "config"
-
- msg = "Could not find the OneAgent config directory"
- raise Exception(msg)
-
-
- def find_log_dir() -> Path:
- """
- Attempt to find the OneAgent log directory.
- This is always stored in the installation.conf file.
- So we attempt to find the installation.conf file and read the LogDir property
- Returns: the Path to the log directory
- """
- config_dir = find_config_directory()
- installation_conf = config_dir / "installation.conf"
- if not installation_conf.exists():
- msg = f"Could not find installation.conf at {installation_conf}"
- raise Exception(msg)
-
- with open(installation_conf) as f:
- for line in f:
- if line.startswith("LogDir"):
- log_dir = line.split("=")[1].strip()
- return Path(log_dir)
- msg = f"Could not find LogDir in {installation_conf}"
- raise Exception(msg)
+ from __future__ import annotations
+
+ import json
+ import os
+ from dataclasses import dataclass
+ from pathlib import Path
+
+ PREFIX_HOST = "HOST"
+ PREFIX_PG = "PROCESS_GROUP"
+ PREFIX_PGI = "PROCESS_GROUP_INSTANCE"
+
+
+ @dataclass
+ class EntryProperties:
+ technologies: list[str]
+ pg_technologies: list[str]
+
+ @staticmethod
+ def from_json(json_data: dict) -> EntryProperties:
+ technologies = json_data.get("Technologies", "unknown-technologies").split(",")
+ pg_technologies = json_data.get("pgTechnologies", "unknown-pg-technologies").split(",")
+ return EntryProperties(technologies, pg_technologies)
+
+
+ @dataclass
+ class PortBinding:
+ ip: str
+ port: int
+
+ @staticmethod
+ def from_string(data: str) -> PortBinding:
+ ip, port = data.split("_")
+ return PortBinding(ip, int(port))
+
+
+ @dataclass
+ class ProcessProperties:
+ cmd_line: str | None
+ exe_path: str | None
+ parent_pid: int | None
+ work_dir: str | None
+ listening_ports: list[int]
+ port_bindings: list[PortBinding]
+ docker_mount: str | None
+ docker_container_id: str | None
+ listening_internal_ports: str | None
+
+ @staticmethod
+ def from_json(json_data: dict) -> ProcessProperties:
+ cmd_line = json_data.get("CmdLine", "unknown-cmd-line")
+ exe_path = json_data.get("ExePath", "unknown-exe-path")
+ parent_pid = int(json_data.get("ParentPid", "-1"))
+ work_dir = json_data.get("WorkDir", "unknown-work-dir")
+ listening_ports = [int(p) for p in json_data.get("ListeningPorts", "").split(" ") if p != ""]
+ port_bindings = [PortBinding.from_string(p) for p in json_data.get("PortBindings", "").split(";") if p != ""]
+ docker_mount = json_data.get("DockerMount", "unknown-docker-mount")
+ docker_container_id = json_data.get("DockerContainerId", "unknown-docker-container-id")
+ listening_internal_ports = json_data.get("ListeningInternalPorts", "unknown-listening-internal-ports")
+ return ProcessProperties(
+ cmd_line,
+ exe_path,
+ parent_pid,
+ work_dir,
+ listening_ports,
+ port_bindings,
+ docker_mount,
+ docker_container_id,
+ listening_internal_ports,
+ )
+
+
+ @dataclass
+ class Process:
+ pid: int
+ process_name: str
+ properties: ProcessProperties
+
+ @staticmethod
+ def from_json(json_data: dict) -> Process:
+ pid = int(json_data.get("pid", "-1"))
+ process_name = json_data.get("process_name", "unknown-process-name")
+ all_properties = {}
+ for p in json_data.get("properties", []):
+ all_properties.update(p)
+ properties = ProcessProperties.from_json(all_properties)
+ return Process(pid, process_name, properties)
+
+
+ @dataclass
+ class Entry:
+ group_id: str
+ node_id: str
+ group_instance_id: str
+ process_type: int
+ group_name: str
+ processes: list[Process]
+ properties: EntryProperties
+
+ @staticmethod
+ def from_json(json_data: dict) -> Entry:
+ group_id = json_data.get("group_id", "0X0000000000000000")
+ group_id = f"{PREFIX_PG}-{group_id[-16:]}"
+
+ node_id = json_data.get("node_id", "0X0000000000000000")
+
+ group_instance_id = json_data.get("group_instance_id", "0X0000000000000000")
+ group_instance_id = f"{PREFIX_PGI}-{group_instance_id[-16:]}"
+
+ process_type = int(json_data.get("process_type", "0"))
+ group_name = json_data.get("group_name", "unknown-group-name")
+ processes = [Process.from_json(p) for p in json_data.get("processes", [])]
+
+ # The structure here was never thought out, so we have to check for both keys and merge them into one object
+ properties_list = json_data.get("properties", [])
+ technologies = [p for p in properties_list if "Technologies" in p]
+ if technologies:
+ technologies = technologies[0]["Technologies"].split(",")
+
+ pg_technologies = [p for p in properties_list if "pgTechnologies" in p]
+ if pg_technologies:
+ pg_technologies = pg_technologies[0]["pgTechnologies"].split(",")
+ properties = EntryProperties(technologies or [], pg_technologies or [])
+
+ return Entry(group_id, node_id, group_instance_id, process_type, group_name, processes, properties)
+
+
+ @dataclass
+ class Snapshot:
+ host_id: str
+ entries: list[Entry]
+
+ @staticmethod
+ def parse_from_file(snapshot_file: Path | str | None = None) -> Snapshot:
+ """Returns a process snapshot object like EF1.0 used to do"""
+
+ if snapshot_file is None:
+ snapshot_file = find_log_dir() / "plugin" / "oneagent_latest_snapshot.log"
+
+ with open(snapshot_file) as f:
+ snapshot_json = json.load(f)
+
+ host_id = snapshot_json.get("host_id", "0X0000000000000000")
+ host_id = f"{PREFIX_HOST}-{host_id[-16:]}"
+ entries = [Entry.from_json(e) for e in snapshot_json.get("entries", [])]
+ return Snapshot(host_id, entries)
+
+ # Returns list of Process groups matching a technology. Use to simulate activation
+ def get_process_groups_by_technology(self, technology: str) -> list[Entry]:
+ pgs = []
+ for entry in self.entries:
+ if technology in entry.properties.technologies:
+ pgs.append(entry)
+
+ return pgs
+
+
+ def find_config_directory() -> Path:
+ """
+ Attempt to find the OneAgent config directory.
+ Note, the user can never modify these directories
+ Windows -> https://docs.dynatrace.com/docs/shortlink/oneagent-disk-requirements-windows#oneagent-files-aging-mechanism
+ Linux -> https://docs.dynatrace.com/docs/shortlink/oneagent-disk-requirements-linux#sizes
+ """
+ config_dir_base = os.path.expandvars("%PROGRAMDATA%") if os.name == "nt" else "/var/lib"
+ config_dir = Path(config_dir_base) / "dynatrace" / "oneagent" / "agent" / "config"
+ if config_dir.exists():
+ return config_dir
+ file_path = Path(__file__).resolve()
+
+ while file_path.parent != file_path:
+ file_path = file_path.parent
+ if file_path.name == "agent":
+ return file_path / "config"
+
+ msg = "Could not find the OneAgent config directory"
+ raise Exception(msg)
+
+
+ def find_log_dir() -> Path:
+ """
+ Attempt to find the OneAgent log directory.
+ This is always stored in the installation.conf file.
+ So we attempt to find the installation.conf file and read the LogDir property
+ Returns: the Path to the log directory
+ """
+ config_dir = find_config_directory()
+ installation_conf = config_dir / "installation.conf"
+ if not installation_conf.exists():
+ msg = f"Could not find installation.conf at {installation_conf}"
+ raise Exception(msg)
+
+ with open(installation_conf) as f:
+ for line in f:
+ if line.startswith("LogDir"):
+ log_dir = line.split("=")[1].strip()
+ return Path(log_dir)
+ msg = f"Could not find LogDir in {installation_conf}"
+ raise Exception(msg)
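The snapshot.py hunk contains the only functional change visible in this diff: get_process_groups_by_technology now returns the matching Entry objects themselves (list[Entry]) instead of flattening them into their processes (list[Process]). A minimal sketch of the new behaviour, assuming a OneAgent process snapshot is available on the host (the explicit path below is a placeholder; with no argument, parse_from_file() resolves the OneAgent log directory and reads plugin/oneagent_latest_snapshot.log itself):

```python
from dynatrace_extension.sdk.snapshot import Snapshot

# Placeholder path for illustration; omit the argument to let the SDK find the file.
snapshot = Snapshot.parse_from_file("oneagent_latest_snapshot.log")

# "PYTHON" is an illustrative technology string; real values come from the
# "Technologies" property in the snapshot file.
for entry in snapshot.get_process_groups_by_technology("PYTHON"):
    # 1.2.2 yielded Process objects here; 1.2.4 yields Entry objects,
    # so individual processes are reached through entry.processes.
    print(entry.group_name, entry.group_instance_id)
    for process in entry.processes:
        print("  ", process.pid, process.process_name)
```

Code written against 1.2.2 that iterated directly over the Process objects returned by this method needs the extra entry.processes step after upgrading.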
dynatrace_extension/sdk/vendor/mureq/LICENSE
@@ -1,14 +1,14 @@
- BSD Zero Clause License
-
- Copyright (c) 2021 Shivaram Lingamneni
-
- Permission to use, copy, modify, and/or distribute this software for any
- purpose with or without fee is hereby granted.
-
- THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
- AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
- OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+ BSD Zero Clause License
+
+ Copyright (c) 2021 Shivaram Lingamneni
+
+ Permission to use, copy, modify, and/or distribute this software for any
+ purpose with or without fee is hereby granted.
+
+ THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+ REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+ AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+ INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+ LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+ OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
  PERFORMANCE OF THIS SOFTWARE.