dapi 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dapi/__init__.py ADDED
@@ -0,0 +1,18 @@
1
+ """
2
+ `dapi` is a library that simplifies the process of submitting, running, and monitoring [TAPIS v2 / AgavePy](https://agavepy.readthedocs.io/en/latest/index.html) jobs on [DesignSafe](https://designsafe-ci.org) via [Jupyter Notebooks](https://jupyter.designsafe-ci.org).
3
+
4
+
5
+ ## Features
6
+
7
+ * Simplified TAPIS v2 Calls: No need to fiddle with complex API requests. `dapi` abstracts away the complexities.
8
+
9
+ * Seamless Integration with DesignSafe Jupyter Notebooks: Launch DesignSafe applications directly from the Jupyter environment.
10
+
11
+ ## Installation
12
+
13
+ ```shell
14
+ pip3 install dapi
15
+ ```
16
+
17
+ """
18
+ from . import jobs
dapi/db/__init__.py ADDED
@@ -0,0 +1,2 @@
1
+ name = "designsafe_db"
2
+ from .db import DSDatabase
dapi/db/config.py ADDED
@@ -0,0 +1,6 @@
1
# Mapping of shorthand names to actual database names and environment prefixes.
# Keys are the shorthands accepted by DSDatabase(dbname=...); each entry gives
# the real MySQL schema name ("dbname") and the prefix ("env_prefix") used to
# look up credential environment variables (e.g. NGL_DB_USER, NGL_DB_PASSWORD).
db_config = {
    # Next Generation Liquefaction database
    "ngl": {"dbname": "sjbrande_ngl_db", "env_prefix": "NGL_"},
    # Shear-wave velocity profile database
    "vp": {"dbname": "sjbrande_vpdb", "env_prefix": "VP_"},
    # Post-earthquake recovery database
    "eq": {"dbname": "post_earthquake_recovery", "env_prefix": "EQ_"},
}
dapi/db/db.py ADDED
@@ -0,0 +1,94 @@
1
+ import os
2
+ import pandas as pd
3
+ from sqlalchemy import create_engine, exc
4
+ from sqlalchemy.orm import sessionmaker
5
+ from sqlalchemy import text
6
+
7
+ from .config import db_config
8
+
9
+
10
class DSDatabase:
    """A database utility class for connecting to a DesignSafe SQL database.

    This class provides functionality to connect to a MySQL database using
    SQLAlchemy and PyMySQL. It supports executing SQL queries and returning
    results in different formats.

    Attributes:
        user (str): Database username, defaults to 'dspublic'.
        password (str): Database password, defaults to 'R3ad0nlY'.
        host (str): Database host address, defaults to '129.114.52.174'.
        port (int): Database port, defaults to 3306.
        db (str): Database name, can be 'sjbrande_ngl_db', 'sjbrande_vpdb', or 'post_earthquake_recovery'.
        engine (Engine): SQLAlchemy engine for database connection.
        Session (sessionmaker): SQLAlchemy session maker bound to the engine.
    """

    def __init__(self, dbname="ngl"):
        """Initializes the DSDatabase instance from environment variables and creates the engine.

        Args:
            dbname (str): Shorthand for the database name. Must be one of 'ngl', 'vp', or 'eq'.

        Raises:
            ValueError: If ``dbname`` is not a recognized shorthand.
        """
        if dbname not in db_config:
            raise ValueError(
                f"Invalid database shorthand '{dbname}'. Allowed shorthands are: {', '.join(db_config.keys())}"
            )

        config = db_config[dbname]
        env_prefix = config["env_prefix"]

        # Public read-only credentials are used when no env overrides exist.
        self.user = os.getenv(f"{env_prefix}DB_USER", "dspublic")
        self.password = os.getenv(f"{env_prefix}DB_PASSWORD", "R3ad0nlY")
        self.host = os.getenv(f"{env_prefix}DB_HOST", "129.114.52.174")
        self.port = os.getenv(f"{env_prefix}DB_PORT", 3306)
        self.db = config["dbname"]

        # pool_recycle prevents reuse of connections the server has already
        # timed out on; create_engine itself does not connect yet (lazy).
        self.engine = create_engine(
            f"mysql+pymysql://{self.user}:{self.password}@{self.host}:{self.port}/{self.db}",
            pool_recycle=3600,  # 1 hour in seconds
        )
        self.Session = sessionmaker(bind=self.engine)

    def read_sql(self, sql, output_type="DataFrame"):
        """Executes a SQL query and returns the results.

        Args:
            sql (str): The SQL query string to be executed.
            output_type (str, optional): The format for the query results. Defaults to 'DataFrame'.
                Possible values are 'DataFrame' for a pandas DataFrame, or 'dict' for a list of dictionaries.

        Returns:
            pandas.DataFrame or list of dict: The result of the SQL query.

        Raises:
            ValueError: If the SQL query string is empty or if the output type is not valid.
            sqlalchemy.exc.SQLAlchemyError: If an error occurs during query execution.
        """
        if not sql:
            raise ValueError("SQL query string is required")

        if output_type not in ["DataFrame", "dict"]:
            raise ValueError('Output type must be either "DataFrame" or "dict"')

        session = self.Session()

        try:
            if output_type == "DataFrame":
                return pd.read_sql_query(text(sql), session.bind)
            # SQLAlchemy 2.x Row objects are not directly dict()-able;
            # use the row's _mapping view to build plain dictionaries.
            result = session.execute(text(sql))
            return [dict(row._mapping) for row in result]
        except exc.SQLAlchemyError:
            # Re-raise the original SQLAlchemyError (preserving type and
            # traceback) instead of wrapping it in a bare Exception, so the
            # behavior matches the documented contract above.
            raise
        finally:
            session.close()

    def close(self):
        """Dispose of the engine's connection pool, closing the database connection."""
        self.engine.dispose()
dapi/jobs/__init__.py ADDED
@@ -0,0 +1,19 @@
1
+ """
2
+ `dapi` is a library that simplifies the process of submitting, running, and monitoring [TAPIS v2 / AgavePy](https://agavepy.readthedocs.io/en/latest/index.html) jobs on [DesignSafe](https://designsafe-ci.org) via [Jupyter Notebooks](https://jupyter.designsafe-ci.org).
3
+
4
+
5
+ ## Features
6
+
7
+ * Simplified TAPIS v2 Calls: No need to fiddle with complex API requests. `dapi` abstracts away the complexities.
8
+
9
+ * Seamless Integration with DesignSafe Jupyter Notebooks: Launch DesignSafe applications directly from the Jupyter environment.
10
+
11
+ ## Installation
12
+
13
+ ```shell
14
+ pip3 install dapi
15
+ ```
16
+
17
+ """
18
+ from .dir import get_ds_path_uri
19
+ from .jobs import get_status, runtime_summary, generate_job_info, get_archive_path
dapi/jobs/dir.py ADDED
@@ -0,0 +1,51 @@
1
+ import os
2
+
3
+
4
def get_ds_path_uri(ag, path):
    """
    Given a path on DesignSafe, determine the correct agave:// input URI.

    Args:
        ag (object): Agave object used to fetch the user profile or project
            metadata.
        path (str): The directory path.

    Returns:
        str: The corresponding input URI (spaces encoded as ``%20``).

    Raises:
        ValueError: If no matching directory pattern is found.
    """
    # Each tuple: (path marker, storage system, prepend the username?).
    storage_markers = (
        ("jupyter/MyData", "designsafe.storage.default", True),
        ("jupyter/mydata", "designsafe.storage.default", True),
        ("jupyter/CommunityData", "designsafe.storage.community", False),
        ("/MyData", "designsafe.storage.default", True),
        ("/mydata", "designsafe.storage.default", True),
    )

    for marker, system, with_username in storage_markers:
        if marker not in path:
            continue
        # Keep only the portion after the marker.
        suffix = path.split(marker)[-1]
        if with_username:
            suffix = ag.profiles.get()["username"] + suffix
        return f"agave://{system}/{suffix}".replace(" ", "%20")

    # Project directories resolve through project metadata to a UUID.
    for marker in ("jupyter/MyProjects", "jupyter/projects"):
        if marker not in path:
            continue
        remainder = path.split(marker + "/")[-1]
        project_id = remainder.split("/")[0]
        query = {"value.projectId": str(project_id)}
        remainder = remainder.split(project_id)[-1]
        project_uuid = ag.meta.listMetadata(q=str(query))[0]["uuid"]
        return f"agave://project-{project_uuid}{remainder}".replace(" ", "%20")

    raise ValueError(f"No matching directory pattern found for: {path}")
dapi/jobs/jobs.py ADDED
@@ -0,0 +1,212 @@
1
+ import time
2
+ from datetime import datetime, timedelta, timezone
3
+ from tqdm import tqdm
4
+ import logging
5
+
6
+ # Configuring the logging system
7
+ # logging.basicConfig(
8
+ # level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
9
+ # )
10
+
11
+
12
def get_status(ag, job_id, time_lapse=15):
    """
    Retrieves and monitors the status of a job from Agave.

    This function initially waits for the job to start, displaying its progress
    using a tqdm progress bar. Once the job starts, it monitors the job's status
    up to a maximum duration specified by the job's "maxHours". If the job
    completes or fails before reaching this maximum duration, it returns the
    job's final status.

    Args:
        ag (object): The Agave job object used to interact with the job.
        job_id (str): The unique identifier of the job to monitor.
        time_lapse (int, optional): Time interval, in seconds, to wait between
            status checks. Defaults to 15 seconds.

    Returns:
        str: The final status of the job. Typical values include "FINISHED",
        "FAILED", and "STOPPED".

    Raises:
        No exceptions are explicitly raised, but potential exceptions raised by
        the Agave job object or other called functions/methods will propagate.
    """
    previous_status = None
    # Initially check if the job is already running.
    status = ag.jobs.getStatus(jobId=job_id)["status"]

    job_details = ag.jobs.get(jobId=job_id)
    max_hours = job_details["maxHours"]

    # Using tqdm to provide visual feedback while waiting for the job to start.
    with tqdm(desc="Waiting for job to start", dynamic_ncols=True) as pbar:
        while status not in ["RUNNING", "FINISHED", "FAILED", "STOPPED"]:
            time.sleep(time_lapse)
            status = ag.jobs.getStatus(jobId=job_id)["status"]
            pbar.update(1)
            pbar.set_postfix_str(f"Status: {status}")

    # Once the job is running, monitor it for up to maxHours.
    max_iterations = int(max_hours * 3600 // time_lapse)

    # Using tqdm for the monitoring progress bar.
    for _ in tqdm(range(max_iterations), desc="Monitoring job", ncols=100):
        status = ag.jobs.getStatus(jobId=job_id)["status"]

        # Print the status only when it changes to keep the output readable.
        if status != previous_status:
            tqdm.write(f"\tStatus: {status}")
            previous_status = status

        # Break the loop once the job reaches a terminal status.
        if status in ["FINISHED", "FAILED", "STOPPED"]:
            break

        time.sleep(time_lapse)
    else:
        # Executes only if the for loop completed without a 'break'.
        # logging.warn is deprecated; logging.warning is the supported API.
        logging.warning("Warning: Maximum monitoring time reached!")

    return status
73
+
74
+
75
def runtime_summary(ag, job_id, verbose=False):
    """Print a summary of how long a job spent in each status.

    Args:
        ag (object): The Agave object that has the job details.
        job_id (str): The ID of the job for which the runtime needs to be determined.
        verbose (bool): If True, prints all statuses. Otherwise, prints only the
            common lifecycle statuses (PENDING, QUEUED, RUNNING, FINISHED, FAILED).

    Returns:
        None: This function doesn't return a value, but it prints the runtime details.
    """
    print("Runtime Summary")
    print("---------------")

    job_history = ag.jobs.getHistory(jobId=job_id)
    total_time = job_history[-1]["created"] - job_history[0]["created"]

    # Aggregate elapsed time per status across consecutive history events.
    # Note: the loop variable is named 'elapsed', not 'time', to avoid
    # shadowing the 'time' module imported at the top of this file.
    status_times = {}
    for event, next_event in zip(job_history, job_history[1:]):
        current_status = event["status"]
        elapsed = next_event["created"] - event["created"]
        if current_status in status_times:
            status_times[current_status] += elapsed
        else:
            status_times[current_status] = elapsed

    # Filter the statuses if verbose is False.
    if not verbose:
        filtered_statuses = {
            "PENDING",
            "QUEUED",
            "RUNNING",
            "FINISHED",
            "FAILED",
        }
        status_times = {
            status: elapsed
            for status, elapsed in status_times.items()
            if status in filtered_statuses
        }

    # Guard against an empty table (single-event history, or everything
    # filtered out) so max() below cannot raise ValueError.
    if status_times:
        # Determine the max width of status names for alignment.
        max_status_width = max(len(status) for status in status_times)
    else:
        max_status_width = len("TOTAL")

    # Print the aggregated times for each unique status in a table format.
    for status, elapsed in status_times.items():
        print(f"{status.upper():<{max_status_width + 2}} time: {elapsed}")

    print(f"{'TOTAL':<{max_status_width + 2}} time: {total_time}")
    print("---------------")
130
+
131
+
132
def generate_job_info(
    ag,
    appid: str,
    jobname: str = "dsjob",
    queue: str = "development",
    nnodes: int = 1,
    nprocessors: int = 1,
    runtime: str = "00:10:00",
    inputs=None,
    parameters=None,
) -> dict:
    """Generate a job information dictionary based on provided arguments.

    Args:
        ag (object): The Agave object to interact with the platform.
        appid (str): The application ID for the job.
        jobname (str, optional): The name of the job. Defaults to 'dsjob'.
        queue (str, optional): The batch queue name. Defaults to 'development'.
        nnodes (int, optional): The number of nodes required. Defaults to 1.
        nprocessors (int, optional): The number of processors per node. Defaults to 1.
        runtime (str, optional): The maximum runtime in the format 'HH:MM:SS'. Defaults to '00:10:00'.
        inputs (dict, optional): The inputs for the job. Defaults to None.
        parameters (dict, optional): The parameters for the job. Defaults to None.

    Returns:
        dict: A dictionary containing the job information.

    Raises:
        ValueError: If the provided appid is not valid.
    """
    # Validate the app id; ag.apps.get raises if the app does not exist.
    # The returned app description is not otherwise needed.
    try:
        ag.apps.get(appId=appid)
    except Exception as e:
        # Chain the original error so the root cause stays visible.
        raise ValueError(f"Invalid app ID: {appid}") from e

    job_info = {
        "appId": appid,
        "name": jobname,
        "batchQueue": queue,
        "nodeCount": nnodes,
        "processorsPerNode": nprocessors,
        "memoryPerNode": "1",
        "maxRunTime": runtime,
        "archive": True,
        "inputs": inputs,
        "parameters": parameters,
    }

    return job_info
182
+
183
+
184
def get_archive_path(ag, job_id):
    """
    Get the archive path for a given job ID and modify the user directory
    to '/home/jupyter/MyData'.

    Args:
        ag (object): The Agave object to interact with the platform.
        job_id (str): The job ID to retrieve the archive path for.

    Returns:
        str: The modified archive path.

    Raises:
        ValueError: If the archivePath format is unexpected.
    """
    # Fetch the job info.
    job_info = ag.jobs.get(jobId=job_id)

    # Split the archive path into the leading user segment and the rest.
    try:
        user, rest = job_info.archivePath.split("/", 1)
    except ValueError:
        raise ValueError(f"Unexpected archivePath format for jobId={job_id}")

    # Join explicitly instead of str.replace(user, ...), which would also
    # rewrite any later occurrence of the username inside the path.
    return f"/home/jupyter/MyData/{rest}"
@@ -0,0 +1,20 @@
1
+ # MIT License
2
+ Copyright (c) [2023] [Authors]
3
+
4
+ Permission is hereby granted, free of charge, to any person obtaining a copy
5
+ of this software and associated documentation files (the "Software"), to deal
6
+ in the Software without restriction, including without limitation the rights
7
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8
+ copies of the Software, and to permit persons to whom the Software is
9
+ furnished to do so, subject to the following conditions:
10
+
11
+ The above copyright notice and this permission notice shall be included in all
12
+ copies or substantial portions of the Software.
13
+
14
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
20
+ SOFTWARE.
@@ -0,0 +1,155 @@
1
+ Metadata-Version: 2.1
2
+ Name: dapi
3
+ Version: 0.2.0
4
+ Summary: dapi simplifies accessing TAPIS on DesignSafe
5
+ Author: Krishna Kumar
6
+ Author-email: krishnak@utexas.edu
7
+ Requires-Python: >=3.9,<4.0
8
+ Classifier: Programming Language :: Python :: 3
9
+ Classifier: Programming Language :: Python :: 3.9
10
+ Classifier: Programming Language :: Python :: 3.10
11
+ Classifier: Programming Language :: Python :: 3.11
12
+ Classifier: Programming Language :: Python :: 3.12
13
+ Requires-Dist: agavepy (>0.9.5)
14
+ Requires-Dist: exceptiongroup (>=1.1.3,<2.0.0)
15
+ Requires-Dist: numpy (>=1.24.0,<2.0.0)
16
+ Requires-Dist: pandas (>=2.1.3,<3.0.0)
17
+ Requires-Dist: pymysql (>=1.1.0,<2.0.0)
18
+ Requires-Dist: sqlalchemy (>=2.0.23,<3.0.0)
19
+ Requires-Dist: tqdm (>=4.66.1,<5.0.0)
20
+ Description-Content-Type: text/markdown
21
+
22
+ # DesignSafe API (dapi)
23
+
24
+ ![dapi](dapi.png)
25
+
26
+ [![build and test](https://github.com/DesignSafe-CI/dapi/actions/workflows/build-test.yml/badge.svg)](https://github.com/DesignSafe-CI/dapi/actions/workflows/build-test.yml)
27
+ [![License](https://img.shields.io/badge/license-MIT-blue.svg)](LICENSE.md)
28
+ [![Docs](https://img.shields.io/badge/view-docs-8A2BE2?color=8A2BE2)](https://designsafe-ci.github.io/dapi/dapi/index.html)
29
+
30
+ `dapi` is a library that simplifies the process of submitting, running, and monitoring [TAPIS v2 / AgavePy](https://agavepy.readthedocs.io/en/latest/index.html) jobs on [DesignSafe](https://designsafe-ci.org) via [Jupyter Notebooks](https://jupyter.designsafe-ci.org).
31
+
32
+ ## Features
33
+
34
+ ### Jobs
35
+
36
+ * Simplified TAPIS v2 Calls: No need to fiddle with complex API requests. `dapi` abstracts away the complexities.
37
+
38
+ * Seamless Integration with DesignSafe Jupyter Notebooks: Launch DesignSafe applications directly from the Jupyter environment.
39
+
40
+ ### Database
41
+
42
+ Connects to SQL databases on DesignSafe:
43
+
44
+ | Database | dbname | env_prefix |
45
+ |----------|--------|------------|
46
+ | NGL | `ngl`| `NGL_` |
47
+ | Earthquake Recovery | `eq` | `EQ_` |
48
+ | Vp | `vp` | `VP_` |
49
+
50
+ Define the following environment variables:
51
+ ```
52
+ {env_prefix}DB_USER
53
+ {env_prefix}DB_PASSWORD
54
+ {env_prefix}DB_HOST
55
+ {env_prefix}DB_PORT
56
+ ```
57
+
58
+ For e.g., to add the environment variable `NGL_DB_USER` edit `~/.bashrc`, `~/.zshrc`, or a similar shell-specific configuration file for the current user and add `export NGL_DB_USER="dspublic"`.
59
+
60
+
61
+ ## Installation
62
+
63
+ Install `dapi` via pip
64
+
65
+ ```shell
66
+ pip3 install dapi
67
+ ```
68
+
69
+ To install the current development version of the library use:
70
+
71
+ ```shell
72
+ pip install git+https://github.com/DesignSafe-CI/dapi.git --quiet
73
+ ```
74
+
75
+ ## Example usage:
76
+
77
+ ### Jobs
78
+
79
+ * [Jupyter Notebook Templates](example-notebooks/template-mpm-run.ipynb) using dapi.
80
+
81
+ * View [dapi API doc](https://designsafe-ci.github.io/dapi/dapi/index.html)
82
+
83
+ On [DesignSafe Jupyter](https://jupyter.designsafe-ci.org/):
84
+
85
+ Install the latest version of `dapi` and restart the kernel (Kernel >> Restart Kernel):
86
+
87
+ ```python
88
+ # Remove any previous installations
89
+ !pip uninstall dapi -y
90
+ # Install
91
+ !pip install dapi --quiet
92
+ ```
93
+
94
+ * Import `dapi` library
95
+ ```python
96
+ import dapi
97
+ ```
98
+
99
+ * To list all functions in `dapi`
100
+ ```python
101
+ dir(dapi)
102
+ ```
103
+
104
+ ### Database
105
+ ```python
106
+ import dapi
107
+
108
+ db = dapi.DSDatabase("ngl")
109
+ sql = 'SELECT * FROM SITE'
110
+ df = db.read_sql(sql)
111
+ print(df)
112
+
113
+ # Optionally, close the database connection when done
114
+ db.close()
115
+ ```
116
+
117
+ ## Documentation
118
+
119
+ View [dapi API doc](https://designsafe-ci.github.io/dapi/dapi/index.html)
120
+
121
+ To generate API docs:
122
+
123
+ ```
124
+ pdoc --html --output-dir docs dapi --force
125
+ ```
126
+
127
+ ## Support
128
+
129
+ For any questions, issues, or feedback submit an [issue](https://github.com/DesignSafe-CI/dapi/issues/new)
130
+
131
+ ## Development
132
+
133
+ To develop or test the library locally. Install [Poetry](https://python-poetry.org/docs/#installation). In the current repository run the following commands
134
+
135
+ ```shell
136
+ poetry shell
137
+ poetry install
138
+ poetry build
139
+ ```
140
+
141
+ To run the unit test
142
+ ```shell
143
+ poetry run pytest -v
144
+ ```
145
+
146
+
147
+ ## License
148
+
149
+ `dapi` is licensed under the [MIT License](LICENSE.md).
150
+
151
+ ## Authors
152
+
153
+ * Krishna Kumar, University of Texas at Austin
154
+ * Prof. Pedro Arduino, University of Washington
155
+ * Prof. Scott Brandenberg, University of California Los Angeles
@@ -0,0 +1,11 @@
1
+ dapi/__init__.py,sha256=SKEAGaKxpRYafsqxol5RYep4VLujzYLDofzYtZ_AFBI,604
2
+ dapi/db/__init__.py,sha256=68vCJOTiQSIyGnlfF8__ufayGP52ItELF_TbPcZaPUk,50
3
+ dapi/db/config.py,sha256=wkaDhkV5CS1qcaKGo5GHXU34X8gUtwLPdd-PKlvNHFA,290
4
+ dapi/db/db.py,sha256=3brwLgealA00dgabv8O08Vog0gDIkkHPmTbGcxpj10Q,3701
5
+ dapi/jobs/__init__.py,sha256=e22e_EEaaWSSOMzA6ur1VyKqfMghn6NSwHmCNGwSsiw,701
6
+ dapi/jobs/dir.py,sha256=u2EtsHHotB7c-h-3QbMvLuGkZIxtrmTRajb0VWmqEH8,1807
7
+ dapi/jobs/jobs.py,sha256=_RQSetuLqdvCOLOVD5IkvUDna1iEbDdE_TKw9zj5TwM,7002
8
+ dapi-0.2.0.dist-info/LICENSE.md,sha256=BAQUrW-janfTWmXxSfvvnUvnTfS5qSEw_vVJn-SW3nE,1071
9
+ dapi-0.2.0.dist-info/METADATA,sha256=V79-jDXsF_TrBIkQIMIyySZknY5YeyzF1zJT2NA2g7A,4047
10
+ dapi-0.2.0.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
11
+ dapi-0.2.0.dist-info/RECORD,,
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: poetry-core 1.8.1
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any