process-intelligence 6.0.0__tar.gz → 9.0.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- process_intelligence-9.0.0/.github/workflows/publish.yml +99 -0
- process_intelligence-9.0.0/.gitignore +31 -0
- process_intelligence-9.0.0/PKG-INFO +21 -0
- process_intelligence-9.0.0/data/generate_sample.py +59 -0
- process_intelligence-9.0.0/data/sample_log.csv +117 -0
- process_intelligence-9.0.0/public-repo/.gitignore +31 -0
- process_intelligence-9.0.0/public-repo/LICENSE +21 -0
- process_intelligence-9.0.0/public-repo/README.md +2 -0
- process_intelligence-9.0.0/public-repo/data/generate_sample.py +59 -0
- process_intelligence-9.0.0/public-repo/data/sample_log.csv +117 -0
- process_intelligence-9.0.0/public-repo/pyproject.toml +49 -0
- process_intelligence-6.0.0/process_intelligence.egg-info/requires.txt → process_intelligence-9.0.0/public-repo/requirements.txt +0 -2
- process_intelligence-9.0.0/public-repo/src/some-new-file.py +3 -0
- process_intelligence-9.0.0/public-repo/tests/__init__.py +1 -0
- process_intelligence-9.0.0/pyproject.toml +49 -0
- process_intelligence-9.0.0/requirements.txt +1 -0
- process_intelligence-9.0.0/src/__init__.py +1 -0
- process_intelligence-9.0.0/src/config.py +10 -0
- process_intelligence-9.0.0/src/models.py +68 -0
- process_intelligence-9.0.0/src/some-new-file.py +3 -0
- process_intelligence-9.0.0/tests/__init__.py +1 -0
- process_intelligence-9.0.0/tests/test_helpers.py +62 -0
- process_intelligence-9.0.0/tests/test_models.py +65 -0
- process_intelligence-9.0.0/utils/__init__.py +1 -0
- process_intelligence-9.0.0/utils/helpers.py +45 -0
- process_intelligence-9.0.0/utils/logger.py +29 -0
- process_intelligence-6.0.0/PKG-INFO +0 -12
- process_intelligence-6.0.0/process_intelligence.egg-info/PKG-INFO +0 -12
- process_intelligence-6.0.0/process_intelligence.egg-info/SOURCES.txt +0 -17
- process_intelligence-6.0.0/process_intelligence.egg-info/dependency_links.txt +0 -1
- process_intelligence-6.0.0/process_intelligence.egg-info/top_level.txt +0 -2
- process_intelligence-6.0.0/setup.cfg +0 -4
- {process_intelligence-6.0.0 → process_intelligence-9.0.0}/LICENSE +0 -0
- {process_intelligence-6.0.0 → process_intelligence-9.0.0}/README.md +0 -0
- {process_intelligence-6.0.0 → process_intelligence-9.0.0/public-repo}/setup.py +0 -0
- {process_intelligence-6.0.0 → process_intelligence-9.0.0/public-repo}/src/__init__.py +0 -0
- {process_intelligence-6.0.0 → process_intelligence-9.0.0/public-repo}/src/config.py +0 -0
- {process_intelligence-6.0.0 → process_intelligence-9.0.0/public-repo}/src/models.py +0 -0
- {process_intelligence-6.0.0 → process_intelligence-9.0.0/public-repo}/src/new_src_file.py +0 -0
- {process_intelligence-6.0.0 → process_intelligence-9.0.0/public-repo}/tests/test_helpers.py +0 -0
- {process_intelligence-6.0.0 → process_intelligence-9.0.0/public-repo}/tests/test_models.py +0 -0
- {process_intelligence-6.0.0 → process_intelligence-9.0.0/public-repo}/utils/__init__.py +0 -0
- {process_intelligence-6.0.0 → process_intelligence-9.0.0/public-repo}/utils/helpers.py +0 -0
- {process_intelligence-6.0.0 → process_intelligence-9.0.0/public-repo}/utils/logger.py +0 -0
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
name: Publish to public repo
|
|
2
|
+
|
|
3
|
+
on:
|
|
4
|
+
workflow_dispatch:
|
|
5
|
+
inputs:
|
|
6
|
+
tag:
|
|
7
|
+
description: 'Tag name (e.g. v1.2.0)'
|
|
8
|
+
required: true
|
|
9
|
+
commit_message:
|
|
10
|
+
description: 'Commit message for the public repo (e.g. v1.2.0)'
|
|
11
|
+
required: true
|
|
12
|
+
changelog:
|
|
13
|
+
description: 'Changelog / release notes'
|
|
14
|
+
required: true
|
|
15
|
+
|
|
16
|
+
jobs:
|
|
17
|
+
publish:
|
|
18
|
+
runs-on: ubuntu-latest
|
|
19
|
+
|
|
20
|
+
steps:
|
|
21
|
+
- name: Checkout private repo (full history)
|
|
22
|
+
uses: actions/checkout@v6
|
|
23
|
+
with:
|
|
24
|
+
fetch-depth: 0
|
|
25
|
+
token: ${{ secrets.PUBLIC_REPO_PAT }}
|
|
26
|
+
|
|
27
|
+
- name: Configure git
|
|
28
|
+
run: |
|
|
29
|
+
git config user.name "LukasSchBal" # adjust
|
|
30
|
+
git config user.email "l.schulzebalhorn@tudelft.nl" # adjust
|
|
31
|
+
|
|
32
|
+
- name: Verify tag matches pyproject.toml version
|
|
33
|
+
run: |
|
|
34
|
+
TOML_VERSION="v$(grep '^version' pyproject.toml | head -1 | sed 's/.*= *"\(.*\)"/\1/')"
|
|
35
|
+
if [[ "$TOML_VERSION" != "${{ github.event.inputs.tag }}" ]]; then
|
|
36
|
+
echo "ERROR: Tag '${{ github.event.inputs.tag }}' does not match pyproject.toml version '$TOML_VERSION'"
|
|
37
|
+
exit 1
|
|
38
|
+
fi
|
|
39
|
+
echo "Version check passed: $TOML_VERSION"
|
|
40
|
+
|
|
41
|
+
- name: Tag main branch in private repo
|
|
42
|
+
run: |
|
|
43
|
+
git fetch origin main
|
|
44
|
+
git tag -a ${{ github.event.inputs.tag }} -m "Release ${{ github.event.inputs.tag }}" origin/main
|
|
45
|
+
git push origin ${{ github.event.inputs.tag }}
|
|
46
|
+
|
|
47
|
+
- name: Push clean single commit to public repo
|
|
48
|
+
env:
|
|
49
|
+
PAT: ${{ secrets.PUBLIC_REPO_PAT }}
|
|
50
|
+
run: |
|
|
51
|
+
git clone https://x-access-token:${PAT}@github.com/LukasSchBal/test.git public-repo
|
|
52
|
+
cd public-repo
|
|
53
|
+
git config user.name "LukasSchBal"
|
|
54
|
+
git config user.email "l.schulzebalhorn@tudelft.nl"
|
|
55
|
+
|
|
56
|
+
git remote add private https://x-access-token:${PAT}@github.com/LukasSchBal/test-dev.git
|
|
57
|
+
git fetch private main
|
|
58
|
+
|
|
59
|
+
# Apply the private main tree on top of the existing master history
|
|
60
|
+
git checkout master
|
|
61
|
+
git checkout private/main -- .
|
|
62
|
+
|
|
63
|
+
# Exclude the publish workflow file
|
|
64
|
+
git rm --force --ignore-unmatch .github/workflows/publish.yml
|
|
65
|
+
|
|
66
|
+
git add --all
|
|
67
|
+
git commit -m "${{ github.event.inputs.commit_message }}"
|
|
68
|
+
git push origin master
|
|
69
|
+
|
|
70
|
+
- name: Mirror tag to public repo
|
|
71
|
+
env:
|
|
72
|
+
PAT: ${{ secrets.PUBLIC_REPO_PAT }}
|
|
73
|
+
run: |
|
|
74
|
+
cd public-repo
|
|
75
|
+
git tag -a ${{ github.event.inputs.tag }} -m "Release ${{ github.event.inputs.tag }}"
|
|
76
|
+
git push origin ${{ github.event.inputs.tag }}
|
|
77
|
+
|
|
78
|
+
- name: Create GitHub release in public repo
|
|
79
|
+
env:
|
|
80
|
+
GH_TOKEN: ${{ secrets.PUBLIC_REPO_PAT }}
|
|
81
|
+
run: |
|
|
82
|
+
gh release create ${{ github.event.inputs.tag }} \
|
|
83
|
+
--repo LukasSchBal/test \
|
|
84
|
+
--title "${{ github.event.inputs.tag }}" \
|
|
85
|
+
--notes "${{ github.event.inputs.changelog }}"
|
|
86
|
+
|
|
87
|
+
- name: Set up Python
|
|
88
|
+
uses: actions/setup-python@v6
|
|
89
|
+
with:
|
|
90
|
+
python-version: '3.x'
|
|
91
|
+
|
|
92
|
+
- name: Build and publish to PyPI
|
|
93
|
+
env:
|
|
94
|
+
TWINE_USERNAME: __token__
|
|
95
|
+
TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
|
|
96
|
+
run: |
|
|
97
|
+
pip install build twine
|
|
98
|
+
python -m build
|
|
99
|
+
twine upload dist/*
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
### AL ###
|
|
2
|
+
#Template for AL projects for Dynamics 365 Business Central
|
|
3
|
+
#launch.json folder
|
|
4
|
+
.vscode/
|
|
5
|
+
#Cache folder
|
|
6
|
+
.alcache/
|
|
7
|
+
#Symbols folder
|
|
8
|
+
.alpackages/
|
|
9
|
+
#Snapshots folder
|
|
10
|
+
.snapshots/
|
|
11
|
+
#Testing Output folder
|
|
12
|
+
.output/
|
|
13
|
+
#Extension App-file
|
|
14
|
+
*.app
|
|
15
|
+
#Rapid Application Development File
|
|
16
|
+
rad.json
|
|
17
|
+
#Translation Base-file
|
|
18
|
+
*.g.xlf
|
|
19
|
+
#License-file
|
|
20
|
+
*.flf
|
|
21
|
+
#Test results file
|
|
22
|
+
TestResults.xml
|
|
23
|
+
|
|
24
|
+
# Python
|
|
25
|
+
__pycache__/
|
|
26
|
+
*.py[cod]
|
|
27
|
+
*.pyo
|
|
28
|
+
.pytest_cache/
|
|
29
|
+
*.egg-info/
|
|
30
|
+
dist/
|
|
31
|
+
build/
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: process-intelligence
|
|
3
|
+
Version: 9.0.0
|
|
4
|
+
Summary: A sample process intelligence Python project.
|
|
5
|
+
Project-URL: Homepage, https://github.com/LukasSchBal/test
|
|
6
|
+
Project-URL: Source, https://github.com/LukasSchBal/test-dev
|
|
7
|
+
Author-email: Lukas Schulze Balhorn <l.schulzebalhorn@tudelft.nl>
|
|
8
|
+
License-File: LICENSE
|
|
9
|
+
Keywords: process intelligence
|
|
10
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
11
|
+
Classifier: Operating System :: OS Independent
|
|
12
|
+
Classifier: Programming Language :: Python :: 3
|
|
13
|
+
Requires-Python: >=3.10
|
|
14
|
+
Provides-Extra: dev
|
|
15
|
+
Requires-Dist: pre-commit>=3.7.1; extra == 'dev'
|
|
16
|
+
Requires-Dist: pytest>=7.0; extra == 'dev'
|
|
17
|
+
Requires-Dist: ruff>=0.3.0; extra == 'dev'
|
|
18
|
+
Description-Content-Type: text/markdown
|
|
19
|
+
|
|
20
|
+
# test-dev
|
|
21
|
+
Random description, bla bla bla
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
"""Script to generate sample process event log data as CSV."""
|
|
2
|
+
|
|
3
|
+
import csv
|
|
4
|
+
import random
|
|
5
|
+
from datetime import datetime, timedelta
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
ACTIVITIES = [
|
|
10
|
+
"Register Request",
|
|
11
|
+
"Examine Casually",
|
|
12
|
+
"Examine Thoroughly",
|
|
13
|
+
"Check Ticket",
|
|
14
|
+
"Decide",
|
|
15
|
+
"Reject Request",
|
|
16
|
+
"Pay Compensation",
|
|
17
|
+
"Reinitiate Request",
|
|
18
|
+
]
|
|
19
|
+
|
|
20
|
+
RESOURCES = ["Alice", "Bob", "Carol", "Dave", "Ellen"]
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def generate_log(num_cases: int = 20, seed: int = 42) -> list[dict]:
|
|
24
|
+
random.seed(seed)
|
|
25
|
+
rows = []
|
|
26
|
+
base = datetime(2024, 3, 1, 8, 0, 0)
|
|
27
|
+
|
|
28
|
+
for i in range(1, num_cases + 1):
|
|
29
|
+
case_id = f"case-{i:04d}"
|
|
30
|
+
num_events = random.randint(3, len(ACTIVITIES))
|
|
31
|
+
activities = random.sample(ACTIVITIES, num_events)
|
|
32
|
+
timestamp = base + timedelta(days=random.randint(0, 30))
|
|
33
|
+
|
|
34
|
+
for activity in activities:
|
|
35
|
+
rows.append(
|
|
36
|
+
{
|
|
37
|
+
"case_id": case_id,
|
|
38
|
+
"activity": activity,
|
|
39
|
+
"timestamp": timestamp.strftime("%Y-%m-%dT%H:%M:%S"),
|
|
40
|
+
"resource": random.choice(RESOURCES),
|
|
41
|
+
}
|
|
42
|
+
)
|
|
43
|
+
timestamp += timedelta(minutes=random.randint(10, 240))
|
|
44
|
+
|
|
45
|
+
return rows
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def save_csv(rows: list[dict], output_path: Path) -> None:
|
|
49
|
+
output_path.parent.mkdir(parents=True, exist_ok=True)
|
|
50
|
+
with open(output_path, "w", newline="") as f:
|
|
51
|
+
writer = csv.DictWriter(f, fieldnames=["case_id", "activity", "timestamp", "resource"])
|
|
52
|
+
writer.writeheader()
|
|
53
|
+
writer.writerows(rows)
|
|
54
|
+
print(f"Saved {len(rows)} events to {output_path}")
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
if __name__ == "__main__":
|
|
58
|
+
rows = generate_log(num_cases=20)
|
|
59
|
+
save_csv(rows, Path(__file__).parent / "sample_log.csv")
|
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
case_id,activity,timestamp,resource
|
|
2
|
+
case-0001,Examine Casually,2024-03-22T08:00:00,Ellen
|
|
3
|
+
case-0001,Register Request,2024-03-22T08:32:00,Ellen
|
|
4
|
+
case-0001,Reject Request,2024-03-22T10:30:00,Alice
|
|
5
|
+
case-0001,Examine Thoroughly,2024-03-22T10:47:00,Alice
|
|
6
|
+
case-0001,Reinitiate Request,2024-03-22T11:52:00,Bob
|
|
7
|
+
case-0001,Pay Compensation,2024-03-22T14:11:00,Ellen
|
|
8
|
+
case-0001,Decide,2024-03-22T14:27:00,Ellen
|
|
9
|
+
case-0001,Check Ticket,2024-03-22T15:27:00,Ellen
|
|
10
|
+
case-0002,Reinitiate Request,2024-03-25T08:00:00,Bob
|
|
11
|
+
case-0002,Decide,2024-03-25T11:08:00,Dave
|
|
12
|
+
case-0002,Examine Thoroughly,2024-03-25T12:45:00,Carol
|
|
13
|
+
case-0002,Register Request,2024-03-25T13:34:00,Bob
|
|
14
|
+
case-0003,Examine Casually,2024-03-28T08:00:00,Carol
|
|
15
|
+
case-0003,Register Request,2024-03-28T10:44:00,Carol
|
|
16
|
+
case-0003,Check Ticket,2024-03-28T14:20:00,Alice
|
|
17
|
+
case-0003,Pay Compensation,2024-03-28T17:36:00,Dave
|
|
18
|
+
case-0003,Examine Thoroughly,2024-03-28T20:03:00,Alice
|
|
19
|
+
case-0004,Decide,2024-03-20T08:00:00,Carol
|
|
20
|
+
case-0004,Pay Compensation,2024-03-20T10:37:00,Bob
|
|
21
|
+
case-0004,Reject Request,2024-03-20T13:47:00,Alice
|
|
22
|
+
case-0005,Check Ticket,2024-03-15T08:00:00,Carol
|
|
23
|
+
case-0005,Pay Compensation,2024-03-15T08:51:00,Carol
|
|
24
|
+
case-0005,Examine Thoroughly,2024-03-15T10:31:00,Bob
|
|
25
|
+
case-0005,Register Request,2024-03-15T13:32:00,Carol
|
|
26
|
+
case-0005,Examine Casually,2024-03-15T16:41:00,Alice
|
|
27
|
+
case-0005,Decide,2024-03-15T19:26:00,Bob
|
|
28
|
+
case-0005,Reinitiate Request,2024-03-15T21:52:00,Bob
|
|
29
|
+
case-0005,Reject Request,2024-03-15T22:43:00,Dave
|
|
30
|
+
case-0006,Check Ticket,2024-03-27T08:00:00,Alice
|
|
31
|
+
case-0006,Reject Request,2024-03-27T11:36:00,Carol
|
|
32
|
+
case-0006,Examine Thoroughly,2024-03-27T13:28:00,Carol
|
|
33
|
+
case-0006,Register Request,2024-03-27T13:54:00,Bob
|
|
34
|
+
case-0006,Examine Casually,2024-03-27T16:29:00,Carol
|
|
35
|
+
case-0007,Reinitiate Request,2024-03-24T08:00:00,Ellen
|
|
36
|
+
case-0007,Check Ticket,2024-03-24T10:27:00,Carol
|
|
37
|
+
case-0007,Reject Request,2024-03-24T13:48:00,Ellen
|
|
38
|
+
case-0007,Pay Compensation,2024-03-24T15:47:00,Ellen
|
|
39
|
+
case-0007,Examine Casually,2024-03-24T17:39:00,Carol
|
|
40
|
+
case-0007,Decide,2024-03-24T18:45:00,Bob
|
|
41
|
+
case-0007,Register Request,2024-03-24T21:05:00,Dave
|
|
42
|
+
case-0007,Examine Thoroughly,2024-03-24T21:38:00,Alice
|
|
43
|
+
case-0008,Examine Thoroughly,2024-03-26T08:00:00,Dave
|
|
44
|
+
case-0008,Reject Request,2024-03-26T10:42:00,Alice
|
|
45
|
+
case-0008,Examine Casually,2024-03-26T12:30:00,Dave
|
|
46
|
+
case-0009,Decide,2024-03-11T08:00:00,Alice
|
|
47
|
+
case-0009,Reinitiate Request,2024-03-11T09:25:00,Dave
|
|
48
|
+
case-0009,Register Request,2024-03-11T10:15:00,Dave
|
|
49
|
+
case-0009,Reject Request,2024-03-11T10:25:00,Carol
|
|
50
|
+
case-0009,Examine Thoroughly,2024-03-11T12:43:00,Bob
|
|
51
|
+
case-0009,Check Ticket,2024-03-11T15:02:00,Alice
|
|
52
|
+
case-0010,Decide,2024-03-18T08:00:00,Ellen
|
|
53
|
+
case-0010,Pay Compensation,2024-03-18T08:10:00,Ellen
|
|
54
|
+
case-0010,Reject Request,2024-03-18T09:42:00,Dave
|
|
55
|
+
case-0010,Reinitiate Request,2024-03-18T09:56:00,Alice
|
|
56
|
+
case-0010,Examine Casually,2024-03-18T11:38:00,Carol
|
|
57
|
+
case-0010,Register Request,2024-03-18T12:49:00,Alice
|
|
58
|
+
case-0010,Check Ticket,2024-03-18T14:00:00,Ellen
|
|
59
|
+
case-0010,Examine Thoroughly,2024-03-18T14:30:00,Alice
|
|
60
|
+
case-0011,Examine Casually,2024-03-16T08:00:00,Ellen
|
|
61
|
+
case-0011,Pay Compensation,2024-03-16T08:52:00,Carol
|
|
62
|
+
case-0011,Decide,2024-03-16T11:17:00,Ellen
|
|
63
|
+
case-0011,Reinitiate Request,2024-03-16T13:15:00,Bob
|
|
64
|
+
case-0011,Reject Request,2024-03-16T15:43:00,Bob
|
|
65
|
+
case-0011,Examine Thoroughly,2024-03-16T18:55:00,Carol
|
|
66
|
+
case-0012,Reject Request,2024-03-11T08:00:00,Alice
|
|
67
|
+
case-0012,Check Ticket,2024-03-11T10:40:00,Ellen
|
|
68
|
+
case-0012,Decide,2024-03-11T11:48:00,Ellen
|
|
69
|
+
case-0012,Pay Compensation,2024-03-11T12:54:00,Alice
|
|
70
|
+
case-0012,Register Request,2024-03-11T13:22:00,Alice
|
|
71
|
+
case-0012,Reinitiate Request,2024-03-11T14:30:00,Alice
|
|
72
|
+
case-0012,Examine Thoroughly,2024-03-11T14:48:00,Carol
|
|
73
|
+
case-0012,Examine Casually,2024-03-11T15:16:00,Ellen
|
|
74
|
+
case-0013,Reinitiate Request,2024-03-08T08:00:00,Dave
|
|
75
|
+
case-0013,Examine Casually,2024-03-08T11:36:00,Dave
|
|
76
|
+
case-0013,Decide,2024-03-08T12:34:00,Alice
|
|
77
|
+
case-0013,Pay Compensation,2024-03-08T13:08:00,Dave
|
|
78
|
+
case-0013,Check Ticket,2024-03-08T14:48:00,Dave
|
|
79
|
+
case-0014,Register Request,2024-03-24T08:00:00,Carol
|
|
80
|
+
case-0014,Reject Request,2024-03-24T11:34:00,Alice
|
|
81
|
+
case-0014,Pay Compensation,2024-03-24T12:47:00,Bob
|
|
82
|
+
case-0014,Reinitiate Request,2024-03-24T13:45:00,Ellen
|
|
83
|
+
case-0014,Decide,2024-03-24T15:49:00,Bob
|
|
84
|
+
case-0014,Examine Casually,2024-03-24T17:47:00,Bob
|
|
85
|
+
case-0015,Check Ticket,2024-03-21T08:00:00,Ellen
|
|
86
|
+
case-0015,Pay Compensation,2024-03-21T11:44:00,Alice
|
|
87
|
+
case-0015,Register Request,2024-03-21T12:17:00,Bob
|
|
88
|
+
case-0015,Reinitiate Request,2024-03-21T13:09:00,Dave
|
|
89
|
+
case-0015,Reject Request,2024-03-21T15:23:00,Dave
|
|
90
|
+
case-0015,Decide,2024-03-21T16:27:00,Dave
|
|
91
|
+
case-0016,Pay Compensation,2024-03-30T08:00:00,Dave
|
|
92
|
+
case-0016,Register Request,2024-03-30T09:23:00,Dave
|
|
93
|
+
case-0016,Check Ticket,2024-03-30T12:31:00,Ellen
|
|
94
|
+
case-0016,Examine Thoroughly,2024-03-30T15:30:00,Dave
|
|
95
|
+
case-0017,Decide,2024-03-24T08:00:00,Ellen
|
|
96
|
+
case-0017,Examine Casually,2024-03-24T08:25:00,Carol
|
|
97
|
+
case-0017,Register Request,2024-03-24T08:49:00,Alice
|
|
98
|
+
case-0017,Reinitiate Request,2024-03-24T11:28:00,Dave
|
|
99
|
+
case-0018,Examine Thoroughly,2024-03-22T08:00:00,Bob
|
|
100
|
+
case-0018,Register Request,2024-03-22T09:53:00,Alice
|
|
101
|
+
case-0018,Decide,2024-03-22T13:50:00,Ellen
|
|
102
|
+
case-0018,Pay Compensation,2024-03-22T15:03:00,Ellen
|
|
103
|
+
case-0018,Examine Casually,2024-03-22T17:45:00,Alice
|
|
104
|
+
case-0018,Reject Request,2024-03-22T20:33:00,Alice
|
|
105
|
+
case-0018,Reinitiate Request,2024-03-22T22:30:00,Ellen
|
|
106
|
+
case-0019,Reject Request,2024-03-05T08:00:00,Carol
|
|
107
|
+
case-0019,Examine Thoroughly,2024-03-05T10:07:00,Carol
|
|
108
|
+
case-0019,Examine Casually,2024-03-05T13:29:00,Alice
|
|
109
|
+
case-0019,Pay Compensation,2024-03-05T13:41:00,Dave
|
|
110
|
+
case-0019,Reinitiate Request,2024-03-05T16:30:00,Ellen
|
|
111
|
+
case-0019,Check Ticket,2024-03-05T17:05:00,Alice
|
|
112
|
+
case-0019,Decide,2024-03-05T19:32:00,Bob
|
|
113
|
+
case-0020,Examine Thoroughly,2024-03-10T08:00:00,Bob
|
|
114
|
+
case-0020,Reinitiate Request,2024-03-10T10:02:00,Ellen
|
|
115
|
+
case-0020,Register Request,2024-03-10T13:12:00,Carol
|
|
116
|
+
case-0020,Examine Casually,2024-03-10T15:58:00,Ellen
|
|
117
|
+
case-0020,Pay Compensation,2024-03-10T16:10:00,Ellen
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
### AL ###
|
|
2
|
+
#Template for AL projects for Dynamics 365 Business Central
|
|
3
|
+
#launch.json folder
|
|
4
|
+
.vscode/
|
|
5
|
+
#Cache folder
|
|
6
|
+
.alcache/
|
|
7
|
+
#Symbols folder
|
|
8
|
+
.alpackages/
|
|
9
|
+
#Snapshots folder
|
|
10
|
+
.snapshots/
|
|
11
|
+
#Testing Output folder
|
|
12
|
+
.output/
|
|
13
|
+
#Extension App-file
|
|
14
|
+
*.app
|
|
15
|
+
#Rapid Application Development File
|
|
16
|
+
rad.json
|
|
17
|
+
#Translation Base-file
|
|
18
|
+
*.g.xlf
|
|
19
|
+
#License-file
|
|
20
|
+
*.flf
|
|
21
|
+
#Test results file
|
|
22
|
+
TestResults.xml
|
|
23
|
+
|
|
24
|
+
# Python
|
|
25
|
+
__pycache__/
|
|
26
|
+
*.py[cod]
|
|
27
|
+
*.pyo
|
|
28
|
+
.pytest_cache/
|
|
29
|
+
*.egg-info/
|
|
30
|
+
dist/
|
|
31
|
+
build/
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Process Intelligence Research
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
"""Script to generate sample process event log data as CSV."""
|
|
2
|
+
|
|
3
|
+
import csv
|
|
4
|
+
import random
|
|
5
|
+
from datetime import datetime, timedelta
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
ACTIVITIES = [
|
|
10
|
+
"Register Request",
|
|
11
|
+
"Examine Casually",
|
|
12
|
+
"Examine Thoroughly",
|
|
13
|
+
"Check Ticket",
|
|
14
|
+
"Decide",
|
|
15
|
+
"Reject Request",
|
|
16
|
+
"Pay Compensation",
|
|
17
|
+
"Reinitiate Request",
|
|
18
|
+
]
|
|
19
|
+
|
|
20
|
+
RESOURCES = ["Alice", "Bob", "Carol", "Dave", "Ellen"]
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def generate_log(num_cases: int = 20, seed: int = 42) -> list[dict]:
|
|
24
|
+
random.seed(seed)
|
|
25
|
+
rows = []
|
|
26
|
+
base = datetime(2024, 3, 1, 8, 0, 0)
|
|
27
|
+
|
|
28
|
+
for i in range(1, num_cases + 1):
|
|
29
|
+
case_id = f"case-{i:04d}"
|
|
30
|
+
num_events = random.randint(3, len(ACTIVITIES))
|
|
31
|
+
activities = random.sample(ACTIVITIES, num_events)
|
|
32
|
+
timestamp = base + timedelta(days=random.randint(0, 30))
|
|
33
|
+
|
|
34
|
+
for activity in activities:
|
|
35
|
+
rows.append(
|
|
36
|
+
{
|
|
37
|
+
"case_id": case_id,
|
|
38
|
+
"activity": activity,
|
|
39
|
+
"timestamp": timestamp.strftime("%Y-%m-%dT%H:%M:%S"),
|
|
40
|
+
"resource": random.choice(RESOURCES),
|
|
41
|
+
}
|
|
42
|
+
)
|
|
43
|
+
timestamp += timedelta(minutes=random.randint(10, 240))
|
|
44
|
+
|
|
45
|
+
return rows
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def save_csv(rows: list[dict], output_path: Path) -> None:
|
|
49
|
+
output_path.parent.mkdir(parents=True, exist_ok=True)
|
|
50
|
+
with open(output_path, "w", newline="") as f:
|
|
51
|
+
writer = csv.DictWriter(f, fieldnames=["case_id", "activity", "timestamp", "resource"])
|
|
52
|
+
writer.writeheader()
|
|
53
|
+
writer.writerows(rows)
|
|
54
|
+
print(f"Saved {len(rows)} events to {output_path}")
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
if __name__ == "__main__":
|
|
58
|
+
rows = generate_log(num_cases=20)
|
|
59
|
+
save_csv(rows, Path(__file__).parent / "sample_log.csv")
|
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
case_id,activity,timestamp,resource
|
|
2
|
+
case-0001,Examine Casually,2024-03-22T08:00:00,Ellen
|
|
3
|
+
case-0001,Register Request,2024-03-22T08:32:00,Ellen
|
|
4
|
+
case-0001,Reject Request,2024-03-22T10:30:00,Alice
|
|
5
|
+
case-0001,Examine Thoroughly,2024-03-22T10:47:00,Alice
|
|
6
|
+
case-0001,Reinitiate Request,2024-03-22T11:52:00,Bob
|
|
7
|
+
case-0001,Pay Compensation,2024-03-22T14:11:00,Ellen
|
|
8
|
+
case-0001,Decide,2024-03-22T14:27:00,Ellen
|
|
9
|
+
case-0001,Check Ticket,2024-03-22T15:27:00,Ellen
|
|
10
|
+
case-0002,Reinitiate Request,2024-03-25T08:00:00,Bob
|
|
11
|
+
case-0002,Decide,2024-03-25T11:08:00,Dave
|
|
12
|
+
case-0002,Examine Thoroughly,2024-03-25T12:45:00,Carol
|
|
13
|
+
case-0002,Register Request,2024-03-25T13:34:00,Bob
|
|
14
|
+
case-0003,Examine Casually,2024-03-28T08:00:00,Carol
|
|
15
|
+
case-0003,Register Request,2024-03-28T10:44:00,Carol
|
|
16
|
+
case-0003,Check Ticket,2024-03-28T14:20:00,Alice
|
|
17
|
+
case-0003,Pay Compensation,2024-03-28T17:36:00,Dave
|
|
18
|
+
case-0003,Examine Thoroughly,2024-03-28T20:03:00,Alice
|
|
19
|
+
case-0004,Decide,2024-03-20T08:00:00,Carol
|
|
20
|
+
case-0004,Pay Compensation,2024-03-20T10:37:00,Bob
|
|
21
|
+
case-0004,Reject Request,2024-03-20T13:47:00,Alice
|
|
22
|
+
case-0005,Check Ticket,2024-03-15T08:00:00,Carol
|
|
23
|
+
case-0005,Pay Compensation,2024-03-15T08:51:00,Carol
|
|
24
|
+
case-0005,Examine Thoroughly,2024-03-15T10:31:00,Bob
|
|
25
|
+
case-0005,Register Request,2024-03-15T13:32:00,Carol
|
|
26
|
+
case-0005,Examine Casually,2024-03-15T16:41:00,Alice
|
|
27
|
+
case-0005,Decide,2024-03-15T19:26:00,Bob
|
|
28
|
+
case-0005,Reinitiate Request,2024-03-15T21:52:00,Bob
|
|
29
|
+
case-0005,Reject Request,2024-03-15T22:43:00,Dave
|
|
30
|
+
case-0006,Check Ticket,2024-03-27T08:00:00,Alice
|
|
31
|
+
case-0006,Reject Request,2024-03-27T11:36:00,Carol
|
|
32
|
+
case-0006,Examine Thoroughly,2024-03-27T13:28:00,Carol
|
|
33
|
+
case-0006,Register Request,2024-03-27T13:54:00,Bob
|
|
34
|
+
case-0006,Examine Casually,2024-03-27T16:29:00,Carol
|
|
35
|
+
case-0007,Reinitiate Request,2024-03-24T08:00:00,Ellen
|
|
36
|
+
case-0007,Check Ticket,2024-03-24T10:27:00,Carol
|
|
37
|
+
case-0007,Reject Request,2024-03-24T13:48:00,Ellen
|
|
38
|
+
case-0007,Pay Compensation,2024-03-24T15:47:00,Ellen
|
|
39
|
+
case-0007,Examine Casually,2024-03-24T17:39:00,Carol
|
|
40
|
+
case-0007,Decide,2024-03-24T18:45:00,Bob
|
|
41
|
+
case-0007,Register Request,2024-03-24T21:05:00,Dave
|
|
42
|
+
case-0007,Examine Thoroughly,2024-03-24T21:38:00,Alice
|
|
43
|
+
case-0008,Examine Thoroughly,2024-03-26T08:00:00,Dave
|
|
44
|
+
case-0008,Reject Request,2024-03-26T10:42:00,Alice
|
|
45
|
+
case-0008,Examine Casually,2024-03-26T12:30:00,Dave
|
|
46
|
+
case-0009,Decide,2024-03-11T08:00:00,Alice
|
|
47
|
+
case-0009,Reinitiate Request,2024-03-11T09:25:00,Dave
|
|
48
|
+
case-0009,Register Request,2024-03-11T10:15:00,Dave
|
|
49
|
+
case-0009,Reject Request,2024-03-11T10:25:00,Carol
|
|
50
|
+
case-0009,Examine Thoroughly,2024-03-11T12:43:00,Bob
|
|
51
|
+
case-0009,Check Ticket,2024-03-11T15:02:00,Alice
|
|
52
|
+
case-0010,Decide,2024-03-18T08:00:00,Ellen
|
|
53
|
+
case-0010,Pay Compensation,2024-03-18T08:10:00,Ellen
|
|
54
|
+
case-0010,Reject Request,2024-03-18T09:42:00,Dave
|
|
55
|
+
case-0010,Reinitiate Request,2024-03-18T09:56:00,Alice
|
|
56
|
+
case-0010,Examine Casually,2024-03-18T11:38:00,Carol
|
|
57
|
+
case-0010,Register Request,2024-03-18T12:49:00,Alice
|
|
58
|
+
case-0010,Check Ticket,2024-03-18T14:00:00,Ellen
|
|
59
|
+
case-0010,Examine Thoroughly,2024-03-18T14:30:00,Alice
|
|
60
|
+
case-0011,Examine Casually,2024-03-16T08:00:00,Ellen
|
|
61
|
+
case-0011,Pay Compensation,2024-03-16T08:52:00,Carol
|
|
62
|
+
case-0011,Decide,2024-03-16T11:17:00,Ellen
|
|
63
|
+
case-0011,Reinitiate Request,2024-03-16T13:15:00,Bob
|
|
64
|
+
case-0011,Reject Request,2024-03-16T15:43:00,Bob
|
|
65
|
+
case-0011,Examine Thoroughly,2024-03-16T18:55:00,Carol
|
|
66
|
+
case-0012,Reject Request,2024-03-11T08:00:00,Alice
|
|
67
|
+
case-0012,Check Ticket,2024-03-11T10:40:00,Ellen
|
|
68
|
+
case-0012,Decide,2024-03-11T11:48:00,Ellen
|
|
69
|
+
case-0012,Pay Compensation,2024-03-11T12:54:00,Alice
|
|
70
|
+
case-0012,Register Request,2024-03-11T13:22:00,Alice
|
|
71
|
+
case-0012,Reinitiate Request,2024-03-11T14:30:00,Alice
|
|
72
|
+
case-0012,Examine Thoroughly,2024-03-11T14:48:00,Carol
|
|
73
|
+
case-0012,Examine Casually,2024-03-11T15:16:00,Ellen
|
|
74
|
+
case-0013,Reinitiate Request,2024-03-08T08:00:00,Dave
|
|
75
|
+
case-0013,Examine Casually,2024-03-08T11:36:00,Dave
|
|
76
|
+
case-0013,Decide,2024-03-08T12:34:00,Alice
|
|
77
|
+
case-0013,Pay Compensation,2024-03-08T13:08:00,Dave
|
|
78
|
+
case-0013,Check Ticket,2024-03-08T14:48:00,Dave
|
|
79
|
+
case-0014,Register Request,2024-03-24T08:00:00,Carol
|
|
80
|
+
case-0014,Reject Request,2024-03-24T11:34:00,Alice
|
|
81
|
+
case-0014,Pay Compensation,2024-03-24T12:47:00,Bob
|
|
82
|
+
case-0014,Reinitiate Request,2024-03-24T13:45:00,Ellen
|
|
83
|
+
case-0014,Decide,2024-03-24T15:49:00,Bob
|
|
84
|
+
case-0014,Examine Casually,2024-03-24T17:47:00,Bob
|
|
85
|
+
case-0015,Check Ticket,2024-03-21T08:00:00,Ellen
|
|
86
|
+
case-0015,Pay Compensation,2024-03-21T11:44:00,Alice
|
|
87
|
+
case-0015,Register Request,2024-03-21T12:17:00,Bob
|
|
88
|
+
case-0015,Reinitiate Request,2024-03-21T13:09:00,Dave
|
|
89
|
+
case-0015,Reject Request,2024-03-21T15:23:00,Dave
|
|
90
|
+
case-0015,Decide,2024-03-21T16:27:00,Dave
|
|
91
|
+
case-0016,Pay Compensation,2024-03-30T08:00:00,Dave
|
|
92
|
+
case-0016,Register Request,2024-03-30T09:23:00,Dave
|
|
93
|
+
case-0016,Check Ticket,2024-03-30T12:31:00,Ellen
|
|
94
|
+
case-0016,Examine Thoroughly,2024-03-30T15:30:00,Dave
|
|
95
|
+
case-0017,Decide,2024-03-24T08:00:00,Ellen
|
|
96
|
+
case-0017,Examine Casually,2024-03-24T08:25:00,Carol
|
|
97
|
+
case-0017,Register Request,2024-03-24T08:49:00,Alice
|
|
98
|
+
case-0017,Reinitiate Request,2024-03-24T11:28:00,Dave
|
|
99
|
+
case-0018,Examine Thoroughly,2024-03-22T08:00:00,Bob
|
|
100
|
+
case-0018,Register Request,2024-03-22T09:53:00,Alice
|
|
101
|
+
case-0018,Decide,2024-03-22T13:50:00,Ellen
|
|
102
|
+
case-0018,Pay Compensation,2024-03-22T15:03:00,Ellen
|
|
103
|
+
case-0018,Examine Casually,2024-03-22T17:45:00,Alice
|
|
104
|
+
case-0018,Reject Request,2024-03-22T20:33:00,Alice
|
|
105
|
+
case-0018,Reinitiate Request,2024-03-22T22:30:00,Ellen
|
|
106
|
+
case-0019,Reject Request,2024-03-05T08:00:00,Carol
|
|
107
|
+
case-0019,Examine Thoroughly,2024-03-05T10:07:00,Carol
|
|
108
|
+
case-0019,Examine Casually,2024-03-05T13:29:00,Alice
|
|
109
|
+
case-0019,Pay Compensation,2024-03-05T13:41:00,Dave
|
|
110
|
+
case-0019,Reinitiate Request,2024-03-05T16:30:00,Ellen
|
|
111
|
+
case-0019,Check Ticket,2024-03-05T17:05:00,Alice
|
|
112
|
+
case-0019,Decide,2024-03-05T19:32:00,Bob
|
|
113
|
+
case-0020,Examine Thoroughly,2024-03-10T08:00:00,Bob
|
|
114
|
+
case-0020,Reinitiate Request,2024-03-10T10:02:00,Ellen
|
|
115
|
+
case-0020,Register Request,2024-03-10T13:12:00,Carol
|
|
116
|
+
case-0020,Examine Casually,2024-03-10T15:58:00,Ellen
|
|
117
|
+
case-0020,Pay Compensation,2024-03-10T16:10:00,Ellen
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["hatchling"]
|
|
3
|
+
build-backend = "hatchling.build"
|
|
4
|
+
|
|
5
|
+
[project]
|
|
6
|
+
name = "process-intelligence"
|
|
7
|
+
version = "9.0.0"
|
|
8
|
+
description = "A sample process intelligence Python project."
|
|
9
|
+
readme = "README.md"
|
|
10
|
+
requires-python = ">=3.10"
|
|
11
|
+
keywords = [
|
|
12
|
+
"process intelligence",
|
|
13
|
+
]
|
|
14
|
+
authors = [
|
|
15
|
+
{ name = "Lukas Schulze Balhorn", email = "l.schulzebalhorn@tudelft.nl" },
|
|
16
|
+
]
|
|
17
|
+
classifiers = [
|
|
18
|
+
"Programming Language :: Python :: 3",
|
|
19
|
+
"License :: OSI Approved :: MIT License",
|
|
20
|
+
"Operating System :: OS Independent",
|
|
21
|
+
]
|
|
22
|
+
dependencies = []
|
|
23
|
+
|
|
24
|
+
[project.optional-dependencies]
|
|
25
|
+
dev = [
|
|
26
|
+
"pre-commit>=3.7.1",
|
|
27
|
+
"ruff>=0.3.0",
|
|
28
|
+
"pytest>=7.0",
|
|
29
|
+
]
|
|
30
|
+
|
|
31
|
+
[project.urls]
|
|
32
|
+
Homepage = "https://github.com/LukasSchBal/test"
|
|
33
|
+
Source = "https://github.com/LukasSchBal/test-dev"
|
|
34
|
+
|
|
35
|
+
[tool.ruff]
|
|
36
|
+
line-length = 100
|
|
37
|
+
|
|
38
|
+
[tool.hatch.build.targets.wheel]
|
|
39
|
+
packages = ["src", "utils"]
|
|
40
|
+
|
|
41
|
+
[tool.ruff.lint]
|
|
42
|
+
extend-select = ["Q", "RUF100", "UP", "I", "E", "F"]
|
|
43
|
+
extend-ignore = [
|
|
44
|
+
"E501", # line too long
|
|
45
|
+
]
|
|
46
|
+
isort = { combine-as-imports = true }
|
|
47
|
+
|
|
48
|
+
[tool.ruff.lint.pydocstyle]
|
|
49
|
+
convention = "numpy"
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# tests package
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["hatchling"]
|
|
3
|
+
build-backend = "hatchling.build"
|
|
4
|
+
|
|
5
|
+
[project]
|
|
6
|
+
name = "process-intelligence"
|
|
7
|
+
version = "9.0.0"
|
|
8
|
+
description = "A sample process intelligence Python project."
|
|
9
|
+
readme = "README.md"
|
|
10
|
+
requires-python = ">=3.10"
|
|
11
|
+
keywords = [
|
|
12
|
+
"process intelligence",
|
|
13
|
+
]
|
|
14
|
+
authors = [
|
|
15
|
+
{ name = "Lukas Schulze Balhorn", email = "l.schulzebalhorn@tudelft.nl" },
|
|
16
|
+
]
|
|
17
|
+
classifiers = [
|
|
18
|
+
"Programming Language :: Python :: 3",
|
|
19
|
+
"License :: OSI Approved :: MIT License",
|
|
20
|
+
"Operating System :: OS Independent",
|
|
21
|
+
]
|
|
22
|
+
dependencies = []
|
|
23
|
+
|
|
24
|
+
[project.optional-dependencies]
|
|
25
|
+
dev = [
|
|
26
|
+
"pre-commit>=3.7.1",
|
|
27
|
+
"ruff>=0.3.0",
|
|
28
|
+
"pytest>=7.0",
|
|
29
|
+
]
|
|
30
|
+
|
|
31
|
+
[project.urls]
|
|
32
|
+
Homepage = "https://github.com/LukasSchBal/test"
|
|
33
|
+
Source = "https://github.com/LukasSchBal/test-dev"
|
|
34
|
+
|
|
35
|
+
[tool.ruff]
|
|
36
|
+
line-length = 100
|
|
37
|
+
|
|
38
|
+
[tool.hatch.build.targets.wheel]
|
|
39
|
+
packages = ["src", "utils"]
|
|
40
|
+
|
|
41
|
+
[tool.ruff.lint]
|
|
42
|
+
extend-select = ["Q", "RUF100", "UP", "I", "E", "F"]
|
|
43
|
+
extend-ignore = [
|
|
44
|
+
"E501", # line too long
|
|
45
|
+
]
|
|
46
|
+
isort = { combine-as-imports = true }
|
|
47
|
+
|
|
48
|
+
[tool.ruff.lint.pydocstyle]
|
|
49
|
+
convention = "numpy"
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
pytest>=7.0
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# src package
|
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
"""Data models for the application."""
|
|
2
|
+
|
|
3
|
+
from dataclasses import dataclass, field
|
|
4
|
+
from typing import List, Optional
|
|
5
|
+
from datetime import datetime
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
@dataclass
|
|
9
|
+
class Event:
|
|
10
|
+
"""Represents a process event."""
|
|
11
|
+
|
|
12
|
+
event_id: str
|
|
13
|
+
name: str
|
|
14
|
+
timestamp: datetime
|
|
15
|
+
resource: Optional[str] = None
|
|
16
|
+
attributes: dict = field(default_factory=dict)
|
|
17
|
+
|
|
18
|
+
def to_dict(self) -> dict:
|
|
19
|
+
return {
|
|
20
|
+
"event_id": self.event_id,
|
|
21
|
+
"name": self.name,
|
|
22
|
+
"timestamp": self.timestamp.isoformat(),
|
|
23
|
+
"resource": self.resource,
|
|
24
|
+
"attributes": self.attributes,
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
@dataclass
|
|
29
|
+
class Case:
|
|
30
|
+
"""Represents a process case (trace)."""
|
|
31
|
+
|
|
32
|
+
case_id: str
|
|
33
|
+
events: List[Event] = field(default_factory=list)
|
|
34
|
+
|
|
35
|
+
def add_event(self, event: Event) -> None:
|
|
36
|
+
self.events.append(event)
|
|
37
|
+
|
|
38
|
+
@property
|
|
39
|
+
def duration(self) -> Optional[float]:
|
|
40
|
+
if len(self.events) < 2:
|
|
41
|
+
return None
|
|
42
|
+
start = min(e.timestamp for e in self.events)
|
|
43
|
+
end = max(e.timestamp for e in self.events)
|
|
44
|
+
return (end - start).total_seconds()
|
|
45
|
+
|
|
46
|
+
def __len__(self) -> int:
|
|
47
|
+
return len(self.events)
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
@dataclass
class ProcessLog:
    """Collection of cases forming a process event log."""

    log_id: str  # identifier for the whole log
    # Quoted forward reference keeps the annotation lazy; modern builtin generic.
    cases: list["Case"] = field(default_factory=list)  # contained cases/traces

    def add_case(self, case: "Case") -> None:
        """Append *case* to the log."""
        self.cases.append(case)

    @property
    def num_events(self) -> int:
        """Total number of events across all cases."""
        return sum(len(c) for c in self.cases)

    def get_case(self, case_id: str) -> "Case | None":
        """Return the first case whose id equals *case_id*, or None if absent."""
        return next((c for c in self.cases if c.case_id == case_id), None)
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# tests package
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
"""Tests for utils.helpers."""
|
|
2
|
+
|
|
3
|
+
import pytest
|
|
4
|
+
from utils.helpers import generate_id, format_duration, flatten, compute_checksum, chunk_list
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class TestGenerateId:
    """Behavioural checks for generate_id."""

    def test_returns_string(self):
        result = generate_id()
        assert isinstance(result, str)

    def test_prefix_included(self):
        assert generate_id("evt").startswith("evt-")

    def test_unique_ids(self):
        generated = [generate_id() for _ in range(100)]
        assert len(set(generated)) == 100
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class TestFormatDuration:
    """format_duration renders the two coarsest relevant units."""

    def test_seconds_only(self):
        result = format_duration(45.0)
        assert result == "45.0s"

    def test_minutes_and_seconds(self):
        result = format_duration(125.0)
        assert result == "2m 5s"

    def test_hours_and_minutes(self):
        result = format_duration(3661.0)
        assert result == "1h 1m"
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class TestFlatten:
    """flatten collapses arbitrarily nested lists."""

    def test_already_flat(self):
        data = [1, 2, 3]
        assert flatten(data) == data

    def test_nested(self):
        nested = [[1, 2], [3, [4, 5]]]
        assert flatten(nested) == [1, 2, 3, 4, 5]

    def test_empty(self):
        assert flatten([]) == []
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
class TestComputeChecksum:
    """compute_checksum yields stable SHA-256 hex digests."""

    def test_deterministic(self):
        first, second = compute_checksum("hello"), compute_checksum("hello")
        assert first == second

    def test_different_inputs(self):
        assert compute_checksum("foo") != compute_checksum("bar")

    def test_length(self):
        digest = compute_checksum("test")
        assert len(digest) == 64  # SHA-256 hex digest length
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
class TestChunkList:
    """chunk_list splits sequences into fixed-size runs."""

    def test_even_split(self):
        chunks = chunk_list([1, 2, 3, 4], 2)
        assert chunks == [[1, 2], [3, 4]]

    def test_uneven_split(self):
        chunks = chunk_list([1, 2, 3], 2)
        assert chunks == [[1, 2], [3]]

    def test_invalid_size(self):
        with pytest.raises(ValueError):
            chunk_list([1, 2], 0)
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
"""Tests for src.models."""
|
|
2
|
+
|
|
3
|
+
import pytest
|
|
4
|
+
from datetime import datetime, timedelta
|
|
5
|
+
from src.models import Event, Case, ProcessLog
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def make_event(name: str, offset_hours: int = 0) -> Event:
    """Build an Event at the given hour offset on 2024-01-01, resource Alice."""
    ts = datetime(2024, 1, 1, offset_hours)
    return Event(event_id=f"evt-{offset_hours}", name=name, timestamp=ts, resource="Alice")
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class TestEvent:
    """Serialization behaviour of Event.to_dict."""

    def test_to_dict_keys(self):
        serialized = make_event("Start", 0).to_dict()
        expected = {"event_id", "name", "timestamp", "resource", "attributes"}
        assert set(serialized) == expected

    def test_to_dict_timestamp_is_string(self):
        serialized = make_event("Start", 1).to_dict()
        assert isinstance(serialized["timestamp"], str)
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class TestCase:
    """Event bookkeeping and duration computation on Case."""

    def test_add_event_increases_length(self):
        case = Case(case_id="case-001")
        assert len(case) == 0
        case.add_event(make_event("A", 0))
        assert len(case) == 1

    def test_duration_none_for_single_event(self):
        case = Case(case_id="case-002")
        case.add_event(make_event("A", 0))
        assert case.duration is None

    def test_duration_calculated_correctly(self):
        case = Case(case_id="case-003")
        for activity, hour in (("A", 0), ("B", 2)):
            case.add_event(make_event(activity, hour))
        assert case.duration == pytest.approx(2 * 3600)
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
class TestProcessLog:
    """Aggregation and lookup behaviour of ProcessLog."""

    def test_num_events(self):
        log = ProcessLog(log_id="log-001")
        for idx in range(3):
            case = Case(case_id=f"case-{idx}")
            case.add_event(make_event("X", idx))
            case.add_event(make_event("Y", idx + 1))
            log.add_case(case)
        assert log.num_events == 6

    def test_get_case_existing(self):
        log = ProcessLog(log_id="log-002")
        target = Case(case_id="target")
        log.add_case(target)
        assert log.get_case("target") is target

    def test_get_case_missing(self):
        assert ProcessLog(log_id="log-003").get_case("nope") is None
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# utils package
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
"""Miscellaneous helper utilities."""
|
|
2
|
+
|
|
3
|
+
import uuid
|
|
4
|
+
import hashlib
|
|
5
|
+
from typing import Any, List
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def generate_id(prefix: str = "") -> str:
    """Return a short unique identifier with an optional prefix."""
    # Eight hex characters of a UUID4 are plenty for sample-scale uniqueness.
    token = uuid.uuid4().hex[:8]
    if prefix:
        return f"{prefix}-{token}"
    return token
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def format_duration(seconds: float) -> str:
    """Render *seconds* as a compact human-readable duration string."""
    if seconds < 60:
        return f"{seconds:.1f}s"
    whole = int(seconds)  # truncate fractional seconds past the first minute
    minutes, secs = divmod(whole, 60)
    if minutes < 60:
        return f"{minutes}m {secs}s"
    hours, minutes = divmod(minutes, 60)
    return f"{hours}h {minutes}m"
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def flatten(nested: List[Any]) -> List[Any]:
    """Recursively flatten a nested list into one flat list."""
    flat: List[Any] = []
    for element in nested:
        # Only list instances recurse; tuples/strings stay atomic.
        if isinstance(element, list):
            flat += flatten(element)
        else:
            flat += [element]
    return flat
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def compute_checksum(data: str) -> str:
    """Return the SHA-256 hex digest of the given string (UTF-8 encoded)."""
    digest = hashlib.sha256()
    digest.update(data.encode())
    return digest.hexdigest()
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def chunk_list(lst: List[Any], size: int) -> List[List[Any]]:
    """Split *lst* into consecutive chunks of *size* elements."""
    if size <= 0:
        raise ValueError("Chunk size must be a positive integer.")
    chunks: List[List[Any]] = []
    for start in range(0, len(lst), size):
        chunks.append(lst[start : start + size])
    return chunks
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
"""Lightweight logging wrapper."""
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
import sys
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def get_logger(name: str, level: str = "INFO") -> logging.Logger:
    """Return a configured :class:`logging.Logger` instance.

    Parameters
    ----------
    name:
        Logger name, typically ``__name__``.
    level:
        Logging level as a string (e.g. ``"DEBUG"``, ``"INFO"``).
    """
    log = logging.getLogger(name)
    # Attach a stdout handler only once per named logger (loggers are cached
    # by the logging module, so repeat calls must not duplicate handlers).
    if not log.handlers:
        stream_handler = logging.StreamHandler(sys.stdout)
        stream_handler.setFormatter(
            logging.Formatter(
                fmt="%(asctime)s [%(levelname)s] %(name)s: %(message)s",
                datefmt="%Y-%m-%dT%H:%M:%S",
            )
        )
        log.addHandler(stream_handler)
    # Unknown level strings fall back to INFO rather than raising.
    log.setLevel(getattr(logging, level.upper(), logging.INFO))
    return log
|
|
@@ -1,12 +0,0 @@
|
|
|
1
|
-
Metadata-Version: 2.4
|
|
2
|
-
Name: process-intelligence
|
|
3
|
-
Version: 6.0.0
|
|
4
|
-
Summary: A sample process intelligence Python project.
|
|
5
|
-
Requires-Python: >=3.10
|
|
6
|
-
License-File: LICENSE
|
|
7
|
-
Provides-Extra: dev
|
|
8
|
-
Requires-Dist: pytest>=7.0; extra == "dev"
|
|
9
|
-
Dynamic: license-file
|
|
10
|
-
Dynamic: provides-extra
|
|
11
|
-
Dynamic: requires-python
|
|
12
|
-
Dynamic: summary
|
|
@@ -1,12 +0,0 @@
|
|
|
1
|
-
Metadata-Version: 2.4
|
|
2
|
-
Name: process-intelligence
|
|
3
|
-
Version: 6.0.0
|
|
4
|
-
Summary: A sample process intelligence Python project.
|
|
5
|
-
Requires-Python: >=3.10
|
|
6
|
-
License-File: LICENSE
|
|
7
|
-
Provides-Extra: dev
|
|
8
|
-
Requires-Dist: pytest>=7.0; extra == "dev"
|
|
9
|
-
Dynamic: license-file
|
|
10
|
-
Dynamic: provides-extra
|
|
11
|
-
Dynamic: requires-python
|
|
12
|
-
Dynamic: summary
|
|
@@ -1,17 +0,0 @@
|
|
|
1
|
-
LICENSE
|
|
2
|
-
README.md
|
|
3
|
-
setup.py
|
|
4
|
-
process_intelligence.egg-info/PKG-INFO
|
|
5
|
-
process_intelligence.egg-info/SOURCES.txt
|
|
6
|
-
process_intelligence.egg-info/dependency_links.txt
|
|
7
|
-
process_intelligence.egg-info/requires.txt
|
|
8
|
-
process_intelligence.egg-info/top_level.txt
|
|
9
|
-
src/__init__.py
|
|
10
|
-
src/config.py
|
|
11
|
-
src/models.py
|
|
12
|
-
src/new_src_file.py
|
|
13
|
-
tests/test_helpers.py
|
|
14
|
-
tests/test_models.py
|
|
15
|
-
utils/__init__.py
|
|
16
|
-
utils/helpers.py
|
|
17
|
-
utils/logger.py
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|