jupyter-analysis-tools 1.5.0__tar.gz → 1.6.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/.copier-answers.yml +1 -1
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/.pre-commit-config.yaml +4 -4
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/CHANGELOG.md +18 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/PKG-INFO +21 -3
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/README.md +2 -2
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/docs/conf.py +1 -1
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/pyproject.toml +1 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/src/jupyter_analysis_tools/__init__.py +1 -1
- jupyter_analysis_tools-1.6.0/src/jupyter_analysis_tools/datastore.py +166 -0
- jupyter_analysis_tools-1.6.0/src/jupyter_analysis_tools/ssfz2json.py +57 -0
- jupyter_analysis_tools-1.6.0/src/jupyter_analysis_tools/ssfz_compare.py +54 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/src/jupyter_analysis_tools.egg-info/PKG-INFO +21 -3
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/src/jupyter_analysis_tools.egg-info/SOURCES.txt +2 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/src/jupyter_analysis_tools.egg-info/entry_points.txt +1 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/tests/requirements.txt +1 -0
- jupyter_analysis_tools-1.5.0/src/jupyter_analysis_tools/ssfz2json.py +0 -69
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/.editorconfig +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/AUTHORS.rst +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/CONTRIBUTING.rst +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/LICENSE +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/MANIFEST.in +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/ci/requirements.txt +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/docs/_templates/class.rst +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/docs/_templates/module.rst +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/docs/authors.rst +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/docs/changelog.rst +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/docs/contributing.rst +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/docs/index.rst +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/docs/installation.rst +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/docs/readme.rst +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/docs/reference/index.rst +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/docs/requirements.txt +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/docs/spelling_wordlist.txt +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/docs/usage.rst +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/setup.cfg +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/src/jupyter_analysis_tools/analysis.py +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/src/jupyter_analysis_tools/binning.py +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/src/jupyter_analysis_tools/datalocations.py +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/src/jupyter_analysis_tools/distrib.py +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/src/jupyter_analysis_tools/git.py +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/src/jupyter_analysis_tools/plotting.py +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/src/jupyter_analysis_tools/readdata.py +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/src/jupyter_analysis_tools/utils.py +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/src/jupyter_analysis_tools/widgets.py +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/src/jupyter_analysis_tools.egg-info/dependency_links.txt +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/src/jupyter_analysis_tools.egg-info/requires.txt +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/src/jupyter_analysis_tools.egg-info/top_level.txt +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/templates/CHANGELOG.md.j2 +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/testdata/2015-03-20-Silica.ssf.json +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/testdata/2015-03-20-Silica.ssfz +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/testdata/S2842 water.json +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/testdata/S2842 water.pdh +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/testdata/S2843[9].pdh +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/tests/readdata.py +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/tests/utils.py +0 -0
- {jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/tox.ini +0 -0
{jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/.pre-commit-config.yaml
@@ -5,20 +5,20 @@
 exclude: '^(\.tox|ci/templates|\.bumpversion\.cfg|testdata|docs/\w+\.xml|\w+\.drawio)(/|$)'
 repos:
 - repo: https://github.com/pre-commit/pre-commit-hooks
-  rev:
+  rev: v6.0.0
   hooks:
   - id: trailing-whitespace
   - id: end-of-file-fixer
   - id: debug-statements
 - repo: https://github.com/pycqa/isort
-  rev:
+  rev: 6.0.1
   hooks:
   - id: isort
 - repo: https://github.com/psf/black
-  rev:
+  rev: 25.9.0
   hooks:
   - id: black
 - repo: https://github.com/pycqa/flake8
-  rev:
+  rev: 7.3.0
   hooks:
   - id: flake8
{jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/CHANGELOG.md
@@ -1,5 +1,23 @@
 # CHANGELOG

+## v1.6.0 (2025-09-19)
+
+### Bug fixes
+
+* **DataStore**: f-string syntax ([`9166382`](https://github.com/BAMresearch/jupyter-analysis-tools/commit/916638264be58e75fdfba15d9c6a6584ace92199))
+
+* **Tests**: pybis module required for collecting in new datastore module ([`ea6a21d`](https://github.com/BAMresearch/jupyter-analysis-tools/commit/ea6a21df3656dcc5f926aa7ff67a7136806ded3b))
+
+### Features
+
+* **DataStore**: new module for managing objects in OpenBIS ([`cdf0a27`](https://github.com/BAMresearch/jupyter-analysis-tools/commit/cdf0a27c0ae1412acd5329532ec8ec1fa7e6be94))
+
+## v1.5.1 (2025-08-04)
+
+### Bug fixes
+
+* **readSSFZ**: split in two: ssfz2json for converting, ssfz_compare for diff-like compare ([`e8b24fe`](https://github.com/BAMresearch/jupyter-analysis-tools/commit/e8b24fe796430bb4b09fa23dcd204e6162051f79))
+
 ## v1.5.0 (2025-08-04)

 ### Bug fixes
{jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: jupyter-analysis-tools
-Version: 1.5.0
+Version: 1.6.0
 Summary: Yet another Python library with helpers and utilities for data analysis and processing.
 Author-email: Ingo Breßler <ingo.bressler@bam.de>, "Brian R. Pauw" <brian.pauw@bam.de>
 License-Expression: MIT
@@ -35,10 +35,10 @@ Requires-Dist: matplotlib
 Requires-Dist: ipywidgets
 Dynamic: license-file

-# Jupyter Analysis Tools (v1.5.0)
+# Jupyter Analysis Tools (v1.6.0)

 [](https://pypi.org/project/jupyter-analysis-tools)
-[](https://github.com/BAMresearch/jupyter-analysis-tools/compare/v1.5.0...main)
+[](https://github.com/BAMresearch/jupyter-analysis-tools/compare/v1.6.0...main)
 [](https://en.wikipedia.org/wiki/MIT_license)
 [](https://pypi.org/project/jupyter-analysis-tools)
 [](https://pypi.org/project/jupyter-analysis-tools#files)
@@ -97,6 +97,24 @@ are installed:

 # CHANGELOG

+## v1.6.0 (2025-09-19)
+
+### Bug fixes
+
+* **DataStore**: f-string syntax ([`9166382`](https://github.com/BAMresearch/jupyter-analysis-tools/commit/916638264be58e75fdfba15d9c6a6584ace92199))
+
+* **Tests**: pybis module required for collecting in new datastore module ([`ea6a21d`](https://github.com/BAMresearch/jupyter-analysis-tools/commit/ea6a21df3656dcc5f926aa7ff67a7136806ded3b))
+
+### Features
+
+* **DataStore**: new module for managing objects in OpenBIS ([`cdf0a27`](https://github.com/BAMresearch/jupyter-analysis-tools/commit/cdf0a27c0ae1412acd5329532ec8ec1fa7e6be94))
+
+## v1.5.1 (2025-08-04)
+
+### Bug fixes
+
+* **readSSFZ**: split in two: ssfz2json for converting, ssfz_compare for diff-like compare ([`e8b24fe`](https://github.com/BAMresearch/jupyter-analysis-tools/commit/e8b24fe796430bb4b09fa23dcd204e6162051f79))
+
 ## v1.5.0 (2025-08-04)

 ### Bug fixes
{jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/README.md
@@ -1,7 +1,7 @@
-# Jupyter Analysis Tools (v1.5.0)
+# Jupyter Analysis Tools (v1.6.0)

 [](https://pypi.org/project/jupyter-analysis-tools)
-[](https://github.com/BAMresearch/jupyter-analysis-tools/compare/v1.5.0...main)
+[](https://github.com/BAMresearch/jupyter-analysis-tools/compare/v1.6.0...main)
 [](https://en.wikipedia.org/wiki/MIT_license)
 [](https://pypi.org/project/jupyter-analysis-tools)
 [](https://pypi.org/project/jupyter-analysis-tools#files)
jupyter_analysis_tools-1.6.0/src/jupyter_analysis_tools/datastore.py
@@ -0,0 +1,166 @@
+# -*- coding: utf-8 -*-
+# datastore.py
+
+import filecmp
+import getpass
+import tempfile
+import warnings
+from pathlib import Path
+
+from pybis import Openbis
+
+
+class DataStore:
+    url = None
+    _availObj = None
+    _userspace = None
+
+    def __init__(self, url, username=None):
+        self.username = username
+        if self.username is None:
+            self.username = getpass.getuser()
+        print(f"Working as user '{self.username}'.")
+        # to generate PAT you need to login normally
+        self.ds = Openbis(url=self.url, verify_certificates=True)
+        # arg. *save_token* saves the openBIS token to ~/.pybis permanently
+        self.ds.login(
+            self.username,
+            getpass.getpass(prompt=f"Password for {self.username}: "),
+            save_token=False,
+        )
+        # create the PAT with the given name, don't store it
+        self.ds.get_or_create_personal_access_token("test-session")
+
+    @property
+    def userspace(self):
+        uspace = self._userspace
+        if uspace is None:
+            allspaces = self.ds.get_spaces()
+            uspace = allspaces.df[
+                allspaces.df.code.str.endswith(self.username.upper())
+            ].code.values[0]
+            self._userspace = uspace
+        return uspace
+
+    @userspace.setter
+    def userspace(self, name):
+        name = name.upper()
+        if name in self.ds.get_spaces().df.code.values:
+            self._userspace = name
+
+    @staticmethod
+    def identifier(objects, code):
+        return objects[objects.code == code].identifier.tolist()[0]
+
+    def createProject(self, projectName, space, space_prefix=None):
+        """Finds the requested project in the DataStore. If it does not exist,
+        creates a new project with the given code in the given space."""
+        # get available projects, accessible by the current user
+        projectsAvail = self.ds.get_projects()
+        if space_prefix:
+            projectsAvail = [prj for prj in projectsAvail if f"/{space_prefix}_" in prj.identifier]
+        projects = [prj for prj in projectsAvail if prj.code == projectName]
+        assert len(projects) <= 1, f"Multiple projects found for '{projectName}'"
+        dsProject = None
+        if len(projects):  # get the existing object
+            dsProject = projects[0]
+        else:  # create it, if not found
+            print(f"Creating project '{projectName}'")
+            dsProject = self.ds.new_project(code=projectName, space=space)
+            dsProject.save()
+        assert dsProject
+        return dsProject
+
+    def createCollection(self, collName, projectObj, defaultObjType=None):
+        collections = self.ds.get_collections(project=projectObj)
+        dsColl = [coll for coll in collections if coll.code == collName.upper()]
+        if len(dsColl):
+            dsColl = dsColl[0]
+        else:  # create it, if not found
+            print(f"Creating collection '{collName}'")
+            dsColl = self.ds.new_collection(
+                code=collName, type="COLLECTION", project=projectObj, props={"$name": collName}
+            )
+            dsColl.save()
+        assert dsColl
+        # update properties (name, default view and object type) if not set)
+        props = dsColl.props.all()  # props as dict
+        propKey = "$name"
+        if propKey in props and props[propKey] is None:
+            props[propKey] = collName
+        propKey = "$default_collection_view"
+        if propKey in props.keys() and props[propKey] is None:
+            propVal = [
+                item
+                for item in self.ds.get_vocabulary(propKey + "s").get_terms().df.code
+                if "list" in item.lower()
+            ]
+            assert len(propVal)
+            props[propKey] = propVal[0]
+        if defaultObjType:
+            propKey = "$default_object_type"
+            if propKey in props.keys() and props[propKey] is None:
+                props[propKey] = defaultObjType
+        # print(f"Setting '{collName}' properties:\n {props}")
+        dsColl.set_props(props)
+        dsColl.save()
+        return dsColl
+
+    def createObject(
+        self,
+        projectName,
+        collectionName: str = None,
+        space=None,
+        objType: str = None,
+        props: dict = None,
+    ):
+        dsProject = self.createProject(projectName, space)
+        dsColl = None
+        if collectionName is None:  # collectionName is required
+            return None
+        dsColl = self.createCollection(collectionName, dsProject, defaultObjType=objType)
+        obj = None
+        obj = self.ds.get_objects(type=objType, where={"$name": props["$name"]}).objects
+        if len(obj):
+            prefix = objType
+            msg = "'{}' exists already in {}! Updating ...".format(
+                obj[0].props["$name"], obj[0].project.identifier
+            )
+            warnings.warn_explicit(msg, UserWarning, prefix, 0)
+        else:  # does not exist yet
+            objName = f" '{props['$name']}'" if len(props.get("$name", "")) else ""
+            print(f"Creating new {objType}{objName} in {dsColl.identifier}")
+            obj = self.ds.new_object(type=objType, props=props, collection=dsColl)
+        obj.set_props(props)
+        return obj
+
+    def findObjects(self, *args, **kwargs):
+        return self.ds.get_objects(**kwargs)
+
+    def uploadDataset(self, obj, datasetType, fpaths=[]):
+        def _checkFile(localPath, remoteFiles):
+            remoteFile = [f for f in remoteFiles if f.name == localPath.name]
+            if not len(remoteFile):  # file exists in the dataset as well
+                return False
+            return filecmp.cmp(localPath, remoteFile[0], shallow=False)
+
+        if not len(fpaths):
+            return  # nothing to do
+        for dataset in obj.get_datasets(type=datasetType):
+            with tempfile.TemporaryDirectory() as tempdir:
+                dataset.download(destination=tempdir)
+                dsFiles = [f for f in Path(tempdir).rglob("*") if f.is_file()]
+                if len(fpaths) == len(dsFiles):
+                    if all([_checkFile(fpath, dsFiles) for fpath in fpaths]):
+                        print(
+                            f"All local files of {datasetType} match files in dataset, "
+                            "not updating."
+                        )
+                        continue  # skip deletion below
+            print(f"Dataset {datasetType} needs update, deleting existing dataset:")
+            dataset.delete("Needs update")
+        if not len(obj.get_datasets(type=datasetType)):  # didn't exist yet or all deleted
+            dataset = self.ds.new_dataset(
+                type=datasetType, collection=obj.collection, object=obj, files=fpaths
+            )
+            dataset.save()
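For orientation, here is a minimal usage sketch of the new `DataStore` class added above, based only on the code shown in this diff. The openBIS URL, username, project/collection names, object type and dataset type are placeholder assumptions; an openBIS account and the `pybis` package are required. Note that the released constructor passes the class attribute `url` to `Openbis()`, so the sketch sets that attribute before instantiating.

```python
# Hypothetical usage of jupyter_analysis_tools.datastore.DataStore; all names and the URL are placeholders.
from pathlib import Path

from jupyter_analysis_tools.datastore import DataStore

# __init__ reads the connection URL from the class attribute *url*, so set it first.
DataStore.url = "https://openbis.example.org"
store = DataStore(DataStore.url, username="jdoe")  # prompts for the password interactively

# Create (or find) an object in the user's space and attach a local file as a dataset.
obj = store.createObject(
    "MY_PROJECT",
    collectionName="MY_COLLECTION",
    space=store.userspace,
    objType="EXPERIMENTAL_STEP",
    props={"$name": "my first measurement"},
)
store.uploadDataset(obj, "RAW_DATA", fpaths=[Path("measurement.dat")])
```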
jupyter_analysis_tools-1.6.0/src/jupyter_analysis_tools/ssfz2json.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# ssfz2json.py
+
+import argparse
+import json
+import sys
+from pathlib import Path
+
+from jupyter_analysis_tools.readdata import readSSFZ
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description="""
+        Reads and parses the embedded metadata of a .SSFZ file created by Anton Paar SAXSquant
+        software, converts it to JSON format and outputs it to <stdout>.
+        An output file path for the JSON data can be provided by optional argument.
+        """
+    )
+    parser.add_argument(
+        "ssfzPath",
+        type=lambda p: Path(p).absolute(),
+        help="Path of the input .SSFZ file to read.",
+    )
+    parser.add_argument(
+        "-o",
+        "--out",
+        nargs="?",
+        default="stdout",
+        help=(
+            "Output file path to write the JSON data to. If the filename is omitted, "
+            "it is derived from the input file name by adding the .json suffix."
+        ),
+    )
+    args = parser.parse_args()
+    # print(args)
+    if not args.ssfzPath.is_file():
+        print(f"Provided file '{args.ssfzPath}' not found!")
+        return 1
+    data = readSSFZ(args.ssfzPath)
+    json_args = dict(sort_keys=True, indent=2)
+    if args.out == "stdout":
+        print(json.dumps(data, **json_args))
+    else:
+        if args.out is None:
+            args.out = args.ssfzPath.with_suffix(args.ssfzPath.suffix + ".json")
+        if not Path(args.out).parent.is_dir():
+            print(f"Directory of provided output file '{args.out}' does not exist!")
+            return 1
+        with open(args.out, "w") as fd:
+            json.dump(data, fd, **json_args)
+        print(f"Wrote '{args.out}'.")
+    return 0
+
+
+if __name__ == "__main__":
+    sys.exit(main())
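The console script above is a thin wrapper around `readSSFZ` from `jupyter_analysis_tools.readdata`; the same conversion can be done directly from Python, as sketched below. The input path points at one of the test files shipped in this package; the output path is an arbitrary choice.

```python
# Convert the embedded .SSFZ metadata to JSON without the command-line wrapper.
import json
from pathlib import Path

from jupyter_analysis_tools.readdata import readSSFZ

ssfz = Path("testdata/2015-03-20-Silica.ssfz")    # bundled test data file
data = readSSFZ(ssfz)                             # JSON-serializable metadata
with open(ssfz.with_suffix(ssfz.suffix + ".json"), "w") as fd:
    json.dump(data, fd, sort_keys=True, indent=2)  # same formatting as the ssfz2json script
```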
jupyter_analysis_tools-1.6.0/src/jupyter_analysis_tools/ssfz_compare.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+# ssfz2json.py
+
+import argparse
+import difflib
+import json
+import sys
+from pathlib import Path
+
+from jupyter_analysis_tools.readdata import readSSFZ
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description="""
+        Reads and parses the embedded metadata of two .SSFZ files created by Anton Paar
+        SAXSquant software, converts them to JSON format and performs a diff-like comparison
+        which is output on <stdout>.
+        """
+    )
+    parser.add_argument(
+        "fromfile",
+        type=lambda p: Path(p).absolute(),
+        help="Path of the first .SSFZ file to compare.",
+    )
+    parser.add_argument(
+        "tofile",
+        type=lambda p: Path(p).absolute(),
+        help="Path of the second .SSFZ file to compare to.",
+    )
+    json_args = dict(sort_keys=True, indent=2)
+    args = parser.parse_args()
+    # print(args)
+    if not args.fromfile.is_file():
+        print(f"Provided file '{args.fromfile}' not found!")
+        return 1
+    if not args.tofile.is_file():
+        print(f"Provided file '{args.tofile}' not found!")
+        return 1
+    olddata = readSSFZ(args.fromfile)
+    newdata = readSSFZ(args.tofile)
+    diff = difflib.unified_diff(
+        json.dumps(olddata, **json_args).splitlines(keepends=True),
+        json.dumps(newdata, **json_args).splitlines(keepends=True),
+        fromfile=str(args.fromfile),
+        tofile=str(args.tofile),
+    )
+    for line in diff:
+        print(line, end="")
+    return 0
+
+
+if __name__ == "__main__":
+    sys.exit(main())
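Both new modules expose a `main()` that parses `sys.argv`, so they can also be exercised from Python (for example in a quick test) by substituting the argument list, as in the sketch below; the file paths are placeholders.

```python
# Drive the two new entry points programmatically by patching sys.argv (paths are placeholders).
import sys

from jupyter_analysis_tools import ssfz2json, ssfz_compare

sys.argv = ["ssfz2json", "measurement.ssfz", "-o", "measurement.ssfz.json"]
ssfz2json.main()

sys.argv = ["ssfz_compare", "measurement_old.ssfz", "measurement_new.ssfz"]
ssfz_compare.main()
```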
{jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/src/jupyter_analysis_tools.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: jupyter-analysis-tools
-Version: 1.5.0
+Version: 1.6.0
 Summary: Yet another Python library with helpers and utilities for data analysis and processing.
 Author-email: Ingo Breßler <ingo.bressler@bam.de>, "Brian R. Pauw" <brian.pauw@bam.de>
 License-Expression: MIT
@@ -35,10 +35,10 @@ Requires-Dist: matplotlib
 Requires-Dist: ipywidgets
 Dynamic: license-file

-# Jupyter Analysis Tools (v1.5.0)
+# Jupyter Analysis Tools (v1.6.0)

 [](https://pypi.org/project/jupyter-analysis-tools)
-[](https://github.com/BAMresearch/jupyter-analysis-tools/compare/v1.5.0...main)
+[](https://github.com/BAMresearch/jupyter-analysis-tools/compare/v1.6.0...main)
 [](https://en.wikipedia.org/wiki/MIT_license)
 [](https://pypi.org/project/jupyter-analysis-tools)
 [](https://pypi.org/project/jupyter-analysis-tools#files)
@@ -97,6 +97,24 @@ are installed:

 # CHANGELOG

+## v1.6.0 (2025-09-19)
+
+### Bug fixes
+
+* **DataStore**: f-string syntax ([`9166382`](https://github.com/BAMresearch/jupyter-analysis-tools/commit/916638264be58e75fdfba15d9c6a6584ace92199))
+
+* **Tests**: pybis module required for collecting in new datastore module ([`ea6a21d`](https://github.com/BAMresearch/jupyter-analysis-tools/commit/ea6a21df3656dcc5f926aa7ff67a7136806ded3b))
+
+### Features
+
+* **DataStore**: new module for managing objects in OpenBIS ([`cdf0a27`](https://github.com/BAMresearch/jupyter-analysis-tools/commit/cdf0a27c0ae1412acd5329532ec8ec1fa7e6be94))
+
+## v1.5.1 (2025-08-04)
+
+### Bug fixes
+
+* **readSSFZ**: split in two: ssfz2json for converting, ssfz_compare for diff-like compare ([`e8b24fe`](https://github.com/BAMresearch/jupyter-analysis-tools/commit/e8b24fe796430bb4b09fa23dcd204e6162051f79))
+
 ## v1.5.0 (2025-08-04)

 ### Bug fixes
{jupyter_analysis_tools-1.5.0 → jupyter_analysis_tools-1.6.0}/src/jupyter_analysis_tools.egg-info/SOURCES.txt
@@ -27,11 +27,13 @@ src/jupyter_analysis_tools/__init__.py
 src/jupyter_analysis_tools/analysis.py
 src/jupyter_analysis_tools/binning.py
 src/jupyter_analysis_tools/datalocations.py
+src/jupyter_analysis_tools/datastore.py
 src/jupyter_analysis_tools/distrib.py
 src/jupyter_analysis_tools/git.py
 src/jupyter_analysis_tools/plotting.py
 src/jupyter_analysis_tools/readdata.py
 src/jupyter_analysis_tools/ssfz2json.py
+src/jupyter_analysis_tools/ssfz_compare.py
 src/jupyter_analysis_tools/utils.py
 src/jupyter_analysis_tools/widgets.py
 src/jupyter_analysis_tools.egg-info/PKG-INFO
jupyter_analysis_tools-1.5.0/src/jupyter_analysis_tools/ssfz2json.py
@@ -1,69 +0,0 @@
-# -*- coding: utf-8 -*-
-# ssfz2json.py
-
-import argparse
-import json
-import sys
-from pathlib import Path
-
-from jupyter_analysis_tools.readdata import readSSFZ
-
-
-def main():
-    parser = argparse.ArgumentParser(
-        description="""
-        Reads and parses a .SSFZ file created by Anton Paar SAXSquant software and writes them
-        back to disk as .JSON file under the same base name if no other output name was given.
-
-        If two .SSFZ files are provided, a diff-like comparison of metadata is output and the
-        *outPath* argument is ignored.
-        """
-    )
-    parser.add_argument(
-        "-i",
-        "--inPath",
-        type=lambda p: Path(p).absolute(),
-        help="Path of the input .SSFZ file to read.",
-        required=True,
-    )
-    parser.add_argument(
-        "-c",
-        "--comparePath",
-        type=lambda p: Path(p).absolute(),
-        help="Path of a 2nd .SSFZ file to compare its metadata against the 1st one.",
-    )
-    parser.add_argument(
-        "-o",
-        "--outPath",
-        type=lambda p: Path(p).absolute(),
-        help="Output file Path to write the JSON data to.",
-    )
-    json_args = dict(sort_keys=True, indent=2)
-    args = parser.parse_args()
-    if not args.inPath.is_file():
-        print(f"Provided file '{args.inPath}' not found!")
-        return 1
-    in_data = readSSFZ(args.inPath)
-    if args.comparePath is not None:
-        import difflib
-
-        comp_data = readSSFZ(args.comparePath)
-        diff = difflib.unified_diff(
-            json.dumps(in_data, **json_args).splitlines(keepends=True),
-            json.dumps(comp_data, **json_args).splitlines(keepends=True),
-            fromfile=str(args.inPath),
-            tofile=str(args.comparePath),
-        )
-        for line in diff:
-            print(line, end="")
-    else:  # just write JSON to outPath
-        if args.outPath is None:
-            args.outPath = args.inPath.with_suffix(args.inPath.suffix + ".json")
-        with open(args.outPath, "w") as fd:
-            json.dump(in_data, fd, **json_args)
-        print(f"Wrote '{args.outPath}'.")
-    return 0
-
-
-if __name__ == "__main__":
-    sys.exit(main())