jupyter-analysis-tools 1.2.1-py3-none-any.whl → 1.3.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-
 # __init__.py
 
-__version__ = "1.2.1"
+__version__ = "1.3.0"
 
 from .binning import reBin
 from .git import checkRepo, isNBstripoutActivated, isNBstripoutInstalled, isRepo
@@ -68,14 +68,18 @@ def xmlPDHToDict(root):
     while stack:
         elem, parentCont = stack.pop()
         elemCont = {}
-        key = elem.attrib.pop("key", None)
         idx = -1
-        if (
-            not len(list(elem)) and
-            not len(elem.attrib) and
-            not (elem.text and len(elem.text.strip()))
+        key = elem.attrib.pop("key", None)
+        if (  # get a unique key, the key can occur in multiple groups in PDH
+            key is not None and elem.tag == "group" and elem.attrib.get("id", None) is not None
+        ):
+            key = elem.attrib.pop("id")
+        if (  # skip empty elements with a key only early
+            not len(list(elem))
+            and not len(elem.attrib)
+            and not (elem.text and len(elem.text.strip()))
         ):
-            continue  # skip empty elements with a key only early
+            continue
         if elem.tag == "list":
             elemCont = []
         else:  # add attributes & values to dict
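A quick illustration of the new key handling (not from the package itself, the XML snippet is invented): when several `group` elements carry the same `key` attribute, the element's `id` is used instead, so later groups no longer overwrite earlier ones in the resulting dict.

```python
# Minimal sketch of the key selection introduced above; the PDH-like snippet is made up.
import xml.etree.ElementTree as ET

xml = """
<parameter>
    <group key="settings" id="detector"><value>1</value></group>
    <group key="settings" id="goniometer"><value>2</value></group>
</parameter>
"""
for elem in ET.fromstring(xml):
    key = elem.attrib.pop("key", None)
    if (  # prefer the unique group id over a key repeated across groups
        key is not None and elem.tag == "group" and elem.attrib.get("id", None) is not None
    ):
        key = elem.attrib.pop("id")
    print(key)  # -> "detector", "goniometer" instead of "settings" twice
```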
@@ -115,9 +119,8 @@ def xmlPDHToDict(root):
                 else:  # have a key
                     parentCont[parentKey] = {key: elemCont}
             else:  # parentKey exists already
-                if (
-                    not isinstance(parentCont[parentKey], list) and
-                    not isinstance(parentCont[parentKey], dict)
+                if not isinstance(parentCont[parentKey], list) and not isinstance(
+                    parentCont[parentKey], dict
                 ):
                     # if its a plain value before, make a list out of it and append in next step
                     parentCont[parentKey] = [parentCont[parentKey]]
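The hunk above is only a re-wrap of the condition; its effect, restated with made-up values rather than the parser's real state, is:

```python
# A plain value stored earlier under the key is wrapped in a list, ready to be appended to.
parentCont = {"value": 1.23}
parentKey = "value"
if not isinstance(parentCont[parentKey], list) and not isinstance(parentCont[parentKey], dict):
    parentCont[parentKey] = [parentCont[parentKey]]
print(parentCont)  # -> {'value': [1.23]}
```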
@@ -135,7 +138,7 @@ def xmlPDHToDict(root):
     try:
         oldts = result["fileinfo"]["parameter"]["DateTime"]["value"]
         # timestamp seems to be based on around 2009-01-01 (a day give or take)
-        delta = ((39 * 365 + 10) * 24 * 3600)
+        delta = (39 * 365 + 10) * 24 * 3600
         # make it compatible to datetime.datetime routines
         result["fileinfo"]["parameter"]["DateTime"]["value"] = oldts + delta
     except KeyError:
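For orientation (illustrative, not from the package): the offset added here shifts the PDH timestamps, which seem to count seconds from roughly 2009-01-01, onto the Unix epoch so `datetime` routines can consume them. The raw timestamp below is invented.

```python
# Sketch of the timestamp shift above; `oldts` is an invented raw PDH value.
from datetime import datetime, timezone

oldts = 520_000_000                   # seconds counted from ~2009-01-01 (assumed)
delta = (39 * 365 + 10) * 24 * 3600   # offset of that epoch from 1970-01-01 (= 1230768000 s)
print(datetime.fromtimestamp(oldts + delta, tz=timezone.utc))  # a date in mid-2025
```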
@@ -105,6 +105,30 @@ def addEnvScriptsToPATH():
     os.environ["PATH"] = sep.join(environPATH)
 
 
+def networkdriveMapping(cmdOutput: str = None):
+    """Returns a dict of mapping drive letters to network paths (on Windows)."""
+    if isWindows():
+        if cmdOutput is None:
+            proc = subprocess.run(["net", "use"], capture_output=True, text=True, encoding="cp850")
+            cmdOutput = proc.stdout
+        rows = [line.split() for line in cmdOutput.splitlines() if "Windows Network" in line]
+        rows = dict(
+            [row[1:3] for row in rows if row[1].endswith(":") and row[2].startswith("\\\\")]
+        )
+        return rows
+    return {}
+
+
+def makeNetworkdriveAbsolute(filepath, cmdOutput: str = None):
+    """Replaces the drive letter of the given path by the respective network path, if possible."""
+    if isWindows() and not filepath.drive.startswith(r"\\"):
+        drivemap = networkdriveMapping(cmdOutput=cmdOutput)
+        prefix = drivemap.get(filepath.drive, None)
+        if prefix is not None:
+            filepath = Path(prefix).joinpath(*filepath.parts[1:])
+    return filepath
+
+
 def checkWinFor7z():
     """Extend the PATH environment variable for access to the 7-zip executable."""
     if not isWindows():
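A usage sketch for the two new helpers (not from the package docs): the `cmdOutput` parameter allows injecting pre-captured `net use` output, so the parsing can be exercised without running the command. The server, share, and file names below are invented, and the translation only takes effect on Windows.

```python
# Sketch of using the new helpers; the `net use` output and paths are invented.
from pathlib import Path

from jupyter_analysis_tools.utils import makeNetworkdriveAbsolute, networkdriveMapping

sample = (
    "Status       Local     Remote                      Network\n"
    "-----------------------------------------------------------------------\n"
    r"OK           Z:        \\fileserver\share          Microsoft Windows Network"
)
print(networkdriveMapping(cmdOutput=sample))
# on Windows: {'Z:': '\\\\fileserver\\share'}; elsewhere: {}

print(makeNetworkdriveAbsolute(Path(r"Z:\data\run1.pdh"), cmdOutput=sample))
# on Windows: \\fileserver\share\data\run1.pdh; elsewhere the path is returned unchanged
```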
@@ -127,7 +151,7 @@ def extract7z(fn, workdir=None):
     assert os.path.isfile(os.path.join(workdir, fn)), "Provided 7z archive '{}' not found!".format(
         fn
     )
-    print("Extracting archived McDLS results:")
+    print(f"Extracting '{fn}':")
     proc = subprocess.run(
         ["7z", "x", fn],
         cwd=workdir,
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: jupyter-analysis-tools
-Version: 1.2.1
+Version: 1.3.0
 Summary: Yet another Python library with helpers and utilities for data analysis and processing.
 Author-email: Ingo Breßler <dev@ingobressler.net>
 License: MIT license
@@ -53,8 +53,8 @@ Yet another Python library with helpers and utilities for data analysis and proc
     :target: https://pypi.org/project/jupyter-analysis-tools
     :alt: PyPI Package latest release
 
-.. |commits-since| image:: https://img.shields.io/github/commits-since/BAMresearch/jupyter-analysis-tools/v1.2.1.svg
-    :target: https://github.com/BAMresearch/jupyter-analysis-tools/compare/v1.2.1...main
+.. |commits-since| image:: https://img.shields.io/github/commits-since/BAMresearch/jupyter-analysis-tools/v1.3.0.svg
+    :target: https://github.com/BAMresearch/jupyter-analysis-tools/compare/v1.3.0...main
     :alt: Commits since latest release
 
 .. |license| image:: https://img.shields.io/pypi/l/jupyter-analysis-tools.svg
@@ -127,6 +127,36 @@ Note, to combine the coverage data from all the tox environments run:
 
 # CHANGELOG
 
+## v1.3.0 (2025-07-16)
+
+### Bug fixes
+
+* utils.extract7z: informative info message ([`80d2f71`](https://github.com/BAMresearch/jupyter-analysis-tools/commit/80d2f7134992d59f0bfd7f5e7bc27772f77cd452))
+
+### Continuous integration
+
+* coverage: fix coverage artifact name for multiple matrix.os ([`f471599`](https://github.com/BAMresearch/jupyter-analysis-tools/commit/f471599b8c86e29fd20b78f7cbd9291c3a6dd98a))
+
+* testing: test on Windows as well ([`4a83c39`](https://github.com/BAMresearch/jupyter-analysis-tools/commit/4a83c3924bcb820ef0728af40b86a0f6622dfef2))
+
+### Features
+
+* utils.makeNetworkdriveAbsolute: new routines for translating a windows drive letter mount to its network location ([`823a6bf`](https://github.com/BAMresearch/jupyter-analysis-tools/commit/823a6bfe126829381bc14d34578f342f4b9d3e8f))
+
+## v1.2.2 (2025-07-15)
+
+### Bug fixes
+
+* readPDHmeta: use unique dict keys, the xmk *key* can occur in multiple groups in PDH ([`ef41c81`](https://github.com/BAMresearch/jupyter-analysis-tools/commit/ef41c81d40d801b5baf86f56cf9012ca35d2ccde))
+
+### Documentation
+
+* pyproject: revert specify readme+changelog document types ([`1baa762`](https://github.com/BAMresearch/jupyter-analysis-tools/commit/1baa762d441fe0a1b7b663b9d0589de857277426))
+
+* pyproject: specify readme+changelog document types to render overview on pypi correctly ([`6e4d1e5`](https://github.com/BAMresearch/jupyter-analysis-tools/commit/6e4d1e56640b604f971ddca8dabd8d1aff5c9bf1))
+
+* ghpages: make sure .nojekyll exists after purging old html docs ([`4847845`](https://github.com/BAMresearch/jupyter-analysis-tools/commit/4847845cc06884b6e589b26897e83411d649ef4d))
+
 ## v1.2.1 (2025-07-11)
 
 ### Bug fixes
@@ -1,16 +1,16 @@
-jupyter_analysis_tools/__init__.py,sha256=JTPvYYcXYakQiDz7PPUSBwpL9F6SX5ZZaADP8iWJE6A,386
+jupyter_analysis_tools/__init__.py,sha256=tqj5as0-tnD-C7LoDRTIxCvnL0YWYCGfXKmb8H-M-kA,386
 jupyter_analysis_tools/analysis.py,sha256=AiAvUO648f0PYXqLfal1kDH926neasE5c1RYFu9wtYg,1768
 jupyter_analysis_tools/binning.py,sha256=d6eXRC3IOnnJIF25OfEASyWedT71EX2nF7jAgGJ9suQ,14536
 jupyter_analysis_tools/datalocations.py,sha256=BakfiZOMcBwp-_DAn7l57lGWZmZGNnk0j73V75nLBUA,4322
 jupyter_analysis_tools/distrib.py,sha256=uyh2jXDdXR6dfd36CAoE5_psoFF0bfA6l1wletPD7Xo,16515
 jupyter_analysis_tools/git.py,sha256=mqSk5nnAFrmk1_2KFuKVrDWOkRbGbAQOq2N1DfxhNpg,2216
 jupyter_analysis_tools/plotting.py,sha256=L2gwSjlBVK8OneAfSuna3vCJIg2rSEdvd9TfEbM2Als,1183
-jupyter_analysis_tools/readdata.py,sha256=QG0cYA0dX6kmrMIdjHJoMWuIwwZbd0uYitztcxr_AN0,5829
-jupyter_analysis_tools/utils.py,sha256=dFE34fYQS7ivCfNy0nwwNPyBdZhIzW9QrxwjvvIHlIQ,5319
+jupyter_analysis_tools/readdata.py,sha256=6Tncwo3NSYAnyLQzAhDtiUyp1Xpw3CahqQ_5NeGhJqI,6030
+jupyter_analysis_tools/utils.py,sha256=EbRooLCGODH8tjQVE8-OuuPoI4weKLzmxdFY794En_k,6327
 jupyter_analysis_tools/widgets.py,sha256=rA8qPvY9nS1OtykZwXtCTG29K-N_MYFVb5Aj8yK40_s,2996
-jupyter_analysis_tools-1.2.1.dist-info/licenses/AUTHORS.rst,sha256=SUxxgElDBm6WdCbBBFfcr0ZE3SolWL0T0aS5Fym1198,100
-jupyter_analysis_tools-1.2.1.dist-info/licenses/LICENSE,sha256=SrbIwXA1ZLTO6uwZneJMpvdgiC-3fhNl0vwb3ALoY4g,1107
-jupyter_analysis_tools-1.2.1.dist-info/METADATA,sha256=3I-R__QvIkGKunshlZeGeAit7pOjYzAWiUGyyo5BiaQ,37099
-jupyter_analysis_tools-1.2.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-jupyter_analysis_tools-1.2.1.dist-info/top_level.txt,sha256=ei_0x-BF85FLoJ_h67ySwDFowtqus_gI4_0GR466PEU,23
-jupyter_analysis_tools-1.2.1.dist-info/RECORD,,
+jupyter_analysis_tools-1.3.0.dist-info/licenses/AUTHORS.rst,sha256=SUxxgElDBm6WdCbBBFfcr0ZE3SolWL0T0aS5Fym1198,100
+jupyter_analysis_tools-1.3.0.dist-info/licenses/LICENSE,sha256=SrbIwXA1ZLTO6uwZneJMpvdgiC-3fhNl0vwb3ALoY4g,1107
+jupyter_analysis_tools-1.3.0.dist-info/METADATA,sha256=6iqmTX_dO2RwXtQG4oU3mvsjl9fm8Xm3h4LNfpGtv6c,38742
+jupyter_analysis_tools-1.3.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+jupyter_analysis_tools-1.3.0.dist-info/top_level.txt,sha256=ei_0x-BF85FLoJ_h67ySwDFowtqus_gI4_0GR466PEU,23
+jupyter_analysis_tools-1.3.0.dist-info/RECORD,,