irie 0.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of irie might be problematic. Click here for more details.

Files changed (145) hide show
  1. irie/__main__.py +24 -0
  2. irie/apps/__init__.py +5 -0
  3. irie/apps/authentication/__init__.py +1 -0
  4. irie/apps/authentication/admin.py +1 -0
  5. irie/apps/authentication/config.py +6 -0
  6. irie/apps/authentication/forms.py +41 -0
  7. irie/apps/authentication/migrations/__init__.py +1 -0
  8. irie/apps/authentication/models.py +1 -0
  9. irie/apps/authentication/tests.py +1 -0
  10. irie/apps/authentication/urls.py +9 -0
  11. irie/apps/authentication/views.py +53 -0
  12. irie/apps/config.py +8 -0
  13. irie/apps/context_processors.py +5 -0
  14. irie/apps/documents/__init__.py +0 -0
  15. irie/apps/documents/apps.py +7 -0
  16. irie/apps/documents/documents.py +61 -0
  17. irie/apps/documents/migrations/__init__.py +0 -0
  18. irie/apps/documents/tests.py +3 -0
  19. irie/apps/documents/urls.py +12 -0
  20. irie/apps/documents/views.py +27 -0
  21. irie/apps/evaluation/__init__.py +0 -0
  22. irie/apps/evaluation/admin.py +43 -0
  23. irie/apps/evaluation/apps.py +18 -0
  24. irie/apps/evaluation/daemon.py +107 -0
  25. irie/apps/evaluation/identification.py +196 -0
  26. irie/apps/evaluation/migrations/0001_initial.py +25 -0
  27. irie/apps/evaluation/migrations/0002_remove_evaluation_cesmd.py +17 -0
  28. irie/apps/evaluation/migrations/0003_evaluation_asset.py +20 -0
  29. irie/apps/evaluation/migrations/__init__.py +0 -0
  30. irie/apps/evaluation/models.py +72 -0
  31. irie/apps/evaluation/urls.py +16 -0
  32. irie/apps/evaluation/views.py +68 -0
  33. irie/apps/events/__init__.py +0 -0
  34. irie/apps/events/admin.py +9 -0
  35. irie/apps/events/apps.py +12 -0
  36. irie/apps/events/migrations/0001_initial.py +27 -0
  37. irie/apps/events/migrations/0002_alter_event_id.py +18 -0
  38. irie/apps/events/migrations/0003_event_cesmd.py +19 -0
  39. irie/apps/events/migrations/0004_event_record_identifier.py +19 -0
  40. irie/apps/events/migrations/0005_event_asset.py +21 -0
  41. irie/apps/events/migrations/0006_alter_event_event_file.py +18 -0
  42. irie/apps/events/migrations/__init__.py +0 -0
  43. irie/apps/events/models.py +70 -0
  44. irie/apps/events/tests.py +1 -0
  45. irie/apps/events/tests_events.py +240 -0
  46. irie/apps/events/urls.py +29 -0
  47. irie/apps/events/views.py +55 -0
  48. irie/apps/events/views_events.py +215 -0
  49. irie/apps/inventory/CESMD.py +81 -0
  50. irie/apps/inventory/__init__.py +5 -0
  51. irie/apps/inventory/admin.py +10 -0
  52. irie/apps/inventory/apps.py +12 -0
  53. irie/apps/inventory/archive/arcGIS.py +1175 -0
  54. irie/apps/inventory/calid.py +65 -0
  55. irie/apps/inventory/fields.py +5 -0
  56. irie/apps/inventory/forms.py +12 -0
  57. irie/apps/inventory/migrations/0001_initial.py +31 -0
  58. irie/apps/inventory/migrations/0002_assetevaluationmodel_cesmd.py +19 -0
  59. irie/apps/inventory/migrations/0003_auto_20230520_2030.py +23 -0
  60. irie/apps/inventory/migrations/0004_asset.py +27 -0
  61. irie/apps/inventory/migrations/0005_auto_20230731_1802.py +23 -0
  62. irie/apps/inventory/migrations/0006_auto_20230731_1816.py +28 -0
  63. irie/apps/inventory/migrations/0007_auto_20230731_1827.py +24 -0
  64. irie/apps/inventory/migrations/0008_asset_is_complete.py +19 -0
  65. irie/apps/inventory/migrations/0009_auto_20230731_1842.py +29 -0
  66. irie/apps/inventory/migrations/0010_auto_20230801_0025.py +23 -0
  67. irie/apps/inventory/migrations/0011_alter_asset_cgs_data.py +18 -0
  68. irie/apps/inventory/migrations/0012_corridor.py +22 -0
  69. irie/apps/inventory/migrations/0013_alter_asset_cesmd.py +18 -0
  70. irie/apps/inventory/migrations/0014_alter_asset_cesmd.py +18 -0
  71. irie/apps/inventory/migrations/__init__.py +0 -0
  72. irie/apps/inventory/models.py +70 -0
  73. irie/apps/inventory/tables.py +584 -0
  74. irie/apps/inventory/traffic.py +175052 -0
  75. irie/apps/inventory/urls.py +25 -0
  76. irie/apps/inventory/views.py +515 -0
  77. irie/apps/management/__init__.py +0 -0
  78. irie/apps/management/commands/__init__.py +0 -0
  79. irie/apps/networks/__init__.py +0 -0
  80. irie/apps/networks/apps.py +5 -0
  81. irie/apps/networks/forms.py +64 -0
  82. irie/apps/networks/migrations/0001_initial.py +26 -0
  83. irie/apps/networks/migrations/__init__.py +0 -0
  84. irie/apps/networks/models.py +14 -0
  85. irie/apps/networks/networks.py +782 -0
  86. irie/apps/networks/tests.py +1 -0
  87. irie/apps/networks/urls.py +18 -0
  88. irie/apps/networks/views.py +89 -0
  89. irie/apps/prediction/__init__.py +0 -0
  90. irie/apps/prediction/admin.py +9 -0
  91. irie/apps/prediction/apps.py +12 -0
  92. irie/apps/prediction/forms.py +20 -0
  93. irie/apps/prediction/metrics.py +61 -0
  94. irie/apps/prediction/migrations/0001_initial.py +32 -0
  95. irie/apps/prediction/migrations/0002_auto_20230731_1801.py +27 -0
  96. irie/apps/prediction/migrations/0003_rename_assetevaluationmodel_evaluation.py +18 -0
  97. irie/apps/prediction/migrations/0004_delete_evaluation.py +16 -0
  98. irie/apps/prediction/migrations/0005_predictormodel_protocol.py +18 -0
  99. irie/apps/prediction/migrations/0006_alter_predictormodel_protocol.py +18 -0
  100. irie/apps/prediction/migrations/0007_predictormodel_active.py +19 -0
  101. irie/apps/prediction/migrations/0008_predictormodel_description.py +18 -0
  102. irie/apps/prediction/migrations/0009_predictormodel_entry_point.py +19 -0
  103. irie/apps/prediction/migrations/0010_alter_predictormodel_entry_point.py +18 -0
  104. irie/apps/prediction/migrations/0011_remove_predictormodel_entry_point.py +17 -0
  105. irie/apps/prediction/migrations/0012_predictormodel_entry_point.py +18 -0
  106. irie/apps/prediction/migrations/0013_predictormodel_metrics.py +18 -0
  107. irie/apps/prediction/migrations/0014_auto_20240930_0004.py +28 -0
  108. irie/apps/prediction/migrations/0015_alter_predictormodel_render_file.py +18 -0
  109. irie/apps/prediction/migrations/__init__.py +0 -0
  110. irie/apps/prediction/models.py +37 -0
  111. irie/apps/prediction/predictor.py +286 -0
  112. irie/apps/prediction/runners/__init__.py +450 -0
  113. irie/apps/prediction/runners/metrics.py +168 -0
  114. irie/apps/prediction/runners/opensees/__init__.py +0 -0
  115. irie/apps/prediction/runners/opensees/schemas/__init__.py +39 -0
  116. irie/apps/prediction/runners/utilities.py +277 -0
  117. irie/apps/prediction/runners/xmlutils.py +232 -0
  118. irie/apps/prediction/runners/zipped.py +27 -0
  119. irie/apps/prediction/templatetags/__init__.py +0 -0
  120. irie/apps/prediction/templatetags/predictor.py +20 -0
  121. irie/apps/prediction/urls.py +19 -0
  122. irie/apps/prediction/views.py +184 -0
  123. irie/apps/prediction/views_api.py +216 -0
  124. irie/apps/site/__init__.py +0 -0
  125. irie/apps/site/admin.py +1 -0
  126. irie/apps/site/config.py +6 -0
  127. irie/apps/site/migrations/__init__.py +1 -0
  128. irie/apps/site/models.py +2 -0
  129. irie/apps/site/templatetags/__init__.py +0 -0
  130. irie/apps/site/templatetags/indexing.py +7 -0
  131. irie/apps/site/tests.py +1 -0
  132. irie/apps/site/urls.py +8 -0
  133. irie/apps/site/view_sdof.py +40 -0
  134. irie/apps/site/view_utils.py +13 -0
  135. irie/apps/site/views.py +88 -0
  136. irie/core/__init__.py +5 -0
  137. irie/core/asgi.py +12 -0
  138. irie/core/settings.py +223 -0
  139. irie/core/urls.py +39 -0
  140. irie/core/wsgi.py +12 -0
  141. irie-0.0.0.dist-info/METADATA +48 -0
  142. irie-0.0.0.dist-info/RECORD +145 -0
  143. irie-0.0.0.dist-info/WHEEL +5 -0
  144. irie-0.0.0.dist-info/entry_points.txt +2 -0
  145. irie-0.0.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,277 @@
1
+ #===----------------------------------------------------------------------===#
2
+ #
3
+ # STAIRLab -- STructural Artificial Intelligence Laboratory
4
+ #
5
+ #===----------------------------------------------------------------------===#
6
+ #
7
+ import json
8
+ import fnmatch
9
+ import numpy as np
10
+
11
+ from matplotlib import pyplot as plt
12
+ from math import pi
13
+ from collections import defaultdict
14
+ import quakeio
15
+
16
+ try:
17
+ from xmlutils import read_sect_xml3 as read_sect_xml
18
+ from xmlutils import read_xml
19
+ except ImportError:
20
+ from .xmlutils import read_sect_xml3 as read_sect_xml
21
+ from .xmlutils import read_xml
22
+
23
def read_model(filename: str) -> dict:
    """Load an OpenSees JSON model file and index its components by tag.

    Adds three lookup tables to the loaded dictionary -- "sections",
    "materials" and "elements" -- keyed by integer tag, so callers can
    resolve cross references without scanning the raw lists.
    """
    with open(filename, "r") as stream:
        model = json.load(stream)

    sam = model["StructuralAnalysisModel"]
    properties = sam["properties"]
    geometry = sam["geometry"]

    model["sections"] = {int(item["name"]): item for item in properties["sections"]}
    model["materials"] = {int(item["name"]): item for item in properties["uniaxialMaterials"]}
    # Each element gains a "length" slot, left as None here (filled elsewhere).
    model["elements"] = {
        int(item["name"]): dict(item, length=None) for item in geometry["elements"]
    }
    return model
38
+
39
+ # --8<--------------------------------------------------------
40
def damage_states(Dcol):
    """Build fiber-selection filters for the damage-state regions of a
    circular column section of diameter *Dcol* (cover hard-coded at 2.0,
    same length units as the model).

    Returns a dict mapping damage-state labels ("dsr0".."dsr6", "all") to
    filters of the form {"regions": [...], "material": fnmatch-pattern},
    as consumed by iter_section_fibers.
    """
    from opensees import section
    from opensees.section import patch

    cover = 2.0
    Rcol = Dcol / 2
    # Effective cover measured to the flat of an octagon inscribed in the circle.
    coverl = cover + Rcol * (1 / np.cos(np.pi / 8) - 1)

    def bar_ring():
        # Annulus covering the longitudinal reinforcement; a fresh instance
        # per damage state, matching the original construction.
        return section.FiberSection(
            areas=[patch.circ(intRad=Rcol - cover - 2, extRad=Rcol - cover)]
        )

    return {
        #                 external radius     internal radius
        "dsr0": {"regions": [section.PolygonRing(8, Rcol, Rcol - coverl / 4)],
                 "material": "*concr*"},
        "dsr1": {"regions": [bar_ring()],
                 "material": "*steel*"},
        "dsr2": {"regions": [section.PolygonRing(8, Rcol, Rcol - coverl / 4)]},
        "dsr3": {"regions": [section.PolygonRing(8, Rcol - cover / 2, Rcol - 3 * cover / 4)],
                 "material": "*concr*"},
        "dsr4": {"regions": [section.PolygonRing(8, Rcol - 3 * coverl / 4, Rcol - coverl)],
                 "material": "*concr*"},
        "dsr5": {"regions": [bar_ring()],
                 "material": "*concr*"},
        "dsr6": {"regions": [bar_ring()],
                 "material": "*steel*"},
        "all":  {"regions": [section.ConfinedPolygon(8, Rcol)]},
    }
102
+
103
+ # --8<--------------------------------------------------------
104
+
105
def iter_section_fibers(model, s, filt=None, match=None):
    """Yield fibers of section *s*, optionally filtered by region and material.

    Parameters
    ----------
    model: dict with a "materials" table (as built by read_model).
    s:     section dict with a "fibers" list; each fiber has "coord" and
           "material" (a material tag indexing model["materials"]).
    filt:  optional dict with "regions" (containers tested with ``in``
           against the fiber coordinate) and an optional fnmatch "material"
           pattern matched against the lowercased material type.  A missing
           pattern means "*" (any material).
    match: unused; retained for backward compatibility.

    Fixes: removed leftover per-fiber debug prints, and the caller's *filt*
    dict is no longer mutated to inject a default "material" key.
    """
    if filt is None:
        yield from s["fibers"]
        return

    # Default the material pattern without mutating the caller's dict.
    pattern = filt.get("material", "*")
    for fiber in s["fibers"]:
        mat_type = model["materials"][fiber["material"]]["type"].lower()
        if fnmatch.fnmatch(mat_type, pattern) and any(
            fiber["coord"] in region for region in filt["regions"]
        ):
            yield fiber
131
+
132
+
133
def iter_elem_fibers(model: dict, elements: list, sections: list = (0, -1), filt: dict = None):
    """Yield (element, section_number, fiber) triples for the requested
    elements and integration-point positions.

    *sections* holds indices into each element's section list; -1 selects
    the last integration point.  Section numbers in the yielded triples are
    1-based.  Fibers are filtered through iter_section_fibers with *filt*.
    """
    for elem_tag in map(int, elements):
        element = model["elements"][elem_tag]
        section_tags = element["sections"]
        for position in sections:
            index = len(section_tags) - 1 if position == -1 else position
            section = model["sections"][int(section_tags[index])]
            # Follow one level of section indirection (aggregated sections).
            if "section" in section:
                section = model["sections"][int(section["section"])]
            for fiber in iter_section_fibers(model, section, filt):
                yield element, index + 1, fiber
144
+
145
def fiber_strain(recorder_data, el, s, f, t=None):
    """Axial strain at fiber *f* from recorded section deformations.

    Applies the plane-sections relation  eps_fiber = eps - kz*y + ky*z,
    where (z, y) = f["coord"] and the generalized strains are read from
    recorder_data[el][s].  When *t* is given, a single timestep is used;
    otherwise the full histories are combined elementwise.
    """
    record = recorder_data[int(el)][int(s)]
    eps = record["eps"]
    kz = record["kappaZ"]
    ky = record["kappaY"]
    if t is not None:
        eps, kz, ky = eps[t], kz[t], ky[t]

    z, y = f["coord"][0], f["coord"][1]
    return eps - kz * y + ky * z
156
+
157
+
158
# Damage-state region filters precomputed for the three column diameters
# used in the model (84.0, 66.0, 48.0 -- presumably inches; confirm units).
# Selected per element-tag range in getDamageStateStrains.
REGIONS1 = damage_states(84.0)
REGIONS2 = damage_states(66.0)
REGIONS3 = damage_states(48.0)
161
+
162
def getDamageStateStrains(a, dsr, model, elems, strain_data=None):
    """Collect fiber coordinates and strain histories for the damage-state
    regions *dsr* of each element in *elems*.

    Parameters
    ----------
    a:           directory (pathlib.Path-like) containing the
                 ``eleDef<N>.txt`` section-recorder files.
    dsr:         iterable of damage-state labels (keys of the REGIONS* dicts).
    model:       model dict as returned by read_model.
    elems:       element tags to process.
    strain_data: optional cache of parsed recorder files, keyed by file
                 name and updated in place across calls.

    Returns a list with one (X, Y, eps, intFrames, time_indices) tuple per
    element, where eps is an array of per-fiber strain histories.

    Fix: removed two leftover debug prints (one of which materialized a
    full fiber iteration per element just to print it).
    """
    if strain_data is None:
        strain_data = {}

    intFrames = 1

    epsEle = []
    for ele in elems:
        # Column diameter (and hence region set) is encoded in the tag range.
        if ele < 12000:
            regions = REGIONS1
        elif ele < 13000:
            regions = REGIONS2
        else:
            regions = REGIONS3

        # TODO: section choice is hard-coded per element tag; generalize.
        if ele in (2010, 2020, 12010, 12020, 12030, 13010, 13020, 14010, 14020, 14030):
            sec = 4
        else:
            sec = 1

        data_file = f"eleDef{sec}.txt"
        if data_file in strain_data:
            strains = strain_data[data_file]
        else:
            strains = strain_data[data_file] = read_sect_xml(a/f"{data_file}")

        X, Y, epsRaw = zip(*(
            (
                fib["coord"][0], fib["coord"][1],
                fiber_strain(strains, int(e["name"]), s, fib)
            ) for ds in dsr
            for e, s, fib in iter_elem_fibers(model, [ele], [int(sec)-1], filt=regions[ds])
        ))

        eps = np.array([e.T for e in epsRaw])
        epsEle.append((X, Y, eps, intFrames, np.arange(eps.shape[1])))
    return epsEle
203
+
204
def get_DS(a, model, elems, strain_data):
    """Determine the highest damage state reached by each element.

    Damage states dsr1..dsr6 are checked in increasing severity; each
    threshold crossing overwrites the entry, so the final record holds the
    highest state reached and the first timestep at which its threshold
    was crossed.  Negative thresholds are compressive limits (strain <= th),
    positive thresholds tensile limits (strain >= th).

    Returns {element: {"state": int, "time": timestep-or-nan}}.
    """
    labels = [f"dsr{k}" for k in range(1, 7)]
    # Equivalent to list(reversed([0.09, -0.011, -0.005, -0.005, -0.005, 0.002]))
    thresholds = [0.002, -0.005, -0.005, -0.005, -0.011, 0.09]

    ds_by_elem = {el: {"state": 0, "time": np.nan} for el in elems}
    for state, (label, th) in enumerate(zip(labels, thresholds), start=1):
        epsEle = getDamageStateStrains(a, [label], model, elems, strain_data)
        for el, entry in zip(elems, epsEle):
            eps = entry[2]
            for t in range(eps.shape[1]):
                snapshot = eps[:, t]
                if (th < 0 and (snapshot <= th).any()) or (th > 0 and (snapshot >= th).any()):
                    ds_by_elem[el] = {"state": state, "time": t}
                    break
    return ds_by_elem
221
+
222
+
223
def getPeakXML(file, filter="*"):
    """Peak absolute value of every response quantity for every node in an
    OpenSees XML node-recorder file.

    Returns {node: {response: max(|history|)}}.

    NOTE(review): *filter* is currently unused (and shadows the builtin);
    kept as-is for interface compatibility -- confirm whether response
    filtering was intended here.
    """
    nodeOutputs = read_xml(file)
    return {
        node:
        {resp: max(abs(nodeOutputs[node][resp])) for resp in nodeOutputs[node]}
        for node in nodeOutputs
    }
230
+
231
def getPeak(file, other=None):
    """Column-wise peak absolute values of a whitespace-delimited history file.

    Returns (peaks, peak_times, governing_column): *peaks* and *peak_times*
    carry one extra trailing entry for the overall maximum, and
    *governing_column* is the index of the column attaining it.
    *other* is unused (kept for interface compatibility).
    """
    history = np.abs(np.loadtxt(file))
    peaks = np.append(history.max(axis=0), history.max())
    times = history.argmax(axis=0)
    governing = np.argmax(peaks)
    times = np.append(times, times[governing])
    return peaks, times, governing
239
+
240
def husid(accRH, plothusid, dt, lb=0.05, ub=0.95):
    """Arias-intensity (Husid) bounds of an acceleration record.

    Parameters
    ----------
    accRH:     acceleration history (array-like).
    plothusid: when truthy, draw a Husid plot (time axis if *dt* is given,
               timestep axis otherwise).
    dt:        timestep in seconds, or None.
    lb, ub:    normalized-intensity bounds (defaults 5% / 95%).

    Returns (ilb, iub): the first indices where the normalized cumulative
    Arias intensity exceeds *lb* and *ub*.

    Perf fix: the O(n^2) lower-triangular matrix product
    ``np.tril(np.ones(n)) @ accRH**2`` is replaced by the equivalent O(n)
    cumulative sum.
    """
    ai = np.cumsum(np.asarray(accRH)**2)
    husid = ai/ai[-1]
    ilb = next(x for x, val in enumerate(husid) if val > lb)
    iub = next(x for x, val in enumerate(husid) if val > ub)
    if plothusid:
        fig, ax = plt.subplots()
        if dt is not None:
            print("duration between ", f"{100*lb}%", " and ", f"{100*ub}%", " (s): ", dt*(iub-ilb))
            ax.plot(dt*np.arange(len(accRH)), husid)
            ax.set_xlabel("time (s)")
        else:
            ax.plot(np.arange(len(accRH)), husid)
            ax.set_xlabel("timestep")
        ax.axhline(husid[ilb], linestyle=":", label=f"{100*lb}%")
        ax.axhline(husid[iub], linestyle="--", label=f"{100*ub}%")
        ax.set_title("Husid Plot")
        ax.legend()
    return (ilb, iub)
259
+
260
def get_node_values(filename, channels, quant=None):
    """Read a ground-motion file with quakeio and map channel series to nodes.

    Parameters
    ----------
    filename: event file readable by quakeio.read.
    channels: {station_channel_name: (node, dof, rotation)} -- assumed
              layout based on the indexing below; TODO confirm with callers.
    quant:    attribute of the matched channel to extract; defaults to
              "accel" (presumably "veloc"/"displ" also work -- verify
              against the quakeio API).

    Returns {node: {dof: series}}.

    NOTE(review): each channel's parent record is rotated in place at most
    once (tracked by object identity), so the quakeio event object is
    mutated as a side effect of this call.
    """
    if quant is None:
        quant = "accel"

    event = quakeio.read(filename)
    rotated = set()

    nodes = defaultdict(dict)
    for nm, ch in channels.items():
        channel = event.match("l", station_channel=nm)
        # Rotate the whole parent motion only once, even if several of its
        # channels are requested.
        if id(channel._parent) not in rotated:
            channel._parent.rotate(ch[2])
            rotated.add(id(channel._parent))
        series = getattr(channel, quant).data
        nodes[ch[0]][ch[1]] = series

    return nodes
277
+
@@ -0,0 +1,232 @@
1
+ #===----------------------------------------------------------------------===#
2
+ #
3
+ # STAIRLab -- STructural Artificial Intelligence Laboratory
4
+ #
5
+ #===----------------------------------------------------------------------===#
6
+ #
7
+ import re
8
+ import warnings
9
+ import numpy as np
10
+ from collections import defaultdict
11
+ import xml.etree.ElementTree as ET
12
+
13
# Pre-compiled byte patterns for streaming scans of OpenSees XML recorder
# output (the files are read in binary mode).
re_elem_tag = re.compile(rb'eleTag="([0-9]*)"')
re_node_tag = re.compile(rb'nodeTag="([0-9]*)"')
re_sect_num = re.compile(rb'number="([0-9]*)"')
# Fix: was [A-z0-9], which also matched the punctuation between 'Z' and 'a'
# in ASCII ("[", "\", "]", "^", "_", "`"); restricted to alphanumerics.
resp_tag = re.compile(rb"<ResponseType>([A-Za-z0-9]*)</ResponseType>")

class ParseError(Exception):
    """Raised when an expected closing tag is missing from a recorder file."""
19
+
20
def getDictData(allData, curDict):
    """Recursively replace integer column indices in *curDict* with the
    corresponding columns of the 2-D array *allData* (in-place).

    Nested (default)dicts are descended; any integer leaf *i* becomes
    ``allData[:, i]``.  Non-dict inputs are ignored.
    """
    if not isinstance(curDict, (defaultdict, dict)):
        return
    for key, value in curDict.items():
        if isinstance(value, (defaultdict, dict)):
            getDictData(allData, value)
        elif isinstance(value, int):
            curDict[key] = allData[:, value]
27
+
28
+
29
def read_sect_xml3(filename: str) -> dict:
    """Stream-parse an OpenSees section (ElementOutput) XML recorder file.

    Builds {element_tag: {section_number: {response: column_index}}} while
    counting columns, then reads the <Data> payload in a single pass and
    substitutes each column index with its column of the data matrix via
    getDictData.  Duplicate response labels within a section are
    disambiguated with trailing underscores.

    Raises ParseError if an </ElementOutput> close tag is never found.
    NOTE(review): assumes a <Data> section exists; *data* would be unbound
    otherwise -- confirm recorder files always contain one.
    """
    data_dict = defaultdict(lambda: defaultdict(dict))
    counter = 0  # total number of response columns discovered

    with open(filename, "rb") as f:
        try:
            for line in f:
                # Element headers that are not self-closing open a scope.
                if b"<ElementOutput" in line and b"/>" not in line and (elem := re_elem_tag.search(line)):
                    elem_tag = int(elem.group(1).decode())
                    while b"</ElementOutput>" not in line:
                        line = next(f)
                        if b"<GaussPointOutput" in line:
                            sect = re_sect_num.search(line).group(1).decode()

                        elif b"<ResponseType" in line:
                            r_label = resp_tag.search(line).group(1).decode()
                            # Disambiguate repeated labels with underscores.
                            while r_label in data_dict[elem_tag][sect]:
                                r_label += "_"
                            data_dict[elem_tag][sect][r_label] = counter
                            counter += 1


                elif b"<Data>" in line:
                    # Slurp the remainder; everything before </Data> is the
                    # whitespace-separated numeric payload.
                    lines = f.read()
                    lines = lines[:lines.find(b"</Data>")].split()
                    data = np.fromiter(lines, dtype=np.float64, count=len(lines))
        except StopIteration:
            raise ParseError(f"Failed to find end tag in XML file (unknown)")

    getDictData(data.reshape((-1, counter)), data_dict)
    return data_dict
61
+
62
def read_sect_xml1(xml_file):
    """DOM-parse an OpenSees ElementOutput XML recorder file.

    Builds {eleTag: {section_number: {response: column}}} for the FIRST
    Gauss point of each element only, then substitutes data columns in
    place via getDictData.  A duplicate response label is suffixed with a
    single underscore.

    NOTE(review): differs from read_sect_xml2 only in how the <Data> text
    is converted (np.array on a split here) -- presumably kept for
    comparison; confirm before removing either.
    """
    root = ET.parse(xml_file).getroot()

    dataDict = {}
    colCtr = 0  # running count of response columns

    elems = root.findall("ElementOutput")
    for child in elems:

        eleKey = child.attrib["eleTag"]
        # child[0] is the GaussPointOutput, child[0][0] its SectionOutput.
        secKey = child[0].attrib["number"]

        dataDict[eleKey] = {secKey: {}}

        for respCtr in range(len(child[0][0])):
            respKey = child[0][0][respCtr].text
            if respKey in dataDict[eleKey][secKey].keys():
                respKey = respKey + "_"
            dataDict[eleKey][secKey][respKey] = colCtr
            colCtr += 1

    data_element = root.find("Data")
    data = np.array(data_element.text.split(), dtype=float)
    getDictData(data.reshape((-1, colCtr)), dataDict)
    return dataDict
93
+
94
def read_sect_xml2(xml_file):
    """DOM-parse an OpenSees ElementOutput XML recorder file
    (np.fromiter variant of read_sect_xml1).

    Builds {eleTag: {section_number: {response: column}}} for the first
    Gauss point of each element, then substitutes data columns in place
    via getDictData.  A duplicate response label is suffixed with a single
    underscore.
    """
    root = ET.parse(xml_file).getroot()

    dataDict = {}
    column = 0

    for elem in root.findall("ElementOutput"):
        ele_key = elem.attrib["eleTag"]
        gauss = elem[0]                    # first GaussPointOutput only
        sec_key = gauss.attrib["number"]

        responses = {}
        dataDict[ele_key] = {sec_key: responses}

        for resp in gauss[0]:              # ResponseType children
            label = resp.text
            if label in responses:
                label = label + "_"
            responses[label] = column
            column += 1

    data_element = root.find("Data")
    data = np.fromiter(
        (value for text in data_element.itertext() for value in text.split()),
        dtype=float,
    ).reshape((-1, column))
    getDictData(data, dataDict)
    return dataDict
127
+
128
def read_sect_xml0(xml_file):
    """Legacy single-pass DOM parser for section recorder XML
    (original author: Arpit Nema).

    Walks the root's children in document order, accumulating a flat
    header list (used only to size the reshape) alongside the nested
    {eleTag: {section: {response: column}}} mapping; the <Data> child is
    parsed when encountered and substituted via getDictData at the end.
    """
    root = ET.parse(xml_file).getroot()

    hdrs = []      # flat headers; len(hdrs) gives the column count
    dataDict = {}
    colCtr = 0
    for i, child in enumerate(root):
        if child.tag == "TimeOutput":
            hdrs.append(child[0].text)
            dataDict[child[0].text] = colCtr
            colCtr += 1
        elif child.tag == "ElementOutput":
            eleKey = child.attrib["eleTag"]
            secKey = child[0].attrib["number"]
            hdrPre = eleKey + "_" + secKey + "_" + child[0][0].attrib["secTag"]

            dataDict[eleKey] = {secKey: {}}
            for respCtr in range(len(child[0][0])):
                hdrs.append(hdrPre + "_" + child[0][0][respCtr].text)
                respKey = child[0][0][respCtr].text
                # Duplicate labels get a trailing underscore.
                if respKey in dataDict[eleKey][secKey].keys():
                    respKey = respKey + "_"
                dataDict[eleKey][secKey][respKey] = colCtr
                colCtr += 1
        elif child.tag == "Data":
            tmp = child.text

            data = np.array(tmp.replace("\n", "").split(), dtype=float)

            data = data.reshape((-1, len(hdrs)))
    getDictData(data, dataDict)
    return dataDict
161
+
162
+
163
+
164
def read_nodeRH_xml(filename: str) -> dict:
    """Stream-parse an OpenSees node response-history XML recorder file.

    Returns {node_tag: {response: column-array}}.

    Bug fix: the original never recorded any <ResponseType> columns and
    never incremented the column counter, so the final
    ``data.reshape((-1, 0))`` always failed.  Response labels are now
    collected per node (mirroring read_sect_xml3), with duplicate labels
    disambiguated by trailing underscores.

    Raises ParseError if a </NodeOutput> close tag is never found.
    NOTE(review): assumes a <Data> section exists -- confirm recorder
    files always contain one.
    """
    data_dict = defaultdict(dict)
    counter = 0  # total number of response columns discovered
    data = None

    with open(filename, "rb") as f:
        try:
            for line in f:
                if b"<NodeOutput" in line and b"/>" not in line and (node := re_node_tag.search(line)):
                    node_tag = int(node.group(1).decode())
                    while b"</NodeOutput>" not in line:
                        line = next(f)
                        if b"<ResponseType" in line:
                            r_label = resp_tag.search(line).group(1).decode()
                            while r_label in data_dict[node_tag]:
                                r_label += "_"
                            data_dict[node_tag][r_label] = counter
                            counter += 1

                elif b"<Data>" in line:
                    payload = f.read()
                    tokens = payload[:payload.find(b"</Data>")].split()
                    data = np.fromiter(tokens, dtype=np.float64, count=len(tokens))
        except StopIteration:
            raise ParseError("Failed to find end tag in XML file (unknown)")

    getDictData(data.reshape((-1, counter)), data_dict)
    return data_dict
183
+
184
+
185
def read_xml(xml_file):
    """DOM-parse an OpenSees XML recorder file containing ElementOutput
    and/or NodeOutput sections.

    Returns a single mapping mixing element tags (str) ->
    {section: {response: column}} and node tags (int) ->
    {response: column}, with the integer column indices replaced by
    columns of the <Data> matrix via getDictData.  Duplicate response
    labels get a trailing underscore.  Elements without a Gauss-point
    child are skipped with a warning.
    """
    root = ET.parse(xml_file).getroot()

    dataDict = {}
    colCtr = 0

    for child in root.findall("ElementOutput"):

        eleKey = child.attrib["eleTag"]
        try:
            secKey = child[0].attrib["number"]
        except IndexError:
            # Element recorded without a GaussPointOutput child.
            warnings.warn(f"Skipping element '{eleKey}'")
            continue

        dataDict[eleKey] = {secKey: {}}

        for respCtr in range(len(child[0][0])):
            respKey = child[0][0][respCtr].text
            if respKey in dataDict[eleKey][secKey].keys():
                respKey = respKey + "_"
            dataDict[eleKey][secKey][respKey] = colCtr
            colCtr += 1

    for child in root.findall("NodeOutput"):

        tag = int(child.attrib["nodeTag"])

        dataDict[tag] = {}

        for resp in child.findall("ResponseType"):
            respKey = resp.text
            if respKey in dataDict[tag].keys():
                respKey = respKey + "_"
            dataDict[tag][respKey] = colCtr
            colCtr += 1

    data_element = root.find("Data")
    data = np.array(data_element.text.split(), dtype=float)
    getDictData(data.reshape((-1, colCtr)), dataDict)
    return dataDict
232
+
@@ -0,0 +1,27 @@
1
+ import sys
2
+ import zipfile
3
+ import tempfile
4
+ import subprocess
5
+
6
def process_zip_file(archive, main_file, temp_dir):
    """Extract *archive* into a fresh temporary directory and run
    ``python -m opensees <main_file>`` there.

    Parameters
    ----------
    archive:   zip file path or file object accepted by zipfile.ZipFile.
    main_file: script name inside the archive passed to the interpreter.
    temp_dir:  unused -- the original immediately shadowed it with a fresh
               TemporaryDirectory; kept for interface compatibility.

    Returns (stdout, stderr, returncode) from the subprocess.  (The
    original returned None and discarded the exit code; the captured
    output is still printed for debugging.)
    """
    with tempfile.TemporaryDirectory() as workdir:
        # Extract the archive into the temporary directory.
        with zipfile.ZipFile(archive, "r") as zip_ref:
            zip_ref.extractall(workdir)

        # Run inside the extraction directory so relative paths in the
        # analysis script resolve against the archive contents.
        result = subprocess.run(
            [sys.executable, "-m", "opensees", main_file],
            cwd=workdir,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )

    # Print the output for debugging purposes.
    print("STDOUT:", result.stdout.decode())
    print("STDERR:", result.stderr.decode())
    return result.stdout, result.stderr, result.returncode
27
+
File without changes
@@ -0,0 +1,20 @@
1
+ from django import template
2
+ from django.utils.safestring import SafeString
3
+
4
+ register = template.Library()
5
+
6
@register.filter(is_safe=True)
def display_predictor(predictor):
    """Render a predictor's description plus its configuration as an HTML
    table for use in templates.

    Bug fixes: the description was previously emitted twice (the
    ``"\\n".join((out, ...))`` re-embedded the already-accumulated string),
    and each table row was closed with ``<tr>`` instead of ``</tr>``.
    """
    rows = []
    for key, val in predictor.conf.items():
        # Prefer the human-readable name declared in the predictor schema.
        name = predictor.schema["properties"].get(key, {"name": key}).get("name", key)
        rows.append(f"<tr><td>{name}</td><td><code>{val}</code></td></tr>")

    table = ('<table class="table align-items-center"><tbody>'
             + "".join(rows)
             + "</tbody></table>")
    return SafeString(predictor.description + "\n" + table)
@@ -0,0 +1,19 @@
1
+ #===----------------------------------------------------------------------===#
2
+ #
3
+ # STAIRLab -- STructural Artificial Intelligence Laboratory
4
+ #
5
+ #===----------------------------------------------------------------------===#
6
+ #
7
+ # Author: Claudio Perez
8
+ #
9
+ #----------------------------------------------------------------------------#
10
from django.urls import path, re_path
from .views import new_prediction, asset_predictors, predictor_profile, predictor_upload

urlpatterns = [
    # Order matters: the more specific "new" and numeric-profile routes
    # must precede the catch-all predictors listing below.
    re_path("^inventory/[0-9 A-Z-]*/predictors/new", new_prediction),
    re_path("^inventory/[0-9 A-Z-]*/predictors/[0-9]", predictor_profile),
    re_path("upload/", predictor_upload),
    # Fix: an exact duplicate (unnamed) registration of this pattern was
    # removed -- Django resolves the first match, so it was dead.
    re_path("^inventory/[0-9 A-Z-]*/predictors/", asset_predictors, name="asset_predictors"),
]