easy-utils-dev 2.136-py3-none-any.whl → 2.138-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- easy_utils_dev/debugger.py +1 -1
- easy_utils_dev/ept.py +654 -555
- easy_utils_dev/wsnoclib.py +60 -14
- {easy_utils_dev-2.136.dist-info → easy_utils_dev-2.138.dist-info}/METADATA +1 -1
- {easy_utils_dev-2.136.dist-info → easy_utils_dev-2.138.dist-info}/RECORD +7 -7
- {easy_utils_dev-2.136.dist-info → easy_utils_dev-2.138.dist-info}/WHEEL +0 -0
- {easy_utils_dev-2.136.dist-info → easy_utils_dev-2.138.dist-info}/top_level.txt +0 -0
easy_utils_dev/ept.py
CHANGED
@@ -1,556 +1,655 @@
... 28 unchanged lines (1-28): module imports, __LIBPATH__, the MAPPER physical-slot table and the ns namespace constant ...
 
 class EPTManager :
     def __init__(self ,
-        design_path,
+        design_path=None,
         include_parent_attrs=True ,
         include_grantparent_attrs=False ,
         ept_db_path=f"ept_{getTimestamp()}.db" ,
... 491 unchanged lines (36-526): the remainder of __init__ and the EPTManager methods through the end of get_all_dirs ...
                 'SHELFTYPE' : shelfType ,
             })
             self.logger.debug(f"Source:{sourceNE}/{fullSlot}/{pack.get('type')} -> {spanId} -> {DestinationNE}")
         return _packs
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    #
-
-
-
+    def convert_db_to_xml(self, output_path=None):
+        """
+        Convert the EPT database back to XML format.
+
+        Args:
+            output_path (str): Path where to save the XML file. If None, returns XML string.
+
+        Returns:
+            str: XML content if output_path is None, otherwise None
+        """
+        self.logger.info("Converting EPT database back to XML...")
+
+        db = self.Database()
+
+        # Get all table names (excluding system tables)
+        cursor, conn = db.db_connect()
+        cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name NOT IN ('sqlite_sequence', 'c_dirs', 'c_tmp_site_view')")
+        tables = [row[0] for row in cursor.fetchall()]
+        conn.close()
+
+        # Create XML root element
+        root = ET.Element('EPTdesign')
+        root.set('xmlns', 'http://upm.lucent.com/EPTdesign')
+        root.set('xmlns:xsi', 'http://www.w3.org/2001/XMLSchema-instance')
+        root.set('xsi:schemaLocation', 'http://upm.lucent.com/EPTdesign file:///C:/Users/a1abdelh/Nokia/1830%20EPT%20R23.06.00.FP1/config/eqp/EPTdesign.xsd')
+
+        # Dictionary to store elements by their ID for building hierarchy
+        elements_by_id = {}
+        root_elements = []
+
+        # Process each table
+        for table_name in tables:
+            try:
+                # Get all rows from the table
+                rows = db.execute_dict(f"SELECT * FROM {table_name}")
+
+                for row in rows:
+                    # Create XML element
+                    element = ET.SubElement(root, table_name)
+
+                    # Set attributes from the row data
+                    for key, value in row.items():
+                        if key not in ['parentId', 'parentTag', 'parentAttrs', 'grandparentId', 'grandparentTag', 'grandparentAttrs'] and value is not None:
+                            element.set(key, str(value))
+
+                    # Store element for hierarchy building
+                    element_id = row.get('id') or row.get('parentId')
+                    if element_id:
+                        elements_by_id[element_id] = element
+
+                    # If this is a root element (no parent), add to root_elements
+                    if not row.get('parentId') and not row.get('grandparentId'):
+                        root_elements.append(element)
+
+            except Exception as e:
+                self.logger.warning(f"Error processing table {table_name}: {e}")
+                continue
+
+        # Build hierarchy by moving elements to their correct parent
+        for table_name in tables:
+            try:
+                rows = db.execute_dict(f"SELECT * FROM {table_name}")
+
+                for row in rows:
+                    element_id = row.get('id') or row.get('parentId')
+                    parent_id = row.get('parentId')
+
+                    if element_id and parent_id and element_id in elements_by_id and parent_id in elements_by_id:
+                        # Move element to its parent
+                        parent_element = elements_by_id[parent_id]
+                        element = elements_by_id[element_id]
+
+                        # Remove from root and add to parent
+                        if element in root:
+                            root.remove(element)
+                        parent_element.append(element)
+
+            except Exception as e:
+                self.logger.warning(f"Error building hierarchy for table {table_name}: {e}")
+                continue
+
+        # Convert to string
+        xml_string = ET.tostring(root, encoding='unicode', method='xml')
+
+        # Pretty print the XML
+        try:
+            from xml.dom import minidom
+            dom = minidom.parseString(xml_string)
+            xml_string = dom.toprettyxml(indent=' ')
+        except:
+            pass
+
+        if output_path:
+            with open(output_path, 'w', encoding='utf-8') as f:
+                f.write(xml_string)
+            self.logger.info(f"XML saved to {output_path}")
+            return None
+        else:
+            return xml_string
+
+if __name__ == "__main__" :
+    # XMLFILEPATH = "IGG_2.2_08122025.xml"
+    XMLFILEPATH = "IGG_2.2_08122025.xml"
+    ept = EPTManager(
+        ept_db_path=f"ept_mcc.db" ,
+        design_path=XMLFILEPATH,
+        include_parent_attrs=True ,
+        include_grantparent_attrs=False
+    )
+    ## Convert XML to EPT Database
+    # ept.parse()
+    # ept.create_ept_columns(drop_cols=[])
+    # ept.create_ept_rows()
+
+    # # Get All Dirs
+    # with open(f"ept_{getTimestamp()}.json" , 'w') as file :
+    #     file.write(json.dumps(ept.get_all_dirs() , indent=4))
+
+
+    # from easy_utils_dev.simple_sqlite import initDB
+
+    # db = initDB()
+    # db.config_database_path("ept_1755437540.db")
     # print(db.execute_script("create_dirs.sql"))
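
For reference, the workflow touched by this release can be exercised roughly as follows. This is a minimal sketch based only on the API visible in the diff above; the design and database paths are placeholders, not files shipped with the package.

# Minimal usage sketch (the paths below are placeholder assumptions).
from easy_utils_dev.ept import EPTManager

ept = EPTManager(
    design_path='network_design.ept' ,   # a .ept archive is unzipped first; a plain .xml design is parsed directly
    ept_db_path='network_design.db' ,
)
ept.convert_design()                                             # parse the design and build the SQLite tables and c_* views
xml_text = ept.convert_db_to_xml()                               # new in 2.138: rebuild an EPTdesign XML string from the database
ept.convert_db_to_xml(output_path='network_design_rebuilt.xml')  # or write the rebuilt XML to a file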