easy-utils-dev 2.126__py3-none-any.whl → 2.128__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of easy-utils-dev might be problematic. Click here for more details.
- easy_utils_dev/__init__.py +2 -1
- easy_utils_dev/ept.py +338 -0
- easy_utils_dev/ne1830PSS.py +15 -30
- {easy_utils_dev-2.126.dist-info → easy_utils_dev-2.128.dist-info}/METADATA +1 -2
- {easy_utils_dev-2.126.dist-info → easy_utils_dev-2.128.dist-info}/RECORD +7 -7
- easy_utils_dev-2.126.dist-info/license.dat +0 -1
- {easy_utils_dev-2.126.dist-info → easy_utils_dev-2.128.dist-info}/WHEEL +0 -0
- {easy_utils_dev-2.126.dist-info → easy_utils_dev-2.128.dist-info}/top_level.txt +0 -0
easy_utils_dev/__init__.py
CHANGED
easy_utils_dev/ept.py
ADDED
|
@@ -0,0 +1,338 @@
|
|
|
1
|
+
import xml.etree.ElementTree as ET
|
|
2
|
+
from bs4 import BeautifulSoup
|
|
3
|
+
from easy_utils_dev.simple_sqlite import initDB
|
|
4
|
+
from easy_utils_dev.utils import getRandomKey , getTimestamp , lget , mkdirs
|
|
5
|
+
import json , os , glob
|
|
6
|
+
from easy_utils_dev.FastQueue import FastQueue
|
|
7
|
+
from easy_utils_dev.debugger import DEBUGGER
|
|
8
|
+
import zipfile
|
|
9
|
+
import tempfile
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
# Directory this module lives in — used to locate the bundled ept_sql/ scripts.
__LIBPATH__ = os.path.dirname(os.path.abspath(__file__))
# Logical-slot -> physical-slot lookup tables, one per 1830 PSS shelf type.
# MAPPER[<shelfType>]['PHY'][<slotid> - 1] yields the physical slot number
# (consumed by EPTManager.convert_slotid_to_physical_slot).
MAPPER = {
    'PSS32' : {
        "PHY" :[2,20,3,21,4,22,5,23,6,24,7,25,8,26,9,27,10,28,11,29,12,30,13,31,14,32,15,33,16,34,17,35]
    } ,
    'PSS16II' : {
        "PHY" :[3,13,4,14,5,15,6,16,7,17,8,18,9,19,10,20]
    } ,
    'PSS16' : {
        "PHY" :[3,13,4,14,5,15,6,16,7,17,8,18,9,19,10,20]
    } ,
    'PSS8' : {
        "PHY" :[2,8,3,9,4,10,5,11]
    } ,
}
# XML namespace map of the EPT design schema (ElementTree prefix style).
# NOTE(review): appears unused in this module — fix_xml_file strips the
# namespace before parsing; kept for callers that may import it.
ns = {"ept": "http://upm.lucent.com/EPTdesign"}
|
|
28
|
+
|
|
29
|
+
class EPTManager :
|
|
30
|
+
def __init__(self ,
|
|
31
|
+
design_path,
|
|
32
|
+
include_parent_attrs=True ,
|
|
33
|
+
include_grantparent_attrs=False ,
|
|
34
|
+
ept_db_path=f"ept_{getTimestamp()}.db" ,
|
|
35
|
+
debug_name='EPTManager',
|
|
36
|
+
debug_home_path=None
|
|
37
|
+
) -> None:
|
|
38
|
+
self.root = None
|
|
39
|
+
self.logger = DEBUGGER(name=debug_name, homePath=debug_home_path)
|
|
40
|
+
self.design_path = design_path
|
|
41
|
+
self.ept_db_path = ept_db_path
|
|
42
|
+
self.include_parent_attrs = include_parent_attrs
|
|
43
|
+
self.include_grantparent_attrs = include_grantparent_attrs
|
|
44
|
+
self.sites = []
|
|
45
|
+
self.queue = FastQueue(request_max_count=4)
|
|
46
|
+
self.nes = []
|
|
47
|
+
self.tmp_design_path = None
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def convert_slotid_to_physical_slot(self , shType , slotid ) :
|
|
51
|
+
slotid = int(slotid) - 1
|
|
52
|
+
return MAPPER[shType]['PHY'][slotid]
|
|
53
|
+
|
|
54
|
+
def fix_xml_file(self , xml_content ) :
|
|
55
|
+
xml_content = xml_content.splitlines()
|
|
56
|
+
for i , line in enumerate(xml_content) :
|
|
57
|
+
if '<EPTdesign' in line :
|
|
58
|
+
line = line.split(' ')[0]
|
|
59
|
+
line = f"{line}>"
|
|
60
|
+
xml_content[i] = line
|
|
61
|
+
break
|
|
62
|
+
return ''.join(xml_content)
|
|
63
|
+
|
|
64
|
+
def Database(self) :
|
|
65
|
+
db = initDB()
|
|
66
|
+
db.config_database_path(self.ept_db_path)
|
|
67
|
+
return db
|
|
68
|
+
|
|
69
|
+
def create_ept_columns(self , drop_cols=[]) :
|
|
70
|
+
self.logger.info("Creating EPT Database Tables ...")
|
|
71
|
+
db = self.Database()
|
|
72
|
+
drop_cols = [str(col).upper() for col in drop_cols]
|
|
73
|
+
tags = [str(tag.name) for tag in self.root.find_all() ]
|
|
74
|
+
tags = list(set(tags))
|
|
75
|
+
for tagName in tags :
|
|
76
|
+
tags = self.root.find_all(tagName)
|
|
77
|
+
tableColumns = [
|
|
78
|
+
{
|
|
79
|
+
'column' : 'parentId' ,
|
|
80
|
+
'params' : 'TEXT'
|
|
81
|
+
},
|
|
82
|
+
{
|
|
83
|
+
'column' : 'parentTag' ,
|
|
84
|
+
'params' : 'TEXT'
|
|
85
|
+
},
|
|
86
|
+
{
|
|
87
|
+
'column' : 'parentAttrs' ,
|
|
88
|
+
'params' : 'TEXT'
|
|
89
|
+
},
|
|
90
|
+
{
|
|
91
|
+
'column' : 'grandparentId' ,
|
|
92
|
+
'params' : 'TEXT'
|
|
93
|
+
},
|
|
94
|
+
{
|
|
95
|
+
'column' : 'grandparentTag' ,
|
|
96
|
+
'params' : 'TEXT'
|
|
97
|
+
},
|
|
98
|
+
{
|
|
99
|
+
'column' : 'grandparentAttrs' ,
|
|
100
|
+
'params' : 'TEXT'
|
|
101
|
+
},
|
|
102
|
+
]
|
|
103
|
+
added = []
|
|
104
|
+
for tag in tags :
|
|
105
|
+
attrs = tag.attrs
|
|
106
|
+
for attr in list(attrs.keys()) :
|
|
107
|
+
_input = {
|
|
108
|
+
'column' : str(attr) ,
|
|
109
|
+
'params' : 'TEXT'
|
|
110
|
+
}
|
|
111
|
+
if not str(attr).upper() in added and not str(attr).upper() in drop_cols :
|
|
112
|
+
if '-' in str(attr) :
|
|
113
|
+
continue
|
|
114
|
+
self.logger.debug(f'[{tagName}] : Adding Column : {_input}')
|
|
115
|
+
tableColumns.append(_input)
|
|
116
|
+
added.append(str(attr).upper())
|
|
117
|
+
if len(tableColumns) > 0 :
|
|
118
|
+
db.createTable( tableName=tagName , data=tableColumns , autoId=False )
|
|
119
|
+
|
|
120
|
+
    def create_ept_rows(self) :
        """Populate every per-tag table with one row per XML element.

        For each table created by create_ept_columns(), reads the table's
        column list via PRAGMA table_info, builds a row dict per element
        (attribute values plus parent/grandparent linkage) and bulk-inserts.
        Elements without any attributes produce no row.
        """
        self.logger.info("Creating EPT Rows ...")
        tags = [str(tag.name) for tag in self.root.find_all() ]
        tags = list(set(tags))
        # NOTE(review): opens its own handle instead of self.Database() —
        # same effect, just inconsistent with the other methods.
        db = initDB()
        db.config_database_path(self.ept_db_path)
        for tableName in tags :
            tags = self.root.find_all(tableName)
            rows = []
            # column list of the target table, as created by create_ept_columns()
            query = f"PRAGMA table_info({tableName})"
            columns = db.execute_dict(query)
            for tag in tags :
                # start from an all-None template keyed by the table's columns
                template = {}
                for column in columns :
                    template[column['name']] = None
                attrs = tag.attrs
                if len(list(attrs.keys())) > 0 :
                    # copy the element's attribute values into matching columns
                    for key , _ in template.items() :
                        template[key] = attrs.get(key , None)
                    # linkage back to the enclosing elements (e.g. shelf / site)
                    template['parentId'] = tag.parent.attrs.get('id')
                    template['parentTag'] = tag.parent.name
                    template['grandparentId'] = tag.parent.parent.attrs.get('id')
                    template['grandparentTag'] = tag.parent.parent.name
                    if self.include_parent_attrs :
                        template['parentAttrs'] = json.dumps(tag.parent.attrs)
                    if self.include_grantparent_attrs :
                        template['grandparentAttrs'] = json.dumps(tag.parent.parent.attrs)
                    rows.append(template)
                    # print(f"[{tableName}] : Adding Row ")
            if len(rows) > 0 :
                db.insert_to_table_bulk(tableName=tableName , values=rows)
|
|
151
|
+
|
|
152
|
+
def parse(self) :
|
|
153
|
+
if self.design_path.endswith('.ept') :
|
|
154
|
+
self.extract_ept(self.design_path)
|
|
155
|
+
|
|
156
|
+
with open(self.design_path , 'r') as file :
|
|
157
|
+
xml_content = file.read()
|
|
158
|
+
xml_content = self.fix_xml_file(xml_content)
|
|
159
|
+
self.root = BeautifulSoup( xml_content, 'xml')
|
|
160
|
+
return self.root
|
|
161
|
+
|
|
162
|
+
def extract_ept(self , ept_path):
|
|
163
|
+
extract_to = tempfile.gettempdir() + f"/ept_extraction"
|
|
164
|
+
self.logger.debug(f"Extracting .EPT content to '{extract_to}'")
|
|
165
|
+
mkdirs(extract_to)
|
|
166
|
+
with zipfile.ZipFile(ept_path, 'r') as zip_ref:
|
|
167
|
+
zip_ref.extractall(extract_to)
|
|
168
|
+
xml_dir = glob.glob(f"{extract_to}/*.xml")[0]
|
|
169
|
+
self.design_path = xml_dir
|
|
170
|
+
self.tmp_design_path = xml_dir
|
|
171
|
+
self.logger.debug(f"EPT.XML location '{xml_dir}'")
|
|
172
|
+
return xml_dir
|
|
173
|
+
|
|
174
|
+
def _create_v_dirs(self) :
|
|
175
|
+
db = self.Database()
|
|
176
|
+
dirs = self.get_all_dirs()
|
|
177
|
+
|
|
178
|
+
db.createTable(
|
|
179
|
+
'c_dirs' ,
|
|
180
|
+
data=[
|
|
181
|
+
{
|
|
182
|
+
'column' : 'SOURCESITE' ,
|
|
183
|
+
'params' : 'TEXT'
|
|
184
|
+
},
|
|
185
|
+
{
|
|
186
|
+
'column' : 'SOURCEPACKID' ,
|
|
187
|
+
'params' : 'TEXT'
|
|
188
|
+
},
|
|
189
|
+
{
|
|
190
|
+
'column' : 'SPANID' ,
|
|
191
|
+
'params' : 'TEXT'
|
|
192
|
+
},
|
|
193
|
+
{
|
|
194
|
+
'column' : 'SOURCEAPN' ,
|
|
195
|
+
'params' : 'TEXT'
|
|
196
|
+
},
|
|
197
|
+
{
|
|
198
|
+
'column' : 'SOURCEPACKIDREF' ,
|
|
199
|
+
'params' : 'TEXT'
|
|
200
|
+
},
|
|
201
|
+
{
|
|
202
|
+
'column' : 'DESTINATIONSITE' ,
|
|
203
|
+
'params' : 'TEXT'
|
|
204
|
+
},
|
|
205
|
+
{
|
|
206
|
+
'column' : 'SOURCEBOARD' ,
|
|
207
|
+
'params' : 'TEXT'
|
|
208
|
+
},
|
|
209
|
+
{
|
|
210
|
+
'column' : 'SOURCEPHYSICALSLOT' ,
|
|
211
|
+
'params' : 'TEXT'
|
|
212
|
+
},
|
|
213
|
+
{
|
|
214
|
+
'column' : 'FULLSLOT' ,
|
|
215
|
+
'params' : 'TEXT'
|
|
216
|
+
},
|
|
217
|
+
{
|
|
218
|
+
'column' : 'SHELFTYPE' ,
|
|
219
|
+
'params' : 'TEXT'
|
|
220
|
+
}
|
|
221
|
+
]
|
|
222
|
+
)
|
|
223
|
+
db.insert_to_table_bulk(tableName='c_dirs' , values=dirs)
|
|
224
|
+
|
|
225
|
+
|
|
226
|
+
def get_site_data_by_id(self , id ) -> dict :
|
|
227
|
+
db = self.Database()
|
|
228
|
+
query = f"select * from site where id='{id}' "
|
|
229
|
+
siteData = lget(db.execute_dict(query) , 0 , {})
|
|
230
|
+
return siteData
|
|
231
|
+
|
|
232
|
+
def get_all_amplifiers(self) :
|
|
233
|
+
db = self.Database()
|
|
234
|
+
query = f"select * from circuitpack where packIDRef IS NOT NULL and type in (select packName from OAtype where packName is NOT NULL or packName != '')"
|
|
235
|
+
packs = db.execute_dict(query)
|
|
236
|
+
return packs
|
|
237
|
+
|
|
238
|
+
def get_shelf_data_by_id(self , id ) -> dict :
|
|
239
|
+
db = self.Database()
|
|
240
|
+
query = f"select * from shelf where id='{id}' "
|
|
241
|
+
shelfData = lget(db.execute_dict(query) , 0 , {})
|
|
242
|
+
return shelfData
|
|
243
|
+
|
|
244
|
+
def get_ne_data_by_id(self , id ) -> dict :
|
|
245
|
+
db = self.Database()
|
|
246
|
+
query = f"select * from ne where id='{id}' "
|
|
247
|
+
neData = lget(db.execute_dict(query) , 0 , {})
|
|
248
|
+
return neData
|
|
249
|
+
|
|
250
|
+
|
|
251
|
+
def get_table_data_by_id(self , table , id ) :
|
|
252
|
+
db = self.Database()
|
|
253
|
+
query = f"select * from {table} where id='{id}' "
|
|
254
|
+
data = lget(db.execute_dict(query) , 0 , {})
|
|
255
|
+
return data
|
|
256
|
+
|
|
257
|
+
def convert_design(self , drop_cols=[] ) :
|
|
258
|
+
start = getTimestamp()
|
|
259
|
+
db = self.Database()
|
|
260
|
+
self.parse()
|
|
261
|
+
self.create_ept_columns(drop_cols=drop_cols)
|
|
262
|
+
self.create_ept_rows()
|
|
263
|
+
db.execute_script(f"{os.path.join(__LIBPATH__ , 'ept_sql' , 'create_dirs.sql')}")
|
|
264
|
+
self._create_v_dirs()
|
|
265
|
+
end = getTimestamp()
|
|
266
|
+
if os.path.exists(self.tmp_design_path) :
|
|
267
|
+
os.remove(self.tmp_design_path)
|
|
268
|
+
self.logger.info(f"Design converted in {round((end - start)/60 , 2)} mins")
|
|
269
|
+
|
|
270
|
+
def get_all_dirs(self, filter_source_ne=None) :
|
|
271
|
+
db = self.Database()
|
|
272
|
+
packs = self.get_all_amplifiers()
|
|
273
|
+
_packs = []
|
|
274
|
+
def __g_p(pack) :
|
|
275
|
+
parentId = pack['parentId']
|
|
276
|
+
wdmline = pack['wdmline']
|
|
277
|
+
shelf = self.get_shelf_data_by_id(parentId)
|
|
278
|
+
shelfNumber = shelf['number']
|
|
279
|
+
shelfType = shelf['type']
|
|
280
|
+
physicalslot = self.convert_slotid_to_physical_slot(shelfType , pack.get('slotid'))
|
|
281
|
+
grandparentId = shelf['grandparentId']
|
|
282
|
+
ne = self.get_site_data_by_id(grandparentId)
|
|
283
|
+
sourceNE = ne['name']
|
|
284
|
+
if filter_source_ne and filter_source_ne != sourceNE :
|
|
285
|
+
return
|
|
286
|
+
span = self.get_table_data_by_id('line' , wdmline)
|
|
287
|
+
spanId = span['span']
|
|
288
|
+
query = f"select grandparentId from line where span='{spanId}' "
|
|
289
|
+
spans = db.execute_dict(query)
|
|
290
|
+
for span in spans :
|
|
291
|
+
siteData = self.get_site_data_by_id(span['grandparentId'])
|
|
292
|
+
if siteData.get('name') != sourceNE :
|
|
293
|
+
DestinationNE = siteData.get('name')
|
|
294
|
+
break
|
|
295
|
+
fullSlot = f"{shelfNumber}/{physicalslot}"
|
|
296
|
+
_packs.append({
|
|
297
|
+
'SOURCESITE' : sourceNE ,
|
|
298
|
+
'SOURCEPACKID' : pack.get('id') ,
|
|
299
|
+
"SPANID" : spanId ,
|
|
300
|
+
'SOURCEAPN' : pack.get('apn') ,
|
|
301
|
+
'SOURCEPACKIDREF' : pack.get('packidref') ,
|
|
302
|
+
'DESTINATIONSITE' : DestinationNE ,
|
|
303
|
+
'SOURCEBOARD' : pack.get('type') ,
|
|
304
|
+
'SOURCEPHYSICALSLOT' : physicalslot ,
|
|
305
|
+
'FULLSLOT' : fullSlot ,
|
|
306
|
+
'SHELFTYPE' : shelfType ,
|
|
307
|
+
})
|
|
308
|
+
self.logger.debug(f"Source:{sourceNE}/{fullSlot}/{pack.get('type')} -> {spanId} -> {DestinationNE}")
|
|
309
|
+
for pack in packs :
|
|
310
|
+
self.queue.addToQueue(action=__g_p , actionArgs={'pack' : pack})
|
|
311
|
+
self.queue.runQueue(maxRequests=10)
|
|
312
|
+
return _packs
|
|
313
|
+
|
|
314
|
+
|
|
315
|
+
# Ad-hoc manual test driver — only runs when the module is executed directly.
if __name__ == "__main__" :
    # XMLFILEPATH = "IGG_2.2_08122025.xml"
    XMLFILEPATH = "IGG_2.2_08122025.xml"
    ept = EPTManager(
        ept_db_path=f"ept_mcc.db" ,
        design_path=XMLFILEPATH,
        include_parent_attrs=True ,
        include_grantparent_attrs=False
    )
    ## Convert XML to EPT Database
    # ept.parse()
    # ept.create_ept_columns(drop_cols=[])
    # ept.create_ept_rows()

    # # Get All Dirs
    # with open(f"ept_{getTimestamp()}.json" , 'w') as file :
    #     file.write(json.dumps(ept.get_all_dirs() , indent=4))

    # from easy_utils_dev.simple_sqlite import initDB
    # db = initDB()
    # db.config_database_path("ept_1755437540.db")
    # print(db.execute_script("create_dirs.sql"))
|
easy_utils_dev/ne1830PSS.py
CHANGED
|
@@ -13,7 +13,7 @@ from easy_utils_dev import exceptions
|
|
|
13
13
|
|
|
14
14
|
class PSS1830 :
|
|
15
15
|
def __init__(self , sim=False , debug_name='Auto1830PSS' , auto_enable_tcp_forward=False,file_name=None,debug_home_path=None ) -> None:
|
|
16
|
-
self.port =
|
|
16
|
+
self.port = None
|
|
17
17
|
self.logger = DEBUGGER(debug_name,file_name=file_name, homePath=debug_home_path)
|
|
18
18
|
self.connected = False
|
|
19
19
|
self.channel = None
|
|
@@ -45,8 +45,8 @@ class PSS1830 :
|
|
|
45
45
|
self.tcpForwardStatus=None
|
|
46
46
|
self.resetRequired = False
|
|
47
47
|
self.screenBuffer = ""
|
|
48
|
-
self.main_controller_client = None
|
|
49
|
-
self.standby_controller_client = None
|
|
48
|
+
self.main_controller_client : paramiko.SSHClient = None
|
|
49
|
+
self.standby_controller_client : paramiko.SSHClient = None
|
|
50
50
|
self.create_jumphost = self.nfmtJumpServer
|
|
51
51
|
if self.auto_enable_tcp_forward :
|
|
52
52
|
self.logger.info(f'***WARNING*** : Auto enable tcp forwarding is enabled. This will allow tcp fowarding in target machine then restarting sshd service agent.')
|
|
@@ -62,14 +62,14 @@ class PSS1830 :
|
|
|
62
62
|
if level == 'debug' :
|
|
63
63
|
self._change_paramiko_debug
|
|
64
64
|
|
|
65
|
-
def nfmtJumpServer(self, ip , usr , pw ) :
|
|
65
|
+
def nfmtJumpServer(self, ip , usr , pw , port=22 ) :
|
|
66
66
|
self.logger.info(f"""ssh to jump-host -> address={ip}""")
|
|
67
67
|
# print(f"this function will be deperecated soon. use create_jumphost() instead.")
|
|
68
68
|
try :
|
|
69
69
|
self.jumpServerInSameInstance = True
|
|
70
70
|
self.jumpserver = self.createClient()
|
|
71
71
|
self.logger.info(f'connecting to jump-host with {usr}@{ip} port=[{self.port}] ..')
|
|
72
|
-
self.jumpserver.connect(ip ,
|
|
72
|
+
self.jumpserver.connect(ip , port , usr , pw )
|
|
73
73
|
self.isjumpserver = True
|
|
74
74
|
self.nfmtip = ip
|
|
75
75
|
self.nfmtsshuser = usr
|
|
@@ -83,7 +83,7 @@ class PSS1830 :
|
|
|
83
83
|
self.jumpserver.close()
|
|
84
84
|
self.logger.debug(f"""re-establish the connection after modifying the sshd file and restarting the sshd service ..""")
|
|
85
85
|
self.jumpserver = self.createClient()
|
|
86
|
-
self.jumpserver.connect(ip ,
|
|
86
|
+
self.jumpserver.connect(ip , port , usr , pw , banner_timeout=200 , timeout=200, auth_timeout=200)
|
|
87
87
|
self.logger.info(f"""connecting to jump-host [{ip}] - connected""")
|
|
88
88
|
self.connected = True
|
|
89
89
|
self.jumpserver.nfmtip = ip
|
|
@@ -467,10 +467,13 @@ class PSS1830 :
|
|
|
467
467
|
|
|
468
468
|
def switch_to_standby_ec(self) :
|
|
469
469
|
if self.connect_to_standby_ec :
|
|
470
|
-
self.logger.
|
|
471
|
-
|
|
472
|
-
|
|
473
|
-
|
|
470
|
+
self.logger.info(f"connecting to standby EC {self.neip} ...")
|
|
471
|
+
if not self.sim :
|
|
472
|
+
response = self.ssh_execute(ssh=self.client , command=f"ifconfig ilan | awk '/inet / {{print $2}}'")
|
|
473
|
+
if not response :
|
|
474
|
+
raise exceptions.InvalidRemoteEcIp(f"couldn't connect to remote EC")
|
|
475
|
+
if self.sim :
|
|
476
|
+
response = "100.0.81.1"
|
|
474
477
|
if response :
|
|
475
478
|
current_ec_ip = response.replace('\n' , '')
|
|
476
479
|
switcher = {
|
|
@@ -653,22 +656,6 @@ class PSS1830 :
|
|
|
653
656
|
def switch_to_cli_shell(self) :
|
|
654
657
|
return self.client.invoke_shell()
|
|
655
658
|
|
|
656
|
-
|
|
657
|
-
def isRequireAknowledge(self , newRelease) :
|
|
658
|
-
if '-' in str(newRelease) :
|
|
659
|
-
newRelease = float(newRelease.split('-')[0])
|
|
660
|
-
if int(newRelease) in range( 0 , 15 ) :
|
|
661
|
-
return True
|
|
662
|
-
elif int(newRelease) >= 23 :
|
|
663
|
-
return False
|
|
664
|
-
else :
|
|
665
|
-
return True
|
|
666
|
-
|
|
667
|
-
def getTime(self) :
|
|
668
|
-
now = datetime.now()
|
|
669
|
-
dt_string = now.strftime("%d-%m-%Y_%H-%M-%S")
|
|
670
|
-
return dt_string
|
|
671
|
-
|
|
672
659
|
def get_neversion(self,supress_error=False,cache=True) :
|
|
673
660
|
"""
|
|
674
661
|
retreive the NE version depending on connection mode
|
|
@@ -714,7 +701,6 @@ class PSS1830 :
|
|
|
714
701
|
self.close_cliconnection()
|
|
715
702
|
except :
|
|
716
703
|
pass
|
|
717
|
-
|
|
718
704
|
try :
|
|
719
705
|
self.client.close()
|
|
720
706
|
except :
|
|
@@ -735,13 +721,12 @@ class PSS1830 :
|
|
|
735
721
|
cli = f'paging status disable'
|
|
736
722
|
self.cli_execute(cli)
|
|
737
723
|
self.logger.debug('paging disabled.')
|
|
738
|
-
|
|
739
724
|
|
|
740
725
|
def channels_report(self, exportCsv=False) :
|
|
741
726
|
self.logger.info('Generating Channels Report .. ')
|
|
742
727
|
channels = self.get_xcs()
|
|
743
728
|
header = ['NE', 'shelf', 'slot', 'port' , 'powerRx' , "powerTx" , "channel" , "prefec" ,"postFec" , "shape" , "phase" , "trackMode"]
|
|
744
|
-
csvFpath = f"channels_report_{self.host}_{
|
|
729
|
+
csvFpath = f"channels_report_{self.host}_{getTimestamp()}.csv"
|
|
745
730
|
if exportCsv :
|
|
746
731
|
csvFile = open(csvFpath, 'w', encoding='UTF8' , newline='')
|
|
747
732
|
csvFile = csv.writer(csvFile)
|
|
@@ -834,7 +819,7 @@ class PSS1830 :
|
|
|
834
819
|
except :
|
|
835
820
|
return ''
|
|
836
821
|
|
|
837
|
-
def
|
|
822
|
+
def config_backup_db_server(self , ip , user, password , protocol , path , backupname="BACKUP") :
|
|
838
823
|
self.cli_execute(f'config database server ip {ip}')
|
|
839
824
|
self.cli_execute(f'config database server protocol {protocol}')
|
|
840
825
|
self.cli_execute(f'config database path {path}{backupname}')
|
|
@@ -2,7 +2,7 @@ easy_utils_dev/EasySsh.py,sha256=PbFK1vSBcnt_SfkZXZGh5wg6Sg9R2WlXeGdvjZluJ9E,359
|
|
|
2
2
|
easy_utils_dev/Events.py,sha256=MdI53gAyXX_2jmChpayayQM0ZitgjtkyUNrQYGEEZnw,2978
|
|
3
3
|
easy_utils_dev/FastQueue.py,sha256=Drt8B_hEdmg9eAt7OWSgTyoJ3rUHkeJHk9xdaehtEsY,5622
|
|
4
4
|
easy_utils_dev/NameObject.py,sha256=Z4Qp3qfMcQeMPw35PV_xOu8kigRtfSRZ4h7woR0t3Gg,270
|
|
5
|
-
easy_utils_dev/__init__.py,sha256=
|
|
5
|
+
easy_utils_dev/__init__.py,sha256=cHuXcH3IjYRApwVhVjKmgMSVGIJBmGWJhgtsKawq6_0,801
|
|
6
6
|
easy_utils_dev/abortable.py,sha256=n5t-6BVszf4cWzrQ25VzEUUKfmeP4sQ6wzQmiSUSG7M,3043
|
|
7
7
|
easy_utils_dev/brevosmtp.py,sha256=A5n13MnVQnDuSjYQ91-MLftmqfg3VQ-36Zqw9OtoTB4,3184
|
|
8
8
|
easy_utils_dev/check_license.py,sha256=C8vKXwaduoF3FSuDJ-J_j5jStNNyUdS-mOdLNfsCjmc,4825
|
|
@@ -11,12 +11,13 @@ easy_utils_dev/custom_env.py,sha256=vxrjikpSNJlKfoBE-ef88UExlpXucUe-HcwHMn3gfB0,
|
|
|
11
11
|
easy_utils_dev/debugger.py,sha256=08lYSg9Mx0l440aCk4Z1ofNUlN9pTL9um2CL_cCyUKs,18305
|
|
12
12
|
easy_utils_dev/easy_oracle.py,sha256=Jyc3HSl6eyLayjS8NoE4GOaf8otQlonR5_qOg2h1DjE,2157
|
|
13
13
|
easy_utils_dev/encryptor.py,sha256=f5Zjn0DGtXCyhldpVnBtfcTb4h4Wp0eQPHusEYwIags,1512
|
|
14
|
+
easy_utils_dev/ept.py,sha256=SQfp77FwXbWohfS2JzMghoIhWRjo-oqbOZacWiVm49k,12918
|
|
14
15
|
easy_utils_dev/exceptions.py,sha256=6eTYBa8AIXC0wI6zgkqsLreSXyPf459G-ToO7ziQuK4,1669
|
|
15
16
|
easy_utils_dev/filescompressor.py,sha256=iKAtLfkEXOuvvqF56jH0D9KAAeZ7iaa_sRaJnyYkxiE,2875
|
|
16
17
|
easy_utils_dev/generate_license.py,sha256=fr_eoSjKCmDmAEBc6FWFXZxGQOHx9XO6hEK8dcyVUlA,3319
|
|
17
18
|
easy_utils_dev/keycloakapi.py,sha256=eHnc5fSC-YVYQU6EFs3ehutITnkHNJktx5BH_hpfxwM,7116
|
|
18
19
|
easy_utils_dev/lralib.py,sha256=3IjaZbTK_hhyETNP7eALsFyQjWM-jb2rg-rjvgdDDIc,6328
|
|
19
|
-
easy_utils_dev/ne1830PSS.py,sha256=
|
|
20
|
+
easy_utils_dev/ne1830PSS.py,sha256=tfQGEllBWynwDdsy6Fa7nbazPkV4aNXE6HHkN122RBw,69781
|
|
20
21
|
easy_utils_dev/nsp_kafka.py,sha256=oPJUk8GYMLjqXP_H0EbXQwLkpBxf_uGr6l2QtZO1cc8,10725
|
|
21
22
|
easy_utils_dev/openid_server.py,sha256=_odeg6omuizSUEJLtbAVn2PnG9vkcUAQ7rU3K5yXk_I,2545
|
|
22
23
|
easy_utils_dev/optics_utils.py,sha256=G-hFX2iiUCSJjk7BICBRGvVoDq0IBONLZSjagoB5FMg,964
|
|
@@ -28,8 +29,7 @@ easy_utils_dev/utils.py,sha256=BmVnbxc336c6WTeDFcEHN6Mavt7fJrIEyK4GXODV3gI,13345
|
|
|
28
29
|
easy_utils_dev/winserviceapi.py,sha256=2ZP6jaSt1-5vEJYXqwBhwX-1-eQ3V3YzntsoOoko2cw,18804
|
|
29
30
|
easy_utils_dev/wsnoclib.py,sha256=tC-RmjddaLpihPCRBLGC2RnRpFJqexhvExUr1KncoQM,29063
|
|
30
31
|
easy_utils_dev/wsselib.py,sha256=YweScnoAAH_t29EeIjBpkQ6HtX0Rp9mQudRsRce2SE8,7920
|
|
31
|
-
easy_utils_dev-2.
|
|
32
|
-
easy_utils_dev-2.
|
|
33
|
-
easy_utils_dev-2.
|
|
34
|
-
easy_utils_dev-2.
|
|
35
|
-
easy_utils_dev-2.126.dist-info/RECORD,,
|
|
32
|
+
easy_utils_dev-2.128.dist-info/METADATA,sha256=Sj8wdN5Dz9Wk0OFol4zVd6a7vATyfrb3KYUUiCjNjDA,510
|
|
33
|
+
easy_utils_dev-2.128.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
|
|
34
|
+
easy_utils_dev-2.128.dist-info/top_level.txt,sha256=7vBsrpq7NmilkdU3YUvfd5iVDNBaT07u_-ut4F7zc7A,15
|
|
35
|
+
easy_utils_dev-2.128.dist-info/RECORD,,
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
WjVSTMZcO/R9SHU/xGiati/I2kjJ86OPfyyjx5RwTBE=||30-10-2025||aGlnaF9hdmFpbA==
|
|
File without changes
|
|
File without changes
|