synapse 2.186.0__py311-none-any.whl → 2.188.0__py311-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- synapse/cortex.py +133 -9
- synapse/datamodel.py +20 -4
- synapse/exc.py +14 -1
- synapse/lib/ast.py +6 -4
- synapse/lib/auth.py +9 -0
- synapse/lib/hive.py +1 -1
- synapse/lib/httpapi.py +2 -1
- synapse/lib/modelrev.py +771 -11
- synapse/lib/nexus.py +6 -0
- synapse/lib/node.py +5 -3
- synapse/lib/scrape.py +18 -104
- synapse/lib/spooled.py +26 -3
- synapse/lib/storm.py +51 -28
- synapse/lib/stormlib/model.py +320 -250
- synapse/lib/stormlib/modelext.py +31 -0
- synapse/lib/stormlib/scrape.py +1 -4
- synapse/lib/stormtypes.py +53 -11
- synapse/lib/version.py +2 -2
- synapse/lib/view.py +9 -3
- synapse/models/base.py +27 -0
- synapse/models/files.py +22 -0
- synapse/models/inet.py +49 -4
- synapse/models/infotech.py +49 -22
- synapse/models/orgs.py +64 -2
- synapse/models/proj.py +1 -6
- synapse/models/risk.py +65 -0
- synapse/tests/test_cortex.py +21 -0
- synapse/tests/test_lib_agenda.py +13 -0
- synapse/tests/test_lib_auth.py +15 -0
- synapse/tests/test_lib_cell.py +2 -1
- synapse/tests/test_lib_httpapi.py +6 -0
- synapse/tests/test_lib_modelrev.py +918 -379
- synapse/tests/test_lib_nexus.py +26 -0
- synapse/tests/test_lib_scrape.py +14 -6
- synapse/tests/test_lib_spooled.py +34 -0
- synapse/tests/test_lib_storm.py +48 -0
- synapse/tests/test_lib_stormlib_model.py +0 -270
- synapse/tests/test_lib_stormlib_modelext.py +76 -1
- synapse/tests/test_lib_stormlib_scrape.py +0 -8
- synapse/tests/test_lib_stormtypes.py +12 -1
- synapse/tests/test_lib_trigger.py +8 -0
- synapse/tests/test_lib_view.py +24 -0
- synapse/tests/test_model_base.py +11 -0
- synapse/tests/test_model_files.py +19 -0
- synapse/tests/test_model_inet.py +33 -0
- synapse/tests/test_model_infotech.py +14 -11
- synapse/tests/test_model_orgs.py +39 -0
- synapse/tests/test_model_proj.py +11 -1
- synapse/tests/test_model_risk.py +32 -0
- synapse/tools/changelog.py +11 -3
- {synapse-2.186.0.dist-info → synapse-2.188.0.dist-info}/METADATA +1 -1
- {synapse-2.186.0.dist-info → synapse-2.188.0.dist-info}/RECORD +55 -58
- synapse/assets/__init__.py +0 -35
- synapse/assets/storm/migrations/model-0.2.28.storm +0 -355
- synapse/tests/test_assets.py +0 -25
- {synapse-2.186.0.dist-info → synapse-2.188.0.dist-info}/LICENSE +0 -0
- {synapse-2.186.0.dist-info → synapse-2.188.0.dist-info}/WHEEL +0 -0
- {synapse-2.186.0.dist-info → synapse-2.188.0.dist-info}/top_level.txt +0 -0
synapse/lib/nexus.py
CHANGED
@@ -341,6 +341,12 @@ class NexsRoot(s_base.Base):
         if meta is None:
             meta = {}

+        if (nexus := self._nexskids.get(nexsiden)) is None:
+            raise s_exc.NoSuchIden(mesg=f'No Nexus Pusher with iden {nexsiden}.', iden=nexsiden)
+
+        if event not in nexus._nexshands:
+            raise s_exc.NoSuchName(mesg=f'No Nexus handler for event {event}.', name=event)
+
         async with self.cell.nexslock:
             self.reqNotReadOnly()
             # Keep a reference to the shielded task to ensure it isn't GC'd
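
The change above makes a nexus push fail fast, with a specific exception, when the target iden is unknown or the event has no registered handler, instead of entering the locked write path first. A minimal standalone sketch of the same lookup-then-validate pattern (plain Python with hypothetical registry and exception names, not the synapse API):

class NoSuchIden(Exception):
    pass

class NoSuchName(Exception):
    pass

# Hypothetical registry mapping nexus idens to their registered event handlers.
_nexskids = {
    'view:abcd': {'view:set': lambda valu: valu},
}

def validate_push(nexsiden, event):
    # Fail fast if the target nexus object does not exist.
    if (nexus := _nexskids.get(nexsiden)) is None:
        raise NoSuchIden(f'No Nexus Pusher with iden {nexsiden}.')

    # Fail fast if the nexus object has no handler for the event.
    if event not in nexus:
        raise NoSuchName(f'No Nexus handler for event {event}.')

    return nexus[event]

handler = validate_push('view:abcd', 'view:set')
print(handler('ok'))  # -> ok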
synapse/lib/node.py
CHANGED
@@ -173,11 +173,13 @@ class Node:
         if prop is None:
             return None

-        if prop.modl.form(prop.type.name) is None:
+        if prop.modl.form(prop.type.name) is not None:
+            buid = s_common.buid((prop.type.name, valu))
+        elif prop.type.name == 'ndef':
+            buid = s_common.buid(valu)
+        else:
             return None

-        buid = s_common.buid((prop.type.name, valu))
-
         step = cache.get(buid, s_common.novalu)
         if step is s_common.novalu:
             step = cache[buid] = await node.snap.getNodeByBuid(buid)
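
This node.py change broadens the pivot cache lookup: a secondary property whose type is itself a form still hashes (form, valu) into a buid, a property of ndef type now hashes the (form, valu) tuple it already carries, and anything else still returns None. A small standalone sketch of that branching, with hashlib standing in for s_common.buid and a hypothetical Prop stand-in rather than the real model classes:

import hashlib
from collections import namedtuple

# Hypothetical stand-in for the model property object used by the real code.
Prop = namedtuple('Prop', ['typename', 'isform'])

def buid(valu):
    # Stand-in for s_common.buid(): a stable digest of the node definition.
    return hashlib.blake2b(repr(valu).encode(), digest_size=32).digest()

def getbuid(prop, valu):
    if prop.isform:
        # The property type is itself a form: (form, valu) identifies the node.
        return buid((prop.typename, valu))
    elif prop.typename == 'ndef':
        # ndef props already store a (form, valu) tuple.
        return buid(valu)
    return None

print(getbuid(Prop('inet:fqdn', True), 'vertex.link').hex()[:16])
print(getbuid(Prop('ndef', False), ('inet:ipv4', '1.2.3.4')).hex()[:16])
print(getbuid(Prop('str', False), 'hehe'))  # -> None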
synapse/lib/scrape.py
CHANGED
@@ -24,8 +24,6 @@ ipaddress = s_common.ipaddress

 logger = logging.getLogger(__name__)

-SCRAPE_SPAWN_LENGTH = 5000
-
 tldlist = list(s_data.get('iana.tlds'))
 tldlist.extend([
     'bit',
@@ -445,6 +443,9 @@ def _rewriteRawValu(text: str, offsets: dict, info: dict):
     info['match'] = match
     info['offset'] = baseoff + offset

+def _genMatchList(text: str, regx: regex.Regex, opts: dict):
+    return [info for info in _genMatches(text, regx, opts)]
+
 def _genMatches(text: str, regx: regex.Regex, opts: dict):

     cb = opts.get('callback')
@@ -504,23 +505,9 @@ def genMatches(text: str, regx: regex.Regex, opts: dict):
     for match in _genMatches(text, regx, opts):
         yield match

-def _spawn_genmatches(sock, text, regx, opts):  # pragma: no cover
-    '''
-    Multiprocessing target for generating matches.
-    '''
-    try:
-        for info in _genMatches(text, regx, opts):
-            sock.sendall(s_msgpack.en((True, info)))
-
-        sock.sendall(s_msgpack.en((True, None)))
-
-    except Exception as e:
-        mesg = s_common.retnexc(e)
-        sock.sendall(s_msgpack.en(mesg))
-
 async def genMatchesAsync(text: str, regx: regex.Regex, opts: dict):
     '''
-    Generate regular expression matches for a blob of text,
+    Generate regular expression matches for a blob of text, using the shared forked process pool.

     Args:
         text (str): The text to generate matches for.
@@ -547,30 +534,9 @@ async def genMatchesAsync(text: str, regx: regex.Regex, opts: dict):
     Yields:
         dict: A dictionary of match results.
     '''
-
-
-
-        return
-
-    link00, sock00 = await s_link.linksock()
-
-    try:
-        async with link00:
-
-            todo = s_common.todo(_spawn_genmatches, sock00, text, regx, opts)
-            link00.schedCoro(s_coro.spawn(todo, log_conf=s_common._getLogConfFromEnv()))
-
-            while (mesg := await link00.rx()) is not None:
-
-                info = s_common.result(mesg)
-                if info is None:
-                    return
-
-                yield info
-
-    finally:
-        sock00.close()
-
+    matches = await s_coro.semafork(_genMatchList, text, regx, opts)
+    for info in matches:
+        yield info

 def _contextMatches(scrape_text, text, ruletype, refang, offsets):

@@ -585,33 +551,16 @@ def _contextMatches(scrape_text, text, ruletype, refang, offsets):

         yield info

-def
-
-    offsets = {}
-    if refang:
-        scrape_text, offsets = refang_text2(text)
-
-    for ruletype, blobs in _regexes.items():
-        if form and form != ruletype:
-            continue
+def _contextScrapeList(text, form=None, refang=True, first=False):
+    return [info for info in _contextScrape(text, form=form, refang=refang, first=first)]

-
-
-            yield info
-
-            if first:
-                return
-
-async def _contextScrapeAsync(text, form=None, refang=True, first=False):
+def _contextScrape(text, form=None, refang=True, first=False):
     scrape_text = text
     offsets = {}
     if refang:
         scrape_text, offsets = refang_text2(text)

     for ruletype, blobs in _regexes.items():
-
-        await asyncio.sleep(0)
-
         if form and form != ruletype:
             continue

@@ -666,27 +615,12 @@ def scrape(text, ptype=None, refang=True, first=False):
     Returns:
         (str, object): Yield tuples of node ndef values.
     '''
-
-    for info in contextScrape(text, form=ptype, refang=refang, first=first):
+    for info in _contextScrape(text, form=ptype, refang=refang, first=first):
         yield info.get('form'), info.get('valu')

-def _spawn_scrape(sock, text, form=None, refang=True, first=False):  # pragma: no cover
-    '''
-    Multiprocessing target for scraping text.
-    '''
-    try:
-        for info in _contextScrape(text, form=form, refang=refang, first=first):
-            sock.sendall(s_msgpack.en((True, info)))
-
-        sock.sendall(s_msgpack.en((True, None)))
-
-    except Exception as e:
-        mesg = s_common.retnexc(e)
-        sock.sendall(s_msgpack.en(mesg))
-
 async def contextScrapeAsync(text, form=None, refang=True, first=False):
     '''
-    Scrape types from a blob of text and yield info dictionaries,
+    Scrape types from a blob of text and yield info dictionaries, using the shared forked process pool.

     Args:
         text (str): Text to scrape.
@@ -712,33 +646,13 @@ async def contextScrapeAsync(text, form=None, refang=True, first=False):
     Returns:
         (dict): Yield info dicts of results.
     '''
-
-
-
-        return
-
-    link00, sock00 = await s_link.linksock()
-
-    try:
-        async with link00:
-
-            todo = s_common.todo(_spawn_scrape, sock00, text, form=form, refang=refang, first=first)
-            link00.schedCoro(s_coro.spawn(todo, log_conf=s_common._getLogConfFromEnv()))
-
-            while (mesg := await link00.rx()) is not None:
-
-                info = s_common.result(mesg)
-                if info is None:
-                    return
-
-                yield info
-
-    finally:
-        sock00.close()
+    matches = await s_coro.semafork(_contextScrapeList, text, form=form, refang=refang, first=first)
+    for info in matches:
+        yield info

 async def scrapeAsync(text, ptype=None, refang=True, first=False):
     '''
-    Scrape types from a blob of text and return node tuples,
+    Scrape types from a blob of text and return node tuples, using the shared forked process pool.

     Args:
         text (str): Text to scrape.
@@ -749,6 +663,6 @@ async def scrapeAsync(text, ptype=None, refang=True, first=False):
     Returns:
         (str, object): Yield tuples of node ndef values.
     '''
-
-
+    matches = await s_coro.semafork(_contextScrapeList, text, form=ptype, refang=refang, first=first)
+    for info in matches:
         yield info.get('form'), info.get('valu')
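
The scrape.py rewrite drops the per-call spawned process and socket plumbing in favor of s_coro.semafork, which runs a list-building helper (_genMatchList / _contextScrapeList) in the shared forked worker pool and then re-yields the results. A hedged sketch of that offload-then-reyield pattern using only the standard library, with ProcessPoolExecutor standing in for the semafork pool:

import asyncio
import concurrent.futures
import re

def _match_list(text, pattern):
    # Build the full result list in the worker process; generators
    # cannot be shipped back across a process boundary.
    return [m.group(0) for m in re.finditer(pattern, text)]

async def matches_async(pool, text, pattern):
    # Offload the CPU-bound regex work, then re-yield the results asynchronously.
    loop = asyncio.get_running_loop()
    matches = await loop.run_in_executor(pool, _match_list, text, pattern)
    for info in matches:
        yield info

async def main():
    with concurrent.futures.ProcessPoolExecutor(max_workers=2) as pool:
        text = 'visit http://vertex.link and http://example.com'
        async for valu in matches_async(pool, text, r'http://\S+'):
            print(valu)

if __name__ == '__main__':
    asyncio.run(main())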
synapse/lib/spooled.py
CHANGED
@@ -7,6 +7,8 @@ import synapse.lib.const as s_const
 import synapse.lib.msgpack as s_msgpack
 import synapse.lib.lmdbslab as s_lmdbslab

+MAX_SPOOL_SIZE = 10000
+
 class Spooled(s_base.Base):
     '''
     A Base class that can be used to implement objects which fallback to lmdb.
@@ -15,7 +17,7 @@ class Spooled(s_base.Base):
     together. Under memory pressure, these objects have a better shot of getting paged out.
     '''

-    async def __anit__(self, dirn=None, size=
+    async def __anit__(self, dirn=None, size=MAX_SPOOL_SIZE, cell=None):
         '''
         Args:
             dirn(Optional[str]): base directory used for backing slab. If None, system temporary directory is used
@@ -55,7 +57,7 @@ class Set(Spooled):
     A minimal set-like implementation that will spool to a slab on large growth.
     '''

-    async def __anit__(self, dirn=None, size=
+    async def __anit__(self, dirn=None, size=MAX_SPOOL_SIZE, cell=None):
         await Spooled.__anit__(self, dirn=dirn, size=size, cell=cell)
         self.realset = set()
         self.len = 0
@@ -84,6 +86,27 @@ class Set(Spooled):

         return len(self.realset)

+    async def copy(self):
+        newset = await Set.anit(dirn=self.dirn, size=self.size, cell=self.cell)
+
+        if self.fallback:
+            await newset._initFallBack()
+            await self.slab.copydb(None, newset.slab)
+            newset.len = self.len
+
+        else:
+            newset.realset = self.realset.copy()
+
+        return newset
+
+    async def clear(self):
+        if self.fallback:
+            self.len = 0
+            await self.slab.trash()
+            await self._initFallBack()
+        else:
+            self.realset.clear()
+
     async def add(self, valu):

         if self.fallback:
@@ -117,7 +140,7 @@ class Set(Spooled):

 class Dict(Spooled):

-    async def __anit__(self, dirn=None, size=
+    async def __anit__(self, dirn=None, size=MAX_SPOOL_SIZE, cell=None):

         await Spooled.__anit__(self, dirn=dirn, size=size, cell=cell)
         self.realdict = {}
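
The spooled.py change names the default spool size (MAX_SPOOL_SIZE) and adds copy() and clear() to the spooled Set, each handling both the in-memory case and the lmdb fallback case. A hedged usage sketch, assuming a synapse install and the Base.anit()/fini() construction pattern the library uses elsewhere:

import asyncio

import synapse.lib.spooled as s_spooled

async def main():
    # The set spools to a temporary lmdb slab only if it grows past size.
    sset = await s_spooled.Set.anit(size=s_spooled.MAX_SPOOL_SIZE)

    await sset.add('foo')
    await sset.add('bar')

    copy = await sset.copy()   # independent copy (slab copy when spooled)
    await sset.clear()         # empty the original, re-initializing the slab if spooled

    # the copy retains the items; the original is now empty
    await copy.fini()
    await sset.fini()

asyncio.run(main())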
synapse/lib/storm.py
CHANGED
@@ -1357,30 +1357,42 @@ stormcmds = (
         'descr': "List existing cron jobs in the cortex.",
         'cmdargs': (),
         'storm': '''
+            init {
+                $conf = ({
+                    "columns": [
+                        {"name": "user", "width": 24},
+                        {"name": "iden", "width": 10},
+                        {"name": "view", "width": 10},
+                        {"name": "en?", "width": 3},
+                        {"name": "rpt?", "width": 4},
+                        {"name": "now?", "width": 4},
+                        {"name": "err?", "width": 4},
+                        {"name": "# start", "width": 7},
+                        {"name": "last start", "width": 16},
+                        {"name": "last end", "width": 16},
+                        {"name": "query", "newlines": "split"},
+                    ],
+                    "separators": {
+                        "row:outline": false,
+                        "column:outline": false,
+                        "header:row": "#",
+                        "data:row": "",
+                        "column": "",
+                    },
+                })
+                $printer = $lib.tabular.printer($conf)
+            }
             $crons = $lib.cron.list()
-
             if $crons {
-                $lib.print(
-
+                $lib.print($printer.header())
                 for $cron in $crons {
-
                     $job = $cron.pprint()
-
-
-
-
-
-                    $
-                    $isrunning = $job.isrunning.ljust(4)
-                    $iserr = $job.iserr.ljust(4)
-                    $startcount = $lib.str.format("{startcount}", startcount=$job.startcount).ljust(7)
-                    $laststart = $job.laststart.ljust(16)
-                    $lastend = $job.lastend.ljust(16)
-
-                    $lib.print("{user} {iden} {view} {enabled} {isrecur} {isrunning} {iserr} {startcount} {laststart} {lastend} {query}",
-                        user=$user, iden=$iden, view=$view, enabled=$enabled, isrecur=$isrecur,
-                        isrunning=$isrunning, iserr=$iserr, startcount=$startcount,
-                        laststart=$laststart, lastend=$lastend, query=$job.query)
+                    $row = (
+                        $job.user, $job.idenshort, $job.viewshort, $job.enabled,
+                        $job.isrecur, $job.isrunning, $job.iserr, `{$job.startcount}`,
+                        $job.laststart, $job.lastend, $job.query
+                    )
+                    $lib.print($printer.row($row))
                 }
             } else {
                 $lib.print("No cron jobs found")
@@ -5268,6 +5280,13 @@ class BackgroundCmd(Cmd):
         async for item in genr:
            yield item

+        _query = await s_stormtypes.tostr(self.opts.query)
+        query = await runt.getStormQuery(_query)
+
+        # make sure the subquery *could* have run
+        async with runt.getSubRuntime(query) as subr:
+            query.validate(subr)
+
         runtprims = await s_stormtypes.toprim(self.runt.getScopeVars(), use_list=True)
         runtvars = {k: v for (k, v) in runtprims.items() if s_msgpack.isok(v)}

@@ -5277,12 +5296,6 @@ class BackgroundCmd(Cmd):
             'vars': runtvars,
         }

-        _query = await s_stormtypes.tostr(self.opts.query)
-        query = await runt.getStormQuery(_query)
-
-        # make sure the subquery *could* have run with existing vars
-        query.validate(runt)
-
         coro = self.execStormTask(query, opts)
         runt.snap.core.schedCoro(coro)

@@ -5339,9 +5352,12 @@ class ParallelCmd(Cmd):
             raise s_exc.StormRuntimeError(mesg=mesg)

         size = await s_stormtypes.toint(self.opts.size)
-        query = await runt.getStormQuery(self.opts.query)

-
+        _query = await s_stormtypes.tostr(self.opts.query)
+        query = await runt.getStormQuery(_query)
+
+        async with runt.getSubRuntime(query) as subr:
+            query.validate(subr)

         async with await s_base.Base.anit() as base:

@@ -5979,6 +5995,13 @@ class RunAsCmd(Cmd):

     NOTE: This command requires admin privileges.

+    NOTE: Heavy objects (for example a View or Layer) are bound to the context which they
+          are instantiated in and methods on them will be run using the user in that
+          context. This means that executing a method on a variable containing a heavy
+          object which was instantiated outside of the runas command and then used
+          within the runas command will check the permissions of the outer user, not
+          the one specified by the runas command.
+
     Examples:

         // Create a node as another user.
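
Besides the cron.list tabular output and the runas docstring note, the storm.py changes move query parsing and validation for the background and parallel commands ahead of task scheduling, validating against a sub-runtime so problems surface to the caller rather than inside the detached task. A generic standalone sketch of that validate-before-schedule pattern (hypothetical parse/validate stand-ins, not the Storm runtime API):

import asyncio

class BadQuery(Exception):
    pass

def parse_query(text):
    # Stand-in for runt.getStormQuery(): parse first so syntax errors
    # surface to the caller instead of inside the detached task.
    if not text.strip():
        raise BadQuery('empty query')
    return text.strip()

def validate(query, scope):
    # Stand-in for query.validate(subr): check the query against the
    # runtime scope it will actually execute in.
    for name in query.split():
        if name.startswith('$') and name[1:] not in scope:
            raise BadQuery(f'unknown variable {name}')

async def run_background(query):
    await asyncio.sleep(0)  # the real work would happen here
    print('ran:', query)

async def main():
    scope = {'fqdn'}
    query = parse_query('resolve $fqdn')
    validate(query, scope)              # fail in the foreground, not in the task
    await asyncio.create_task(run_background(query))

asyncio.run(main())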