agently 4.0.6.9__py3-none-any.whl → 4.0.6.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
agently/core/Prompt.py CHANGED
@@ -14,9 +14,9 @@
 
 import re
 from textwrap import dedent
-from typing import Any, Literal, Mapping, Sequence, TYPE_CHECKING, cast, overload, TypeVar
+from typing import Any, Literal, TYPE_CHECKING, cast, overload, TypeVar
 
-from agently.utils import RuntimeData, Settings
+from agently.utils import RuntimeData, Settings, DataFormatter
 
 if TYPE_CHECKING:
     from agently.types.data.prompt import ChatMessage, PromptStandardSlot
@@ -80,8 +80,6 @@ class Prompt(RuntimeData):
     ):
         super().__init__(prompt_dict, parent=parent_prompt, name=name)
 
-        self._placeholder_pattern = re.compile(r"\$\{\s*([^}]+?)\s*\}")
-
         self.settings = Settings(
             name="Prompt-Settings",
             parent=parent_settings,
@@ -103,42 +101,6 @@ class Prompt(RuntimeData):
         self.to_json_prompt = self.prompt_generator.to_json_prompt
         self.to_yaml_prompt = self.prompt_generator.to_yaml_prompt
 
-    def _substitute_placeholder(self, obj: T, variable_mappings: dict[str, Any]) -> T | Any:
-        if not isinstance(variable_mappings, dict):
-            raise TypeError(f"Variable mappings require a dictionary but got: { variable_mappings }")
-
-        if isinstance(obj, str):
-            full_match = self._placeholder_pattern.fullmatch(obj)
-            if full_match:
-                key = full_match.group(1).strip()
-                return variable_mappings.get(key, obj)
-            else:
-
-                def replacer(match):
-                    key = match.group(1).strip()
-                    return str(variable_mappings.get(key, match.group(0)))
-
-                return self._placeholder_pattern.sub(replacer, obj)
-
-        if isinstance(obj, Mapping):
-            return {
-                self._substitute_placeholder(key, variable_mappings): self._substitute_placeholder(
-                    value, variable_mappings
-                )
-                for key, value in obj.items()
-            }
-
-        if isinstance(obj, Sequence) and not isinstance(obj, (str, bytes, bytearray)):
-            if isinstance(obj, tuple):
-                return tuple(self._substitute_placeholder(value, variable_mappings) for value in obj)
-            else:
-                return [self._substitute_placeholder(value, variable_mappings) for value in obj]
-
-        if isinstance(obj, set):
-            return {self._substitute_placeholder(value, variable_mappings) for value in obj}
-
-        return obj
-
     @overload
     def set(
         self,
@@ -165,8 +127,8 @@ class Prompt(RuntimeData):
             value = dedent(value.strip())
         if mappings is not None:
             super().set(
-                self._substitute_placeholder(key, mappings),
-                self._substitute_placeholder(value, mappings),
+                DataFormatter.substitute_placeholder(key, mappings),
+                DataFormatter.substitute_placeholder(value, mappings),
             )
         else:
             super().set(key, value)
@@ -178,7 +140,7 @@ class Prompt(RuntimeData):
     ):
         if mappings is not None:
             super().update(
-                self._substitute_placeholder(new, mappings),
+                DataFormatter.substitute_placeholder(new, mappings),
             )
         else:
            super().update(new)
@@ -193,8 +155,8 @@ class Prompt(RuntimeData):
             value = dedent(value.strip())
         if mappings is not None:
             super().append(
-                self._substitute_placeholder(key, mappings),
-                self._substitute_placeholder(value, mappings),
+                DataFormatter.substitute_placeholder(key, mappings),
+                DataFormatter.substitute_placeholder(value, mappings),
             )
         else:
             super().append(key, value)
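The Prompt.py change above removes the private `Prompt._substitute_placeholder` helper and routes `set()`, `update()`, and `append()` through `DataFormatter.substitute_placeholder` instead (its signature is assumed from the call sites shown). For reference, here is a minimal standalone sketch of the `${ name }` substitution behavior the removed helper implemented; the original also handled the `Mapping`/`Sequence` ABCs and sets, while this sketch sticks to dict/list/tuple to stay short.

    # Standalone sketch of the placeholder substitution the removed helper performed;
    # the real implementation now lives in agently.utils.DataFormatter.
    import re
    from typing import Any

    _PLACEHOLDER = re.compile(r"\$\{\s*([^}]+?)\s*\}")

    def substitute_placeholder(obj: Any, mappings: dict[str, Any]) -> Any:
        if isinstance(obj, str):
            full = _PLACEHOLDER.fullmatch(obj)
            if full:
                # A string that is exactly one placeholder keeps the mapped value's type
                return mappings.get(full.group(1).strip(), obj)
            # Embedded placeholders are replaced inline and stringified
            return _PLACEHOLDER.sub(
                lambda m: str(mappings.get(m.group(1).strip(), m.group(0))), obj
            )
        if isinstance(obj, dict):
            return {
                substitute_placeholder(k, mappings): substitute_placeholder(v, mappings)
                for k, v in obj.items()
            }
        if isinstance(obj, (list, tuple)):
            values = [substitute_placeholder(v, mappings) for v in obj]
            return tuple(values) if isinstance(obj, tuple) else values
        return obj

    print(substitute_placeholder({"count": "${ n }", "text": "n = ${n}"}, {"n": 3}))
    # -> {'count': 3, 'text': 'n = 3'}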
@@ -30,16 +30,17 @@ class TriggerFlowChunk:
         *,
         name: str | None = None,
     ):
-        self.name = name if name is not None else uuid.uuid4().hex
+        self.id = uuid.uuid4().hex
+        self.name = name if name is not None else self.id
         self._handler = handler
-        self.trigger = f"Chunk[{ handler.__name__ }]-{ self.name }"
+        self.trigger = f"Chunk[{ handler.__name__ }]-{ self.id }"
 
     async def async_call(self, data: "TriggerFlowEventData"):
         result = await FunctionShifter.asyncify(self._handler)(data)
-        await data.async_emit(self.trigger, result, layer_marks=data.layer_marks.copy())
+        await data.async_emit(self.trigger, result, _layer_marks=data._layer_marks.copy())
         return result
 
     def call(self, data: "TriggerFlowEventData"):
         result = FunctionShifter.syncify(self._handler)(data)
-        data.emit(self.trigger, result, layer_marks=data.layer_marks.copy())
+        data.emit(self.trigger, result, _layer_marks=data._layer_marks.copy())
         return result
@@ -97,7 +97,7 @@ class TriggerFlowExecution:
         self,
         trigger_event: str,
         value: Any = None,
-        layer_marks: list[str] | None = None,
+        _layer_marks: list[str] | None = None,
         *,
         trigger_type: Literal["event", "runtime_data", "flow_data"] = "event",
     ):
@@ -134,7 +134,7 @@ class TriggerFlowExecution:
                     trigger_type=trigger_type,
                     value=value,
                     execution=self,
-                    layer_marks=layer_marks,
+                    _layer_marks=_layer_marks,
                 )
             )
         )
@@ -51,6 +51,7 @@ class TriggerFlow:
         self._skip_exceptions = skip_exceptions
         self._executions: dict[str, "TriggerFlowExecution"] = {}
         self._start_process = TriggerFlowProcess(
+            flow_chunk=self.chunk,
             trigger_event="START",
             blue_print=self._blue_print,
             block_data=TriggerFlowBlockData(
@@ -60,6 +61,8 @@ class TriggerFlow:
 
         self.chunks = self._blue_print.chunks
 
+        self.set_settings = self.settings.set_settings
+
         self.get_flow_data = self._flow_data.get
         self.set_flow_data = FunctionShifter.syncify(self.async_set_flow_data)
         self.append_flow_data = FunctionShifter.syncify(self.async_append_flow_data)
@@ -74,10 +77,6 @@ class TriggerFlow:
         self.start_execution = FunctionShifter.syncify(self.async_start_execution)
         self.start = FunctionShifter.syncify(self.async_start)
 
-    def set_settings(self, key: str, value: "SerializableValue"):
-        self.settings.set_settings(key, value)
-        return self
-
     @overload
     def chunk(self, handler_or_name: "TriggerFlowHandler") -> TriggerFlowChunk: ...
 
@@ -17,7 +17,7 @@ import uuid
 from asyncio import Event
 from threading import Lock
 
-from typing import Any, Literal, TYPE_CHECKING, overload
+from typing import Callable, Any, Literal, TYPE_CHECKING, overload, cast
 from typing_extensions import Self
 
 
@@ -31,15 +31,18 @@ from agently.types.trigger_flow import TriggerFlowBlockData
 
 
 class TriggerFlowBaseProcess:
+
     def __init__(
         self,
         *,
+        flow_chunk,
         trigger_event: str,
         blue_print: "TriggerFlowBluePrint",
         block_data: "TriggerFlowBlockData",
         trigger_type: Literal["event", "runtime_data", "flow_data"] = "event",
         **options,
     ):
+        self._flow_chunk = flow_chunk
         self.trigger_event = trigger_event
         self.trigger_type: Literal["event", "runtime_data", "flow_data"] = trigger_type
         self._blue_print = blue_print
@@ -55,6 +58,7 @@ class TriggerFlowBaseProcess:
         **options,
     ):
         return type(self)(
+            flow_chunk=self._flow_chunk,
             trigger_event=trigger_event,
             trigger_type=trigger_type,
             blue_print=blue_print,
@@ -112,8 +116,12 @@ class TriggerFlowBaseProcess:
         if isinstance(trigger_or_triggers, TriggerFlowChunk):
             trigger_or_triggers = trigger_or_triggers.trigger
         if isinstance(trigger_or_triggers, str):
+            if trigger_or_triggers in self._blue_print.chunks:
+                trigger = self._blue_print.chunks[trigger_or_triggers].trigger
+            else:
+                trigger = trigger_or_triggers
             return self._new(
-                trigger_event=trigger_or_triggers,
+                trigger_event=trigger,
                 trigger_type="event",
                 blue_print=self._blue_print,
                 block_data=TriggerFlowBlockData(
@@ -178,7 +186,7 @@ class TriggerFlowBaseProcess:
                         if mode == "simple_or"
                         else (data.trigger_type, data.trigger_event, data.value)
                     ),
-                    layer_marks=data.layer_marks.copy(),
+                    _layer_marks=data._layer_marks.copy(),
                 )
             case "and":
                 if data.trigger_type in values and data.trigger_event in values[trigger_type]:  # type: ignore
@@ -191,7 +199,7 @@ class TriggerFlowBaseProcess:
                     await data.async_emit(
                         when_trigger,
                         values,
-                        layer_marks=data.layer_marks.copy(),
+                        _layer_marks=data._layer_marks.copy(),
                     )
 
         for trigger_type, trigger_event_dict in values.items():
@@ -213,15 +221,22 @@ class TriggerFlowBaseProcess:
 
     def to(
         self,
-        chunk: "TriggerFlowChunk | TriggerFlowHandler | str",
+        chunk: "TriggerFlowChunk | TriggerFlowHandler | str | tuple[str, TriggerFlowHandler]",
         side_branch: bool = False,
+        name: str | None = None,
     ):
         if isinstance(chunk, str):
             if chunk in self._blue_print.chunks:
                 chunk = self._blue_print.chunks[chunk]
             else:
                 raise NotImplementedError(f"Cannot find chunk named '{ chunk }'")
-        chunk = TriggerFlowChunk(chunk) if callable(chunk) else chunk
+        elif isinstance(chunk, tuple):
+            chunk_name = chunk[0]
+            chunk_func = chunk[1]
+            chunk = self._flow_chunk(chunk_name)(chunk_func)
+        else:
+            chunk = self._flow_chunk(name or chunk.__name__)(chunk) if callable(chunk) else chunk
+        assert isinstance(chunk, TriggerFlowChunk)
         self._blue_print.add_handler(
             self.trigger_type,
             self.trigger_event,
@@ -235,34 +250,51 @@ class TriggerFlowBaseProcess:
             **self._options,
         )
 
-    def side_branch(self, chunk: "TriggerFlowChunk | TriggerFlowHandler"):
-        return self.to(chunk, side_branch=True)
+    def side_branch(
+        self,
+        chunk: "TriggerFlowChunk | TriggerFlowHandler",
+        *,
+        name: str | None = None,
+    ):
+        return self.to(
+            chunk,
+            side_branch=True,
+            name=name,
+        )
 
     def batch(
         self,
-        *chunks: "TriggerFlowChunk | TriggerFlowHandler",
+        *chunks: "TriggerFlowChunk | TriggerFlowHandler | tuple[str, TriggerFlowHandler]",
         side_branch: bool = False,
     ):
         batch_trigger = f"Batch-{ uuid.uuid4().hex }"
         results = {}
-        chunks_to_wait = {}
+        triggers_to_wait = {}
+        trigger_to_chunk_name = {}
 
         async def wait_all_chunks(data: "TriggerFlowEventData"):
-            if data.event in chunks_to_wait:
-                results[data.event] = data.value
-                chunks_to_wait[data.event] = True
-                for done in chunks_to_wait.values():
+            if data.event in triggers_to_wait:
+                results[trigger_to_chunk_name[data.event]] = data.value
+                triggers_to_wait[data.event] = True
+                for done in triggers_to_wait.values():
                    if done is False:
                        return
                await data.async_emit(
                    batch_trigger,
                    results,
-                    layer_marks=data.layer_marks.copy(),
+                    _layer_marks=data._layer_marks.copy(),
                )
 
         for chunk in chunks:
-            chunk = TriggerFlowChunk(chunk) if callable(chunk) else chunk
-            chunks_to_wait[chunk.name] = False
+            if isinstance(chunk, tuple):
+                chunk_name = chunk[0]
+                chunk_func = chunk[1]
+                chunk = self._flow_chunk(chunk_name)(chunk_func)
+            else:
+                chunk = self._flow_chunk(chunk.__name__)(chunk) if callable(chunk) else chunk
+            triggers_to_wait[chunk.trigger] = False
+            trigger_to_chunk_name[chunk.trigger] = chunk.name
+            results[chunk.name] = None
         self._blue_print.add_handler(
             self.trigger_type,
             self.trigger_event,
@@ -299,13 +331,13 @@ class TriggerFlowBaseProcess:
                 await data.async_emit(
                     collect_trigger,
                     self._block_data.global_data.get(f"collections.{ collection_name}"),
-                    layer_marks=data.layer_marks.copy(),
+                    _layer_marks=data._layer_marks.copy(),
                 )
             elif mode == "filled_then_empty":
                 await data.async_emit(
                     collect_trigger,
                     self._block_data.global_data.get(f"collections.{ collection_name}"),
-                    layer_marks=data.layer_marks.copy(),
+                    _layer_marks=data._layer_marks.copy(),
                 )
                 del self._block_data.global_data[f"collections.{ collection_name}"]
 
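The `to()`, `side_branch()`, and `batch()` changes above add a `(name, handler)` tuple form and an optional `name` argument: bare callables are now wrapped through the flow's `chunk` factory (passed in as `flow_chunk`) instead of becoming anonymous `TriggerFlowChunk` objects, and `batch()` keys its results by chunk name. Below is a small self-contained sketch of that normalization logic; the `Chunk` class and `make_chunk` helper are simplified stand-ins, not the library's API.

    import uuid
    from typing import Any, Callable

    class Chunk:
        # Mirrors the TriggerFlowChunk fields shown earlier: id, name, trigger.
        def __init__(self, handler: Callable, name: str | None = None):
            self.id = uuid.uuid4().hex
            self.name = name if name is not None else self.id
            self.trigger = f"Chunk[{handler.__name__}]-{self.id}"

    def make_chunk(name: str):
        # Stand-in for the flow's chunk decorator factory (self._flow_chunk).
        def decorator(handler: Callable) -> Chunk:
            return Chunk(handler, name)
        return decorator

    def normalize(chunk: Any, name: str | None = None) -> Chunk:
        if isinstance(chunk, tuple):        # ("name", handler) registers a named chunk
            return make_chunk(chunk[0])(chunk[1])
        if callable(chunk):                 # bare handler: use `name` or the function name
            return make_chunk(name or chunk.__name__)(chunk)
        return chunk                        # already a chunk

    async def fetch(data):
        return data

    print(normalize(("fetch_step", fetch)).name)  # -> fetch_step
    print(normalize(fetch).name)                  # -> fetch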
@@ -49,7 +49,7 @@ class TriggerFlowForEachProcess(TriggerFlowBaseProcess):
                    data.async_emit(
                        send_item_trigger,
                        item,
-                        data.layer_marks.copy(),
+                        data._layer_marks.copy(),
                    )
                )
            data.layer_out()
@@ -62,7 +62,7 @@ class TriggerFlowForEachProcess(TriggerFlowBaseProcess):
            await data.async_emit(
                send_item_trigger,
                data.value,
-                data.layer_marks.copy(),
+                data._layer_marks.copy(),
            )
            data.layer_out()
 
@@ -103,7 +103,7 @@ class TriggerFlowForEachProcess(TriggerFlowBaseProcess):
                await data.async_emit(
                    end_for_each_trigger,
                    list(for_each_results[for_each_instance_id].values()),
-                    data.layer_marks.copy(),
+                    data._layer_marks.copy(),
                )
                for_each_results.delete(for_each_instance_id)
 
@@ -58,7 +58,7 @@ class TriggerFlowMatchCaseProcess(TriggerFlowBaseProcess):
                    await data.async_emit(
                        f"Match-{ match_id }-Case-{ case_id }",
                        data.value,
-                        layer_marks=data.layer_marks.copy(),
+                        _layer_marks=data._layer_marks.copy(),
                    )
                    return
            elif mode == "hit_all":
@@ -71,7 +71,7 @@ class TriggerFlowMatchCaseProcess(TriggerFlowBaseProcess):
                        data.async_emit(
                            f"Match-{ match_id }-Case-{ case_id }",
                            data.value,
-                            layer_marks=data.layer_marks.copy(),
+                            _layer_marks=data._layer_marks.copy(),
                        )
                    )
                data.layer_out()
@@ -81,13 +81,13 @@ class TriggerFlowMatchCaseProcess(TriggerFlowBaseProcess):
                await data.async_emit(
                    f"Match-{ match_id }-Else",
                    data.value,
-                    layer_marks=data.layer_marks.copy(),
+                    _layer_marks=data._layer_marks.copy(),
                )
            else:
                await data.async_emit(
                    f"Match-{ match_id }-Result",
                    data.value,
-                    layer_marks=data.layer_marks.copy(),
+                    _layer_marks=data._layer_marks.copy(),
                )
 
        self.to(match_case)
@@ -164,7 +164,7 @@ class TriggerFlowMatchCaseProcess(TriggerFlowBaseProcess):
                await data.async_emit(
                    f"Match-{ match_id }-Result",
                    list(match_results.values()),
-                    layer_marks=data.layer_marks.copy(),
+                    _layer_marks=data._layer_marks.copy(),
                )
                del data._system_runtime_data[f"match_results.{ data.upper_layer_mark }"]
            else:
@@ -172,7 +172,7 @@ class TriggerFlowMatchCaseProcess(TriggerFlowBaseProcess):
                await data.async_emit(
                    f"Match-{ match_id }-Result",
                    data.value,
-                    layer_marks=data.layer_marks.copy(),
+                    _layer_marks=data._layer_marks.copy(),
                )
 
        for trigger in branch_ends:
@@ -1,3 +1,18 @@
+# Copyright 2023-2025 AgentEra(Agently.Tech)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
 from agently.utils import LazyImport
 
 LazyImport.import_package("chromadb")
@@ -24,7 +24,16 @@ if TYPE_CHECKING:
     from agently.types.data.serializable import SerializableValue
 
 AgentlyModelResponseEvent = Literal[
-    "error", "original_delta", "delta", "tool_calls", "original_done", "done", "meta", "extra"
+    "error",
+    "original_delta",
+    "reasoning_delta",
+    "delta",
+    "tool_calls",
+    "original_done",
+    "reasoning_done",
+    "done",
+    "meta",
+    "extra",
 ]
 
 AgentlyModelResponseMessage: TypeAlias = tuple[AgentlyModelResponseEvent, Any]
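The event literal above gains two values, "reasoning_delta" and "reasoning_done", so consumers can stream a model's reasoning channel separately from its answer text. A hypothetical consumer sketch follows; the routing function is illustrative only, not the library's handler API.

    from typing import Any, Literal

    AgentlyModelResponseEvent = Literal[
        "error", "original_delta", "reasoning_delta", "delta", "tool_calls",
        "original_done", "reasoning_done", "done", "meta", "extra",
    ]

    def route(event: AgentlyModelResponseEvent, data: Any) -> None:
        # Print reasoning and answer text as separate streams.
        if event == "reasoning_delta":
            print(f"[reasoning] {data}")
        elif event == "delta":
            print(f"[answer] {data}")
        elif event in ("reasoning_done", "done"):
            print(f"[{event}]")

    for message in [
        ("reasoning_delta", "Checking the units..."),
        ("reasoning_done", None),
        ("delta", "42"),
        ("done", None),
    ]:
        route(*message)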
@@ -77,29 +77,39 @@ class ResponseParser(AgentlyPlugin, Protocol):
     def get_async_generator(
         self,
         type: Literal["instant", "streaming_parse"],
+        *,
+        specific: list[str] | str | None = None,
     ) -> AsyncGenerator["StreamingData", None]: ...
 
     @overload
     def get_async_generator(
         self,
         type: Literal["all"],
+        *,
+        specific: list[str] | str | None = None,
     ) -> AsyncGenerator[tuple[str, Any], None]: ...
 
     @overload
     def get_async_generator(
         self,
-        type: Literal["delta", "typed_delta", "original"],
+        type: Literal["delta", "specific", "original"],
+        *,
+        specific: list[str] | str | None = None,
     ) -> AsyncGenerator[str, None]: ...
 
     @overload
     def get_async_generator(
         self,
-        type: Literal["all", "original", "delta", "typed_delta", "instant", "streaming_parse"] | None = "delta",
+        type: Literal["all", "original", "delta", "specific", "instant", "streaming_parse"] | None = "delta",
+        *,
+        specific: list[str] | str | None = None,
     ) -> AsyncGenerator: ...
 
     def get_async_generator(
         self,
-        type: Literal["all", "original", "delta", "typed_delta", "instant", "streaming_parse"] | None = "delta",
+        type: Literal["all", "original", "delta", "specific", "instant", "streaming_parse"] | None = "delta",
+        *,
+        specific: list[str] | str | None = None,
     ) -> AsyncGenerator:
         """
         'instant' is Agently v3 compatible for 'streaming_parse'
@@ -110,29 +120,39 @@ class ResponseParser(AgentlyPlugin, Protocol):
     def get_generator(
         self,
         type: Literal["instant", "streaming_parse"],
+        *,
+        specific: list[str] | str | None = None,
     ) -> Generator["StreamingData", None, None]: ...
 
     @overload
     def get_generator(
         self,
         type: Literal["all"],
+        *,
+        specific: list[str] | str | None = None,
     ) -> Generator[tuple[str, Any], None, None]: ...
 
     @overload
     def get_generator(
         self,
-        type: Literal["delta", "typed_delta", "original"],
+        type: Literal["delta", "specific", "original"],
+        *,
+        specific: list[str] | str | None = None,
     ) -> Generator[str, None, None]: ...
 
     @overload
     def get_generator(
         self,
-        type: Literal["all", "original", "delta", "typed_delta", "instant", "streaming_parse"] | None = "delta",
+        type: Literal["all", "original", "delta", "specific", "instant", "streaming_parse"] | None = "delta",
+        *,
+        specific: list[str] | str | None = None,
     ) -> Generator: ...
 
     def get_generator(
         self,
-        type: Literal["all", "original", "delta", "typed_delta", "instant", "streaming_parse"] | None = "delta",
+        type: Literal["all", "original", "delta", "specific", "instant", "streaming_parse"] | None = "delta",
+        *,
+        specific: list[str] | str | None = None,
     ) -> Generator:
         """
         'instant' is Agently v3 compatible for 'streaming_parse'
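The `ResponseParser` overloads above replace the "typed_delta" generator type with "specific" and add a keyword-only `specific` filter to both `get_generator()` and `get_async_generator()`. Only the signatures are visible in this diff, so the exact filtering semantics are not shown; the mock below is a self-contained sketch of the new call shape only, and both `MockResponse` and its behavior are assumptions rather than the library's implementation.

    from typing import Any, Generator

    class MockResponse:
        def __init__(self, parsed: dict[str, Any]):
            self._parsed = parsed

        def get_generator(
            self,
            type: str = "delta",
            *,
            specific: list[str] | str | None = None,
        ) -> Generator[str, None, None]:
            # Yield only the requested keys when a `specific` filter is given.
            keys = [specific] if isinstance(specific, str) else (specific or list(self._parsed))
            for key in keys:
                yield str(self._parsed[key])

    response = MockResponse({"title": "Hello", "body": "World"})
    print(list(response.get_generator("specific", specific="title")))  # -> ['Hello']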
@@ -43,7 +43,7 @@ class TriggerFlowEventData:
         trigger_type: Literal["event", "runtime_data", "flow_data"],
         value: Any,
         execution: "TriggerFlowExecution",
-        layer_marks: list[str] | None = None,
+        _layer_marks: list[str] | None = None,
     ):
         self.trigger_event = trigger_event
         self.trigger_type = trigger_type
@@ -51,7 +51,7 @@ class TriggerFlowEventData:
         self.type = trigger_type
         self.value = value
         self.execution_id = execution.id
-        self.layer_marks = layer_marks if layer_marks is not None else []
+        self._layer_marks = _layer_marks if _layer_marks is not None else []
         self.settings = execution.settings
 
         self.get_flow_data = execution.get_flow_data
@@ -84,17 +84,17 @@ class TriggerFlowEventData:
 
     @property
     def upper_layer_mark(self):
-        return self.layer_marks[-2] if len(self.layer_marks) > 1 else None
+        return self._layer_marks[-2] if len(self._layer_marks) > 1 else None
 
     @property
     def layer_mark(self):
-        return self.layer_marks[-1] if len(self.layer_marks) > 0 else None
+        return self._layer_marks[-1] if len(self._layer_marks) > 0 else None
 
     def layer_in(self):
-        self.layer_marks.append(uuid.uuid4().hex)
+        self._layer_marks.append(uuid.uuid4().hex)
 
     def layer_out(self):
-        self.layer_marks = self.layer_marks[:-1] if len(self.layer_marks) > 0 else []
+        self._layer_marks = self._layer_marks[:-1] if len(self._layer_marks) > 0 else []
 
 
 TriggerFlowHandler = Callable[[TriggerFlowEventData], Any]
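Finally, `TriggerFlowEventData.layer_marks` becomes the private `_layer_marks` throughout: code that inspected the list directly should rely on the `layer_mark` / `upper_layer_mark` properties and the `layer_in()` / `layer_out()` methods shown above. A minimal self-contained illustration of that stack-like behavior:

    import uuid

    class LayerMarks:
        # Mirrors the _layer_marks handling in TriggerFlowEventData above.
        def __init__(self) -> None:
            self._layer_marks: list[str] = []

        @property
        def layer_mark(self) -> str | None:
            return self._layer_marks[-1] if self._layer_marks else None

        @property
        def upper_layer_mark(self) -> str | None:
            return self._layer_marks[-2] if len(self._layer_marks) > 1 else None

        def layer_in(self) -> None:
            self._layer_marks.append(uuid.uuid4().hex)

        def layer_out(self) -> None:
            self._layer_marks = self._layer_marks[:-1] if self._layer_marks else []

    marks = LayerMarks()
    marks.layer_in()
    marks.layer_in()
    print(marks.upper_layer_mark is not None)  # True: two layers deep
    marks.layer_out()
    print(marks.upper_layer_mark is None)      # True: back to a single layer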