pyglove 0.5.0.dev202508250811__py3-none-any.whl → 0.5.0.dev202511300809__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. pyglove/core/__init__.py +8 -1
  2. pyglove/core/geno/base.py +7 -3
  3. pyglove/core/io/file_system.py +295 -2
  4. pyglove/core/io/file_system_test.py +291 -0
  5. pyglove/core/logging.py +45 -1
  6. pyglove/core/logging_test.py +12 -21
  7. pyglove/core/monitoring.py +657 -0
  8. pyglove/core/monitoring_test.py +289 -0
  9. pyglove/core/symbolic/__init__.py +7 -0
  10. pyglove/core/symbolic/base.py +89 -35
  11. pyglove/core/symbolic/base_test.py +3 -3
  12. pyglove/core/symbolic/dict.py +31 -12
  13. pyglove/core/symbolic/dict_test.py +49 -0
  14. pyglove/core/symbolic/list.py +17 -3
  15. pyglove/core/symbolic/list_test.py +24 -2
  16. pyglove/core/symbolic/object.py +3 -1
  17. pyglove/core/symbolic/object_test.py +13 -10
  18. pyglove/core/symbolic/ref.py +19 -7
  19. pyglove/core/symbolic/ref_test.py +94 -7
  20. pyglove/core/symbolic/unknown_symbols.py +147 -0
  21. pyglove/core/symbolic/unknown_symbols_test.py +100 -0
  22. pyglove/core/typing/annotation_conversion.py +8 -1
  23. pyglove/core/typing/annotation_conversion_test.py +14 -19
  24. pyglove/core/typing/class_schema.py +24 -1
  25. pyglove/core/typing/json_schema.py +221 -8
  26. pyglove/core/typing/json_schema_test.py +508 -12
  27. pyglove/core/typing/type_conversion.py +17 -3
  28. pyglove/core/typing/type_conversion_test.py +7 -2
  29. pyglove/core/typing/value_specs.py +5 -1
  30. pyglove/core/typing/value_specs_test.py +5 -0
  31. pyglove/core/utils/__init__.py +2 -0
  32. pyglove/core/utils/contextual.py +9 -4
  33. pyglove/core/utils/contextual_test.py +10 -0
  34. pyglove/core/utils/error_utils.py +59 -25
  35. pyglove/core/utils/json_conversion.py +360 -63
  36. pyglove/core/utils/json_conversion_test.py +146 -13
  37. pyglove/core/views/html/controls/tab.py +33 -0
  38. pyglove/core/views/html/controls/tab_test.py +37 -0
  39. pyglove/ext/evolution/base_test.py +1 -1
  40. {pyglove-0.5.0.dev202508250811.dist-info → pyglove-0.5.0.dev202511300809.dist-info}/METADATA +8 -1
  41. {pyglove-0.5.0.dev202508250811.dist-info → pyglove-0.5.0.dev202511300809.dist-info}/RECORD +44 -40
  42. {pyglove-0.5.0.dev202508250811.dist-info → pyglove-0.5.0.dev202511300809.dist-info}/WHEEL +0 -0
  43. {pyglove-0.5.0.dev202508250811.dist-info → pyglove-0.5.0.dev202511300809.dist-info}/licenses/LICENSE +0 -0
  44. {pyglove-0.5.0.dev202508250811.dist-info → pyglove-0.5.0.dev202511300809.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,289 @@
+ # Copyright 2025 The PyGlove Authors
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #      http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ import time
+ import unittest
+ from pyglove.core import monitoring
+ from pyglove.core.symbolic import error_info  # pylint: disable=unused-import
+
+
+ class MetricCollectionTest(unittest.TestCase):
+   """Tests for metric collection."""
+
+   def test_default_metric_collection_cls(self):
+     self.assertIs(
+         monitoring.default_metric_collection_cls(),
+         monitoring.InMemoryMetricCollection
+     )
+
+     class TestMetricCollection(monitoring.MetricCollection):
+       pass
+
+     monitoring.set_default_metric_collection_cls(TestMetricCollection)
+     self.assertIs(
+         monitoring.default_metric_collection_cls(),
+         TestMetricCollection
+     )
+     monitoring.set_default_metric_collection_cls(
+         monitoring.InMemoryMetricCollection
+     )
+     self.assertIs(
+         monitoring.default_metric_collection_cls(),
+         monitoring.InMemoryMetricCollection
+     )
+
+   def test_metric_collection(self):
+     collection = monitoring.metric_collection('/test')
+     self.assertEqual(collection.namespace, '/test')
+     self.assertIsInstance(collection, monitoring.InMemoryMetricCollection)
+
+   def test_creation_failures(self):
+     collection = monitoring.InMemoryMetricCollection('/test')
+     counter = collection.get_counter('counter', 'counter description')
+     self.assertIsInstance(counter, monitoring.Counter)
+     with self.assertRaisesRegex(
+         ValueError, 'Metric .* already exists with a different type'
+     ):
+       collection.get_distribution('counter', 'counter description')
+
+     with self.assertRaisesRegex(
+         ValueError, 'Metric .* already exists with a different description'
+     ):
+       collection.get_counter('counter', 'different description')
+
+     with self.assertRaisesRegex(
+         ValueError,
+         'Metric .* already exists with different parameter definitions'
+     ):
+       collection.get_counter(
+           'counter', 'counter description', parameters={'field1': str}
+       )
+
+
+ class InMemoryDistributionValueTest(unittest.TestCase):
+   """Tests for in memory distribution value."""
+
+   def test_empty_distribution(self):
+     dist = monitoring._InMemoryDistributionValue()
+     self.assertEqual(dist.count, 0)
+     self.assertEqual(dist.sum, 0.0)
+     self.assertEqual(dist.mean, 0.0)
+     self.assertEqual(dist.stddev, 0.0)
+     self.assertEqual(dist.variance, 0.0)
+     self.assertEqual(dist.median, 0.0)
+     self.assertEqual(dist.percentile(50), 0.0)
+     self.assertEqual(dist.fraction_less_than(100), 0.0)
+
+   def test_add_value(self):
+     dist = monitoring._InMemoryDistributionValue()
+     dist.add(1)
+     dist.add(3)
+     dist.add(10)
+     dist.add(2)
+     self.assertEqual(dist.count, 4)
+     self.assertEqual(dist.sum, 16.0)
+     self.assertEqual(dist.mean, 4.0)
+     self.assertEqual(dist.stddev, 3.5355339059327378)
+     self.assertEqual(dist.variance, 12.5)
+     self.assertEqual(dist.median, 2.5)
+     self.assertEqual(dist.percentile(50), 2.5)
+     self.assertEqual(dist.percentile(10), 1.3)
+     self.assertEqual(dist.fraction_less_than(100), 1.0)
+     self.assertEqual(dist.fraction_less_than(1), 0.0)
+     self.assertEqual(dist.fraction_less_than(10), 0.75)
+
+   def test_add_value_no_numpy(self):
+     numpy = monitoring.numpy
+     monitoring.numpy = None
+     dist = monitoring._InMemoryDistributionValue()
+     dist.add(1)
+     dist.add(3)
+     dist.add(10)
+     dist.add(2)
+     self.assertEqual(dist.count, 4)
+     self.assertEqual(dist.sum, 16.0)
+     self.assertEqual(dist.mean, 4.0)
+     self.assertEqual(dist.stddev, 3.5355339059327378)
+     self.assertEqual(dist.variance, 12.5)
+     self.assertEqual(dist.median, 2.5)
+     self.assertEqual(dist.percentile(50), 2.5)
+     self.assertEqual(dist.percentile(10), 1.3)
+     self.assertEqual(dist.fraction_less_than(100), 1.0)
+     self.assertEqual(dist.fraction_less_than(1), 0.0)
+     self.assertEqual(dist.fraction_less_than(10), 0.75)
+     monitoring.numpy = numpy
+
+   def test_window_size(self):
+     dist = monitoring._InMemoryDistributionValue(window_size=3)
+     dist.add(1)
+     dist.add(3)
+     dist.add(10)
+     dist.add(2)
+     self.assertEqual(dist.count, 3)
+     self.assertEqual(dist.sum, 15.0)
+     self.assertEqual(dist.mean, 5.0)
+     self.assertEqual(dist.stddev, 3.5590260840104366)
+     self.assertEqual(dist.variance, 12.666666666666664)
+     self.assertEqual(dist.median, 3.0)
+     self.assertEqual(dist.percentile(50), 3.0)
+     self.assertEqual(dist.percentile(10), 2.2)
+     self.assertEqual(dist.fraction_less_than(100), 1.0)
+     self.assertEqual(dist.fraction_less_than(1), 0.0)
+     self.assertEqual(dist.fraction_less_than(10), 0.6666666666666666)
+
+
+ class InMemoryCounterTest(unittest.TestCase):
+   """Tests for in memory counter."""
+
+   def test_counter_without_parameters(self):
+     collection = monitoring.InMemoryMetricCollection('/test')
+     counter = collection.get_counter('counter', 'counter description')
+     self.assertEqual(counter.namespace, '/test')
+     self.assertEqual(counter.name, 'counter')
+     self.assertEqual(counter.description, 'counter description')
+     self.assertEqual(counter.parameter_definitions, {})
+     self.assertEqual(counter.full_name, '/test/counter')
+     self.assertEqual(counter.value(), 0)
+     self.assertEqual(counter.increment(), 1)
+     self.assertEqual(counter.value(), 1)
+     self.assertEqual(counter.increment(2), 3)
+     self.assertEqual(counter.value(), 3)
+     self.assertIs(collection.metrics()[0], counter)
+
+   def test_counter_with_parameters(self):
+     collection = monitoring.InMemoryMetricCollection('/test')
+     counter = collection.get_counter(
+         'counter', 'counter description', {'field1': str}
+     )
+     self.assertEqual(counter.namespace, '/test')
+     self.assertEqual(counter.name, 'counter')
+     self.assertEqual(counter.description, 'counter description')
+     self.assertEqual(counter.parameter_definitions, {'field1': str})
+     self.assertEqual(counter.full_name, '/test/counter')
+     self.assertEqual(counter.value(field1='foo'), 0)
+     self.assertEqual(counter.increment(field1='foo'), 1)
+     self.assertEqual(counter.value(field1='bar'), 0)
+     self.assertEqual(counter.increment(field1='bar'), 1)
+     self.assertEqual(counter.increment(field1='foo', delta=2), 3)
+     self.assertEqual(counter.value(field1='foo'), 3)
+
+     with self.assertRaisesRegex(TypeError, '.* has type .* but expected type'):
+       counter.increment(field1=1)
+
+     with self.assertRaisesRegex(KeyError, '.* is required but not given'):
+       counter.increment()
+
+     with self.assertRaisesRegex(KeyError, '.* is not defined but provided'):
+       counter.increment(field1='foo', field2='a')
+
+
+ class InMemoryScalarTest(unittest.TestCase):
+   """Tests for in memory scalar."""
+
+   def test_scalar_without_parameters(self):
+     collection = monitoring.InMemoryMetricCollection('/test')
+     scalar = collection.get_scalar('scalar', 'scalar description')
+     self.assertEqual(scalar.namespace, '/test')
+     self.assertEqual(scalar.name, 'scalar')
+     self.assertEqual(scalar.description, 'scalar description')
+     self.assertEqual(scalar.parameter_definitions, {})
+     self.assertEqual(scalar.full_name, '/test/scalar')
+     self.assertEqual(scalar.value(), 0)
+     self.assertEqual(scalar.increment(), 1)
+     self.assertEqual(scalar.value(), 1)
+     scalar.set(3)
+     self.assertEqual(scalar.increment(2), 5)
+     self.assertEqual(scalar.value(), 5)
+
+   def test_scalar_with_parameters(self):
+     collection = monitoring.InMemoryMetricCollection('/test')
+     scalar = collection.get_scalar(
+         'scalar', 'scalar description', {'field1': str}, float
+     )
+     self.assertEqual(scalar.namespace, '/test')
+     self.assertEqual(scalar.name, 'scalar')
+     self.assertEqual(scalar.description, 'scalar description')
+     self.assertEqual(scalar.parameter_definitions, {'field1': str})
+     self.assertEqual(scalar.full_name, '/test/scalar')
+     self.assertEqual(scalar.value(field1='foo'), 0.0)
+     scalar.set(2.5, field1='bar')
+     self.assertEqual(scalar.value(field1='bar'), 2.5)
+     self.assertEqual(scalar.increment(1.1, field1='bar'), 3.6)
+     self.assertEqual(scalar.value(field1='bar'), 3.6)
+     self.assertEqual(scalar.value(field1='foo'), 0.0)
+
+
+ class InMemoryDistributionTest(unittest.TestCase):
+   """Tests for in memory distribution."""
+
+   def test_distribution_without_parameters(self):
+     collection = monitoring.InMemoryMetricCollection('/test')
+     dist = collection.get_distribution(
+         'distribution', 'distribution description'
+     )
+     self.assertEqual(dist.namespace, '/test')
+     self.assertEqual(dist.name, 'distribution')
+     self.assertEqual(dist.description, 'distribution description')
+     self.assertEqual(dist.parameter_definitions, {})
+     self.assertEqual(dist.full_name, '/test/distribution')
+     v = dist.value()
+     self.assertEqual(v.count, 0)
+     dist.record(1)
+     dist.record(2)
+     dist.record(3)
+     v = dist.value()
+     self.assertEqual(v.count, 3)
+
+     dist = collection.get_distribution(
+         'distribution2', 'distribution description'
+     )
+     with dist.record_duration():
+       time.sleep(0.1)
+     self.assertGreaterEqual(dist.value().mean, 100)
+
+   def test_distribution_with_parameters(self):
+     collection = monitoring.InMemoryMetricCollection('/test')
+     dist = collection.get_distribution(
+         'distribution', 'distribution description', {'field1': str}
+     )
+     self.assertEqual(dist.namespace, '/test')
+     self.assertEqual(dist.name, 'distribution')
+     self.assertEqual(dist.description, 'distribution description')
+     self.assertEqual(dist.parameter_definitions, {'field1': str})
+     self.assertEqual(dist.full_name, '/test/distribution')
+     value = dist.value(field1='foo')
+     self.assertEqual(value.count, 0)
+     dist.record(1, field1='foo')
+     dist.record(2, field1='foo')
+     dist.record(3, field1='bar')
+     value = dist.value(field1='foo')
+     self.assertEqual(value.count, 2)
+     value = dist.value(field1='bar')
+     self.assertEqual(value.count, 1)
+
+     dist = collection.get_distribution(
+         'distribution2', 'distribution description', {'error': str}
+     )
+     with self.assertRaises(ValueError):
+       with dist.record_duration():
+         time.sleep(0.1)
+         raise ValueError()
+     self.assertGreaterEqual(dist.value(error='ValueError').mean, 100)
+     with dist.record_duration():
+       time.sleep(0.1)
+     self.assertGreaterEqual(dist.value(error='').mean, 100)
+
+
+ if __name__ == '__main__':
+   unittest.main()
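The new monitoring module appears in this diff only through the test file above, so the following is a hedged usage sketch pieced together from those tests. The '/my/app' namespace and metric names are illustrative, and the assumption that record_duration() reports milliseconds is inferred from the test that asserts a mean >= 100 after a 0.1 s sleep.

# Usage sketch (not part of the package diff): how the metric APIs exercised
# by the tests above appear to fit together.
import time

from pyglove.core import monitoring

# A metric collection groups metrics under a namespace.
collection = monitoring.metric_collection('/my/app')

# Counters: monotonically increasing values, optionally split by parameters.
requests = collection.get_counter(
    'requests', 'Number of requests served.', parameters={'status': str}
)
requests.increment(status='ok')
requests.increment(delta=2, status='error')
print(requests.value(status='ok'))    # 1, following the counter tests above.

# Scalars: settable values that can also be incremented.
inflight = collection.get_scalar('inflight', 'Requests in flight.')
inflight.set(3)
inflight.increment(2)                  # -> 5

# Distributions: aggregate recorded values; record_duration() times a block
# (apparently in milliseconds, judging by the sleep(0.1) -> mean >= 100 test).
latency = collection.get_distribution('latency', 'Request latency.')
latency.record(12.5)
with latency.record_duration():
  time.sleep(0.01)
print(latency.value().count, latency.value().mean)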
@@ -147,4 +147,11 @@ from pyglove.core.symbolic.list import mark_as_insertion
  from pyglove.core.symbolic.base import WritePermissionError
  from pyglove.core.symbolic.error_info import ErrorInfo

+ # Unknown symbols.
+ from pyglove.core.symbolic.unknown_symbols import UnknownSymbol
+ from pyglove.core.symbolic.unknown_symbols import UnknownType
+ from pyglove.core.symbolic.unknown_symbols import UnknownFunction
+ from pyglove.core.symbolic.unknown_symbols import UnknownMethod
+ from pyglove.core.symbolic.unknown_symbols import UnknownTypedObject
+
  # pylint: enable=g-bad-import-order
@@ -503,10 +503,12 @@ class Symbolic(
      return default

    def _sym_inferred(self, key: Union[str, int], **kwargs) -> Any:
-     v = self.sym_getattr(key)
-     if isinstance(v, Inferential):
-       v = v.infer(**kwargs)
-     return v
+     return self._infer_if_applicable(self.sym_getattr(key), **kwargs)
+
+   def _infer_if_applicable(self, value: Any, **kwargs) -> Any:
+     if isinstance(value, Inferential):
+       return value.infer(**kwargs)
+     return value

    @abc.abstractmethod
    def sym_keys(self) -> Iterator[Union[str, int]]:
@@ -944,7 +946,7 @@ class Symbolic(

    def to_json(self, **kwargs) -> utils.JSONValueType:
      """Alias for `sym_jsonify`."""
-     return to_json(self, **kwargs)
+     return utils.to_json(self, **kwargs)

    def to_json_str(self, json_indent: Optional[int] = None, **kwargs) -> str:
      """Serializes current object into a JSON string."""
@@ -1983,10 +1985,12 @@ def is_abstract(x: Any) -> bool:
  def contains(
      x: Any,
      value: Any = None,
-     type: Optional[Union[  # pylint: disable=redefined-builtin
+     type: Union[  # pylint: disable=redefined-builtin
          Type[Any],
-         Tuple[Type[Any]]]]=None
-     ) -> bool:
+         Tuple[Type[Any], ...],
+         None,
+     ]=None,
+ ) -> bool:
    """Returns if a value contains values of specific type.

    Example::
@@ -2035,10 +2039,12 @@ def contains(
  def from_json(
      json_value: Any,
      *,
+     context: Optional[utils.JSONConversionContext] = None,
+     auto_symbolic: bool = True,
+     auto_import: bool = True,
+     convert_unknown: bool = False,
      allow_partial: bool = False,
      root_path: Optional[utils.KeyPath] = None,
-     auto_import: bool = True,
-     auto_dict: bool = False,
      value_spec: Optional[pg_typing.ValueSpec] = None,
      **kwargs,
  ) -> Any:
@@ -2059,14 +2065,23 @@

    Args:
      json_value: Input JSON value.
-     allow_partial: Whether to allow elements of the list to be partial.
-     root_path: KeyPath of loaded object in its object tree.
+     context: JSON conversion context.
+     auto_symbolic: If True, list and dict will be automatically converted to
+       `pg.List` and `pg.Dict`. Otherwise, they will be plain lists
+       and dicts.
      auto_import: If True, when a '_type' is not registered, PyGlove will
        identify its parent module and automatically import it. For example,
        if the type is 'foo.bar.A', PyGlove will try to import 'foo.bar' and
        find the class 'A' within the imported module.
-     auto_dict: If True, dict with '_type' that cannot be loaded will remain
-       as dict, with '_type' renamed to 'type_name'.
+     convert_unknown: If True, when a '_type' is not registered and cannot
+       be imported, PyGlove will create objects of:
+       - `pg.symbolic.UnknownType` for unknown types;
+       - `pg.symbolic.UnknownTypedObject` for objects of unknown types;
+       - `pg.symbolic.UnknownFunction` for unknown functions;
+       - `pg.symbolic.UnknownMethod` for unknown methods.
+       If False, TypeError will be raised.
+     allow_partial: Whether to allow elements of the list to be partial.
+     root_path: KeyPath of loaded object in its object tree.
      value_spec: The value spec for the symbolic list or dict.
      **kwargs: Allow passing through keyword arguments to from_json of specific
        types.
@@ -2082,10 +2097,23 @@
    if isinstance(json_value, Symbolic):
      return json_value

+   if context is None:
+     if (isinstance(json_value, dict) and (
+         context_node := json_value.get(utils.JSONConvertible.CONTEXT_KEY))):
+       context = utils.JSONConversionContext.from_json(
+           context_node,
+           auto_import=auto_import,
+           convert_unknown=convert_unknown,
+           **kwargs
+       )
+       json_value = json_value[utils.JSONConvertible.ROOT_VALUE_KEY]
+     else:
+       context = utils.JSONConversionContext()
+
    typename_resolved = kwargs.pop('_typename_resolved', False)
    if not typename_resolved:
      json_value = utils.json_conversion.resolve_typenames(
-         json_value, auto_import=auto_import, auto_dict=auto_dict
+         json_value, auto_import, convert_unknown
      )

    def _load_child(k, v):
@@ -2094,6 +2122,7 @@ def from_json(
          root_path=utils.KeyPath(k, root_path),
          _typename_resolved=True,
          allow_partial=allow_partial,
+         context=context,
          **kwargs,
      )

@@ -2109,24 +2138,42 @@
              )
          )
        return tuple(_load_child(i, v) for i, v in enumerate(json_value[1:]))
-     return Symbolic.ListType.from_json(  # pytype: disable=attribute-error
+     if json_value and json_value[0] == utils.JSONConvertible.SYMBOLIC_MARKER:
+       auto_symbolic = True
+     if auto_symbolic:
+       from_json_fn = Symbolic.ListType.from_json  # pytype: disable=attribute-error
+     else:
+       from_json_fn = utils.from_json
+     return from_json_fn(
          json_value,
+         context=context,
          value_spec=value_spec,
          root_path=root_path,
          allow_partial=allow_partial,
          **kwargs,
      )
    elif isinstance(json_value, dict):
+     if utils.JSONConvertible.REF_KEY in json_value:
+       x = context.get_shared(
+           json_value[utils.JSONConvertible.REF_KEY]
+       ).value
+       return x
      if utils.JSONConvertible.TYPE_NAME_KEY not in json_value:
-       return Symbolic.DictType.from_json(  # pytype: disable=attribute-error
-           json_value,
-           value_spec=value_spec,
-           root_path=root_path,
-           allow_partial=allow_partial,
-           **kwargs,
+       auto_symbolic = json_value.get(
+           utils.JSONConvertible.SYMBOLIC_MARKER, auto_symbolic
        )
+       if auto_symbolic:
+         return Symbolic.DictType.from_json(  # pytype: disable=attribute-error
+             json_value,
+             context=context,
+             value_spec=value_spec,
+             root_path=root_path,
+             allow_partial=allow_partial,
+             **kwargs,
+         )
      return utils.from_json(
          json_value,
+         context=context,
          _typename_resolved=True,
          root_path=root_path,
          allow_partial=allow_partial,
@@ -2138,10 +2185,12 @@ def from_json(
  def from_json_str(
      json_str: str,
      *,
+     context: Optional[utils.JSONConversionContext] = None,
+     auto_import: bool = True,
+     convert_unknown: bool = False,
      allow_partial: bool = False,
      root_path: Optional[utils.KeyPath] = None,
-     auto_import: bool = True,
-     auto_dict: bool = False,
+     value_spec: Optional[pg_typing.ValueSpec] = None,
      **kwargs,
  ) -> Any:
    """Deserialize (maybe) symbolic object from JSON string.
@@ -2161,15 +2210,22 @@

    Args:
      json_str: JSON string.
-     allow_partial: If True, allow a partial symbolic object to be created.
-       Otherwise error will be raised on partial value.
-     root_path: The symbolic path used for the deserialized root object.
+     context: JSON conversion context.
      auto_import: If True, when a '_type' is not registered, PyGlove will
        identify its parent module and automatically import it. For example,
        if the type is 'foo.bar.A', PyGlove will try to import 'foo.bar' and
        find the class 'A' within the imported module.
-     auto_dict: If True, dict with '_type' that cannot be loaded will remain
-       as dict, with '_type' renamed to 'type_name'.
+     convert_unknown: If True, when a '_type' is not registered and cannot
+       be imported, PyGlove will create objects of:
+       - `pg.symbolic.UnknownType` for unknown types;
+       - `pg.symbolic.UnknownTypedObject` for objects of unknown types;
+       - `pg.symbolic.UnknownFunction` for unknown functions;
+       - `pg.symbolic.UnknownMethod` for unknown methods.
+       If False, TypeError will be raised.
+     allow_partial: If True, allow a partial symbolic object to be created.
+       Otherwise error will be raised on partial value.
+     root_path: The symbolic path used for the deserialized root object.
+     value_spec: The value spec for the symbolic list or dict.
      **kwargs: Additional keyword arguments that will be passed to
        ``pg.from_json``.

@@ -2193,10 +2249,12 @@

    return from_json(
        _decode_int_keys(json.loads(json_str)),
+       context=context,
+       auto_import=auto_import,
+       convert_unknown=convert_unknown,
        allow_partial=allow_partial,
        root_path=root_path,
-       auto_import=auto_import,
-       auto_dict=auto_dict,
+       value_spec=value_spec,
        **kwargs
    )

@@ -2232,10 +2290,6 @@ def to_json(value: Any, **kwargs) -> Any:
    Returns:
      JSON value.
    """
-   # NOTE(daiyip): special handling `sym_jsonify` since symbolized
-   # classes may have conflicting `to_json` method in their existing classes.
-   if isinstance(value, Symbolic):
-     return value.sym_jsonify(**kwargs)
    return utils.to_json(value, **kwargs)

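A minimal deserialization sketch based on the updated from_json / from_json_str signatures and docstrings above. The type name 'my.module.Foo' is made up, and the expected behavior is taken from the docstrings rather than verified against the package.

# Sketch (not from the package): deserializing JSON whose '_type' cannot be
# imported, using the new convert_unknown / auto_symbolic arguments.
import pyglove as pg

json_str = '{"_type": "my.module.Foo", "x": 1}'   # 'my.module.Foo' is fictional.

# Default behavior (convert_unknown=False): a '_type' that is neither
# registered nor importable raises TypeError. With convert_unknown=True, the
# docstring says the value is preserved as a pg.symbolic.UnknownTypedObject,
# so it can be inspected or re-serialized without losing information.
value = pg.from_json_str(json_str, convert_unknown=True)

# auto_symbolic=False keeps plain Python lists/dicts instead of converting
# them to pg.List / pg.Dict, per the updated from_json docstring.
plain = pg.from_json([1, 2, {'a': 1}], auto_symbolic=False)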
@@ -20,9 +20,9 @@ from pyglove.core import typing as pg_typing
  from pyglove.core import utils
  from pyglove.core import views
  from pyglove.core.symbolic import base
- from pyglove.core.symbolic.dict import Dict
- from pyglove.core.symbolic.inferred import ValueFromParentChain
- from pyglove.core.symbolic.object import Object
+ from pyglove.core.symbolic.dict import Dict  # pylint: disable=g-importing-member
+ from pyglove.core.symbolic.inferred import ValueFromParentChain  # pylint: disable=g-importing-member
+ from pyglove.core.symbolic.object import Object  # pylint: disable=g-importing-member


  class FieldUpdateTest(unittest.TestCase):
@@ -156,6 +156,8 @@ class Dict(dict, base.Symbolic, pg_typing.CustomTyping):
        # Not okay:
        d.a.f2.abc = 1
      """
+     # Remove symbolic marker if present.
+     json_value.pop(utils.JSONConvertible.SYMBOLIC_MARKER, None)
      return cls(
          {
              k: base.from_json(
@@ -236,7 +238,8 @@ class Dict(dict, base.Symbolic, pg_typing.CustomTyping):
          accessor_writable=True,
          # We delay seal operation until members are filled.
          sealed=False,
-         root_path=root_path)
+         root_path=root_path
+     )

      dict.__init__(self)
      self._value_spec = None
@@ -247,9 +250,10 @@ class Dict(dict, base.Symbolic, pg_typing.CustomTyping):
      for k, v in kwargs.items():
        dict_obj[k] = v

+     iter_items = getattr(dict_obj, 'sym_items', dict_obj.items)
      if value_spec:
        if pass_through:
-         for k, v in dict_obj.items():
+         for k, v in iter_items():
            super().__setitem__(k, self._relocate_if_symbolic(k, v))

          # NOTE(daiyip): when pass_through is on, we simply trust input
@@ -258,11 +262,11 @@ class Dict(dict, base.Symbolic, pg_typing.CustomTyping):
          # repeated validation and transformation.
          self._value_spec = value_spec
        else:
-         for k, v in dict_obj.items():
+         for k, v in iter_items():
            super().__setitem__(k, self._formalized_value(k, None, v))
          self.use_value_spec(value_spec, allow_partial)
      else:
-       for k, v in dict_obj.items():
+       for k, v in iter_items():
          self._set_item_without_permission_check(k, v)

      # NOTE(daiyip): We set onchange callback at the end of init to avoid
@@ -537,7 +541,7 @@ class Dict(dict, base.Symbolic, pg_typing.CustomTyping):
        raise KeyError(self._error_message(
            f'Key must be string or int type. Encountered {key!r}.'))

-     old_value = self.get(key, pg_typing.MISSING_VALUE)
+     old_value = self.sym_getattr(key, pg_typing.MISSING_VALUE)
      if old_value is value:
        return None

@@ -644,6 +648,13 @@ class Dict(dict, base.Symbolic, pg_typing.CustomTyping):
      except AttributeError as e:
        raise KeyError(key) from e

+   def get(self, key: Union[str, int], default: Any = None) -> Any:
+     """Get item in this Dict."""
+     try:
+       return self.sym_inferred(key)
+     except AttributeError:
+       return default
+
    def __setitem__(self, key: Union[str, int], value: Any) -> None:
      """Set item in this Dict.

@@ -751,11 +762,13 @@ class Dict(dict, base.Symbolic, pg_typing.CustomTyping):

    def items(self) -> Iterator[Tuple[Union[str, int], Any]]:  # pytype: disable=signature-mismatch
      """Returns an iterator of (key, value) items in current dict."""
-     return self.sym_items()
+     for k, v in self.sym_items():
+       yield k, self._infer_if_applicable(v)

    def values(self) -> Iterator[Any]:  # pytype: disable=signature-mismatch
      """Returns an iterator of values in current dict.."""
-     return self.sym_values()
+     for v in self.sym_values():
+       yield self._infer_if_applicable(v)

    def copy(self) -> 'Dict':
      """Overridden copy using symbolic copy."""
@@ -824,12 +837,15 @@ class Dict(dict, base.Symbolic, pg_typing.CustomTyping):
        hide_default_values: bool = False,
        exclude_keys: Optional[Sequence[Union[str, int]]] = None,
        use_inferred: bool = False,
+       omit_symbolic_marker: bool = True,
        **kwargs,
    ) -> utils.JSONValueType:
      """Converts current object to a dict with plain Python objects."""
      exclude_keys = set(exclude_keys or [])
+     json_repr = {}
+     if not omit_symbolic_marker:
+       json_repr[utils.JSONConvertible.SYMBOLIC_MARKER] = True
      if self._value_spec and self._value_spec.schema:
-       json_repr = dict()
        matched_keys, _ = self._value_spec.schema.resolve(self.keys())  # pytype: disable=attribute-error
        for key_spec, keys in matched_keys.items():
          # NOTE(daiyip): The key values of frozen field can safely be excluded
@@ -851,20 +867,23 @@ class Dict(dict, base.Symbolic, pg_typing.CustomTyping):
                hide_frozen=hide_frozen,
                hide_default_values=hide_default_values,
                use_inferred=use_inferred,
-               **kwargs)
-       return json_repr
+               omit_symbolic_marker=omit_symbolic_marker,
+               **kwargs
+           )
      else:
-       return {
+       json_repr.update({
            k: base.to_json(
                self.sym_inferred(k, default=v) if (
                    use_inferred and isinstance(v, base.Inferential)) else v,
                hide_frozen=hide_frozen,
                hide_default_values=hide_default_values,
                use_inferred=use_inferred,
+               omit_symbolic_marker=omit_symbolic_marker,
                **kwargs)
            for k, v in self.sym_items()
            if k not in exclude_keys
-       }
+       })
+     return json_repr

    def custom_apply(
        self,