syncmodels 0.1.313__py2.py3-none-any.whl → 0.1.315__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
syncmodels/__init__.py CHANGED
@@ -2,4 +2,4 @@
 
 __author__ = """Asterio Gonzalez"""
 __email__ = "asterio.gonzalez@gmail.com"
- __version__ = "0.1.313"
+ __version__ = "0.1.315"
syncmodels/crawler.py CHANGED
@@ -152,7 +152,7 @@ from syncmodels.http import (
 # from syncmodels.registry import iRegistry
 from syncmodels.auth import iAuthenticator
 from syncmodels.storage import WaveStorage, Storage
- from syncmodels.exceptions import NonRecoverable, NonRecoverableAuth, BadData
+ from syncmodels.exceptions import BadLogic, NonRecoverable, NonRecoverableAuth, BadData
 from syncmodels.syncmodels import SyncModel, COPY
 from .crud import parse_duri, DEFAULT_DATABASE, DEFAULT_NAMESPACE
 
@@ -770,6 +770,10 @@ class iBot(iAgent):
 # abort responsability chain
 log.error("Inexpected responsability chain abortion")
 break
+ except (BadLogic,) as why:
+ # separate to have more fine grain control
+ await self.handle_non_recoverable(why, context)
+
 except Exception as why:
 log.error(why)
 log.error("".join(traceback.format_exception(*sys.exc_info())))
@@ -1071,7 +1075,7 @@ class iBot(iAgent):
 result = await extract_result(response)
 # result = await response.text()
 log.error(
- "[%s] server sent: %s",
+ "Forbidden [%s] server sent: %s",
 response.status,
 result,
 )
@@ -1498,6 +1502,7 @@ class NormalizePlugin(iPlugin):
 SCORE = 950 # before SetURIPlugin
 
 async def handle(self, stream: List[Dict], context: Dict):
+ stream[:] = stream[: iPlugin.MAX_RECORDS] # debug code
 
 for idx, data in enumerate(stream):
 
@@ -1697,6 +1702,8 @@ class HashStreamPlugin(iPlugin):
 SCORE = 50 # before SortPlugin
 
 async def handle(self, stream: List[Dict], context: Dict):
+ stream[:] = stream[: iPlugin.MAX_RECORDS] # debug code
+
 blueprint = hashlib.sha1(b"")
 for data in stream:
 new = hashlib.sha1(pickle.dumps(data))
@@ -2644,13 +2651,16 @@ class iAsyncCrawler(iCrawler):
 else:
 item, model = data, None
 
+ result = False
 if item:
 # assert isinstance(item, BaseModel)
 # result = await self.syncmodel.put(item)
 context.update(data)
 context[MODEL_KEY] = model
 context["item"] = item
- result = all([await sync.put(**context) for sync in self.syncmodel])
+ result = all(
+ [await sync.put(context=context, **context) for sync in self.syncmodel]
+ )
 # save original item if a raw storage has been specified
 if self.raw_storage:
 fqid = item.id
@@ -2663,6 +2673,7 @@ class iAsyncCrawler(iCrawler):
 await self.save(nice=True)
 else:
 foo = 1
+ return result
 
 async def update_meta(self, tube, meta: Dict) -> bool:
 meta = json_compatible(meta)
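The crawler's dispatch loop now catches `BadLogic` in its own branch ahead of the generic `except Exception` handler, routing it through the same `handle_non_recoverable(why, context)` call used for other fatal errors. Below is a minimal sketch of that ordering, assuming only what the hunk above shows; the bot class, logger and failing task are illustrative stand-ins, not syncmodels code.

```python
# Sketch only: mirrors the ordering of the new except branches in iBot.
# BadLogic and the handle_non_recoverable(why, context) call appear in the
# diff above; everything else here is a stand-in.
import asyncio
import logging

log = logging.getLogger(__name__)


class NonRecoverable(Exception): ...


class BadLogic(NonRecoverable): ...


class DemoBot:
    async def handle_non_recoverable(self, why, context):
        # stand-in for the handler the diff delegates to; here it just logs
        log.error("non recoverable: %s", why)

    async def dispatch(self, task, context):
        try:
            await task(context)
        except (BadLogic,) as why:
            # dedicated branch: finer-grained control than the catch-all below
            await self.handle_non_recoverable(why, context)
        except Exception as why:
            # generic fallback keeps the previous behaviour
            log.error(why)


async def _demo():
    async def bad_task(context):
        raise BadLogic("logic file rejected")

    await DemoBot().dispatch(bad_task, context={})


asyncio.run(_demo())
```

Because `BadLogic` derives from `NonRecoverable` (see the exceptions.py hunk below), listing it first keeps the catch-all branch for truly unexpected failures only.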
syncmodels/crud.py CHANGED
@@ -175,7 +175,7 @@ class iCRUD:
 "Get an object from URI"
 raise NotImplementedError()
 
- async def put(self, uri: URI, data: JSON = None, **kw) -> bool:
+ async def put(self, uri: URI, data: JSON = None, context={}, **kw) -> bool:
 "Put an object from URI"
 raise NotImplementedError()
 
@@ -351,7 +351,7 @@ class iStorage(iCRUD):
 log.error(why)
 log.error("".join(traceback.format_exception(*sys.exc_info())))
 
- async def put(self, uri: URI, data: JSON = None, **kw) -> bool:
+ async def put(self, uri: URI, data: JSON = None, context={}, **kw) -> bool:
 if data is None:
 data = kw
 # else:
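Both `iCRUD.put` and `iStorage.put` now accept a `context` mapping in addition to `uri` and `data`, so callers can hand every storage a shared dict to report back through. A small sketch of a storage overriding the widened signature; only the parameter list mirrors the diff, the in-memory store is hypothetical, and it defaults `context` to `None` (rather than the package's `context={}`) so each call gets a fresh dict.

```python
# Sketch only: the widened put(uri, data, context, **kw) signature is taken
# from the diff above; this in-memory storage and its behaviour are made up.
import asyncio
from typing import Any, Dict, Optional


class MemoryStorage:
    def __init__(self) -> None:
        self.db: Dict[str, Any] = {}

    async def put(self, uri: str, data: Optional[dict] = None,
                  context: Optional[dict] = None, **kw) -> bool:
        context = {} if context is None else context
        if data is None:
            data = kw               # same fallback iStorage.put uses in the diff
        self.db[uri] = data
        context["stored_uri"] = uri  # side-channel back to the caller
        return True


async def _demo() -> None:
    ctx: Dict[str, Any] = {}
    ok = await MemoryStorage().put("test://thing:1", {"value": 42}, context=ctx)
    print(ok, ctx)


asyncio.run(_demo())
```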
syncmodels/definitions.py CHANGED
@@ -100,6 +100,8 @@ GEOMETRY_COMP_KEY = "geometry__"
 GEOJSON_KEY = "geojson"
 GEOLINK_KEY = "geolink"
 
+ COMPARISON_PATTERNS = "compare__"
+
 GEOSPECS_KEYS = [GEOMETRY_KEY, GEOMETRY_SHAPE_KEY, GEOJSON_KEY]
 
 UBICATION_KEY = "ubication"
@@ -128,6 +130,7 @@ MONOTONIC_SINCE_OPERATOR = "since_operator__"
 DATETIME_KEY = "datetime"
 DATETIME_LAST_KEY = "last"
 ID_KEY = "id"
+ PUSHED = "pushed__"
 FQUID_KEY = "fquid"
 ORG_KEY = "id__"
 ORG_URL = "url__"
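The two new markers are plain string keys. Going by the storage.py hunks further down, `COMPARISON_PATTERNS` ("compare__") travels in the `put()` keyword arguments to restrict which fields take part in the duplicate comparison, and `PUSHED` ("pushed__") is written into the shared `context` to report whether the record was actually stored. A tiny, purely illustrative shape for both; the patterns and values below are made up.

```python
# Illustrative only: the key names come from definitions.py; the regex
# patterns and the simulated result are not real syncmodels data.
COMPARISON_PATTERNS = "compare__"
PUSHED = "pushed__"

kw = {
    # restrict the duplicate check to fields matching these patterns
    COMPARISON_PATTERNS: [r"temperature", r"humidity"],
}
context: dict = {}

# after `await storage.put(uri, data, context=context, **kw)` the storage
# layer writes context[PUSHED] = push (True or False); simulated here:
context[PUSHED] = True
print(context.get(PUSHED))
```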
syncmodels/exceptions.py CHANGED
@@ -23,5 +23,9 @@ class NonRecoverableAuth(NonRecoverable):
 """
 
 
+ class BadLogic(NonRecoverable):
+ """Bad Logic File"""
+
+
 class BadData(SyncModelException):
 """Data is not properly formatted."""
syncmodels/helpers/crawler.py CHANGED
@@ -137,7 +137,7 @@ class GeojsonManager:
 "returns the uri of a related object"
 for candidate in [ORG_KEY, ID_KEY]:
 if _sid := data.get(candidate):
- _sid = parse_duri(_sid)
+ _sid = parse_duri(str(_sid))
 if _sid["fscheme"] != "test":
 
 _sid["path"] = "/{thing}/geo".format_map(_sid)
syncmodels/logic/analyzer.py CHANGED
@@ -250,6 +250,10 @@ class XPathAnalyzer:
 return found > 0
 
 def _analyze_meta(self, line, ctx) -> bool:
+ # TODO: tampodary skip meta info
+ return False
+
+ def _analyze_meta_old(self, line, ctx) -> bool:
 keyword = ctx.get("keyword") or {"name", "itemprop"}
 item = ctx["item"]
 info = ctx["info"]
@@ -292,6 +296,10 @@ class XPathAnalyzer:
 return self._analyze_text(line, ctx)
 
 def _analyze_ldjson(self, line, ctx) -> bool:
+ # TODO: this funcion has been temporary disabled
+ return False
+
+ def _analyze_ldjson_old(self, line, ctx) -> bool:
 # TODO: helper for loading json
 line = line.strip()
 line = html2text(line)
syncmodels/logic/browser.py CHANGED
@@ -45,7 +45,7 @@ from syncmodels.crud import parse_duri
 from ..definitions import TASK_KEY, KIND_KEY, ORG_URL, DURI, ID_KEY, VOL_DATA
 from ..session import iSession
 from ..crawler import iBot
- from ..crawler import MetaExtractPlugin, PutPlugin, SetURIPlugin
+ from ..crawler import MetaExtractPlugin, PutPlugin, SetURIPlugin, SortPlugin
 
 from .swarm import SwarmBot, SwarmCrawler, SWARM_REGISTER, SWARM_TASKS
 from .analyzer import XPathAnalyzer
@@ -717,13 +717,15 @@ class BrowserLogicBot(SwarmBot):
 )
 
 def _add_plugins(self):
- super()._add_plugins()
+ # super()._add_plugins()
+ # self.plugins.clear()
 
- self.plugins.clear()
 # self.add_plugin(HashStreamPlugin())
 self.add_plugin(MetaExtractPlugin(geojson=False))
 self.add_plugin(PutPlugin())
 self.add_plugin(SetURIPlugin())
+ # self.add_plugin(SortPlugin())
+
 # self.add_plugin(RenameKeys())
 # self.add_plugin(UnwrapResponse())
 # self.add_plugin(RegExtractor())
@@ -1822,9 +1824,10 @@ class BrowserLogicSession(iBrowserSession):
 
 async def _locate(self, page, selector, ctx):
 elements = []
- timeout = ctx["info"].get("timeout", 5)
- num_selectors = len(ctx["info"]["selector"])
- timeout = 1 + timeout // num_selectors
+ # timeout = ctx["info"].get("timeout", 5)
+ # num_selectors = len(ctx["info"]["selector"])
+ # timeout = 1 + timeout // num_selectors
+ timeout = 1.2
 t1 = time.time() + timeout
 while time.time() < t1:
 try:
@@ -1839,7 +1842,6 @@ class BrowserLogicSession(iBrowserSession):
 await asyncio.sleep(1)
 except Exception as why:
 log.error("selector: %s -> %s", selector, why)
-
 foo = 1
 
 async def _mark_element(self, element, style):
syncmodels/storage.py CHANGED
@@ -26,7 +26,13 @@ from surrealist import Surreal as Surrealist
 
 from agptools.logs import logger
 from agptools.helpers import parse_uri, build_uri, DATE, tf
- from agptools.containers import merge as merge_dict, build_dict, json_compatible
+ from agptools.containers import (
+ merge as merge_dict,
+ build_dict,
+ json_compatible,
+ Walk,
+ CWalk,
+ )
 
 from syncmodels.definitions import (
 REVERSE_SORT_KEY,
@@ -40,6 +46,13 @@ from syncmodels.http import (
 # USER_AGENT,
 APPLICATION_JSON,
 )
+ from syncmodels.exceptions import (
+ SyncModelException,
+ BadLogic,
+ NonRecoverable,
+ NonRecoverableAuth,
+ BadData,
+ )
 
 from .helpers.importers import JSONVerter
 
@@ -72,6 +85,8 @@ from .definitions import (
 SORT_KEY,
 LIMIT_KEY,
 WAVE_LAST_KEY,
+ COMPARISON_PATTERNS,
+ PUSHED,
 )
 from .crud import (
 DEFAULT_DATABASE,
@@ -120,6 +135,12 @@ UTC_TZ = pytz.timezone("UTC")
 
 
 # REGEXP_FQUI = re.compile(r"((?P<ns>[^/]*?)/)?(?P<table>[^:]+):(?P<uid>.*)$")
+
+
+ def comparable_struct(data, patterns):
+ wdata = Walk(data)
+
+
 def split_fqui(fqid):
 "try to split FQUID into table and uid"
 try:
@@ -130,7 +151,7 @@ def split_fqui(fqid):
 
 
 def normalize_payload(data, keys):
- for key in set(keys).intersection(data):
+ for key in set(keys or []).intersection(data):
 value = DATE(data[key])
 if isinstance(value, datetime):
 if not value.tzinfo:
@@ -765,7 +786,7 @@ class WaveStorage(iWaves, iStorage):
 async def update(self, query: URI | QUERY, data) -> List[JSON]:
 return await self.storage.update(query, data)
 
- async def put(self, uri: URI, data: JSON = None, **kw) -> bool:
+ async def put(self, uri: URI, data: JSON = None, context={}, **kw) -> bool:
 """
 Try to insert a new wave object into storage:
 
@@ -818,8 +839,8 @@ class WaveStorage(iWaves, iStorage):
 if model := kw.get(MODEL_KEY):
 self.register_metadata(uri, {"model": model})
 
- sort_keys = kw.get(SORT_KEY, [])
- reverse_sort_keys = kw.get(REVERSE_SORT_KEY, [])
+ sort_keys = kw.get(SORT_KEY) or []
+ reverse_sort_keys = kw.get(REVERSE_SORT_KEY) or []
 sort_kw_presence = all([kw.get(_) for _ in sort_keys])
 sort_data_presence = all([data.get(_) for _ in reverse_sort_keys])
 
@@ -844,7 +865,7 @@ class WaveStorage(iWaves, iStorage):
 # stream = [kw]
 if not (sort_keys := SortKeyFinder.find_sort_key(stream=stream)):
 kind = kw.get(KIND_KEY)
- sort_keys = kw[SORT_KEY] = SortKeyFinder.get(kind)
+ sort_keys = kw[SORT_KEY] = SortKeyFinder.get(kind) or []
 
 kw[SORT_KEY] = sort_keys
 log.debug("[%s] found: %s as sort_keys", uri, sort_keys)
@@ -871,7 +892,7 @@ class WaveStorage(iWaves, iStorage):
 normalize_payload(data, sort_keys)
 
 monotonic = data.setdefault(MONOTONIC_KEY, monotonic_wave())
- for monotonic_key in set(sort_keys or []).intersection(data):
+ for monotonic_key in set(sort_keys).intersection(data):
 monotonic_value = DATE(data[monotonic_key])
 
 # seconds
@@ -897,16 +918,16 @@ class WaveStorage(iWaves, iStorage):
 
 data_sort_blueprint = build_dict(data, sort_keys)
 # data_sort_blueprint = build_comparisson_dict(data, reverse_sort_keys)
- blueprint = {
+ data_sort_bp = {
 MONOTONIC_SINCE_KEY: monotonic_key,
 MONOTONIC_SINCE_VALUE: since_value,
 MONOTONIC_SINCE_OPERATOR: ">=",
 ORDER_KEY: monotonic_key,
 DIRECTION_KEY: DIRECTION_DESC,
- LIMIT_KEY: kw.get(
- LIMIT_KEY, 50 # TODO: agp: set in definition?
- ), # TODO: this is temporal, ideally None
- ORG_KEY: uid,
+ # LIMIT_KEY: kw.get(
+ # LIMIT_KEY, 50 # TODO: agp: set in definition?
+ # ), # TODO: this is temporal, ideally None
+ # ORG_KEY: uid,
 # **data_sort_blueprint, # implies sv = True
 }
 # TODO: LIMIT 1 ?
@@ -921,8 +942,6 @@ class WaveStorage(iWaves, iStorage):
 sort_keys,
 MASK,
 )
- # existing = await self.storage.query(query, **blueprint)
- # N = len(existing)
 
 # TODO: agp: cache and get behaviour from database?
 if not (behavior := self.behavior_uri.get(query)):
@@ -938,77 +957,97 @@ class WaveStorage(iWaves, iStorage):
 
 self.behavior_uri[query] = behavior
 
- # t0 = time.time()
-
+ t0 = time.time()
 # search the same data
 # TODO: update blueprint
- blueprint = {
- # MONOTONIC_SINCE_KEY: monotonic_key,
- # MONOTONIC_SINCE_VALUE: since_value,
- # MONOTONIC_SINCE_OPERATOR: ">=",
- # ORDER_KEY: monotonic_key,
- # DIRECTION_KEY: DIRECTION_DESC,
+ identical_bp = {
 LIMIT_KEY: kw.get(
- LIMIT_KEY, 50 # TODO: agp: set in definition?
+ LIMIT_KEY, 10 # TODO: agp: set in definition?
 ), # TODO: this is temporal, ideally None
 ORG_KEY: uid,
 **data_sort_blueprint, # implies sv = True
 }
 identical = await self.storage.query(
 query,
- **blueprint,
+ **identical_bp,
 # **data_sort_bp,
 )
+
 # TODO: try to create only a single query
 # TODO: review different structures case
+ similar_bp = {
+ LIMIT_KEY: kw.get(
+ LIMIT_KEY, 50 # TODO: agp: set in definition?
+ ), # TODO: this is temporal, ideally None
+ ORG_KEY: uid,
+ **data_sort_blueprint, # implies sv = True
+ }
 similar = await self.storage.query(
 query,
- **blueprint,
- # **data_sort_bp,
+ **similar_bp,
+ **data_sort_bp,
 )
- # t1 = time.time()
- # _elapsed = t1 - t0
+ t1 = time.time()
+ _elapsed = t1 - t0
 existing = identical + similar
- N2 = len(existing)
- if data_sort_blueprint and N2 > 1:
+ N = len(existing)
+ log.debug(
+ "[%s] found [%s] similar records in %s secs",
+ identical_bp,
+ N,
+ _elapsed,
+ )
+ if data_sort_blueprint and N > 1:
 if behavior & ALLOW_DUPLICATED_ITEMS:
 log.debug(
 "tube [%s] has multiples records: [%s] records, but ALLOW_SAME_DATE_DIFFERENT_VALUES is defined",
 uid,
- N2,
+ N,
 )
 existing.clear()
 else:
 log.debug(
 "tube has multiples records: [%s] = %s records, must just 1 and sort_key is defined by: [%s]",
 uid,
- N2,
+ N,
 data_sort_blueprint,
 )
 
 push = True
+ patterns = kw.get(COMPARISON_PATTERNS)
+ if patterns:
+ wdata = CWalk(data, include=patterns, exclude=MASK)
+ if not wdata:
+ log.warning("patterns don't get any data")
+ raise BadLogic(data)
+ else:
+ patterns = [r".*"]
+ wdata = CWalk(data, include=patterns, exclude=MASK)
 for exists in existing:
+ wexists = CWalk(exists, include=patterns, exclude=MASK)
 existing_sort_blueprint = build_dict(exists, reverse_sort_keys)
 # existing_sort_blueprint = build_comparisson_dict(exists, reverse_sort_keys)
 
 same_sort_key = existing_sort_blueprint == data_sort_blueprint
 
 # check if we must "duplicate" data inside tube
- keys0 = set(exists).difference(MASK)
- keys1 = set(data).difference(MASK)
+ # keys0 = set(exists).difference(MASK)
+ # keys1 = set(data).difference(MASK)
+ keys0 = set(wexists)
+ keys1 = set(wdata)
 same_structure = keys0 == keys1
 
 same_values = False
 if same_sort_key and same_structure:
 for key in keys0:
- if exists[key] != data[key]:
+ if wexists[key] != wdata[key]:
 log.debug(
 "[%s].[%s].[%s]: %s != %s",
 uid,
 data_sort_blueprint,
 key,
- exists[key],
- data[key],
+ wexists[key],
+ wdata[key],
 )
 break
 else:
@@ -1131,6 +1170,7 @@ class WaveStorage(iWaves, iStorage):
 data[ID_KEY] = "{thing}:{id}".format_map(_uri)
 
 # must push the data?
+ context[PUSHED] = push
 if push:
 if isinstance(self.storage, SurrealistStorage):
 log_records.debug(
@@ -1235,6 +1275,8 @@ class WaveStorage(iWaves, iStorage):
 return all([res0, res1, res2])
 else:
 return True
+ except SyncModelException as why:
+ raise why
 except Exception as why: # pragma: nocover
 log.error(why)
 log.error("".join(traceback.format_exception(*sys.exc_info())))
syncmodels/syncmodels.py CHANGED
@@ -255,7 +255,7 @@ class SyncModel(iCRUD):
 # )
 # return []
 
- async def put(self, item: BaseModel, **kw) -> bool:
+ async def put(self, item: BaseModel, context={}, **kw) -> bool:
 """Try to create / update an item of `type_` class from raw data
 
 - get the pydantic item
@@ -291,7 +291,7 @@ class SyncModel(iCRUD):
 kw["uri"] = fqid
 kw["data"] = data
 for storage in self.storage:
- result = await storage.put(**kw)
+ result = await storage.put(context=context, **kw)
 results.append(result)
 
 return all(results)
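Taken together, the crawler calls `sync.put(context=context, **context)`, `SyncModel.put` forwards the same dict to every `storage.put(context=context, **kw)`, and `WaveStorage.put` writes `context[PUSHED]` into it, so the original caller can see whether anything was stored. A minimal end-to-end sketch of that threading with stand-in classes; only the parameter names and the `pushed__` key come from the diff.

```python
# Sketch of the context threading only; SyncModel, WaveStorage and the real
# signatures live in syncmodels, the classes below are stand-ins.
import asyncio

PUSHED = "pushed__"   # key from syncmodels.definitions


class FakeStorage:
    async def put(self, uri, data=None, context=None, **kw):
        context = {} if context is None else context
        context[PUSHED] = True        # report back through the shared dict
        return True


class FakeSyncModel:
    def __init__(self, storage):
        self.storage = storage        # list of storages, as in SyncModel

    async def put(self, item, context=None, **kw):
        context = {} if context is None else context
        results = [await s.put(uri=item["id"], data=item, context=context, **kw)
                   for s in self.storage]
        return all(results)


async def _demo():
    context = {}
    ok = await FakeSyncModel([FakeStorage()]).put({"id": "test://thing:1"},
                                                  context=context)
    print(ok, context.get(PUSHED))    # True True


asyncio.run(_demo())
```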
syncmodels-0.1.313.dist-info/METADATA → syncmodels-0.1.315.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: syncmodels
- Version: 0.1.313
+ Version: 0.1.315
 Summary: Synchronizable Models
 Home-page: https://github.com/asterio.gonzalez/syncmodels
 Author: Asterio Gonzalez
@@ -18,7 +18,7 @@ Classifier: Programming Language :: Python :: 3.11
 Requires-Python: >=3.6
 License-File: LICENSE
 License-File: AUTHORS.rst
- Requires-Dist: agptools>=0.1.313
+ Requires-Dist: agptools>=0.1.315
 Requires-Dist: aiocache
 Requires-Dist: aiohttp
 Requires-Dist: Click
syncmodels-0.1.313.dist-info/RECORD → syncmodels-0.1.315.dist-info/RECORD RENAMED
@@ -1,9 +1,9 @@
- syncmodels/__init__.py,sha256=F-gwFoSn9hneX4i5kpR9dYY6CUDVidYhx9nW2WiEJcU,142
+ syncmodels/__init__.py,sha256=PBl-Xi0JTeRosOEvZ6CFBlA-pigf3ngrZFcaMEhU-HY,142
 syncmodels/context.py,sha256=k1Gs_ip9BfyRFpyRnzqYvRDKo0sYBqJsh6z9sWln9oE,451
- syncmodels/crawler.py,sha256=vfMn39Fev1yKY8ELvTK3rM-RUPiiy8YoxAG2q7b9MkM,94247
- syncmodels/crud.py,sha256=viHBwzczcjNyFiLxL7VGYSbWJW5VjU8AvKaPufBMP7M,15303
- syncmodels/definitions.py,sha256=2P-Sfgj18viSHZ-wAK4WfQEzDKdyygs8Z-XLzA0jg_k,5420
- syncmodels/exceptions.py,sha256=8EOYW8h_2noeoKAYqG4aEQTgB1FEkamxKt3t4pDJ3pM,626
+ syncmodels/crawler.py,sha256=18pAO0AkckennZcBCJvgCrlJ9QLu_8Hv8bIVi_VYM_c,94651
+ syncmodels/crud.py,sha256=ozumS7XgmXSFcFN2SZBH0jB0j_1vK2xE-FeFcTG7ikw,15327
+ syncmodels/definitions.py,sha256=vQ6-Zsftzy5y02z6Dd3_p5cd37Zqk0lcVrv-06gnDZk,5475
+ syncmodels/exceptions.py,sha256=ZLAwu19cs2UN2Sv3jaLnixT_jRI7T42TfyutCkUsuIk,685
 syncmodels/geofactory.py,sha256=1FkrdEn0QA0O4_lSUAwjqXH2dmlQWi32AkntnG4AEQY,10372
 syncmodels/http.py,sha256=FFVT3QJJgur2dv1Q_7l9ZsWN8z6_gUjOT9hJff1ZAqk,3335
 syncmodels/parallel.py,sha256=Ll8HmyFF9v9fIofqqSgfhyTlklvb77mTtNdG5Y9lqdQ,7145
@@ -11,8 +11,8 @@ syncmodels/registry.py,sha256=YaQtgbSwa0je1MpCcVHALI3_b85vrddyOlhsnrUcKZs,8224
 syncmodels/requests.py,sha256=wWoC5hPDm1iBM_zrlyKRauzhXgdKR3pT5RqyC-5UZhQ,538
 syncmodels/runner.py,sha256=IHDKuQ3yJ1DN9wktMiIrerPepYX61tc3AzbFfuUqEFw,5454
 syncmodels/schema.py,sha256=uinUt8Asq_x7xa6MKWVXNyoWO6gKocjGPppjimaXzEU,2492
- syncmodels/storage.py,sha256=N69l-CSZOZomwVJMALKbPI1jPsngxDNlxbxX9SV4vfg,70505
- syncmodels/syncmodels.py,sha256=EzSC4C75V4wJDmsLLbp8YUVwqA6A16KCNW8nB-MqPcs,10567
+ syncmodels/storage.py,sha256=invDyQMfDHavQH5Fnbi7afSeIgH6NoVtD_LlNPOqSTI,71646
+ syncmodels/syncmodels.py,sha256=jcUxVbv1hrx5hI81VCO1onIM6WyORTqJVPwIqlPocOc,10596
 syncmodels/timequeue.py,sha256=YRd3ULRaIhoszaBsYhfr0epMqAbL6-NwVEtScjUYttM,595
 syncmodels/wave.py,sha256=Gra22BLiA9z2nF-6diXpjAc4GZv9nebmyvHxdAfXec4,7764
 syncmodels/auth/__init__.py,sha256=xs9Y_bTR99TKt7NyEtMI3lLPxQy9FuP6CYakgAAthCo,1139
@@ -24,7 +24,7 @@ syncmodels/cli/surreal.py,sha256=eL7pDicLo0_68JhpCZacde6DOVcfHE_UEzczvgDuvAc,106
 syncmodels/cli/wingdbstub.py,sha256=q4z-RqHN1ttzNtiLYTzqQG2ZYZ6W3NOnEd2E5NGhfao,17165
 syncmodels/cli/workspace.py,sha256=wajZnxf567nYoQysTEgxrDAp8ZBU8zSuoP4KyZtqvdc,2461
 syncmodels/helpers/__init__.py,sha256=qZet64gMJNAAqzUdEqCV5WDk5D2Dbw1Kxlt9Jo6x3m4,23
- syncmodels/helpers/crawler.py,sha256=FrYYUzGcbLK1gszscc1rzwl9Y86P4Q1H_-k2Z804CcM,4800
+ syncmodels/helpers/crawler.py,sha256=2WiDxnjxZKbGa8x71bgeSc2wmabKa_wD-ofQMbgVXLo,4805
 syncmodels/helpers/explorer.py,sha256=-Dol3z1pALCMI9OPSvVbROaTzLjbUpS0suJ82Z6Rmb4,7447
 syncmodels/helpers/faker.py,sha256=tPtibNh28KoHb9kcwE9WaPdPrSrN6xMbCv5HhNFEVG0,1222
 syncmodels/helpers/general.py,sha256=UAcSfrvsaT15iJuxsR3WMk51UjpLLGDf14xmpBojndg,6160
@@ -37,8 +37,8 @@ syncmodels/helpers/surreal.py,sha256=zoWtGm5oAxwvgJNq_NTpKOHN3h9FNObhFDLuiBOl1YY
 syncmodels/helpers/units.py,sha256=g50m5DQrAyP_qpDRa4LCEA5Rz2UZUmlIixfWG_ddw9I,3571
 syncmodels/logic/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 syncmodels/logic/activity_logger.py,sha256=8wjvgRwaNbibYWGgl-trovSS70yNkoCTlb-AIx3aZEE,14053
- syncmodels/logic/analyzer.py,sha256=7JDIvZIiDVgdABaaRk7TEbzOFM6HAH3RaSsIDmL4QVE,12291
- syncmodels/logic/browser.py,sha256=W_s_rIWceGni2GMdhlrdZTTxTG-43n2CQDhPgyJfdE0,84711
+ syncmodels/logic/analyzer.py,sha256=OiRZBJoqjc_qb3w1jBjGftWgd20Cig0s7_GLTCbp3fw,12539
+ syncmodels/logic/browser.py,sha256=vMxGaYIR1ov0tOMbWNpd4MMFQBunRuvxE8F-Gfo9cTo,84795
 syncmodels/logic/swarm.py,sha256=eRBVlNAOzzWKFGCb7LGLx2aj7yQlTY1OwLoeSEllvXY,17207
 syncmodels/mapper/__init__.py,sha256=jS82LFr9zzyqXBz82tSw04vDowhTpKxhg_W2XvhUlt0,129
 syncmodels/mapper/fiware.py,sha256=auszPmhCS46z_68MXjksrQAFUfctjbVrVdBvOpOkMj8,523
@@ -302,10 +302,10 @@ syncmodels/session/postgresql.py,sha256=ZMIu1Rv93pKfvFlovFBmWArzlrT2xaQWNYGZT_LW
 syncmodels/session/sql.py,sha256=bD7zXRrEKKJmqY2UoibWENuWb5zHrrU72F3_dYbS6LY,6569
 syncmodels/session/sqlite.py,sha256=nCDjopLiBpX1F10qkKoARM7JrVdIpJ1WdGOduFVxaiA,2080
 syncmodels/source/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- syncmodels-0.1.313.dist-info/AUTHORS.rst,sha256=3ZPoqg8Aav8DSYKd0fwcwn4_5HwSiMLart0E5Un00-U,168
- syncmodels-0.1.313.dist-info/LICENSE,sha256=uzMOYtIiUsnsD0xHJR7aJWJ4v_bvan0kTnvufy5eNoA,1075
- syncmodels-0.1.313.dist-info/METADATA,sha256=NRPuII9unMl4-z_TQqvb9W-Tp94LIT6CXU3ctK_v3us,2700
- syncmodels-0.1.313.dist-info/WHEEL,sha256=SrDKpSbFN1G94qcmBqS9nyHcDMp9cUS9OC06hC0G3G0,109
- syncmodels-0.1.313.dist-info/entry_points.txt,sha256=dMnigjZsHMxTwXiiZyBZdBbMYE0-hY3L5cG15EcDAzw,51
- syncmodels-0.1.313.dist-info/top_level.txt,sha256=2DfQ9NuAhKMjY3BvQGVBA7GfqTm7EoHNbaehSUiqiHQ,11
- syncmodels-0.1.313.dist-info/RECORD,,
+ syncmodels-0.1.315.dist-info/AUTHORS.rst,sha256=3ZPoqg8Aav8DSYKd0fwcwn4_5HwSiMLart0E5Un00-U,168
+ syncmodels-0.1.315.dist-info/LICENSE,sha256=uzMOYtIiUsnsD0xHJR7aJWJ4v_bvan0kTnvufy5eNoA,1075
+ syncmodels-0.1.315.dist-info/METADATA,sha256=bojiX01oxFCT4ssaQM1RCj1aSocBx1zkjXCRv_vgspY,2700
+ syncmodels-0.1.315.dist-info/WHEEL,sha256=SrDKpSbFN1G94qcmBqS9nyHcDMp9cUS9OC06hC0G3G0,109
+ syncmodels-0.1.315.dist-info/entry_points.txt,sha256=dMnigjZsHMxTwXiiZyBZdBbMYE0-hY3L5cG15EcDAzw,51
+ syncmodels-0.1.315.dist-info/top_level.txt,sha256=2DfQ9NuAhKMjY3BvQGVBA7GfqTm7EoHNbaehSUiqiHQ,11
+ syncmodels-0.1.315.dist-info/RECORD,,