syncmodels 0.1.318__py2.py3-none-any.whl → 0.1.320__py2.py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. The information is provided for informational purposes only.
- syncmodels/__init__.py +1 -1
- syncmodels/storage.py +263 -235
- {syncmodels-0.1.318.dist-info → syncmodels-0.1.320.dist-info}/METADATA +2 -2
- {syncmodels-0.1.318.dist-info → syncmodels-0.1.320.dist-info}/RECORD +9 -9
- {syncmodels-0.1.318.dist-info → syncmodels-0.1.320.dist-info}/AUTHORS.rst +0 -0
- {syncmodels-0.1.318.dist-info → syncmodels-0.1.320.dist-info}/LICENSE +0 -0
- {syncmodels-0.1.318.dist-info → syncmodels-0.1.320.dist-info}/WHEEL +0 -0
- {syncmodels-0.1.318.dist-info → syncmodels-0.1.320.dist-info}/entry_points.txt +0 -0
- {syncmodels-0.1.318.dist-info → syncmodels-0.1.320.dist-info}/top_level.txt +0 -0
syncmodels/__init__.py
CHANGED
syncmodels/storage.py
CHANGED
@@ -878,284 +878,312 @@ class WaveStorage(iWaves, iStorage):
         foo = 1
         # return

-
-        # - (within grace period)
-        # - if an identical object has been found, just ignore it.
-        # - if an object with the same `sort_key` exists in the tube warning about the differences.
-        # - if such *holder* object is not found in `tube` the insert it.
-
-        # >>>>
-        # now
-        # ['2005-06-01T00:00:00.000+02:00']
-        # [datetime.datetime(2005, 6, 1, 0, 0, tzinfo=tzoffset(None, 7200))]
-
+        push = True
         normalize_payload(data, sort_keys)

         monotonic = data.setdefault(MONOTONIC_KEY, monotonic_wave())
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        }
-        # TODO: LIMIT 1 ?
-
-        # MASK = set([ID_KEY, MONOTONIC_KEY, *sort_keys])
-        MASK = set([ID_KEY, MONOTONIC_KEY])
-
-        if not reverse_sort_keys and sort_keys:
-            MASK.update(sort_keys)
-            log.debug(
-                "including sort_keys: [%s] in excluded MASK: [%s] to find similar registers",
-                sort_keys,
-                MASK,
-            )
+        data_sort_blueprint = []
+
+        async def prevously_inserted():
+            """check if *same* object has been inserted in `tube` previously:
+            - (within grace period)
+            - if an identical object has been found, just ignore it.
+            - if an object with the same `sort_key` exists in the tube warning about the differences.
+            - if such *holder* object is not found in `tube` the insert it.
+
+            # Note:
+            # now
+            # ['2005-06-01T00:00:00.000+02:00']
+            # [datetime.datetime(2005, 6, 1, 0, 0, tzinfo=tzoffset(None, 7200))]
+            """
+            nonlocal push
+            nonlocal sort_keys
+            nonlocal reverse_sort_keys
+            nonlocal namespace
+            nonlocal database
+            nonlocal thing
+            nonlocal uid
+            nonlocal monotonic
+            nonlocal data_sort_blueprint
+            for monotonic_key in set(sort_keys).intersection(data):
+                monotonic_value = DATE(data[monotonic_key])
+
+                # seconds
+                grace_period = kw.get(GRACE_PERIOD_KEY, DEFAULT_GRACE_PERIOD)
+                grace_period = timedelta(seconds=grace_period)
+                since_value = monotonic_value - grace_period
+                # pass to UTC time
+                if not since_value.tzinfo:
+                    # x = x.replace(tzinfo=timezone.utc)
+                    # x = x.replace(tzinfo=LOCAL_TZ)
+                    since_value = pytz.utc.localize(since_value)
+                since_value = since_value.astimezone(UTC_TZ)
+                since_value = since_value.strftime("%Y-%m-%dT%H:%M:%SZ")

-
-        if not (behavior := self.behavior_uri.get(query)):
-            for (
-                pattern,
-                permissions,
-            ) in self.behavior_templates.items():
-                if re.match(pattern, query):
-                    behavior = permissions
-                    break
+                break
             else:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+                monotonic_key = MONOTONIC_KEY # ??
+                grace_period = kw.get(GRACE_PERIOD_KEY, DEFAULT_GRACE_PERIOD)
+                grace_period *= 10**9 # nanoseconds
+                since_value = monotonic - grace_period
+
+            query = f"{namespace}://{database}/{thing}"
+
+            data_sort_blueprint = build_dict(data, sort_keys)
+            # data_sort_blueprint = build_comparisson_dict(data, reverse_sort_keys)
+            data_sort_bp = {
+                MONOTONIC_SINCE_KEY: monotonic_key,
+                MONOTONIC_SINCE_VALUE: since_value,
+                MONOTONIC_SINCE_OPERATOR: ">=",
+                ORDER_KEY: monotonic_key,
+                DIRECTION_KEY: DIRECTION_DESC,
+                # LIMIT_KEY: kw.get(
+                # LIMIT_KEY, 50 # TODO: agp: set in definition?
+                # ), # TODO: this is temporal, ideally None
+                # ORG_KEY: uid,
+                # **data_sort_blueprint, # implies sv = True
+            }
+            # TODO: LIMIT 1 ?

-
-
-
-
-
-            ), # TODO: this is temporal, ideally None
-            ORG_KEY: uid,
-            **data_sort_blueprint, # implies sv = True
-        }
-        similar = await self.storage.query(
-            query,
-            **similar_bp,
-            **data_sort_bp,
-        )
-        t1 = time.time()
-        _elapsed = t1 - t0
-        existing = identical + similar
-        N = len(existing)
-        log.debug(
-            "[%s] found [%s] similar records in %s secs",
-            identical_bp,
-            N,
-            _elapsed,
-        )
-        if data_sort_blueprint and N > 1:
-            if behavior & ALLOW_DUPLICATED_ITEMS:
-                log.debug(
-                    "tube [%s] has multiples records: [%s] records, but ALLOW_SAME_DATE_DIFFERENT_VALUES is defined",
-                    uid,
-                    N,
-                )
-                existing.clear()
-            else:
+            # MASK = set([ID_KEY, MONOTONIC_KEY, *sort_keys])
+            MASK = set([ID_KEY, MONOTONIC_KEY])
+
+            if not reverse_sort_keys and sort_keys:
+                MASK.update(sort_keys)
                 log.debug(
-                    "
-
-
-                    data_sort_blueprint,
+                    "including sort_keys: [%s] in excluded MASK: [%s] to find similar registers",
+                    sort_keys,
+                    MASK,
                 )

-
-
-
-
-
-
-
-
-        patterns = [r".*"]
-        wdata = CWalk(data, include=patterns, exclude=MASK)
-        for exists in existing:
-            wexists = CWalk(exists, include=patterns, exclude=MASK)
-            existing_sort_blueprint = build_dict(exists, reverse_sort_keys)
-            # existing_sort_blueprint = build_comparisson_dict(exists, reverse_sort_keys)
-
-            same_sort_key = existing_sort_blueprint == data_sort_blueprint
-
-            # check if we must "duplicate" data inside tube
-            # keys0 = set(exists).difference(MASK)
-            # keys1 = set(data).difference(MASK)
-            keys0 = set(wexists)
-            keys1 = set(wdata)
-            same_structure = keys0 == keys1
-
-            same_values = False
-            if same_sort_key and same_structure:
-                for key in keys0:
-                    if wexists[key] != wdata[key]:
-                        log.debug(
-                            "[%s].[%s].[%s]: %s != %s",
-                            uid,
-                            data_sort_blueprint,
-                            key,
-                            wexists[key],
-                            wdata[key],
-                        )
+            # TODO: agp: cache and get behaviour from database?
+            if not (behavior := self.behavior_uri.get(query)):
+                for (
+                    pattern,
+                    permissions,
+                ) in self.behavior_templates.items():
+                    if re.match(pattern, query):
+                        behavior = permissions
                         break
                 else:
-
+                    behavior = ALL_RESTRICTIONS
+
+                self.behavior_uri[query] = behavior
+
+            t0 = time.time()
+            # search the same data
+            # TODO: update blueprint
+            identical_bp = {
+                LIMIT_KEY: kw.get(
+                    LIMIT_KEY, 10 # TODO: agp: set in definition?
+                ), # TODO: this is temporal, ideally None
+                ORG_KEY: uid,
+                **data_sort_blueprint, # implies sv = True
+            }
+            identical = await self.storage.query(
+                query,
+                **identical_bp,
+                # **data_sort_bp,
+            )

-
+            # TODO: try to create only a single query
+            # TODO: review different structures case
+            similar_bp = {
+                LIMIT_KEY: kw.get(
+                    LIMIT_KEY, 50 # TODO: agp: set in definition?
+                ), # TODO: this is temporal, ideally None
+                ORG_KEY: uid,
+                **data_sort_blueprint, # implies sv = True
+            }
+            similar = await self.storage.query(
+                query,
+                **similar_bp,
+                **data_sort_bp,
+            )
+            t1 = time.time()
+            _elapsed = t1 - t0
+            existing = identical + similar
+            N = len(existing)
+            log.debug(
+                "[%s] found [%s] similar records in %s secs",
+                identical_bp,
+                N,
+                _elapsed,
+            )
+            if data_sort_blueprint and N > 1:
+                if behavior & ALLOW_DUPLICATED_ITEMS:
+                    log.debug(
+                        "tube [%s] has multiples records: [%s] records, but ALLOW_SAME_DATE_DIFFERENT_VALUES is defined",
+                        uid,
+                        N,
+                    )
+                    existing.clear()
+                else:
                     log.debug(
-                        "[%s]
+                        "tube has multiples records: [%s] = %s records, must just 1 and sort_key is defined by: [%s]",
                         uid,
+                        N,
                         data_sort_blueprint,
                     )
-
-
-
+
+            push = True
+            patterns = kw.get(COMPARISON_PATTERNS)
+            if patterns:
+                wdata = CWalk(data, include=patterns, exclude=MASK)
+                if not wdata:
+                    log.warning("patterns don't get any data")
+                    raise BadLogic(data)
             else:
+                patterns = [r".*"]
+                wdata = CWalk(data, include=patterns, exclude=MASK)
+            for exists in existing:
+                wexists = CWalk(exists, include=patterns, exclude=MASK)
+                existing_sort_blueprint = build_dict(exists, reverse_sort_keys)
+                # existing_sort_blueprint = build_comparisson_dict(exists, reverse_sort_keys)
+
+                same_sort_key = existing_sort_blueprint == data_sort_blueprint
+
+                # check if we must "duplicate" data inside tube
+                # keys0 = set(exists).difference(MASK)
+                # keys1 = set(data).difference(MASK)
+                keys0 = set(wexists)
+                keys1 = set(wdata)
+                same_structure = keys0 == keys1
+
                 same_values = False
+                if same_sort_key and same_structure:
+                    for key in keys0:
+                        if wexists[key] != wdata[key]:
+                            log.debug(
+                                "[%s].[%s].[%s]: %s != %s",
+                                uid,
+                                data_sort_blueprint,
+                                key,
+                                wexists[key],
+                                wdata[key],
+                            )
+                            break
+                    else:
+                        same_values = True

-
-            if same_sort_key:
-                # same sort_key
-                if same_structure:
-                    # EP preserver known structure
-                    if same_values:
-                        # new object and existing one are identical
-                        # including `sort_keys`
-                        # object is not inserted, continue with the next one
+                    if not same_values:
                         log.debug(
-                            "[%s]
+                            "[%s].sort_keys: %s",
                             uid,
                             data_sort_blueprint,
                         )
-
-
-
-
+                        log.debug(
+                            "existing: %s", DATE(exists.get(MONOTONIC_KEY))
+                        )
+                        log.debug("new data: %s", DATE(data.get(MONOTONIC_KEY)))
+                        foo = 1
+                else:
+                    same_values = False
+
+                # explain why object will be skipped
+                if same_sort_key:
+                    # same sort_key
+                    if same_structure:
+                        # EP preserver known structure
+                        if same_values:
+                            # new object and existing one are identical
+                            # including `sort_keys`
+                            # object is not inserted, continue with the next one
                             log.debug(
-                                "[%s][%s]
+                                "[%s][%s]: SKIP, new and existing are identical.",
                                 uid,
                                 data_sort_blueprint,
                             )
+                            push = False
+                            break
+                        elif data_sort_blueprint:
+                            if behavior & ALLOW_SAME_DATE_DIFFERENT_VALUES:
+                                log.debug(
+                                    "[%s][%s], EP send a modified version of an already sent object, but behavior has ALLOW_SAME_DATE_DIFFERENT_VALUES, so restriction is RELAXED",
+                                    uid,
+                                    data_sort_blueprint,
+                                )
+                            else:
+                                # but EP modified an already sent object
+                                log.error(
+                                    "[%s][%s], EP send a modified version of an already sent object",
+                                    uid,
+                                    data_sort_blueprint,
+                                )
+                                push = False
+                                break
                         else:
                             # but EP modified an already sent object
-                            log.
-                                "[%s]
+                            log.debug(
+                                "OK [%s] EP send a modified version of an already sent object, but data has't sort_keys, so must be inserted each time data is different",
                                 uid,
-                                data_sort_blueprint,
                             )
-
-
+                            foo = 1
+
                     else:
-                        #
+                        # and EP yield more/less data for same object
                         log.debug(
-                            "
+                            "[%s].[%s], EP send a different structure that previous ones",
                             uid,
+                            data_sort_blueprint,
+                        )
+                        log.debug(
+                            "[%s].[%s]: existing: [%s]",
+                            uid,
+                            data_sort_blueprint,
+                            exists,
+                        )
+                        log.debug(
+                            "[%s].[%s]: new : [%s]",
+                            uid,
+                            data_sort_blueprint,
+                            data,
                         )
                         foo = 1

                 else:
-                #
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            else:
-                # sort_key values differs
-                if same_structure:
-                    # struct doesn't change
-                    if same_values:
-                        # data is unchanged but the `sort_keys`
+                    # sort_key values differs
+                    if same_structure:
+                        # struct doesn't change
+                        if same_values:
+                            # data is unchanged but the `sort_keys`
+                            log.error(
+                                "[%s]: data is unchanged but the `sort_keys`: %s <--> %s",
+                                uid,
+                                data_sort_blueprint,
+                                existing_sort_blueprint,
+                            )
+                            push = False
+                            break
+                        else:
+                            # a new update of the same object
+                            pass
+                            # push = True
+                    else:
+                        # a new object with a change in its structure
+                        # push = True
                         log.error(
-                            "[%s]:
+                            "[%s].[%s]: has change its structure: [%s]",
                             uid,
-
-                            existing_sort_blueprint,
+                            keys0.symmetric_difference(keys1),
                         )
-
-
-
-
-
-
-
-
-
-
-
-
-
-                        )
-                        foo = 1
+                        foo = 1
+            else:
+                pass
+                # push = True
+
+        # TODO: hack to speed up the process for some data
+        # TODO: remove when not needed
+        must_check = kw.get(KIND_KEY) not in ("raw_energy",)
+        if must_check:
+            t0 = time.time()
+            await prevously_inserted()
+            elapsed = time.time() - t0
+            log.info("[%s] prevously_inserted took: %s secs", uid, elapsed)
         else:
+            # hack for not altering the data
+            # push = False
             pass
-            # push = True

         # check if ORG_KEY could be formated wrong
         if _uri["id"] is None:
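The added prevously_inserted() helper builds its query window by taking the record's sort-key timestamp, subtracting a grace period and normalising the result to UTC before searching for similar records. A minimal standalone sketch of that conversion follows; the function name, the DEFAULT_GRACE_PERIOD value and the treatment of naive datetimes as UTC are illustrative assumptions, not taken from the package.

# Sketch only: approximates the grace-period lower bound computed inside
# prevously_inserted() above; DEFAULT_GRACE_PERIOD is an assumed value.
from datetime import datetime, timedelta

import pytz

DEFAULT_GRACE_PERIOD = 3600  # seconds (assumed)
UTC_TZ = pytz.utc


def since_lower_bound(monotonic_value: datetime, grace_period: float = DEFAULT_GRACE_PERIOD) -> str:
    """Return the UTC timestamp `grace_period` seconds before the sort-key value."""
    since_value = monotonic_value - timedelta(seconds=grace_period)
    if not since_value.tzinfo:
        # treat naive datetimes as UTC before converting
        since_value = pytz.utc.localize(since_value)
    since_value = since_value.astimezone(UTC_TZ)
    return since_value.strftime("%Y-%m-%dT%H:%M:%SZ")


# e.g. since_lower_bound(datetime(2005, 6, 1, 0, 0)) -> "2005-05-31T23:00:00Z"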
{syncmodels-0.1.318.dist-info → syncmodels-0.1.320.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: syncmodels
-Version: 0.1.318
+Version: 0.1.320
 Summary: Synchronizable Models
 Home-page: https://github.com/asterio.gonzalez/syncmodels
 Author: Asterio Gonzalez
@@ -18,7 +18,7 @@ Classifier: Programming Language :: Python :: 3.11
 Requires-Python: >=3.6
 License-File: LICENSE
 License-File: AUTHORS.rst
-Requires-Dist: agptools>=0.1.
+Requires-Dist: agptools>=0.1.320
 Requires-Dist: aiocache
 Requires-Dist: aiohttp
 Requires-Dist: Click
{syncmodels-0.1.318.dist-info → syncmodels-0.1.320.dist-info}/RECORD
CHANGED
@@ -1,4 +1,4 @@
-syncmodels/__init__.py,sha256=
+syncmodels/__init__.py,sha256=ETpXwu3wkhEY55ukY2qUaZk1ZK2-wNWB-xZeZr-rttY,142
 syncmodels/context.py,sha256=k1Gs_ip9BfyRFpyRnzqYvRDKo0sYBqJsh6z9sWln9oE,451
 syncmodels/crawler.py,sha256=FthNzipF8cafNvogFC7AFn_N4qxOHkJTeo3Lbcw0P_g,94746
 syncmodels/crud.py,sha256=ozumS7XgmXSFcFN2SZBH0jB0j_1vK2xE-FeFcTG7ikw,15327
@@ -11,7 +11,7 @@ syncmodels/registry.py,sha256=YaQtgbSwa0je1MpCcVHALI3_b85vrddyOlhsnrUcKZs,8224
 syncmodels/requests.py,sha256=wWoC5hPDm1iBM_zrlyKRauzhXgdKR3pT5RqyC-5UZhQ,538
 syncmodels/runner.py,sha256=IHDKuQ3yJ1DN9wktMiIrerPepYX61tc3AzbFfuUqEFw,5454
 syncmodels/schema.py,sha256=uinUt8Asq_x7xa6MKWVXNyoWO6gKocjGPppjimaXzEU,2492
-syncmodels/storage.py,sha256=
+syncmodels/storage.py,sha256=cNhnIbM_jPqBDfkIWK8ZwF9y8wjk6zZn7fSpUTMpZEY,73786
 syncmodels/syncmodels.py,sha256=jcUxVbv1hrx5hI81VCO1onIM6WyORTqJVPwIqlPocOc,10596
 syncmodels/timequeue.py,sha256=YRd3ULRaIhoszaBsYhfr0epMqAbL6-NwVEtScjUYttM,595
 syncmodels/wave.py,sha256=Gra22BLiA9z2nF-6diXpjAc4GZv9nebmyvHxdAfXec4,7764
@@ -302,10 +302,10 @@ syncmodels/session/postgresql.py,sha256=ZMIu1Rv93pKfvFlovFBmWArzlrT2xaQWNYGZT_LW
 syncmodels/session/sql.py,sha256=bD7zXRrEKKJmqY2UoibWENuWb5zHrrU72F3_dYbS6LY,6569
 syncmodels/session/sqlite.py,sha256=nCDjopLiBpX1F10qkKoARM7JrVdIpJ1WdGOduFVxaiA,2080
 syncmodels/source/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-syncmodels-0.1.
-syncmodels-0.1.
-syncmodels-0.1.
-syncmodels-0.1.
-syncmodels-0.1.
-syncmodels-0.1.
-syncmodels-0.1.
+syncmodels-0.1.320.dist-info/AUTHORS.rst,sha256=3ZPoqg8Aav8DSYKd0fwcwn4_5HwSiMLart0E5Un00-U,168
+syncmodels-0.1.320.dist-info/LICENSE,sha256=uzMOYtIiUsnsD0xHJR7aJWJ4v_bvan0kTnvufy5eNoA,1075
+syncmodels-0.1.320.dist-info/METADATA,sha256=tvrOmh5Zz7FDRGXbLnTqqsemgn72SlqASzFResjs3W4,2700
+syncmodels-0.1.320.dist-info/WHEEL,sha256=SrDKpSbFN1G94qcmBqS9nyHcDMp9cUS9OC06hC0G3G0,109
+syncmodels-0.1.320.dist-info/entry_points.txt,sha256=dMnigjZsHMxTwXiiZyBZdBbMYE0-hY3L5cG15EcDAzw,51
+syncmodels-0.1.320.dist-info/top_level.txt,sha256=2DfQ9NuAhKMjY3BvQGVBA7GfqTm7EoHNbaehSUiqiHQ,11
+syncmodels-0.1.320.dist-info/RECORD,,
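For reference, each RECORD line pairs a file path with an urlsafe-base64 SHA-256 digest (padding stripped) and the file size in bytes. A small sketch for recomputing such an entry locally; the path in the usage comment is hypothetical.

# Sketch: recompute a wheel RECORD entry as "path,sha256=<digest>,<size>".
import base64
import hashlib
from pathlib import Path


def record_entry(path: str) -> str:
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
    return f"{path},sha256={digest},{len(data)}"


# e.g. record_entry("syncmodels/storage.py")  # hypothetical local path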
{syncmodels-0.1.318.dist-info → syncmodels-0.1.320.dist-info}/AUTHORS.rst
File without changes
{syncmodels-0.1.318.dist-info → syncmodels-0.1.320.dist-info}/LICENSE
File without changes
{syncmodels-0.1.318.dist-info → syncmodels-0.1.320.dist-info}/WHEEL
File without changes
{syncmodels-0.1.318.dist-info → syncmodels-0.1.320.dist-info}/entry_points.txt
File without changes
{syncmodels-0.1.318.dist-info → syncmodels-0.1.320.dist-info}/top_level.txt
File without changes
|