sarapy 1.1.6__py3-none-any.whl → 1.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sarapy/dataProcessing/OpsProcessor.py +27 -7
- sarapy/mlProcessors/FertilizerFMCreator.py +54 -0
- sarapy/mlProcessors/FertilizerTransformer.py +81 -0
- sarapy/preprocessing/TransformToOutputData.py +4 -2
- sarapy/version.py +1 -1
- {sarapy-1.1.6.dist-info → sarapy-1.3.0.dist-info}/METADATA +9 -1
- {sarapy-1.1.6.dist-info → sarapy-1.3.0.dist-info}/RECORD +10 -9
- sarapy/dataProcessing/amg_decoder.py +0 -125
- {sarapy-1.1.6.dist-info → sarapy-1.3.0.dist-info}/LICENCE +0 -0
- {sarapy-1.1.6.dist-info → sarapy-1.3.0.dist-info}/WHEEL +0 -0
- {sarapy-1.1.6.dist-info → sarapy-1.3.0.dist-info}/top_level.txt +0 -0
sarapy/dataProcessing/OpsProcessor.py
CHANGED

@@ -4,6 +4,7 @@ import pandas as pd
 from sarapy.mlProcessors import PlantinFMCreator
 from sarapy.mlProcessors import PlantinClassifier
 from sarapy.preprocessing import TransformInputData, TransformToOutputData
+from sarapy.mlProcessors import FertilizerFMCreator, FertilizerTransformer
 
 class OpsProcessor():
     """Class for processing the operators' operations. The information is taken from the database
@@ -40,7 +41,17 @@
 
         self._plantin_classifier = PlantinClassifier.PlantinClassifier(**kwargs_plclass)
         self.plantinFMCreator = PlantinFMCreator.PlantinFMCreator(**fmcreator_kargs)
-
+
+        ##argument map for FertilizerTransformer
+        ft_map = {"regresor_file", "poly_features_file"}
+        ft_kwargs = {}
+        ##iterate over kwargs and, using ft_map, build a new dictionary with the values that were passed
+        for key, value in kwargs.items():
+            if key in ft_map:
+                ft_kwargs[key] = value
+
+        self._ftfmcreator = FertilizerFMCreator.FertilizerFMCreator()
+        self._fertilizer_transformer = FertilizerTransformer.FertilizerTransformer(**ft_kwargs)
 
         self._operationsDict = {} ##dictionary of operators with their operations
         self._platin_classifiedOperations = np.array([]) ##array with the operations classified as seedlings
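The loop added to __init__ above simply whitelists the two keyword arguments that FertilizerTransformer accepts (regresor_file and poly_features_file) and forwards only those; everything else keeps going to the classifier-related components. A stand-alone sketch of the same filtering pattern (filter_kwargs is hypothetical and not part of the package):

def filter_kwargs(kwargs, allowed=frozenset({"regresor_file", "poly_features_file"})):
    # Keep only the keyword arguments that FertilizerTransformer understands.
    return {key: value for key, value in kwargs.items() if key in allowed}

# Unrelated OpsProcessor kwargs such as imputeDistances are dropped.
print(filter_kwargs({"regresor_file": "modelos\\regresor.pkl", "imputeDistances": False}))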
@@ -77,7 +88,7 @@
 
         Returns:
             List of dictionaries with the classifications. Each dictionary has the form
-            {"id_db_h": 10, "id_db_dw": 10, "tag_seedling": 1, "tag_fertilizer":
+            {"id_db_h": 10, "id_db_dw": 10, "tag_seedling": 1, "tag_fertilizer": grams (float)}
         """
 
         ##check that newSample is not empty
@@ -86,13 +97,16 @@
             #If we have new operations, we update the operations dictionary
             self.updateOperationsDict(newSample) #update the class's internal dictionary
             pl_clas = self.classifyForPlantin(**kwargs) #classify the seedling operations
-
+
+            #estimate the grams of fertilizer
+            dst_ft = self._ftfmcreator.transform(newSample[:,2]).astype(int)
+            ft_grams = self._fertilizer_transformer.transform(dst_ft.reshape(-1,1))
             id_db_h_nums, id_db_dw_nums = self.getActualOperationsNumbers() #get the operation numbers from the operations dictionary
             date_oprc = newSample[:,3]
             return self.transformToOutputData.fit_transform(np.column_stack((id_db_h_nums,
                                                                              id_db_dw_nums,
                                                                              pl_clas,
-
+                                                                             ft_grams,
                                                                              date_oprc)))
         else:
             self.resetAllNewSamplesValues()
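On the shapes in the block added above: FertilizerFMCreator.transform returns the distortion values as a flat (n,) array, while FertilizerTransformer.transform expects a column of shape (n, 1) and returns the estimated grams flattened back to (n,). A tiny sketch of that round trip with hypothetical values:

import numpy as np

dst_ft = np.array([12, 1, 12, 0, 0, 0]).astype(int)  # hypothetical distortion values, shape (n,)
X = dst_ft.reshape(-1, 1)                            # column layout expected by FertilizerTransformer
print(X.shape)                                       # (6, 1)
print(X.reshape(-1,).shape)                          # (6,), the flat shape the transformer hands back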
@@ -185,7 +199,7 @@
         ##classify the seedling operations
         operations = self.operationsDict[ID_NPDP]["sample_ops"]
         features, dst_pt, inest_pt = self.plantinFMCreator.fit_transform(operations)
-        classified_ops = self._plantin_classifier.classify(features, **classify_kwargs)
+        classified_ops = self._plantin_classifier.classify(features, dst_pt, inest_pt, **classify_kwargs)
 
         ##check whether first_day_op_classified is True; if so, the first row of classified_ops is not considered
         if self.operationsDict[ID_NPDP]["first_day_op_classified"]:
@@ -324,7 +338,8 @@ if __name__ == "__main__":
 
     import time
     start_time = time.time()
-    op = OpsProcessor.OpsProcessor(classifier_file='modelos\\pipeline_rf.pkl', imputeDistances = False
+    op = OpsProcessor.OpsProcessor(classifier_file='modelos\\pipeline_rf.pkl', imputeDistances = False,
+                                   regresor_file='modelos\\regresor.pkl', poly_features_file='modelos\\poly_features.pkl')
     classifications = op.processOperations(raw_ops, update_samePlace=True, useRatioStats=True)
     end_time = time.time()
     execution_time = end_time - start_time
@@ -334,4 +349,9 @@ if __name__ == "__main__":
     df = pd.DataFrame(classifications)
     tag_seedling = df["tag_seedling"].values
     print(tag_seedling.mean())
-    print(df["tag_seedling"].shape)
+    print(df["tag_seedling"].shape)
+
+    ##fertilizer data
+    tag_fertilizer = df["tag_fertilizer"].values
+    print(tag_fertilizer[1500:1560])
+    print(tag_fertilizer.mean())
sarapy/mlProcessors/FertilizerFMCreator.py
ADDED

@@ -0,0 +1,54 @@
+import numpy as np
+from sklearn.base import BaseEstimator, TransformerMixin
+from sarapy.dataProcessing import TLMSensorDataProcessor
+
+
+class FertilizerFMCreator():
+    """Class to create the feature matrix for fertilizer processing"""
+
+    def __init__(self):
+        self._dataPositions = {"DST_FT": 0}
+        self.dst_ft = None ##initialized to None while no data has been transformed
+
+    def transform(self, X):
+        """
+        Transforms the telemetry data to return the fertilizer distortion data.
+
+        Params:
+            - X: Array with the telemetry data. The shape of X is (n,1)
+
+        Returns:
+            - dst_ft: Array with the fertilizer distortion values.
+        """
+        tlmDataExtractor = TLMSensorDataProcessor.TLMSensorDataProcessor()
+        tlmdeDP = tlmDataExtractor.dataPositions #positions of the data transformed by tlmDataExtractor
+
+        tlmExtracted = tlmDataExtractor.fit_transform(X)
+
+        self.dst_ft = tlmExtracted[:,tlmdeDP["DSTRFT"]]
+
+        return self.dst_ft
+
+if __name__ == "__main__":
+    import os
+    import pandas as pd
+    import numpy as np
+    from sarapy.preprocessing import TransformInputData
+    from sarapy.mlProcessors import PlantinFMCreator
+    import sarapy.utils.getRawOperations as getRawOperations
+    tindata = TransformInputData.TransformInputData()
+
+    ##load the files examples\2024-09-04\UPM001N\data.json and examples\2024-09-04\UPM001N\historical-data.json
+    data_path = os.path.join(os.getcwd(), "examples\\2024-09-04\\UPM001N\\data.json")
+    historical_data_path = os.path.join(os.getcwd(), "examples\\2024-09-04\\UPM001N\\historical-data.json")
+    raw_data = pd.read_json(data_path, orient="records").to_dict(orient="records")
+    raw_data2 = pd.read_json(historical_data_path, orient="records").to_dict(orient="records")
+
+    raw_ops = np.array(getRawOperations.getRawOperations(raw_data, raw_data2))
+    X = tindata.fit_transform(raw_ops) #transforms the operation data into a numpy array
+
+    from sarapy.mlProcessors import FertilizerFMCreator
+
+    ftfmcreator = FertilizerFMCreator.FertilizerFMCreator()
+    dst_ft = ftfmcreator.transform(X[:,2])
+    print(dst_ft[:10]) #prints the first 10 values of DST_FT
sarapy/mlProcessors/FertilizerTransformer.py
ADDED

@@ -0,0 +1,81 @@
+import pickle
+
+class FertilizerTransformer:
+    """
+    Class to take the fertilizer distortion values and transform them into grams
+    """
+
+    def __init__(self, regresor_file, poly_features_file):
+        """Constructor of the FertilizerTransformer class.
+
+        Args:
+            - regresor: Regressor that transforms the distortion values into grams.
+            - poly_features: Degree of the polynomial to use in the data transformation.
+        """
+        ##load the regressor with pickle. We use try to catch FileNotFoundError
+        try:
+            with open(regresor_file, 'rb') as file:
+                self._regresor = pickle.load(file)
+                print("Regresor cargado con éxito.")
+        except FileNotFoundError:
+            print("El archivo no se encuentra en el directorio actual.")
+
+        ##load the polynomial features with pickle. We use try to catch FileNotFoundError
+        try:
+            with open(poly_features_file, 'rb') as file:
+                self._poly_features = pickle.load(file)
+                print("Características polinómicas cargadas con éxito.")
+        except FileNotFoundError:
+            print("El archivo no se encuentra en el directorio actual.")
+
+        self.fertilizer_grams = None ##initialized to None while no data has been transformed
+
+
+    def transform(self, X):
+        """Transforms the fertilizer distortion data into grams.
+
+        Params:
+            - X: Array with the fertilizer distortion data. The shape of X is (n,1)
+
+            Example: [12. 1. 12. 0. 0. 0. 0. 0. 0. 12.]
+
+        Returns:
+            - 0: Array with the fertilizer distortion values transformed into grams.
+        """
+
+        X_poly = self._poly_features.fit_transform(X)
+        self.fertilizer_grams = self._regresor.predict(X_poly)
+
+        ##return with shape (n,)
+        return self.fertilizer_grams.reshape(-1,)
+
+if __name__ == "__main__":
+    import os
+    import pandas as pd
+    import numpy as np
+    from sarapy.preprocessing import TransformInputData
+    from sarapy.mlProcessors import PlantinFMCreator
+    import sarapy.utils.getRawOperations as getRawOperations
+    tindata = TransformInputData.TransformInputData()
+
+    ##load the files examples\2024-09-04\UPM001N\data.json and examples\2024-09-04\UPM001N\historical-data.json
+    data_path = os.path.join(os.getcwd(), "examples\\2024-09-04\\UPM001N\\data.json")
+    historical_data_path = os.path.join(os.getcwd(), "examples\\2024-09-04\\UPM001N\\historical-data.json")
+    raw_data = pd.read_json(data_path, orient="records").to_dict(orient="records")
+    raw_data2 = pd.read_json(historical_data_path, orient="records").to_dict(orient="records")
+
+    raw_ops = np.array(getRawOperations.getRawOperations(raw_data, raw_data2))
+    X = tindata.fit_transform(raw_ops) #transforms the operation data into a numpy array
+
+    from sarapy.mlProcessors import FertilizerFMCreator
+
+    ftfmcreator = FertilizerFMCreator.FertilizerFMCreator()
+    dst_ft = ftfmcreator.transform(X[:,2])
+    ##convert dst_ft to int
+    dst_ft = dst_ft.astype(int)
+
+    from sarapy.mlProcessors import FertilizerTransformer
+
+    fertransformer = FertilizerTransformer.FertilizerTransformer(regresor_file='modelos\\regresor.pkl', poly_features_file='modelos\\poly_features.pkl')
+    gramos = fertransformer.transform(dst_ft.reshape(-1,1))
+    print(gramos[:10])
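FertilizerTransformer only consumes pre-fitted, pickled artifacts; nothing in this release shows how modelos\regresor.pkl and modelos\poly_features.pkl are produced. A minimal sketch of one way to create compatible files, assuming (not confirmed by this diff) that they hold a scikit-learn PolynomialFeatures instance and a regressor with a predict method, fitted on hypothetical calibration pairs of distortion level versus measured grams:

import pickle
import numpy as np
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression

# Hypothetical calibration data: distortion level -> measured grams of fertilizer.
dst = np.array([[0], [1], [4], [8], [12]])       # shape (n, 1), like dst_ft.reshape(-1, 1)
grams = np.array([0.0, 3.1, 12.5, 25.4, 38.2])   # shape (n,)

poly = PolynomialFeatures(degree=2).fit(dst)      # the degree is an assumption
reg = LinearRegression().fit(poly.transform(dst), grams)

with open("poly_features.pkl", "wb") as f:
    pickle.dump(poly, f)
with open("regresor.pkl", "wb") as f:
    pickle.dump(reg, f)

If the pickled object really is a PolynomialFeatures instance, the fit_transform(X) call inside transform behaves like transform(X), since refitting only re-records the number of input features; a stateful, data-dependent preprocessor would instead be silently refitted on the prediction data.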
sarapy/preprocessing/TransformToOutputData.py
CHANGED

@@ -53,9 +53,11 @@ class TransformToOutputData(BaseEstimator, TransformerMixin):
         date_data = X[:,4].astype(int)
         date_oprc = np.array([datetime.datetime.fromtimestamp(date, datetime.timezone.utc) for date in date_data])
         self.temp_df.loc[:,"date_oprc"] = date_oprc.flatten()
-        ##convert the "id_db_h", "id_db_dw", "tag_seedling" columns
-        for col in ["id_db_h", "id_db_dw", "tag_seedling"
+        ##convert the "id_db_h", "id_db_dw", "tag_seedling" columns to int
+        for col in ["id_db_h", "id_db_dw", "tag_seedling"]:
             self.temp_df[col] = self.temp_df[col].astype(float).astype(int)
+        ##convert the "tag_fertilizer" column to float and round to 3 decimals
+        self.temp_df["tag_fertilizer"] = self.temp_df["tag_fertilizer"].astype(float).round(3)
 
         return self
 
sarapy/version.py
CHANGED

@@ -1,2 +1,2 @@
 ## Version of the package
-__version__ = "1.
+__version__ = "1.3.0"
{sarapy-1.1.6.dist-info → sarapy-1.3.0.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: sarapy
-Version: 1.
+Version: 1.3.0
 Home-page: https://github.com/lucasbaldezzari/sarapy
 Author: Lucas Baldezzari
 Author-email: Lucas Baldezzari <lmbaldezzari@gmail.com>

@@ -19,6 +19,14 @@ Requires-Dist: geopy
 
 Library for processing SARAPICO project metadata of _AMG SA_.
 
+#### Version 1.3.0
+
+- Adds functionality to estimate the amount of fertilizer, in grams, from the fertilizer distortion level reported by the electronics. Implements mlProcessors.FertilizerFMCreator and mlProcessors.FertilizerTransformer, and modifies the OpsProcessor and TransformToOutputData classes so they can use these new classes.
+
+#### Version 1.2.6
+
+- Adds a line inside the OpsProcessor class.
+
 #### Version 1.1.6
 
 - Modifies PlantinClassifier to improve the NON-counting of seedlings in OSP.
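To make the 1.3.0 entry above concrete, here is a condensed end-to-end sketch of the new fertilizer estimation path, assembled from the __main__ examples elsewhere in this diff. The JSON paths, the pickled models under modelos\, and the way raw_ops is built are assumptions taken from those examples and must exist locally for the sketch to run:

import numpy as np
import pandas as pd
from sarapy.dataProcessing import OpsProcessor
import sarapy.utils.getRawOperations as getRawOperations

raw_data = pd.read_json("examples\\2024-09-04\\UPM001N\\data.json", orient="records").to_dict(orient="records")
raw_data2 = pd.read_json("examples\\2024-09-04\\UPM001N\\historical-data.json", orient="records").to_dict(orient="records")
raw_ops = np.array(getRawOperations.getRawOperations(raw_data, raw_data2))

# New in 1.3.0: regresor_file and poly_features_file are forwarded to FertilizerTransformer.
op = OpsProcessor.OpsProcessor(classifier_file='modelos\\pipeline_rf.pkl', imputeDistances=False,
                               regresor_file='modelos\\regresor.pkl',
                               poly_features_file='modelos\\poly_features.pkl')

classifications = op.processOperations(raw_ops, update_samePlace=True, useRatioStats=True)
df = pd.DataFrame(classifications)
print(df["tag_fertilizer"].head())  # estimated grams per operation, rounded to 3 decimals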
{sarapy-1.1.6.dist-info → sarapy-1.3.0.dist-info}/RECORD
CHANGED

@@ -1,25 +1,26 @@
 sarapy/__init__.py,sha256=aVoywqGSscYYDycLaYJnz08dlQabl9gH0h4Q5KtHM9o,74
-sarapy/version.py,sha256=
+sarapy/version.py,sha256=acDWjqFOk0iMpHIL6Wr7i_HRiJz9vmiX2XxZnzY5Ss8,51
 sarapy/dataProcessing/GeoProcessor.py,sha256=ARjgKTXDVdf_cFCXyFmzlnmmmay3HG3q-yeJ9QrAcQU,5919
-sarapy/dataProcessing/OpsProcessor.py,sha256=
+sarapy/dataProcessing/OpsProcessor.py,sha256=XLCousbVeST-y4hjr27AKrg9-bOMTdkWl3fDJOgXFSM,18321
 sarapy/dataProcessing/TLMSensorDataProcessor.py,sha256=GfSIRYD_biFlOMTfSQSwW0HsUouZuUL3ScvL4uUHTPQ,23824
 sarapy/dataProcessing/TimeSeriesProcessor.py,sha256=-uic18Sut9yMCenbLO1-VabmKifKABt_FbCCP_fLcmE,5403
 sarapy/dataProcessing/__init__.py,sha256=Kqs5sFtq6RMEa3KLJFbsGRoYsIxHL1UUGMuplyCyQFk,200
-sarapy/
+sarapy/mlProcessors/FertilizerFMCreator.py,sha256=LNi86CI6eVuQ0_UBVJNd_-L79fcY2-zY2NCm9ypl6OM,2354
+sarapy/mlProcessors/FertilizerTransformer.py,sha256=vdGNbvlmvtJZxr0oOE4Zb8Es-K-BKQh4oyhIGfDAcxQ,3571
 sarapy/mlProcessors/PlantinClassifier.py,sha256=Lswb1aNCaBVCMx88nbmbn2dMoPQ_7tGZSS7Cx5EGS1I,7973
 sarapy/mlProcessors/PlantinFMCreator.py,sha256=FeEz0MiorkMPodPgN9FtBDJ22WVLoLwK4g-CFI46DAk,8568
 sarapy/mlProcessors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sarapy/preprocessing/DistancesImputer.py,sha256=NvbVAh5m0yFxVgDbEFnEX7RSG13qLjO7i2gqjDAWsf4,9106
 sarapy/preprocessing/FertilizerImputer.py,sha256=zK6ONAilwPHvj-bC7yxnQYOkDBCCkWh6__57vYK9anM,1490
 sarapy/preprocessing/TransformInputData.py,sha256=wDwyg4U-jgTqvNUFngsmhSKwyhoIyVWlhezEhhPf2qE,4090
-sarapy/preprocessing/TransformToOutputData.py,sha256=
+sarapy/preprocessing/TransformToOutputData.py,sha256=rWAsZKWLxn1qtxUMV_cWeKs4dPkFcBEt6IqKL9A64js,3933
 sarapy/preprocessing/__init__.py,sha256=Wg_Csy8Xiz8BN8A4-T7iPwcL_ol5ApEx6YtybItKB8M,100
 sarapy/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sarapy/utils/amg_decoder.py,sha256=JZ7cbu7DlCuatuq2F7aBfUr7S7U-K5poBgxw5nY6rNI,4319
 sarapy/utils/amg_ppk.py,sha256=c0GusnxdntU-E0JOezzbIfC7SWoJmKAbad_zYDCJ3-c,1060
 sarapy/utils/getRawOperations.py,sha256=8aA1fIkNCnUYgiWfnFggRT_U35z432gZBrZ7seGO5w4,817
-sarapy-1.
-sarapy-1.
-sarapy-1.
-sarapy-1.
-sarapy-1.
+sarapy-1.3.0.dist-info/LICENCE,sha256=N00sU3vSQ6F5c2vML9_qP4IFTkCPFFj0YGDB2CZP-uQ,840
+sarapy-1.3.0.dist-info/METADATA,sha256=LxQX-etiFzuxuysDNA6R2eH1vJAv7FL2qs2m615oxnE,3780
+sarapy-1.3.0.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
+sarapy-1.3.0.dist-info/top_level.txt,sha256=4mUGZXfX2Fw47fpY6MQkaJeuOs_8tbjLkkNp34DJWiA,7
+sarapy-1.3.0.dist-info/RECORD,,
sarapy/dataProcessing/amg_decoder.py
REMOVED

@@ -1,125 +0,0 @@
-from dateutil import parser
-
-
-"""
-'estructura_datos' records how many bits each field occupies.
-For example, the first 6 bits for the year, the next 4 for the month, and so on.
-"""
-
-estructura_datos = {
-    "anio": 6,
-    "mes": 4,
-    "dia": 5,
-    "hora": 5,
-    "minutos": 6,
-    "segundos": 6,
-    "operacion": 16,
-    "PT": 2,
-    "FR": 2,
-    "OR": 2,
-    "MO": 2,
-    "TLM_NPDP": 64,
-    "TLM_GPDP": 16,
-    "ID_NPDP": -1,
-    "ID_OPRR": -1,
-    "ID_GPDP": -1,
-    "ID_CDLL": -1,
-    "size_GNSS": 16,
-    "Latitud": 32,
-    "Longitud": 32,
-    "Precision": 32,
-} # Add more fields as needed
-
-
-def extraer_bits(trama, inicio, n_bits):
-    try:
-        byte_index = inicio // 8
-        bit_offset = inicio % 8
-
-        valor = 0
-        bits_procesados = 0
-        while bits_procesados < n_bits:
-            byte_actual = trama[byte_index]
-            bits_restantes = n_bits - bits_procesados
-            bits_a_extraer = min(bits_restantes, 8 - bit_offset)
-
-            mascara = (1 << bits_a_extraer) - 1
-            bits_extraidos = (byte_actual >> (8 - bit_offset - bits_a_extraer)) & mascara
-
-            valor = (valor << bits_a_extraer) | bits_extraidos
-
-            bits_procesados += bits_a_extraer
-            byte_index += 1
-            bit_offset = 0
-
-        return valor
-    except IndexError as ex:
-        raise ex
-    except Exception as ex:
-        print(f"Error inesperado en extraer_bits: {ex}")
-        raise ex
-
-
-def process_dynamic_id(trama, inicio):
-    # Read the first byte to determine the length of the ID
-    longitud_id_bytes = extraer_bits(trama, inicio, 8) # 8 bits = 1 byte
-    inicio += 8 # Advance the start index by 8 bits to move to the ID content
-
-    # Now extract the ID based on the obtained length
-    id_value = extraer_bits(trama, inicio, longitud_id_bytes * 8) # Convert the length to bits
-    inicio += longitud_id_bytes * 8 # Advance the start index to the end of the ID
-
-    return id_value, inicio
-
-
-def process_data(trama):
-
-    if not isinstance(trama, bytes):
-        raise ValueError("La trama debe ser un bytearray")
-
-    inicio = 0
-    resultado = {}
-    for campo, n_bits in estructura_datos.items():
-        try:
-            if n_bits == -1: # Check whether the field is dynamic
-                resultado[campo], inicio = process_dynamic_id(trama, inicio)
-            else:
-                if campo == "TLM_NPDP" or campo == "TLM_GPDP":
-                    resultado[campo] = trama[inicio // 8: (inicio + n_bits) // 8]
-                else:
-                    resultado[campo] = extraer_bits(trama, inicio, n_bits)
-                inicio += n_bits
-                if campo == "Precision":
-                    # Assuming size_GNSS immediately follows Precision
-                    raw = trama[inicio // 8: (inicio // 8 ) + resultado["size_GNSS"] - 12]
-                    resultado["RAW"] = raw
-        except IndexError as ex:
-            print(f"Error al procesar campo {campo}: {ex}. Posiblemente la trama es mas corta de lo esperado.")
-            break # Exit the loop on an index error
-        except Exception as ex:
-            print(f"Error inesperado al procesar campo {campo}: {ex}")
-            break # Exit the loop on unexpected errors
-
-    if len(set(estructura_datos.keys()) - set(resultado.keys())) == 0:
-
-        anio = 2020 + resultado["anio"]
-        mes = str(resultado["mes"]).zfill(2)
-        dia = str(resultado["dia"]).zfill(2)
-        hora = str(resultado["hora"]).zfill(2)
-        minutos = str(resultado["minutos"]).zfill(2)
-        segundos = str(resultado["segundos"]).zfill(2)
-        resultado["date_oprc"] = parser.parse(f"{anio}-{mes}-{dia}T{hora}:{minutos}:{segundos}+00:00")
-
-        resultado["Latitud"] = (resultado["Latitud"] - 2 ** 32) / 10 ** 7
-        resultado["Longitud"] = (resultado["Longitud"] - 2 ** 32) / 10 ** 7
-
-        del resultado["anio"]
-        del resultado["mes"]
-        del resultado["dia"]
-        del resultado["hora"]
-        del resultado["minutos"]
-        del resultado["segundos"]
-        del resultado["size_GNSS"]
-
-        return resultado
-

{sarapy-1.1.6.dist-info → sarapy-1.3.0.dist-info}/LICENCE
File without changes

{sarapy-1.1.6.dist-info → sarapy-1.3.0.dist-info}/WHEEL
File without changes

{sarapy-1.1.6.dist-info → sarapy-1.3.0.dist-info}/top_level.txt
File without changes