gamsapi 52.5.0__cp312-cp312-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (257)
  1. gams/__init__.py +27 -0
  2. gams/_version.py +1 -0
  3. gams/connect/__init__.py +28 -0
  4. gams/connect/agents/__init__.py +24 -0
  5. gams/connect/agents/_excel/__init__.py +32 -0
  6. gams/connect/agents/_excel/excelagent.py +312 -0
  7. gams/connect/agents/_excel/workbook.py +155 -0
  8. gams/connect/agents/_sqlconnectors/__init__.py +42 -0
  9. gams/connect/agents/_sqlconnectors/_accesshandler.py +211 -0
  10. gams/connect/agents/_sqlconnectors/_databasehandler.py +250 -0
  11. gams/connect/agents/_sqlconnectors/_mysqlhandler.py +168 -0
  12. gams/connect/agents/_sqlconnectors/_postgreshandler.py +131 -0
  13. gams/connect/agents/_sqlconnectors/_pyodbchandler.py +112 -0
  14. gams/connect/agents/_sqlconnectors/_sqlalchemyhandler.py +74 -0
  15. gams/connect/agents/_sqlconnectors/_sqlitehandler.py +262 -0
  16. gams/connect/agents/_sqlconnectors/_sqlserverhandler.py +179 -0
  17. gams/connect/agents/concatenate.py +440 -0
  18. gams/connect/agents/connectagent.py +743 -0
  19. gams/connect/agents/csvreader.py +675 -0
  20. gams/connect/agents/csvwriter.py +151 -0
  21. gams/connect/agents/domainwriter.py +143 -0
  22. gams/connect/agents/excelreader.py +756 -0
  23. gams/connect/agents/excelwriter.py +467 -0
  24. gams/connect/agents/filter.py +223 -0
  25. gams/connect/agents/gamsreader.py +112 -0
  26. gams/connect/agents/gamswriter.py +239 -0
  27. gams/connect/agents/gdxreader.py +109 -0
  28. gams/connect/agents/gdxwriter.py +146 -0
  29. gams/connect/agents/labelmanipulator.py +303 -0
  30. gams/connect/agents/projection.py +539 -0
  31. gams/connect/agents/pythoncode.py +71 -0
  32. gams/connect/agents/rawcsvreader.py +248 -0
  33. gams/connect/agents/rawexcelreader.py +312 -0
  34. gams/connect/agents/schema/CSVReader.yaml +92 -0
  35. gams/connect/agents/schema/CSVWriter.yaml +44 -0
  36. gams/connect/agents/schema/Concatenate.yaml +52 -0
  37. gams/connect/agents/schema/DomainWriter.yaml +25 -0
  38. gams/connect/agents/schema/ExcelReader.yaml +121 -0
  39. gams/connect/agents/schema/ExcelWriter.yaml +78 -0
  40. gams/connect/agents/schema/Filter.yaml +74 -0
  41. gams/connect/agents/schema/GAMSReader.yaml +20 -0
  42. gams/connect/agents/schema/GAMSWriter.yaml +47 -0
  43. gams/connect/agents/schema/GDXReader.yaml +23 -0
  44. gams/connect/agents/schema/GDXWriter.yaml +32 -0
  45. gams/connect/agents/schema/LabelManipulator.yaml +99 -0
  46. gams/connect/agents/schema/Projection.yaml +24 -0
  47. gams/connect/agents/schema/PythonCode.yaml +6 -0
  48. gams/connect/agents/schema/RawCSVReader.yaml +34 -0
  49. gams/connect/agents/schema/RawExcelReader.yaml +42 -0
  50. gams/connect/agents/schema/SQLReader.yaml +75 -0
  51. gams/connect/agents/schema/SQLWriter.yaml +103 -0
  52. gams/connect/agents/sqlreader.py +301 -0
  53. gams/connect/agents/sqlwriter.py +276 -0
  54. gams/connect/connectdatabase.py +275 -0
  55. gams/connect/connectvalidator.py +93 -0
  56. gams/connect/errors.py +34 -0
  57. gams/control/__init__.py +136 -0
  58. gams/control/database.py +2231 -0
  59. gams/control/execution.py +1900 -0
  60. gams/control/options.py +2792 -0
  61. gams/control/workspace.py +1198 -0
  62. gams/core/__init__.py +24 -0
  63. gams/core/cfg/__init__.py +26 -0
  64. gams/core/cfg/_cfgmcc.cp312-win_amd64.pyd +0 -0
  65. gams/core/cfg/cfgmcc.py +519 -0
  66. gams/core/dct/__init__.py +26 -0
  67. gams/core/dct/_dctmcc.cp312-win_amd64.pyd +0 -0
  68. gams/core/dct/dctmcc.py +574 -0
  69. gams/core/embedded/__init__.py +26 -0
  70. gams/core/embedded/gamsemb.py +1024 -0
  71. gams/core/emp/__init__.py +24 -0
  72. gams/core/emp/emplexer.py +89 -0
  73. gams/core/emp/empyacc.py +281 -0
  74. gams/core/gdx/__init__.py +26 -0
  75. gams/core/gdx/_gdxcc.cp312-win_amd64.pyd +0 -0
  76. gams/core/gdx/gdxcc.py +866 -0
  77. gams/core/gev/__init__.py +26 -0
  78. gams/core/gev/_gevmcc.cp312-win_amd64.pyd +0 -0
  79. gams/core/gev/gevmcc.py +855 -0
  80. gams/core/gmd/__init__.py +26 -0
  81. gams/core/gmd/_gmdcc.cp312-win_amd64.pyd +0 -0
  82. gams/core/gmd/gmdcc.py +917 -0
  83. gams/core/gmo/__init__.py +26 -0
  84. gams/core/gmo/_gmomcc.cp312-win_amd64.pyd +0 -0
  85. gams/core/gmo/gmomcc.py +2046 -0
  86. gams/core/idx/__init__.py +26 -0
  87. gams/core/idx/_idxcc.cp312-win_amd64.pyd +0 -0
  88. gams/core/idx/idxcc.py +510 -0
  89. gams/core/numpy/__init__.py +29 -0
  90. gams/core/numpy/_gams2numpy.cp312-win_amd64.pyd +0 -0
  91. gams/core/numpy/gams2numpy.py +1048 -0
  92. gams/core/opt/__init__.py +26 -0
  93. gams/core/opt/_optcc.cp312-win_amd64.pyd +0 -0
  94. gams/core/opt/optcc.py +840 -0
  95. gams/engine/__init__.py +204 -0
  96. gams/engine/api/__init__.py +13 -0
  97. gams/engine/api/auth_api.py +7653 -0
  98. gams/engine/api/cleanup_api.py +751 -0
  99. gams/engine/api/default_api.py +887 -0
  100. gams/engine/api/hypercube_api.py +2629 -0
  101. gams/engine/api/jobs_api.py +5229 -0
  102. gams/engine/api/licenses_api.py +2220 -0
  103. gams/engine/api/namespaces_api.py +7783 -0
  104. gams/engine/api/usage_api.py +5627 -0
  105. gams/engine/api/users_api.py +5931 -0
  106. gams/engine/api_client.py +804 -0
  107. gams/engine/api_response.py +21 -0
  108. gams/engine/configuration.py +601 -0
  109. gams/engine/exceptions.py +216 -0
  110. gams/engine/models/__init__.py +86 -0
  111. gams/engine/models/bad_input.py +89 -0
  112. gams/engine/models/cleanable_job_result.py +104 -0
  113. gams/engine/models/cleanable_job_result_page.py +113 -0
  114. gams/engine/models/engine_license.py +107 -0
  115. gams/engine/models/files_not_found.py +93 -0
  116. gams/engine/models/forwarded_token_response.py +112 -0
  117. gams/engine/models/generic_key_value_pair.py +89 -0
  118. gams/engine/models/hypercube.py +160 -0
  119. gams/engine/models/hypercube_page.py +111 -0
  120. gams/engine/models/hypercube_summary.py +91 -0
  121. gams/engine/models/hypercube_token.py +97 -0
  122. gams/engine/models/identity_provider.py +107 -0
  123. gams/engine/models/identity_provider_ldap.py +121 -0
  124. gams/engine/models/identity_provider_oauth2.py +146 -0
  125. gams/engine/models/identity_provider_oauth2_scope.py +89 -0
  126. gams/engine/models/identity_provider_oauth2_with_secret.py +152 -0
  127. gams/engine/models/identity_provider_oidc.py +133 -0
  128. gams/engine/models/identity_provider_oidc_with_secret.py +143 -0
  129. gams/engine/models/inex.py +91 -0
  130. gams/engine/models/invitation.py +136 -0
  131. gams/engine/models/invitation_quota.py +106 -0
  132. gams/engine/models/invitation_token.py +87 -0
  133. gams/engine/models/job.py +165 -0
  134. gams/engine/models/job_no_text_entry.py +138 -0
  135. gams/engine/models/job_no_text_entry_page.py +111 -0
  136. gams/engine/models/license.py +91 -0
  137. gams/engine/models/log_piece.py +96 -0
  138. gams/engine/models/message.py +87 -0
  139. gams/engine/models/message_and_token.py +99 -0
  140. gams/engine/models/message_with_webhook_id.py +89 -0
  141. gams/engine/models/model_auth_token.py +87 -0
  142. gams/engine/models/model_configuration.py +125 -0
  143. gams/engine/models/model_default_instance.py +99 -0
  144. gams/engine/models/model_default_user_instance.py +98 -0
  145. gams/engine/models/model_hypercube_job.py +106 -0
  146. gams/engine/models/model_hypercube_usage.py +130 -0
  147. gams/engine/models/model_instance_info.py +116 -0
  148. gams/engine/models/model_instance_info_full.py +123 -0
  149. gams/engine/models/model_instance_pool_info.py +112 -0
  150. gams/engine/models/model_job_labels.py +179 -0
  151. gams/engine/models/model_job_usage.py +133 -0
  152. gams/engine/models/model_pool_usage.py +124 -0
  153. gams/engine/models/model_usage.py +115 -0
  154. gams/engine/models/model_user.py +96 -0
  155. gams/engine/models/model_userinstance_info.py +119 -0
  156. gams/engine/models/model_userinstancepool_info.py +95 -0
  157. gams/engine/models/model_version.py +91 -0
  158. gams/engine/models/models.py +120 -0
  159. gams/engine/models/namespace.py +104 -0
  160. gams/engine/models/namespace_quota.py +96 -0
  161. gams/engine/models/namespace_with_permission.py +96 -0
  162. gams/engine/models/not_found.py +91 -0
  163. gams/engine/models/password_policy.py +97 -0
  164. gams/engine/models/perm_and_username.py +89 -0
  165. gams/engine/models/quota.py +117 -0
  166. gams/engine/models/quota_exceeded.py +97 -0
  167. gams/engine/models/status_code_meaning.py +89 -0
  168. gams/engine/models/stream_entry.py +89 -0
  169. gams/engine/models/system_wide_license.py +92 -0
  170. gams/engine/models/text_entries.py +87 -0
  171. gams/engine/models/text_entry.py +101 -0
  172. gams/engine/models/time_span.py +95 -0
  173. gams/engine/models/time_span_pool_worker.py +99 -0
  174. gams/engine/models/token_forward_error.py +87 -0
  175. gams/engine/models/user.py +127 -0
  176. gams/engine/models/user_group_member.py +96 -0
  177. gams/engine/models/user_groups.py +108 -0
  178. gams/engine/models/vapid_info.py +87 -0
  179. gams/engine/models/webhook.py +138 -0
  180. gams/engine/models/webhook_parameterized_event.py +99 -0
  181. gams/engine/py.typed +0 -0
  182. gams/engine/rest.py +258 -0
  183. gams/magic/__init__.py +32 -0
  184. gams/magic/gams_magic.py +142 -0
  185. gams/magic/interactive.py +402 -0
  186. gams/tools/__init__.py +30 -0
  187. gams/tools/errors.py +34 -0
  188. gams/tools/toolcollection/__init__.py +24 -0
  189. gams/tools/toolcollection/alg/__init__.py +24 -0
  190. gams/tools/toolcollection/alg/rank.py +51 -0
  191. gams/tools/toolcollection/data/__init__.py +24 -0
  192. gams/tools/toolcollection/data/csvread.py +444 -0
  193. gams/tools/toolcollection/data/csvwrite.py +311 -0
  194. gams/tools/toolcollection/data/exceldump.py +47 -0
  195. gams/tools/toolcollection/data/sqlitewrite.py +276 -0
  196. gams/tools/toolcollection/gdxservice/__init__.py +24 -0
  197. gams/tools/toolcollection/gdxservice/gdxencoding.py +104 -0
  198. gams/tools/toolcollection/gdxservice/gdxrename.py +94 -0
  199. gams/tools/toolcollection/linalg/__init__.py +24 -0
  200. gams/tools/toolcollection/linalg/cholesky.py +57 -0
  201. gams/tools/toolcollection/linalg/eigenvalue.py +56 -0
  202. gams/tools/toolcollection/linalg/eigenvector.py +58 -0
  203. gams/tools/toolcollection/linalg/invert.py +55 -0
  204. gams/tools/toolcollection/linalg/ols.py +138 -0
  205. gams/tools/toolcollection/tooltemplate.py +321 -0
  206. gams/tools/toolcollection/win32/__init__.py +24 -0
  207. gams/tools/toolcollection/win32/excelmerge.py +93 -0
  208. gams/tools/toolcollection/win32/exceltalk.py +76 -0
  209. gams/tools/toolcollection/win32/msappavail.py +49 -0
  210. gams/tools/toolcollection/win32/shellexecute.py +54 -0
  211. gams/tools/tools.py +116 -0
  212. gams/transfer/__init__.py +35 -0
  213. gams/transfer/_abcs/__init__.py +37 -0
  214. gams/transfer/_abcs/container_abcs.py +433 -0
  215. gams/transfer/_internals/__init__.py +63 -0
  216. gams/transfer/_internals/algorithms.py +436 -0
  217. gams/transfer/_internals/casepreservingdict.py +124 -0
  218. gams/transfer/_internals/constants.py +270 -0
  219. gams/transfer/_internals/domainviolation.py +103 -0
  220. gams/transfer/_internals/specialvalues.py +172 -0
  221. gams/transfer/containers/__init__.py +26 -0
  222. gams/transfer/containers/_container.py +1794 -0
  223. gams/transfer/containers/_io/__init__.py +28 -0
  224. gams/transfer/containers/_io/containers.py +164 -0
  225. gams/transfer/containers/_io/gdx.py +1029 -0
  226. gams/transfer/containers/_io/gmd.py +872 -0
  227. gams/transfer/containers/_mixins/__init__.py +26 -0
  228. gams/transfer/containers/_mixins/ccc.py +1274 -0
  229. gams/transfer/syms/__init__.py +33 -0
  230. gams/transfer/syms/_methods/__init__.py +24 -0
  231. gams/transfer/syms/_methods/tables.py +120 -0
  232. gams/transfer/syms/_methods/toDict.py +115 -0
  233. gams/transfer/syms/_methods/toList.py +83 -0
  234. gams/transfer/syms/_methods/toValue.py +60 -0
  235. gams/transfer/syms/_mixins/__init__.py +32 -0
  236. gams/transfer/syms/_mixins/equals.py +626 -0
  237. gams/transfer/syms/_mixins/generateRecords.py +499 -0
  238. gams/transfer/syms/_mixins/pivot.py +313 -0
  239. gams/transfer/syms/_mixins/pve.py +627 -0
  240. gams/transfer/syms/_mixins/sa.py +27 -0
  241. gams/transfer/syms/_mixins/sapve.py +27 -0
  242. gams/transfer/syms/_mixins/saua.py +27 -0
  243. gams/transfer/syms/_mixins/sauapve.py +199 -0
  244. gams/transfer/syms/_mixins/spve.py +1528 -0
  245. gams/transfer/syms/_mixins/ve.py +936 -0
  246. gams/transfer/syms/container_syms/__init__.py +31 -0
  247. gams/transfer/syms/container_syms/_alias.py +984 -0
  248. gams/transfer/syms/container_syms/_equation.py +333 -0
  249. gams/transfer/syms/container_syms/_parameter.py +973 -0
  250. gams/transfer/syms/container_syms/_set.py +604 -0
  251. gams/transfer/syms/container_syms/_universe_alias.py +461 -0
  252. gams/transfer/syms/container_syms/_variable.py +321 -0
  253. gamsapi-52.5.0.dist-info/METADATA +150 -0
  254. gamsapi-52.5.0.dist-info/RECORD +257 -0
  255. gamsapi-52.5.0.dist-info/WHEEL +5 -0
  256. gamsapi-52.5.0.dist-info/licenses/LICENSE +22 -0
  257. gamsapi-52.5.0.dist-info/top_level.txt +1 -0
gams/transfer/syms/_mixins/ve.py
@@ -0,0 +1,936 @@
1
+ #
2
+ # GAMS - General Algebraic Modeling System Python API
3
+ #
4
+ # Copyright (c) 2017-2026 GAMS Development Corp. <support@gams.com>
5
+ # Copyright (c) 2017-2026 GAMS Software GmbH <support@gams.com>
6
+ #
7
+ # Permission is hereby granted, free of charge, to any person obtaining a copy
8
+ # of this software and associated documentation files (the "Software"), to deal
9
+ # in the Software without restriction, including without limitation the rights
10
+ # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11
+ # copies of the Software, and to permit persons to whom the Software is
12
+ # furnished to do so, subject to the following conditions:
13
+ #
14
+ # The above copyright notice and this permission notice shall be included in all
15
+ # copies or substantial portions of the Software.
16
+ #
17
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18
+ # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19
+ # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20
+ # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21
+ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22
+ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
23
+ # SOFTWARE.
24
+ #
25
+
26
+ import copy
27
+ import pandas as pd
28
+ from pandas.api.types import CategoricalDtype, infer_dtype
29
+ import numpy as np
30
+ from typing import Optional
31
+ from gams.transfer._internals import (
32
+ generate_unique_labels,
33
+ DomainStatus,
34
+ cartesian_product,
35
+ EPS,
36
+ UNDEF,
37
+ NA,
38
+ SpecialValues,
39
+ )
40
+
41
+
42
+ class VEMixin:
43
+ @property
44
+ def _attributes(self):
45
+ return ["level", "marginal", "lower", "upper", "scale"]
46
+
47
+ @property
48
+ def type(self):
49
+ return self._type
50
+
51
+ @property
52
+ def summary(self):
53
+ """Summary of the symbol"""
54
+ return {
55
+ "name": self.name,
56
+ "description": self.description,
57
+ "type": self.type,
58
+ "domain": self.domain_names,
59
+ "domain_type": self.domain_type,
60
+ "dimension": self.dimension,
61
+ "number_records": self.number_records,
62
+ }
63
+
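# A minimal usage sketch (assumed setup; the names m, i and x are illustrative):
# the summary property collects the descriptive fields listed above into a dict.
import pandas as pd
import gams.transfer as gt

m = gt.Container()
i = gt.Set(m, "i", records=["i1", "i2"])
x = gt.Variable(m, "x", "free", domain=i)
x.setRecords(pd.DataFrame({"i": ["i1", "i2"], "level": [1.0, 2.0]}))

print(x.summary)  # name, description, type, domain, domain_type, dimension, number_records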
64
+ def dropDefaults(self) -> None:
65
+ """
66
+ Drop records that are set to GAMS default records (check .default_records property for values)
67
+ """
68
+ mask = np.all(
69
+ self.records[self._attributes]
70
+ == np.array(list(self.default_records.values())),
71
+ axis=1,
72
+ ) & ~np.any(
73
+ (self.records[self._attributes] == 0.0)
74
+ & SpecialValues.isEps(self.records[self._attributes]),
75
+ axis=1,
76
+ )
77
+ self.records = self.records[~mask].reset_index(drop=True)
78
+
79
+ def dropNA(self) -> None:
80
+ """
81
+ Drop records from the symbol that are GAMS NA
82
+ """
83
+ mask = SpecialValues.isNA(self.records[self._attributes]).any(axis=1)
84
+ self.records = self.records[~mask].reset_index(drop=True)
85
+
86
+ def dropUndef(self) -> None:
87
+ """
88
+ Drop records from the symbol that are GAMS Undef
89
+ """
90
+ mask = SpecialValues.isUndef(self.records[self._attributes]).any(axis=1)
91
+ self.records = self.records[~mask].reset_index(drop=True)
92
+
93
+ def dropEps(self) -> None:
94
+ """
95
+ Drop records from the symbol that are GAMS EPS (records that are exactly 0.0 will be retained)
96
+ """
97
+ mask = SpecialValues.isEps(self.records[self._attributes]).any(axis=1)
98
+ self.records = self.records[~mask].reset_index(drop=True)
99
+
100
+ def dropMissing(self) -> None:
101
+ """
102
+ Drop records from the symbol that are NaN (includes both NA and Undef special values)
103
+ """
104
+ mask = pd.isna(self.records[self._attributes]).any(axis=1)
105
+ self.records = self.records[~mask].reset_index(drop=True)
106
+
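# A minimal sketch of the drop* helpers above (assumed setup): records holding
# GAMS NA, Undef or EPS, or records equal to the symbol's default record, can
# be removed after the records have been set.
import pandas as pd
import gams.transfer as gt

m = gt.Container()
i = gt.Set(m, "i", records=["i1", "i2", "i3"])
x = gt.Variable(m, "x", "free", domain=i)
x.setRecords(
    pd.DataFrame({"i": ["i1", "i2", "i3"],
                  "level": [0.0, gt.SpecialValues.NA, 5.0]})
)

x.dropNA()        # removes the i2 record (level is NA)
x.dropDefaults()  # removes records equal to the default record, here i1
print(x.records)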
107
+ def toValue(self, column: Optional[str] = None) -> float:
108
+ """
109
+ Convenience method to return symbol records as a Python float. Only possible with scalar symbols
110
+
111
+ Parameters
112
+ ----------
113
+ column : str, optional
114
+ Attribute can be specified with column argument, by default None
115
+
116
+ Returns
117
+ -------
118
+ float
119
+ Value of the symbol
120
+ """
121
+ from gams.transfer.syms._methods.toValue import toValueVariableEquation
122
+
123
+ if not self.isValid():
124
+ raise Exception(
125
+ f"Cannot extract value because `{self.name}` is not a valid symbol object. "
126
+ f"Use `{self.name}.isValid(verbose=True)` to debug."
127
+ )
128
+
129
+ return toValueVariableEquation(self, column=column)
130
+
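# A minimal sketch for toValue (assumed usage): only scalar symbols can be
# collapsed to a single float; the column argument picks the attribute.
import gams.transfer as gt

m = gt.Container()
obj = gt.Variable(m, "obj", "free")  # no domain -> scalar symbol
obj.setRecords(42.0)                 # sets the level attribute

print(obj.toValue("level"))     # 42.0
print(obj.toValue("marginal"))  # 0.0 (default record value)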
131
+ def toList(self, columns: Optional[str] = None) -> list:
132
+ """
133
+ Convenience method to return symbol records as a Python list
134
+
135
+ Parameters
136
+ ----------
137
+ columns : str, optional
138
+ Controls which attributes to include in the list, by default None
139
+
140
+ Returns
141
+ -------
142
+ list
143
+ Records as a Python list
144
+ """
145
+ from gams.transfer.syms._methods.toList import toListVariableEquation
146
+
147
+ if not self.isValid():
148
+ raise Exception(
149
+ f"Cannot extract list because `{self.name}` is not a valid symbol object. "
150
+ f"Use `{self.name}.isValid(verbose=True)` to debug."
151
+ )
152
+ return toListVariableEquation(self, columns=columns)
153
+
154
+ def toDict(
155
+ self, columns: Optional[str] = None, orient: Optional[str] = None
156
+ ) -> dict:
157
+ """
158
+ Convenience method to return symbol records as a Python dictionary
159
+
160
+ Parameters
161
+ ----------
162
+ columns : str, optional
163
+ Controls which attributes to include in the dict, by default None
164
+ orient : str, optional
165
+ orient can take the values "natural" or "columns" and controls the shape of the dict. orient="columns" must be used if the result is to be passed back to setRecords, by default None
166
+
167
+ Returns
168
+ -------
169
+ dict
170
+ Records as a Python dictionary
171
+ """
172
+ from gams.transfer.syms._methods.toDict import toDictVariableEquation
173
+
174
+ if not self.isValid():
175
+ raise Exception(
176
+ f"Cannot extract dict because `{self.name}` is not a valid symbol object. "
177
+ f"Use `{self.name}.isValid(verbose=True)` to debug."
178
+ )
179
+
180
+ return toDictVariableEquation(self, columns=columns, orient=orient)
181
+
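# A minimal sketch for toList/toDict (assumed usage): orient="columns" returns
# a column-oriented mapping that can be fed back into setRecords.
import pandas as pd
import gams.transfer as gt

m = gt.Container()
i = gt.Set(m, "i", records=["i1", "i2"])
x = gt.Variable(m, "x", "free", domain=i)
x.setRecords(pd.DataFrame({"i": ["i1", "i2"], "level": [1.5, 2.5]}))

print(x.toList())              # records as a Python list
d = x.toDict(orient="columns") # column name -> values
y = gt.Variable(m, "y", "free", domain=i)
y.setRecords(d)                # round-trip via the column-oriented dict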
182
+ def setRecords(self, records, uels_on_axes: bool = False) -> None:
183
+ """
184
+ Main convenience method to set standard pandas.DataFrame formatted records
185
+
186
+ Parameters
187
+ ----------
188
+ records : Any
189
+ Records to set for the symbol
190
+ uels_on_axes : bool, optional
191
+ If True, setRecords will assume that all domain information is contained in the axes of the pandas object – data will be flattened (if necessary), by default False
192
+ """
193
+ if isinstance(records, (int, float)):
194
+ self._from_int_float(records)
195
+
196
+ elif isinstance(records, np.ndarray):
197
+ self._from_ndarray(records)
198
+
199
+ elif isinstance(records, pd.DataFrame):
200
+ self._from_dataframe(records, uels_on_axes=uels_on_axes)
201
+
202
+ elif isinstance(records, pd.Series):
203
+ self._from_series(records)
204
+
205
+ elif isinstance(records, dict):
206
+ self._from_dict(records)
207
+
208
+ else:
209
+ self._from_else(records)
210
+
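# A minimal sketch of the dispatch above (assumed setup): scalars, numpy
# arrays, pandas objects and dicts of arrays are all accepted record formats.
import numpy as np
import pandas as pd
import gams.transfer as gt

m = gt.Container()
i = gt.Set(m, "i", records=["i1", "i2"])

s = gt.Variable(m, "s", "free")                  # scalar symbol
s.setRecords(3.0)                                # int/float path

x = gt.Variable(m, "x", "free", domain=i)
x.setRecords(np.array([1.0, 2.0]))               # ndarray path (level values)

y = gt.Variable(m, "y", "free", domain=i)
y.setRecords(pd.DataFrame({"i": ["i1", "i2"],    # flat DataFrame path
                           "level": [1.0, 2.0]}))

z = gt.Variable(m, "z", "free", domain=i)
z.setRecords({"level": np.array([1.0, 2.0]),     # dict-of-arrays path
              "marginal": np.array([0.5, 0.5])})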
211
+ def _from_dict(self, records):
212
+ if all(
213
+ i in self._attributes and isinstance(records[i], (np.ndarray, int, float))
214
+ for i in records.keys()
215
+ ):
216
+ self._from_dict_of_arrays(records)
217
+ else:
218
+ self._from_else(records)
219
+
220
+ def _from_series(self, records):
221
+ from gams.transfer.syms._methods.tables import (
222
+ _assert_axes_no_nans,
223
+ _get_implied_dimension_from_axes,
224
+ _flatten_and_convert,
225
+ )
226
+
227
+ records = copy.deepcopy(records)
228
+
229
+ # check if index has NaNs
230
+ try:
231
+ _assert_axes_no_nans(records)
232
+ except Exception as err:
233
+ raise err
234
+
235
+ # check indices for variable/equation attributes
236
+ n_idx = self._position_of_attributes(records)
237
+ if sum(n_idx) > 1:
238
+ raise Exception(
239
+ "Detected symbol attributes in more than one level of a MultiIndex object"
240
+ )
241
+
242
+ # special things if scalar
243
+ if self.is_scalar:
244
+ if sum(n_idx):
245
+ recs = pd.DataFrame(columns=records.index.tolist())
246
+ for i in records.index:
247
+ recs.loc[0, i] = records[i]
248
+ self._from_dataframe(recs)
249
+ elif records.size == 1:
250
+ records = pd.DataFrame(records, columns=["level"])
251
+ self._from_dataframe(records)
252
+ else:
253
+ raise Exception(
254
+ f"Attempting to set records for a scalar symbol but records.size > 1. "
255
+ "pandas.Series must have size exactly = 1 before setting records. "
256
+ "(Note: pandas.Series.index is ignored for scalar symbols)"
257
+ )
258
+ else:
259
+ # check dimensionality of data
260
+ dim = _get_implied_dimension_from_axes(records) - sum(n_idx)
261
+ if dim != self.dimension:
262
+ raise Exception(
263
+ f"Dimensionality of table ({dim}) is inconsistent "
264
+ f"with symbol domain specification ({self.dimension})"
265
+ )
266
+
267
+ # flatten and convert to categorical
268
+ records = _flatten_and_convert(records)
269
+
270
+ # remap special values (str -> float)
271
+ records = self._remap_str_special_values(records)
272
+
273
+ # unstack attributes column (if provided)
274
+ if any(n_idx):
275
+ attr = records.iloc[:, n_idx.index(True)].cat.categories.tolist()
276
+ records = (
277
+ records.set_index(records.columns.tolist()[:-1])
278
+ .unstack(n_idx.index(True))
279
+ .reset_index(drop=False)
280
+ )
281
+ records.columns = ["*"] * self.dimension + attr
282
+ else:
283
+ records.columns = ["*"] * self.dimension + ["level"]
284
+
285
+ self._from_dataframe(records)
286
+
287
+ def _from_dataframe(self, records, uels_on_axes=False):
288
+ if self.is_scalar:
289
+ self._from_flat_dataframe(records)
290
+
291
+ else:
292
+ if uels_on_axes:
293
+ self._from_table_dataframe(records)
294
+ else:
295
+ self._from_flat_dataframe(records)
296
+
297
+ def _from_flat_dataframe(self, records):
298
+ records = pd.DataFrame(copy.deepcopy(records))
299
+ usr_cols = list(records.columns)
300
+
301
+ # FILL DEFAULT VALUES
302
+ # fill in columns that might not have been supplied
303
+ if set(records[self.dimension :].columns) != set(self._attributes):
304
+ for i in set(self._attributes) - set(records[self.dimension :].columns):
305
+ records[i] = self.default_records[i]
306
+
307
+ # check dimensionality
308
+ r, c = records.shape
309
+ if len(records.columns) != self.dimension + len(self._attributes):
310
+ raise Exception(
311
+ f"Dimensionality of records "
312
+ f"({c - len(self._attributes)}) "
313
+ "is inconsistent w/ variable domain specification "
314
+ f"({self.dimension}), "
315
+ "must resolve before records can be added.\n\nNOTE: "
316
+ "columns not named "
317
+ f"{self._attributes} will be interpreted as domain columns, "
318
+ "check that the DataFrame conforms to the required "
319
+ "notation.\nUser passed DataFrame with columns: "
320
+ f"{usr_cols}"
321
+ )
322
+
323
+ if self.is_scalar and r > 1:
324
+ raise Exception(
325
+ f"Attempting to set {r} records for a scalar symbol. "
326
+ f"Must define a domain for symbol `{self.name}` in order to set multiple records."
327
+ )
328
+
329
+ # reorder columns to fit standard format
330
+ records = pd.concat(
331
+ [records.iloc[:, : self.dimension], records[self._attributes]], axis=1
332
+ )
333
+
334
+ # keep user defined categories if provided
335
+ for i in range(self.dimension):
336
+ # create categorical
337
+ if not isinstance(records.iloc[:, i].dtype, CategoricalDtype):
338
+ records.isetitem(
339
+ i,
340
+ records.iloc[:, i].astype(
341
+ CategoricalDtype(
342
+ categories=records.iloc[:, i].unique(),
343
+ ordered=True,
344
+ )
345
+ ),
346
+ )
347
+
348
+ # capture user categories
349
+ old_cats = records.iloc[:, i].cat.categories.tolist()
350
+ is_ordered = records.iloc[:, i].cat.ordered
351
+
352
+ # convert any non-str categories to str, strip trailing white-space and de-dup
353
+ new_cats = list(dict.fromkeys(list(map(str.rstrip, map(str, old_cats)))))
354
+
355
+ # if categories are not unique after strip then need to remake the categorical
356
+ if len(old_cats) != len(new_cats):
357
+ # convert data to str, strip white-space and make categorical
358
+ records.isetitem(
359
+ i,
360
+ records.iloc[:, i]
361
+ .astype(str)
362
+ .map(str.rstrip)
363
+ .astype(CategoricalDtype(categories=new_cats, ordered=is_ordered)),
364
+ )
365
+
366
+ else:
367
+ # only need to rename the categories
368
+ records.isetitem(i, records.iloc[:, i].cat.rename_categories(new_cats))
369
+
370
+ # remap special values (str -> float)
371
+ records = self._remap_str_special_values(records)
372
+
373
+ # must be able to convert data columns to type float
374
+ cols = list(records.columns)
375
+ for i in records.columns[self.dimension :]:
376
+ records.isetitem(cols.index(i), records[i].astype(float))
377
+
378
+ # reset column names
379
+ records.columns = (
380
+ generate_unique_labels(records.columns[: self.dimension].tolist())
381
+ + self._attributes
382
+ )
383
+
384
+ # set records
385
+ self.records = records
386
+
387
+ def _from_table_dataframe(self, records):
388
+ from gams.transfer.syms._methods.tables import (
389
+ _assert_axes_no_nans,
390
+ _get_implied_dimension_from_axes,
391
+ _flatten_and_convert,
392
+ )
393
+
394
+ records = pd.DataFrame(copy.deepcopy(records))
395
+
396
+ # check if index has NaNs
397
+ try:
398
+ _assert_axes_no_nans(records)
399
+ except Exception as err:
400
+ raise err
401
+
402
+ # check indices for variable/equation attributes
403
+ n_idx = self._position_of_attributes(records)
404
+
405
+ if sum(n_idx) > 1:
406
+ raise Exception(
407
+ "Detected symbol attributes in more than one DataFrame index. "
408
+ "All symbol attributes must be indexed in exactly one index object"
409
+ "(or column object) or within exactly one level of a MultiIndex object"
410
+ )
411
+
412
+ # check dimensionality of data
413
+ dim = _get_implied_dimension_from_axes(records) - sum(n_idx)
414
+ if dim != self.dimension:
415
+ raise Exception(
416
+ f"Dimensionality of table ({dim}) is inconsistent "
417
+ f"with symbol domain specification ({self.dimension})"
418
+ )
419
+
420
+ # flatten and convert to categorical
421
+ records = _flatten_and_convert(records)
422
+
423
+ # remap special values (str -> float)
424
+ records = self._remap_str_special_values(records)
425
+
426
+ # unstack attributes column (if provided)
427
+ if any(n_idx):
428
+ attr = records.iloc[:, n_idx.index(True)].cat.categories.tolist()
429
+ records = (
430
+ records.set_index(records.columns.tolist()[:-1])
431
+ .unstack(n_idx.index(True))
432
+ .reset_index(drop=False)
433
+ )
434
+ records.columns = ["*"] * self.dimension + attr
435
+
436
+ else:
437
+ records.columns = ["*"] * self.dimension + ["level"]
438
+
439
+ # FILL DEFAULT VALUES
440
+ # fill in columns that might not have been supplied
441
+ if set(records.columns) != set(self._attributes):
442
+ for i in set(self._attributes) - set(records.columns):
443
+ records[i] = self.default_records[i]
444
+
445
+ # reorder columns to fit standard format
446
+ records = pd.concat(
447
+ [records.iloc[:, : self.dimension], records[self._attributes]], axis=1
448
+ )
449
+
450
+ # convert data column to type float
451
+ for i in range(self.dimension, self.dimension + len(self._attributes)):
452
+ if not isinstance(records.iloc[:, i].dtype, float):
453
+ records.isetitem(i, records.iloc[:, i].astype(float))
454
+
455
+ # reset column names
456
+ records.columns = generate_unique_labels(self.domain_names) + self._attributes
457
+
458
+ # set records
459
+ self.records = records
460
+
461
+ def _from_int_float(self, records):
462
+ if not self.is_scalar:
463
+ raise Exception(
464
+ "Attempting to set a record with a scalar value, however the "
465
+ "symbol is not currently defined as a scalar (i.e., <symbol>.is_scalar == False)"
466
+ )
467
+
468
+ # note we do not drop zeros when setting
469
+ self._from_flat_dataframe(pd.DataFrame([records], columns=["level"]))
470
+
471
+ def _from_ndarray(self, records):
472
+ records = {"level": records}
473
+ self._from_dict_of_arrays(records)
474
+
475
+ def _from_dict_of_arrays(self, records):
476
+ # check all keys in records dict
477
+ if any(i not in self._attributes for i in records.keys()):
478
+ raise Exception(
479
+ f"Unrecognized variable attribute detected in `records`. "
480
+ f"Attributes must be {self._attributes}, user passed "
481
+ f"dict keys: {list(records.keys())}."
482
+ )
483
+
484
+ # convert all values to numpy array (float dtype)
485
+ for k, v in records.items():
486
+ try:
487
+ records[k] = np.array(v, dtype=float)
488
+ except Exception as err:
489
+ raise Exception(
490
+ f"Could not successfully convert `{k}` "
491
+ f"records to a numpy array (dtype=float), reason: {err}."
492
+ )
493
+
494
+ # user flexibility for (n,1) and (1,n) arrays (auto reshape)
495
+ for k, arr in records.items():
496
+ if self.dimension == 1 and (
497
+ arr.shape == (1, arr.size) or arr.shape == (arr.size, 1)
498
+ ):
499
+ records[k] = arr.reshape((arr.size,))
500
+
501
+ # check dimension of array and symbol
502
+ for k, arr in records.items():
503
+ if arr.ndim != self.dimension:
504
+ raise Exception(
505
+ f"Attempting to set `{k}` records for a {self.dimension}-dimensional "
506
+ f"symbol with a numpy array that is {arr.ndim}-dimensional "
507
+ "-- array reshape necessary. (Note: transfer will auto "
508
+ "reshape array if symbol is 1D and array is either (1,n) or (n,1))"
509
+ )
510
+
511
+ # all records arrays must have the same size
512
+ shapes = [arr.shape for k, arr in records.items()]
513
+ if any(i != shapes[0] for i in shapes):
514
+ raise Exception(
515
+ "Arrays passed into `records` do not have the same shape -- array reshape necessary"
516
+ )
517
+
518
+ # symbol must have regular domain_type if not a scalar
519
+ if self.dimension > 0 and self._domain_status is not DomainStatus.regular:
520
+ raise Exception(
521
+ "Data conversion for non-scalar array (i.e., matrix) format into "
522
+ "records is only possible for symbols defined over valid domain set objects "
523
+ "(i.e., has a 'regular' domain type). \n"
524
+ "Current symbol specifics\n"
525
+ "------------------------------\n"
526
+ f"Domain type: '{self.domain_type}'\n"
527
+ f"Symbol domain: {self.domain}\n"
528
+ f"Symbol dimension: {self.dimension}\n"
529
+ )
530
+
531
+ # all domain sets have to be valid
532
+ for i in self.domain:
533
+ if not i.isValid():
534
+ raise Exception(
535
+ f"Domain set `{i.name}` is invalid and cannot "
536
+ "be used to convert array-to-records. "
537
+ "Use `<symbol>.isValid(verbose=True)` to debug this "
538
+ "domain set symbol before proceeding."
539
+ )
540
+
541
+ # make sure all arrays have the proper (anticipated) shape
542
+ for k, arr in records.items():
543
+ if arr.shape != self.shape:
544
+ raise Exception(
545
+ f"User passed array with shape `{arr.shape}` but anticipated "
546
+ f"shape was `{self.shape}` based "
547
+ "on domain set information -- "
548
+ "must reconcile before array-to-records conversion is possible."
549
+ )
550
+
551
+ # create array of codes
552
+ codes = [np.arange(len(d.getUELs(ignore_unused=True))) for d in self.domain]
553
+
554
+ # create dataframe
555
+ if self.is_scalar:
556
+ df = pd.DataFrame(index=[0], columns=list(records.keys()))
557
+ else:
558
+ df = pd.DataFrame(cartesian_product(*tuple(codes)))
559
+
560
+ # create categoricals
561
+ for n, d in enumerate(self.domain):
562
+ dtype = CategoricalDtype(
563
+ categories=d.records.iloc[:, 0].cat.categories,
564
+ ordered=d.records.iloc[:, 0].cat.ordered,
565
+ )
566
+ df.isetitem(
567
+ n, pd.Categorical.from_codes(codes=df.iloc[:, n], dtype=dtype)
568
+ )
569
+
570
+ # insert matrix elements
571
+ for i in records.keys():
572
+ df[i] = records[i].reshape(-1, 1)
573
+
574
+ # drop zeros and reset index
575
+ df = self._filter_zero_records(df)
576
+
577
+ # FILL DEFAULT VALUES
578
+ # fill in columns that might not have been supplied
579
+ if set(records.keys()) != set(self._attributes):
580
+ for i in set(self._attributes) - set(records.keys()):
581
+ df[i] = self.default_records[i]
582
+
583
+ # reorder columns to fit standard format
584
+ df = pd.concat([df.iloc[:, : self.dimension], df[self._attributes]], axis=1)
585
+
586
+ # reset column names
587
+ df.columns = generate_unique_labels(self.domain_names) + self._attributes
588
+
589
+ # set records
590
+ self.records = df
591
+
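# A minimal sketch of the array-to-records conversion above (assumed setup):
# matrix input requires a "regular" domain of valid Set objects, and the array
# shape must match the shape implied by the domain sets.
import numpy as np
import gams.transfer as gt

m = gt.Container()
i = gt.Set(m, "i", records=["i1", "i2"])
j = gt.Set(m, "j", records=["j1", "j2", "j3"])

x = gt.Variable(m, "x", "free", domain=[i, j])
x.setRecords(np.arange(6, dtype=float).reshape(2, 3))  # must equal x.shape, here (2, 3)
print(x.records)  # rows whose supplied values are all zero are dropped; missing attributes get defaults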
592
+ def _from_else(self, records):
593
+ try:
594
+ records = pd.DataFrame(records)
595
+ except Exception as err:
596
+ raise Exception(
597
+ "Data structure passed as argument 'records' could not be "
598
+ f"successfully converted into a pandas DataFrame (reason: {err})."
599
+ )
600
+ usr_cols = list(records.columns)
601
+
602
+ # FILL DEFAULT VALUES
603
+ # fill in columns that might not have been supplied
604
+ if set(records[self.dimension :].columns) != set(self._attributes):
605
+ for i in set(self._attributes) - set(records[self.dimension :].columns):
606
+ records[i] = self.default_records[i]
607
+
608
+ # check dimensionality
609
+ r, c = records.shape
610
+ if len(records.columns) != self.dimension + len(self._attributes):
611
+ raise Exception(
612
+ f"Dimensionality of records "
613
+ f"({c - len(self._attributes)}) "
614
+ "is inconsistent w/ variable domain specification "
615
+ f"({self.dimension}), "
616
+ "must resolve before records can be added.\n\nNOTE: "
617
+ "columns not named "
618
+ f"{self._attributes} will be interpreted as domain columns, "
619
+ "check that the DataFrame conforms to the required "
620
+ "notation.\nUser passed DataFrame with columns: "
621
+ f"{usr_cols}"
622
+ )
623
+
624
+ if self.is_scalar and r > 1:
625
+ raise Exception(
626
+ f"Attempting to set {r} records for a scalar symbol. "
627
+ f"Must define a domain for symbol `{self.name}` in order to set multiple records."
628
+ )
629
+
630
+ # reorder columns to fit standard format
631
+ records = pd.concat(
632
+ [records.iloc[:, : self.dimension], records[self._attributes]], axis=1
633
+ )
634
+
635
+ # keep user defined categories if provided
636
+ for i in range(self.dimension):
637
+ # create categorical
638
+ if not isinstance(records.iloc[:, i].dtype, CategoricalDtype):
639
+ records.isetitem(
640
+ i,
641
+ records.iloc[:, i].astype(
642
+ CategoricalDtype(
643
+ categories=records.iloc[:, i].unique(),
644
+ ordered=True,
645
+ )
646
+ ),
647
+ )
648
+
649
+ # capture user categories
650
+ old_cats = records.iloc[:, i].cat.categories.tolist()
651
+ is_ordered = records.iloc[:, i].cat.ordered
652
+
653
+ # convert any non-str categories to str, strip trailing white-space and de-dup
654
+ new_cats = list(dict.fromkeys(list(map(str.rstrip, map(str, old_cats)))))
655
+
656
+ # if categories are not unique after strip then need to remake the categorical
657
+ if len(old_cats) != len(new_cats):
658
+ # convert data to str, strip white-space and make categorical
659
+ records.isetitem(
660
+ i,
661
+ records.iloc[:, i]
662
+ .astype(str)
663
+ .map(str.rstrip)
664
+ .astype(CategoricalDtype(categories=new_cats, ordered=is_ordered)),
665
+ )
666
+
667
+ else:
668
+ # only need to rename the categories
669
+ records.isetitem(i, records.iloc[:, i].cat.rename_categories(new_cats))
670
+
671
+ # remap special values (str -> float)
672
+ records = self._remap_str_special_values(records)
673
+
674
+ # must be able to convert data columns to type float
675
+ cols = list(records.columns)
676
+ for i in records.columns[self.dimension :]:
677
+ records.isetitem(cols.index(i), records[i].astype(float))
678
+
679
+ # reset column names
680
+ records.columns = generate_unique_labels(self.domain_names) + self._attributes
681
+
682
+ # set records
683
+ self.records = records
684
+
685
+ def toSparseCoo(self, column: str = "level") -> Optional["coo_matrix"]:
686
+ """
687
+ Convert column to a sparse COOrdinate numpy.array format
688
+
689
+ Parameters
690
+ ----------
691
+ column : str, optional
692
+ The column to convert, by default "level"
693
+
694
+ Returns
695
+ -------
696
+ coo_matrix, optional
697
+ A column in coo_matrix format
698
+ """
699
+ from scipy.sparse import coo_matrix
700
+
701
+ if not isinstance(column, str):
702
+ raise TypeError("Argument 'column' must be type str")
703
+
704
+ if column not in self._attributes:
705
+ raise TypeError(
706
+ f"Argument 'column' must be one of the following: {self._attributes}"
707
+ )
708
+
709
+ if not self.isValid():
710
+ raise Exception(
711
+ "Cannot create sparse array (i.e., coo_matrix) because symbol "
712
+ "is invalid -- use `<symbol>.isValid(verbose=True)` to debug symbol state."
713
+ )
714
+ else:
715
+ if self._domain_status is DomainStatus.regular:
716
+ if self.hasDomainViolations():
717
+ raise Exception(
718
+ "Cannot create sparse array because there are domain violations "
719
+ "(i.e., UELs in the symbol are not a subset of UELs contained in domain sets)."
720
+ )
721
+
722
+ if self.records is not None:
723
+ if self.is_scalar:
724
+ row = [0]
725
+ col = [0]
726
+ m = 1
727
+ n = 1
728
+
729
+ elif self.dimension == 1:
730
+ if self._domain_status is DomainStatus.regular:
731
+ col = (
732
+ self.records.iloc[:, 0]
733
+ .map(self.domain[0]._getUELCodes(0, ignore_unused=True))
734
+ .to_numpy(dtype=int)
735
+ )
736
+ else:
737
+ col = self.records.iloc[:, 0].cat.codes.to_numpy(dtype=int)
738
+
739
+ row = np.zeros(len(col), dtype=int)
740
+ m, *n = self.shape
741
+ assert n == []
742
+ n = m
743
+ m = 1
744
+
745
+ elif self.dimension == 2:
746
+ if self._domain_status is DomainStatus.regular:
747
+ row = (
748
+ self.records.iloc[:, 0]
749
+ .map(self.domain[0]._getUELCodes(0, ignore_unused=True))
750
+ .to_numpy(dtype=int)
751
+ )
752
+ col = (
753
+ self.records.iloc[:, 1]
754
+ .map(self.domain[1]._getUELCodes(0, ignore_unused=True))
755
+ .to_numpy(dtype=int)
756
+ )
757
+ else:
758
+ row = self.records.iloc[:, 0].cat.codes.to_numpy(dtype=int)
759
+ col = self.records.iloc[:, 1].cat.codes.to_numpy(dtype=int)
760
+
761
+ m, n = self.shape
762
+
763
+ else:
764
+ raise Exception(
765
+ "Sparse coo_matrix formats are only "
766
+ "available for data that has dimension <= 2"
767
+ )
768
+
769
+ return coo_matrix(
770
+ (
771
+ self.records.loc[:, column].to_numpy(dtype=float),
772
+ (row, col),
773
+ ),
774
+ shape=(m, n),
775
+ dtype=float,
776
+ )
777
+ else:
778
+ return None
779
+
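# A minimal sketch for toSparseCoo (assumed usage): only symbols with
# dimension <= 2 can be exported; the result is a scipy.sparse.coo_matrix.
import numpy as np
import gams.transfer as gt

m = gt.Container()
i = gt.Set(m, "i", records=["i1", "i2"])
j = gt.Set(m, "j", records=["j1", "j2"])

x = gt.Variable(m, "x", "free", domain=[i, j])
x.setRecords(np.array([[0.0, 1.0], [2.0, 0.0]]))

coo = x.toSparseCoo("level")  # shape (2, 2)
print(coo.toarray())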
780
+ def toDense(self, column: str = "level") -> Optional[np.ndarray]:
781
+ """
782
+ Convert column to a dense numpy.array format
783
+
784
+ Parameters
785
+ ----------
786
+ column : str, optional
787
+ The column to convert, by default "level"
788
+
789
+ Returns
790
+ -------
791
+ np.ndarray, optional
792
+ A column to a dense numpy.array format
793
+ """
794
+ if not isinstance(column, str):
795
+ raise TypeError(f"Argument 'column' must be type str")
796
+
797
+ if column not in self._attributes:
798
+ raise TypeError(
799
+ f"Argument 'column' must be one of the following: {self._attributes}"
800
+ )
801
+
802
+ if not self.isValid():
803
+ raise Exception(
804
+ "Cannot create dense array (i.e., matrix) format because symbol "
805
+ "is invalid -- use `<symbol>.isValid(verbose=True)` to debug symbol state."
806
+ )
807
+ else:
808
+ if self.records is not None:
809
+ if self.is_scalar:
810
+ return self.records.loc[:, column].to_numpy(dtype=float)[0]
811
+
812
+ else:
813
+ #
814
+ #
815
+ # checks
816
+ if self.domain_type == "regular":
817
+ if self.hasDomainViolations():
818
+ raise Exception(
819
+ "Cannot create dense array because there are domain violations "
820
+ "(i.e., UELs in the symbol are not a subset of UELs contained in domain sets)."
821
+ )
822
+
823
+ # check order of domain UELs in categorical and order of domain UELs in data
824
+ for symobj in self.domain:
825
+ data_cats = symobj.records.iloc[:, 0].unique().tolist()
826
+ cats = symobj.records.iloc[:, 0].cat.categories.tolist()
827
+
828
+ if data_cats != cats[: len(data_cats)]:
829
+ raise Exception(
830
+ f"`toDense` requires that UEL data order of domain set `{symobj.name}` must be "
831
+ "equal be equal to UEL category order (i.e., the order that set elements "
832
+ "appear in rows of the dataframe and the order set elements are specified by the categorical). "
833
+ "Users can efficiently reorder their domain set UELs to data order with "
834
+ "the `reorderUELs()` method (no arguments) -- preexisting unused categories "
835
+ "will be appended (maintaining their order)."
836
+ )
837
+ else:
838
+ # check order of domain UELs in categorical and order of domain UELs in data
839
+ for n in range(self.dimension):
840
+ # check if any invalid codes
841
+ if any(
842
+ code == -1 for code in self.records.iloc[:, n].cat.codes
843
+ ):
844
+ raise Exception(
845
+ f"Invalid category detected in dimension `{n}` (code == -1), "
846
+ "cannot create array until all categories are properly resolved"
847
+ )
848
+
849
+ data_cats = self.records.iloc[:, n].unique().tolist()
850
+ cats = self.records.iloc[:, n].cat.categories.tolist()
851
+
852
+ if data_cats != cats[: len(data_cats)]:
853
+ raise Exception(
854
+ f"`toDense` requires (for 'relaxed' symbols) that UEL data order must be "
855
+ "equal be equal to UEL category order (i.e., the order that set elements "
856
+ "appear in rows of the dataframe and the order set elements are specified by the categorical). "
857
+ "Users can efficiently reorder UELs to data order with "
858
+ "the `reorderUELs()` method (no arguments) -- preexisting unused categories "
859
+ "will be appended (maintaining their order)."
860
+ )
861
+
862
+ #
863
+ #
864
+ # create indexing scheme
865
+ if self.domain_type == "regular":
866
+ idx = [
867
+ self.records.iloc[:, n]
868
+ .map(domainobj._getUELCodes(0, ignore_unused=True))
869
+ .to_numpy(dtype=int)
870
+ for n, domainobj in enumerate(self.domain)
871
+ ]
872
+
873
+ else:
874
+ idx = [
875
+ self.records.iloc[:, n].cat.codes.to_numpy(dtype=int)
876
+ for n, domainobj in enumerate(self.domain)
877
+ ]
878
+
879
+ # fill the dense array
880
+ a = np.zeros(self.shape)
881
+ val = self.records.loc[:, column].to_numpy(dtype=float)
882
+ a[tuple(idx)] = val
883
+
884
+ return a
885
+ else:
886
+ return None
887
+
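# A minimal sketch for toDense (assumed usage): records are scattered into a
# dense numpy array shaped by the domain sets; absent records show up as 0.0.
import pandas as pd
import gams.transfer as gt

m = gt.Container()
i = gt.Set(m, "i", records=["i1", "i2", "i3"])

x = gt.Variable(m, "x", "free", domain=i)
x.setRecords(pd.DataFrame({"i": ["i1", "i3"], "level": [1.0, 3.0]}))

print(x.toDense("level"))  # array([1., 0., 3.])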
888
+ def _position_of_attributes(self, records):
889
+ idx = []
890
+ for axis in records.axes:
891
+ for n in range(axis.nlevels):
892
+ if isinstance(axis, pd.MultiIndex):
893
+ if all(
894
+ str(i).casefold() in self._attributes for i in axis.levels[n]
895
+ ):
896
+ idx.append(True)
897
+ else:
898
+ idx.append(False)
899
+
900
+ else:
901
+ if all(str(i).casefold() in self._attributes for i in axis):
902
+ idx.append(True)
903
+ else:
904
+ idx.append(False)
905
+ return idx
906
+
907
+ def _remap_str_special_values(self, records):
908
+ # convert str "eps", "na", & "undef" special value strings to float equivalents
909
+ for i in records.columns[self.dimension :]:
910
+ if infer_dtype(records[i]) not in [
911
+ "integer",
912
+ "floating",
913
+ "mixed-integer-float",
914
+ ]:
915
+ idx = records.loc[:, i].isin(EPS)
916
+ if idx.any():
917
+ records.loc[records[idx].index, i] = SpecialValues.EPS
918
+
919
+ idx = records.loc[:, i].isin(UNDEF)
920
+ if idx.any():
921
+ records.loc[records[idx].index, i] = SpecialValues.UNDEF
922
+
923
+ idx = records.loc[:, i].isin(NA)
924
+ if idx.any():
925
+ records.loc[records[idx].index, i] = SpecialValues.NA
926
+
927
+ return records
928
+
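# A minimal sketch of the special-value remapping above (assumed usage; the
# accepted spellings come from the internal EPS/NA/UNDEF string sets): string
# values such as "eps" or "na" in value columns are converted to their
# SpecialValues float equivalents when records are set.
import pandas as pd
import gams.transfer as gt

m = gt.Container()
i = gt.Set(m, "i", records=["i1", "i2", "i3"])

x = gt.Variable(m, "x", "free", domain=i)
x.setRecords(pd.DataFrame({"i": ["i1", "i2", "i3"],
                           "level": [1.0, "eps", "na"]}))

print(gt.SpecialValues.isEps(x.records["level"]))  # True for the i2 row
print(gt.SpecialValues.isNA(x.records["level"]))   # True for the i3 row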
929
+ def _filter_zero_records(self, records):
930
+ idx = records[records[records.columns[self.dimension :]].eq(0).all(1)].index
931
+ eps_idx = records[
932
+ SpecialValues.isEps(records[records.columns[self.dimension :]]).any(1)
933
+ ].index
934
+ idx = idx.difference(eps_idx)
935
+
936
+ return records.drop(idx).reset_index(drop=True)