gamsapi 52.5.0__cp312-cp312-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gams/__init__.py +27 -0
- gams/_version.py +1 -0
- gams/connect/__init__.py +28 -0
- gams/connect/agents/__init__.py +24 -0
- gams/connect/agents/_excel/__init__.py +32 -0
- gams/connect/agents/_excel/excelagent.py +312 -0
- gams/connect/agents/_excel/workbook.py +155 -0
- gams/connect/agents/_sqlconnectors/__init__.py +42 -0
- gams/connect/agents/_sqlconnectors/_accesshandler.py +211 -0
- gams/connect/agents/_sqlconnectors/_databasehandler.py +250 -0
- gams/connect/agents/_sqlconnectors/_mysqlhandler.py +168 -0
- gams/connect/agents/_sqlconnectors/_postgreshandler.py +131 -0
- gams/connect/agents/_sqlconnectors/_pyodbchandler.py +112 -0
- gams/connect/agents/_sqlconnectors/_sqlalchemyhandler.py +74 -0
- gams/connect/agents/_sqlconnectors/_sqlitehandler.py +262 -0
- gams/connect/agents/_sqlconnectors/_sqlserverhandler.py +179 -0
- gams/connect/agents/concatenate.py +440 -0
- gams/connect/agents/connectagent.py +743 -0
- gams/connect/agents/csvreader.py +675 -0
- gams/connect/agents/csvwriter.py +151 -0
- gams/connect/agents/domainwriter.py +143 -0
- gams/connect/agents/excelreader.py +756 -0
- gams/connect/agents/excelwriter.py +467 -0
- gams/connect/agents/filter.py +223 -0
- gams/connect/agents/gamsreader.py +112 -0
- gams/connect/agents/gamswriter.py +239 -0
- gams/connect/agents/gdxreader.py +109 -0
- gams/connect/agents/gdxwriter.py +146 -0
- gams/connect/agents/labelmanipulator.py +303 -0
- gams/connect/agents/projection.py +539 -0
- gams/connect/agents/pythoncode.py +71 -0
- gams/connect/agents/rawcsvreader.py +248 -0
- gams/connect/agents/rawexcelreader.py +312 -0
- gams/connect/agents/schema/CSVReader.yaml +92 -0
- gams/connect/agents/schema/CSVWriter.yaml +44 -0
- gams/connect/agents/schema/Concatenate.yaml +52 -0
- gams/connect/agents/schema/DomainWriter.yaml +25 -0
- gams/connect/agents/schema/ExcelReader.yaml +121 -0
- gams/connect/agents/schema/ExcelWriter.yaml +78 -0
- gams/connect/agents/schema/Filter.yaml +74 -0
- gams/connect/agents/schema/GAMSReader.yaml +20 -0
- gams/connect/agents/schema/GAMSWriter.yaml +47 -0
- gams/connect/agents/schema/GDXReader.yaml +23 -0
- gams/connect/agents/schema/GDXWriter.yaml +32 -0
- gams/connect/agents/schema/LabelManipulator.yaml +99 -0
- gams/connect/agents/schema/Projection.yaml +24 -0
- gams/connect/agents/schema/PythonCode.yaml +6 -0
- gams/connect/agents/schema/RawCSVReader.yaml +34 -0
- gams/connect/agents/schema/RawExcelReader.yaml +42 -0
- gams/connect/agents/schema/SQLReader.yaml +75 -0
- gams/connect/agents/schema/SQLWriter.yaml +103 -0
- gams/connect/agents/sqlreader.py +301 -0
- gams/connect/agents/sqlwriter.py +276 -0
- gams/connect/connectdatabase.py +275 -0
- gams/connect/connectvalidator.py +93 -0
- gams/connect/errors.py +34 -0
- gams/control/__init__.py +136 -0
- gams/control/database.py +2231 -0
- gams/control/execution.py +1900 -0
- gams/control/options.py +2792 -0
- gams/control/workspace.py +1198 -0
- gams/core/__init__.py +24 -0
- gams/core/cfg/__init__.py +26 -0
- gams/core/cfg/_cfgmcc.cp312-win_amd64.pyd +0 -0
- gams/core/cfg/cfgmcc.py +519 -0
- gams/core/dct/__init__.py +26 -0
- gams/core/dct/_dctmcc.cp312-win_amd64.pyd +0 -0
- gams/core/dct/dctmcc.py +574 -0
- gams/core/embedded/__init__.py +26 -0
- gams/core/embedded/gamsemb.py +1024 -0
- gams/core/emp/__init__.py +24 -0
- gams/core/emp/emplexer.py +89 -0
- gams/core/emp/empyacc.py +281 -0
- gams/core/gdx/__init__.py +26 -0
- gams/core/gdx/_gdxcc.cp312-win_amd64.pyd +0 -0
- gams/core/gdx/gdxcc.py +866 -0
- gams/core/gev/__init__.py +26 -0
- gams/core/gev/_gevmcc.cp312-win_amd64.pyd +0 -0
- gams/core/gev/gevmcc.py +855 -0
- gams/core/gmd/__init__.py +26 -0
- gams/core/gmd/_gmdcc.cp312-win_amd64.pyd +0 -0
- gams/core/gmd/gmdcc.py +917 -0
- gams/core/gmo/__init__.py +26 -0
- gams/core/gmo/_gmomcc.cp312-win_amd64.pyd +0 -0
- gams/core/gmo/gmomcc.py +2046 -0
- gams/core/idx/__init__.py +26 -0
- gams/core/idx/_idxcc.cp312-win_amd64.pyd +0 -0
- gams/core/idx/idxcc.py +510 -0
- gams/core/numpy/__init__.py +29 -0
- gams/core/numpy/_gams2numpy.cp312-win_amd64.pyd +0 -0
- gams/core/numpy/gams2numpy.py +1048 -0
- gams/core/opt/__init__.py +26 -0
- gams/core/opt/_optcc.cp312-win_amd64.pyd +0 -0
- gams/core/opt/optcc.py +840 -0
- gams/engine/__init__.py +204 -0
- gams/engine/api/__init__.py +13 -0
- gams/engine/api/auth_api.py +7653 -0
- gams/engine/api/cleanup_api.py +751 -0
- gams/engine/api/default_api.py +887 -0
- gams/engine/api/hypercube_api.py +2629 -0
- gams/engine/api/jobs_api.py +5229 -0
- gams/engine/api/licenses_api.py +2220 -0
- gams/engine/api/namespaces_api.py +7783 -0
- gams/engine/api/usage_api.py +5627 -0
- gams/engine/api/users_api.py +5931 -0
- gams/engine/api_client.py +804 -0
- gams/engine/api_response.py +21 -0
- gams/engine/configuration.py +601 -0
- gams/engine/exceptions.py +216 -0
- gams/engine/models/__init__.py +86 -0
- gams/engine/models/bad_input.py +89 -0
- gams/engine/models/cleanable_job_result.py +104 -0
- gams/engine/models/cleanable_job_result_page.py +113 -0
- gams/engine/models/engine_license.py +107 -0
- gams/engine/models/files_not_found.py +93 -0
- gams/engine/models/forwarded_token_response.py +112 -0
- gams/engine/models/generic_key_value_pair.py +89 -0
- gams/engine/models/hypercube.py +160 -0
- gams/engine/models/hypercube_page.py +111 -0
- gams/engine/models/hypercube_summary.py +91 -0
- gams/engine/models/hypercube_token.py +97 -0
- gams/engine/models/identity_provider.py +107 -0
- gams/engine/models/identity_provider_ldap.py +121 -0
- gams/engine/models/identity_provider_oauth2.py +146 -0
- gams/engine/models/identity_provider_oauth2_scope.py +89 -0
- gams/engine/models/identity_provider_oauth2_with_secret.py +152 -0
- gams/engine/models/identity_provider_oidc.py +133 -0
- gams/engine/models/identity_provider_oidc_with_secret.py +143 -0
- gams/engine/models/inex.py +91 -0
- gams/engine/models/invitation.py +136 -0
- gams/engine/models/invitation_quota.py +106 -0
- gams/engine/models/invitation_token.py +87 -0
- gams/engine/models/job.py +165 -0
- gams/engine/models/job_no_text_entry.py +138 -0
- gams/engine/models/job_no_text_entry_page.py +111 -0
- gams/engine/models/license.py +91 -0
- gams/engine/models/log_piece.py +96 -0
- gams/engine/models/message.py +87 -0
- gams/engine/models/message_and_token.py +99 -0
- gams/engine/models/message_with_webhook_id.py +89 -0
- gams/engine/models/model_auth_token.py +87 -0
- gams/engine/models/model_configuration.py +125 -0
- gams/engine/models/model_default_instance.py +99 -0
- gams/engine/models/model_default_user_instance.py +98 -0
- gams/engine/models/model_hypercube_job.py +106 -0
- gams/engine/models/model_hypercube_usage.py +130 -0
- gams/engine/models/model_instance_info.py +116 -0
- gams/engine/models/model_instance_info_full.py +123 -0
- gams/engine/models/model_instance_pool_info.py +112 -0
- gams/engine/models/model_job_labels.py +179 -0
- gams/engine/models/model_job_usage.py +133 -0
- gams/engine/models/model_pool_usage.py +124 -0
- gams/engine/models/model_usage.py +115 -0
- gams/engine/models/model_user.py +96 -0
- gams/engine/models/model_userinstance_info.py +119 -0
- gams/engine/models/model_userinstancepool_info.py +95 -0
- gams/engine/models/model_version.py +91 -0
- gams/engine/models/models.py +120 -0
- gams/engine/models/namespace.py +104 -0
- gams/engine/models/namespace_quota.py +96 -0
- gams/engine/models/namespace_with_permission.py +96 -0
- gams/engine/models/not_found.py +91 -0
- gams/engine/models/password_policy.py +97 -0
- gams/engine/models/perm_and_username.py +89 -0
- gams/engine/models/quota.py +117 -0
- gams/engine/models/quota_exceeded.py +97 -0
- gams/engine/models/status_code_meaning.py +89 -0
- gams/engine/models/stream_entry.py +89 -0
- gams/engine/models/system_wide_license.py +92 -0
- gams/engine/models/text_entries.py +87 -0
- gams/engine/models/text_entry.py +101 -0
- gams/engine/models/time_span.py +95 -0
- gams/engine/models/time_span_pool_worker.py +99 -0
- gams/engine/models/token_forward_error.py +87 -0
- gams/engine/models/user.py +127 -0
- gams/engine/models/user_group_member.py +96 -0
- gams/engine/models/user_groups.py +108 -0
- gams/engine/models/vapid_info.py +87 -0
- gams/engine/models/webhook.py +138 -0
- gams/engine/models/webhook_parameterized_event.py +99 -0
- gams/engine/py.typed +0 -0
- gams/engine/rest.py +258 -0
- gams/magic/__init__.py +32 -0
- gams/magic/gams_magic.py +142 -0
- gams/magic/interactive.py +402 -0
- gams/tools/__init__.py +30 -0
- gams/tools/errors.py +34 -0
- gams/tools/toolcollection/__init__.py +24 -0
- gams/tools/toolcollection/alg/__init__.py +24 -0
- gams/tools/toolcollection/alg/rank.py +51 -0
- gams/tools/toolcollection/data/__init__.py +24 -0
- gams/tools/toolcollection/data/csvread.py +444 -0
- gams/tools/toolcollection/data/csvwrite.py +311 -0
- gams/tools/toolcollection/data/exceldump.py +47 -0
- gams/tools/toolcollection/data/sqlitewrite.py +276 -0
- gams/tools/toolcollection/gdxservice/__init__.py +24 -0
- gams/tools/toolcollection/gdxservice/gdxencoding.py +104 -0
- gams/tools/toolcollection/gdxservice/gdxrename.py +94 -0
- gams/tools/toolcollection/linalg/__init__.py +24 -0
- gams/tools/toolcollection/linalg/cholesky.py +57 -0
- gams/tools/toolcollection/linalg/eigenvalue.py +56 -0
- gams/tools/toolcollection/linalg/eigenvector.py +58 -0
- gams/tools/toolcollection/linalg/invert.py +55 -0
- gams/tools/toolcollection/linalg/ols.py +138 -0
- gams/tools/toolcollection/tooltemplate.py +321 -0
- gams/tools/toolcollection/win32/__init__.py +24 -0
- gams/tools/toolcollection/win32/excelmerge.py +93 -0
- gams/tools/toolcollection/win32/exceltalk.py +76 -0
- gams/tools/toolcollection/win32/msappavail.py +49 -0
- gams/tools/toolcollection/win32/shellexecute.py +54 -0
- gams/tools/tools.py +116 -0
- gams/transfer/__init__.py +35 -0
- gams/transfer/_abcs/__init__.py +37 -0
- gams/transfer/_abcs/container_abcs.py +433 -0
- gams/transfer/_internals/__init__.py +63 -0
- gams/transfer/_internals/algorithms.py +436 -0
- gams/transfer/_internals/casepreservingdict.py +124 -0
- gams/transfer/_internals/constants.py +270 -0
- gams/transfer/_internals/domainviolation.py +103 -0
- gams/transfer/_internals/specialvalues.py +172 -0
- gams/transfer/containers/__init__.py +26 -0
- gams/transfer/containers/_container.py +1794 -0
- gams/transfer/containers/_io/__init__.py +28 -0
- gams/transfer/containers/_io/containers.py +164 -0
- gams/transfer/containers/_io/gdx.py +1029 -0
- gams/transfer/containers/_io/gmd.py +872 -0
- gams/transfer/containers/_mixins/__init__.py +26 -0
- gams/transfer/containers/_mixins/ccc.py +1274 -0
- gams/transfer/syms/__init__.py +33 -0
- gams/transfer/syms/_methods/__init__.py +24 -0
- gams/transfer/syms/_methods/tables.py +120 -0
- gams/transfer/syms/_methods/toDict.py +115 -0
- gams/transfer/syms/_methods/toList.py +83 -0
- gams/transfer/syms/_methods/toValue.py +60 -0
- gams/transfer/syms/_mixins/__init__.py +32 -0
- gams/transfer/syms/_mixins/equals.py +626 -0
- gams/transfer/syms/_mixins/generateRecords.py +499 -0
- gams/transfer/syms/_mixins/pivot.py +313 -0
- gams/transfer/syms/_mixins/pve.py +627 -0
- gams/transfer/syms/_mixins/sa.py +27 -0
- gams/transfer/syms/_mixins/sapve.py +27 -0
- gams/transfer/syms/_mixins/saua.py +27 -0
- gams/transfer/syms/_mixins/sauapve.py +199 -0
- gams/transfer/syms/_mixins/spve.py +1528 -0
- gams/transfer/syms/_mixins/ve.py +936 -0
- gams/transfer/syms/container_syms/__init__.py +31 -0
- gams/transfer/syms/container_syms/_alias.py +984 -0
- gams/transfer/syms/container_syms/_equation.py +333 -0
- gams/transfer/syms/container_syms/_parameter.py +973 -0
- gams/transfer/syms/container_syms/_set.py +604 -0
- gams/transfer/syms/container_syms/_universe_alias.py +461 -0
- gams/transfer/syms/container_syms/_variable.py +321 -0
- gamsapi-52.5.0.dist-info/METADATA +150 -0
- gamsapi-52.5.0.dist-info/RECORD +257 -0
- gamsapi-52.5.0.dist-info/WHEEL +5 -0
- gamsapi-52.5.0.dist-info/licenses/LICENSE +22 -0
- gamsapi-52.5.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,179 @@
|
|
|
1
|
+
#
|
|
2
|
+
# GAMS - General Algebraic Modeling System Python API
|
|
3
|
+
#
|
|
4
|
+
# Copyright (c) 2017-2026 GAMS Development Corp. <support@gams.com>
|
|
5
|
+
# Copyright (c) 2017-2026 GAMS Software GmbH <support@gams.com>
|
|
6
|
+
#
|
|
7
|
+
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
8
|
+
# of this software and associated documentation files (the "Software"), to deal
|
|
9
|
+
# in the Software without restriction, including without limitation the rights
|
|
10
|
+
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
11
|
+
# copies of the Software, and to permit persons to whom the Software is
|
|
12
|
+
# furnished to do so, subject to the following conditions:
|
|
13
|
+
#
|
|
14
|
+
# The above copyright notice and this permission notice shall be included in all
|
|
15
|
+
# copies or substantial portions of the Software.
|
|
16
|
+
#
|
|
17
|
+
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
18
|
+
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
19
|
+
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
20
|
+
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
21
|
+
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
22
|
+
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
23
|
+
# SOFTWARE.
|
|
24
|
+
#
|
|
25
|
+
|
|
26
|
+
import pandas as pd
|
|
27
|
+
from gams.connect.agents._sqlconnectors._databasehandler import DatabaseConnector
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class SQLServerConnector(DatabaseConnector):
    """Connector for Microsoft SQL Server, built on the pymssql driver.

    Provides table-existence checks, table creation, and three row-insert
    strategies: "default" (cursor.executemany), "bulkInsert" (T-SQL
    BULK INSERT from a temporary CSV) and "bcp" (the external bulk-copy
    command-line utility).
    """

    # Insert strategies accepted by _insert_data.
    SUPPORTED_INSERT_METHODS = ["default", "bulkInsert", "bcp"]
    # NOTE(review): the backtick is MySQL's identifier-quote character; SQL
    # Server normally quotes identifiers with [brackets] or "double quotes".
    # Confirm how DatabaseConnector consumes QUOTE_CHAR before changing this.
    QUOTE_CHAR = ["``"]

    def connect(self, connection_details, connection_args, **kwargs) -> None:
        """Open a pymssql connection and create a cursor on it.

        Parameters
        ----------
        connection_details : dict
            Keyword arguments for pymssql.connect. Also retained in
            self._bcp_credentials because the "bcp" insert method rebuilds a
            command line from its 'user', 'password', 'host', 'port' and
            'database' keys.
        connection_args : dict
            Additional keyword arguments passed through to pymssql.connect.
        """

        import pymssql as sql

        self._bcp_credentials = connection_details
        self._engine = sql.connect(**connection_details, **connection_args)
        self._conn = self._engine.cursor()

    def create_transaction(self):
        """SQL Server is transaction safe, so no explicit setup is needed here.

        Any change within a transaction does not get committed in case of a
        failure."""
        pass

    def _check_table_exists(self, tableName: str, schema: str | None) -> bool:
        """Return True if the (optionally schema-qualified) table exists.

        Uses OBJECT_ID(..., 'U'), which resolves only user tables.
        """
        tableExists = False

        # Stripping escape characters is not required: SQL Server is
        # insensitive to the use of escape characters here.
        qualified_table_name = f"{schema}.{tableName}" if schema else tableName

        query = f"""SELECT OBJECT_ID('{qualified_table_name}', 'U') AS ObjectID;"""
        self._conn.execute(query)
        res = self._conn.fetchone()
        # TODO: check for SQLSERVER type(res)
        # res can be = (obj,) | None | (None,)
        if isinstance(res, tuple):
            if res[0]:
                tableExists = True

        return tableExists

    def _create_table(
        self,
        df: pd.DataFrame,
        tableName: str,
        schema: str | None,
        ifExists: str,
        **kwargs,
    ) -> None:
        """
        Drops an existing table and creates a new table with the same name.
        Uses SQL-Server-specific column types (FLOAT / BIGINT / VARCHAR(255)).
        """
        # Map pandas dtypes to SQL Server column types. Columns with any other
        # dtype are silently skipped — TODO confirm that is intended.
        tableCols = ""
        for col, dtype in df.dtypes.items():
            if dtype == "float64":
                tableCols += f"[{col}] FLOAT,"
            elif dtype == "int64":
                tableCols += f"[{col}] BIGINT,"
            elif dtype in ["object", "category"]:
                tableCols += f"[{col}] VARCHAR(255),"

        tableCols = tableCols[:-1]  # drop the trailing comma

        if schema:
            tableName = schema + "." + tableName

        # NOTE(review): tableName is interpolated directly into SQL text; it is
        # assumed to come from validated agent options, not untrusted input.
        if ifExists == "replace":
            try:
                self._conn.execute(
                    f"""IF OBJECT_ID('{tableName}', 'U') IS NOT NULL DROP TABLE {tableName};"""
                )
            except Exception as e:
                self._raise_error(
                    f"Cannot drop table >{tableName}<.\nException from {type(e).__module__}: {type(e).__name__}> {e}"
                )

        self._conn.execute(f"""CREATE TABLE {tableName}({tableCols});""")
        if self._traceValue > 1:
            self._traceLog(
                f"Created new table: >{tableName}< with columns: >{tableCols}<"
            )

    def _write_file_to_sqlserver(
        self, df: pd.DataFrame, tableName: str, insertMethod: str
    ):
        """
        Function to import data from file to SQL Server DBMS:
        - `bcp`, uses the bulk-copy-program utility to import a txt file given the following exists on the system. 1)bcp utility, 2)Relevant ODBC driver. This works when operating on a remote dbms server.
        - `bulkInsert`, uses the `BULK INSERT` query to import a csv file. Does not work if operating on a Remote DBMS server.
        """
        import tempfile

        # Spill the dataframe to a temporary CSV; delete=False so the server
        # process / bcp can reopen the file by name (required on Windows).
        with tempfile.TemporaryDirectory() as tmpdirname:
            with tempfile.NamedTemporaryFile(
                mode="w", dir=tmpdirname, delete=False, suffix=".csv"
            ) as fp:
                df.to_csv(fp.name, index=False, header=False)
                fp.flush()
                fp.seek(0)
                fp.close()
                if insertMethod == "bulkInsert":
                    self._conn.execute(
                        f"""BULK INSERT {tableName}
                        FROM "{fp.name}"
                        WITH (FORMAT = 'CSV', FIRSTROW = 1,KEEPIDENTITY)"""
                    )
                elif insertMethod == "bcp":
                    from shutil import which
                    from subprocess import PIPE, run

                    self._engine.commit()  # the table must be committed and visible in the database before bcp can target it; only then we can start a new transaction.
                    # NOTE(review): credentials (incl. the password) are placed
                    # on a shell command line — visible in process listings.
                    cmd = f"""bcp {tableName} in "{fp.name}" -U "{self._bcp_credentials['user']}" -P "{self._bcp_credentials['password']}" -S "{self._bcp_credentials['host']},{self._bcp_credentials['port']}" -q -c -t "," -d {self._bcp_credentials['database']}"""
                    if self._traceValue > 1:
                        self._traceLog(f"Command to be executed: {cmd}\n")
                    if which(
                        "bcp"
                    ):  # check if bcp is present on the system, returns path if present else None
                        cmd_res = run(
                            cmd,
                            stdout=PIPE,
                            stderr=PIPE,
                            universal_newlines=True,
                            shell=True,
                        )  # shell=True is required for successful run on Linux
                        if cmd_res.returncode != 0:
                            self._raise_error(
                                f"Error occured while running bcp utility.\n {cmd_res.stdout}"
                            )
                    else:
                        self._raise_error("bcp utility not found on the system.")

    def _insert_data(self, df: pd.DataFrame, writeFunction_args: dict):
        """Insert dataframe rows into the target table.

        writeFunction_args must carry "name", "insertMethod" and "schema".
        "default" uses parameterized executemany; "bulkInsert"/"bcp" delegate
        to _write_file_to_sqlserver.
        """
        tableName = writeFunction_args["name"]
        insertMethod = writeFunction_args["insertMethod"]

        if writeFunction_args["schema"]:
            tableName = writeFunction_args["schema"] + "." + tableName

        if insertMethod == "default":
            # one pymssql %s placeholder per column (last one added below)
            placeHolder = "%s," * (len(df.columns) - 1)
            if df.isnull().values.any():  # replace NaN with None, for SQL NULL
                df = df.astype(object).where(pd.notnull(df), None)
            df_list = list(
                df.itertuples(index=False, name=None)
            )  # sql server does not accept nested lists, it has to be tuples
            query = f"INSERT INTO {tableName} VALUES(" + placeHolder + "%s)"
            if len(df_list) > 0:
                self._conn.executemany(query, df_list)  # type: ignore

            elif self._traceValue > 1:
                self._traceLog(
                    f"Empty symbol. No rows were inserted in table >{tableName}<."
                )
        elif insertMethod in ["bulkInsert", "bcp"]:
            self._write_file_to_sqlserver(
                df=df, tableName=tableName, insertMethod=insertMethod
            )
|
|
@@ -0,0 +1,440 @@
|
|
|
1
|
+
#
|
|
2
|
+
# GAMS - General Algebraic Modeling System Python API
|
|
3
|
+
#
|
|
4
|
+
# Copyright (c) 2017-2026 GAMS Development Corp. <support@gams.com>
|
|
5
|
+
# Copyright (c) 2017-2026 GAMS Software GmbH <support@gams.com>
|
|
6
|
+
#
|
|
7
|
+
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
8
|
+
# of this software and associated documentation files (the "Software"), to deal
|
|
9
|
+
# in the Software without restriction, including without limitation the rights
|
|
10
|
+
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
11
|
+
# copies of the Software, and to permit persons to whom the Software is
|
|
12
|
+
# furnished to do so, subject to the following conditions:
|
|
13
|
+
#
|
|
14
|
+
# The above copyright notice and this permission notice shall be included in all
|
|
15
|
+
# copies or substantial portions of the Software.
|
|
16
|
+
#
|
|
17
|
+
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
18
|
+
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
19
|
+
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
20
|
+
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
21
|
+
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
22
|
+
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
23
|
+
# SOFTWARE.
|
|
24
|
+
#
|
|
25
|
+
|
|
26
|
+
import re
|
|
27
|
+
import warnings
|
|
28
|
+
import gams.transfer as gt
|
|
29
|
+
import pandas as pd
|
|
30
|
+
from gams.connect.agents.connectagent import ConnectAgent
|
|
31
|
+
import numpy as np
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class Concatenate(ConnectAgent):
|
|
35
|
+
def __init__(self, cdb, inst, agent_index):
|
|
36
|
+
super().__init__(cdb, inst, agent_index)
|
|
37
|
+
self._parse_options(self._inst)
|
|
38
|
+
self.__gt2pytypemap__ = {gt.Set: "set", gt.Parameter: "parameter"}
|
|
39
|
+
|
|
40
|
+
def _parse_options(self, inst):
|
|
41
|
+
# root options
|
|
42
|
+
self._output_dimensions = inst["outputDimensions"]
|
|
43
|
+
self._dimension_map = self._dict_get(inst, "dimensionMap", {})
|
|
44
|
+
self._universal_dimension = inst["universalDimension"]
|
|
45
|
+
self._emptyuel = inst["emptyUel"]
|
|
46
|
+
self._output_name = {
|
|
47
|
+
"set": inst["setName"],
|
|
48
|
+
"parameter": inst["parameterName"],
|
|
49
|
+
}
|
|
50
|
+
self._symbols_dimension = inst["symbolsDimension"]
|
|
51
|
+
self._skip = inst["skip"]
|
|
52
|
+
self._trace = inst["trace"]
|
|
53
|
+
self._dim_start = int(self._symbols_dimension)
|
|
54
|
+
|
|
55
|
+
# symbol options
|
|
56
|
+
self._symbols = inst["symbols"]
|
|
57
|
+
self._write_all = self._symbols == "all"
|
|
58
|
+
|
|
59
|
+
    def _create_symbols_list(self) -> None:
        """Build the normalized per-symbol option list in self._symbols.

        Each surviving entry gains "sname" (the bare symbol name) and "dim"
        (the list of output dimension names). Entries whose symbol type is
        excluded via the skip option are removed from the list.
        """

        if self._write_all:
            # symbols: all -> take every set and parameter in the container
            self._symbols = []
            for name, sym in self._cdb.container.data.items():
                if isinstance(sym, (gt.Set, gt.Parameter)):
                    self._symbols.append({"name": name})

        remove_symbols = []  # indices of skipped symbols to drop afterwards
        for i, sym_opt in enumerate(self._symbols):
            # "name" may carry an explicit domain list, e.g. "p(i,j)"
            regex = r"(?P<name>[a-zA-Z0-9_]+)?(\((?P<domains>[a-zA-Z0-9_,\s]*)\))?"
            ms = re.fullmatch(regex, sym_opt["name"])
            if ms is None:
                self._connect_error(f"Invalid symbol name {sym_opt['name']}.")
            sym_opt["sname"] = ms.group("name")

            self._symbols_exist_cdb(sym_opt["sname"], should_exist=True)
            sym = self._cdb.container[sym_opt["sname"]]

            # only sets and parameters can be concatenated
            if not isinstance(sym, (gt.Set, gt.Parameter)):
                self._connect_error(
                    f"Symbol type >{type(sym)}< of symbol >{sym_opt['sname']}< is not supported. Supported symbol types are sets and parameters. If you would like to concatenate variables or equations, use Connect Agent Projection to turn these into parameters."
                )

            if self._skip == "set" and isinstance(sym, gt.Set):  # skip sets
                remove_symbols.append(i)
                continue
            if self._skip == "par" and isinstance(sym, gt.Parameter):  # skip parameters
                remove_symbols.append(i)
                continue

            if self._trace > 2:
                self._cdb.print_log(
                    f"Connect Container symbol={sym_opt['sname']}:\n {sym.records}\n"
                )

            if ms.group("domains") is not None:
                # explicit domains must match the symbol's dimension count
                sym_opt["dim"] = [dom.strip() for dom in ms.group("domains").split(",")]

                if sym.dimension != len(sym_opt["dim"]):
                    self._connect_error(
                        f"Number of specified dimensions of symbol >{sym_opt['name']}< does not correspond to the symbol's number of dimensions in the database ({len(sym_opt['dim'])}<>{sym.dimension})."
                    )

            else:
                sym_opt["dim"] = []
                if (
                    sym.dimension > 0
                ):  # if symbol dim is not specified: use dimension_map to map domains to output dimensions, domains that cannot be mapped will be universal output dimensions
                    sym_opt["dim"] = [
                        self._dimension_map.get(d, d) for d in sym.domain_names
                    ]

        # delete in reverse order so the earlier indices stay valid
        for i in reversed(remove_symbols):
            del self._symbols[i]
|
|
115
|
+
|
|
116
|
+
    def _create_output_dimensions(self) -> None:
        """Create the output dimensions list.

        When outputDimensions is "all", derives the list from the symbols'
        dimensions (allowing repeated names when a single symbol uses the same
        dimension more than once); otherwise validates the user-given names.
        """
        # if outputDimensions is all, generate output dimensions from symbol dimensions
        if self._output_dimensions == "all":
            self._output_dimensions = []
            for sym_opt in self._symbols:
                for idx, d in enumerate(sym_opt["dim"]):
                    if d not in self._output_dimensions:
                        self._output_dimensions.append(d)
                    # a symbol uses d more often (within its first idx+1
                    # dimensions) than the output currently provides ->
                    # append another occurrence of d
                    elif self._output_dimensions.count(d) < sym_opt["dim"][
                        : idx + 1
                    ].count(d):
                        self._output_dimensions.append(d)
        else:
            # user-specified output dimensions must be simple identifiers
            regex = r"([a-zA-Z0-9_]*)"
            invalid_dim = [
                dim for dim in self._output_dimensions if not re.fullmatch(regex, dim)
            ]
            if invalid_dim != []:
                self._connect_error(f"Invalid output dimension(s) >{invalid_dim}<.")

        # "symbols" is used for the implicit symbol-name column and may not be
        # an explicit output dimension while symbolsDimension is enabled
        if "symbols" in self._output_dimensions and self._symbols_dimension:
            self._connect_error("'symbols' is a preserved output dimension.")
|
|
139
|
+
|
|
140
|
+
def _make_dimensions_unique(self, dim_list: list) -> list:
|
|
141
|
+
"""Makes dimensions unique. Example: ['i', 'j', 'j'] -> ['i', 'j', 'j.1']
|
|
142
|
+
|
|
143
|
+
Parameters
|
|
144
|
+
----------
|
|
145
|
+
dim_list : list
|
|
146
|
+
Dimensions
|
|
147
|
+
|
|
148
|
+
Returns
|
|
149
|
+
-------
|
|
150
|
+
list
|
|
151
|
+
Unique dimensions
|
|
152
|
+
"""
|
|
153
|
+
cp_dim_list = dim_list.copy()
|
|
154
|
+
counts = {}
|
|
155
|
+
for i, dim in enumerate(cp_dim_list):
|
|
156
|
+
cur_count = counts.get(dim, 0)
|
|
157
|
+
if cur_count > 0:
|
|
158
|
+
cp_dim_list[i] = "%s.%d" % (dim, cur_count)
|
|
159
|
+
counts[dim] = cur_count + 1
|
|
160
|
+
|
|
161
|
+
return cp_dim_list
|
|
162
|
+
|
|
163
|
+
    def _add_universal_dimensions(
        self,
        nb_uni_dim: int,
        unknown_dim: list,
        unique_output_dimensions: list,
    ) -> tuple:
        """Adds universal dimensions. Also updates the output dimensions with the newly added universal dimensions.

        One universal dimension named "<universalDimension>_<i>" is appended
        for every unknown dimension not yet covered; fails if such a name was
        already specified as an explicit output dimension.

        Parameters
        ----------
        nb_uni_dim : int
            Number of universal dimensions
        unknown_dim : list
            Unknown dimensions
        unique_output_dimensions : list
            Unique output dimensions (mutated in place and also returned)

        Returns
        -------
        tuple
            (updated number of universal dimensions,
             updated unique output dimensions)
        """
        for i in range(nb_uni_dim, len(unknown_dim)):
            uni_name = f"{self._universal_dimension}_{i}"
            if uni_name in self._output_dimensions:
                self._connect_error(
                    f"Automatically added universal column >{uni_name}< is already specified under option outputDimensions. Please set another base name for universal dimensions via option universalDimension or rename the output dimension."
                )
            self._output_dimensions.append(uni_name)
            unique_output_dimensions.append(uni_name)
            nb_uni_dim += 1

        return nb_uni_dim, unique_output_dimensions
|
|
198
|
+
|
|
199
|
+
    def _save_categories(
        self,
        unique_output_dimensions: list,
        dataframes: dict,
        output_types: list,
    ) -> dict:
        """Save categories that might be lost after pandas.concat due to pandas bug https://github.com/pandas-dev/pandas/issues/51362

        Parameters
        ----------
        unique_output_dimensions : list
            Unique output dimensions
        dataframes : dict
            Dictionary of dataframes to be concatenated (by output type)
        output_types : list
            Output types

        Returns
        -------
        dict
            Dictionary that maps dimensions of the output symbols to the union of categories from the input symbols (by output type)
        """

        output_dim_cat_map = {}
        for ot in output_types:
            output_dim_cat_map[ot] = {}
            # initialize mapping: one (empty) category list per output
            # dimension position, offset by the leading "symbols" column
            for i in range(len(unique_output_dimensions)):
                output_dim_cat_map[ot][i + self._dim_start] = []
            # assumes all dimension columns here are categorical dtype —
            # .cat would raise otherwise; TODO confirm against callers
            for df in dataframes[ot]:
                # iterate over all dimensions except symbols (0) and value/text column (-1) and save categories
                for d in list(df.columns[self._dim_start : -1]):
                    idx = (
                        unique_output_dimensions.index(d) + self._dim_start
                    )  # dimension position in the output symbol
                    dim_series = df[d].cat.remove_unused_categories()
                    output_dim_cat_map[ot][idx].extend(
                        dim_series.cat.categories.tolist()
                    )
            # make categories unique while preserving first-seen order
            for k, v in output_dim_cat_map[ot].items():
                output_dim_cat_map[ot][k] = list(dict.fromkeys(v))

        return output_dim_cat_map
|
|
243
|
+
|
|
244
|
+
def _concatenate_dataframes(
    self,
    dataframes: dict,
    output_types: list,
    unique_output_dimensions: list,
    gt_na_values: list,
    output_dim_cat_map: dict,
) -> dict:
    """Concatenates dataframes of sets and parameters respectively.

    Parameters
    ----------
    dataframes : dict
        Dictionary of dataframes to be concatenated (by output type)
    output_types : list
        Output types
    unique_output_dimensions : list
        Unique output dimensions
    gt_na_values : list
        gt.SpecialValues.NA to be recovered after pd.concat
    output_dim_cat_map: dict
        categories to be recovered after pd.concat

    Returns
    -------
    dict
        Output symbols
    """
    outputs = {}
    # with a symbols dimension, the concatenated frame gets a leading "symbols" column
    symbols = ["symbols"] if self._symbols_dimension else []
    for ot in output_types:
        # sets carry explanatory text, parameters carry numeric values
        val_col = "text" if ot == "set" else "value"
        # pandas-version-check
        with warnings.catch_warnings():  # pandas 2.1.0 has a FutureWarning for concatenating empty DataFrames
            warnings.filterwarnings(
                "ignore",
                message=".*The behavior of DataFrame concatenation with empty or all-NA entries is deprecated.*",
                category=FutureWarning,
            )
            # reindex the concatenated result onto the full output column set;
            # dimensions missing from an input frame become NaN columns here
            outputs[ot] = pd.DataFrame(
                pd.concat(dataframes[ot]),
                columns=symbols + unique_output_dimensions + [val_col],
            )

        # recover gt.SpecialValues.NA after pd.concat
        if ot == "parameter" and any(gt_na_values):
            outputs[ot] = outputs[ot].astype(
                {"value": np.dtype("object")}
            )  # needs to be data type object for mask to work with gt.SpecialValues.NA
            outputs[ot]["value"] = outputs[ot]["value"].mask(
                gt_na_values,
                gt.SpecialValues.NA,
            )
            # outputs[ot].loc[gt_na_values, "value"] = gt.SpecialValues.NA # works in general but not with gt.SpecialValues.NA

        # recover categories if necessary
        # (pd.concat may have degraded a dimension column to plain object dtype;
        # see https://github.com/pandas-dev/pandas/issues/51362)
        for i in list(output_dim_cat_map[ot].keys()):
            if not isinstance(outputs[ot].iloc[:, i].dtype, pd.CategoricalDtype):
                # isetitem assigns by positional column index without alignment
                outputs[ot].isetitem(
                    i,
                    outputs[ot]
                    .iloc[:, i]
                    .astype(
                        pd.CategoricalDtype(
                            categories=output_dim_cat_map[ot][i],
                            ordered=True,
                        )
                    ),
                )

        # add empty uel to categoricals
        # (required so the fillna below is a valid category value)
        df = outputs[ot]
        for c in df[df.columns[self._dim_start : -1]]:
            if isinstance(df[c].dtype, pd.CategoricalDtype):
                df[c] = df[c].cat.add_categories(self._emptyuel)

        outputs[ot].reset_index(inplace=True, drop=True)
        # fill dimension cells that were NaN after reindexing with the empty UEL
        outputs[ot][outputs[ot].columns[self._dim_start : -1]] = outputs[ot][
            outputs[ot].columns[self._dim_start : -1]
        ].fillna(self._emptyuel)

    return outputs
|
|
326
|
+
|
|
327
|
+
def execute(self):
    """Run the concatenate agent: collect symbol records, align their
    dimensions, concatenate per output type and write the results to the
    Connect container."""
    if self._trace > 0:
        self._log_instructions(self._inst, self._inst_raw)
        self._describe_container(self._cdb.container, "Connect Container (before):")

    self._create_symbols_list()
    if not self._symbols:
        self._cdb.print_log("No data to concatenate.")
        return

    self._create_output_dimensions()
    unique_output_dimensions = self._make_dimensions_unique(self._output_dimensions)

    # create dataframes to concatenate
    output_types = set()
    dataframes = {"set": [], "parameter": []}  # dataframes to concatenate
    nb_uni_dim = (
        0  # required number of universal dimensions in the output dimensions
    )
    gt_na_values = []  # recover gt.SpecialValues.NA after pd.concat
    for sym_opt in self._symbols:
        sym = self._cdb.container[sym_opt["sname"]]
        # map the gt symbol class to its output type name ("set"/"parameter")
        output_types.add(self.__gt2pytypemap__[type(sym)])

        if sym.dimension > 0:
            # make symbol dimensions unique
            sym_opt["dim"] = self._make_dimensions_unique(sym_opt["dim"])

            # identify unknown dimensions, unknown dimensions will be aggregated into universal output dimensions
            unknown_dim = [
                i
                for i, x in enumerate(sym_opt["dim"])
                if x not in unique_output_dimensions
            ]
            if nb_uni_dim < len(
                unknown_dim
            ):  # add universal dimensions if required
                (
                    nb_uni_dim,
                    unique_output_dimensions,
                ) = self._add_universal_dimensions(
                    nb_uni_dim, unknown_dim, unique_output_dimensions
                )
            for i, dim_idx in enumerate(
                unknown_dim
            ):  # overwrite current dimension name with universal dimension name
                sym_opt["dim"][dim_idx] = f"{self._universal_dimension}_{i}"

        if self._trace > 1:
            self._cdb.print_log(
                f"Dimension(s) of symbol={sym_opt['name']}:\n {sym_opt['dim']}\n"
            )

        # deep copy so renaming columns below does not mutate the container's records
        sym_records = self._sym_records_no_none(sym).copy(deep=True)
        val_col = "text" if isinstance(sym, gt.Set) else "value"
        sym_records.columns = sym_opt["dim"] + [val_col]

        # insert "symbols" column
        if self._symbols_dimension:
            new_name = self._dict_get(sym_opt, "newName", sym_opt["sname"])
            sym_records.insert(loc=0, column="symbols", value=new_name)

        dataframes[self.__gt2pytypemap__[type(sym)]].append(sym_records)

        # save gt.SpecialValues.NA to recover after pd.concat
        if isinstance(sym, gt.Parameter):
            if all(
                sym_records[val_col].isna()
            ):  # recover only if all records are NAs according to pandas
                gt_na_values.extend(gt.SpecialValues.isNA(sym_records[val_col]))
            else:
                gt_na_values.extend([False] * len(sym_records[val_col]))

    # "set" sorts after "parameter" in reverse order, so sets are processed first
    output_types = sorted(output_types, reverse=True)

    for ot in output_types:
        self._symbols_exist_cdb(self._output_name[ot])

    # save categories that might be lost after pandas.concat
    # TODO: remove when fixed by pandas: https://github.com/pandas-dev/pandas/issues/51362
    output_dim_cat_map = self._save_categories(
        unique_output_dimensions, dataframes, output_types
    )
    outputs = self._concatenate_dataframes(
        dataframes,
        output_types,
        unique_output_dimensions,
        gt_na_values,
        output_dim_cat_map,
    )

    # write outputs to database
    symbols = ["symbols"] if self._symbols_dimension else []
    # NOTE(review): the domain uses self._output_dimensions while the records
    # were built with unique_output_dimensions (which may include added
    # universal dimensions) — presumably the two are kept in sync elsewhere;
    # verify against _add_universal_dimensions / _create_output_dimensions.
    for ot in output_types:
        if ot == "set":
            self._cdb.container.addSet(
                self._output_name[ot],
                domain=symbols + self._output_dimensions,
                records=outputs[ot],
            )
        elif ot == "parameter":
            self._cdb.container.addParameter(
                self._output_name[ot],
                domain=symbols + self._output_dimensions,
                records=outputs[ot],
            )

        if self._trace > 2:
            self._cdb.print_log(
                f"Connect Container symbol={self._output_name[ot]}:\n {self._cdb.container[self._output_name[ot]].records}\n"
            )

    if self._trace > 0:
        self._describe_container(self._cdb.container, "Connect Container (after):")
|