mrio-toolbox 1.1.2__py3-none-any.whl → 1.1.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mrio-toolbox might be problematic. Click here for more details.

Files changed (61)
  1. {mrio_toolbox-1.1.2.dist-info → mrio_toolbox-1.1.3.dist-info}/METADATA +1 -1
  2. mrio_toolbox-1.1.3.dist-info/RECORD +5 -0
  3. mrio_toolbox-1.1.3.dist-info/top_level.txt +1 -0
  4. __init__.py +0 -21
  5. _parts/_Axe.py +0 -539
  6. _parts/_Part.py +0 -1739
  7. _parts/__init__.py +0 -7
  8. _parts/part_operations.py +0 -57
  9. extractors/__init__.py +0 -20
  10. extractors/downloaders.py +0 -36
  11. extractors/emerging/__init__.py +0 -3
  12. extractors/emerging/emerging_extractor.py +0 -117
  13. extractors/eora/__init__.py +0 -3
  14. extractors/eora/eora_extractor.py +0 -132
  15. extractors/exiobase/__init__.py +0 -3
  16. extractors/exiobase/exiobase_extractor.py +0 -270
  17. extractors/extractors.py +0 -81
  18. extractors/figaro/__init__.py +0 -3
  19. extractors/figaro/figaro_downloader.py +0 -280
  20. extractors/figaro/figaro_extractor.py +0 -187
  21. extractors/gloria/__init__.py +0 -3
  22. extractors/gloria/gloria_extractor.py +0 -202
  23. extractors/gtap11/__init__.py +0 -7
  24. extractors/gtap11/extraction/__init__.py +0 -3
  25. extractors/gtap11/extraction/extractor.py +0 -129
  26. extractors/gtap11/extraction/harpy_files/__init__.py +0 -6
  27. extractors/gtap11/extraction/harpy_files/_header_sets.py +0 -279
  28. extractors/gtap11/extraction/harpy_files/har_file.py +0 -262
  29. extractors/gtap11/extraction/harpy_files/har_file_io.py +0 -974
  30. extractors/gtap11/extraction/harpy_files/header_array.py +0 -300
  31. extractors/gtap11/extraction/harpy_files/sl4.py +0 -229
  32. extractors/gtap11/gtap_mrio/__init__.py +0 -6
  33. extractors/gtap11/gtap_mrio/mrio_builder.py +0 -158
  34. extractors/icio/__init__.py +0 -3
  35. extractors/icio/icio_extractor.py +0 -121
  36. extractors/wiod/__init__.py +0 -3
  37. extractors/wiod/wiod_extractor.py +0 -143
  38. mrio.py +0 -899
  39. mrio_toolbox-1.1.2.dist-info/RECORD +0 -59
  40. mrio_toolbox-1.1.2.dist-info/top_level.txt +0 -6
  41. msm/__init__.py +0 -6
  42. msm/multi_scale_mapping.py +0 -863
  43. utils/__init__.py +0 -3
  44. utils/converters/__init__.py +0 -5
  45. utils/converters/pandas.py +0 -244
  46. utils/converters/xarray.py +0 -132
  47. utils/formatting/__init__.py +0 -0
  48. utils/formatting/formatter.py +0 -527
  49. utils/loaders/__init__.py +0 -7
  50. utils/loaders/_loader.py +0 -312
  51. utils/loaders/_loader_factory.py +0 -96
  52. utils/loaders/_nc_loader.py +0 -184
  53. utils/loaders/_np_loader.py +0 -112
  54. utils/loaders/_pandas_loader.py +0 -128
  55. utils/loaders/_parameter_loader.py +0 -386
  56. utils/savers/__init__.py +0 -11
  57. utils/savers/_path_checker.py +0 -37
  58. utils/savers/_to_folder.py +0 -165
  59. utils/savers/_to_nc.py +0 -60
  60. {mrio_toolbox-1.1.2.dist-info → mrio_toolbox-1.1.3.dist-info}/WHEEL +0 -0
  61. {mrio_toolbox-1.1.2.dist-info → mrio_toolbox-1.1.3.dist-info}/licenses/LICENSE +0 -0
@@ -1,300 +0,0 @@
1
- """
2
- Created on Mar 02 11:39:45 2018
3
-
4
- """
5
- import numpy as np
6
- from ._header_sets import _HeaderDims
7
- from typing import Union,List,Dict
8
-
9
-
10
class HeaderArrayObj(object):
    """In-memory representation of a single HAR header array.

    Bundles the numeric data (``array``), its dimension/set description
    (``sets``, a ``_HeaderDims``), a 12-character coefficient name and a
    long descriptive name.  Arithmetic operators are overloaded so header
    arrays combine much like numpy arrays, with set metadata propagated.
    """

    # Outrank numpy's own operators so that ndarray <op> HeaderArrayObj
    # dispatches to our reflected methods instead of numpy broadcasting.
    __array_priority__ = 2

    def __init__(self):
        self._coeff_name = ""
        self._array = None
        self._sets = None
        self._long_name = ""

    @property
    def array(self) -> np.ndarray:
        """The underlying numpy data array."""
        return self._array

    @array.setter
    def array(self, obj):
        self._array = obj

    @property
    def coeff_name(self):
        """Coefficient name; padded to 12 characters on assignment."""
        return self._coeff_name

    @coeff_name.setter
    def coeff_name(self, obj):
        if not issubclass(type(obj), str):
            msg = "'obj' must be of 'str' type."
            raise TypeError(msg)
        if len(obj) < 12:
            # BUG FIX: the original called obj.ljust(12) and discarded the
            # result, so short names were stored unpadded.
            obj = obj.ljust(12)
        self._coeff_name = obj

    @property
    def long_name(self):
        """Long descriptive name of the header."""
        return self._long_name

    @long_name.setter
    def long_name(self, obj):
        self._long_name = obj

    @property
    def sets(self):
        """Dimension description (``_HeaderDims``) of the array."""
        return self._sets

    @sets.setter
    def sets(self, obj):
        self._sets = obj

    @property
    def setNames(self):
        return self._sets.setNames

    @setNames.setter
    def setNames(self, sNames):
        self._sets.setNames = sNames

    @property
    def setElements(self):
        return self._sets.setElements

    @property
    def rank(self):
        """Number of dimensions of the data array."""
        return len(self.array.shape)

    def __getitem__(self, item) -> 'HeaderArrayObj':
        # Translation from set labels to numpy indices is delegated to the dims.
        npInd, rankInd, newDim = self._sets.transform_index(item)
        return HeaderArrayObj.HeaderArrayFromCompiledData(
            array=np.array(self.array[npInd][rankInd]), SetDims=newDim)

    def __setitem__(self, key, value):
        npInd, rankInd, newDim = self._sets.transform_index(key)
        if isinstance(value, HeaderArrayObj):
            self.array[npInd] = value.array
        elif isinstance(value, (np.ndarray, int, float)):
            self.array[npInd] = value
        else:
            raise TypeError("Only HeaderArrayObj, np.ndarray, int, float allowed in __setitem__")

    def is_valid(self, raise_exception=True) -> bool:
        """
        Checks if ``self`` is a valid ``HeaderArrayObj``.

        :param bool raise_exception: If `False`, `True`/`False` will be returned on check success/failure. Otherwise an exception is raised (the default).
        :return bool:
        """

        if not isinstance(self._array, (np.ndarray, np.float32, np.int32, np.float64)):
            if raise_exception:
                raise TypeError("HeaderArrayObj 'array' must be of type 'numpy.ndarray'.")
            else:
                return False

        if not isinstance(self._sets, _HeaderDims):
            raise TypeError("'sets' must be of type _HeaderDims")

        if not isinstance(self.long_name, str):
            raise TypeError("'long_name' must be of str type.")

        if not isinstance(self.coeff_name, str):
            raise TypeError("'coeff_name' must be of str type.")

        if self._sets.shape != self.array.shape:
            # A dimensionless set description is still valid for scalar
            # (size-1) arrays.
            if not (len(self._sets.shape) == 0 and self.array.size == 1):
                raise ValueError("shape of set and array do not match")
        return True

    @staticmethod
    def SetHeaderFromData(setName: str, setElements: 'Union[List[str], np.array]', long_name: str = None):
        """Create a 1-D header array describing a set and its elements.

        :param str setName: set name, at most 12 characters.
        :param setElements: list or numpy array of element names (<=12 chars each).
        :param str long_name: optional description, prefixed with "Set <name>".
        """
        if not isinstance(setName, str):
            raise TypeError("setName must be of type str")
        if len(setName.strip()) > 12:
            raise ValueError("setName is restricted to 12 Characters")
        if long_name is None: long_name = ""
        if isinstance(long_name, str):
            long_name = "Set " + setName.strip() + " " + long_name.strip()
        else:
            raise TypeError("LongName must be string")

        if isinstance(setElements, (list, np.ndarray)):
            if not all([isinstance(el, str) for el in setElements]):
                raise TypeError("All Set Elements must be of type str")
            if not all([len(el) <= 12 for el in setElements]):
                # typo fixed in message ("Elelement" -> "Element")
                raise ValueError("Set Element strings must be 12 characters at most")

        if isinstance(setElements, list):
            array = np.array(setElements)
            setElDict = {setName: setElements}
        elif isinstance(setElements, np.ndarray):
            array = setElements
            setElDict = {setName: setElements.tolist()}
        else:
            raise TypeError("setElements must be list of str or np array of strings")

        return HeaderArrayObj.HeaderArrayFromData(array=array, long_name=long_name, sets=[setName], setElDict=setElDict)

    @staticmethod
    def HeaderArrayFromData(array: np.ndarray, coeff_name: str = None, long_name: str = None,
                            sets: 'List[str]' = None, setElDict: 'Dict[str, List[str]]' = None) -> 'HeaderArrayObj':
        """
        Creates a new HeaderArrayObj from basic data. I.e. sets and set elements are given as basic list and dict[str:list[str]]
        """

        hao = HeaderArrayObj()

        HeaderArrayObj._setHeaderBaseData(array, coeff_name, hao, long_name)
        if sets is None:
            # No set information: derive anonymous dimensions from the shape.
            hao.sets = _HeaderDims.fromShape(array.shape)
        else:
            if not isinstance(sets, list):
                raise TypeError("sets must be of type list")
            if not all(isinstance(setName, str) for setName in sets):
                raise TypeError("all setNames in sets must be strings")
            if not all(len(setName) <= 12 for setName in sets):
                raise TypeError("all setNames in sets must be shorter than 13 Characters")

            if setElDict is None: setElDict = {}
            if not isinstance(setElDict, dict):
                raise TypeError("setElDict must be of type dict[str:list[str]]")

            hao.sets = _HeaderDims.fromSetShape(sets, setElDict, array.shape)

        if hao.is_valid():
            return hao

    @staticmethod
    def HeaderArrayFromCompiledData(array: np.ndarray, coeff_name: str = None, long_name: str = None,
                                    SetDims: '_HeaderDims' = None) -> 'HeaderArrayObj':
        """
        Creates a new HeaderArrayObj from precompiled data. I.e. sets are already in _HeaderDim structure
        """

        hao = HeaderArrayObj()

        HeaderArrayObj._setHeaderBaseData(array, coeff_name, hao, long_name)
        # BUG FIX: the None check must come first. The original raised
        # TypeError for SetDims=None, making the default-shape branch
        # unreachable.
        if SetDims is None:
            hao.sets = _HeaderDims.fromShape(array.shape)
        elif not isinstance(SetDims, _HeaderDims):
            raise TypeError("sets must be of type _HeaderDims")
        else:
            hao.sets = SetDims

        if hao.is_valid():
            return hao

    @staticmethod
    def _setHeaderBaseData(array, coeff_name, hao, long_name) -> None:
        """Assign array, coeff_name and long_name to *hao*, applying defaults
        and the fixed-width padding required by the HAR format."""
        if not isinstance(array, (np.ndarray, np.float32, np.int32, np.float64)):
            # The stray debug print(type(array)) was removed; the type is
            # reported in the exception message instead.
            raise HeaderArrayObj.UnsupportedArrayType(
                "'array' must be of numpy.ndarray type, got %s." % type(array))

        # Defaults handling: coeff_name is fixed at 12 chars, long_name at 70.
        if coeff_name is None:
            coeff_name = " " * 12
        if long_name is None:
            long_name = coeff_name
        if len(coeff_name) < 12:
            coeff_name = coeff_name.ljust(12)
        if len(long_name) < 70:
            long_name = long_name.ljust(70)
        hao.array = array
        hao.coeff_name = coeff_name
        hao.long_name = long_name

    def array_operation(self,
                        other: "Union[np.ndarray, HeaderArrayObj]",
                        operation: str,
                        **kwargs) -> 'HeaderArrayObj':
        """
        This method is implemented to allow for operations on the arrays of HeaderArrayObjs. Most Tablo-like
        functionality is replicated with this method.
        :param "HeaderArrayObj" other: The second ``HeaderArrayObj`` involved in the operation.
        :param str operation: A ``str`` specifying the ``numpy.ndarray`` operation attribute - e.g. ``"__add__"``.
        :param dict kwargs: Any additional kwargs are passed to the new ``HeaderArrayObj``.
        :return: A new ``HeaderArrayObj`` that results from the operation.
        """

        if issubclass(type(other), HeaderArrayObj):
            new_array = getattr(self.array, operation)(other.array)
            new_sets = self._sets.matchSets(sets=other._sets)
        elif issubclass(type(other), np.ndarray):
            new_array = getattr(self.array, operation)(other)
            new_sets = self._sets.matchSets(shape=other.shape)
        elif issubclass(type(other), (float, int)):
            # Scalar operand: the set structure is unchanged.
            new_array = getattr(self.array, operation)(other)
            new_sets = self._sets
        else:
            msg = "Operation is not permitted for objects that are not of 'numpy.ndarray' type, or 'HeaderArrayObj' type."
            raise TypeError(msg)

        return HeaderArrayObj.HeaderArrayFromCompiledData(array=new_array, SetDims=new_sets, **kwargs)

    def __neg__(self):
        # NOTE: negates in place and returns self (unlike the binary
        # operators, which return new objects). Kept for backward
        # compatibility with existing callers.
        self.array = -self.array
        return self

    def __add__(self, other):
        return self.array_operation(other, "__add__")

    def __mul__(self, other):
        return self.array_operation(other, "__mul__")

    def __truediv__(self, other):
        return self.array_operation(other, "__truediv__")

    def __floordiv__(self, other):
        return self.array_operation(other, "__floordiv__")

    def __pow__(self, other):
        return self.array_operation(other, "__pow__")

    def __mod__(self, other):
        return self.array_operation(other, "__mod__")

    def __sub__(self, other):
        return self.array_operation(other, "__sub__")

    def __radd__(self, other):
        return self.array_operation(other, "__radd__")

    def __rmul__(self, other):
        return self.array_operation(other, "__rmul__")

    def __rtruediv__(self, other):
        return self.array_operation(other, "__rtruediv__")

    def __rfloordiv__(self, other):
        return self.array_operation(other, "__rfloordiv__")

    def __rpow__(self, other):
        return self.array_operation(other, "__rpow__")

    def __rmod__(self, other):
        return self.array_operation(other, "__rmod__")

    def __rsub__(self, other):
        return self.array_operation(other, "__rsub__")

    def __str__(self) -> str:
        outputstr = "\n"
        outputstr += "CoeffName".ljust(24) + ": " + self.coeff_name + "\n"
        outputstr += "Rank".ljust(24) + ": " + str(len(self.array.shape)) + "\n"
        if self.sets:
            outputstr += self.sets.__str__()

        return outputstr

    class UnsupportedArrayType(TypeError):
        """Raised if invalid array type passed."""
        pass
@@ -1,229 +0,0 @@
1
- """
2
- Class for decoding SL4 files.
3
- """
4
- from __future__ import print_function, absolute_import
5
- from .har_file import HarFileObj
6
- from .header_array import HeaderArrayObj
7
- import numpy as np
8
-
9
-
10
class SL4(object):
    """Decoder for GEMPACK SL4 solution files.

    Exposes, per variable, a header array containing the cumulative
    solution (plus subtotals when present), and the set information via
    the ``getSet`` / ``getVariable`` accessors.
    """

    def __init__(self, fname, extractList=None):
        """
        Decodes an SL4 file and returns variable Header (1 dim add containing cumulative solution + subtotals if present)
        In addition, set information is available via the getSet functions.

        :param fname: str filename of the SL4 file
        :param extractList: list(varNames) only required if a subset of variables should be extracted
        """
        self._sets = []
        self._variables = []
        self.setHeaders = {}
        self.variableDict = {}
        self.varTypeDict = {}
        # BUG FIX: the original passed the literal string "fname" instead of
        # the fname argument, so the companion .sol file was never found.
        self.solFile = HarFileObj(fname + ".sol")
        self.decode_SL4(fname, extractList)

    @property
    def variableNames(self):
        """Names of the variables extracted from the file (read-only)."""
        return self._variables

    @variableNames.setter
    def variableNames(self, val):
        raise Exception("Variables in SL4 class cannot be assigned")

    @property
    def setNames(self):
        """Names of the sets found in the file (read-only)."""
        return self._sets

    @setNames.setter
    def setNames(self, val):
        raise Exception("Sets in SL4 class cannot be assigned")

    def varType(self, name):
        """Return the GEMPACK type code of variable *name* (case-insensitive)."""
        if not name.strip().lower() in self.varTypeDict:
            raise Exception("Could not find variable '" + name + "' in the SL4 file. Please check the variable list")
        return self.varTypeDict[name.strip().lower()]

    def getSet(self, name):
        """Return the set header for *name* (case-insensitive)."""
        if not name.strip().lower() in self.setHeaders:
            raise Exception("Could not find set '" + name + "' in the SL4 file. Please check the set list")
        return self.setHeaders[name.strip().lower()]

    def getVariable(self, name: str) -> 'HeaderArrayObj':
        """Return the solution header array for variable *name* (case-insensitive)."""
        if not name.strip().lower() in self.variableDict:
            raise Exception("Could not find variable '" + name + "' in the SL4 file. Please check the variable list")
        return self.variableDict[name.strip().lower()]

    def decode_SL4(self, fname, extractList):
        """Decode the SL4 file *fname*, keeping only variables in *extractList*
        (all variables when extractList is falsy)."""
        HarObj = HarFileObj(fname)

        # collect the set information. Not sure whether intertemporals work yet
        setNames = HarObj["STNM"]
        self.generateSetHeaders(HarObj, setNames)
        # By default only the cumulative result is extracted; if subtotals
        # are present they are appended to these parallel lists.
        resultsSet = ["Cumulative"]
        resultsDataHeaders = ["CUMS"]
        resultsShockComponents = ["SHCK"]
        resultsShockList = ["SHCL"]
        # check for subtotals
        if "STLS" in HarObj.getHeaderArrayNames():
            self.appendSubtotals(HarObj, resultsDataHeaders, resultsSet, resultsShockComponents, resultsShockList)

        # Get the common headers needed to extract the variables
        varNames = HarObj["VARS"]
        varDims = HarObj["VCNA"]
        varSetPtr = HarObj["VCAR"]
        varLabel = HarObj["VCLB"]
        varSizeEnd = HarObj["ORND"]
        varSizeExo = HarObj["OREX"]
        varExoList = HarObj["OREL"]
        cumResCom = HarObj["CMND"]
        cumResPtr = HarObj["PCUM"]
        shockPtr = HarObj["PSHK"]
        shockVal = HarObj["SHOC"]

        varLower = [name.strip().lower() for name in varNames.array]
        if not extractList: extractList = varLower
        useVars = [item.lower() for item in extractList]
        useDict = dict(zip(useVars, useVars))

        self.varTypeDict = dict(zip(varLower, HarObj["VCTP"].array))

        self._variables = [name.strip() for name in varNames.array.tolist()]
        varSetDict = {}
        nvar = len(self._variables)

        nexoListUsed = 0
        setPos = 0

        self.variableDict = {}

        # extract the variables. The resulting headers do not distinguish between endo and exo
        for i in range(0, nvar):
            useIt = varLower[i] in useDict
            setPos = self.generateSetDictEntry(i, resultsSet, setNames, setPos, varDims, varSetDict, varSetPtr)

            outDataList = self.assembleVariableData(HarObj, cumResCom, cumResPtr, i, nexoListUsed,
                                                    resultsDataHeaders, resultsShockComponents,
                                                    resultsShockList, shockPtr, shockVal, varExoList,
                                                    varSizeEnd, varSizeExo, generateData=useIt)

            if useIt: self.reshapeAndAdd(i, outDataList, varLabel, varSetDict)
            nexo = varSizeExo.array[i, 0]
            nendo = varSizeEnd.array[i, 0]
            # OREL only lists exogenous components of partially exogenous
            # variables, so only those advance the pointer.
            if nexo != 0 and nendo != 0: nexoListUsed += nexo

        # adjust available information to extracted data
        self._variables = [name.strip() for name in varNames.array.tolist() if name.strip().lower() in useDict]

        self.varTypeDict = {key: val for key, val in self.varTypeDict.items() if key.lower() in useDict}

    def generateSetHeaders(self, HarObj, setNames):
        """Populate self.setHeaders with one set header per known set."""
        setSizes = HarObj["SSZ"]
        setLabels = HarObj["STLB"]
        setElStat = HarObj["ELST"]
        setEls = HarObj["STEL"]
        setElPtr = HarObj["ELAD"]
        self._sets = [name.strip().lower() for name in setNames.array.tolist()]
        nsets = len(self._sets)
        self.setHeaders = {}
        for i in range(0, nsets):
            # 'k' marks sets whose elements are known (stored in STEL).
            if setElStat.array[i] == 'k':
                start = (setElPtr.array[i, 0] - 1)
                end = start + setSizes.array[i, 0]
                self.setHeaders[self._sets[i].strip().lower()] = HeaderArrayObj.SetHeaderFromData(
                    self._sets[i], setEls.array[start:end], setLabels.array[i])

    def reshapeAndAdd(self, i, outDataList, varLabel, varSetDict):
        """Reshape the flat solution data of variable *i* to its set sizes and
        store the resulting header array in self.variableDict."""
        flatData = np.concatenate(outDataList)
        varSets = [thisSet.strip() for thisSet in varSetDict[self._variables[i].strip().lower()]]
        simSizes = tuple(
            [self.getSet(thisSet).array.shape[0] for thisSet in varSetDict[self._variables[i].strip().lower()]])
        setElDict = {}
        for myset in varSets:
            setElDict[myset.strip().lower()] = self.getSet(myset).array.tolist()
        # GEMPACK stores data column-major, hence order="F".
        finalData = flatData.reshape(simSizes, order="F").astype(np.float32)
        # create headers for all variables
        self.variableDict[self._variables[i].strip().lower()] = \
            HeaderArrayObj.HeaderArrayFromData(finalData, self._variables[i].strip()[0:12], varLabel.array[i][0:70], varSets, setElDict)

    def assembleVariableData(self, HarObj, cumResCom, cumResPtr, iVar, nexoListUsed, resultsDataHeaders,
                             resultsShockComponents, resultsShockList, shockPtr, shockVal, varExoList, varSizeEnd,
                             varSizeExo, generateData=True):
        """Assemble the flat data vector(s) of variable *iVar*, one entry per
        result (cumulative first, then subtotals). Exogenous components are
        zero-filled and then overwritten with their shock values."""
        nexo = varSizeExo.array[iVar, 0]
        nendo = varSizeEnd.array[iVar, 0]
        outDataList = []
        if not generateData: return outDataList

        for DataHead, ShockComHead, ShockListHead in zip(resultsDataHeaders, resultsShockComponents, resultsShockList):
            cumRes = HarObj[DataHead]
            shockCom = HarObj[ShockComHead]
            shockList = HarObj[ShockListHead]
            start = cumResPtr.array[iVar, 0] - 1
            end = start + cumResCom.array[iVar, 0]
            Data = np.asfortranarray(cumRes.array[start:end, 0])
            nshk = shockCom.array[iVar, 0]
            # Count how many entries of the shock list belong to the
            # variables preceding iVar.
            nShkListUsed = 0
            for prevVar in range(0, iVar):
                prevNEndo = varSizeEnd.array[prevVar, 0]
                prevNExo = varSizeExo.array[prevVar, 0]
                prevNShocked = shockCom.array[prevVar, 0]
                if prevNShocked != prevNExo or prevNEndo != 0: nShkListUsed += prevNShocked

            if nexo != 0 and nendo != 0:  # partially exo
                insertMask = []
                for j in range(nexoListUsed, nexoListUsed + nexo):
                    insertMask.append(varExoList.array[j, 0] - (j - nexoListUsed + 1))

                flatData = np.insert(Data, insertMask, 0)

                self.insertShocks(flatData, iVar, nshk, nexo, shockList, shockPtr, shockVal, nShkListUsed)
            elif nendo != 0:  # fully endo
                flatData = Data
            else:  # fully exo
                flatData = np.zeros(nexo)
                self.insertShocks(flatData, iVar, nshk, nexo, shockList, shockPtr, shockVal, nShkListUsed)
            outDataList.append(flatData)
        return outDataList

    def generateSetDictEntry(self, i, resultsSet, setNames, setPos, varDims, varSetDict, varSetPtr):
        """Record the set names spanned by variable *i*; returns the advanced
        read position in the set-pointer header."""
        ndim = varDims.array[i, 0]
        if ndim > 0:
            varSetDict[self._variables[i].strip().lower()] = setNames.array[[j - 1 for j in varSetPtr.array[setPos:setPos + ndim, 0]]].tolist()
            varSetDict[self._variables[i].strip().lower()] = [name.strip() for name in varSetDict[self._variables[i].strip().lower()]]
            setPos += ndim
        else:
            varSetDict[self._variables[i].strip().lower()] = []
        # With subtotals present, an extra #RESULTS dimension is appended.
        if len(resultsSet) > 1: varSetDict[self._variables[i].strip().lower()].append("#RESULTS")
        return setPos

    def appendSubtotals(self, HarObj, resultsDataHeaders, resultsSet, resultsShockComponents, resultsShockList):
        """Extend the parallel result lists with one entry per subtotal and
        register the #RESULTS set."""
        nresults = HarObj["STLS"].array.flatten()[0]
        for i in range(1, nresults + 1):
            resultsDataHeaders.append("%03iS" % i)
            resultsShockComponents.append("%03iC" % i)
            resultsShockList.append("%03iL" % i)
        description = ["Cumulative Results"]
        description.extend(HarObj["STDS"].array.tolist())
        for name in HarObj["STDS"].array:
            # Set element names are limited to 12 characters.
            resultsSet.append(str(name) if len(name) <= 12 else str(name)[0:12])
        self._sets.append("#results")
        # typo fixed in description string ("Cumlative" -> "Cumulative")
        self.setHeaders["#results"] = HeaderArrayObj.SetHeaderFromData("#RESULTS", np.array(resultsSet), "Cumulative and Subtotal elements")

    @staticmethod
    def insertShocks(flatData, i, nshk, nexo, shockList, shockPtr, shockVal, nShkListUsed):
        """Write the shock values of variable *i* into *flatData* in place.

        When every exogenous component is shocked (nshk == nexo) the values
        are copied contiguously; otherwise the shock list gives the target
        positions (1-based)."""
        if nshk > 0:
            start = shockPtr.array[i, 0] - 1
            if nshk == nexo:
                flatData[0:nexo] = shockVal.array[start:start + nshk, 0]
            else:
                flatData[shockList.array[nShkListUsed:nShkListUsed + nshk, 0] - 1] = shockVal.array[start:start + nshk, 0]
@@ -1,6 +0,0 @@
1
- """
2
- This module contains the IO builder for the GTAP 11 extractor.
3
- """
4
- from .mrio_builder import build_io
5
-
6
- __all__ = ["build_io"]
@@ -1,158 +0,0 @@
1
- """
2
- Functions to build an MRIO object from GTAP data.
3
- """
4
-
5
- import mrio_toolbox as mrio
6
- import logging
7
-
8
- log = logging.getLogger(__name__)
9
-
10
def transform_gtap(gtap):
    """
    Wrap extracted GTAP data in an MRIO object.

    Parameters
    ----------
    gtap : xarray Dataset
        Extracted gtap data

    Returns
    -------
    mrio.MRIO
        MRIO object built from the dataset.
    """
    wrapped = mrio.MRIO(data=gtap)
    return wrapped
20
-
21
def build_io(gtap):
    """
    Create the input-output table from the GTAP data.

    This function treats GTAP data as is.
    It does not solve any imbalance and stays as close as possible
    to the raw dataset.

    The international trade block allocation follows
    from the proportionality assumption.
    Given the disagreement between import and export data,
    we average the two to get the bilateral trade shares.

    Parameters
    ----------
    gtap : MRIO object (or raw extraction convertible via transform_gtap)
        Direct extraction from the gtap data

    Returns
    -------
    mrio.MRIO
        MRIO with parts "t" (inter-industry), "y" (final demand) and
        "va" (value added), dimensions renamed to countries/sectors.
    """
    if not isinstance(gtap,mrio.MRIO):
        gtap = transform_gtap(gtap)

    #We solve the disagreement between import and export data
    #by averaging the two
    log.info("Compute the trade shares")
    trade = (gtap.VXSB + gtap.VMSB)/2

    #There are some non-null elements in the diagonal of the trade matrix
    #These are statistical artefacts from GTAP. We set them to zero.
    for country in gtap.REG:
        trade["all",country,country] = 0

    #Then, we normalize the trade shares
    #(zero totals are set to 1 to avoid division by zero;
    # the corresponding shares are all zero anyway)
    imports = trade.sum(axis=1)
    imports.data[imports.data == 0] = 1
    shares = trade/imports.expand_dims(axis=1)
    #Shares is of shape (sectors, country of origin, country of destination)
    #Shares sum to 1 for each sector and country of destination

    #Now, we create the inter-industry matrix
    log.info("Create the inter-industry matrix")
    log.info("Fill the diagonal blocks")
    T = gtap.new_part(name="t",dimensions=(("REG","COMM"),("REG","COMM")))

    log.info("Fill the off-diagonal blocks")
    for current in gtap.REG:
        #We create the inter-industry matrix within each country
        #We only need to fit VDFB into the diagonal blocks of the matrix
        T[(current,"all"),(current,"all")] = gtap.VDFB["all","all",current]
        for partner in gtap.REG:
            #We create the international trade block for each trade partner
            if partner != current:
                #We use the trade shares to split the sectoral imports
                #by trade partner
                #This is known as the proportionality assumption
                #It means that the trade shares of a country in a given market
                #do not depend on the sector buying it
                T[(partner,"all"),(current,"all")] = gtap.VMFB[
                    "all","all",current
                    ]*shares[
                        "all",partner,current
                        ]

    log.info("Create the final demand matrix")
    #We turn to the final demand matrix
    #First, we create the labels for the final demand categories
    gtap.labels["y_labs"] = ["Households","Government","Investment"]

    #Now, we create the final demand matrix
    Y = gtap.new_part(name="y",dimensions=(("REG","COMM"),("REG","y_labs")))

    #The process is similar to the inter-industry matrix,
    #but we have to loop over the final demand categories
    #(mapping category name -> GTAP header letter, e.g. "P" -> VDPB/VMPB)
    categories = {
        "Households":"P",
        "Government":"G",
        "Investment":"I"
        }
    for country in gtap.REG:
        for category in categories:
            #Fill the domestic use of domestic products
            Y[(country,"all"),(country,category)] = gtap.parts[
                f"VD{categories[category]}B"]["all",country]
            for partner in gtap.REG:
                if partner != country:
                    #Fill the imports from each trade partner
                    Y[(partner,"all"),(country,category)] = gtap.parts[
                        f"VM{categories[category]}B"
                        ]["all",country].data[:,None,:]*shares[
                            "all",partner,country
                            ].data

    #Finally, we create the value added matrix.
    #Our value added in basic prices encompasses the primary endowments,
    #margins and net taxes and subsidies.
    #Margins also include export taxes and subsidies,
    #as these do not accrue to the value added of the importing country.

    #Prepare the primary inputs / value added labels
    log.info("Create the value added matrix")
    va_labs = []
    for endowment in gtap.ENDW:
        va_labs.append(endowment)
    va_labs.append("margins")
    va_labs.append("net taxes and subsidies")
    gtap.labels["va_labs"] = va_labs

    #Create the value added matrix
    VA = gtap.new_part(name="va",dimensions=(("va_labs"),("REG","COMM")))

    #Reformat the endowment data to the (va_labs, REG x COMM) layout
    log.info("Cast the va_labs")
    endowments = gtap.EVFB.reformat([["ENDW"],["ACTS","REG"]])
    endowments = endowments.swap_ax_levels(1,"ACTS","REG")
    VA[gtap.ENDW] = endowments

    #Aggregate the margins and export taxes and subsidies
    log.info("Aggregate margins and export taxes and subsidies")
    margins = gtap.VTWR.sum(-1).sum(0) + gtap.XTRV.sum(2)
    margins = margins.flatten()
    margins = margins.swap_ax_levels(0,"COMM","REG")
    VA[["margins"]] = margins

    #Get the net taxes and subsidies as the residual
    #(total output minus intermediate inputs and other value added)
    log.info("Derive net taxes and subsidies")
    output = T.sum(1) + Y.sum(1)
    VA[["net taxes and subsidies"]] = output - T.sum(0) - VA.sum(0)

    #Assemble the final MRIO object, carrying over the source metadata
    gtap_mrio = mrio.MRIO()
    gtap_mrio.metadata = gtap.metadata
    for part in [T,Y,VA]:
        gtap_mrio.add_part(part)

    gtap_mrio.rename_dimensions(
        ["REG","COMM"],
        ["countries","sectors"]
        )

    return gtap_mrio