mms-client 1.9.3-py3-none-any.whl → 1.11.0-py3-none-any.whl

@@ -115,7 +115,7 @@ def power_positive(alias: str, optional: bool = False):
 
     Returns: A Pydantic Field object for the power value.
     """
-    return attr(default=None if optional else PydanticUndefined, name=alias, gt=0, le=10000000)
+    return attr(default=None if optional else PydanticUndefined, name=alias, ge=0, le=10000000)
 
 
 def price(alias: str, limit: float, optional: bool = False):
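
The only change in this hunk is the lower bound on the power field: gt=0 becomes ge=0, so a value of exactly zero now passes validation. A minimal sketch of the behavioural difference, using plain pydantic rather than the package's own field helper:

    from pydantic import BaseModel, Field, ValidationError

    class Power(BaseModel):
        # ge=0 mirrors the 1.11.0 constraint; the 1.9.3 equivalent was gt=0
        value: int = Field(ge=0, le=10000000)

    Power(value=0)       # accepted from 1.11.0 onwards (zero is now in range)
    try:
        Power(value=-1)  # still rejected: below the lower bound
    except ValidationError:
        pass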
@@ -4,7 +4,9 @@ from enum import Enum
 from typing import Optional
 
 from pydantic_extra_types.pendulum_dt import Date
+from pydantic_xml import BaseXmlModel
 from pydantic_xml import attr
+from pydantic_xml import element
 
 from mms_client.types.base import Envelope
 from mms_client.types.fields import participant
@@ -32,12 +34,19 @@ class BaseMarketRequest(Envelope):
     user: str = attr(name="UserName", min_length=1, max_length=12, pattern=r"^[A-Z0-9]*$")
 
 
+class Defaults(BaseXmlModel):
+    """Represents the default settings to apply when submitting a market request."""
+
+    # Whether or not the submission represents the default
+    is_default: bool = attr(name="StandingFlag")
+
+
 class MarketQuery(BaseMarketRequest):
     """Represents the base fields for a market query."""
 
     # If the market type is specified as "DAM" (day-ahead market), the number of days should be specified as "1".
     # Otherwise, this field indicates the number of days ahead for which the data is being queried.
-    days: int = attr(default=1, name="NumOfDays", ge=1, le=7)
+    days: Optional[int] = attr(default=None, name="NumOfDays", ge=1, le=7)
 
 
 class MarketSubmit(BaseMarketRequest):
@@ -48,7 +57,11 @@ class MarketSubmit(BaseMarketRequest):
 
     # If the market type is specified as "DAM" (day-ahead market), the number of days should be specified as "1".
     # Otherwise, this field indicates the number of days ahead for which the data is being submitted.
-    days: int = attr(default=1, name="NumOfDays", ge=1, le=31)
+    days: Optional[int] = attr(default=None, name="NumOfDays", ge=1, le=31)
+
+    # Default values to include with the submission. The request will be rejected if this is included in a request
+    # where it is not allowed.
+    defaults: Optional[Defaults] = element(default=None, tag="StandingData")
 
 
 class MarketCancel(BaseMarketRequest):
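
Taken together, these hunks make NumOfDays optional and add an optional StandingData child element to market submissions. A standalone sketch of the new pattern, reusing the class and tag names shown above but not the package's real module layout (so the model names here are illustrative only):

    from typing import Optional
    from pydantic_xml import BaseXmlModel, attr, element

    class Defaults(BaseXmlModel):
        # Whether or not the submission represents the default
        is_default: bool = attr(name="StandingFlag")

    class Submit(BaseXmlModel, tag="MarketSubmit"):
        # NumOfDays is now optional and unset by default
        days: Optional[int] = attr(default=None, name="NumOfDays", ge=1, le=31)
        # Optional StandingData element; the MMS rejects it where it is not allowed
        defaults: Optional[Defaults] = element(default=None, tag="StandingData")

    # Produces a MarketSubmit element with a NumOfDays attribute and a nested StandingData child
    print(Submit(days=1, defaults=Defaults(is_default=True)).to_xml())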
mms_client/types/offer.py CHANGED
@@ -72,6 +72,12 @@ class OfferStack(Payload):
     # The unit price of the power, in JPY/kW/segment
     unit_price: Decimal = price("OfferUnitPrice", 10000.00)
 
+    # The unit price charged for the start up cost of the power, in JPY/kW/segment
+    start_up_unit_price: Annotated[Decimal, price("StartUpUnitPrice", 10000.00, True)]
+
+    # The unit price charged for the ramp down cost of the power, in JPY/kW/segment
+    ramp_down_unit_price: Annotated[Decimal, price("RampDownUnitPrice", 10000.00, True)]
+
     # The ID of the offer to which this stack belongs
     id: Optional[str] = offer_id("OfferId", True)
 
@@ -0,0 +1,26 @@
+"""Contains objects for OMI information."""
+
+from pydantic_extra_types.pendulum_dt import Date
+from pydantic_xml import attr
+
+from mms_client.types.base import Envelope
+from mms_client.types.fields import participant
+
+
+class MarketSubmit(Envelope):
+    """Represents the base fields for a market registration request."""
+
+    # Date of the transaction in the format "YYYY-MM-DD"
+    date: Date = attr(name="Date")
+
+    # MMS code of the business entity to which the requesting user belongs, and will be used to track the user who made
+    # the request. This value will be checked against the certificate used to make the request.
+    participant: str = participant("ParticipantName")
+
+    # The user name of the person making the request. This value is used to track the user who made the request, and
+    # will be checked against the certificate used to make the request.
+    user: str = attr(name="UserName", min_length=1, max_length=12, pattern=r"^[A-Z0-9]*$")
+
+
+class MarketQuery(MarketSubmit):
+    """Represents the base fields for a market query."""
@@ -0,0 +1,81 @@
+"""Contains objects for MMS settlement."""
+
+from typing import Annotated
+from typing import List
+from typing import Optional
+
+from pendulum import Timezone
+from pydantic import field_serializer
+from pydantic import field_validator
+from pydantic_core import PydanticUndefined
+from pydantic_extra_types.pendulum_dt import Date
+from pydantic_extra_types.pendulum_dt import DateTime
+from pydantic_xml import attr
+from pydantic_xml import element
+
+from mms_client.types.base import Payload
+from mms_client.types.fields import company_short_name
+from mms_client.types.fields import participant
+
+
+def file_name(alias: str, optional: bool = False):
+    """Create a field for a file name.
+
+    Arguments:
+    alias (str): The name of the alias to assign to the Pydanitc field. This value will be used to map the field
+        to the JSON/XML key.
+    optional (bool): If True, the field will be optional with a default of None. If False, the field will be
+        required, with no default.
+
+    Returns: A Pydantic Field object for the file name.
+    """
+    return attr(
+        default=None if optional else PydanticUndefined,
+        name=alias,
+        min_length=18,
+        max_length=60,
+        pattern=r"^([A-Z0-9]{4}_){2}[A-Z0-9_-]{4,46}\.(pdf|zip|csv|xml)$",
+    )
+
+
+class SettlementFile(Payload):
+    """Represents a settlement file."""
+
+    # The name of the settlement file as it is recorded in the system
+    name: str = file_name("Name")
+
+    # The name of the participant (only valid if operating as a TSO)
+    participant: Optional[str] = participant("ParticipantName", True)
+
+    # The name of the company associated with the file (only valid if operating as a TSO)
+    company: Optional[str] = company_short_name("CompanyShortName", True)
+
+    # When the file was submitted (not sure why this can be None but it's in the spec)
+    submission_time: Optional[DateTime] = attr(name="SubmissionTime", default=None)
+
+    # The date when settlement occurred (not included if settlement is in the future)
+    settlement_date: Optional[Date] = attr(name="SttlDate", default=None)
+
+    # The size of the file in bytes, if it has been uploaded
+    size: Optional[int] = attr(name="FileSize", default=None, ge=0, lt=1000000000)
+
+    @field_serializer("submission_time")
+    def encode_datetime(self, value: DateTime) -> str:
+        """Encode the datetime to an MMS-compliant ISO 8601 string."""
+        return value.replace(tzinfo=None).isoformat() if value else ""
+
+    @field_validator("submission_time")
+    def decode_datetime(cls, value: DateTime) -> DateTime:  # pylint: disable=no-self-argument
+        """Decode the datetime from an MMS-compliant ISO 8601 string."""
+        return value.replace(tzinfo=Timezone("Asia/Tokyo"))
+
+
+class SettlementResults(Payload):
+    """Contains a list of settlement files that can be requested separately later."""
+
+    # The file results that were retrieved by the query
+    files: Annotated[List[SettlementFile], element(tag="File", min_length=1)]
+
+
+class SettlementQuery(Payload, tag="SettlementResultsFileListQuery"):
+    """Represents a request to query settlement results file list."""
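
The file_name helper above is mostly a regex constraint, so checking a candidate name against the pattern is the quickest way to see what a valid settlement file name looks like. The file names below are made up for illustration:

    import re

    PATTERN = r"^([A-Z0-9]{4}_){2}[A-Z0-9_-]{4,46}\.(pdf|zip|csv|xml)$"

    # Two four-character upper-case/numeric groups, an underscore-separated body, and an allowed extension
    assert re.match(PATTERN, "ABCD_EFGH_SETTLEMENT-2024.csv")

    # Lower-case names and other extensions are rejected
    assert re.match(PATTERN, "abcd_efgh_settlement.txt") is None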
@@ -0,0 +1,187 @@
+"""Contains objects for surplus capacity information."""
+
+from enum import Enum
+from typing import Optional
+
+from pendulum import Timezone
+from pydantic import field_serializer
+from pydantic import field_validator
+from pydantic_extra_types.pendulum_dt import DateTime
+from pydantic_xml import attr
+
+from mms_client.types.base import Payload
+from mms_client.types.enums import AreaCode
+from mms_client.types.fields import company_short_name
+from mms_client.types.fields import participant
+from mms_client.types.fields import power_positive
+from mms_client.types.fields import resource_name
+from mms_client.types.fields import resource_short_name
+from mms_client.types.fields import system_code
+
+
+class RejectCategory(Enum):
+    """Represents the category of the reason for rejecting a surplus capacity request."""
+
+    FUEL_RESTRICTION = "1"
+    RIVER_FLOW_RESTRICTION = "2"
+    WORK_RELATED = "3"
+    OTHER = "9"
+
+
+class OperationalRejectCategory(Enum):
+    """Represents the category of the reason for rejecting an operational request.
+
+    This include voltage adjustment, black start, over-power, peak mode or system security pump request.
+    """
+
+    EQUIPMENT_FAILURE = "1"
+    NOT_SUPPORTED = "2"
+    OTHER = "9"
+
+
+class SurplusCapacitySubmit(Payload, tag="RemainingReserveData"):
+    """Represents the base fields for a surplus capacity response."""
+
+    # The name of the resource for which the surplus capacity is being submitted
+    resource_code: str = resource_name("ResourceName")
+
+    # The DR pattern number for which the surplus capacity is being submitted
+    pattern_number: int = attr(name="DrPatternNumber", ge=1, le=20)
+
+    # The start block from when the surplus capacity should apply
+    start: DateTime = attr(name="StartTime")
+
+    # The end block until when the surplus capacity should apply
+    end: DateTime = attr(name="EndTime")
+
+    # The available surplus capacity that can be increased or dispatched when needed, such as in response to grid
+    # demand fluctuations. This should not be submitted for standalone generators.
+    upward_capacity: Optional[int] = power_positive("RemainingReserveUp", True)
+
+    # In the case where excess surplus capacity is rejected, this field will indicate the category of the reason.
+    upward_capacity_rejected: Optional[RejectCategory] = attr(default=None, name="RemainingReserveUpRejectFlag")
+
+    # If the upward dispatch is rejected, this field will indicate a specific reason.
+    upward_capacity_rejection_reason: Optional[str] = attr(
+        default=None, name="RemainingReserveUpRejectReason", min_length=1, max_length=50
+    )
+
+    # The available surplus capacity that can be decreased when needed, such as in response to grid demand fluctuations.
+    # This should not be submitted for standalone generators.
+    downward_capacity: Optional[int] = power_positive("RemainingReserveDown", True)
+
+    # In the case where excess surplus capacity is rejected, this field will indicate the category of the reason.
+    downward_capacity_rejected: Optional[RejectCategory] = attr(default=None, name="RemainingReserveDownRejectFlag")
+
+    # If the downward dispatch is rejected, this field will indicate a specific reason.
+    downward_capacity_rejection_reason: Optional[str] = attr(
+        default=None, name="RemainingReserveDownRejectReason", min_length=1, max_length=50
+    )
+
+    # If voltage adjustment is rejected, this field will indicate the category of the reason.
+    voltage_adjustment_rejected: Optional[OperationalRejectCategory] = attr(
+        default=None, name="VoltageAdjustmentRejectFlag"
+    )
+
+    # If voltage adjustment is rejected, this field will indicate a specific reason.
+    voltage_adjustment_rejection_reason: Optional[str] = attr(
+        default=None, name="VoltageAdjustmentRejectReason", min_length=1, max_length=50
+    )
+
+    # If black start is rejected, this field will indicate the category of the reason.
+    black_start_rejected: Optional[OperationalRejectCategory] = attr(default=None, name="BlackStartRejectFlag")
+
+    # If black start is rejected, this field will indicate a specific reason.
+    black_start_rejection_reason: Optional[str] = attr(
+        default=None, name="BlackStartRejectReason", min_length=1, max_length=50
+    )
+
+    # The additional reserve capacity that can be utilized in cases of excessive or "overpower" conditions, such as
+    # when demand exceeds usual levels.
+    over_power_capacity: Optional[int] = power_positive("OverPowerRemainingReserveUp", True)
+
+    # In the case where over-power capacity is rejected, this field will indicate the category of the reason.
+    over_power_rejected: Optional[OperationalRejectCategory] = attr(default=None, name="OverPowerRejectFlag")
+
+    # If over-power capacity is rejected, this field will indicate a specific reason.
+    over_power_rejection_reason: Optional[str] = attr(
+        default=None, name="OverPowerRejectReason", min_length=1, max_length=50
+    )
+
+    # The available surplus capacity that can be increased specifically during peak demand periods
+    peak_mode_capacity: Optional[int] = power_positive("PeakModeRemainingReserveUp", True)
+
+    # In the case where peak mode capacity is rejected, this field will indicate the category of the reason.
+    peak_mode_rejected: Optional[OperationalRejectCategory] = attr(default=None, name="PeakModeRejectFlag")
+
+    # If peak mode capacity is rejected, this field will indicate a specific reason.
+    peak_mode_rejection_reason: Optional[str] = attr(
+        default=None, name="PeakModeRejectReason", min_length=1, max_length=50
+    )
+
+    # Indicates whether the operation of a pumped-storage hydroelectric pump is restricted or disallowed for system
+    # security reasons.
+    system_security_pump_rejected: Optional[OperationalRejectCategory] = attr(
+        default=None, name="SystemSecurityPumpRejectFlag"
+    )
+
+    # If the operation of a pumped-storage hydroelectric pump is restricted or disallowed for system security reasons,
+    # this field will indicate a specific reason.
+    system_security_pump_rejection_reason: Optional[str] = attr(
+        default=None, name="SystemSecurityPumpRejectReason", min_length=1, max_length=50
+    )
+
+    @field_serializer("start", "end")
+    def encode_datetime(self, value: DateTime) -> str:
+        """Encode the datetime to an MMS-compliant ISO 8601 string."""
+        return value.replace(tzinfo=None).isoformat() if value else ""
+
+    @field_validator("start", "end")
+    def decode_datetime(cls, value: DateTime) -> DateTime:  # pylint: disable=no-self-argument
+        """Decode the datetime from an MMS-compliant ISO 8601 string."""
+        return value.replace(tzinfo=Timezone("Asia/Tokyo"))
+
+
+class SurplusCapacityData(SurplusCapacitySubmit, tag="RemainingReserveData"):
+    """Represents the base fields for a surplus capacity response."""
+
+    # The region in which the resource for which surplus capacity is being submitted is located
+    area: Optional[AreaCode] = attr(default=None, name="Area")
+
+    # The name of the BSP participant submitting the surplus capacity
+    participant: Optional[str] = participant("ParticipantName", True)
+
+    # The abbreviated name of the company submitting the surplus capacity
+    company: Optional[str] = company_short_name("CompanyShortName", True)
+
+    # The MMS code of the business entity to which the registration applies
+    system_code: Optional[str] = system_code("SystemCode", True)
+
+    # The abbreviated name of the resource being traded
+    resource_name: Optional[str] = resource_short_name("ResourceShortName", True)
+
+
+class SurplusCapacityQuery(Payload, tag="RemainingReserveDataQuery"):
+    """Represents the base fields for a surplus capacity query."""
+
+    # The name of the resource for which the surplus capacity is being submitted
+    resource_code: Optional[str] = resource_name("ResourceName", True)
+
+    # The DR pattern number for which the surplus capacity is being submitted
+    pattern_number: Optional[int] = attr(default=None, name="DrPatternNumber", ge=1, le=20)
+
+    # The start block from when the surplus capacity should apply
+    start: DateTime = attr(name="StartTime")
+
+    # The end block until when the surplus capacity should apply
+    end: DateTime = attr(name="EndTime")
+
+    @field_serializer("start", "end")
+    def encode_datetime(self, value: DateTime) -> str:
+        """Encode the datetime to an MMS-compliant ISO 8601 string."""
+        return value.replace(tzinfo=None).isoformat() if value else ""
+
+    @field_validator("start", "end")
+    def decode_datetime(cls, value: DateTime) -> DateTime:  # pylint: disable=no-self-argument
+        """Decode the datetime from an MMS-compliant ISO 8601 string."""
+        return value.replace(tzinfo=Timezone("Asia/Tokyo"))
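
The new surplus-capacity models share the same datetime convention as the settlement models: the serializer drops the timezone before calling isoformat(), and the validator re-attaches Asia/Tokyo, so times travel over the wire as naive JST strings. A small sketch of that round trip with pendulum (the timestamp values are made up):

    from pendulum import Timezone, datetime, parse

    start = datetime(2024, 4, 1, 12, 0, tz="Asia/Tokyo")

    # encode_datetime: drop the zone and emit a naive ISO 8601 string (JST wall-clock time)
    encoded = start.replace(tzinfo=None).isoformat()   # '2024-04-01T12:00:00'

    # decode_datetime: whatever comes back is stamped as Asia/Tokyo again
    decoded = parse(encoded).replace(tzinfo=Timezone("Asia/Tokyo"))
    assert decoded == start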
@@ -4,6 +4,7 @@ from logging import getLogger
 from typing import Dict
 from typing import List
 from typing import Optional
+from typing import Type
 from typing import Union
 
 from mms_client.types.base import E
@@ -88,3 +89,53 @@ class MMSValidationError(RuntimeError):
         self.method = method
         self.messages = messages
         super().__init__(self.message)
+
+
+class InvalidContainerError(ValueError):
+    """Error raised when the outer XML tag is not the expected one."""
+
+    def __init__(self, method: str, expected: str, actual: str):
+        """Initialize the error.
+
+        Arguments:
+        method (str): The method that caused the error.
+        expected (str): The expected outer XML tag.
+        actual (str): The actual outer XML tag.
+        """
+        self.message = f"{method}: Expected payload key '{expected}' in response, but found '{actual}'."
+        self.method = method
+        self.expected = expected
+        self.actual = actual
+        super().__init__(self.message)
+
+
+class EnvelopeNodeNotFoundError(ValueError):
+    """Error raised when the envelope node is not found."""
+
+    def __init__(self, method: str, expected: str):
+        """Initialize the error.
+
+        Arguments:
+        method (str): The method that caused the error.
+        expected (str): The expected envelope XML tag.
+        """
+        self.message = f"{method}: Expected envelope node '{expected}' not found in response."
+        self.method = method
+        self.expected = expected
+        super().__init__(self.message)
+
+
+class DataNodeNotFoundError(ValueError):
+    """Error raised when the data node is not found."""
+
+    def __init__(self, method: str, expected: Type):
+        """Initialize the error.
+
+        Arguments:
+        method (str): The method that caused the error.
+        expected (Type): The expected data node.
+        """
+        self.message = f"{method}: Expected data node '{expected.__name__}' not found in response."
+        self.method = method
+        self.expected = expected
+        super().__init__(self.message)
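
These three exception classes replace the bare ValueErrors previously raised by the serializer (see the serialization hunks below), and each carries the offending method name plus structured context. A small illustration of the attributes they expose, using made-up argument values:

    from mms_client.utils.errors import InvalidContainerError

    try:
        # Arguments: method, expected outer tag, actual outer tag (values here are illustrative)
        raise InvalidContainerError("MarketQuery_OfferQuery", "MarketData", "SomethingElse")
    except InvalidContainerError as err:
        print(err.method, err.expected, err.actual)
        print(err.message)  # "MarketQuery_OfferQuery: Expected payload key 'MarketData' in response, ..."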
@@ -29,6 +29,9 @@ from mms_client.types.base import Response
 from mms_client.types.base import ResponseCommon
 from mms_client.types.base import ResponseData
 from mms_client.types.base import SchemaType
+from mms_client.utils.errors import DataNodeNotFoundError
+from mms_client.utils.errors import EnvelopeNodeNotFoundError
+from mms_client.utils.errors import InvalidContainerError
 
 # Directory containing all our XML schemas
 XSD_DIR = Path(__file__).parent.parent / "schemas" / "xsd"
@@ -115,11 +118,12 @@ class Serializer:
         return self._to_canoncialized_xml(payload)
 
     def deserialize(
-        self, data: bytes, envelope_type: Type[E], data_type: Type[P], for_report: bool = False
+        self, method: str, data: bytes, envelope_type: Type[E], data_type: Type[P], for_report: bool = False
     ) -> Response[E, P]:
         """Deserialize the data to a response object.
 
         Arguments:
+        method (str): The method for which the data was received.
         data (bytes): The raw data to be deserialized.
         envelope_type (Type[Envelope]): The type of envelope to be constructed.
         data_type (Type[Payload]): The type of data to be constructed.
@@ -128,14 +132,15 @@ class Serializer:
         Returns: A response object containing the envelope and data extracted from the raw data.
         """
         tree = self._from_xml(data)
-        return self._from_tree(tree, envelope_type, data_type, for_report)
+        return self._from_tree(method, tree, envelope_type, data_type, for_report)
 
     def deserialize_multi(
-        self, data: bytes, envelope_type: Type[E], data_type: Type[P], for_report: bool = False
+        self, method: str, data: bytes, envelope_type: Type[E], data_type: Type[P], for_report: bool = False
     ) -> MultiResponse[E, P]:
         """Deserialize the data to a multi-response object.
 
         Arguments:
+        method (str): The method for which the data was received.
         data (bytes): The raw data to be deserialized.
         envelope_type (Type[Envelope]): The type of envelope to be constructed.
         data_type (Type[Payload]): The type of data to be constructed.
@@ -144,7 +149,7 @@ class Serializer:
         Returns: A multi-response object containing the envelope and data extracted from the raw data.
         """
         tree = self._from_xml(data)
-        return self._from_tree_multi(tree, envelope_type, data_type, for_report)
+        return self._from_tree_multi(method, tree, envelope_type, data_type, for_report)
 
     def _to_canoncialized_xml(self, payload: PayloadBase) -> bytes:
         """Convert the payload to a canonicalized XML string.
@@ -156,7 +161,8 @@ class Serializer:
         """
         # First, convert the payload to a raw XML string
         raw: bytes = payload.to_xml(
-            skip_empty=True,
+            exclude_none=True,
+            exclude_unset=True,
             encoding="utf-8",
             xml_declaration=False,
         )  # type: ignore[assignment]
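
The switch from skip_empty=True to exclude_none=True, exclude_unset=True changes how optional fields are dropped: instead of pruning empty XML nodes after the fact, fields that are None or were never set are excluded during model export. A standalone pydantic-xml sketch of the effect (the model here is invented for illustration):

    from typing import Optional
    from pydantic_xml import BaseXmlModel, attr

    class Item(BaseXmlModel, tag="Item"):
        name: str = attr()
        note: Optional[str] = attr(default=None)

    # The unset/None 'note' attribute is omitted from the output entirely
    print(Item(name="A").to_xml(exclude_none=True, exclude_unset=True))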
@@ -170,10 +176,13 @@ class Serializer:
         buffer.seek(0)
         return buffer.read()
 
-    def _from_tree(self, raw: Element, envelope_type: Type[E], data_type: Type[P], for_report: bool) -> Response[E, P]:
+    def _from_tree(
+        self, method: str, raw: Element, envelope_type: Type[E], data_type: Type[P], for_report: bool
+    ) -> Response[E, P]:
         """Convert the raw data to a response object.
 
         Arguments:
+        method (str): The method for which the data was received.
         raw (Element): The raw data to be converted.
         envelope_type (Type[Envelope]): The type of envelope to be constructed.
         data_type (Type[Payload]): The type of data to be constructed.
@@ -184,7 +193,7 @@ class Serializer:
         # First, attempt to extract the response from the raw data; if the key isn't found then we'll raise an error.
         # Otherwise, we'll attempt to construct the response from the raw data.
         if self._payload_key != raw.tag:
-            raise ValueError(f"Expected payload key '{self._payload_key}' not found in response")
+            raise InvalidContainerError(method, self._payload_key, raw.tag)
         cls: Response[E, P] = _create_response_payload_type(  # type: ignore[assignment]
             self._payload_key,
             envelope_type,  # type: ignore[arg-type]
@@ -195,12 +204,12 @@ class Serializer:
 
         # Next, attempt to extract the envelope and data from within the response
         resp.envelope, resp.envelope_validation, envelope_node = self._from_tree_envelope(
-            raw, envelope_type, for_report
+            method, raw, envelope_type, for_report
         )
 
         # Now, verify that the response doesn't contain an unexpected data type and then retrieve the payload data
         # from within the envelope
-        self._verify_tree_data_tag(envelope_node, data_type)
+        self._verify_tree_data_tag(method, envelope_node, data_type)
         resp.payload = self._from_tree_data(envelope_node.find(get_tag(data_type)), data_type)
 
         # Finally, attempt to extract the messages from within the payload
@@ -212,11 +221,12 @@ class Serializer:
         return resp
 
     def _from_tree_multi(
-        self, raw: Element, envelope_type: Type[E], data_type: Type[P], for_report: bool
+        self, method: str, raw: Element, envelope_type: Type[E], data_type: Type[P], for_report: bool
    ) -> MultiResponse[E, P]:
         """Convert the raw data to a multi-response object.
 
         Arguments:
+        method (str): The method for which the data was received.
         raw (Element): The raw data to be converted.
         envelope_type (Type[Envelope]): The type of envelope to be constructed.
         data_type (Type[Payload]): The type of data to be constructed.
@@ -227,7 +237,7 @@ class Serializer:
         # First, attempt to extract the response from the raw data; if the key isn't found then we'll raise an error.
         # Otherwise, we'll attempt to construct the response from the raw data.
         if self._payload_key != raw.tag:
-            raise ValueError(f"Expected payload key '{self._payload_key}' not found in response")
+            raise InvalidContainerError(method, self._payload_key, raw.tag)
         cls: MultiResponse[E, P] = _create_response_payload_type(  # type: ignore[assignment]
             self._payload_key,
             envelope_type,  # type: ignore[arg-type]
@@ -237,12 +247,14 @@ class Serializer:
         resp = cls.from_xml_tree(raw)  # type: ignore[arg-type]
 
         # Next, attempt to extract the envelope from the response
-        resp.envelope, resp.envelope_validation, env_node = self._from_tree_envelope(raw, envelope_type, for_report)
+        resp.envelope, resp.envelope_validation, env_node = self._from_tree_envelope(
+            method, raw, envelope_type, for_report
+        )
 
         # Now, verify that the response doesn't contain an unexpected data type and then retrieve the payload data
         # from within the envelope
         # NOTE: apparently, mypy doesn't know about setter-getter properties either...
-        self._verify_tree_data_tag(env_node, data_type)
+        self._verify_tree_data_tag(method, env_node, data_type)
         resp.payload = [
             self._from_tree_data(item, data_type) for item in env_node.findall(get_tag(data_type))  # type: ignore[misc]
         ]
@@ -256,11 +268,12 @@ class Serializer:
         return resp
 
     def _from_tree_envelope(
-        self, raw: Element, envelope_type: Type[E], for_report: bool
+        self, method: str, raw: Element, envelope_type: Type[E], for_report: bool
     ) -> Tuple[E, ResponseCommon, Element]:
         """Attempt to extract the envelope from within the response.
 
         Arguments:
+        method (str): The method for which the data was received.
         raw (Element): The raw data to be converted.
         envelope_type (Type[Envelope]): The type of envelope to be constructed.
         for_report (bool): If True, the data will be serialized for a report request.
@@ -274,7 +287,7 @@ class Serializer:
         envelope_tag = get_tag(envelope_type)
         envelope_node = raw if for_report else raw.find(envelope_tag)
         if envelope_node is None or envelope_node.tag != envelope_tag:
-            raise ValueError(f"Expected envelope type '{envelope_tag}' not found in response")
+            raise EnvelopeNodeNotFoundError(method, envelope_tag)
 
         # Next, create a new envelope type that contains the envelope type with the appropriate XML tag. We have to do
         # this because the envelope type doesn't include the ResponseCommon fields, and the tag doesn't match
@@ -288,10 +301,11 @@ class Serializer:
             envelope_node,
         )
 
-    def _verify_tree_data_tag(self, raw: Element, data_type: Type[P]) -> None:
+    def _verify_tree_data_tag(self, method: str, raw: Element, data_type: Type[P]) -> None:
         """Verify that no types other than the expected data type are present in the response.
 
         Arguments:
+        method (str): The method for which the data was received.
         raw (Element): The raw data to be converted.
         data_type (Type[Payload]): The type of data to be constructed.
 
@@ -299,8 +313,10 @@ class Serializer:
         ValueError: If the expected data type is not found in the response.
         """
         data_tags = set(node.tag for node in raw)
-        if not data_tags.issubset([data_type.__name__, data_type.__xml_tag__, "ProcessingStatistics", "Messages"]):
-            raise ValueError(f"Expected data type '{data_type.__name__}' not found in response")
+        if not data_tags.issubset(
+            [data_type.__name__, data_type.__xml_tag__, "ProcessingStatistics", "Messages", "StandingData"]
+        ):
+            raise DataNodeNotFoundError(method, data_type)
 
     def _from_tree_data(self, raw: Optional[Element], data_type: Type[P]) -> Optional[ResponseData[P]]:
         """Attempt to extract the data from within the payload.
@@ -1,11 +1,11 @@
 Metadata-Version: 2.1
 Name: mms-client
-Version: 1.9.3
+Version: 1.11.0
 Summary: API client for accessing the MMS
 Home-page: https://github.com/ElectroRoute-Japan/mms-client
 Author: Ryan Wood
 Author-email: ryan.wood@electroroute.co.jp
-Requires-Python: >=3.11,<4.0
+Requires-Python: >=3.12,<4.0
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Framework :: Pydantic :: 2
 Classifier: Framework :: Pytest
@@ -14,8 +14,8 @@ Classifier: License :: OSI Approved :: The Unlicense (Unlicense)
 Classifier: Natural Language :: English
 Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Classifier: Typing :: Typed
 Requires-Dist: backoff (>=2.2.1,<3.0.0)
@@ -80,6 +80,8 @@ This object represents the top-level XML element contained within the `MmsRespon
 ### Response & MultiResponse
 These objects contain the actual payload data and inherit from `BaseResponse`. These are what will actually be returned from the deserialization process. They also contain validation data for the top-level paylaod item(s). The difference between `Response` and `MultiResponse` is that the former contains a single item and the latter contains a list.
 
+Note that the `MultiResponse` object covers a special case where queries made to the MMS may return no items, in which case the response will be the request object itself. This is handled internally by the client, and the user will receive an empty list in such cases.
+
 ## Envelopes
 Not to be confused with the SOAP envelope, this envelope contains the method parameters used to send requests to the MMS server. For example, if you wanted to send a market-related request, this would take on the form of a `MarketQuery`, `MarketSubmit` or `MarketCancel` object. This is combined with the payload during the serialization process to produce the final XML payload before injecting it into the `MmsRequest`. During the deserialization process, this is extracted from the XML paylod on the `MmsResponse` object. Each of these should inherit from `mms_client.types.base.Envelope`.
 
@@ -219,12 +221,17 @@ This client is not complete. Currently, it supports the following endpoints:
 - MarketQuery_OfferQuery
 - MarketCancel_OfferCancel
 - MarketQuery_AwardResultsQuery
+- MarketQuery_SettlementResultsFileListQuery
+- MarketSubmit_BupSubmit
+- MarketQuery_BupQuery
 - RegistrationSubmit_Resource
 - RegistrationQuery_Resource
 - ReportCreateRequest
 - ReportListRequest
 - ReportDownloadRequestTrnID
 - BSP_ResourceList
+- MarketSubmit_RemainingReserveData
+- MarketQuery_RemainingReserveDataQuery
 
 We can add support for additional endpoints as time goes on, and independent contribution is, of course, welcome. However, support for attachments is currently limited because none of the endpoints we support currently require them. We have implemented attachment support up to the client level, but we haven't developed an architecture for submitting them through an endpoint yet.