futurehouse-client 0.3.19.dev129__py3-none-any.whl → 0.3.20.dev55__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,11 +1,22 @@
  from .clients.job_client import JobClient, JobNames
  from .clients.rest_client import RestClient as FutureHouseClient
- from .clients.rest_client import TaskResponse, TaskResponseVerbose
+ from .models.app import (
+     FinchTaskResponse,
+     PhoenixTaskResponse,
+     PQATaskResponse,
+     TaskRequest,
+     TaskResponse,
+     TaskResponseVerbose,
+ )

  __all__ = [
+     "FinchTaskResponse",
      "FutureHouseClient",
      "JobClient",
      "JobNames",
+     "PQATaskResponse",
+     "PhoenixTaskResponse",
+     "TaskRequest",
      "TaskResponse",
      "TaskResponseVerbose",
  ]
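With this change the task models are re-exported from the package root instead of from clients.rest_client, so downstream code can drop the deep import path. A minimal before/after sketch of the import site (illustrative only; no other import locations are shown in this diff):

    # 0.3.19: models were pulled from the REST client module
    # from futurehouse_client.clients.rest_client import TaskResponse, TaskResponseVerbose

    # 0.3.20: import from the package root, now backed by models.app
    from futurehouse_client import (
        PQATaskResponse,
        TaskRequest,
        TaskResponse,
        TaskResponseVerbose,
    )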
@@ -54,7 +54,11 @@ from futurehouse_client.models.app import (
      TaskResponse,
      TaskResponseVerbose,
  )
- from futurehouse_client.models.rest import ExecutionStatus
+ from futurehouse_client.models.rest import (
+     ExecutionStatus,
+     WorldModel,
+     WorldModelResponse,
+ )
  from futurehouse_client.utils.auth import RefreshingJWT
  from futurehouse_client.utils.general import gather_with_concurrency
  from futurehouse_client.utils.module_utils import (
@@ -100,6 +104,14 @@ class JobCreationError(RestClientError):
      """Raised when there's an error creating a job."""


+ class WorldModelFetchError(RestClientError):
+     """Raised when there's an error fetching a world model."""
+
+
+ class WorldModelCreationError(RestClientError):
+     """Raised when there's an error creating a world model."""
+
+
  class InvalidTaskDescriptionError(Exception):
      """Raised when the task description is invalid or empty."""

@@ -791,10 +803,9 @@ class RestClient:
          pickled_env = cloudpickle.dumps(config.functional_environment)
          encoded_pickle = base64.b64encode(pickled_env).decode("utf-8")
          files = []
+         ignore_parts = set(FILE_UPLOAD_IGNORE_PARTS) | set(config.ignore_dirs or [])
          for file_path in Path(config.path).rglob("*") if config.path else []:
-             if any(
-                 ignore in file_path.parts for ignore in FILE_UPLOAD_IGNORE_PARTS
-             ):
+             if any(ignore in file_path.parts for ignore in ignore_parts):
                  continue

              if file_path.is_file():
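A small sketch of what the new filter does. The concrete contents of FILE_UPLOAD_IGNORE_PARTS are not shown in this diff, so the values below are placeholders:

    from pathlib import Path

    FILE_UPLOAD_IGNORE_PARTS = {"__pycache__", ".git"}  # placeholder defaults
    ignore_dirs = ["notebooks"]                          # would come from config.ignore_dirs

    # Union of built-in and user-supplied directory names to skip
    ignore_parts = set(FILE_UPLOAD_IGNORE_PARTS) | set(ignore_dirs or [])

    path = Path("my_job/notebooks/scratch.ipynb")
    skip = any(ignore in path.parts for ignore in ignore_parts)  # True: "notebooks" is a path part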
@@ -1053,24 +1064,11 @@ class RestClient:
          status_url = None

          try:
-             # Upload all chunks except the last one in parallel
-             if total_chunks > 1:
-                 self._upload_chunks_parallel(
-                     job_name,
-                     file_path,
-                     file_name,
-                     upload_id,
-                     total_chunks - 1,
-                     total_chunks,
-                 )
-
-             # Upload the last chunk separately (handles assembly)
-             status_url = self._upload_final_chunk(
+             status_url = self._upload_chunks_parallel(
                  job_name,
                  file_path,
                  file_name,
                  upload_id,
-                 total_chunks - 1,
                  total_chunks,
              )

@@ -1086,149 +1084,74 @@ class RestClient:
          file_path: Path,
          file_name: str,
          upload_id: str,
-         num_regular_chunks: int,
          total_chunks: int,
-     ) -> None:
-         """Upload chunks in parallel batches.
+     ) -> str | None:
+         """Upload all chunks in parallel batches, including the final chunk.

          Args:
              job_name: The key of the crow to upload to.
              file_path: The path to the file to upload.
              file_name: The name to use for the file.
              upload_id: The upload ID to use.
-             num_regular_chunks: Number of regular chunks (excluding final chunk).
              total_chunks: Total number of chunks.

-         Raises:
-             FileUploadError: If there's an error uploading any chunk.
-         """
-         if num_regular_chunks <= 0:
-             return
-
-         # Process chunks in batches
-         for batch_start in range(0, num_regular_chunks, self.MAX_CONCURRENT_CHUNKS):
-             batch_end = min(
-                 batch_start + self.MAX_CONCURRENT_CHUNKS, num_regular_chunks
-             )
-
-             # Upload chunks in this batch concurrently
-             with ThreadPoolExecutor(max_workers=self.MAX_CONCURRENT_CHUNKS) as executor:
-                 futures = {
-                     executor.submit(
-                         self._upload_single_chunk,
-                         job_name,
-                         file_path,
-                         file_name,
-                         upload_id,
-                         chunk_index,
-                         total_chunks,
-                     ): chunk_index
-                     for chunk_index in range(batch_start, batch_end)
-                 }
-
-                 for future in as_completed(futures):
-                     chunk_index = futures[future]
-                     try:
-                         future.result()
-                         logger.debug(
-                             f"Uploaded chunk {chunk_index + 1}/{total_chunks} of {file_name}"
-                         )
-                     except Exception as e:
-                         logger.error(f"Error uploading chunk {chunk_index}: {e}")
-                         raise FileUploadError(
-                             f"Error uploading chunk {chunk_index} of {file_name}: {e}"
-                         ) from e
-
-     def _upload_single_chunk(
-         self,
-         job_name: str,
-         file_path: Path,
-         file_name: str,
-         upload_id: str,
-         chunk_index: int,
-         total_chunks: int,
-     ) -> None:
-         """Upload a single chunk.
-
-         Args:
-             job_name: The key of the crow to upload to.
-             file_path: The path to the file to upload.
-             file_name: The name to use for the file.
-             upload_id: The upload ID to use.
-             chunk_index: The index of this chunk.
-             total_chunks: Total number of chunks.
+         Returns:
+             The status URL from the final chunk response, or None if no chunks.

          Raises:
-             Exception: If there's an error uploading the chunk.
+             FileUploadError: If there's an error uploading any chunk.
          """
-         with open(file_path, "rb") as f:
-             # Read the chunk from the file
-             f.seek(chunk_index * self.CHUNK_SIZE)
-             chunk_data = f.read(self.CHUNK_SIZE)
+         if total_chunks <= 0:
+             return None

-             # Prepare and send the chunk
-             with tempfile.NamedTemporaryFile() as temp_file:
-                 temp_file.write(chunk_data)
-                 temp_file.flush()
+         if total_chunks > 1:
+             num_regular_chunks = total_chunks - 1
+             for batch_start in range(0, num_regular_chunks, self.MAX_CONCURRENT_CHUNKS):
+                 batch_end = min(
+                     batch_start + self.MAX_CONCURRENT_CHUNKS, num_regular_chunks
+                 )

-                 # Create form data
-                 with open(temp_file.name, "rb") as chunk_file_obj:
-                     files = {
-                         "chunk": (
+                 # Upload chunks in this batch concurrently
+                 with ThreadPoolExecutor(
+                     max_workers=self.MAX_CONCURRENT_CHUNKS
+                 ) as executor:
+                     futures = {
+                         executor.submit(
+                             self._upload_single_chunk,
+                             job_name,
+                             file_path,
                              file_name,
-                             chunk_file_obj,
-                             "application/octet-stream",
-                         )
+                             upload_id,
+                             chunk_index,
+                             total_chunks,
+                         ): chunk_index
+                         for chunk_index in range(batch_start, batch_end)
                      }
-                     data = {
-                         "file_name": file_name,
-                         "chunk_index": chunk_index,
-                         "total_chunks": total_chunks,
-                         "upload_id": upload_id,
-                     }
-
-                     # Send the chunk
-                     response = self.multipart_client.post(
-                         f"/v0.1/crows/{job_name}/upload-chunk",
-                         files=files,
-                         data=data,
-                     )
-                     response.raise_for_status()
-
-     def _upload_final_chunk(
-         self,
-         job_name: str,
-         file_path: Path,
-         file_name: str,
-         upload_id: str,
-         chunk_index: int,
-         total_chunks: int,
-     ) -> str | None:
-         """Upload the final chunk with retry logic for missing chunks.
-
-         Args:
-             job_name: The key of the crow to upload to.
-             file_path: The path to the file to upload.
-             file_name: The name to use for the file.
-             upload_id: The upload ID to use.
-             chunk_index: The index of the final chunk.
-             total_chunks: Total number of chunks.

-         Returns:
-             The status URL from the response.
-
-         Raises:
-             FileUploadError: If there's an error uploading the final chunk.
-         """
+                     for future in as_completed(futures):
+                         chunk_index = futures[future]
+                         try:
+                             future.result()
+                             logger.debug(
+                                 f"Uploaded chunk {chunk_index + 1}/{total_chunks} of {file_name}"
+                             )
+                         except Exception as e:
+                             logger.error(f"Error uploading chunk {chunk_index}: {e}")
+                             raise FileUploadError(
+                                 f"Error uploading chunk {chunk_index} of {file_name}: {e}"
+                             ) from e
+
+         # Upload the final chunk with retry logic
+         final_chunk_index = total_chunks - 1
          retries = 0
          max_retries = 3
-         retry_delay = 2.0  # seconds
+         retry_delay = 2.0

          while retries < max_retries:
              try:
                  with open(file_path, "rb") as f:
                      # Read the final chunk from the file
-                     f.seek(chunk_index * self.CHUNK_SIZE)
+                     f.seek(final_chunk_index * self.CHUNK_SIZE)
                      chunk_data = f.read(self.CHUNK_SIZE)

                      # Prepare and send the chunk
@@ -1247,7 +1170,7 @@ class RestClient:
                              }
                              data = {
                                  "file_name": file_name,
-                                 "chunk_index": chunk_index,
+                                 "chunk_index": final_chunk_index,
                                  "total_chunks": total_chunks,
                                  "upload_id": upload_id,
                              }
@@ -1274,7 +1197,7 @@ class RestClient:
                              status_url = response_data.get("status_url")

                              logger.debug(
-                                 f"Uploaded final chunk {chunk_index + 1}/{total_chunks} of {file_name}"
+                                 f"Uploaded final chunk {final_chunk_index + 1}/{total_chunks} of {file_name}"
                              )
                              return status_url

@@ -1293,6 +1216,62 @@ class RestClient:
              f"Failed to upload final chunk of {file_name} after {max_retries} retries"
          )

+     def _upload_single_chunk(
+         self,
+         job_name: str,
+         file_path: Path,
+         file_name: str,
+         upload_id: str,
+         chunk_index: int,
+         total_chunks: int,
+     ) -> None:
+         """Upload a single chunk.
+
+         Args:
+             job_name: The key of the crow to upload to.
+             file_path: The path to the file to upload.
+             file_name: The name to use for the file.
+             upload_id: The upload ID to use.
+             chunk_index: The index of this chunk.
+             total_chunks: Total number of chunks.
+
+         Raises:
+             Exception: If there's an error uploading the chunk.
+         """
+         with open(file_path, "rb") as f:
+             # Read the chunk from the file
+             f.seek(chunk_index * self.CHUNK_SIZE)
+             chunk_data = f.read(self.CHUNK_SIZE)
+
+             # Prepare and send the chunk
+             with tempfile.NamedTemporaryFile() as temp_file:
+                 temp_file.write(chunk_data)
+                 temp_file.flush()
+
+                 # Create form data
+                 with open(temp_file.name, "rb") as chunk_file_obj:
+                     files = {
+                         "chunk": (
+                             file_name,
+                             chunk_file_obj,
+                             "application/octet-stream",
+                         )
+                     }
+                     data = {
+                         "file_name": file_name,
+                         "chunk_index": chunk_index,
+                         "total_chunks": total_chunks,
+                         "upload_id": upload_id,
+                     }
+
+                     # Send the chunk
+                     response = self.multipart_client.post(
+                         f"/v0.1/crows/{job_name}/upload-chunk",
+                         files=files,
+                         data=data,
+                     )
+                     response.raise_for_status()
+
      @retry(
          stop=stop_after_attempt(MAX_RETRY_ATTEMPTS),
          wait=wait_exponential(multiplier=RETRY_MULTIPLIER, max=MAX_RETRY_WAIT),
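Taken together, these hunks fold the old _upload_final_chunk into _upload_chunks_parallel: regular chunks go up in parallel batches of MAX_CONCURRENT_CHUNKS, and the final chunk is always sent last (with retries) because that request triggers assembly on the server and returns the status URL. A rough sketch of the chunk arithmetic, assuming total_chunks comes from the usual ceil division by CHUNK_SIZE (that computation is outside this diff, and both constants below are placeholders for the real class attributes):

    import math

    CHUNK_SIZE = 16 * 1024 * 1024   # placeholder for RestClient.CHUNK_SIZE
    MAX_CONCURRENT_CHUNKS = 4       # placeholder for RestClient.MAX_CONCURRENT_CHUNKS

    file_size = 100 * 1024 * 1024
    total_chunks = math.ceil(file_size / CHUNK_SIZE)  # 7 chunks for a 100 MiB file
    num_regular_chunks = total_chunks - 1             # uploaded in parallel batches
    final_chunk_index = total_chunks - 1              # uploaded last; its response carries status_url

    batches = [
        range(start, min(start + MAX_CONCURRENT_CHUNKS, num_regular_chunks))
        for start in range(0, num_regular_chunks, MAX_CONCURRENT_CHUNKS)
    ]
    # batches == [range(0, 4), range(4, 6)]; chunk 6 is the final chunk, sent on its own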
@@ -1416,6 +1395,170 @@ class RestClient:
                  destination_path.unlink()  # Clean up partial file
              raise RestClientError(f"Error downloading file: {e!s}") from e

+     @retry(
+         stop=stop_after_attempt(MAX_RETRY_ATTEMPTS),
+         wait=wait_exponential(multiplier=RETRY_MULTIPLIER, max=MAX_RETRY_WAIT),
+         retry=retry_if_connection_error,
+     )
+     def get_world_model(
+         self, world_model_id: UUID | None = None, name: str | None = None
+     ) -> WorldModelResponse:
+         """Get a world model snapshot by its ID or name.
+
+         Args:
+             world_model_id: The unique ID of the world model snapshot.
+             name: The name of the world model to get the latest version of.
+
+         Returns:
+             The requested world model snapshot.
+
+         Raises:
+             ValueError: If neither or both `world_model_id` and `name` are provided.
+             WorldModelFetchError: If the API call fails or the model is not found.
+         """
+         if not (world_model_id or name) or (world_model_id and name):
+             raise ValueError("Provide either 'world_model_id' or 'name', but not both.")
+
+         params = {
+             "id": str(world_model_id) if world_model_id else None,
+             "name": name,
+         }
+         # Filter out None values before making the request
+         params = {k: v for k, v in params.items() if v is not None}
+
+         try:
+             response = self.client.get("/v0.1/world-model", params=params)
+             response.raise_for_status()
+             return WorldModelResponse.model_validate(response.json())
+         except HTTPStatusError as e:
+             if e.response.status_code == codes.NOT_FOUND:
+                 raise WorldModelFetchError(
+                     "World model not found with the specified identifier."
+                 ) from e
+             raise WorldModelFetchError(
+                 f"Error fetching world model: {e.response.status_code} - {e.response.text}"
+             ) from e
+         except Exception as e:
+             raise WorldModelFetchError(f"An unexpected error occurred: {e}") from e
+
+     @retry(
+         stop=stop_after_attempt(MAX_RETRY_ATTEMPTS),
+         wait=wait_exponential(multiplier=RETRY_MULTIPLIER, max=MAX_RETRY_WAIT),
+         retry=retry_if_connection_error,
+     )
+     async def aget_world_model(
+         self, world_model_id: UUID | None = None, name: str | None = None
+     ) -> WorldModelResponse:
+         """Asynchronously get a world model snapshot by its ID or name.
+
+         Args:
+             world_model_id: The unique ID of the world model snapshot.
+             name: The name of the world model to get the latest version of.
+
+         Returns:
+             The requested world model snapshot.
+
+         Raises:
+             ValueError: If neither or both `world_model_id` and `name` are provided.
+             WorldModelFetchError: If the API call fails or the model is not found.
+         """
+         if not (world_model_id or name) or (world_model_id and name):
+             raise ValueError("Provide either 'world_model_id' or 'name', but not both.")
+
+         params = {
+             "id": str(world_model_id) if world_model_id else None,
+             "name": name,
+         }
+         params = {k: v for k, v in params.items() if v is not None}
+
+         try:
+             response = await self.async_client.get("/v0.1/world-model", params=params)
+             response.raise_for_status()
+             return WorldModelResponse.model_validate(response.json())
+         except HTTPStatusError as e:
+             if e.response.status_code == codes.NOT_FOUND:
+                 raise WorldModelFetchError(
+                     "World model not found with the specified identifier."
+                 ) from e
+             raise WorldModelFetchError(
+                 f"Error fetching world model: {e.response.status_code} - {e.response.text}"
+             ) from e
+         except Exception as e:
+             raise WorldModelFetchError(f"An unexpected error occurred: {e}") from e
+
+     @retry(
+         stop=stop_after_attempt(MAX_RETRY_ATTEMPTS),
+         wait=wait_exponential(multiplier=RETRY_MULTIPLIER, max=MAX_RETRY_WAIT),
+         retry=retry_if_connection_error,
+     )
+     def create_world_model(self, payload: WorldModel) -> UUID:
+         """Create a new, immutable world model snapshot.
+
+         Args:
+             payload: An instance of WorldModel with the snapshot's data.
+
+         Returns:
+             The UUID of the newly created world model.
+
+         Raises:
+             WorldModelCreationError: If the API call fails.
+         """
+         try:
+             response = self.client.post(
+                 "/v0.1/world-models", json=payload.model_dump(mode="json")
+             )
+             response.raise_for_status()
+             # The server returns a raw UUID string in the body
+             return UUID(response.json())
+         except HTTPStatusError as e:
+             if e.response.status_code == codes.BAD_REQUEST:
+                 raise WorldModelCreationError(
+                     f"Invalid payload for world model creation: {e.response.text}"
+                 ) from e
+             raise WorldModelCreationError(
+                 f"Error creating world model: {e.response.status_code} - {e.response.text}"
+             ) from e
+         except Exception as e:
+             raise WorldModelCreationError(
+                 f"An unexpected error occurred during world model creation: {e}"
+             ) from e
+
+     @retry(
+         stop=stop_after_attempt(MAX_RETRY_ATTEMPTS),
+         wait=wait_exponential(multiplier=RETRY_MULTIPLIER, max=MAX_RETRY_WAIT),
+         retry=retry_if_connection_error,
+     )
+     async def acreate_world_model(self, payload: WorldModel) -> UUID:
+         """Asynchronously create a new, immutable world model snapshot.
+
+         Args:
+             payload: An instance of WorldModel with the snapshot's data.
+
+         Returns:
+             The UUID of the newly created world model.
+
+         Raises:
+             WorldModelCreationError: If the API call fails.
+         """
+         try:
+             response = await self.async_client.post(
+                 "/v0.1/world-models", json=payload.model_dump(mode="json")
+             )
+             response.raise_for_status()
+             return UUID(response.json())
+         except HTTPStatusError as e:
+             if e.response.status_code == codes.BAD_REQUEST:
+                 raise WorldModelCreationError(
+                     f"Invalid payload for world model creation: {e.response.text}"
+                 ) from e
+             raise WorldModelCreationError(
+                 f"Error creating world model: {e.response.status_code} - {e.response.text}"
+             ) from e
+         except Exception as e:
+             raise WorldModelCreationError(
+                 f"An unexpected error occurred during world model creation: {e}"
+             ) from e
+

  def get_installed_packages() -> dict[str, str]:
      """Returns a dictionary of installed packages and their versions."""
@@ -365,6 +365,12 @@ class JobDeploymentConfig(BaseModel):
          "Can be None if we are deploying a functional environment (through the functional_environment parameter).",
      )

+     ignore_dirs: list[str] | None = Field(
+         default=None,
+         description="A list of directories to ignore when deploying the job. "
+         "This is a list of directories relative to the path parameter.",
+     )
+
      name: str | None = Field(
          default=None,
          description="The name of the crow job. If None, the crow job will be "
@@ -604,6 +610,10 @@ class RuntimeConfig(BaseModel):
          default=None,
          description="Optional job identifier for a continued job",
      )
+     world_model_id: UUID | None = Field(
+         default=None,
+         description="Optional world model identifier for the task",
+     )

      @field_validator("agent")
      @classmethod
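A task can therefore pin the world model snapshot it should run against. A sketch, assuming model_id came from create_world_model and leaving the other RuntimeConfig fields at their defaults:

    runtime = RuntimeConfig(world_model_id=model_id)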
@@ -1,4 +1,6 @@
+ from datetime import datetime
  from enum import StrEnum, auto
+ from uuid import UUID

  from pydantic import BaseModel, JsonValue

@@ -34,3 +36,37 @@ class ExecutionStatus(StrEnum):
      @classmethod
      def terminal_states(cls) -> set["ExecutionStatus"]:
          return {cls.SUCCESS, cls.FAIL, cls.CANCELLED}
+
+
+ class WorldModel(BaseModel):
+     """
+     Payload for creating a new world model snapshot.
+
+     This model is sent to the API.
+     """
+
+     content: str
+     prior: UUID | None = None
+     name: str | None = None
+     description: str | None = None
+     trajectory_id: UUID | None = None
+     model_metadata: JsonValue | None = None
+
+
+ class WorldModelResponse(BaseModel):
+     """
+     Response model for a world model snapshot.
+
+     This model is received from the API.
+     """
+
+     id: UUID
+     prior: UUID | None
+     name: str
+     description: str | None
+     content: str
+     trajectory_id: UUID | None
+     email: str | None
+     model_metadata: JsonValue | None
+     enabled: bool
+     created_at: datetime
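Because snapshots are immutable, a revision is naturally expressed as a new WorldModel whose prior field points at the snapshot it supersedes; that reading of prior is an inference from the field name, not spelled out in this diff. A sketch, reusing the client from the earlier example:

    first_id = client.create_world_model(
        WorldModel(content="v1 of the shared context", name="lab-notes")
    )
    revised = WorldModel(
        content="v2 with corrected reagent volumes",
        name="lab-notes",
        prior=first_id,  # link back to the snapshot this one replaces (assumed semantics)
    )
    revised_id = client.create_world_model(revised)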
@@ -0,0 +1,21 @@
+ # file generated by setuptools-scm
+ # don't change, don't track in version control
+
+ __all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
+
+ TYPE_CHECKING = False
+ if TYPE_CHECKING:
+     from typing import Tuple
+     from typing import Union
+
+     VERSION_TUPLE = Tuple[Union[int, str], ...]
+ else:
+     VERSION_TUPLE = object
+
+ version: str
+ __version__: str
+ __version_tuple__: VERSION_TUPLE
+ version_tuple: VERSION_TUPLE
+
+ __version__ = version = '0.3.20.dev55'
+ __version_tuple__ = version_tuple = (0, 3, 20, 'dev55')