vibetuner 2.18.1__py3-none-any.whl → 2.30.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of vibetuner might be problematic.

@@ -6,18 +6,13 @@ To extend blob functionality, create wrapper services in the parent services dir
 
 import mimetypes
 from pathlib import Path
-from typing import Literal
 
 import aioboto3
-from aiobotocore.config import AioConfig
 
 from vibetuner.config import settings
 from vibetuner.models import BlobModel
 from vibetuner.models.blob import BlobStatus
-
-
-S3_SERVICE_NAME: Literal["s3"] = "s3"
-DEFAULT_CONTENT_TYPE: str = "application/octet-stream"
+from vibetuner.services.s3_storage import DEFAULT_CONTENT_TYPE, S3StorageService
 
 
 class BlobService:
@@ -34,25 +29,22 @@ class BlobService:
             raise ValueError(
                 "R2 bucket endpoint URL, access key, and secret key must be set in settings."
             )
-        self.session = session or aioboto3.Session(
-            aws_access_key_id=settings.r2_access_key.get_secret_value(),
-            aws_secret_access_key=settings.r2_secret_key.get_secret_value(),
-            region_name=settings.r2_default_region,
-        )
-        self.endpoint_url = str(settings.r2_bucket_endpoint_url)
-        self.config = AioConfig(
-            request_checksum_calculation="when_required",
-            response_checksum_validation="when_required",
-        )
 
-        if not default_bucket:
-            if settings.r2_default_bucket_name is None:
-                raise ValueError(
-                    "Default bucket name must be provided either in settings or as an argument."
-                )
-            self.default_bucket = settings.r2_default_bucket_name
-        else:
-            self.default_bucket = default_bucket
+        bucket = default_bucket or settings.r2_default_bucket_name
+        if bucket is None:
+            raise ValueError(
+                "Default bucket name must be provided either in settings or as an argument."
+            )
+
+        self.storage = S3StorageService(
+            endpoint_url=str(settings.r2_bucket_endpoint_url),
+            access_key=settings.r2_access_key.get_secret_value(),
+            secret_key=settings.r2_secret_key.get_secret_value(),
+            region=settings.r2_default_region,
+            default_bucket=bucket,
+            session=session,
+        )
+        self.default_bucket = bucket
 
     async def put_object(
         self,
@@ -80,17 +72,12 @@ class BlobService:
             raise ValueError("Blob ID must be set before uploading to R2.")
 
         try:
-            async with self.session.client(
-                service_name=S3_SERVICE_NAME,
-                endpoint_url=self.endpoint_url,
-                config=self.config,
-            ) as s3_client:
-                await s3_client.put_object(
-                    Bucket=bucket,
-                    Key=blob.full_path,
-                    Body=body,
-                    ContentType=content_type,
-                )
+            await self.storage.put_object(
+                key=blob.full_path,
+                body=body,
+                content_type=content_type,
+                bucket=bucket,
+            )
             blob.status = BlobStatus.UPLOADED
         except Exception:
             blob.status = BlobStatus.ERROR
@@ -144,16 +131,10 @@ class BlobService:
         if not blob:
             raise ValueError(f"Blob not found: {key}")
 
-        async with self.session.client(
-            service_name=S3_SERVICE_NAME,
-            endpoint_url=self.endpoint_url,
-            config=self.config,
-        ) as s3_client:
-            response = await s3_client.get_object(
-                Bucket=blob.bucket,
-                Key=blob.full_path,
-            )
-            return await response["Body"].read()
+        return await self.storage.get_object(
+            key=blob.full_path,
+            bucket=blob.bucket,
+        )
 
     async def delete_object(self, key: str) -> None:
         """Delete an object from the R2 bucket"""
@@ -172,4 +153,9 @@ class BlobService:
         if not blob:
             return False
 
+        if check_bucket:
+            return await self.storage.object_exists(
+                key=blob.full_path, bucket=blob.bucket
+            )
+
         return True
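
Taken together, these BlobService hunks replace the inline aioboto3 session, AioConfig, and bucket-resolution logic with delegation to the new S3StorageService (the module is added below). A minimal usage sketch under that reading; the endpoint, credentials, and bucket name are placeholders, not values from this release:

```python
# Hedged sketch: exercises only methods visible in this diff
# (put_object, object_exists). All connection values are placeholders.
import asyncio

from vibetuner.services.s3_storage import S3StorageService


async def main() -> None:
    storage = S3StorageService(
        endpoint_url="https://<account>.r2.cloudflarestorage.com",  # placeholder
        access_key="<access-key>",  # placeholder
        secret_key="<secret-key>",  # placeholder
        region="auto",
        default_bucket="blobs",  # placeholder
    )
    await storage.put_object(key="docs/hello.txt", body=b"hello")
    assert await storage.object_exists(key="docs/hello.txt")


asyncio.run(main())
```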
@@ -6,7 +6,7 @@ To extend email functionality, create wrapper services in the parent services di
 
 from typing import Literal
 
-import boto3
+import aioboto3
 
 from vibetuner.config import settings
 
@@ -17,11 +17,9 @@ SES_SERVICE_NAME: Literal["ses"] = "ses"
 class SESEmailService:
     def __init__(
         self,
-        ses_client=None,
         from_email: str | None = None,
     ) -> None:
-        self.ses_client = ses_client or boto3.client(
-            service_name=SES_SERVICE_NAME,
+        self.session = aioboto3.Session(
             region_name=settings.project.aws_default_region,
             aws_access_key_id=settings.aws_access_key_id.get_secret_value()
             if settings.aws_access_key_id
@@ -36,15 +34,16 @@ class SESEmailService:
         self, to_address: str, subject: str, html_body: str, text_body: str
     ):
         """Send email using Amazon SES"""
-        response = self.ses_client.send_email(
-            Source=self.from_email,
-            Destination={"ToAddresses": [to_address]},
-            Message={
-                "Subject": {"Data": subject, "Charset": "UTF-8"},
-                "Body": {
-                    "Html": {"Data": html_body, "Charset": "UTF-8"},
-                    "Text": {"Data": text_body, "Charset": "UTF-8"},
+        async with self.session.client(SES_SERVICE_NAME) as ses_client:
+            response = await ses_client.send_email(
+                Source=self.from_email,
+                Destination={"ToAddresses": [to_address]},
+                Message={
+                    "Subject": {"Data": subject, "Charset": "UTF-8"},
+                    "Body": {
+                        "Html": {"Data": html_body, "Charset": "UTF-8"},
+                        "Text": {"Data": text_body, "Charset": "UTF-8"},
+                    },
                 },
-            },
-        )
-        return response
+            )
+            return response
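
With this change, send_email becomes a coroutine that opens a fresh SES client per call, so callers must await it from a running event loop. The ses_client injection point is also gone; the service now builds its own aioboto3.Session from settings. A hedged sketch of the new calling convention; the import path and addresses are illustrative, and only the SESEmailService signatures come from this diff:

```python
# Hedged sketch: assumes SESEmailService lives in a services.email module
# (hypothetical path); addresses are placeholders.
import asyncio

from vibetuner.services.email import SESEmailService  # hypothetical import path


async def main() -> None:
    service = SESEmailService(from_email="noreply@example.com")  # placeholder
    await service.send_email(
        to_address="user@example.com",  # placeholder
        subject="Welcome",
        html_body="<p>Hello!</p>",
        text_body="Hello!",
    )


asyncio.run(main())
```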
@@ -0,0 +1,454 @@
+"""ABOUTME: S3-compatible storage service for managing buckets and objects.
+ABOUTME: Provides async operations for R2, MinIO, and other S3-compatible storage providers.
+"""
+
+from typing import Any, Literal
+
+import aioboto3
+from aiobotocore.config import AioConfig
+from botocore.exceptions import ClientError
+
+
+S3_SERVICE_NAME: Literal["s3"] = "s3"
+DEFAULT_CONTENT_TYPE: str = "application/octet-stream"
+
+
+class S3StorageService:
+    """Async S3-compatible storage service for bucket and object operations.
+
+    This service provides a clean interface to S3-compatible storage providers
+    (AWS S3, Cloudflare R2, MinIO, etc.) without any database dependencies.
+
+    All operations are async and use aioboto3 for efficient I/O.
+    """
+
+    def __init__(
+        self,
+        endpoint_url: str,
+        access_key: str,
+        secret_key: str,
+        region: str = "auto",
+        default_bucket: str | None = None,
+        session: aioboto3.Session | None = None,
+    ) -> None:
+        """Initialize S3 storage service with explicit configuration.
+
+        Args:
+            endpoint_url: S3-compatible endpoint URL (e.g., "https://xxx.r2.cloudflarestorage.com")
+            access_key: Access key ID for authentication
+            secret_key: Secret access key for authentication
+            region: AWS region (default "auto" for R2/MinIO)
+            default_bucket: Optional default bucket for operations
+            session: Optional custom aioboto3 session
+        """
+        self.endpoint_url = endpoint_url
+        self.default_bucket = default_bucket
+        self.session = session or aioboto3.Session(
+            aws_access_key_id=access_key,
+            aws_secret_access_key=secret_key,
+            region_name=region,
+        )
+        self.config = AioConfig(
+            request_checksum_calculation="when_required",
+            response_checksum_validation="when_required",
+        )
+
+    def _get_bucket(self, bucket: str | None) -> str:
+        """Get bucket name, using default if not specified.
+
+        Args:
+            bucket: Optional bucket name
+
+        Returns:
+            Bucket name to use
+
+        Raises:
+            ValueError: If no bucket specified and no default bucket set
+        """
+        if bucket is None:
+            if self.default_bucket is None:
+                raise ValueError(
+                    "No bucket specified and no default bucket configured. "
+                    "Provide bucket parameter or set default_bucket during initialization."
+                )
+            return self.default_bucket
+        return bucket
+
+    # =========================================================================
+    # Object Operations
+    # =========================================================================
+
+    async def put_object(
+        self,
+        key: str,
+        body: bytes,
+        content_type: str = DEFAULT_CONTENT_TYPE,
+        bucket: str | None = None,
+        metadata: dict[str, str] | None = None,
+    ) -> None:
+        """Upload an object to S3-compatible storage.
+
+        Args:
+            key: Object key (path) in the bucket
+            body: Raw bytes to upload
+            content_type: MIME type of the object
+            bucket: Bucket name (uses default_bucket if None)
+            metadata: Optional custom metadata dict
+        """
+        bucket_name = self._get_bucket(bucket)
+
+        async with self.session.client(
+            service_name=S3_SERVICE_NAME,
+            endpoint_url=self.endpoint_url,
+            config=self.config,
+        ) as s3_client:
+            put_params: dict[str, Any] = {
+                "Bucket": bucket_name,
+                "Key": key,
+                "Body": body,
+                "ContentType": content_type,
+            }
+            if metadata:
+                put_params["Metadata"] = metadata
+
+            await s3_client.put_object(**put_params)
+
+    async def get_object(self, key: str, bucket: str | None = None) -> bytes:
+        """Retrieve an object from S3-compatible storage.
+
+        Args:
+            key: Object key (path) in the bucket
+            bucket: Bucket name (uses default_bucket if None)
+
+        Returns:
+            Raw bytes of the object
+
+        Raises:
+            ClientError: If object doesn't exist or other S3 error
+        """
+        bucket_name = self._get_bucket(bucket)
+
+        async with self.session.client(
+            service_name=S3_SERVICE_NAME,
+            endpoint_url=self.endpoint_url,
+            config=self.config,
+        ) as s3_client:
+            response = await s3_client.get_object(
+                Bucket=bucket_name,
+                Key=key,
+            )
+            return await response["Body"].read()
+
+    async def delete_object(self, key: str, bucket: str | None = None) -> None:
+        """Delete an object from S3-compatible storage.
+
+        Args:
+            key: Object key (path) in the bucket
+            bucket: Bucket name (uses default_bucket if None)
+        """
+        bucket_name = self._get_bucket(bucket)
+
+        async with self.session.client(
+            service_name=S3_SERVICE_NAME,
+            endpoint_url=self.endpoint_url,
+            config=self.config,
+        ) as s3_client:
+            await s3_client.delete_object(
+                Bucket=bucket_name,
+                Key=key,
+            )
+
+    async def object_exists(self, key: str, bucket: str | None = None) -> bool:
+        """Check if an object exists in S3-compatible storage.
+
+        Args:
+            key: Object key (path) in the bucket
+            bucket: Bucket name (uses default_bucket if None)
+
+        Returns:
+            True if object exists, False otherwise
+        """
+        bucket_name = self._get_bucket(bucket)
+
+        try:
+            async with self.session.client(
+                service_name=S3_SERVICE_NAME,
+                endpoint_url=self.endpoint_url,
+                config=self.config,
+            ) as s3_client:
+                await s3_client.head_object(
+                    Bucket=bucket_name,
+                    Key=key,
+                )
+                return True
+        except ClientError as e:
+            error_code = e.response.get("Error", {}).get("Code", "")
+            if error_code == "404":
+                return False
+            raise
+
+    async def list_objects(
+        self,
+        prefix: str | None = None,
+        bucket: str | None = None,
+        max_keys: int = 1000,
+    ) -> list[dict[str, Any]]:
+        """List objects in a bucket with optional prefix filter.
+
+        Args:
+            prefix: Optional prefix to filter objects
+            bucket: Bucket name (uses default_bucket if None)
+            max_keys: Maximum number of keys to return (default 1000)
+
+        Returns:
+            List of object metadata dicts with keys: key, size, last_modified, etag
+        """
+        bucket_name = self._get_bucket(bucket)
+
+        async with self.session.client(
+            service_name=S3_SERVICE_NAME,
+            endpoint_url=self.endpoint_url,
+            config=self.config,
+        ) as s3_client:
+            list_params: dict[str, Any] = {
+                "Bucket": bucket_name,
+                "MaxKeys": max_keys,
+            }
+            if prefix:
+                list_params["Prefix"] = prefix
+
+            response = await s3_client.list_objects_v2(**list_params)
+
+            if "Contents" not in response:
+                return []
+
+            return [
+                {
+                    "key": obj.get("Key", ""),
+                    "size": obj.get("Size", 0),
+                    "last_modified": obj.get("LastModified"),
+                    "etag": obj.get("ETag", "").strip('"'),
+                }
+                for obj in response["Contents"]
+            ]
+
+    async def get_object_metadata(
+        self, key: str, bucket: str | None = None
+    ) -> dict[str, Any]:
+        """Get metadata for an object without downloading it.
+
+        Args:
+            key: Object key (path) in the bucket
+            bucket: Bucket name (uses default_bucket if None)
+
+        Returns:
+            Metadata dict with keys: content_type, size, last_modified, etag, metadata
+        """
+        bucket_name = self._get_bucket(bucket)
+
+        async with self.session.client(
+            service_name=S3_SERVICE_NAME,
+            endpoint_url=self.endpoint_url,
+            config=self.config,
+        ) as s3_client:
+            response = await s3_client.head_object(
+                Bucket=bucket_name,
+                Key=key,
+            )
+
+            return {
+                "content_type": response.get("ContentType"),
+                "size": response.get("ContentLength"),
+                "last_modified": response.get("LastModified"),
+                "etag": response.get("ETag", "").strip('"'),
+                "metadata": response.get("Metadata", {}),
+            }
+
+    # =========================================================================
+    # Bucket Operations
+    # =========================================================================
+
+    async def list_buckets(self) -> list[dict[str, Any]]:
+        """List all buckets accessible with current credentials.
+
+        Returns:
+            List of bucket metadata dicts with keys: name, creation_date
+        """
+        async with self.session.client(
+            service_name=S3_SERVICE_NAME,
+            endpoint_url=self.endpoint_url,
+            config=self.config,
+        ) as s3_client:
+            response = await s3_client.list_buckets()
+
+            return [
+                {
+                    "name": bucket.get("Name", ""),
+                    "creation_date": bucket.get("CreationDate"),
+                }
+                for bucket in response.get("Buckets", [])
+            ]
+
+    async def create_bucket(self, bucket: str, region: str | None = None) -> None:
+        """Create a new bucket.
+
+        Args:
+            bucket: Name of the bucket to create
+            region: Optional region (uses session default if None)
+        """
+        async with self.session.client(
+            service_name=S3_SERVICE_NAME,
+            endpoint_url=self.endpoint_url,
+            config=self.config,
+        ) as s3_client:
+            create_params: dict[str, Any] = {"Bucket": bucket}
+
+            # Only set CreateBucketConfiguration for non-us-east-1 regions
+            if region and region not in ("us-east-1", "auto"):
+                create_params["CreateBucketConfiguration"] = {
+                    "LocationConstraint": region
+                }
+
+            await s3_client.create_bucket(**create_params)
+
+    async def delete_bucket(self, bucket: str, force: bool = False) -> None:
+        """Delete a bucket.
+
+        Args:
+            bucket: Name of the bucket to delete
+            force: If True, delete all objects in bucket first
+
+        Note:
+            S3 buckets must be empty before deletion unless force=True
+        """
+        if force:
+            # Delete all objects in the bucket first
+            objects = await self.list_objects(bucket=bucket)
+            async with self.session.client(
+                service_name=S3_SERVICE_NAME,
+                endpoint_url=self.endpoint_url,
+                config=self.config,
+            ) as s3_client:
+                for obj in objects:
+                    await s3_client.delete_object(
+                        Bucket=bucket,
+                        Key=obj["key"],
+                    )
+
+        async with self.session.client(
+            service_name=S3_SERVICE_NAME,
+            endpoint_url=self.endpoint_url,
+            config=self.config,
+        ) as s3_client:
+            await s3_client.delete_bucket(Bucket=bucket)
+
+    async def bucket_exists(self, bucket: str) -> bool:
+        """Check if a bucket exists and is accessible.
+
+        Args:
+            bucket: Name of the bucket to check
+
+        Returns:
+            True if bucket exists and is accessible, False otherwise
+        """
+        try:
+            async with self.session.client(
+                service_name=S3_SERVICE_NAME,
+                endpoint_url=self.endpoint_url,
+                config=self.config,
+            ) as s3_client:
+                await s3_client.head_bucket(Bucket=bucket)
+                return True
+        except ClientError as e:
+            error_code = e.response.get("Error", {}).get("Code", "")
+            if error_code in ("404", "NoSuchBucket"):
+                return False
+            raise
+
+    async def get_bucket_location(self, bucket: str) -> str:
+        """Get the region/location of a bucket.
+
+        Args:
+            bucket: Name of the bucket
+
+        Returns:
+            Region string (e.g., "us-east-1", "auto")
+        """
+        async with self.session.client(
+            service_name=S3_SERVICE_NAME,
+            endpoint_url=self.endpoint_url,
+            config=self.config,
+        ) as s3_client:
+            response = await s3_client.get_bucket_location(Bucket=bucket)
+            location = response.get("LocationConstraint")
+            # S3 returns None for us-east-1
+            return location if location else "us-east-1"
+
+    # =========================================================================
+    # Advanced Operations
+    # =========================================================================
+
+    async def copy_object(
+        self,
+        src_key: str,
+        dest_key: str,
+        src_bucket: str | None = None,
+        dest_bucket: str | None = None,
+    ) -> None:
+        """Copy an object from one location to another.
+
+        Args:
+            src_key: Source object key
+            dest_key: Destination object key
+            src_bucket: Source bucket (uses default_bucket if None)
+            dest_bucket: Destination bucket (uses default_bucket if None)
+        """
+        src_bucket_name = self._get_bucket(src_bucket)
+        dest_bucket_name = self._get_bucket(dest_bucket)
+
+        async with self.session.client(
+            service_name=S3_SERVICE_NAME,
+            endpoint_url=self.endpoint_url,
+            config=self.config,
+        ) as s3_client:
+            copy_source = f"{src_bucket_name}/{src_key}"
+            await s3_client.copy_object(
+                CopySource=copy_source,
+                Bucket=dest_bucket_name,
+                Key=dest_key,
+            )
+
+    async def generate_presigned_url(
+        self,
+        key: str,
+        bucket: str | None = None,
+        expiration: int = 3600,
+        method: str = "get_object",
+    ) -> str:
+        """Generate a presigned URL for temporary access to an object.
+
+        Args:
+            key: Object key
+            bucket: Bucket name (uses default_bucket if None)
+            expiration: URL expiration time in seconds (default 3600 = 1 hour)
+            method: S3 method name ("get_object" or "put_object")
+
+        Returns:
+            Presigned URL string
+        """
+        bucket_name = self._get_bucket(bucket)
+
+        async with self.session.client(
+            service_name=S3_SERVICE_NAME,
+            endpoint_url=self.endpoint_url,
+            config=self.config,
+        ) as s3_client:
+            url = await s3_client.generate_presigned_url(
+                ClientMethod=method,
+                Params={
+                    "Bucket": bucket_name,
+                    "Key": key,
+                },
+                ExpiresIn=expiration,
+            )
+            return url
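
Beyond the put/get/delete paths that BlobService delegates to, the new module also covers listing, object metadata, bucket management, copies, and presigned URLs. A hedged sketch of those extras; connection values and object keys are placeholders:

```python
# Hedged sketch: uses only methods defined in the file above; all
# connection values and object keys are placeholders.
import asyncio

from vibetuner.services.s3_storage import S3StorageService


async def main() -> None:
    storage = S3StorageService(
        endpoint_url="https://<account>.r2.cloudflarestorage.com",  # placeholder
        access_key="<access-key>",  # placeholder
        secret_key="<secret-key>",  # placeholder
        default_bucket="assets",  # placeholder
    )
    # List objects under a prefix and inspect one without downloading it.
    for obj in await storage.list_objects(prefix="images/"):
        meta = await storage.get_object_metadata(key=obj["key"])
        print(obj["key"], obj["size"], meta["content_type"])
    # Short-lived download link (15 minutes).
    url = await storage.generate_presigned_url(key="images/logo.png", expiration=900)
    print(url)


asyncio.run(main())
```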
@@ -1,2 +0,0 @@
-# Import all your tasks here with (noqa: F401)
-# from . import x_tasks
@@ -0,0 +1,28 @@
+from contextlib import asynccontextmanager
+from typing import AsyncGenerator
+
+from vibetuner.context import Context, ctx
+from vibetuner.logging import logger
+from vibetuner.mongo import init_models
+
+
+@asynccontextmanager
+async def base_lifespan() -> AsyncGenerator[Context, None]:
+    logger.info("Vibetuner task worker starting")
+
+    await init_models()
+
+    yield ctx
+
+    logger.info("Vibetuner task worker stopping")
+
+
+try:
+    from app.tasks.lifespan import lifespan  # ty: ignore
+except ModuleNotFoundError:
+    # Silent pass for missing app.tasks.lifespan module (expected in some projects)
+    lifespan = base_lifespan
+except ImportError as e:
+    # Log warning for any import error (including syntax errors, missing dependencies, etc.)
+    logger.warning(f"Failed to import app.tasks.lifespan: {e}. Using base lifespan.")
+    lifespan = base_lifespan
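
The try/except at the bottom lets an application replace the worker lifespan by shipping an app/tasks/lifespan.py that exports a lifespan async context manager of the same shape. A hedged sketch of such an override; everything marked app-specific is illustrative:

```python
# Hedged sketch of an app-provided app/tasks/lifespan.py override.
# The contract (an async context manager yielding Context) is taken from
# base_lifespan above; the app-specific steps are illustrative.
from contextlib import asynccontextmanager
from typing import AsyncGenerator

from vibetuner.context import Context, ctx
from vibetuner.mongo import init_models


@asynccontextmanager
async def lifespan() -> AsyncGenerator[Context, None]:
    await init_models()
    # ... acquire app-specific resources here (illustrative) ...
    try:
        yield ctx
    finally:
        # ... release app-specific resources here (illustrative) ...
        pass
```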
vibetuner/tasks/worker.py CHANGED
@@ -1,18 +1,11 @@
 from streaq import Worker
 
 from vibetuner.config import settings
-from vibetuner.tasks.context import lifespan
+from vibetuner.tasks.lifespan import lifespan
 
 
 worker = Worker(
     redis_url=str(settings.redis_url),
-    queue_name=(
-        settings.project.project_slug
-        if not settings.debug
-        else f"debug-{settings.project.project_slug}"
-    ),
+    queue_name=settings.redis_key_prefix.rstrip(":"),
     lifespan=lifespan,
 )
-
-# Register tasks
-# use something like from . import task_module_name // noqa: E402, F401
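
The queue name now derives from settings.redis_key_prefix instead of the project slug plus debug flag, and the task-registration comments are dropped. One visible consequence is that debug runs no longer get a separate debug- queue unless the prefix itself differs per environment. The derivation is a plain trailing-colon strip; a short illustration with a placeholder prefix:

```python
# Illustrative only: the prefix value is a placeholder, not taken from settings.
redis_key_prefix = "vibetuner:"
assert redis_key_prefix.rstrip(":") == "vibetuner"  # becomes the streaq queue name
```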
vibetuner/versioning.py CHANGED
@@ -1,8 +1,12 @@
+from vibetuner.logging import logger
+
+
 __version__ = "0.0.0-default"
 
 try:
     from app._version import version as __version__  # type: ignore
-except ImportError:
-    pass
+except (ImportError, ModuleNotFoundError) as e:
+    # Log warning for both ImportError and ModuleNotFoundError as requested
+    logger.warning(f"Failed to import app._version: {e}. Using default version.")
 
 version = __version__