ff-storage 0.1.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,188 @@
1
+ Metadata-Version: 2.4
2
+ Name: ff-storage
3
+ Version: 0.1.4
4
+ Summary: Fenixflow storage package for database and file operations
5
+ Author-email: Fenixflow <dev@fenixflow.com>
6
+ License: Proprietary
7
+ Project-URL: repository, https://gitlab.com/fenixflow/fenix-packages
8
+ Project-URL: documentation, https://gitlab.com/fenixflow/fenix-packages/-/tree/main/ff-storage
9
+ Requires-Python: >=3.10
10
+ Description-Content-Type: text/markdown
11
+ Requires-Dist: psycopg2-binary>=2.9
12
+ Requires-Dist: asyncpg>=0.29
13
+ Requires-Dist: pymysql>=1.1
14
+ Requires-Dist: mysql-connector-python>=8.0
15
+ Requires-Dist: boto3>=1.34
16
+ Requires-Dist: azure-storage-blob>=12.19
17
+ Requires-Dist: aiofiles>=23.0.0
18
+ Requires-Dist: aioboto3>=12.0.0
19
+ Provides-Extra: dev
20
+ Requires-Dist: pytest>=7.4; extra == "dev"
21
+ Requires-Dist: pytest-asyncio>=0.21; extra == "dev"
22
+ Requires-Dist: black>=23.0; extra == "dev"
23
+ Requires-Dist: ruff>=0.1; extra == "dev"
24
+ Requires-Dist: moto[s3]>=5.0.0; extra == "dev"
25
+ Requires-Dist: aioboto3>=12.0.0; extra == "dev"
26
+
27
+ # ff-storage
28
+
29
+ Database and object storage operations for Fenixflow applications.
30
+
31
+ ## Features
32
+
33
+ - **Database Connections**: PostgreSQL and MySQL with connection pooling
34
+ - **Multi-Database Support**: Consistent API for PostgreSQL and MySQL
35
+ - **Migration Management**: Simple SQL file-based migrations
36
+ - **Object Storage**: Async local filesystem and S3/S3-compatible storage
37
+ - **Streaming Support**: Read/write large files without loading into memory
38
+ - **Atomic Operations**: Safe file writes with temp file + rename
39
+ - **Metadata Management**: Store and retrieve metadata with objects
40
+ - **Base Models**: Dataclass-based models with UUID and timestamp support
41
+ - **Query Builder**: SQL query construction utilities
42
+
43
+ ## Installation
44
+
45
+ ```bash
46
+ # From GitLab
47
+ pip install git+https://gitlab.com/fenixflow/fenix-packages.git@main#subdirectory=ff-storage
48
+
49
+ # Local development
50
+ pip install -e .
51
+ ```
52
+
53
+ ## Usage
54
+
55
+ ### PostgreSQL Connection
56
+
57
+ ```python
58
+ from ff_storage import PostgresPool
59
+
60
+ # Create connection pool
61
+ db = PostgresPool(
62
+ dbname="fenix_db",
63
+ user="fenix",
64
+ password="password",
65
+ host="localhost",
66
+ port=5432,
67
+ pool_size=20
68
+ )
69
+
70
+ # Connect and execute query
71
+ db.connect()
72
+ results = db.read_query("SELECT * FROM documents WHERE status = %s", {"status": "active"})
73
+
74
+ # Execute with RETURNING
75
+ new_id = db.execute_query(
76
+ "INSERT INTO documents (title) VALUES (%s) RETURNING id",
77
+ {"title": "New Document"}
78
+ )
79
+
80
+ # Return connection to pool
81
+ db.close_connection()
82
+ ```
83
+
84
+ ### MySQL Connection
85
+
86
+ ```python
87
+ from ff_storage import MySQLPool
88
+
89
+ # Create connection pool
90
+ db = MySQLPool(
91
+ dbname="fenix_db",
92
+ user="root",
93
+ password="password",
94
+ host="localhost",
95
+ port=3306,
96
+ pool_size=10
97
+ )
98
+
99
+ # Connect and execute query
100
+ db.connect()
101
+ results = db.read_query("SELECT * FROM documents WHERE status = %s", {"status": "active"})
102
+
103
+ # Execute INSERT (returns last insert ID)
104
+ new_id = db.execute_query(
105
+ "INSERT INTO documents (title) VALUES (%s)",
106
+ {"title": "New Document"}
107
+ )
108
+
109
+ # Check open connections
110
+ open_conns = db.get_open_connections()
111
+
112
+ # Return connection to pool
113
+ db.close_connection()
114
+ ```
115
+
116
+ ### Migrations
117
+
118
+ ```python
119
+ from ff_storage.db.migrations import MigrationManager
120
+
121
+ # Setup migration manager
122
+ manager = MigrationManager(db_connection, "./migrations")
123
+
124
+ # Run all pending migrations
125
+ manager.migrate()
126
+ ```
127
+
128
+ ### Object Storage
129
+
130
+ ```python
131
+ from ff_storage import LocalObjectStorage, S3ObjectStorage
132
+ import asyncio
133
+
134
+ async def main():
135
+ # Local filesystem storage
136
+ local = LocalObjectStorage("/var/data/documents")
137
+ await local.write("docs/report.pdf", pdf_bytes, {"content-type": "application/pdf"})
138
+ data = await local.read("docs/report.pdf")
139
+ exists = await local.exists("docs/report.pdf")
140
+ files = await local.list_keys(prefix="docs/")
141
+
142
+ # S3 storage (AWS or S3-compatible)
143
+ s3 = S3ObjectStorage(
144
+ bucket="fenix-documents",
145
+ region="us-east-1"
146
+ )
147
+ await s3.write("docs/report.pdf", pdf_bytes)
148
+ data = await s3.read("docs/report.pdf")
149
+
150
+ # Stream large files
151
+ async for chunk in s3.read_stream("large_file.bin", chunk_size=8192):
152
+ process_chunk(chunk)
153
+
154
+ asyncio.run(main())
155
+ ```
156
+
157
+ ## Database Classes
158
+
159
+ ### SQL Base Class
160
+ Abstract base providing interface for all SQL operations:
161
+ - `connect()`: Establish connection
162
+ - `read_query()`: Execute SELECT queries
163
+ - `execute()`: Execute INSERT/UPDATE/DELETE
164
+ - `execute_query()`: Execute with RETURNING
165
+ - `execute_many()`: Batch operations
166
+ - Transaction management methods
167
+
168
+ ### PostgreSQL
169
+ - `Postgres`: Direct connection without pooling
170
+ - `PostgresPool`: Connection pooling for production use
171
+
172
+ ### MySQL
173
+ - `MySQL`: Direct connection without pooling
174
+ - `MySQLPool`: Connection pooling for production use
175
+
176
+ ## Testing
177
+
178
+ ```bash
179
+ # Run tests
180
+ pytest tests/
181
+
182
+ # With coverage
183
+ pytest --cov=ff_storage tests/
184
+ ```
185
+
186
+ ## License
187
+
188
+ Proprietary - Fenixflow Internal Use Only
@@ -0,0 +1,162 @@
1
+ # ff-storage
2
+
3
+ Database and object storage operations for Fenixflow applications.
4
+
5
+ ## Features
6
+
7
+ - **Database Connections**: PostgreSQL and MySQL with connection pooling
8
+ - **Multi-Database Support**: Consistent API for PostgreSQL and MySQL
9
+ - **Migration Management**: Simple SQL file-based migrations
10
+ - **Object Storage**: Async local filesystem and S3/S3-compatible storage
11
+ - **Streaming Support**: Read/write large files without loading into memory
12
+ - **Atomic Operations**: Safe file writes with temp file + rename
13
+ - **Metadata Management**: Store and retrieve metadata with objects
14
+ - **Base Models**: Dataclass-based models with UUID and timestamp support
15
+ - **Query Builder**: SQL query construction utilities
16
+
17
+ ## Installation
18
+
19
+ ```bash
20
+ # From GitLab
21
+ pip install git+https://gitlab.com/fenixflow/fenix-packages.git@main#subdirectory=ff-storage
22
+
23
+ # Local development
24
+ pip install -e .
25
+ ```
26
+
27
+ ## Usage
28
+
29
+ ### PostgreSQL Connection
30
+
31
+ ```python
32
+ from ff_storage import PostgresPool
33
+
34
+ # Create connection pool
35
+ db = PostgresPool(
36
+ dbname="fenix_db",
37
+ user="fenix",
38
+ password="password",
39
+ host="localhost",
40
+ port=5432,
41
+ pool_size=20
42
+ )
43
+
44
+ # Connect and execute query
45
+ db.connect()
46
+ results = db.read_query("SELECT * FROM documents WHERE status = %s", {"status": "active"})
47
+
48
+ # Execute with RETURNING
49
+ new_id = db.execute_query(
50
+ "INSERT INTO documents (title) VALUES (%s) RETURNING id",
51
+ {"title": "New Document"}
52
+ )
53
+
54
+ # Return connection to pool
55
+ db.close_connection()
56
+ ```
57
+
58
+ ### MySQL Connection
59
+
60
+ ```python
61
+ from ff_storage import MySQLPool
62
+
63
+ # Create connection pool
64
+ db = MySQLPool(
65
+ dbname="fenix_db",
66
+ user="root",
67
+ password="password",
68
+ host="localhost",
69
+ port=3306,
70
+ pool_size=10
71
+ )
72
+
73
+ # Connect and execute query
74
+ db.connect()
75
+ results = db.read_query("SELECT * FROM documents WHERE status = %s", {"status": "active"})
76
+
77
+ # Execute INSERT (returns last insert ID)
78
+ new_id = db.execute_query(
79
+ "INSERT INTO documents (title) VALUES (%s)",
80
+ {"title": "New Document"}
81
+ )
82
+
83
+ # Check open connections
84
+ open_conns = db.get_open_connections()
85
+
86
+ # Return connection to pool
87
+ db.close_connection()
88
+ ```
89
+
90
+ ### Migrations
91
+
92
+ ```python
93
+ from ff_storage.db.migrations import MigrationManager
94
+
95
+ # Setup migration manager
96
+ manager = MigrationManager(db_connection, "./migrations")
97
+
98
+ # Run all pending migrations
99
+ manager.migrate()
100
+ ```
101
+
102
+ ### Object Storage
103
+
104
+ ```python
105
+ from ff_storage import LocalObjectStorage, S3ObjectStorage
106
+ import asyncio
107
+
108
+ async def main():
109
+ # Local filesystem storage
110
+ local = LocalObjectStorage("/var/data/documents")
111
+ await local.write("docs/report.pdf", pdf_bytes, {"content-type": "application/pdf"})
112
+ data = await local.read("docs/report.pdf")
113
+ exists = await local.exists("docs/report.pdf")
114
+ files = await local.list_keys(prefix="docs/")
115
+
116
+ # S3 storage (AWS or S3-compatible)
117
+ s3 = S3ObjectStorage(
118
+ bucket="fenix-documents",
119
+ region="us-east-1"
120
+ )
121
+ await s3.write("docs/report.pdf", pdf_bytes)
122
+ data = await s3.read("docs/report.pdf")
123
+
124
+ # Stream large files
125
+ async for chunk in s3.read_stream("large_file.bin", chunk_size=8192):
126
+ process_chunk(chunk)
127
+
128
+ asyncio.run(main())
129
+ ```
130
+
131
+ ## Database Classes
132
+
133
+ ### SQL Base Class
134
+ Abstract base providing interface for all SQL operations:
135
+ - `connect()`: Establish connection
136
+ - `read_query()`: Execute SELECT queries
137
+ - `execute()`: Execute INSERT/UPDATE/DELETE
138
+ - `execute_query()`: Execute with RETURNING
139
+ - `execute_many()`: Batch operations
140
+ - Transaction management methods
141
+
142
+ ### PostgreSQL
143
+ - `Postgres`: Direct connection without pooling
144
+ - `PostgresPool`: Connection pooling for production use
145
+
146
+ ### MySQL
147
+ - `MySQL`: Direct connection without pooling
148
+ - `MySQLPool`: Connection pooling for production use
149
+
150
+ ## Testing
151
+
152
+ ```bash
153
+ # Run tests
154
+ pytest tests/
155
+
156
+ # With coverage
157
+ pytest --cov=ff_storage tests/
158
+ ```
159
+
160
+ ## License
161
+
162
+ Proprietary - Fenixflow Internal Use Only
@@ -0,0 +1,57 @@
1
+ [build-system]
2
+ requires = ["setuptools>=61", "wheel"]
3
+ build-backend = "setuptools.build_meta"
4
+
5
+ [project]
6
+ name = "ff-storage"
7
+ version = "0.1.4"
8
+ description = "Fenixflow storage package for database and file operations"
9
+ readme = "README.md"
10
+ requires-python = ">=3.10"
11
+ license = {text = "Proprietary"}
12
+ authors = [
13
+ {name = "Fenixflow", email = "dev@fenixflow.com"}
14
+ ]
15
+ dependencies = [
16
+ "psycopg2-binary>=2.9",
17
+ "asyncpg>=0.29",
18
+ "pymysql>=1.1",
19
+ "mysql-connector-python>=8.0",
20
+ "boto3>=1.34",
21
+ "azure-storage-blob>=12.19",
22
+ "aiofiles>=23.0.0",
23
+ "aioboto3>=12.0.0",
24
+ ]
25
+
26
+ [project.optional-dependencies]
27
+ dev = [
28
+ "pytest>=7.4",
29
+ "pytest-asyncio>=0.21",
30
+ "black>=23.0",
31
+ "ruff>=0.1",
32
+ "moto[s3]>=5.0.0",
33
+ "aioboto3>=12.0.0",
34
+ ]
35
+
36
+ [project.urls]
37
+ repository = "https://gitlab.com/fenixflow/fenix-packages"
38
+ documentation = "https://gitlab.com/fenixflow/fenix-packages/-/tree/main/ff-storage"
39
+
40
+ [tool.setuptools]
41
+ package-dir = {"" = "src"}
42
+
43
+ [tool.setuptools.packages.find]
44
+ where = ["src"]
45
+
46
+ [tool.black]
47
+ line-length = 100
48
+ target-version = ["py310", "py311"]
49
+
50
+ [tool.ruff]
51
+ line-length = 100
52
+ target-version = "py310"
53
+
54
+ [dependency-groups]
55
+ dev = [
56
+ "pytest>=8.4.1",
57
+ ]
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
@@ -0,0 +1,28 @@
"""
ff-storage: Database and file storage operations for Fenixflow applications.

Top-level package that re-exports the public API: the PostgreSQL and MySQL
connection classes, the SQL-file migration runner, and the async object
storage backends.
"""

# NOTE: keep in sync with pyproject.toml ([project] version) and PKG-INFO.
# Was "0.1.0", which disagreed with the packaged metadata (0.1.4).
__version__ = "0.1.4"

# Database exports
from .db.postgres import Postgres, PostgresPool
from .db.mysql import MySQL, MySQLPool
from .db.migrations import MigrationManager

# Object storage exports
from .object import ObjectStorage, LocalObjectStorage, S3ObjectStorage

# Explicit public API of the package.
__all__ = [
    # PostgreSQL
    "Postgres",
    "PostgresPool",
    # MySQL
    "MySQL",
    "MySQLPool",
    # Migrations
    "MigrationManager",
    # Object Storage
    "ObjectStorage",
    "LocalObjectStorage",
    "S3ObjectStorage",
]
@@ -0,0 +1,22 @@
"""
Database connection and operation modules.

Aggregates the package's database-facing names into one namespace:
the abstract SQL base class, the PostgreSQL and MySQL drivers
(direct and pooled variants plus their shared base classes), and
the migration runner.
"""

from .sql import SQL
from .postgres import Postgres, PostgresPool, PostgresBase
from .mysql import MySQL, MySQLPool, MySQLBase
from .migrations import MigrationManager

# Names re-exported at the ff_storage.db package level.
__all__ = [
    "SQL",
    "Postgres",
    "PostgresPool",
    "PostgresBase",
    "MySQL",
    "MySQLPool",
    "MySQLBase",
    "MigrationManager",
]