quantumflow-sdk 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- api/__init__.py +1 -0
- api/auth.py +208 -0
- api/main.py +403 -0
- api/models.py +137 -0
- api/routes/__init__.py +1 -0
- api/routes/auth_routes.py +234 -0
- api/routes/teleport_routes.py +415 -0
- db/__init__.py +15 -0
- db/crud.py +319 -0
- db/database.py +93 -0
- db/models.py +197 -0
- quantumflow/__init__.py +47 -0
- quantumflow/algorithms/__init__.py +48 -0
- quantumflow/algorithms/compression/__init__.py +7 -0
- quantumflow/algorithms/compression/amplitude_amplification.py +189 -0
- quantumflow/algorithms/compression/qft_compression.py +133 -0
- quantumflow/algorithms/compression/token_compression.py +261 -0
- quantumflow/algorithms/cryptography/__init__.py +6 -0
- quantumflow/algorithms/cryptography/qkd.py +205 -0
- quantumflow/algorithms/cryptography/qrng.py +231 -0
- quantumflow/algorithms/machine_learning/__init__.py +7 -0
- quantumflow/algorithms/machine_learning/qnn.py +276 -0
- quantumflow/algorithms/machine_learning/qsvm.py +249 -0
- quantumflow/algorithms/machine_learning/vqe.py +229 -0
- quantumflow/algorithms/optimization/__init__.py +7 -0
- quantumflow/algorithms/optimization/grover.py +223 -0
- quantumflow/algorithms/optimization/qaoa.py +251 -0
- quantumflow/algorithms/optimization/quantum_annealing.py +237 -0
- quantumflow/algorithms/utility/__init__.py +6 -0
- quantumflow/algorithms/utility/circuit_optimizer.py +194 -0
- quantumflow/algorithms/utility/error_correction.py +330 -0
- quantumflow/api/__init__.py +1 -0
- quantumflow/api/routes/__init__.py +4 -0
- quantumflow/api/routes/billing_routes.py +520 -0
- quantumflow/backends/__init__.py +33 -0
- quantumflow/backends/base_backend.py +184 -0
- quantumflow/backends/braket_backend.py +345 -0
- quantumflow/backends/ibm_backend.py +112 -0
- quantumflow/backends/simulator_backend.py +86 -0
- quantumflow/billing/__init__.py +25 -0
- quantumflow/billing/models.py +126 -0
- quantumflow/billing/stripe_service.py +619 -0
- quantumflow/core/__init__.py +12 -0
- quantumflow/core/entanglement.py +164 -0
- quantumflow/core/memory.py +147 -0
- quantumflow/core/quantum_backprop.py +394 -0
- quantumflow/core/quantum_compressor.py +309 -0
- quantumflow/core/teleportation.py +386 -0
- quantumflow/integrations/__init__.py +107 -0
- quantumflow/integrations/autogen_tools.py +501 -0
- quantumflow/integrations/crewai_agents.py +425 -0
- quantumflow/integrations/crewai_tools.py +407 -0
- quantumflow/integrations/langchain_memory.py +385 -0
- quantumflow/integrations/langchain_tools.py +366 -0
- quantumflow/integrations/mcp_server.py +575 -0
- quantumflow_sdk-0.1.0.dist-info/METADATA +190 -0
- quantumflow_sdk-0.1.0.dist-info/RECORD +60 -0
- quantumflow_sdk-0.1.0.dist-info/WHEEL +5 -0
- quantumflow_sdk-0.1.0.dist-info/entry_points.txt +2 -0
- quantumflow_sdk-0.1.0.dist-info/top_level.txt +3 -0
db/crud.py
ADDED
|
@@ -0,0 +1,319 @@
|
|
|
1
|
+
"""
|
|
2
|
+
CRUD Operations for QuantumFlow.
|
|
3
|
+
|
|
4
|
+
Provides clean interface for database operations.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import secrets
|
|
8
|
+
from datetime import datetime, timedelta
|
|
9
|
+
from typing import Optional
|
|
10
|
+
from uuid import UUID
|
|
11
|
+
|
|
12
|
+
from sqlalchemy.orm import Session
|
|
13
|
+
from sqlalchemy import select, and_
|
|
14
|
+
import bcrypt
|
|
15
|
+
|
|
16
|
+
from db.models import User, APIKey, Job, UsageRecord, JobStatus, JobType
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
# ============== User CRUD ==============
|
|
20
|
+
|
|
21
|
+
def get_user(db: Session, user_id: UUID) -> Optional[User]:
    """Look up a single user by primary key.

    Args:
        db: Active SQLAlchemy session.
        user_id: UUID primary key of the user.

    Returns:
        The matching User, or None when no row exists.
    """
    matching = db.query(User).filter(User.id == user_id)
    return matching.first()
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def get_user_by_email(db: Session, email: str) -> Optional[User]:
    """Look up a single user by their (unique) email address.

    Returns:
        The matching User, or None when the email is unknown.
    """
    matching = db.query(User).filter(User.email == email)
    return matching.first()
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def create_user(
    db: Session,
    email: str,
    password: str,
    name: Optional[str] = None,
) -> User:
    """Register a new user with a bcrypt-hashed password.

    Args:
        db: Active SQLAlchemy session.
        email: Email address for the account (unique in the users table).
        password: Plaintext password; only its bcrypt hash is persisted.
        name: Optional display name.

    Returns:
        The freshly persisted User with DB-generated fields populated.
    """
    # bcrypt operates on bytes; decode the digest back to str for the
    # VARCHAR hashed_password column.
    digest = bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt())

    new_user = User(
        email=email,
        hashed_password=digest.decode("utf-8"),
        name=name,
    )
    db.add(new_user)
    db.commit()
    db.refresh(new_user)
    return new_user
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Check a plaintext password against its stored bcrypt hash."""
    candidate = plain_password.encode("utf-8")
    stored = hashed_password.encode("utf-8")
    return bcrypt.checkpw(candidate, stored)
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def authenticate_user(db: Session, email: str, password: str) -> Optional[User]:
    """Return the user when both email and password check out, else None.

    NOTE(review): when the email is unknown we skip bcrypt entirely, so
    response timing can reveal whether an account exists — consider
    hashing a dummy password in that branch. Confirm whether this matters
    for the deployment.
    """
    candidate = get_user_by_email(db, email)
    if candidate is not None and verify_password(password, candidate.hashed_password):
        return candidate
    return None
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
def update_user_login(db: Session, user: User) -> User:
    """Stamp the user's last_login with the current time and persist it.

    Returns:
        The refreshed User instance.
    """
    # Naive UTC, matching the module's other DateTime usage.
    user.last_login = datetime.utcnow()
    db.commit()
    db.refresh(user)
    return user
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
# ============== API Key CRUD ==============
|
|
77
|
+
|
|
78
|
+
def create_api_key(
    db: Session,
    user_id: UUID,
    name: str,
    scopes: Optional[list[str]] = None,
    expires_in_days: Optional[int] = None,
) -> tuple[APIKey, str]:
    """
    Mint and persist a new API key for a user.

    Args:
        db: Active SQLAlchemy session.
        user_id: Owner of the key.
        name: Human-readable label for the key.
        scopes: Permission scopes; defaults to ["all"].
        expires_in_days: Optional lifetime; None (or 0) means no expiry.

    Returns:
        Tuple of (persisted APIKey row, raw key string).
        The raw key is only available here — show it to the user once.

    NOTE(review): the raw key is stored verbatim in the `key` column;
    hashing it at rest would limit damage from a database leak — confirm
    against the product's threat model.
    """
    # 24 random bytes -> 48 hex chars; the "qf_" prefix identifies our keys.
    raw_key = f"qf_{secrets.token_hex(24)}"
    prefix = raw_key[:7]  # short display prefix, e.g. "qf_ab12"

    expires_at = None
    if expires_in_days:
        expires_at = datetime.utcnow() + timedelta(days=expires_in_days)

    record = APIKey(
        user_id=user_id,
        key=raw_key,
        name=name,
        prefix=prefix,
        scopes=scopes or ["all"],
        expires_at=expires_at,
    )
    db.add(record)
    db.commit()
    db.refresh(record)
    return record, raw_key
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
def get_api_key(db: Session, key: str) -> Optional[APIKey]:
    """Resolve a raw key string to its active, non-expired APIKey row.

    Returns:
        The APIKey, or None for unknown, deactivated, or expired keys.
        Expired keys are not deactivated here — they simply stop resolving.
    """
    row = (
        db.query(APIKey)
        .filter(and_(APIKey.key == key, APIKey.is_active == True))  # noqa: E712
        .first()
    )
    if row is None:
        return None
    # Enforce expiry in Python so the comparison uses the same clock
    # (datetime.utcnow) as the rest of this module.
    if row.expires_at is not None and row.expires_at < datetime.utcnow():
        return None
    return row
|
|
130
|
+
|
|
131
|
+
|
|
132
|
+
def get_user_api_keys(db: Session, user_id: UUID) -> list[APIKey]:
    """Return every API key (active or not) belonging to a user."""
    owned = db.query(APIKey).filter(APIKey.user_id == user_id)
    return owned.all()
|
|
135
|
+
|
|
136
|
+
|
|
137
|
+
def revoke_api_key(db: Session, key_id: UUID) -> bool:
    """Soft-delete an API key by flagging it inactive.

    Returns:
        True when a key with that id existed, False otherwise.
    """
    target = db.query(APIKey).filter(APIKey.id == key_id).first()
    if target is None:
        return False
    target.is_active = False
    db.commit()
    return True
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
def update_api_key_usage(db: Session, api_key: APIKey) -> APIKey:
    """Record that the key was just used (sets last_used to now, naive UTC)."""
    api_key.last_used = datetime.utcnow()
    db.commit()
    return api_key
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
# ============== Job CRUD ==============
|
|
155
|
+
|
|
156
|
+
def create_job(
    db: Session,
    user_id: UUID,
    job_type: JobType,
    backend: str = "simulator",
    input_data: Optional[dict] = None,
    n_qubits: Optional[int] = None,
    shots: int = 1024,
) -> Job:
    """Persist a new quantum job in the PENDING state.

    Args:
        db: Active SQLAlchemy session.
        user_id: Owner of the job.
        job_type: Kind of quantum workload (compress, grover, ...).
        backend: Execution backend name; defaults to the simulator.
        input_data: Optional JSON-serializable job parameters.
        n_qubits: Optional qubit count for the circuit.
        shots: Number of measurement shots.

    Returns:
        The persisted Job with DB-generated fields populated.
    """
    pending = Job(
        user_id=user_id,
        job_type=job_type,
        backend=backend,
        input_data=input_data,
        n_qubits=n_qubits,
        shots=shots,
        status=JobStatus.PENDING,
    )
    db.add(pending)
    db.commit()
    db.refresh(pending)
    return pending
|
|
181
|
+
|
|
182
|
+
|
|
183
|
+
def get_job(db: Session, job_id: UUID) -> Optional[Job]:
    """Look up a single job by primary key; None when not found."""
    matching = db.query(Job).filter(Job.id == job_id)
    return matching.first()
|
|
186
|
+
|
|
187
|
+
|
|
188
|
+
def get_user_jobs(
    db: Session,
    user_id: UUID,
    limit: int = 100,
    offset: int = 0,
    status: Optional[JobStatus] = None,
) -> list[Job]:
    """List a user's jobs, newest first, with optional status filtering.

    Args:
        db: Active SQLAlchemy session.
        user_id: Owner whose jobs are listed.
        limit: Maximum number of rows returned.
        offset: Rows to skip (for pagination).
        status: When given, only jobs in this status are returned.
    """
    jobs = db.query(Job).filter(Job.user_id == user_id)
    if status is not None:
        jobs = jobs.filter(Job.status == status)
    recent_first = jobs.order_by(Job.created_at.desc())
    return recent_first.offset(offset).limit(limit).all()
|
|
202
|
+
|
|
203
|
+
|
|
204
|
+
def update_job_status(
    db: Session,
    job: Job,
    status: JobStatus,
    output_data: Optional[dict] = None,
    error_message: Optional[str] = None,
    execution_time_ms: Optional[float] = None,
    fidelity: Optional[float] = None,
    compression_ratio: Optional[float] = None,
) -> Job:
    """Transition a job to a new status and attach any results/metrics.

    Timestamp side effects:
      - RUNNING sets started_at to now (naive UTC).
      - COMPLETED/FAILED set completed_at to now (naive UTC).
      NOTE(review): CANCELLED leaves completed_at unset — confirm intended.

    Only keyword fields that are not None are written; None means
    "leave the column alone".
    """
    job.status = status

    now = datetime.utcnow()
    if status == JobStatus.RUNNING:
        job.started_at = now
    elif status in (JobStatus.COMPLETED, JobStatus.FAILED):
        job.completed_at = now

    # Selectively copy the optional result fields onto the row.
    updates = {
        "output_data": output_data,
        "error_message": error_message,
        "execution_time_ms": execution_time_ms,
        "fidelity": fidelity,
        "compression_ratio": compression_ratio,
    }
    for field, value in updates.items():
        if value is not None:
            setattr(job, field, value)

    db.commit()
    db.refresh(job)
    return job
|
|
237
|
+
|
|
238
|
+
|
|
239
|
+
# ============== Usage CRUD ==============
|
|
240
|
+
|
|
241
|
+
def create_usage_record(
    db: Session,
    user_id: UUID,
    endpoint: str,
    method: str,
    tokens_input: int = 0,
    tokens_output: int = 0,
    qubits_used: int = 0,
    execution_time_ms: float = 0,
    credits_used: float = 0,
    ip_address: Optional[str] = None,
    user_agent: Optional[str] = None,
) -> UsageRecord:
    """Log one API call for later analytics and billing rollups.

    Args:
        db: Active SQLAlchemy session.
        user_id: User who made the call.
        endpoint: Request path, e.g. "/v1/compress".
        method: HTTP verb, e.g. "POST".
        tokens_input / tokens_output: Token counts for the request/response.
        qubits_used: Qubits consumed by the job, if any.
        execution_time_ms: Wall-clock execution time.
        credits_used: Billable credits for paid tiers.
        ip_address / user_agent: Optional request metadata.

    Returns:
        The persisted UsageRecord.
    """
    entry = UsageRecord(
        user_id=user_id,
        endpoint=endpoint,
        method=method,
        tokens_input=tokens_input,
        tokens_output=tokens_output,
        qubits_used=qubits_used,
        execution_time_ms=execution_time_ms,
        credits_used=credits_used,
        ip_address=ip_address,
        user_agent=user_agent,
    )
    db.add(entry)
    db.commit()
    db.refresh(entry)
    return entry
|
|
273
|
+
|
|
274
|
+
|
|
275
|
+
def get_user_usage(
    db: Session,
    user_id: UUID,
    start_date: Optional[datetime] = None,
    end_date: Optional[datetime] = None,
) -> dict:
    """Aggregate a user's usage records over an optional date window.

    Both window bounds are inclusive; omitting them means "no bound".

    Returns:
        Dict with the request count plus summed token/qubit/time/credit
        metrics; all zeros when no records match.
    """
    query = db.query(UsageRecord).filter(UsageRecord.user_id == user_id)
    if start_date is not None:
        query = query.filter(UsageRecord.created_at >= start_date)
    if end_date is not None:
        query = query.filter(UsageRecord.created_at <= end_date)

    records = query.all()

    # Single pass over the rows; counters start at zero so an empty
    # window yields an all-zero summary.
    totals = {
        "total_requests": len(records),
        "total_tokens_input": 0,
        "total_tokens_output": 0,
        "total_qubits_used": 0,
        "total_execution_time_ms": 0,
        "total_credits_used": 0,
    }
    for rec in records:
        totals["total_tokens_input"] += rec.tokens_input
        totals["total_tokens_output"] += rec.tokens_output
        totals["total_qubits_used"] += rec.qubits_used
        totals["total_execution_time_ms"] += rec.execution_time_ms
        totals["total_credits_used"] += rec.credits_used
    return totals
|
|
299
|
+
|
|
300
|
+
|
|
301
|
+
def get_monthly_usage(db: Session, user_id: UUID) -> int:
    """Count the user's API calls since the start of the current UTC month."""
    month_start = datetime.utcnow().replace(
        day=1, hour=0, minute=0, second=0, microsecond=0
    )
    return (
        db.query(UsageRecord)
        .filter(
            and_(
                UsageRecord.user_id == user_id,
                UsageRecord.created_at >= month_start,
            )
        )
        .count()
    )
|
|
314
|
+
|
|
315
|
+
|
|
316
|
+
def check_quota(db: Session, user: User) -> bool:
    """Return True while the user still has monthly API calls remaining."""
    calls_this_month = get_monthly_usage(db, user.id)
    return calls_this_month < user.monthly_quota
|
db/database.py
ADDED
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Database Connection and Session Management.
|
|
3
|
+
|
|
4
|
+
Supports both sync and async operations with PostgreSQL.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import os
|
|
8
|
+
from typing import Generator
|
|
9
|
+
from sqlalchemy import create_engine
|
|
10
|
+
from sqlalchemy.orm import sessionmaker, Session
|
|
11
|
+
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
|
|
12
|
+
|
|
13
|
+
# Database URL from environment
|
|
14
|
+
# Connection URL: taken from the environment, with a local default.
DATABASE_URL = os.getenv(
    "DATABASE_URL",
    "postgresql://localhost:5432/quantumflow"
)

# Convert to async URL if needed.
# NOTE(review): this only rewrites a bare "postgresql://" scheme; a URL that
# already names a driver (e.g. "postgresql+psycopg2://") passes through
# unchanged and will NOT use asyncpg — confirm deployment URLs.
ASYNC_DATABASE_URL = DATABASE_URL.replace("postgresql://", "postgresql+asyncpg://")

# Sync engine (for migrations and simple operations)
engine = create_engine(
    DATABASE_URL,
    pool_pre_ping=True,  # probe connections on checkout to drop stale ones
    pool_size=5,
    max_overflow=10,
)

# Sync session factory; commits/flushes are explicit in the CRUD layer.
SessionLocal = sessionmaker(
    autocommit=False,
    autoflush=False,
    bind=engine,
)

# Async engine (for FastAPI endpoints)
async_engine = create_async_engine(
    ASYNC_DATABASE_URL,
    pool_pre_ping=True,
    pool_size=5,
    max_overflow=10,
)

# Async session factory; expire_on_commit=False keeps ORM objects usable
# after commit without triggering re-fetches.
AsyncSessionLocal = async_sessionmaker(
    async_engine,
    class_=AsyncSession,
    expire_on_commit=False,
)
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def get_db() -> Generator[Session, None, None]:
    """FastAPI dependency yielding a sync DB session, closed afterwards.

    Usage:
        @app.get("/")
        def endpoint(db: Session = Depends(get_db)):
            ...
    """
    session = SessionLocal()
    try:
        yield session
    finally:
        # Always return the connection to the pool, even on error.
        session.close()
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
async def get_async_db() -> AsyncSession:
    """
    Async database session dependency for FastAPI endpoints.

    Usage:
        @app.get("/")
        async def endpoint(db: AsyncSession = Depends(get_async_db)):
            ...

    NOTE(review): this is an async generator, so a more precise return
    annotation would be AsyncGenerator[AsyncSession, None]. The explicit
    close() is redundant with the `async with` block but harmless.
    """
    async with AsyncSessionLocal() as session:
        try:
            yield session
        finally:
            await session.close()
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def init_db():
    """Create all tables registered on Base (no-op for existing tables)."""
    # Local import avoids a circular dependency between db.database
    # and db.models at module load time.
    from db.models import Base

    Base.metadata.create_all(bind=engine)
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
async def init_async_db():
    """Create all tables registered on Base using the async engine."""
    # Local import avoids a circular dependency between db.database
    # and db.models at module load time.
    from db.models import Base

    async with async_engine.begin() as connection:
        await connection.run_sync(Base.metadata.create_all)
|
db/models.py
ADDED
|
@@ -0,0 +1,197 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Database Models for QuantumFlow.
|
|
3
|
+
|
|
4
|
+
Tables:
|
|
5
|
+
- users: User accounts
|
|
6
|
+
- api_keys: API authentication keys
|
|
7
|
+
- jobs: Quantum job execution history
|
|
8
|
+
- usage_records: Usage tracking and billing
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
import uuid
|
|
12
|
+
from datetime import datetime
|
|
13
|
+
from typing import Optional
|
|
14
|
+
from sqlalchemy import (
|
|
15
|
+
Column, String, Integer, Float, Boolean, DateTime,
|
|
16
|
+
ForeignKey, Text, JSON, Enum as SQLEnum
|
|
17
|
+
)
|
|
18
|
+
from sqlalchemy.dialects.postgresql import UUID
|
|
19
|
+
from sqlalchemy.orm import declarative_base, relationship
|
|
20
|
+
import enum
|
|
21
|
+
|
|
22
|
+
Base = declarative_base()
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class JobStatus(str, enum.Enum):
    """Job execution status.

    Mixes in str so members compare and serialize as plain strings
    (convenient for JSON responses and the SQLAlchemy Enum column).
    """
    PENDING = "pending"      # queued, not yet started
    RUNNING = "running"      # currently executing on a backend
    COMPLETED = "completed"  # finished successfully
    FAILED = "failed"        # finished with an error
    CANCELLED = "cancelled"  # stopped before completion
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class JobType(str, enum.Enum):
    """Type of quantum job.

    Mixes in str so members serialize as their plain string values.
    """
    COMPRESS = "compress"  # token/state compression
    GRADIENT = "gradient"  # quantum backprop / gradient evaluation
    ENTANGLE = "entangle"  # entanglement operations
    GROVER = "grover"      # Grover search
    QAOA = "qaoa"          # approximate optimization
    VQE = "vqe"            # variational eigensolver
    QNN = "qnn"            # quantum neural network
    QSVM = "qsvm"          # quantum SVM
    QKD = "qkd"            # quantum key distribution
    QRNG = "qrng"          # quantum random number generation
    CUSTOM = "custom"      # user-defined circuit/workload
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
class User(Base):
    """User account model.

    One row per registered account. Related API keys, jobs, and usage
    records are cascade-deleted with the user.
    """

    __tablename__ = "users"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    email = Column(String(255), unique=True, nullable=False, index=True)
    hashed_password = Column(String(255), nullable=False)  # bcrypt hash, never plaintext
    name = Column(String(255), nullable=True)

    # Account status
    is_active = Column(Boolean, default=True)
    is_verified = Column(Boolean, default=False)  # email verification flag
    is_admin = Column(Boolean, default=False)

    # Timestamps (naive UTC via datetime.utcnow, like the rest of the schema)
    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
    last_login = Column(DateTime, nullable=True)

    # Usage limits
    tier = Column(String(50), default="free")  # free, pro, enterprise
    monthly_quota = Column(Integer, default=1000)  # API calls per month

    # Stripe billing
    stripe_customer_id = Column(String(255), nullable=True, unique=True)
    stripe_subscription_id = Column(String(255), nullable=True)
    stripe_subscription_item_id = Column(String(255), nullable=True)

    # Relationships (delete-orphan: children die with the user)
    api_keys = relationship("APIKey", back_populates="user", cascade="all, delete-orphan")
    jobs = relationship("Job", back_populates="user", cascade="all, delete-orphan")
    usage_records = relationship("UsageRecord", back_populates="user", cascade="all, delete-orphan")

    def __repr__(self):
        return f"<User {self.email}>"
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
class APIKey(Base):
    """API key for authentication.

    NOTE(review): the `key` column stores the raw key string verbatim
    (see crud.create_api_key) — hashing at rest would be safer; confirm
    against the threat model.
    """

    __tablename__ = "api_keys"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    user_id = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=False)

    # Key info
    key = Column(String(64), unique=True, nullable=False, index=True)
    name = Column(String(255), nullable=False)  # User-friendly name
    prefix = Column(String(10), nullable=False)  # e.g., "qf_" for display

    # Permissions
    scopes = Column(JSON, default=list)  # ["compress", "gradient", "all"]

    # Status (False = revoked; revocation is a soft delete)
    is_active = Column(Boolean, default=True)

    # Timestamps (naive UTC)
    created_at = Column(DateTime, default=datetime.utcnow)
    last_used = Column(DateTime, nullable=True)
    expires_at = Column(DateTime, nullable=True)  # None means no expiry

    # Relationships
    user = relationship("User", back_populates="api_keys")

    def __repr__(self):
        return f"<APIKey {self.prefix}***>"
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
class Job(Base):
    """Quantum job execution record.

    Lifecycle: created PENDING, stamped started_at when RUNNING, and
    completed_at when COMPLETED/FAILED (see crud.update_job_status).
    """

    __tablename__ = "jobs"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    user_id = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=False)

    # Job info
    job_type = Column(SQLEnum(JobType), nullable=False)
    status = Column(SQLEnum(JobStatus), default=JobStatus.PENDING)

    # Backend info
    backend = Column(String(50), default="simulator")  # simulator, ibm, google
    n_qubits = Column(Integer, nullable=True)
    shots = Column(Integer, default=1024)  # measurement shots

    # Input/Output (JSON-serializable payloads)
    input_data = Column(JSON, nullable=True)
    output_data = Column(JSON, nullable=True)
    error_message = Column(Text, nullable=True)  # set when status is FAILED

    # Metrics
    execution_time_ms = Column(Float, nullable=True)
    fidelity = Column(Float, nullable=True)
    compression_ratio = Column(Float, nullable=True)

    # Timestamps (naive UTC)
    created_at = Column(DateTime, default=datetime.utcnow)
    started_at = Column(DateTime, nullable=True)
    completed_at = Column(DateTime, nullable=True)

    # Relationships
    user = relationship("User", back_populates="jobs")

    def __repr__(self):
        return f"<Job {self.id} {self.job_type} {self.status}>"
|
|
155
|
+
|
|
156
|
+
|
|
157
|
+
class UsageRecord(Base):
    """Usage tracking for billing and analytics.

    One row per billable API call; aggregated by crud.get_user_usage and
    counted against the monthly quota by crud.get_monthly_usage.
    """

    __tablename__ = "usage_records"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    user_id = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=False)

    # Usage info
    endpoint = Column(String(255), nullable=False)  # /v1/compress
    method = Column(String(10), nullable=False)  # POST

    # Metrics
    tokens_input = Column(Integer, default=0)
    tokens_output = Column(Integer, default=0)
    qubits_used = Column(Integer, default=0)
    execution_time_ms = Column(Float, default=0)

    # Cost (for paid tiers)
    credits_used = Column(Float, default=0)

    # Timestamp (naive UTC)
    created_at = Column(DateTime, default=datetime.utcnow)

    # Request metadata (45 chars fits an IPv6 address)
    ip_address = Column(String(45), nullable=True)
    user_agent = Column(String(255), nullable=True)

    # Relationships
    user = relationship("User", back_populates="usage_records")

    def __repr__(self):
        return f"<UsageRecord {self.endpoint} {self.created_at}>"
|
|
190
|
+
|
|
191
|
+
|
|
192
|
+
# Indexes for common queries
from sqlalchemy import Index  # NOTE(review): import at module bottom — consider moving to the top

# "A user's most recent jobs" listings (get_user_jobs ordering).
Index("ix_jobs_user_created", Job.user_id, Job.created_at.desc())
# Filtering jobs by status.
Index("ix_jobs_status", Job.status)
# Per-user usage rollups over a date window (get_user_usage / get_monthly_usage).
Index("ix_usage_user_created", UsageRecord.user_id, UsageRecord.created_at.desc())
|
quantumflow/__init__.py
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
"""
|
|
2
|
+
QuantumFlow - Quantum-optimized AI agent workflow platform.
|
|
3
|
+
|
|
4
|
+
Core Features:
|
|
5
|
+
- 53% token compression via quantum superposition
|
|
6
|
+
- O(log n) memory through quantum entanglement
|
|
7
|
+
- Quantum teleportation for secure messaging
|
|
8
|
+
- BB84 QKD for unconditionally secure key exchange
|
|
9
|
+
- Multi-backend support (IBM, AWS Braket, Simulator)
|
|
10
|
+
|
|
11
|
+
Installation:
|
|
12
|
+
pip install quantumflow
|
|
13
|
+
|
|
14
|
+
Quick Start:
|
|
15
|
+
from quantumflow import QuantumCompressor
|
|
16
|
+
|
|
17
|
+
compressor = QuantumCompressor(backend="simulator")
|
|
18
|
+
result = compressor.compress([100, 200, 150, 175])
|
|
19
|
+
print(f"Compression: {result.compression_percentage}%")
|
|
20
|
+
"""
|
|
21
|
+
|
|
22
|
+
from quantumflow.core.quantum_compressor import QuantumCompressor, CompressedResult
|
|
23
|
+
from quantumflow.core.quantum_backprop import QuantumBackprop
|
|
24
|
+
from quantumflow.core.entanglement import Entangler
|
|
25
|
+
from quantumflow.core.memory import QuantumMemory
|
|
26
|
+
from quantumflow.core.teleportation import (
|
|
27
|
+
QuantumTeleporter,
|
|
28
|
+
QKDExchange,
|
|
29
|
+
SecureMessenger,
|
|
30
|
+
)
|
|
31
|
+
|
|
32
|
+
__version__ = "0.1.0"
|
|
33
|
+
__all__ = [
|
|
34
|
+
# Core compression
|
|
35
|
+
"QuantumCompressor",
|
|
36
|
+
"CompressedResult",
|
|
37
|
+
# Backpropagation
|
|
38
|
+
"QuantumBackprop",
|
|
39
|
+
# Entanglement
|
|
40
|
+
"Entangler",
|
|
41
|
+
# Memory
|
|
42
|
+
"QuantumMemory",
|
|
43
|
+
# Teleportation & Security
|
|
44
|
+
"QuantumTeleporter",
|
|
45
|
+
"QKDExchange",
|
|
46
|
+
"SecureMessenger",
|
|
47
|
+
]
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
"""
|
|
2
|
+
QuantumFlow Algorithm Library.
|
|
3
|
+
|
|
4
|
+
Categories:
|
|
5
|
+
- compression: Token compression, QFT, amplitude amplification
|
|
6
|
+
- optimization: QAOA, Grover, quantum annealing
|
|
7
|
+
- machine_learning: VQE, QSVM, QNN
|
|
8
|
+
- cryptography: QKD, QRNG
|
|
9
|
+
- utility: Error correction, circuit optimization
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
from quantumflow.algorithms.compression import (
|
|
13
|
+
TokenCompression,
|
|
14
|
+
QFTCompression,
|
|
15
|
+
AmplitudeAmplification,
|
|
16
|
+
)
|
|
17
|
+
from quantumflow.algorithms.optimization import (
|
|
18
|
+
QAOA,
|
|
19
|
+
GroverSearch,
|
|
20
|
+
QuantumAnnealing,
|
|
21
|
+
)
|
|
22
|
+
from quantumflow.algorithms.machine_learning import (
|
|
23
|
+
VQE,
|
|
24
|
+
QSVM,
|
|
25
|
+
QNN,
|
|
26
|
+
)
|
|
27
|
+
from quantumflow.algorithms.cryptography import (
|
|
28
|
+
QKD,
|
|
29
|
+
QRNG,
|
|
30
|
+
)
|
|
31
|
+
|
|
32
|
+
__all__ = [
|
|
33
|
+
# Compression
|
|
34
|
+
"TokenCompression",
|
|
35
|
+
"QFTCompression",
|
|
36
|
+
"AmplitudeAmplification",
|
|
37
|
+
# Optimization
|
|
38
|
+
"QAOA",
|
|
39
|
+
"GroverSearch",
|
|
40
|
+
"QuantumAnnealing",
|
|
41
|
+
# ML
|
|
42
|
+
"VQE",
|
|
43
|
+
"QSVM",
|
|
44
|
+
"QNN",
|
|
45
|
+
# Crypto
|
|
46
|
+
"QKD",
|
|
47
|
+
"QRNG",
|
|
48
|
+
]
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
"""Compression Algorithms."""
|
|
2
|
+
|
|
3
|
+
from quantumflow.algorithms.compression.token_compression import TokenCompression
|
|
4
|
+
from quantumflow.algorithms.compression.qft_compression import QFTCompression
|
|
5
|
+
from quantumflow.algorithms.compression.amplitude_amplification import AmplitudeAmplification
|
|
6
|
+
|
|
7
|
+
__all__ = ["TokenCompression", "QFTCompression", "AmplitudeAmplification"]
|