quantumflow-sdk 0.2.1__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- api/main.py +34 -3
- api/models.py +41 -0
- api/routes/algorithm_routes.py +1029 -0
- api/routes/chat_routes.py +565 -0
- api/routes/pipeline_routes.py +578 -0
- db/models.py +357 -0
- quantumflow/algorithms/machine_learning/__init__.py +14 -2
- quantumflow/algorithms/machine_learning/vqe.py +355 -3
- quantumflow/core/__init__.py +10 -1
- quantumflow/core/quantum_compressor.py +379 -1
- quantumflow/integrations/domain_agents.py +617 -0
- quantumflow/pipeline/__init__.py +29 -0
- quantumflow/pipeline/anomaly_detector.py +521 -0
- quantumflow/pipeline/base_pipeline.py +602 -0
- quantumflow/pipeline/checkpoint_manager.py +587 -0
- quantumflow/pipeline/finance/__init__.py +5 -0
- quantumflow/pipeline/finance/portfolio_optimization.py +595 -0
- quantumflow/pipeline/healthcare/__init__.py +5 -0
- quantumflow/pipeline/healthcare/protein_folding.py +994 -0
- quantumflow/pipeline/temporal_memory.py +577 -0
- {quantumflow_sdk-0.2.1.dist-info → quantumflow_sdk-0.4.0.dist-info}/METADATA +3 -3
- {quantumflow_sdk-0.2.1.dist-info → quantumflow_sdk-0.4.0.dist-info}/RECORD +25 -12
- {quantumflow_sdk-0.2.1.dist-info → quantumflow_sdk-0.4.0.dist-info}/WHEEL +0 -0
- {quantumflow_sdk-0.2.1.dist-info → quantumflow_sdk-0.4.0.dist-info}/entry_points.txt +0 -0
- {quantumflow_sdk-0.2.1.dist-info → quantumflow_sdk-0.4.0.dist-info}/top_level.txt +0 -0
db/models.py
CHANGED
@@ -31,6 +31,44 @@ class JobStatus(str, enum.Enum):
     CANCELLED = "cancelled"


+class PipelineStatus(str, enum.Enum):
+    """Pipeline execution status."""
+    CREATED = "created"
+    RUNNING = "running"
+    PAUSED = "paused"
+    COMPLETED = "completed"
+    FAILED = "failed"
+    ROLLED_BACK = "rolled_back"
+
+
+class PipelineType(str, enum.Enum):
+    """Type of data pipeline."""
+    PROTEIN_FOLDING = "protein_folding"
+    PORTFOLIO_OPTIMIZATION = "portfolio_optimization"
+    CUSTOM = "custom"
+
+
+class AnomalyType(str, enum.Enum):
+    """Types of detected anomalies."""
+    GRADIENT_EXPLOSION = "gradient_explosion"
+    GRADIENT_VANISHING = "gradient_vanishing"
+    NAN_DETECTED = "nan_detected"
+    INF_DETECTED = "inf_detected"
+    ENERGY_SPIKE = "energy_spike"
+    RMSD_DIVERGENCE = "rmsd_divergence"
+    VAR_BREACH = "var_breach"
+    DRAWDOWN_BREACH = "drawdown_breach"
+    STATISTICAL_DEVIATION = "statistical_deviation"
+    CUSTOM = "custom"
+
+
+class AnomalySeverity(str, enum.Enum):
+    """Severity level of anomaly."""
+    INFO = "info"
+    WARNING = "warning"
+    CRITICAL = "critical"
+
+
 class JobType(str, enum.Enum):
     """Type of quantum job."""
     COMPRESS = "compress"
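These enums give the SDK a shared vocabulary for pipeline lifecycle and divergence handling. As a hedged illustration (not code from the package; the import path is assumed from the db/models.py listing above), client code might route detected anomalies by severity like this:

```python
# Illustrative only: map the new AnomalySeverity levels onto logging levels.
# The db.models import path is an assumption based on the file listing above.
import logging

from db.models import AnomalySeverity, AnomalyType

_LEVELS = {
    AnomalySeverity.INFO: logging.INFO,
    AnomalySeverity.WARNING: logging.WARNING,
    AnomalySeverity.CRITICAL: logging.CRITICAL,
}

def log_anomaly(kind: AnomalyType, severity: AnomalySeverity, message: str) -> None:
    """Emit a detected anomaly at the matching log level."""
    logging.log(_LEVELS[severity], "[%s] %s", kind.value, message)

log_anomaly(AnomalyType.GRADIENT_EXPLOSION, AnomalySeverity.CRITICAL, "gradient norm spiked")
```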
@@ -79,6 +117,7 @@ class User(Base):
     api_keys = relationship("APIKey", back_populates="user", cascade="all, delete-orphan")
     jobs = relationship("Job", back_populates="user", cascade="all, delete-orphan")
     usage_records = relationship("UsageRecord", back_populates="user", cascade="all, delete-orphan")
+    pipelines = relationship("Pipeline", back_populates="user", cascade="all, delete-orphan")

     def __repr__(self):
         return f"<User {self.email}>"
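The only change to User is the new pipelines relationship, mirroring the existing api_keys/jobs/usage_records pattern. A minimal sketch of how it might be traversed (session handling and import path are assumptions):

```python
# Hypothetical query helper using the new User.pipelines relationship.
from db.models import PipelineStatus, User  # import path assumed

def running_pipelines(session, user_id):
    """Return the user's pipelines that are currently running."""
    user = session.get(User, user_id)  # SQLAlchemy 1.4+ Session.get
    return [p for p in user.pipelines if p.status == PipelineStatus.RUNNING]
```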
@@ -189,9 +228,327 @@ class UsageRecord(Base):
         return f"<UsageRecord {self.endpoint} {self.created_at}>"


+class Pipeline(Base):
+    """Long-running data pipeline with checkpointing support."""
+
+    __tablename__ = "pipelines"
+
+    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
+    user_id = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=False)
+
+    # Pipeline info
+    name = Column(String(255), nullable=False)
+    pipeline_type = Column(SQLEnum(PipelineType), nullable=False)
+    status = Column(SQLEnum(PipelineStatus), default=PipelineStatus.CREATED)
+    description = Column(Text, nullable=True)
+
+    # Configuration
+    config = Column(JSON, default=dict)  # Pipeline-specific config
+    backend = Column(String(50), default="simulator")
+
+    # Checkpointing settings
+    checkpoint_interval_steps = Column(Integer, default=10)
+    checkpoint_interval_seconds = Column(Integer, default=300)  # 5 minutes
+    max_checkpoints = Column(Integer, default=5)  # Keep N most recent
+
+    # Execution state
+    current_step = Column(Integer, default=0)
+    total_steps = Column(Integer, nullable=True)
+    current_checkpoint_id = Column(UUID(as_uuid=True), nullable=True)
+
+    # Metrics
+    total_execution_time_ms = Column(Float, default=0)
+    rollback_count = Column(Integer, default=0)
+    anomaly_count = Column(Integer, default=0)
+
+    # Timestamps
+    created_at = Column(DateTime, default=datetime.utcnow)
+    started_at = Column(DateTime, nullable=True)
+    completed_at = Column(DateTime, nullable=True)
+    last_checkpoint_at = Column(DateTime, nullable=True)
+
+    # Relationships
+    user = relationship("User", back_populates="pipelines")
+    checkpoints = relationship("Checkpoint", back_populates="pipeline", cascade="all, delete-orphan")
+    anomaly_events = relationship("AnomalyEvent", back_populates="pipeline", cascade="all, delete-orphan")
+    memory_states = relationship("TemporalMemoryState", back_populates="pipeline", cascade="all, delete-orphan")
+
+    def __repr__(self):
+        return f"<Pipeline {self.name} {self.pipeline_type} {self.status}>"
+
+
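Pipeline is the parent record for a long-running run: it carries the checkpoint policy (interval in steps and in seconds, retention count) plus rollback and anomaly counters. A hedged sketch of registering a run, with illustrative configuration values that are not taken from the package:

```python
# Illustrative only: create a pipeline row with an explicit checkpoint policy.
from db.models import Pipeline, PipelineType  # import path assumed

def start_protein_folding_run(session, user_id):
    pipeline = Pipeline(
        user_id=user_id,
        name="folding-demo",                      # illustrative name
        pipeline_type=PipelineType.PROTEIN_FOLDING,
        config={"max_iterations": 500},           # pipeline-specific config (assumed keys)
        backend="simulator",
        checkpoint_interval_steps=10,             # checkpoint every 10 steps...
        checkpoint_interval_seconds=300,          # ...or every 5 minutes, whichever first
        max_checkpoints=5,                        # keep the 5 most recent checkpoints
    )
    session.add(pipeline)
    session.commit()
    return pipeline
```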
+class Checkpoint(Base):
+    """State persistence checkpoint for pipeline recovery."""
+
+    __tablename__ = "checkpoints"
+
+    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
+    pipeline_id = Column(UUID(as_uuid=True), ForeignKey("pipelines.id"), nullable=False)
+
+    # Checkpoint info
+    step_number = Column(Integer, nullable=False)
+    checkpoint_name = Column(String(255), nullable=True)
+
+    # State data
+    state_data = Column(JSON, nullable=False)  # Serialized algorithm state
+    state_hash = Column(String(64), nullable=False)  # SHA-256 for integrity verification
+
+    # Quantum state (optional, for quantum compression)
+    compressed_state = Column(JSON, nullable=True)  # Quantum-compressed state vector
+    compression_ratio = Column(Float, nullable=True)
+
+    # Metrics at checkpoint
+    metrics = Column(JSON, default=dict)  # Energy, loss, gradients, etc.
+
+    # Validity
+    is_valid = Column(Boolean, default=True)
+    validation_error = Column(Text, nullable=True)
+
+    # Timestamps
+    created_at = Column(DateTime, default=datetime.utcnow)
+
+    # Size tracking
+    state_size_bytes = Column(Integer, nullable=True)
+
+    # Relationships
+    pipeline = relationship("Pipeline", back_populates="checkpoints")
+
+    def __repr__(self):
+        return f"<Checkpoint {self.id} step={self.step_number}>"
+
+
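Each Checkpoint stores the serialized state alongside a SHA-256 state_hash for integrity verification and an optional quantum-compressed copy. A hedged sketch of how the hash and size columns might be populated (the helper name and session handling are assumptions, not SDK code):

```python
# Illustrative only: persist a checkpoint whose state_hash is SHA-256 over the
# canonical JSON serialization of state_data, and update the parent pipeline.
import hashlib
import json

from db.models import Checkpoint  # import path assumed

def save_checkpoint(session, pipeline, step, state, metrics=None):
    payload = json.dumps(state, sort_keys=True).encode("utf-8")
    checkpoint = Checkpoint(
        pipeline_id=pipeline.id,
        step_number=step,
        state_data=state,
        state_hash=hashlib.sha256(payload).hexdigest(),
        metrics=metrics or {},
        state_size_bytes=len(payload),
    )
    session.add(checkpoint)
    session.flush()  # assigns the checkpoint's UUID and created_at defaults
    pipeline.current_step = step
    pipeline.current_checkpoint_id = checkpoint.id
    pipeline.last_checkpoint_at = checkpoint.created_at
    session.commit()
    return checkpoint
```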
+class AnomalyEvent(Base):
+    """Detected divergence or anomaly event during pipeline execution."""
+
+    __tablename__ = "anomaly_events"
+
+    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
+    pipeline_id = Column(UUID(as_uuid=True), ForeignKey("pipelines.id"), nullable=False)
+
+    # Anomaly info
+    anomaly_type = Column(SQLEnum(AnomalyType), nullable=False)
+    severity = Column(SQLEnum(AnomalySeverity), default=AnomalySeverity.WARNING)
+    message = Column(Text, nullable=False)
+
+    # Context
+    step_number = Column(Integer, nullable=False)
+    detector_name = Column(String(255), nullable=True)  # Which detector caught it
+
+    # Data at anomaly
+    anomaly_data = Column(JSON, default=dict)  # Values that triggered detection
+    threshold = Column(Float, nullable=True)  # Threshold that was breached
+    actual_value = Column(Float, nullable=True)  # Actual value observed
+
+    # Resolution
+    was_auto_resolved = Column(Boolean, default=False)
+    resolution_action = Column(String(50), nullable=True)  # rollback, skip, retry
+    resolved_checkpoint_id = Column(UUID(as_uuid=True), nullable=True)
+
+    # Timestamps
+    created_at = Column(DateTime, default=datetime.utcnow)
+    resolved_at = Column(DateTime, nullable=True)
+
+    # Relationships
+    pipeline = relationship("Pipeline", back_populates="anomaly_events")
+
+    def __repr__(self):
+        return f"<AnomalyEvent {self.anomaly_type} {self.severity}>"
+
+
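AnomalyEvent records what a detector saw (threshold versus actual_value) and how it was resolved (rollback, skip, retry, plus the checkpoint restored to). A hedged sketch of a gradient-explosion check; the threshold, detector name, and helper are illustrative and not part of the SDK:

```python
# Illustrative only: record a gradient-explosion anomaly and mark it auto-resolved
# by a rollback to the supplied checkpoint.
from db.models import AnomalyEvent, AnomalySeverity, AnomalyType  # import path assumed

GRADIENT_NORM_LIMIT = 1e3  # assumed threshold, not from the package

def check_gradient_norm(session, pipeline, step, grad_norm, rollback_checkpoint_id):
    if grad_norm <= GRADIENT_NORM_LIMIT:
        return None
    event = AnomalyEvent(
        pipeline_id=pipeline.id,
        anomaly_type=AnomalyType.GRADIENT_EXPLOSION,
        severity=AnomalySeverity.CRITICAL,
        message=f"gradient norm {grad_norm:.3e} exceeded {GRADIENT_NORM_LIMIT:.0e}",
        step_number=step,
        detector_name="gradient_norm_check",      # illustrative detector name
        threshold=GRADIENT_NORM_LIMIT,
        actual_value=grad_norm,
        was_auto_resolved=True,
        resolution_action="rollback",
        resolved_checkpoint_id=rollback_checkpoint_id,
    )
    session.add(event)
    pipeline.anomaly_count += 1
    pipeline.rollback_count += 1
    session.commit()
    return event
```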
+class TemporalMemoryState(Base):
+    """LSTM-like sequential state storage for pattern matching."""
+
+    __tablename__ = "temporal_memory_states"
+
+    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
+    pipeline_id = Column(UUID(as_uuid=True), ForeignKey("pipelines.id"), nullable=False)
+
+    # Sequence info
+    sequence_number = Column(Integer, nullable=False)  # Order in the sequence
+    run_id = Column(UUID(as_uuid=True), nullable=False)  # Groups states by run
+
+    # State data
+    state_vector = Column(JSON, nullable=False)  # The actual state embedding
+    state_dimension = Column(Integer, nullable=False)  # Vector dimension
+
+    # Quantum compression (optional)
+    is_compressed = Column(Boolean, default=False)
+    compressed_vector = Column(JSON, nullable=True)
+    compression_n_qubits = Column(Integer, nullable=True)
+
+    # Additional context
+    extra_data = Column(JSON, default=dict)  # Additional context (labels, tags, etc.)
+
+    # Pattern matching helpers
+    state_norm = Column(Float, nullable=True)  # For fast similarity lookup
+    cluster_id = Column(Integer, nullable=True)  # Optional clustering
+
+    # Timestamps
+    created_at = Column(DateTime, default=datetime.utcnow)
+
+    # Relationships
+    pipeline = relationship("Pipeline", back_populates="memory_states")
+
+    def __repr__(self):
+        return f"<TemporalMemoryState seq={self.sequence_number} run={self.run_id}>"
+
+
 # Indexes for common queries
 from sqlalchemy import Index

 Index("ix_jobs_user_created", Job.user_id, Job.created_at.desc())
 Index("ix_jobs_status", Job.status)
 Index("ix_usage_user_created", UsageRecord.user_id, UsageRecord.created_at.desc())
+
+# Pipeline indexes
+Index("ix_pipelines_user_status", Pipeline.user_id, Pipeline.status)
+Index("ix_pipelines_type", Pipeline.pipeline_type)
+Index("ix_checkpoints_pipeline_step", Checkpoint.pipeline_id, Checkpoint.step_number.desc())
+Index("ix_anomalies_pipeline_created", AnomalyEvent.pipeline_id, AnomalyEvent.created_at.desc())
+Index("ix_anomalies_type_severity", AnomalyEvent.anomaly_type, AnomalyEvent.severity)
+Index("ix_memory_pipeline_run_seq", TemporalMemoryState.pipeline_id, TemporalMemoryState.run_id, TemporalMemoryState.sequence_number)
+
+
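TemporalMemoryState keeps an ordered embedding trail per run (the ix_memory_pipeline_run_seq index supports that ordering), with state_norm available as a cheap pre-filter for similarity search. A hedged sketch of a cosine-similarity lookup over a run's stored vectors; the scoring logic is illustrative and not the SDK's pattern matcher:

```python
# Illustrative only: fetch a run's states in sequence order and rank them by
# cosine similarity to a query vector, pre-filtering on the stored state_norm.
import math

from db.models import TemporalMemoryState  # import path assumed

def most_similar_state(session, pipeline_id, run_id, query, norm_tolerance=0.5):
    q_norm = math.sqrt(sum(x * x for x in query))
    states = (
        session.query(TemporalMemoryState)
        .filter_by(pipeline_id=pipeline_id, run_id=run_id)
        .order_by(TemporalMemoryState.sequence_number)
        .all()
    )
    best, best_score = None, -1.0
    for state in states:
        # Skip vectors whose stored norm is far from the query's norm.
        if state.state_norm and abs(state.state_norm - q_norm) > norm_tolerance * q_norm:
            continue
        vec = state.state_vector
        dot = sum(a * b for a, b in zip(query, vec))
        denom = q_norm * math.sqrt(sum(b * b for b in vec))
        score = dot / denom if denom else 0.0
        if score > best_score:
            best, best_score = state, score
    return best, best_score
```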
+# ==================== QChat Messaging Models ====================
+
+class MessageStatus(str, enum.Enum):
+    """Message delivery status."""
+    PENDING = "pending"
+    SENT = "sent"
+    DELIVERED = "delivered"
+    READ = "read"
+    FAILED = "failed"
+
+
+class ChannelStatus(str, enum.Enum):
+    """Quantum channel status."""
+    PENDING = "pending"
+    ESTABLISHING = "establishing"
+    READY = "ready"
+    COMPROMISED = "compromised"
+    EXPIRED = "expired"
+
+
+class ChatUser(Base):
+    """QChat user linked by phone number."""
+
+    __tablename__ = "chat_users"
+
+    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
+    phone_number = Column(String(20), unique=True, nullable=False, index=True)
+    display_name = Column(String(100), nullable=True)
+
+    # Firebase UID for auth verification
+    firebase_uid = Column(String(128), unique=True, nullable=True)
+
+    # Public key for quantum key exchange
+    public_key = Column(Text, nullable=True)
+
+    # Device info for push notifications
+    device_token = Column(String(255), nullable=True)
+    platform = Column(String(20), nullable=True)  # ios, android
+
+    # Status
+    is_online = Column(Boolean, default=False)
+    last_seen = Column(DateTime, nullable=True)
+
+    # Timestamps
+    created_at = Column(DateTime, default=datetime.utcnow)
+    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
+
+    def __repr__(self):
+        return f"<ChatUser {self.phone_number}>"
+
+
+class QuantumChannel(Base):
+    """Quantum-secure channel between two users."""
+
+    __tablename__ = "quantum_channels"
+
+    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
+
+    # Participants
+    user_a_id = Column(UUID(as_uuid=True), ForeignKey("chat_users.id"), nullable=False)
+    user_b_id = Column(UUID(as_uuid=True), ForeignKey("chat_users.id"), nullable=False)
+
+    # Channel status
+    status = Column(SQLEnum(ChannelStatus), default=ChannelStatus.PENDING)
+
+    # Quantum key distribution
+    qkd_key_id = Column(String(64), nullable=True)  # Reference to QKD session
+    bell_pairs_remaining = Column(Integer, default=1000)
+    key_generated_at = Column(DateTime, nullable=True)
+    key_expires_at = Column(DateTime, nullable=True)
+
+    # Security metrics
+    eavesdrop_detected_count = Column(Integer, default=0)
+    last_eavesdrop_at = Column(DateTime, nullable=True)
+    error_rate = Column(Float, default=0.0)  # QBER - Quantum Bit Error Rate
+
+    # Timestamps
+    created_at = Column(DateTime, default=datetime.utcnow)
+    established_at = Column(DateTime, nullable=True)
+    last_message_at = Column(DateTime, nullable=True)
+
+    # Relationships
+    user_a = relationship("ChatUser", foreign_keys=[user_a_id])
+    user_b = relationship("ChatUser", foreign_keys=[user_b_id])
+    messages = relationship("ChatMessage", back_populates="channel", cascade="all, delete-orphan")
+
+    def __repr__(self):
+        return f"<QuantumChannel {self.id} {self.status}>"
+
+
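QuantumChannel tracks the QKD key lifecycle (bell_pairs_remaining, key_expires_at) and security metrics such as the quantum bit error rate. As a hedged illustration of how those columns might drive the COMPROMISED state (the 11% QBER cutoff is the commonly cited BB84 abort bound, not a value taken from this package):

```python
# Illustrative only: flag a channel as compromised when the measured QBER exceeds
# a chosen cutoff, and count the suspected eavesdropping event.
from datetime import datetime

from db.models import ChannelStatus, QuantumChannel  # import path assumed

QBER_CUTOFF = 0.11  # assumed threshold (typical BB84 abort bound), not from the SDK

def record_qber(session, channel: QuantumChannel, measured_qber: float):
    channel.error_rate = measured_qber
    if measured_qber > QBER_CUTOFF:
        channel.status = ChannelStatus.COMPROMISED
        channel.eavesdrop_detected_count += 1
        channel.last_eavesdrop_at = datetime.utcnow()
    session.commit()
    return channel.status
```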
+class ChatMessage(Base):
+    """Encrypted message in a quantum channel."""
+
+    __tablename__ = "chat_messages"
+
+    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
+    channel_id = Column(UUID(as_uuid=True), ForeignKey("quantum_channels.id"), nullable=False)
+    sender_id = Column(UUID(as_uuid=True), ForeignKey("chat_users.id"), nullable=False)
+
+    # Encrypted content (backend never sees plaintext)
+    encrypted_content = Column(Text, nullable=False)
+    content_hash = Column(String(64), nullable=False)  # SHA-256 for integrity
+
+    # Encryption metadata
+    encryption_method = Column(String(50), default="qkd_aes256")
+    compression_ratio = Column(Float, nullable=True)
+
+    # Quantum teleportation metadata
+    bell_pair_id = Column(String(64), nullable=True)
+    teleport_fidelity = Column(Float, nullable=True)
+
+    # Security
+    eavesdrop_detected = Column(Boolean, default=False)
+
+    # Status
+    status = Column(SQLEnum(MessageStatus), default=MessageStatus.PENDING)
+
+    # Timestamps
+    created_at = Column(DateTime, default=datetime.utcnow)
+    sent_at = Column(DateTime, nullable=True)
+    delivered_at = Column(DateTime, nullable=True)
+    read_at = Column(DateTime, nullable=True)
+
+    # Auto-delete after delivery (ephemeral mode)
+    auto_delete = Column(Boolean, default=True)
+    delete_after_seconds = Column(Integer, default=86400)  # 24 hours
+
+    # Relationships
+    channel = relationship("QuantumChannel", back_populates="messages")
+    sender = relationship("ChatUser", foreign_keys=[sender_id])
+
+    def __repr__(self):
+        return f"<ChatMessage {self.id} {self.status}>"
+
+
+# QChat indexes
+Index("ix_chat_users_phone", ChatUser.phone_number)
+Index("ix_chat_users_firebase", ChatUser.firebase_uid)
+Index("ix_channels_users", QuantumChannel.user_a_id, QuantumChannel.user_b_id)
+Index("ix_channels_status", QuantumChannel.status)
+Index("ix_messages_channel_created", ChatMessage.channel_id, ChatMessage.created_at.desc())
+Index("ix_messages_sender", ChatMessage.sender_id)
+Index("ix_messages_status", ChatMessage.status)
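ChatMessage stores only ciphertext plus delivery metadata, and the auto_delete/delete_after_seconds pair implements the ephemeral mode. A hedged sketch of the purge job those columns imply; the helper and its scheduling are assumptions, since the SDK's actual cleanup task is not part of this diff:

```python
# Illustrative only: delete delivered ephemeral messages once their retention
# window (delete_after_seconds, counted here from delivery time) has elapsed.
from datetime import datetime, timedelta

from db.models import ChatMessage  # import path assumed

def purge_expired_messages(session, now=None):
    now = now or datetime.utcnow()
    candidates = (
        session.query(ChatMessage)
        .filter(ChatMessage.auto_delete.is_(True))
        .filter(ChatMessage.delivered_at.isnot(None))
        .all()
    )
    removed = 0
    for message in candidates:
        if message.delivered_at + timedelta(seconds=message.delete_after_seconds) <= now:
            session.delete(message)
            removed += 1
    session.commit()
    return removed
```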
quantumflow/algorithms/machine_learning/__init__.py
CHANGED

@@ -1,7 +1,19 @@
 """Machine Learning Algorithms."""

-from quantumflow.algorithms.machine_learning.vqe import
+from quantumflow.algorithms.machine_learning.vqe import (
+    VQE,
+    VQEResult,
+    run_ses_vqe,
+    calculate_volumetric_cost,
+)
 from quantumflow.algorithms.machine_learning.qsvm import QSVM
 from quantumflow.algorithms.machine_learning.qnn import QNN

-__all__ = [
+__all__ = [
+    "VQE",
+    "VQEResult",
+    "run_ses_vqe",
+    "calculate_volumetric_cost",
+    "QSVM",
+    "QNN",
+]
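Version 0.4.0 re-exports the VQE helpers through the subpackage's __init__ and declares them in __all__. A minimal sketch of the resulting import, using only the names visible in the hunk above:

```python
# Only names re-exported in this diff are used; the functions' signatures are not
# shown here because they are not part of this hunk.
from quantumflow.algorithms.machine_learning import (
    VQE,
    VQEResult,
    calculate_volumetric_cost,
    run_ses_vqe,
)
```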