compair-core 0.3.14__tar.gz → 0.4.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of compair-core might be problematic. Click here for more details.

Files changed (45) hide show
  1. {compair_core-0.3.14 → compair_core-0.4.0}/PKG-INFO +7 -2
  2. {compair_core-0.3.14 → compair_core-0.4.0}/README.md +6 -1
  3. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/api.py +7 -0
  4. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/compair/__init__.py +35 -10
  5. compair_core-0.4.0/compair_core/compair/feedback.py +246 -0
  6. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/compair/models.py +8 -4
  7. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/server/local_model/app.py +25 -11
  8. compair_core-0.4.0/compair_core/server/local_model/ocr.py +44 -0
  9. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/server/routers/capabilities.py +4 -0
  10. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core.egg-info/PKG-INFO +7 -2
  11. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core.egg-info/SOURCES.txt +1 -0
  12. {compair_core-0.3.14 → compair_core-0.4.0}/pyproject.toml +1 -1
  13. compair_core-0.3.14/compair_core/compair/feedback.py +0 -79
  14. {compair_core-0.3.14 → compair_core-0.4.0}/LICENSE +0 -0
  15. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/__init__.py +0 -0
  16. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/compair/celery_app.py +0 -0
  17. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/compair/default_groups.py +0 -0
  18. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/compair/embeddings.py +0 -0
  19. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/compair/logger.py +0 -0
  20. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/compair/main.py +0 -0
  21. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/compair/schema.py +0 -0
  22. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/compair/tasks.py +0 -0
  23. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/compair/utils.py +0 -0
  24. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/compair_email/__init__.py +0 -0
  25. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/compair_email/email.py +0 -0
  26. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/compair_email/email_core.py +0 -0
  27. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/compair_email/templates.py +0 -0
  28. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/compair_email/templates_core.py +0 -0
  29. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/server/__init__.py +0 -0
  30. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/server/app.py +0 -0
  31. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/server/deps.py +0 -0
  32. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/server/local_model/__init__.py +0 -0
  33. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/server/providers/__init__.py +0 -0
  34. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/server/providers/console_mailer.py +0 -0
  35. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/server/providers/contracts.py +0 -0
  36. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/server/providers/local_storage.py +0 -0
  37. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/server/providers/noop_analytics.py +0 -0
  38. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/server/providers/noop_billing.py +0 -0
  39. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/server/providers/noop_ocr.py +0 -0
  40. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/server/routers/__init__.py +0 -0
  41. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core/server/settings.py +0 -0
  42. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core.egg-info/dependency_links.txt +0 -0
  43. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core.egg-info/requires.txt +0 -0
  44. {compair_core-0.3.14 → compair_core-0.4.0}/compair_core.egg-info/top_level.txt +0 -0
  45. {compair_core-0.3.14 → compair_core-0.4.0}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: compair-core
3
- Version: 0.3.14
3
+ Version: 0.4.0
4
4
  Summary: Open-source foundation of the Compair collaboration platform.
5
5
  Author: RocketResearch, Inc.
6
6
  License: MIT
@@ -86,7 +86,8 @@ Container definitions and build pipelines live outside this public package:
86
86
  Key environment variables for the core edition:
87
87
 
88
88
  - `COMPAIR_EDITION` (`core`) – corresponds to this core local implementation.
89
- - `COMPAIR_SQLITE_DIR` / `COMPAIR_SQLITE_NAME` override the default local SQLite path (falls back to `./compair_data` if `/data` is not writable).
89
+ - `COMPAIR_DATABASE_URL` – optional explicit SQLAlchemy URL (e.g. `postgresql+psycopg2://user:pass@host/db`). When omitted, Compair falls back to a local SQLite file.
90
+ - `COMPAIR_DB_DIR` / `COMPAIR_DB_NAME` – directory and filename for the bundled SQLite database (default: `~/.compair-core/data/compair.db`). Legacy `COMPAIR_SQLITE_*` variables remain supported.
90
91
  - `COMPAIR_LOCAL_MODEL_URL` – endpoint for your local embeddings/feedback service (defaults to `http://local-model:9000`).
91
92
  - `COMPAIR_EMAIL_BACKEND` – the core mailer logs emails to stdout; cloud overrides this with transactional delivery.
92
93
  - `COMPAIR_REQUIRE_AUTHENTICATION` (`true`) – set to `false` to run the API in single-user mode without login or account management. When disabled, Compair auto-provisions a local user, group, and long-lived session token so you can upload documents immediately.
@@ -94,6 +95,10 @@ Key environment variables for the core edition:
94
95
  - `COMPAIR_INCLUDE_LEGACY_ROUTES` (`false`) – opt-in to the full legacy API surface (used by the hosted product) when running the core edition. Leave unset to expose only the streamlined single-user endpoints in Swagger.
95
96
  - `COMPAIR_EMBEDDING_DIM` – force the embedding vector size stored in the database (defaults to 384 for core, 1536 for cloud). Keep this in sync with whichever embedding model you configure.
96
97
  - `COMPAIR_VECTOR_BACKEND` (`auto`) – set to `pgvector` when running against PostgreSQL with the pgvector extension, or `json` to store embeddings as JSON (the default for SQLite deployments).
98
+ - `COMPAIR_GENERATION_PROVIDER` (`local`) – choose how feedback is produced. Options: `local` (call the bundled FastAPI service), `openai` (use ChatGPT-compatible APIs with an API key), `http` (POST the request to a custom endpoint), or `fallback` (skip generation and surface similar references only).
99
+ - `COMPAIR_OPENAI_API_KEY` / `COMPAIR_OPENAI_MODEL` – when using the OpenAI provider, supply your API key and optional model name (defaults to `gpt-5-nano`). The fallback kicks in automatically if the key or SDK is unavailable.
100
+ - `COMPAIR_GENERATION_ENDPOINT` – HTTP endpoint invoked when `COMPAIR_GENERATION_PROVIDER=http`; the service receives a JSON payload (`document`, `references`, `length_instruction`) and should return `{"feedback": ...}`.
101
+ - `COMPAIR_OCR_ENDPOINT` – endpoint the backend calls for OCR uploads (defaults to the bundled Tesseract wrapper at `http://local-ocr:9001/ocr-file`). Provide your own service by overriding this URL.
97
102
 
98
103
  See `compair_core/server/settings.py` for the full settings surface.
99
104
 
@@ -51,7 +51,8 @@ Container definitions and build pipelines live outside this public package:
51
51
  Key environment variables for the core edition:
52
52
 
53
53
  - `COMPAIR_EDITION` (`core`) – corresponds to this core local implementation.
54
- - `COMPAIR_SQLITE_DIR` / `COMPAIR_SQLITE_NAME` override the default local SQLite path (falls back to `./compair_data` if `/data` is not writable).
54
+ - `COMPAIR_DATABASE_URL` – optional explicit SQLAlchemy URL (e.g. `postgresql+psycopg2://user:pass@host/db`). When omitted, Compair falls back to a local SQLite file.
55
+ - `COMPAIR_DB_DIR` / `COMPAIR_DB_NAME` – directory and filename for the bundled SQLite database (default: `~/.compair-core/data/compair.db`). Legacy `COMPAIR_SQLITE_*` variables remain supported.
55
56
  - `COMPAIR_LOCAL_MODEL_URL` – endpoint for your local embeddings/feedback service (defaults to `http://local-model:9000`).
56
57
  - `COMPAIR_EMAIL_BACKEND` – the core mailer logs emails to stdout; cloud overrides this with transactional delivery.
57
58
  - `COMPAIR_REQUIRE_AUTHENTICATION` (`true`) – set to `false` to run the API in single-user mode without login or account management. When disabled, Compair auto-provisions a local user, group, and long-lived session token so you can upload documents immediately.
@@ -59,6 +60,10 @@ Key environment variables for the core edition:
59
60
  - `COMPAIR_INCLUDE_LEGACY_ROUTES` (`false`) – opt-in to the full legacy API surface (used by the hosted product) when running the core edition. Leave unset to expose only the streamlined single-user endpoints in Swagger.
60
61
  - `COMPAIR_EMBEDDING_DIM` – force the embedding vector size stored in the database (defaults to 384 for core, 1536 for cloud). Keep this in sync with whichever embedding model you configure.
61
62
  - `COMPAIR_VECTOR_BACKEND` (`auto`) – set to `pgvector` when running against PostgreSQL with the pgvector extension, or `json` to store embeddings as JSON (the default for SQLite deployments).
63
+ - `COMPAIR_GENERATION_PROVIDER` (`local`) – choose how feedback is produced. Options: `local` (call the bundled FastAPI service), `openai` (use ChatGPT-compatible APIs with an API key), `http` (POST the request to a custom endpoint), or `fallback` (skip generation and surface similar references only).
64
+ - `COMPAIR_OPENAI_API_KEY` / `COMPAIR_OPENAI_MODEL` – when using the OpenAI provider, supply your API key and optional model name (defaults to `gpt-5-nano`). The fallback kicks in automatically if the key or SDK is unavailable.
65
+ - `COMPAIR_GENERATION_ENDPOINT` – HTTP endpoint invoked when `COMPAIR_GENERATION_PROVIDER=http`; the service receives a JSON payload (`document`, `references`, `length_instruction`) and should return `{"feedback": ...}`.
66
+ - `COMPAIR_OCR_ENDPOINT` – endpoint the backend calls for OCR uploads (defaults to the bundled Tesseract wrapper at `http://local-ocr:9001/ocr-file`). Provide your own service by overriding this URL.
62
67
 
63
68
  See `compair_core/server/settings.py` for the full settings surface.
64
69
 
@@ -2370,6 +2370,8 @@ def get_activity_feed(
2370
2370
  ):
2371
2371
  """Retrieve recent activities for a user's groups."""
2372
2372
  require_feature(HAS_ACTIVITY, "Activity feed")
2373
+ if not IS_CLOUD:
2374
+ raise HTTPException(status_code=501, detail="Activity feed is only available in the Compair Cloud edition.")
2373
2375
  with compair.Session() as session:
2374
2376
  # Get user's groups
2375
2377
 
@@ -3514,7 +3516,11 @@ CORE_PATHS: set[str] = {
3514
3516
  "/load_documents",
3515
3517
  "/load_document",
3516
3518
  "/load_document_by_id",
3519
+ "/load_user_files",
3517
3520
  "/create_doc",
3521
+ "/update_doc",
3522
+ "/delete_doc",
3523
+ "/delete_docs",
3518
3524
  "/process_doc",
3519
3525
  "/status/{task_id}",
3520
3526
  "/upload/ocr-file",
@@ -3523,6 +3529,7 @@ CORE_PATHS: set[str] = {
3523
3529
  "/load_references",
3524
3530
  "/load_feedback",
3525
3531
  "/documents/{document_id}/feedback",
3532
+ "/get_activity_feed",
3526
3533
  }
3527
3534
 
3528
3535
  for route in router.routes:
@@ -1,6 +1,7 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  import os
4
+ from pathlib import Path
4
5
  from sqlalchemy import Engine, create_engine
5
6
  from sqlalchemy.orm import sessionmaker
6
7
 
@@ -37,27 +38,51 @@ if edition == "cloud":
37
38
 
38
39
 
39
40
  def _handle_engine() -> Engine:
41
+ # Preferred configuration: explicit database URL
42
+ explicit_url = (
43
+ os.getenv("COMPAIR_DATABASE_URL")
44
+ or os.getenv("COMPAIR_DB_URL")
45
+ or os.getenv("DATABASE_URL")
46
+ )
47
+ if explicit_url:
48
+ if explicit_url.startswith("sqlite:"):
49
+ return create_engine(explicit_url, connect_args={"check_same_thread": False})
50
+ return create_engine(explicit_url)
51
+
52
+ # Backwards compatibility with legacy Postgres env variables
40
53
  db = os.getenv("DB")
41
54
  db_user = os.getenv("DB_USER")
42
55
  db_passw = os.getenv("DB_PASSW")
43
- db_url = os.getenv("DB_URL")
56
+ db_host = os.getenv("DB_URL")
44
57
 
45
- if all([db, db_user, db_passw, db_url]):
58
+ if all([db, db_user, db_passw, db_host]):
46
59
  return create_engine(
47
- f"postgresql+psycopg2://{db_user}:{db_passw}@{db_url}/{db}",
60
+ f"postgresql+psycopg2://{db_user}:{db_passw}@{db_host}/{db}",
48
61
  pool_size=10,
49
62
  max_overflow=0,
50
63
  )
51
64
 
52
- sqlite_dir = os.getenv("COMPAIR_SQLITE_DIR", "/data")
65
+ # Local default: place an SQLite database inside COMPAIR_DB_DIR
66
+ db_dir = (
67
+ os.getenv("COMPAIR_DB_DIR")
68
+ or os.getenv("COMPAIR_SQLITE_DIR")
69
+ or os.path.join(Path.home(), ".compair-core", "data")
70
+ )
71
+ db_name = os.getenv("COMPAIR_DB_NAME") or os.getenv("COMPAIR_SQLITE_NAME") or "compair.db"
72
+
73
+ db_path = Path(db_dir).expanduser()
53
74
  try:
54
- os.makedirs(sqlite_dir, exist_ok=True)
75
+ db_path.mkdir(parents=True, exist_ok=True)
55
76
  except OSError:
56
- fallback_dir = os.path.join(os.getcwd(), "compair_data")
57
- os.makedirs(fallback_dir, exist_ok=True)
58
- sqlite_dir = fallback_dir
59
- sqlite_path = os.path.join(sqlite_dir, os.getenv("COMPAIR_SQLITE_NAME", "compair.db"))
60
- return create_engine(f"sqlite:///{sqlite_path}", connect_args={"check_same_thread": False})
77
+ fallback_dir = Path(os.getcwd()) / "compair_data"
78
+ fallback_dir.mkdir(parents=True, exist_ok=True)
79
+ db_path = fallback_dir
80
+
81
+ sqlite_path = db_path / db_name
82
+ return create_engine(
83
+ f"sqlite:///{sqlite_path}",
84
+ connect_args={"check_same_thread": False},
85
+ )
61
86
 
62
87
 
63
88
  def initialize_database() -> None:
@@ -0,0 +1,246 @@
1
+ from __future__ import annotations
2
+
3
+ import os
4
+ from typing import Any, Iterable, List
5
+
6
+ import requests
7
+
8
+ from .logger import log_event
9
+ from .models import Document, User
10
+
11
+ try:
12
+ import openai # type: ignore
13
+ except ImportError: # pragma: no cover - optional dependency
14
+ openai = None # type: ignore
15
+
16
+ try:
17
+ from compair_cloud.feedback import Reviewer as CloudReviewer # type: ignore
18
+ from compair_cloud.feedback import get_feedback as cloud_get_feedback # type: ignore
19
+ except (ImportError, ModuleNotFoundError):
20
+ CloudReviewer = None # type: ignore
21
+ cloud_get_feedback = None # type: ignore
22
+
23
+
24
+ class Reviewer:
25
+ """Edition-aware wrapper that selects a feedback provider based on configuration."""
26
+
27
+ def __init__(self) -> None:
28
+ self.edition = os.getenv("COMPAIR_EDITION", "core").lower()
29
+ self.provider = os.getenv("COMPAIR_GENERATION_PROVIDER", "local").lower()
30
+ self.length_map = {
31
+ "Brief": "1–2 short sentences",
32
+ "Detailed": "A couple short paragraphs",
33
+ "Verbose": "As thorough as reasonably possible without repeating information",
34
+ }
35
+
36
+ self._cloud_impl = None
37
+ self._openai_client = None
38
+ self.openai_model = os.getenv("COMPAIR_OPENAI_MODEL", "gpt-5-nano")
39
+ self.custom_endpoint = os.getenv("COMPAIR_GENERATION_ENDPOINT")
40
+
41
+ if self.edition == "cloud" and CloudReviewer is not None:
42
+ self._cloud_impl = CloudReviewer()
43
+ self.provider = "cloud"
44
+ else:
45
+ if self.provider == "openai":
46
+ api_key = os.getenv("COMPAIR_OPENAI_API_KEY")
47
+ if api_key and openai is not None:
48
+ # Support both legacy (ChatCompletion) and new SDKs
49
+ if hasattr(openai, "api_key"):
50
+ openai.api_key = api_key # type: ignore[assignment]
51
+ if hasattr(openai, "OpenAI"):
52
+ try: # pragma: no cover - optional runtime dependency
53
+ self._openai_client = openai.OpenAI(api_key=api_key) # type: ignore[attr-defined]
54
+ except Exception: # pragma: no cover - if instantiation fails
55
+ self._openai_client = None
56
+ if self._openai_client is None and not hasattr(openai, "ChatCompletion"):
57
+ log_event("openai_feedback_unavailable", reason="openai_library_missing")
58
+ self.provider = "fallback"
59
+ if self.provider == "http" and not self.custom_endpoint:
60
+ log_event("custom_feedback_unavailable", reason="missing_endpoint")
61
+ self.provider = "fallback"
62
+ if self.provider == "local":
63
+ self.model = os.getenv("COMPAIR_LOCAL_GENERATION_MODEL", "local-feedback")
64
+ base_url = os.getenv("COMPAIR_LOCAL_MODEL_URL", "http://local-model:9000")
65
+ route = os.getenv("COMPAIR_LOCAL_GENERATION_ROUTE", "/generate")
66
+ self.endpoint = f"{base_url.rstrip('/')}{route}"
67
+ else:
68
+ self.model = "external"
69
+ self.endpoint = None
70
+ if self.provider not in {"local", "openai", "http", "fallback"}:
71
+ log_event("feedback_provider_unknown", provider=self.provider)
72
+ self.provider = "fallback"
73
+
74
+ @property
75
+ def is_cloud(self) -> bool:
76
+ return self._cloud_impl is not None
77
+
78
+
79
+ def _reference_snippets(references: Iterable[Any], limit: int = 3) -> List[str]:
80
+ snippets: List[str] = []
81
+ for ref in references:
82
+ snippet = getattr(ref, "content", "") or ""
83
+ snippet = snippet.replace("\n", " ").strip()
84
+ if snippet:
85
+ snippets.append(snippet[:200])
86
+ if len(snippets) == limit:
87
+ break
88
+ return snippets
89
+
90
+
91
+ def _fallback_feedback(text: str, references: list[Any]) -> str:
92
+ snippets = _reference_snippets(references)
93
+ if not snippets:
94
+ return "NONE"
95
+ joined = "; ".join(snippets)
96
+ return f"Consider aligning with these reference passages: {joined}"
97
+
98
+
99
+ def _openai_feedback(
100
+ reviewer: Reviewer,
101
+ doc: Document,
102
+ text: str,
103
+ references: list[Any],
104
+ user: User,
105
+ ) -> str | None:
106
+ if openai is None:
107
+ return None
108
+ instruction = reviewer.length_map.get(user.preferred_feedback_length, "1–2 short sentences")
109
+ ref_text = "\n\n".join(_reference_snippets(references, limit=3))
110
+ messages = [
111
+ {
112
+ "role": "system",
113
+ "content": (
114
+ "You are Compair, an assistant that delivers concise, actionable feedback on a user's document. "
115
+ "Focus on clarity, cohesion, and usefulness."
116
+ ),
117
+ },
118
+ {
119
+ "role": "user",
120
+ "content": (
121
+ f"Document:\n{text}\n\nHelpful reference excerpts:\n{ref_text or 'None provided'}\n\n"
122
+ f"Respond with {instruction} that highlights the most valuable revision to make next."
123
+ ),
124
+ },
125
+ ]
126
+
127
+ try:
128
+ if reviewer._openai_client is not None and hasattr(reviewer._openai_client, "responses"):
129
+ response = reviewer._openai_client.responses.create( # type: ignore[union-attr]
130
+ model=reviewer.openai_model,
131
+ input=messages,
132
+ max_output_tokens=256,
133
+ )
134
+ content = getattr(response, "output_text", None)
135
+ if not content and hasattr(response, "outputs"):
136
+ # Legacy compatibility: join content parts
137
+ parts = []
138
+ for item in getattr(response, "outputs", []):
139
+ parts.extend(getattr(item, "content", []))
140
+ content = " ".join(getattr(part, "text", "") for part in parts)
141
+ elif hasattr(openai, "ChatCompletion"):
142
+ chat_response = openai.ChatCompletion.create( # type: ignore[attr-defined]
143
+ model=reviewer.openai_model,
144
+ messages=messages,
145
+ temperature=0.3,
146
+ max_tokens=256,
147
+ )
148
+ content = (
149
+ chat_response["choices"][0]["message"]["content"].strip() # type: ignore[index, assignment]
150
+ )
151
+ else:
152
+ content = None
153
+ except Exception as exc: # pragma: no cover - network/API failure
154
+ log_event("openai_feedback_failed", error=str(exc))
155
+ content = None
156
+ if content:
157
+ content = content.strip()
158
+ if content:
159
+ return content
160
+ return None
161
+
162
+
163
+ def _local_feedback(
164
+ reviewer: Reviewer,
165
+ text: str,
166
+ references: list[Any],
167
+ user: User,
168
+ ) -> str | None:
169
+ payload = {
170
+ "document": text,
171
+ "references": [getattr(ref, "content", "") for ref in references],
172
+ "length_instruction": reviewer.length_map.get(
173
+ user.preferred_feedback_length,
174
+ "1–2 short sentences",
175
+ ),
176
+ }
177
+
178
+ try:
179
+ response = requests.post(reviewer.endpoint, json=payload, timeout=30)
180
+ response.raise_for_status()
181
+ data = response.json()
182
+ feedback = data.get("feedback") or data.get("text")
183
+ if feedback:
184
+ return str(feedback).strip()
185
+ except Exception as exc: # pragma: no cover - network failures stay graceful
186
+ log_event("local_feedback_failed", error=str(exc))
187
+
188
+ return None
189
+
190
+
191
+ def _http_feedback(
192
+ reviewer: Reviewer,
193
+ text: str,
194
+ references: list[Any],
195
+ user: User,
196
+ ) -> str | None:
197
+ if not reviewer.custom_endpoint:
198
+ return None
199
+ payload = {
200
+ "document": text,
201
+ "references": [getattr(ref, "content", "") for ref in references],
202
+ "length_instruction": reviewer.length_map.get(
203
+ user.preferred_feedback_length,
204
+ "1–2 short sentences",
205
+ ),
206
+ }
207
+ try:
208
+ response = requests.post(reviewer.custom_endpoint, json=payload, timeout=30)
209
+ response.raise_for_status()
210
+ data = response.json()
211
+ feedback = data.get("feedback") or data.get("text")
212
+ if isinstance(feedback, str):
213
+ feedback = feedback.strip()
214
+ if feedback:
215
+ return feedback
216
+ except Exception as exc: # pragma: no cover - network failures stay graceful
217
+ log_event("custom_feedback_failed", error=str(exc))
218
+ return None
219
+
220
+
221
+ def get_feedback(
222
+ reviewer: Reviewer,
223
+ doc: Document,
224
+ text: str,
225
+ references: list[Any],
226
+ user: User,
227
+ ) -> str:
228
+ if reviewer.is_cloud and cloud_get_feedback is not None:
229
+ return cloud_get_feedback(reviewer._cloud_impl, doc, text, references, user) # type: ignore[arg-type]
230
+
231
+ if reviewer.provider == "openai":
232
+ feedback = _openai_feedback(reviewer, doc, text, references, user)
233
+ if feedback:
234
+ return feedback
235
+
236
+ if reviewer.provider == "http":
237
+ feedback = _http_feedback(reviewer, text, references, user)
238
+ if feedback:
239
+ return feedback
240
+
241
+ if reviewer.provider == "local" and getattr(reviewer, "endpoint", None):
242
+ feedback = _local_feedback(reviewer, text, references, user)
243
+ if feedback:
244
+ return feedback
245
+
246
+ return _fallback_feedback(text, references)
@@ -76,9 +76,13 @@ def _embedding_column():
76
76
  raise RuntimeError(
77
77
  "pgvector is required when COMPAIR_VECTOR_BACKEND is set to 'pgvector'."
78
78
  )
79
- return mapped_column(Vector(EMBEDDING_DIMENSION), nullable=True)
79
+ return mapped_column(
80
+ Vector(EMBEDDING_DIMENSION),
81
+ nullable=True,
82
+ default=None,
83
+ )
80
84
  # Store embeddings as JSON arrays (works across SQLite/Postgres without pgvector)
81
- return mapped_column(JSON, nullable=True)
85
+ return mapped_column(JSON, nullable=True, default=None)
82
86
 
83
87
 
84
88
  def cosine_similarity(vec1: Sequence[float] | None, vec2: Sequence[float] | None) -> float | None:
@@ -279,10 +283,10 @@ class Document(BaseObject):
279
283
  doc_type: Mapped[str]
280
284
  datetime_created: Mapped[datetime]
281
285
  datetime_modified: Mapped[datetime]
286
+ embedding: Mapped[list[float] | None] = _embedding_column()
282
287
  file_key: Mapped[str | None] = mapped_column(String, nullable=True, default=None)
283
288
  image_key: Mapped[str | None] = mapped_column(String, nullable=True, default=None)
284
289
  is_published: Mapped[bool] = mapped_column(Boolean, default=False)
285
- embedding: Mapped[list[float] | None] = _embedding_column()
286
290
 
287
291
  user = relationship("User", back_populates="documents")
288
292
  groups = relationship("Group", secondary="document_to_group", back_populates="documents")
@@ -315,8 +319,8 @@ class Note(Base):
315
319
  author_id: Mapped[str] = mapped_column(ForeignKey("user.user_id", ondelete="CASCADE"), index=True)
316
320
  group_id: Mapped[str | None] = mapped_column(ForeignKey("group.group_id", ondelete="CASCADE"), index=True, nullable=True)
317
321
  content: Mapped[str] = mapped_column(Text)
318
- datetime_created: Mapped[datetime] = mapped_column(default=datetime.now(timezone.utc))
319
322
  embedding: Mapped[list[float] | None] = _embedding_column()
323
+ datetime_created: Mapped[datetime] = mapped_column(default=datetime.now(timezone.utc))
320
324
 
321
325
  document = relationship("Document", back_populates="notes")
322
326
  author = relationship("User", back_populates="notes")
@@ -46,13 +46,19 @@ class EmbedResponse(BaseModel):
46
46
 
47
47
 
48
48
  class GenerateRequest(BaseModel):
49
+ # Legacy format used by the CLI shim
49
50
  system: str | None = None
50
- prompt: str
51
+ prompt: str | None = None
51
52
  verbosity: str | None = None
52
53
 
54
+ # Core API payload (document + references)
55
+ document: str | None = None
56
+ references: List[str] | None = None
57
+ length_instruction: str | None = None
58
+
53
59
 
54
60
  class GenerateResponse(BaseModel):
55
- text: str
61
+ feedback: str
56
62
 
57
63
 
58
64
  @app.post("/embed", response_model=EmbedResponse)
@@ -62,12 +68,20 @@ def embed(request: EmbedRequest) -> EmbedResponse:
62
68
 
63
69
  @app.post("/generate", response_model=GenerateResponse)
64
70
  def generate(request: GenerateRequest) -> GenerateResponse:
65
- prompt = request.prompt.strip()
66
- if not prompt:
67
- return GenerateResponse(text="NONE")
68
-
69
- first_sentence = prompt.split("\n", 1)[0][:200]
70
- verbosity = request.verbosity or "default"
71
- return GenerateResponse(
72
- text=f"[local-{verbosity}] Key takeaway: {first_sentence}"
73
- )
71
+ # Determine the main text input (document or prompt)
72
+ text_input = request.document or request.prompt or ""
73
+ text_input = text_input.strip()
74
+
75
+ if not text_input:
76
+ return GenerateResponse(feedback="NONE")
77
+
78
+ first_sentence = text_input.split("\n", 1)[0][:200]
79
+ verbosity = request.length_instruction or request.verbosity or "brief response"
80
+ ref_snippet = ""
81
+ if request.references:
82
+ top_ref = (request.references[0] or "").strip()
83
+ if top_ref:
84
+ ref_snippet = f" Reference: {top_ref[:160]}"
85
+
86
+ feedback = f"[local-feedback] {verbosity}: {first_sentence}{ref_snippet}".strip()
87
+ return GenerateResponse(feedback=feedback or "NONE")
@@ -0,0 +1,44 @@
1
+ """Minimal OCR endpoint leveraging pytesseract when available."""
2
+ from __future__ import annotations
3
+
4
+ import io
5
+ import os
6
+ from typing import Any, Dict
7
+
8
+ from fastapi import FastAPI, File, HTTPException, UploadFile
9
+
10
+ app = FastAPI(title="Compair Local OCR", version="0.1.0")
11
+
12
+ try: # Optional dependency
13
+ import pytesseract # type: ignore
14
+ from PIL import Image # type: ignore
15
+ except ImportError: # pragma: no cover - optional
16
+ pytesseract = None # type: ignore
17
+ Image = None # type: ignore
18
+
19
+ _OCR_FALLBACK = os.getenv("COMPAIR_LOCAL_OCR_FALLBACK", "text") # text | none
20
+
21
+
22
+ def _extract_text(data: bytes) -> str:
23
+ if pytesseract is None or Image is None:
24
+ if _OCR_FALLBACK == "text":
25
+ try:
26
+ return data.decode("utf-8")
27
+ except UnicodeDecodeError:
28
+ return data.decode("latin-1", errors="ignore")
29
+ return ""
30
+ try:
31
+ image = Image.open(io.BytesIO(data))
32
+ return pytesseract.image_to_string(image)
33
+ except Exception:
34
+ return ""
35
+
36
+
37
+ @app.post("/ocr-file")
38
+ async def ocr_file(file: UploadFile = File(...)) -> Dict[str, Any]:
39
+ payload = await file.read()
40
+ text = _extract_text(payload)
41
+ if not text:
42
+ raise HTTPException(status_code=501, detail="OCR not available or failed to extract text.")
43
+ return {"extracted_text": text}
44
+
@@ -36,6 +36,10 @@ def capabilities(settings: Settings = Depends(get_settings)) -> dict[str, object
36
36
  "docs": None if edition == "core" else 100,
37
37
  "feedback_per_day": None if edition == "core" else 50,
38
38
  },
39
+ "features": {
40
+ "ocr_upload": settings.ocr_enabled,
41
+ "activity_feed": edition == "cloud",
42
+ },
39
43
  "server": "Compair Cloud" if edition == "cloud" else "Compair Core",
40
44
  "version": settings.version,
41
45
  "legacy_routes": settings.include_legacy_routes,
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: compair-core
3
- Version: 0.3.14
3
+ Version: 0.4.0
4
4
  Summary: Open-source foundation of the Compair collaboration platform.
5
5
  Author: RocketResearch, Inc.
6
6
  License: MIT
@@ -86,7 +86,8 @@ Container definitions and build pipelines live outside this public package:
86
86
  Key environment variables for the core edition:
87
87
 
88
88
  - `COMPAIR_EDITION` (`core`) – corresponds to this core local implementation.
89
- - `COMPAIR_SQLITE_DIR` / `COMPAIR_SQLITE_NAME` override the default local SQLite path (falls back to `./compair_data` if `/data` is not writable).
89
+ - `COMPAIR_DATABASE_URL` – optional explicit SQLAlchemy URL (e.g. `postgresql+psycopg2://user:pass@host/db`). When omitted, Compair falls back to a local SQLite file.
90
+ - `COMPAIR_DB_DIR` / `COMPAIR_DB_NAME` – directory and filename for the bundled SQLite database (default: `~/.compair-core/data/compair.db`). Legacy `COMPAIR_SQLITE_*` variables remain supported.
90
91
  - `COMPAIR_LOCAL_MODEL_URL` – endpoint for your local embeddings/feedback service (defaults to `http://local-model:9000`).
91
92
  - `COMPAIR_EMAIL_BACKEND` – the core mailer logs emails to stdout; cloud overrides this with transactional delivery.
92
93
  - `COMPAIR_REQUIRE_AUTHENTICATION` (`true`) – set to `false` to run the API in single-user mode without login or account management. When disabled, Compair auto-provisions a local user, group, and long-lived session token so you can upload documents immediately.
@@ -94,6 +95,10 @@ Key environment variables for the core edition:
94
95
  - `COMPAIR_INCLUDE_LEGACY_ROUTES` (`false`) – opt-in to the full legacy API surface (used by the hosted product) when running the core edition. Leave unset to expose only the streamlined single-user endpoints in Swagger.
95
96
  - `COMPAIR_EMBEDDING_DIM` – force the embedding vector size stored in the database (defaults to 384 for core, 1536 for cloud). Keep this in sync with whichever embedding model you configure.
96
97
  - `COMPAIR_VECTOR_BACKEND` (`auto`) – set to `pgvector` when running against PostgreSQL with the pgvector extension, or `json` to store embeddings as JSON (the default for SQLite deployments).
98
+ - `COMPAIR_GENERATION_PROVIDER` (`local`) – choose how feedback is produced. Options: `local` (call the bundled FastAPI service), `openai` (use ChatGPT-compatible APIs with an API key), `http` (POST the request to a custom endpoint), or `fallback` (skip generation and surface similar references only).
99
+ - `COMPAIR_OPENAI_API_KEY` / `COMPAIR_OPENAI_MODEL` – when using the OpenAI provider, supply your API key and optional model name (defaults to `gpt-5-nano`). The fallback kicks in automatically if the key or SDK is unavailable.
100
+ - `COMPAIR_GENERATION_ENDPOINT` – HTTP endpoint invoked when `COMPAIR_GENERATION_PROVIDER=http`; the service receives a JSON payload (`document`, `references`, `length_instruction`) and should return `{"feedback": ...}`.
101
+ - `COMPAIR_OCR_ENDPOINT` – endpoint the backend calls for OCR uploads (defaults to the bundled Tesseract wrapper at `http://local-ocr:9001/ocr-file`). Provide your own service by overriding this URL.
97
102
 
98
103
  See `compair_core/server/settings.py` for the full settings surface.
99
104
 
@@ -30,6 +30,7 @@ compair_core/server/deps.py
30
30
  compair_core/server/settings.py
31
31
  compair_core/server/local_model/__init__.py
32
32
  compair_core/server/local_model/app.py
33
+ compair_core/server/local_model/ocr.py
33
34
  compair_core/server/providers/__init__.py
34
35
  compair_core/server/providers/console_mailer.py
35
36
  compair_core/server/providers/contracts.py
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "compair-core"
7
- version = "0.3.14"
7
+ version = "0.4.0"
8
8
  description = "Open-source foundation of the Compair collaboration platform."
9
9
  readme = "README.md"
10
10
  license = { text = "MIT" }
@@ -1,79 +0,0 @@
1
- from __future__ import annotations
2
-
3
- import os
4
- import requests
5
- from typing import Any
6
-
7
- from .logger import log_event
8
- from .models import Document, User
9
-
10
- try:
11
- from compair_cloud.feedback import Reviewer as CloudReviewer # type: ignore
12
- from compair_cloud.feedback import get_feedback as cloud_get_feedback # type: ignore
13
- except (ImportError, ModuleNotFoundError):
14
- CloudReviewer = None # type: ignore
15
- cloud_get_feedback = None # type: ignore
16
-
17
-
18
- class Reviewer:
19
- """Edition-aware wrapper that falls back to the local feedback endpoint."""
20
-
21
- def __init__(self) -> None:
22
- self.edition = os.getenv("COMPAIR_EDITION", "core").lower()
23
- self._cloud_impl = None
24
- if self.edition == "cloud" and CloudReviewer is not None:
25
- self._cloud_impl = CloudReviewer()
26
- else:
27
- self.client = None
28
- self.model = os.getenv("COMPAIR_LOCAL_GENERATION_MODEL", "local-feedback")
29
- base_url = os.getenv("COMPAIR_LOCAL_MODEL_URL", "http://local-model:9000")
30
- route = os.getenv("COMPAIR_LOCAL_GENERATION_ROUTE", "/generate")
31
- self.endpoint = f"{base_url.rstrip('/')}{route}"
32
-
33
- @property
34
- def is_cloud(self) -> bool:
35
- return self._cloud_impl is not None
36
-
37
-
38
- def _fallback_feedback(text: str, references: list[Any]) -> str:
39
- if not references:
40
- return "NONE"
41
- top_ref = references[0]
42
- snippet = getattr(top_ref, "content", "") or ""
43
- snippet = snippet.replace("\n", " ").strip()[:200]
44
- if not snippet:
45
- return "NONE"
46
- return f"Check alignment with this reference: {snippet}"
47
-
48
-
49
- def get_feedback(
50
- reviewer: Reviewer,
51
- doc: Document,
52
- text: str,
53
- references: list[Any],
54
- user: User,
55
- ) -> str:
56
- if reviewer.is_cloud and cloud_get_feedback is not None:
57
- return cloud_get_feedback(reviewer._cloud_impl, doc, text, references, user) # type: ignore[arg-type]
58
-
59
- payload = {
60
- "document": text,
61
- "references": [getattr(ref, "content", "") for ref in references],
62
- "length_instruction": {
63
- "Brief": "1–2 short sentences",
64
- "Detailed": "A couple short paragraphs",
65
- "Verbose": "As thorough as reasonably possible without repeating information",
66
- }.get(user.preferred_feedback_length, "1–2 short sentences"),
67
- }
68
-
69
- try:
70
- response = requests.post(reviewer.endpoint, json=payload, timeout=30)
71
- response.raise_for_status()
72
- data = response.json()
73
- feedback = data.get("feedback")
74
- if feedback:
75
- return feedback
76
- except Exception as exc: # pragma: no cover - network failures stay graceful
77
- log_event("local_feedback_failed", error=str(exc))
78
-
79
- return _fallback_feedback(text, references)
File without changes
File without changes