langgraph-runtime-inmem 0.15.0__py3-none-any.whl → 0.17.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -9,7 +9,7 @@ from langgraph_runtime_inmem import (
     store,
 )
 
-__version__ = "0.15.0"
+__version__ = "0.17.0"
 __all__ = [
     "ops",
     "database",
@@ -142,7 +142,9 @@ class InMemConnectionProto:
 
 
 @asynccontextmanager
-async def connect(*, __test__: bool = False) -> AsyncIterator["AsyncConnectionProto"]:
+async def connect(
+    *, supports_core_api: bool = False, __test__: bool = False
+) -> AsyncIterator["AsyncConnectionProto"]:
     yield InMemConnectionProto()
 
 
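The in-memory connect() context manager gains a keyword-only supports_core_api flag. A minimal usage sketch follows; it assumes connect() is importable from langgraph_runtime_inmem.database (the module whose hash changes in the RECORD hunk below), and, as the hunk shows, the in-memory backend still yields a plain InMemConnectionProto regardless of the flag.

import asyncio

# Assumption: connect() lives in langgraph_runtime_inmem.database.
from langgraph_runtime_inmem.database import connect


async def main() -> None:
    # New keyword-only flag in 0.17.0; in the in-memory runtime it is
    # accepted but does not change the yielded connection object.
    async with connect(supports_core_api=True) as conn:
        print(type(conn).__name__)


asyncio.run(main())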
@@ -1947,38 +1947,37 @@ class Runs(Authenticated):
         if not pending_runs and not running_runs:
             return {
                 "n_pending": 0,
-                "max_age_secs": None,
-                "med_age_secs": None,
+                "pending_runs_wait_time_max_secs": None,
+                "pending_runs_wait_time_med_secs": None,
                 "n_running": 0,
             }
 
-        # Get all creation timestamps
-        created_times = [run.get("created_at") for run in (pending_runs + running_runs)]
-        created_times = [
-            t for t in created_times if t is not None
-        ]  # Filter out None values
-
-        if not created_times:
-            return {
-                "n_pending": len(pending_runs),
-                "n_running": len(running_runs),
-                "max_age_secs": None,
-                "med_age_secs": None,
-            }
-
-        # Find oldest (max age)
-        oldest_time = min(created_times)  # Earliest timestamp = oldest run
-
-        # Find median age
-        sorted_times = sorted(created_times)
-        median_idx = len(sorted_times) // 2
-        median_time = sorted_times[median_idx]
+        now = datetime.now(UTC)
+        pending_waits: list[float] = []
+        for run in pending_runs:
+            created_at = run.get("created_at")
+            if not isinstance(created_at, datetime):
+                continue
+            if created_at.tzinfo is None:
+                created_at = created_at.replace(tzinfo=UTC)
+            pending_waits.append((now - created_at).total_seconds())
+
+        max_pending_wait = max(pending_waits) if pending_waits else None
+        if pending_waits:
+            sorted_waits = sorted(pending_waits)
+            half = len(sorted_waits) // 2
+            if len(sorted_waits) % 2 == 1:
+                med_pending_wait = sorted_waits[half]
+            else:
+                med_pending_wait = (sorted_waits[half - 1] + sorted_waits[half]) / 2
+        else:
+            med_pending_wait = None
 
         return {
             "n_pending": len(pending_runs),
             "n_running": len(running_runs),
-            "max_age_secs": oldest_time,
-            "med_age_secs": median_time,
+            "pending_runs_wait_time_max_secs": max_pending_wait,
+            "pending_runs_wait_time_med_secs": med_pending_wait,
         }
 
     @staticmethod
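This hunk replaces the raw creation timestamps previously returned as max_age_secs/med_age_secs with wait times in seconds for pending runs only, measured against a timezone-aware "now", and computes a true median (mean of the two middle values when the count is even). A standalone sketch of the same computation; the result keys and run-dict shape mirror the hunk above, while the function name and the use of timezone.utc in place of the datetime.UTC alias are assumptions.

from datetime import datetime, timezone

def pending_wait_stats(pending_runs: list[dict]) -> dict:
    # Wait time in seconds for each pending run that has a datetime created_at.
    now = datetime.now(timezone.utc)
    waits: list[float] = []
    for run in pending_runs:
        created_at = run.get("created_at")
        if not isinstance(created_at, datetime):
            continue
        if created_at.tzinfo is None:
            created_at = created_at.replace(tzinfo=timezone.utc)
        waits.append((now - created_at).total_seconds())
    if not waits:
        return {
            "pending_runs_wait_time_max_secs": None,
            "pending_runs_wait_time_med_secs": None,
        }
    waits.sort()
    half = len(waits) // 2
    # Median: middle element for odd counts, mean of the two middle ones for even.
    median = waits[half] if len(waits) % 2 else (waits[half - 1] + waits[half]) / 2
    return {
        "pending_runs_wait_time_max_secs": waits[-1],
        "pending_runs_wait_time_med_secs": median,
    }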
@@ -2120,7 +2119,6 @@ class Runs(Authenticated):
         ctx: Auth.types.BaseAuthContext | None = None,
     ) -> AsyncIterator[Run]:
         """Create a run."""
-        from langgraph_api.config import FF_RICH_THREADS
         from langgraph_api.schema import Run, Thread
 
         assistant_id = _ensure_uuid(assistant_id)
@@ -2167,49 +2165,35 @@ class Runs(Authenticated):
         # Create new thread
         if thread_id is None:
             thread_id = uuid4()
-            if FF_RICH_THREADS:
-                thread = Thread(
-                    thread_id=thread_id,
-                    status="busy",
-                    metadata={
-                        "graph_id": assistant["graph_id"],
-                        "assistant_id": str(assistant_id),
-                        **(config.get("metadata") or {}),
-                        **metadata,
-                    },
-                    config=Runs._merge_jsonb(
-                        assistant["config"],
-                        config,
-                        {
-                            "configurable": Runs._merge_jsonb(
-                                Runs._get_configurable(assistant["config"]),
-                            )
-                        },
-                    ),
-                    created_at=datetime.now(UTC),
-                    updated_at=datetime.now(UTC),
-                    values=b"",
-                )
-            else:
-                thread = Thread(
-                    thread_id=thread_id,
-                    status="idle",
-                    metadata={
-                        "graph_id": assistant["graph_id"],
-                        "assistant_id": str(assistant_id),
-                        **(config.get("metadata") or {}),
-                        **metadata,
+
+            thread = Thread(
+                thread_id=thread_id,
+                status="busy",
+                metadata={
+                    "graph_id": assistant["graph_id"],
+                    "assistant_id": str(assistant_id),
+                    **(config.get("metadata") or {}),
+                    **metadata,
+                },
+                config=Runs._merge_jsonb(
+                    assistant["config"],
+                    config,
+                    {
+                        "configurable": Runs._merge_jsonb(
+                            Runs._get_configurable(assistant["config"]),
+                        )
                     },
-                    config={},
-                    created_at=datetime.now(UTC),
-                    updated_at=datetime.now(UTC),
-                    values=b"",
-                )
+                ),
+                created_at=datetime.now(UTC),
+                updated_at=datetime.now(UTC),
+                values=b"",
+            )
+
             await logger.ainfo("Creating thread", thread_id=thread_id)
             conn.store["threads"].append(thread)
         elif existing_thread:
             # Update existing thread
-            if FF_RICH_THREADS and existing_thread["status"] != "busy":
+            if existing_thread["status"] != "busy":
                 existing_thread["status"] = "busy"
                 existing_thread["metadata"] = Runs._merge_jsonb(
                     existing_thread["metadata"],
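With the FF_RICH_THREADS feature flag gone, a new thread is always created with status "busy" and the merged assistant/run config, and an existing non-busy thread is flipped to "busy". A rough sketch of the resulting record shape; the inputs are hypothetical, and the Runs._merge_jsonb call is simplified to a shallow dict merge, so only the field names, the unconditional "busy" status, and the empty values=b"" come from the hunk above.

from datetime import datetime, timezone
from uuid import uuid4

# Hypothetical inputs standing in for the assistant, run config, and metadata.
assistant = {"graph_id": "agent", "assistant_id": str(uuid4()), "config": {}}
config: dict = {}
metadata: dict = {"user": "demo"}

thread = {
    "thread_id": uuid4(),
    "status": "busy",  # no longer gated on FF_RICH_THREADS
    "metadata": {
        "graph_id": assistant["graph_id"],
        "assistant_id": assistant["assistant_id"],
        **(config.get("metadata") or {}),
        **metadata,
    },
    # Runs._merge_jsonb simplified to a shallow merge for illustration.
    "config": {**assistant["config"], **config},
    "created_at": datetime.now(timezone.utc),
    "updated_at": datetime.now(timezone.utc),
    "values": b"",
}
print(thread["status"], sorted(thread["metadata"]))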
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: langgraph-runtime-inmem
-Version: 0.15.0
+Version: 0.17.0
 Summary: Inmem implementation for the LangGraph API server.
 Author-email: Will Fu-Hinthorn <will@langchain.dev>
 License: Elastic-2.0
@@ -1,13 +1,13 @@
-langgraph_runtime_inmem/__init__.py,sha256=PQldy7XpkzOuPFy0MncSqMKD4D6bNmAChE5P1uwYzas,311
+langgraph_runtime_inmem/__init__.py,sha256=JHK5o7GR1hY1R4dqdV2cSGwXFa-JFQeQ6RB3e8-9M6U,311
 langgraph_runtime_inmem/checkpoint.py,sha256=nc1G8DqVdIu-ibjKTqXfbPfMbAsKjPObKqegrSzo6Po,4432
-langgraph_runtime_inmem/database.py,sha256=QgaA_WQo1IY6QioYd8r-e6-0B0rnC5anS0muIEJWby0,6364
+langgraph_runtime_inmem/database.py,sha256=g2XYa5KN-T8MbDeFH9sfUApDG62Wp4BACumVnDtxYhI,6403
 langgraph_runtime_inmem/inmem_stream.py,sha256=PFLWbsxU8RqbT5mYJgNk6v5q6TWJRIY1hkZWhJF8nkI,9094
 langgraph_runtime_inmem/lifespan.py,sha256=fCoYcN_h0cxmj6-muC-f0csPdSpyepZuGRD1yBrq4XM,4755
 langgraph_runtime_inmem/metrics.py,sha256=_YiSkLnhQvHpMktk38SZo0abyL-5GihfVAtBo0-lFIc,403
-langgraph_runtime_inmem/ops.py,sha256=l27lXDzlx6bM--j-mKS_fXi2SEvKBbLrU3aMcQ-QwYc,108630
+langgraph_runtime_inmem/ops.py,sha256=niaZnC5Q7Mk6A1Edr9bC-TusBlvqMx_jvf2-md8aEAw,108036
 langgraph_runtime_inmem/queue.py,sha256=yV781CDjlsKNvkJ3puHyiHNnJpIIdB1G_gTY9dg6mys,9994
 langgraph_runtime_inmem/retry.py,sha256=XmldOP4e_H5s264CagJRVnQMDFcEJR_dldVR1Hm5XvM,763
 langgraph_runtime_inmem/store.py,sha256=rTfL1JJvd-j4xjTrL8qDcynaWF6gUJ9-GDVwH0NBD_I,3506
-langgraph_runtime_inmem-0.15.0.dist-info/METADATA,sha256=ITNngpey-5LmTzIg1XcFrIp5vDmq8jzAVsmtkZrfjqY,570
-langgraph_runtime_inmem-0.15.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-langgraph_runtime_inmem-0.15.0.dist-info/RECORD,,
+langgraph_runtime_inmem-0.17.0.dist-info/METADATA,sha256=u8rDo2LJnO4kUWjhiaU2ZGAYljKVO1JjqhdYHBJZE2c,570
+langgraph_runtime_inmem-0.17.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+langgraph_runtime_inmem-0.17.0.dist-info/RECORD,,