lalph 0.3.94 → 0.3.96

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.mjs CHANGED
@@ -28,9 +28,9 @@ import { StringDecoder } from "node:string_decoder";
28
28
  import TreeSitter from "tree-sitter";
29
29
  import TreeSitterJavaScript from "tree-sitter-javascript";
30
30
  import TreeSitterTypeScript from "tree-sitter-typescript";
31
- import Sqlite from "better-sqlite3";
32
31
  import process$1 from "node:process";
33
32
  import tty from "node:tty";
33
+ import Sqlite from "better-sqlite3";
34
34
  //#region \0rolldown/runtime.js
35
35
  var __create = Object.create;
36
36
  var __defProp = Object.defineProperty;
@@ -178282,7 +178282,10 @@ var TokenManager$1 = class extends Service$1()("lalph/Linear/TokenManager", { ma
178282
178282
  })), {
178283
178283
  disableListenLog: true,
178284
178284
  disableLogger: true
178285
- }).pipe(provide$3(layer$15(createServer, { port: 34338 })), build, orDie$2);
178285
+ }).pipe(provide$3(layer$15(createServer, {
178286
+ port: 34338,
178287
+ disablePreemptiveShutdown: true
178288
+ })), build, orDie$2);
178286
178289
  const redirectUri = `http://localhost:34338/callback`;
178287
178290
  const verifier = crypto.randomUUID();
178288
178291
  const verifierSha256 = yield* promise(() => crypto.subtle.digest("SHA-256", new TextEncoder().encode(verifier)));
@@ -181680,7 +181683,7 @@ var ji = Bt, Ii = Object.assign(Qe, { sync: Bt }), zi = Ut, Bi = Object.assign(e
181680
181683
  });
181681
181684
  Ze.glob = Ze;
181682
181685
  //#endregion
181683
- //#region node_modules/.pnpm/clanka@0.2.21_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_09288369fd42a7055df614a5b619d2f6/node_modules/clanka/dist/ApplyPatch.js
181686
+ //#region node_modules/.pnpm/clanka@0.2.25_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_7e66a3c8cbea5282674885bea99d757a/node_modules/clanka/dist/ApplyPatch.js
181684
181687
  /**
181685
181688
  * @since 1.0.0
181686
181689
  */
@@ -196553,7 +196556,7 @@ var StreamableHTTPClientTransport = class {
196553
196556
  }
196554
196557
  };
196555
196558
  //#endregion
196556
- //#region node_modules/.pnpm/clanka@0.2.21_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_09288369fd42a7055df614a5b619d2f6/node_modules/clanka/dist/McpClient.js
196559
+ //#region node_modules/.pnpm/clanka@0.2.25_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_7e66a3c8cbea5282674885bea99d757a/node_modules/clanka/dist/McpClient.js
196557
196560
  /**
196558
196561
  * @since 1.0.0
196559
196562
  */
@@ -196598,7 +196601,7 @@ const layer$13 = effect$1(McpClient, gen(function* () {
196598
196601
  });
196599
196602
  }));
196600
196603
  //#endregion
196601
- //#region node_modules/.pnpm/clanka@0.2.21_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_09288369fd42a7055df614a5b619d2f6/node_modules/clanka/dist/ExaSearch.js
196604
+ //#region node_modules/.pnpm/clanka@0.2.25_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_7e66a3c8cbea5282674885bea99d757a/node_modules/clanka/dist/ExaSearch.js
196602
196605
  /**
196603
196606
  * @since 1.0.0
196604
196607
  */
@@ -211548,7 +211551,7 @@ var require_lib = /* @__PURE__ */ __commonJSMin$1(((exports) => {
211548
211551
  exports.impl = impl;
211549
211552
  }));
211550
211553
  //#endregion
211551
- //#region node_modules/.pnpm/clanka@0.2.21_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_09288369fd42a7055df614a5b619d2f6/node_modules/clanka/dist/WebToMarkdown.js
211554
+ //#region node_modules/.pnpm/clanka@0.2.25_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_7e66a3c8cbea5282674885bea99d757a/node_modules/clanka/dist/WebToMarkdown.js
211552
211555
  /**
211553
211556
  * @since 1.0.0
211554
211557
  */
@@ -214687,1385 +214690,7 @@ function isEmptyParamsRecord(indexSignature) {
214687
214690
  return indexSignature.parameter === string$3 && isNever(indexSignature.type);
214688
214691
  }
214689
214692
  //#endregion
214690
- //#region node_modules/.pnpm/effect@4.0.0-beta.36/node_modules/effect/dist/unstable/schema/VariantSchema.js
214691
- /**
214692
- * @since 4.0.0
214693
- * @category Type IDs
214694
- */
214695
- const TypeId$4 = "~effect/schema/VariantSchema";
214696
- const cacheSymbol = /* @__PURE__ */ Symbol.for(`${TypeId$4}/cache`);
214697
- /**
214698
- * @since 4.0.0
214699
- * @category guards
214700
- */
214701
- const isStruct = (u) => hasProperty(u, TypeId$4);
214702
- const FieldTypeId = "~effect/schema/VariantSchema/Field";
214703
- /**
214704
- * @since 4.0.0
214705
- * @category guards
214706
- */
214707
- const isField = (u) => hasProperty(u, FieldTypeId);
214708
- const extract$1 = /* @__PURE__ */ dual((args) => isStruct(args[0]), (self, variant, options) => {
214709
- const cache = self[cacheSymbol] ?? (self[cacheSymbol] = {});
214710
- const cacheKey = options?.isDefault === true ? "__default" : variant;
214711
- if (cache[cacheKey] !== void 0) return cache[cacheKey];
214712
- const fields = {};
214713
- for (const key of Object.keys(self[TypeId$4])) {
214714
- const value = self[TypeId$4][key];
214715
- if ("~effect/schema/VariantSchema" in value) if (options?.isDefault === true && isSchema(value)) fields[key] = value;
214716
- else fields[key] = extract$1(value, variant);
214717
- else if (FieldTypeId in value) {
214718
- if (variant in value.schemas) fields[key] = value.schemas[variant];
214719
- } else fields[key] = value;
214720
- }
214721
- return cache[cacheKey] = Struct$2(fields);
214722
- });
214723
- /**
214724
- * @since 4.0.0
214725
- * @category constructors
214726
- */
214727
- const make$12 = (options) => {
214728
- function Class(identifier) {
214729
- return function(fields, annotations) {
214730
- const variantStruct = Struct$1(fields);
214731
- const schema = extract$1(variantStruct, options.defaultVariant, { isDefault: true });
214732
- const SClass = Class$2;
214733
- class Base extends SClass(identifier)(schema.fields, annotations) {
214734
- static [TypeId$4] = fields;
214735
- }
214736
- for (const variant of options.variants) Object.defineProperty(Base, variant, { value: extract$1(variantStruct, variant).annotate({
214737
- id: `${identifier}.${variant}`,
214738
- title: `${identifier}.${variant}`
214739
- }) });
214740
- return Base;
214741
- };
214742
- }
214743
- function FieldOnly(keys) {
214744
- return function(schema) {
214745
- const obj = {};
214746
- for (const key of keys) obj[key] = schema;
214747
- return Field$1(obj);
214748
- };
214749
- }
214750
- function FieldExcept(keys) {
214751
- return function(schema) {
214752
- const obj = {};
214753
- for (const variant of options.variants) if (!keys.includes(variant)) obj[variant] = schema;
214754
- return Field$1(obj);
214755
- };
214756
- }
214757
- function UnionVariants(members) {
214758
- return Union$1(members, options.variants);
214759
- }
214760
- return {
214761
- Struct: Struct$1,
214762
- Field: Field$1,
214763
- FieldOnly,
214764
- FieldExcept,
214765
- Class,
214766
- Union: UnionVariants,
214767
- fieldEvolve: dual(2, (self, f) => {
214768
- return Field$1(evolve((isField(self) ? self : Field$1(Object.fromEntries(options.variants.map((variant) => [variant, self])))).schemas, f));
214769
- }),
214770
- extract: dual(2, (self, variant) => extract$1(self, variant, { isDefault: variant === options.defaultVariant }))
214771
- };
214772
- };
214773
- const StructProto = { pipe() {
214774
- return pipeArguments(this, arguments);
214775
- } };
214776
- const Struct$1 = (fields) => {
214777
- const self = Object.create(StructProto);
214778
- self[TypeId$4] = fields;
214779
- return self;
214780
- };
214781
- const FieldProto = {
214782
- [FieldTypeId]: FieldTypeId,
214783
- pipe() {
214784
- return pipeArguments(this, arguments);
214785
- }
214786
- };
214787
- const Field$1 = (schemas) => {
214788
- const self = Object.create(FieldProto);
214789
- self.schemas = schemas;
214790
- return self;
214791
- };
214792
- const Union$1 = (members, variants) => {
214793
- const VariantUnion = Union$2(members.filter((member) => isSchema(member)));
214794
- for (const variant of variants) Object.defineProperty(VariantUnion, variant, { value: Union$2(members.map((member) => extract$1(member, variant))) });
214795
- return VariantUnion;
214796
- };
214797
- //#endregion
214798
- //#region node_modules/.pnpm/effect@4.0.0-beta.36/node_modules/effect/dist/unstable/schema/Model.js
214799
- const { Class, Field, FieldExcept, FieldOnly, Struct, Union, extract, fieldEvolve } = /* @__PURE__ */ make$12({
214800
- variants: [
214801
- "select",
214802
- "insert",
214803
- "update",
214804
- "json",
214805
- "jsonCreate",
214806
- "jsonUpdate"
214807
- ],
214808
- defaultVariant: "select"
214809
- });
214810
- /**
214811
- * A field that represents a column that is generated by the database.
214812
- *
214813
- * It is available for selection and update, but not for insertion.
214814
- *
214815
- * @since 4.0.0
214816
- * @category generated
214817
- */
214818
- const Generated = (schema) => Field({
214819
- select: schema,
214820
- update: schema,
214821
- json: schema
214822
- });
214823
- //#endregion
214824
- //#region node_modules/.pnpm/effect@4.0.0-beta.36/node_modules/effect/dist/unstable/sql/SqlError.js
214825
- /**
214826
- * @since 4.0.0
214827
- */
214828
- const TypeId$3 = "~effect/sql/SqlError";
214829
- /**
214830
- * @since 4.0.0
214831
- */
214832
- var SqlError = class extends TaggedErrorClass("effect/sql/SqlError")("SqlError", {
214833
- cause: Defect,
214834
- message: /* @__PURE__ */ optional$3(String$1)
214835
- }) {
214836
- /**
214837
- * @since 4.0.0
214838
- */
214839
- [TypeId$3] = TypeId$3;
214840
- };
214841
- /**
214842
- * @since 4.0.0
214843
- */
214844
- var ResultLengthMismatch = class extends TaggedErrorClass("effect/sql/ResultLengthMismatch")("ResultLengthMismatch", {
214845
- expected: Number$1,
214846
- actual: Number$1
214847
- }) {
214848
- /**
214849
- * @since 4.0.0
214850
- */
214851
- [TypeId$3] = TypeId$3;
214852
- /**
214853
- * @since 4.0.0
214854
- */
214855
- get message() {
214856
- return `Expected ${this.expected} results but got ${this.actual}`;
214857
- }
214858
- };
214859
- //#endregion
214860
- //#region node_modules/.pnpm/effect@4.0.0-beta.36/node_modules/effect/dist/unstable/sql/SqlResolver.js
214861
- /**
214862
- * @since 4.0.0
214863
- */
214864
- const SqlRequestProto = {
214865
- ...Class$3.prototype,
214866
- [symbol$5](that) {
214867
- return equals$2(this.payload, that.payload);
214868
- },
214869
- [symbol$6]() {
214870
- return hash(this.payload);
214871
- }
214872
- };
214873
- /**
214874
- * @since 4.0.0
214875
- * @category requests
214876
- */
214877
- const request$1 = function() {
214878
- if (arguments.length === 1) {
214879
- const resolver = arguments[0];
214880
- return (payload) => request$2(SqlRequest(payload), resolver);
214881
- }
214882
- return request$2(SqlRequest(arguments[0]), arguments[1]);
214883
- };
214884
- /**
214885
- * @since 4.0.0
214886
- * @category requests
214887
- */
214888
- const SqlRequest = (payload) => {
214889
- const self = Object.create(SqlRequestProto);
214890
- self.payload = payload;
214891
- return self;
214892
- };
214893
- /**
214894
- * Create a resolver for a sql query with a request schema and a result schema.
214895
- *
214896
- * The request schema is used to validate the input of the query.
214897
- * The result schema is used to validate the output of the query.
214898
- *
214899
- * Results are mapped to the requests in order, so the length of the results must match the length of the requests.
214900
- *
214901
- * @since 4.0.0
214902
- * @category resolvers
214903
- */
214904
- const ordered = (options) => {
214905
- const decodeArray = decodeUnknownEffect(Array$1(options.Result));
214906
- return makeGrouped({
214907
- key: transactionKey,
214908
- resolver: fnUntraced(function* (entries) {
214909
- const inputs = yield* partitionRequests(entries, options.Request);
214910
- const results = yield* options.execute(inputs).pipe(provideServices$2(entries[0].services));
214911
- if (results.length !== inputs.length) return yield* new ResultLengthMismatch({
214912
- expected: inputs.length,
214913
- actual: results.length
214914
- });
214915
- const decodedResults = yield* decodeArray(results).pipe(provideServices$2(entries[0].services));
214916
- for (let i = 0; i < entries.length; i++) entries[i].completeUnsafe(succeed$6(decodedResults[i]));
214917
- })
214918
- });
214919
- };
214920
- /**
214921
- * Create a resolver that resolves results by id.
214922
- *
214923
- * @since 4.0.0
214924
- * @category resolvers
214925
- */
214926
- const findById = (options) => {
214927
- const decodeResults = decodeUnknownEffect(Array$1(options.Result));
214928
- return makeGrouped({
214929
- key(entry) {
214930
- const conn = entry.services.mapUnsafe.get(TransactionConnection.key);
214931
- if (!conn) return void 0;
214932
- return byReferenceUnsafe(conn);
214933
- },
214934
- resolver: fnUntraced(function* (entries) {
214935
- const [inputs, idMap] = yield* partitionRequestsById(entries, options.Id);
214936
- const results = yield* options.execute(inputs).pipe(provideServices$2(entries[0].services));
214937
- const decodedResults = yield* decodeResults(results).pipe(provideServices$2(entries[0].services));
214938
- for (let i = 0; i < decodedResults.length; i++) {
214939
- const result = decodedResults[i];
214940
- const id = options.ResultId(result, results[i]);
214941
- const request = get$13(idMap, id);
214942
- if (request._tag === "None") continue;
214943
- remove$5(idMap, id);
214944
- request.value.completeUnsafe(succeed$6(result));
214945
- }
214946
- if (isEmpty$3(idMap)) return;
214947
- forEach$2(idMap, (request) => {
214948
- request.completeUnsafe(constNoSuchElement);
214949
- });
214950
- })
214951
- });
214952
- };
214953
- const void_ = (options) => makeGrouped({
214954
- key: transactionKey,
214955
- resolver: fnUntraced(function* (entries) {
214956
- const inputs = yield* partitionRequests(entries, options.Request);
214957
- yield* options.execute(inputs).pipe(provideServices$2(entries[0].services));
214958
- for (let i = 0; i < entries.length; i++) entries[i].completeUnsafe(void_$3);
214959
- })
214960
- });
214961
- const constNoSuchElement = /* @__PURE__ */ fail$8(/* @__PURE__ */ new NoSuchElementError());
214962
- const partitionRequests = function* (requests, schema) {
214963
- const len = requests.length;
214964
- const inputs = empty$17();
214965
- let entry;
214966
- const encode = encodeEffect(schema);
214967
- const handle = matchCauseEager({
214968
- onFailure(cause) {
214969
- entry.completeUnsafe(failCause$4(cause));
214970
- },
214971
- onSuccess(value) {
214972
- inputs.push(value);
214973
- }
214974
- });
214975
- for (let i = 0; i < len; i++) {
214976
- entry = requests[i];
214977
- yield provideServices$2(handle(encode(entry.request.payload)), entry.services);
214978
- }
214979
- return inputs;
214980
- };
214981
- const partitionRequestsById = function* (requests, schema) {
214982
- const len = requests.length;
214983
- const inputs = empty$17();
214984
- const byIdMap = empty$11();
214985
- let entry;
214986
- const encode = encodeEffect(schema);
214987
- const handle = matchCauseEager({
214988
- onFailure(cause) {
214989
- entry.completeUnsafe(failCause$4(cause));
214990
- },
214991
- onSuccess(value) {
214992
- inputs.push(value);
214993
- }
214994
- });
214995
- for (let i = 0; i < len; i++) {
214996
- entry = requests[i];
214997
- yield provideServices$2(handle(encode(entry.request.payload)), entry.services);
214998
- set$8(byIdMap, entry.request.payload, entry);
214999
- }
215000
- return [inputs, byIdMap];
215001
- };
215002
- function transactionKey(entry) {
215003
- const conn = entry.services.mapUnsafe.get(TransactionConnection.key);
215004
- if (!conn) return void 0;
215005
- return byReferenceUnsafe(conn);
215006
- }
215007
- //#endregion
215008
- //#region node_modules/.pnpm/effect@4.0.0-beta.36/node_modules/effect/dist/unstable/sql/SqlSchema.js
215009
- /**
215010
- * Run a sql query with a request schema and a result schema.
215011
- *
215012
- * @since 4.0.0
215013
- * @category constructor
215014
- */
215015
- const findAll = (options) => {
215016
- const encodeRequest = encodeEffect(options.Request);
215017
- const decode = decodeUnknownEffect(mutable(Array$1(options.Result)));
215018
- return (request) => flatMap$4(flatMap$4(encodeRequest(request), options.execute), decode);
215019
- };
215020
- //#endregion
215021
- //#region node_modules/.pnpm/effect@4.0.0-beta.36/node_modules/effect/dist/unstable/sql/SqlModel.js
215022
- /**
215023
- * Create some simple data loaders from a model.
215024
- *
215025
- * @since 4.0.0
215026
- * @category repository
215027
- */
215028
- const makeDataLoaders = (Model, options) => gen(function* () {
215029
- const sql = yield* SqlClient;
215030
- const idSchema = Model.fields[options.idColumn];
215031
- const idColumn = options.idColumn;
215032
- const setMaxBatchSize = options.maxBatchSize ? batchN(options.maxBatchSize) : identity;
215033
- const insertExecute = request$1(ordered({
215034
- Request: Model.insert,
215035
- Result: Model,
215036
- execute: (request) => sql.onDialectOrElse({
215037
- mysql: () => forEach$4(request, (request) => sql`insert into ${sql(options.tableName)} ${sql.insert(request)};
215038
- select * from ${sql(options.tableName)} where ${sql(idColumn)} = LAST_INSERT_ID();`.unprepared.pipe(map$9(([, results]) => results[0])), { concurrency: 10 }),
215039
- orElse: () => sql`insert into ${sql(options.tableName)} ${sql.insert(request).returning("*")}`
215040
- })
215041
- }).pipe(setDelay(options.window), setMaxBatchSize, withSpan(`${options.spanPrefix}.insertResolver`)));
215042
- const insert = (insert) => insertExecute(insert).pipe(catchTag$1("ResultLengthMismatch", die$2), withSpan$1(`${options.spanPrefix}.insert`, {}, { captureStackTrace: false }));
215043
- const insertVoidExecute = request$1(void_({
215044
- Request: Model.insert,
215045
- execute: (request) => sql`insert into ${sql(options.tableName)} ${sql.insert(request)}`
215046
- }).pipe(setDelay(options.window), setMaxBatchSize, withSpan(`${options.spanPrefix}.insertVoidResolver`)));
215047
- const insertVoid = (insert) => insertVoidExecute(insert).pipe(withSpan$1(`${options.spanPrefix}.insertVoid`, {}, { captureStackTrace: false }));
215048
- const findByIdExecute = request$1(findById({
215049
- Id: idSchema,
215050
- Result: Model,
215051
- ResultId(request) {
215052
- return request[idColumn];
215053
- },
215054
- execute: (ids) => sql`select * from ${sql(options.tableName)} where ${sql.in(idColumn, ids)}`
215055
- }).pipe(setDelay(options.window), setMaxBatchSize, withSpan(`${options.spanPrefix}.findByIdResolver`)));
215056
- const findById$1 = (id) => findByIdExecute(id).pipe(withSpan$1(`${options.spanPrefix}.findById`, { attributes: { id } }, { captureStackTrace: false }));
215057
- const deleteExecute = request$1(void_({
215058
- Request: idSchema,
215059
- execute: (ids) => sql`delete from ${sql(options.tableName)} where ${sql.in(idColumn, ids)}`
215060
- }).pipe(setDelay(options.window), setMaxBatchSize, withSpan(`${options.spanPrefix}.deleteResolver`)));
215061
- const delete_ = (id) => deleteExecute(id).pipe(withSpan$1(`${options.spanPrefix}.delete`, { attributes: { id } }, { captureStackTrace: false }));
215062
- return {
215063
- insert,
215064
- insertVoid,
215065
- findById: findById$1,
215066
- delete: delete_
215067
- };
215068
- });
215069
- //#endregion
215070
- //#region node_modules/.pnpm/effect@4.0.0-beta.36/node_modules/effect/dist/unstable/ai/EmbeddingModel.js
215071
- /**
215072
- * The `EmbeddingModel` module provides provider-agnostic text embedding capabilities.
215073
- *
215074
- * @example
215075
- * ```ts
215076
- * import { Effect } from "effect"
215077
- * import { EmbeddingModel } from "effect/unstable/ai"
215078
- *
215079
- * const program = Effect.gen(function*() {
215080
- * const model = yield* EmbeddingModel.EmbeddingModel
215081
- * return yield* model.embed("hello world")
215082
- * })
215083
- * ```
215084
- *
215085
- * @since 4.0.0
215086
- */
215087
- /**
215088
- * Service tag for embedding model operations.
215089
- *
215090
- * @since 4.0.0
215091
- * @category services
215092
- */
215093
- var EmbeddingModel = class extends Service$1()("effect/unstable/ai/EmbeddingModel") {};
215094
- /**
215095
- * Service tag that provides the current embedding dimensions.
215096
- *
215097
- * @since 4.0.0
215098
- * @category services
215099
- */
215100
- var Dimensions = class extends Service$1()("effect/unstable/ai/EmbeddingModel/Dimensions") {};
215101
- /**
215102
- * Token usage metadata for embedding operations.
215103
- *
215104
- * @since 4.0.0
215105
- * @category models
215106
- */
215107
- var EmbeddingUsage = class extends Class$2("effect/ai/EmbeddingModel/EmbeddingUsage")({ inputTokens: /* @__PURE__ */ UndefinedOr(Finite) }) {};
215108
- /**
215109
- * Response for a single embedding request.
215110
- *
215111
- * @since 4.0.0
215112
- * @category models
215113
- */
215114
- var EmbedResponse = class extends Class$2("effect/ai/EmbeddingModel/EmbedResponse")({ vector: /* @__PURE__ */ Array$1(Finite) }) {};
215115
- /**
215116
- * Response for multiple embeddings.
215117
- *
215118
- * @since 4.0.0
215119
- * @category models
215120
- */
215121
- var EmbedManyResponse = class extends Class$2("effect/ai/EmbeddingModel/EmbedManyResponse")({
215122
- embeddings: /* @__PURE__ */ Array$1(EmbedResponse),
215123
- usage: EmbeddingUsage
215124
- }) {};
215125
- /**
215126
- * Tagged request used by request resolvers for embedding operations.
215127
- *
215128
- * @since 4.0.0
215129
- * @category constructors
215130
- */
215131
- var EmbeddingRequest = class extends TaggedClass$1("EmbeddingRequest") {};
215132
- const invalidProviderResponse = (description) => make$15({
215133
- module: "EmbeddingModel",
215134
- method: "embedMany",
215135
- reason: new InvalidOutputError({ description })
215136
- });
215137
- /**
215138
- * Creates an EmbeddingModel service from a provider embedMany implementation.
215139
- *
215140
- * @since 4.0.0
215141
- * @category constructors
215142
- */
215143
- const make$11 = /* @__PURE__ */ fnUntraced(function* (params) {
215144
- const resolver = make$47((entries) => flatMap$4(params.embedMany({ inputs: entries.map((entry) => entry.request.input) }), (response) => map$9(mapProviderResults(entries.length, response.results), (embeddings) => {
215145
- for (let i = 0; i < entries.length; i++) entries[i].completeUnsafe(succeed$6(embeddings[i]));
215146
- }))).pipe(withSpan("EmbeddingModel.resolver"));
215147
- return EmbeddingModel.of({
215148
- resolver,
215149
- embed: (input) => request$2(new EmbeddingRequest({ input }), resolver).pipe(withSpan$1("EmbeddingModel.embed")),
215150
- embedMany: (input) => (input.length === 0 ? succeed$3(new EmbedManyResponse({
215151
- embeddings: [],
215152
- usage: new EmbeddingUsage({ inputTokens: void 0 })
215153
- })) : params.embedMany({ inputs: input }).pipe(flatMap$4((response) => mapProviderResults(input.length, response.results).pipe(map$9((embeddings) => new EmbedManyResponse({
215154
- embeddings,
215155
- usage: new EmbeddingUsage({ inputTokens: response.usage.inputTokens })
215156
- })))))).pipe(withSpan$1("EmbeddingModel.embedMany"))
215157
- });
215158
- });
215159
- const mapProviderResults = (inputLength, results) => {
215160
- const embeddings = new Array(inputLength);
215161
- if (results.length !== inputLength) return fail$6(invalidProviderResponse(`Provider returned ${results.length} embeddings but expected ${inputLength}`));
215162
- for (let i = 0; i < results.length; i++) {
215163
- const vector = results[i];
215164
- embeddings[i] = new EmbedResponse({ vector });
215165
- }
215166
- return succeed$3(embeddings);
215167
- };
215168
- //#endregion
215169
- //#region node_modules/.pnpm/clanka@0.2.21_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_09288369fd42a7055df614a5b619d2f6/node_modules/clanka/dist/ChunkRepo.js
215170
- /**
215171
- * @since 1.0.0
215172
- * @category Models
215173
- */
215174
- const ChunkId = Number$1.pipe(brand("ChunkRepo/ChunkId"));
215175
- /**
215176
- * @since 1.0.0
215177
- * @category Models
215178
- */
215179
- const SyncId = String$1.pipe(brand("ChunkRepo/SyncId"));
215180
- /**
215181
- * @since 1.0.0
215182
- * @category Models
215183
- */
215184
- const Float32ArraySchema = instanceOf(Float32Array);
215185
- /**
215186
- * @since 1.0.0
215187
- * @category Models
215188
- */
215189
- const Float32ArrayFromArray = Array$1(Number$1).pipe(decodeTo(Float32ArraySchema, transform$3({
215190
- decode: (arr) => new Float32Array(arr),
215191
- encode: (array) => Array.from(array)
215192
- })));
215193
- /**
215194
- * @since 1.0.0
215195
- * @category Models
215196
- */
215197
- const Float32ArrayField = Field({
215198
- insert: Float32ArraySchema,
215199
- update: Float32ArraySchema,
215200
- jsonCreate: Float32ArrayFromArray,
215201
- jsonUpdate: Float32ArrayFromArray
215202
- });
215203
- /**
215204
- * @since 1.0.0
215205
- * @category Models
215206
- */
215207
- var Chunk = class extends Class("Chunk")({
215208
- id: Generated(ChunkId),
215209
- path: String$1,
215210
- content: String$1,
215211
- hash: String$1,
215212
- vector: Float32ArrayField,
215213
- syncId: SyncId
215214
- }) {};
215215
- /**
215216
- * @since 1.0.0
215217
- * @category Services
215218
- */
215219
- var ChunkRepo = class extends Service$1()("clanka/ChunkRepo") {};
215220
- /**
215221
- * @since 1.0.0
215222
- * @category Errors
215223
- */
215224
- var ChunkRepoError = class extends TaggedErrorClass()("ChunkRepoError", { reason: Union$2([SqlError]) }) {
215225
- cause = this.reason;
215226
- message = this.reason.message;
215227
- };
215228
- /**
215229
- * @since 1.0.0
215230
- * @category Layers
215231
- */
215232
- const layer$10 = effect$1(ChunkRepo, gen(function* () {
215233
- const sql = yield* SqlClient;
215234
- const dimensions = yield* Dimensions;
215235
- const loaders = yield* makeDataLoaders(Chunk, {
215236
- tableName: "chunks",
215237
- idColumn: "id",
215238
- window: 10,
215239
- spanPrefix: "ChunkRepo"
215240
- });
215241
- let needsQuantization = true;
215242
- const maybeQuantize = gen(function* () {
215243
- if (!needsQuantization) return;
215244
- needsQuantization = false;
215245
- yield* sql`select vector_init('chunks', 'vector', 'type=FLOAT32,dimension=${sql.literal(String(dimensions))}')`;
215246
- yield* sql`select vector_quantize('chunks', 'vector')`;
215247
- }).pipe(mapError$2((reason) => new ChunkRepoError({ reason })));
215248
- yield* forkScoped(maybeQuantize);
215249
- const search = findAll({
215250
- Request: Struct$2({
215251
- vector: Float32ArraySchema,
215252
- limit: Number$1
215253
- }),
215254
- Result: Chunk,
215255
- execute: ({ vector, limit }) => sql`
215256
- select chunks.id, chunks.path, chunks.content, chunks.hash, chunks.syncId
215257
- from chunks
215258
- JOIN vector_quantize_scan('chunks', 'vector', ${vector}, CAST(${limit} AS INTEGER)) AS v
215259
- ON chunks.id = v.rowid
215260
- `
215261
- });
215262
- const exists = findById({
215263
- Id: String$1,
215264
- Result: Struct$2({
215265
- id: ChunkId,
215266
- hash: String$1
215267
- }),
215268
- ResultId(result) {
215269
- return result.hash;
215270
- },
215271
- execute: (hashes) => sql`select id, hash from chunks where ${sql.in("hash", hashes)}`
215272
- }).pipe(setDelay(5));
215273
- return ChunkRepo.of({
215274
- insert: (insert) => {
215275
- needsQuantization = true;
215276
- return loaders.insert(insert).pipe(catchTags$1({
215277
- SqlError: (reason) => fail$6(new ChunkRepoError({ reason })),
215278
- SchemaError: die$2
215279
- }));
215280
- },
215281
- findById: (id) => loaders.findById(id).pipe(catchTags$1({ SchemaError: die$2 })),
215282
- exists: (hash) => request$1(hash, exists).pipe(map$9((result) => result.id), catchNoSuchElement, catchTags$1({
215283
- SqlError: (reason) => fail$6(new ChunkRepoError({ reason })),
215284
- SchemaError: die$2
215285
- })),
215286
- search: fn("ChunkRepo.search")(function* (options) {
215287
- yield* maybeQuantize;
215288
- return yield* search(options).pipe(catchTags$1({
215289
- SqlError: (reason) => fail$6(new ChunkRepoError({ reason })),
215290
- SchemaError: die$2
215291
- }));
215292
- }),
215293
- quantize: maybeQuantize,
215294
- setSyncId: (chunkId, syncId) => sql`update chunks set syncId = ${syncId} where id = ${chunkId}`.pipe(mapError$2((reason) => new ChunkRepoError({ reason }))),
215295
- deleteByPath: (path) => sql`delete from chunks where path = ${path}`.pipe(mapError$2((reason) => new ChunkRepoError({ reason }))),
215296
- deleteForSyncId: (syncId) => sql`delete from chunks where syncId != ${syncId}`.pipe(mapError$2((reason) => new ChunkRepoError({ reason })))
215297
- });
215298
- }));
215299
- //#endregion
215300
- //#region node_modules/.pnpm/clanka@0.2.21_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_09288369fd42a7055df614a5b619d2f6/node_modules/clanka/dist/CodeChunker.js
215301
- /**
215302
- * @since 1.0.0
215303
- */
215304
- /**
215305
- * @since 1.0.0
215306
- * @category Services
215307
- */
215308
- var CodeChunker = class extends Service$1()("clanka/CodeChunker") {};
215309
- const sourceExtensions = new Set([
215310
- "c",
215311
- "cc",
215312
- "cpp",
215313
- "cs",
215314
- "css",
215315
- "cts",
215316
- "cxx",
215317
- "go",
215318
- "gql",
215319
- "graphql",
215320
- "h",
215321
- "hpp",
215322
- "html",
215323
- "ini",
215324
- "java",
215325
- "js",
215326
- "jsx",
215327
- "kt",
215328
- "kts",
215329
- "less",
215330
- "lua",
215331
- "mjs",
215332
- "mts",
215333
- "php",
215334
- "py",
215335
- "rb",
215336
- "rs",
215337
- "sass",
215338
- "scala",
215339
- "scss",
215340
- "sh",
215341
- "sql",
215342
- "svelte",
215343
- "swift",
215344
- "ts",
215345
- "tsx",
215346
- "vue",
215347
- "xml",
215348
- "zsh"
215349
- ]);
215350
- const documentationExtensions = new Set([
215351
- "adoc",
215352
- "asciidoc",
215353
- "md",
215354
- "mdx",
215355
- "rst",
215356
- "txt"
215357
- ]);
215358
- const ignoredDirectories = new Set([
215359
- ".git",
215360
- ".next",
215361
- ".nuxt",
215362
- ".svelte-kit",
215363
- ".turbo",
215364
- "build",
215365
- "coverage",
215366
- "dist",
215367
- "node_modules",
215368
- "target"
215369
- ]);
215370
- const normalizePath$1 = (path) => path.replace(/\\/g, "/");
215371
- const normalizeText = (content) => content.replace(/\r\n/g, "\n").replace(/\r/g, "\n");
215372
- const meaningfulLinePattern = /[^\s\p{P}]/u;
215373
- const isMeaningfulLine = (line) => meaningfulLinePattern.test(line);
215374
- const languageByExtension = new Map([
215375
- ["js", TreeSitterJavaScript],
215376
- ["jsx", TreeSitterJavaScript],
215377
- ["ts", TreeSitterTypeScript.typescript],
215378
- ["tsx", TreeSitterTypeScript.tsx]
215379
- ]);
215380
- /**
215381
- * @since 1.0.0
215382
- * @category Predicates
215383
- */
215384
- const isProbablyMinified = (content) => {
215385
- const normalized = normalizeText(content);
215386
- if (normalized.length < 2e3) return false;
215387
- const lines = normalized.split("\n");
215388
- if (lines.length <= 2) return true;
215389
- let longLines = 0;
215390
- for (const line of lines) if (line.length >= 300) longLines++;
215391
- return lines.length <= 20 && longLines / lines.length >= .8;
215392
- };
215393
- /**
215394
- * @since 1.0.0
215395
- * @category Predicates
215396
- */
215397
- const isMeaningfulFile = (path) => {
215398
- const parts = normalizePath$1(path).toLowerCase().split("/");
215399
- const fileName = parts.at(-1);
215400
- if (fileName === void 0 || fileName.length === 0) return false;
215401
- if (parts.some((part) => ignoredDirectories.has(part))) return false;
215402
- if (/\.min\.(?:css|js)$/i.test(fileName)) return false;
215403
- const extensionIndex = fileName.lastIndexOf(".");
215404
- if (extensionIndex === -1) return false;
215405
- const extension = fileName.slice(extensionIndex + 1);
215406
- return sourceExtensions.has(extension) || documentationExtensions.has(extension);
215407
- };
215408
- const resolveChunkSettings = (options) => {
215409
- const chunkSize = Math.max(1, options.chunkSize);
215410
- return {
215411
- chunkSize,
215412
- chunkOverlap: Math.max(0, Math.min(chunkSize - 1, options.chunkOverlap)),
215413
- chunkMaxCharacters: options.chunkMaxCharacters !== void 0 && Number.isFinite(options.chunkMaxCharacters) ? Math.max(1, Math.floor(options.chunkMaxCharacters)) : Number.POSITIVE_INFINITY
215414
- };
215415
- };
215416
- const getPathExtension = (path) => {
215417
- const fileName = path.split("/").at(-1);
215418
- if (fileName === void 0) return;
215419
- const extensionIndex = fileName.lastIndexOf(".");
215420
- if (extensionIndex === -1) return;
215421
- return fileName.slice(extensionIndex + 1).toLowerCase();
215422
- };
215423
- const resolveAstLanguage = (path) => {
215424
- const extension = getPathExtension(path);
215425
- if (extension === void 0) return;
215426
- return languageByExtension.get(extension);
215427
- };
215428
- const lineRangeFromNode = (node) => {
215429
- const startLine = node.startPosition.row + 1;
215430
- return {
215431
- startLine,
215432
- endLine: Math.max(startLine, node.endPosition.row + 1)
215433
- };
215434
- };
215435
- const hasOnlyWhitespaceLines = (lines, startLine, endLine) => {
215436
- if (startLine > endLine) return true;
215437
- for (let lineIndex = startLine; lineIndex <= endLine; lineIndex++) if ((lines[lineIndex - 1] ?? "").trim().length > 0) return false;
215438
- return true;
215439
- };
215440
- const lineRangeWithLeadingComments = (node, siblings, nodeIndex, lines) => {
215441
- const baseRange = lineRangeFromNode(node);
215442
- let startLine = baseRange.startLine;
215443
- for (let index = nodeIndex - 1; index >= 0; index--) {
215444
- const sibling = siblings[index];
215445
- if (sibling.type !== "comment") break;
215446
- const commentRange = lineRangeFromNode(sibling);
215447
- if (!hasOnlyWhitespaceLines(lines, commentRange.endLine + 1, startLine - 1)) break;
215448
- startLine = commentRange.startLine;
215449
- }
215450
- return {
215451
- startLine,
215452
- endLine: baseRange.endLine
215453
- };
215454
- };
215455
- const normalizeLineRange = (range, lineCount) => {
215456
- const startLine = Math.max(1, Math.min(lineCount, range.startLine));
215457
- const endLine = Math.max(1, Math.min(lineCount, range.endLine));
215458
- if (endLine < startLine) return;
215459
- return {
215460
- startLine,
215461
- endLine
215462
- };
215463
- };
215464
- const lineLengthPrefixSums = (lines) => {
215465
- const sums = [0];
215466
- for (let index = 0; index < lines.length; index++) sums.push(sums[index] + lines[index].length);
215467
- return sums;
215468
- };
215469
- const lineRangeCharacterLength = (prefixSums, range) => prefixSums[range.endLine] - prefixSums[range.startLine - 1] + (range.endLine - range.startLine);
215470
- const resolveSegmentEndLine = (options) => {
215471
- if (options.settings.chunkMaxCharacters === Number.POSITIVE_INFINITY) return options.maxEndLine;
215472
- let endLine = options.maxEndLine;
215473
- while (endLine > options.startLine && lineRangeCharacterLength(options.prefixSums, {
215474
- startLine: options.startLine,
215475
- endLine
215476
- }) > options.settings.chunkMaxCharacters) endLine--;
215477
- return endLine;
215478
- };
215479
- const splitRange = (range, settings, prefixSums) => {
215480
- if (range.endLine - range.startLine + 1 <= settings.chunkSize && lineRangeCharacterLength(prefixSums, range) <= settings.chunkMaxCharacters) return [range];
215481
- const out = [];
215482
- for (let startLine = range.startLine; startLine <= range.endLine;) {
215483
- const maxEndLine = Math.min(range.endLine, startLine + settings.chunkSize - 1);
215484
- const endLine = resolveSegmentEndLine({
215485
- startLine,
215486
- maxEndLine,
215487
- settings,
215488
- prefixSums
215489
- });
215490
- out.push({
215491
- startLine,
215492
- endLine
215493
- });
215494
- if (endLine >= range.endLine) break;
215495
- startLine = Math.max(startLine + 1, endLine - settings.chunkOverlap + 1);
215496
- }
215497
- return out;
215498
- };
215499
- const nodeText = (node) => {
215500
- if (node === null) return;
215501
- const value = node.text.trim().replace(/\s+/g, " ");
215502
- return value.length === 0 ? void 0 : value;
215503
- };
215504
- const nodeFieldText = (node, fieldName) => nodeText(node.childForFieldName(fieldName));
215505
- const isNamespaceNode = (node) => node.type === "internal_module" || node.type === "module";
215506
- const unwrapDeclarationNode = (node) => {
215507
- let current = node;
215508
- while (true) {
215509
- if (current.type === "export_statement") {
215510
- const declaration = current.childForFieldName("declaration") ?? current.namedChildren[0];
215511
- if (declaration === void 0) return current;
215512
- current = declaration;
215513
- continue;
215514
- }
215515
- if (current.type === "ambient_declaration") {
215516
- const declaration = current.namedChildren.find((child) => child.type.endsWith("_declaration") || isNamespaceNode(child));
215517
- if (declaration === void 0) return current;
215518
- current = declaration;
215519
- continue;
215520
- }
215521
- return current;
215522
- }
215523
- };
215524
- const variableDeclarators = (node) => node.namedChildren.filter((child) => child.type === "variable_declarator");
215525
- const variableTypeFromDeclarator = (node) => {
215526
- const value = node.childForFieldName("value");
215527
- if (value !== null && value.type.includes("function")) return "function";
215528
- return "variable";
215529
- };
215530
- const variableTypeFromDeclaration = (node) => {
215531
- if (variableDeclarators(node).some((declarator) => variableTypeFromDeclarator(declarator) === "function")) return "function";
215532
- return "variable";
215533
- };
215534
- const chunkTypeFromNode = (node) => {
215535
- switch (node.type) {
215536
- case "class_declaration": return "class";
215537
- case "enum_declaration": return "enum";
215538
- case "function_declaration":
215539
- case "generator_function_declaration": return "function";
215540
- case "internal_module":
215541
- case "module": return "namespace";
215542
- case "interface_declaration": return "interface";
215543
- case "generator_method_definition":
215544
- case "method_definition": return "method";
215545
- case "type_alias_declaration": return "type-alias";
215546
- case "lexical_declaration":
215547
- case "variable_declaration": return variableTypeFromDeclaration(node);
215548
- case "variable_declarator": return variableTypeFromDeclarator(node);
215549
- default: return;
215550
- }
215551
- };
215552
- const variableNamesFromDeclaration = (node) => {
215553
- const names = variableDeclarators(node).map((declarator) => nodeFieldText(declarator, "name")).filter((name) => name !== void 0);
215554
- if (names.length === 0) return;
215555
- return names.join(", ");
215556
- };
215557
- const nameFromNode = (node) => {
215558
- switch (node.type) {
215559
- case "class_declaration":
215560
- case "enum_declaration":
215561
- case "function_declaration":
215562
- case "generator_function_declaration":
215563
- case "internal_module":
215564
- case "interface_declaration":
215565
- case "module":
215566
- case "generator_method_definition":
215567
- case "method_definition":
215568
- case "type_alias_declaration":
215569
- case "variable_declarator": return nodeFieldText(node, "name");
215570
- case "lexical_declaration":
215571
- case "variable_declaration": return variableNamesFromDeclaration(node);
215572
- default: return;
215573
- }
215574
- };
215575
- const formatParent = (type, name) => {
215576
- if (type === void 0 && name === void 0) return;
215577
- if (type === void 0) return name;
215578
- if (name === void 0) return type;
215579
- return type + " " + name;
215580
- };
215581
- const collectClassMethodRanges = (classNode, parent, lines) => {
215582
- const body = classNode.childForFieldName("body");
215583
- if (body === null) return [];
215584
- const out = [];
215585
- for (let index = 0; index < body.namedChildren.length; index++) {
215586
- const child = body.namedChildren[index];
215587
- if (!child.type.includes("method")) continue;
215588
- out.push({
215589
- ...lineRangeWithLeadingComments(child, body.namedChildren, index, lines),
215590
- name: nameFromNode(child),
215591
- type: chunkTypeFromNode(child),
215592
- parent
215593
- });
215594
- }
215595
- return out;
215596
- };
215597
- const collectAstRanges = (path, content, lines) => {
215598
- const language = resolveAstLanguage(path);
215599
- if (language === void 0) return [];
215600
- try {
215601
- const parser = new TreeSitter();
215602
- parser.setLanguage(language);
215603
- const tree = parser.parse(content, void 0, { bufferSize: 1024 * 1024 });
215604
- const collectDeclarationRanges = (siblings, parent) => {
215605
- const out = [];
215606
- for (let index = 0; index < siblings.length; index++) {
215607
- const sibling = siblings[index];
215608
- if (sibling.type === "comment" || sibling.type.includes("import")) continue;
215609
- const declarationNode = unwrapDeclarationNode(sibling);
215610
- const type = chunkTypeFromNode(declarationNode);
215611
- const name = nameFromNode(declarationNode);
215612
- if (type === void 0 && name === void 0) continue;
215613
- out.push({
215614
- ...lineRangeWithLeadingComments(sibling, siblings, index, lines),
215615
- name,
215616
- type,
215617
- parent
215618
- });
215619
- if (declarationNode.type === "class_declaration") out.push(...collectClassMethodRanges(declarationNode, formatParent(type, name), lines));
215620
- if (isNamespaceNode(declarationNode)) {
215621
- const body = declarationNode.childForFieldName("body");
215622
- if (body !== null) out.push(...collectDeclarationRanges(body.namedChildren, formatParent(type, name)));
215623
- }
215624
- }
215625
- return out;
215626
- };
215627
- return collectDeclarationRanges(tree.rootNode.namedChildren, void 0);
215628
- } catch {
215629
- return [];
215630
- }
215631
- };
215632
- const chunksFromRanges = (path, lines, ranges, settings) => {
215633
- const hasMethodChildRange = (classRange) => {
215634
- const parent = formatParent("class", classRange.name);
215635
- return ranges.some((range) => range.type === "method" && range.parent === parent && range.startLine >= classRange.startLine && range.endLine <= classRange.endLine);
215636
- };
215637
- const out = [];
215638
- const seen = /* @__PURE__ */ new Set();
215639
- const prefixSums = lineLengthPrefixSums(lines);
215640
- for (const range of ranges) {
215641
- const normalizedRange = normalizeLineRange(range, lines.length);
215642
- if (normalizedRange === void 0) continue;
215643
- const allSegments = splitRange(normalizedRange, settings, prefixSums);
215644
- const segments = range.type === "class" && allSegments.length > 1 && hasMethodChildRange({
215645
- ...normalizedRange,
215646
- name: range.name
215647
- }) ? [allSegments[0]] : allSegments;
215648
- for (const segment of segments) {
215649
- const key = String(segment.startLine) + ":" + String(segment.endLine) + ":" + (range.name ?? "") + ":" + (range.type ?? "") + ":" + (range.parent ?? "");
215650
- if (seen.has(key)) continue;
215651
- seen.add(key);
215652
- const chunkLines = lines.slice(segment.startLine - 1, segment.endLine);
215653
- if (!chunkLines.some(isMeaningfulLine)) continue;
215654
- out.push({
215655
- path,
215656
- startLine: segment.startLine,
215657
- endLine: segment.endLine,
215658
- name: range.name,
215659
- type: range.type,
215660
- parent: range.parent,
215661
- content: chunkLines.join("\n")
215662
- });
215663
- }
215664
- }
215665
- return out.toSorted((left, right) => left.startLine - right.startLine || left.endLine - right.endLine || (left.name ?? "").localeCompare(right.name ?? ""));
215666
- };
215667
- const chunkWithLineWindows = (path, lines, settings) => {
215668
- const out = [];
215669
- const prefixSums = lineLengthPrefixSums(lines);
215670
- for (let index = 0; index < lines.length;) {
215671
- if (!isMeaningfulLine(lines[index])) {
215672
- index++;
215673
- continue;
215674
- }
215675
- const startLine = index + 1;
215676
- const endLine = resolveSegmentEndLine({
215677
- startLine,
215678
- maxEndLine: Math.min(lines.length, startLine + settings.chunkSize - 1),
215679
- settings,
215680
- prefixSums
215681
- });
215682
- const chunkLines = lines.slice(startLine - 1, endLine);
215683
- out.push({
215684
- path,
215685
- startLine,
215686
- endLine,
215687
- name: void 0,
215688
- type: void 0,
215689
- parent: void 0,
215690
- content: chunkLines.join("\n")
215691
- });
215692
- if (endLine >= lines.length) break;
215693
- index = Math.max(startLine + 1, endLine - settings.chunkOverlap + 1) - 1;
215694
- }
215695
- return out;
215696
- };
215697
- /**
215698
- * @since 1.0.0
215699
- * @category Constructors
215700
- */
215701
- const chunkFileContent = (path, content, options) => {
215702
- if (content.trim().length === 0 || isProbablyMinified(content)) return [];
215703
- const normalizedPath = normalizePath$1(path);
215704
- const normalizedContent = normalizeText(content);
215705
- const lines = normalizedContent.split("\n");
215706
- if (lines.at(-1) === "") lines.pop();
215707
- if (lines.length === 0) return [];
215708
- const settings = resolveChunkSettings(options);
215709
- const astRanges = collectAstRanges(normalizedPath, normalizedContent, lines);
215710
- if (astRanges.length > 0) {
215711
- const astChunks = chunksFromRanges(normalizedPath, lines, astRanges, settings);
215712
- if (astChunks.length > 0) return astChunks;
215713
- }
215714
- return chunkWithLineWindows(normalizedPath, lines, settings);
215715
- };
215716
- /**
215717
- * @since 1.0.0
215718
- * @category Layers
215719
- */
215720
- const layer$9 = effect$1(CodeChunker, gen(function* () {
215721
- const spawner = yield* ChildProcessSpawner;
215722
- const fs = yield* FileSystem;
215723
- const pathService = yield* Path$1;
215724
- const listFiles = fn("CodeChunker.listFiles")(function* (options) {
215725
- const root = pathService.resolve(options.root);
215726
- const maxFileSize = options.maxFileSize ?? "1M";
215727
- return yield* pipe$1(spawner.streamLines(make$45("rg", [
215728
- "--files",
215729
- "--hidden",
215730
- "--max-filesize",
215731
- maxFileSize,
215732
- "--glob",
215733
- "!.git"
215734
- ], {
215735
- cwd: root,
215736
- stdin: "ignore"
215737
- })), runCollect, map$9(fromIterable$5), map$9((entries) => entries.map((entry) => normalizePath$1(entry.trim())).filter((entry) => entry.length > 0 && isMeaningfulFile(entry)).sort((left, right) => left.localeCompare(right))), orDie$2);
215738
- });
215739
- const chunkFile = fn("CodeChunker.chunkFile")(function* (options) {
215740
- const root = pathService.resolve(options.root);
215741
- const absolutePath = pathService.resolve(root, options.path);
215742
- const path = normalizePath$1(pathService.relative(root, absolutePath));
215743
- if (path.length === 0 || path === ".." || path.startsWith("../") || !isMeaningfulFile(path)) return [];
215744
- return yield* pipe$1(fs.readFileString(absolutePath), map$9((content) => chunkFileContent(path, content, options)), catch_$2(() => succeed$3([])));
215745
- });
215746
- const chunkFiles = (options) => fromArray(options.paths).pipe(flatMap$2((path) => pipe$1(chunkFile({
215747
- root: options.root,
215748
- path,
215749
- chunkSize: options.chunkSize,
215750
- chunkOverlap: options.chunkOverlap,
215751
- ...options.chunkMaxCharacters === void 0 ? {} : { chunkMaxCharacters: options.chunkMaxCharacters }
215752
- }), fromArrayEffect), { concurrency: 5 }));
215753
- const chunkCodebase = fnUntraced(function* (options) {
215754
- const root = pathService.resolve(options.root);
215755
- return chunkFiles({
215756
- root,
215757
- paths: yield* listFiles({
215758
- root,
215759
- ...options.maxFileSize === void 0 ? {} : { maxFileSize: options.maxFileSize }
215760
- }),
215761
- chunkSize: options.chunkSize,
215762
- chunkOverlap: options.chunkOverlap,
215763
- ...options.chunkMaxCharacters === void 0 ? {} : { chunkMaxCharacters: options.chunkMaxCharacters }
215764
- });
215765
- }, unwrap);
215766
- return CodeChunker.of({
215767
- listFiles,
215768
- chunkFile,
215769
- chunkFiles,
215770
- chunkCodebase
215771
- });
215772
- }));
215773
- //#endregion
215774
- //#region node_modules/.pnpm/@effect+sql-sqlite-node@4.0.0-beta.36_effect@4.0.0-beta.36/node_modules/@effect/sql-sqlite-node/dist/SqliteClient.js
215775
- /**
215776
- * @since 1.0.0
215777
- */
215778
- const ATTR_DB_SYSTEM_NAME = "db.system.name";
215779
- /**
215780
- * @category type ids
215781
- * @since 1.0.0
215782
- */
215783
- const TypeId$2 = "~@effect/sql-sqlite-node/SqliteClient";
215784
- /**
215785
- * @category tags
215786
- * @since 1.0.0
215787
- */
215788
- const SqliteClient = /* @__PURE__ */ Service$1("@effect/sql-sqlite-node/SqliteClient");
215789
- /**
215790
- * @category constructor
215791
- * @since 1.0.0
215792
- */
215793
- const make$10 = (options) => gen(function* () {
215794
- const compiler = makeCompilerSqlite(options.transformQueryNames);
215795
- const transformRows = options.transformResultNames ? defaultTransforms(options.transformResultNames).array : void 0;
215796
- const makeConnection = gen(function* () {
215797
- const scope$3 = yield* scope;
215798
- const db = new Sqlite(options.filename, { readonly: options.readonly ?? false });
215799
- yield* addFinalizer$1(scope$3, sync(() => db.close()));
215800
- if (options.disableWAL !== true) db.pragma("journal_mode = WAL");
215801
- const prepareCache = yield* make$57({
215802
- capacity: options.prepareCacheSize ?? 200,
215803
- timeToLive: options.prepareCacheTTL ?? minutes(10),
215804
- lookup: (sql) => try_({
215805
- try: () => db.prepare(sql),
215806
- catch: (cause) => new SqlError({
215807
- cause,
215808
- message: "Failed to prepare statement "
215809
- })
215810
- })
215811
- });
215812
- const runStatement = (statement, params, raw) => withFiber((fiber) => {
215813
- if (get$15(fiber.services, SafeIntegers)) statement.safeIntegers(true);
215814
- try {
215815
- if (statement.reader) return succeed$3(statement.all(...params));
215816
- const result = statement.run(...params);
215817
- return succeed$3(raw ? result : []);
215818
- } catch (cause) {
215819
- return fail$6(new SqlError({
215820
- cause,
215821
- message: "Failed to execute statement"
215822
- }));
215823
- }
215824
- });
215825
- const run = (sql, params, raw = false) => flatMap$4(get$7(prepareCache, sql), (s) => runStatement(s, params, raw));
215826
- const runValues = (sql, params) => acquireUseRelease(get$7(prepareCache, sql), (statement) => try_({
215827
- try: () => {
215828
- if (statement.reader) {
215829
- statement.raw(true);
215830
- return statement.all(...params);
215831
- }
215832
- statement.run(...params);
215833
- return [];
215834
- },
215835
- catch: (cause) => new SqlError({
215836
- cause,
215837
- message: "Failed to execute statement"
215838
- })
215839
- }), (statement) => sync(() => statement.reader && statement.raw(false)));
215840
- return identity({
215841
- execute(sql, params, transformRows) {
215842
- return transformRows ? map$9(run(sql, params), transformRows) : run(sql, params);
215843
- },
215844
- executeRaw(sql, params) {
215845
- return run(sql, params, true);
215846
- },
215847
- executeValues(sql, params) {
215848
- return runValues(sql, params);
215849
- },
215850
- executeUnprepared(sql, params, transformRows) {
215851
- const effect = runStatement(db.prepare(sql), params ?? [], false);
215852
- return transformRows ? map$9(effect, transformRows) : effect;
215853
- },
215854
- executeStream(_sql, _params) {
215855
- return die("executeStream not implemented");
215856
- },
215857
- export: try_({
215858
- try: () => db.serialize(),
215859
- catch: (cause) => new SqlError({
215860
- cause,
215861
- message: "Failed to export database"
215862
- })
215863
- }),
215864
- backup(destination) {
215865
- return tryPromise({
215866
- try: () => db.backup(destination),
215867
- catch: (cause) => new SqlError({
215868
- cause,
215869
- message: "Failed to backup database"
215870
- })
215871
- });
215872
- },
215873
- loadExtension(path) {
215874
- return try_({
215875
- try: () => db.loadExtension(path),
215876
- catch: (cause) => new SqlError({
215877
- cause,
215878
- message: "Failed to load extension"
215879
- })
215880
- });
215881
- }
215882
- });
215883
- });
215884
- const semaphore = yield* make$72(1);
215885
- const connection = yield* makeConnection;
215886
- const acquirer = semaphore.withPermits(1)(succeed$3(connection));
215887
- const transactionAcquirer = uninterruptibleMask((restore) => {
215888
- const scope = getUnsafe$4(getCurrent().services, Scope);
215889
- return as$1(tap$1(restore(semaphore.take(1)), () => addFinalizer$1(scope, semaphore.release(1))), connection);
215890
- });
215891
- return Object.assign(yield* make$49({
215892
- acquirer,
215893
- compiler,
215894
- transactionAcquirer,
215895
- spanAttributes: [...options.spanAttributes ? Object.entries(options.spanAttributes) : [], [ATTR_DB_SYSTEM_NAME, "sqlite"]],
215896
- transformRows
215897
- }), {
215898
- [TypeId$2]: TypeId$2,
215899
- config: options,
215900
- export: flatMap$4(acquirer, (_) => _.export),
215901
- backup: (destination) => flatMap$4(acquirer, (_) => _.backup(destination)),
215902
- loadExtension: (path) => flatMap$4(acquirer, (_) => _.loadExtension(path))
215903
- });
215904
- });
215905
- /**
215906
- * @category layers
215907
- * @since 1.0.0
215908
- */
215909
- const layer$8 = (config) => effectServices(map$9(make$10(config), (client) => make$87(SqliteClient, client).pipe(add$3(SqlClient, client)))).pipe(provide$3(layer$32));
215910
- //#endregion
215911
- //#region node_modules/.pnpm/@effect+sql-sqlite-node@4.0.0-beta.36_effect@4.0.0-beta.36/node_modules/@effect/sql-sqlite-node/dist/SqliteMigrator.js
215912
- /**
215913
- * @category constructor
215914
- * @since 1.0.0
215915
- */
215916
- const run$1 = /* @__PURE__ */ make$25({});
215917
- /**
215918
- * @category constructor
215919
- * @since 1.0.0
215920
- */
215921
- const layer$7 = (options) => effectDiscard(run$1(options));
215922
- //#endregion
215923
- //#region node_modules/.pnpm/clanka@0.2.21_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_09288369fd42a7055df614a5b619d2f6/node_modules/clanka/dist/internal/sqlite-vector.js
215924
- /**
215925
- * Binary extension for each platform
215926
- */
215927
- const PLATFORM_EXTENSIONS = {
215928
- darwin: ".dylib",
215929
- linux: ".so",
215930
- win32: ".dll"
215931
- };
215932
- /**
215933
- * Detects if the system uses musl libc (Alpine Linux, etc.)
215934
- * Uses multiple detection strategies for reliability
215935
- */
215936
- function isMusl() {
215937
- if (platform() !== "linux") return false;
215938
- for (const file of [
215939
- "/lib/ld-musl-x86_64.so.1",
215940
- "/lib/ld-musl-aarch64.so.1",
215941
- "/lib/ld-musl-armhf.so.1"
215942
- ]) if (existsSync(file)) return true;
215943
- try {
215944
- if (execSync("ldd --version 2>&1", {
215945
- encoding: "utf-8",
215946
- stdio: [
215947
- "pipe",
215948
- "pipe",
215949
- "pipe"
215950
- ]
215951
- }).includes("musl")) return true;
215952
- } catch {}
215953
- try {
215954
- if (existsSync("/etc/os-release")) {
215955
- const osRelease = readFileSync("/etc/os-release", "utf-8");
215956
- if (osRelease.includes("Alpine") || osRelease.includes("musl")) return true;
215957
- }
215958
- } catch {}
215959
- try {
215960
- if ((process.report?.getReport?.())?.header?.glibcVersionRuntime === "") return true;
215961
- } catch {}
215962
- return false;
215963
- }
215964
- /**
215965
- * Gets the current platform identifier
215966
- * @throws {Error} If the platform is unsupported
215967
- */
215968
- function getCurrentPlatform() {
215969
- const platformName = platform();
215970
- const archName = arch();
215971
- if (platformName === "darwin") {
215972
- if (archName === "arm64") return "darwin-arm64";
215973
- if (archName === "x64" || archName === "ia32") return "darwin-x86_64";
215974
- }
215975
- if (platformName === "linux") {
215976
- const muslSuffix = isMusl() ? "-musl" : "";
215977
- if (archName === "arm64") return `linux-arm64${muslSuffix}`;
215978
- if (archName === "x64" || archName === "ia32") return `linux-x86_64${muslSuffix}`;
215979
- }
215980
- if (platformName === "win32") {
215981
- if (archName === "x64" || archName === "ia32") return "win32-x86_64";
215982
- }
215983
- throw new Error(`Unsupported platform: ${platformName}-${archName}. Supported platforms: darwin-arm64, darwin-x86_64, linux-arm64, linux-x86_64, win32-x86_64 (with glibc or musl support for Linux)`);
215984
- }
215985
- /**
215986
- * Gets the package name for the current platform
215987
- */
215988
- function getPlatformPackageName() {
215989
- return `@sqliteai/sqlite-vector-${getCurrentPlatform()}`;
215990
- }
215991
- /**
215992
- * Gets the binary filename for the current platform
215993
- */
215994
- function getBinaryName() {
215995
- const platformName = platform();
215996
- const extension = PLATFORM_EXTENSIONS[platformName];
215997
- if (!extension) throw new Error(`Unknown platform: ${platformName}`);
215998
- return `vector${extension}`;
215999
- }
216000
- /**
216001
- * Error thrown when the SQLite Vector extension cannot be found
216002
- */
216003
- var ExtensionNotFoundError = class extends Error {
216004
- constructor(message) {
216005
- super(message);
216006
- this.name = "ExtensionNotFoundError";
216007
- }
216008
- };
216009
- /**
216010
- * Attempts to load the platform-specific package
216011
- * @returns The path to the extension binary, or null if not found
216012
- */
216013
- function tryLoadPlatformPackage() {
216014
- try {
216015
- const packageName = getPlatformPackageName();
216016
- return import.meta.resolve(packageName).replace(/\/index\.js$/, `/${getBinaryName()}`).replace("file://", "");
216017
- } catch {}
216018
- return null;
216019
- }
216020
- /**
216021
- * Gets the absolute path to the SQLite Vector extension binary for the current platform
216022
- *
216023
- * @returns Absolute path to the extension binary (.so, .dylib, or .dll)
216024
- * @throws {ExtensionNotFoundError} If the extension binary cannot be found
216025
- *
216026
- * @example
216027
- * ```typescript
216028
- * import { getExtensionPath } from '@sqliteai/sqlite-vector';
216029
- *
216030
- * const extensionPath = getExtensionPath();
216031
- * // On macOS ARM64: /path/to/node_modules/@sqliteai/sqlite-vector-darwin-arm64/vector.dylib
216032
- * ```
216033
- */
216034
- function getExtensionPath() {
216035
- const platformPath = tryLoadPlatformPackage();
216036
- if (platformPath) return resolve(platformPath);
216037
- throw new ExtensionNotFoundError(`SQLite Vector extension not found for platform: ${getCurrentPlatform()}\n\nThe platform-specific package "${getPlatformPackageName()}" is not installed.\nThis usually happens when:\n 1. Your platform is not supported\n 2. npm failed to install optional dependencies\n 3. You're installing with --no-optional flag\n\nTry running: npm install --force`);
216038
- }
216039
- //#endregion
216040
- //#region node_modules/.pnpm/clanka@0.2.21_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_09288369fd42a7055df614a5b619d2f6/node_modules/clanka/dist/Sqlite.js
216041
- /**
216042
- * @since 1.0.0
216043
- */
216044
- /**
216045
- * @since 1.0.0
216046
- * @category Layers
216047
- */
216048
- const SqliteLayer = (database) => layer$7({ loader: fromRecord({ "0001_create_chunks": gen(function* () {
216049
- const sql = yield* SqlClient;
216050
- yield* sql`CREATE TABLE IF NOT EXISTS chunks (
216051
- id INTEGER PRIMARY KEY AUTOINCREMENT,
216052
- path TEXT NOT NULL,
216053
- content TEXT NOT NULL,
216054
- hash TEXT NOT NULL,
216055
- vector BLOB NOT NULL,
216056
- syncId TEXT NOT NULL
216057
- )`;
216058
- yield* sql`CREATE INDEX IF NOT EXISTS idx_chunks_hash ON chunks (hash)`;
216059
- }) }) }).pipe(provide$3(effectDiscard(gen(function* () {
216060
- yield* (yield* SqliteClient).loadExtension(getExtensionPath());
216061
- }))), provideMerge(layer$8({ filename: database })), provide$3(effectDiscard(gen(function* () {
216062
- const fs = yield* FileSystem;
216063
- const directory = (yield* Path$1).dirname(database);
216064
- if (directory === ".") return;
216065
- yield* fs.makeDirectory(directory, { recursive: true });
216066
- }))));
216067
- //#endregion
216068
- //#region node_modules/.pnpm/clanka@0.2.21_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_09288369fd42a7055df614a5b619d2f6/node_modules/clanka/dist/SemanticSearch.js
214693
+ //#region node_modules/.pnpm/clanka@0.2.25_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_7e66a3c8cbea5282674885bea99d757a/node_modules/clanka/dist/SemanticSearch/Service.js
216069
214694
  /**
216070
214695
  * @since 1.0.0
216071
214696
  */
@@ -216074,124 +214699,6 @@ const SqliteLayer = (database) => layer$7({ loader: fromRecord({ "0001_create_ch
216074
214699
  * @category Services
216075
214700
  */
216076
214701
  var SemanticSearch = class extends Service$1()("clanka/SemanticSearch/SemanticSearch") {};
216077
- const normalizePath = (path) => path.replace(/\\/g, "/");
216078
- const resolveChunkConfig = (options) => ({
216079
- chunkSize: 30,
216080
- chunkOverlap: 0,
216081
- chunkMaxCharacters: options.chunkMaxCharacters ?? 1e4
216082
- });
216083
- const makeEmbeddingResolver = (resolver, options) => resolver.pipe(setDelay(options.embeddingRequestDelay ?? millis(50)), batchN(options.embeddingBatchSize ?? 500));
216084
- const chunkEmbeddingInput = (chunk) => {
216085
- const headerLines = ["---", "file: " + chunk.path];
216086
- if (chunk.name !== void 0) headerLines.push("name: " + chunk.name);
216087
- if (chunk.type !== void 0) headerLines.push("type: " + chunk.type);
216088
- if (chunk.parent !== void 0) headerLines.push("parent: " + chunk.parent);
216089
- headerLines.push("---");
216090
- const contentLines = chunk.content.split("\n");
216091
- let contentWithLines = "";
216092
- for (let i = 0; i < contentLines.length; i++) {
216093
- if (i > 0) contentWithLines += "\n";
216094
- contentWithLines += `${chunk.startLine + i}: ${contentLines[i]}`;
216095
- }
216096
- return headerLines.join("\n") + "\n\n" + contentWithLines;
216097
- };
216098
- const hashChunkInput = (input) => createHash("sha256").update(input).digest("hex");
216099
- /**
216100
- * @since 1.0.0
216101
- * @category Layers
216102
- */
216103
- const layer$6 = (options) => effect$1(SemanticSearch, gen(function* () {
216104
- const chunker = yield* CodeChunker;
216105
- const repo = yield* ChunkRepo;
216106
- const embeddings = yield* EmbeddingModel;
216107
- const pathService = yield* Path$1;
216108
- const root = pathService.resolve(options.directory);
216109
- const resolver = makeEmbeddingResolver(embeddings.resolver, options);
216110
- const concurrency = options.concurrency ?? 2e3;
216111
- const chunkConfig = resolveChunkConfig(options);
216112
- const indexHandle = yield* make$56();
216113
- const console = yield* Console$1;
216114
- const resolveIndexedPath = (path) => {
216115
- const absolutePath = pathService.resolve(root, path);
216116
- const relativePath = normalizePath(pathService.relative(root, absolutePath));
216117
- if (relativePath.length === 0 || relativePath === ".." || relativePath.startsWith("../")) return none$4();
216118
- return some$2(relativePath);
216119
- };
216120
- const processChunk = fnUntraced(function* (options) {
216121
- const input = chunkEmbeddingInput(options.chunk);
216122
- const hash = hashChunkInput(input);
216123
- if (options.checkExisting) {
216124
- const id = yield* repo.exists(hash);
216125
- if (isSome(id)) {
216126
- yield* repo.setSyncId(id.value, options.syncId);
216127
- return;
216128
- }
216129
- }
216130
- const result = yield* request$2(new EmbeddingRequest({ input }), resolver);
216131
- const vector = new Float32Array(result.vector);
216132
- yield* repo.insert(Chunk.insert.makeUnsafe({
216133
- path: options.chunk.path,
216134
- hash,
216135
- content: input,
216136
- vector,
216137
- syncId: options.syncId
216138
- }));
216139
- }, ignore$1({
216140
- log: "Warn",
216141
- message: "Failed to process chunk for embedding"
216142
- }), (effect, options) => annotateLogs(effect, { chunk: `${options.chunk.path}/${options.chunk.startLine}` }));
216143
- const runIndex = run$4(indexHandle, gen(function* () {
216144
- const syncId = SyncId.makeUnsafe(crypto.randomUUID());
216145
- yield* logInfo("Starting SemanticSearch index");
216146
- yield* pipe$1(chunker.chunkCodebase({
216147
- root,
216148
- ...chunkConfig
216149
- }), tap((chunk) => processChunk({
216150
- chunk,
216151
- syncId,
216152
- checkExisting: true
216153
- }), { concurrency }), runDrain);
216154
- yield* repo.deleteForSyncId(syncId);
216155
- yield* logInfo("Finished SemanticSearch index");
216156
- }).pipe(withSpan$1("SemanticSearch.index"), withLogSpan("SemanticSearch.index"), provideService$2(Console$1, console)), { onlyIfMissing: true });
216157
- const initialIndex = yield* runIndex;
216158
- yield* runIndex.pipe(delay(minutes(3)), forever, forkScoped);
216159
- return SemanticSearch.of({
216160
- search: fn("SemanticSearch.search")(function* (options) {
216161
- yield* join$2(initialIndex);
216162
- yield* annotateCurrentSpan(options);
216163
- const { vector } = yield* embeddings.embed(options.query);
216164
- return (yield* repo.search({
216165
- vector: new Float32Array(vector),
216166
- limit: options.limit
216167
- })).map((r) => r.content).join("\n\n");
216168
- }, orDie$2),
216169
- updateFile: fn("SemanticSearch.updateFile")(function* (path) {
216170
- yield* join$2(initialIndex);
216171
- const indexedPath = resolveIndexedPath(path);
216172
- if (isNone(indexedPath)) return;
216173
- yield* repo.deleteByPath(indexedPath.value);
216174
- const chunks = yield* chunker.chunkFile({
216175
- root,
216176
- path: indexedPath.value,
216177
- ...chunkConfig
216178
- });
216179
- if (chunks.length === 0) return;
216180
- const syncId = SyncId.makeUnsafe(crypto.randomUUID());
216181
- yield* pipe$1(fromArray(chunks), tap((chunk) => processChunk({
216182
- chunk,
216183
- syncId,
216184
- checkExisting: false
216185
- }), { concurrency }), runDrain);
216186
- }, orDie$2),
216187
- removeFile: fn("SemanticSearch.removeFile")(function* (path) {
216188
- yield* join$2(initialIndex);
216189
- const indexedPath = resolveIndexedPath(path);
216190
- if (isNone(indexedPath)) return;
216191
- yield* repo.deleteByPath(indexedPath.value);
216192
- }, orDie$2)
216193
- });
216194
- })).pipe(provide$3([layer$9, layer$10.pipe(provide$3(SqliteLayer(options.database ?? ".clanka/search.sqlite")))]));
216195
214702
  /**
216196
214703
  * @since 1.0.0
216197
214704
  * @category Utils
@@ -216209,7 +214716,7 @@ const maybeRemoveFile = (path) => serviceOption(SemanticSearch).pipe(flatMap$4(m
216209
214716
  onSome: (service) => service.removeFile(path)
216210
214717
  })));
216211
214718
  //#endregion
216212
- //#region node_modules/.pnpm/clanka@0.2.21_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_09288369fd42a7055df614a5b619d2f6/node_modules/clanka/dist/AgentTools.js
214719
+ //#region node_modules/.pnpm/clanka@0.2.25_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_7e66a3c8cbea5282674885bea99d757a/node_modules/clanka/dist/AgentTools.js
216213
214720
  /**
216214
214721
  * @since 1.0.0
216215
214722
  */
@@ -216560,7 +215067,7 @@ const AgentToolHandlers = AgentToolHandlersNoDeps.pipe(provide$3([layer$12, laye
216560
215067
  AgentToolHandlersNoDeps.pipe(provide$3([mock(ExaSearch)({}), mock(WebToMarkdown)({})]));
216561
215068
  var ApplyPatchError = class extends TaggedClass$2("ApplyPatchError") {};
216562
215069
  //#endregion
216563
- //#region node_modules/.pnpm/clanka@0.2.21_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_09288369fd42a7055df614a5b619d2f6/node_modules/clanka/dist/TypeBuilder.js
215070
+ //#region node_modules/.pnpm/clanka@0.2.25_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_7e66a3c8cbea5282674885bea99d757a/node_modules/clanka/dist/TypeBuilder.js
216564
215071
  const resolveDocumentation = resolveAt("documentation");
216565
215072
  const identifierPattern = /^[$A-Z_a-z][$0-9A-Z_a-z]*$/u;
216566
215073
  const Precedence = {
@@ -216833,7 +215340,7 @@ const render = (schema, options) => {
216833
215340
  return printNode({ text: documentation === void 0 ? rendered.text : `${renderJsDoc(documentation, 0, printerOptions)}${printerOptions.newLine}${rendered.text}` }, printerOptions);
216834
215341
  };
216835
215342
  //#endregion
216836
- //#region node_modules/.pnpm/clanka@0.2.21_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_09288369fd42a7055df614a5b619d2f6/node_modules/clanka/dist/ToolkitRenderer.js
215343
+ //#region node_modules/.pnpm/clanka@0.2.25_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_7e66a3c8cbea5282674885bea99d757a/node_modules/clanka/dist/ToolkitRenderer.js
216837
215344
  /**
216838
215345
  * @since 1.0.0
216839
215346
  */
@@ -216855,7 +215362,7 @@ declare function ${name}(${params}): Promise<${render(tool.successSchema)}>`);
216855
215362
  }) });
216856
215363
  };
216857
215364
  //#endregion
216858
- //#region node_modules/.pnpm/clanka@0.2.21_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_09288369fd42a7055df614a5b619d2f6/node_modules/clanka/dist/ScriptPreprocessing.js
215365
+ //#region node_modules/.pnpm/clanka@0.2.25_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_7e66a3c8cbea5282674885bea99d757a/node_modules/clanka/dist/ScriptPreprocessing.js
216859
215366
  const isIdentifierChar = (char) => char !== void 0 && /[A-Za-z0-9_$]/.test(char);
216860
215367
  const isIdentifierStartChar = (char) => char !== void 0 && /[A-Za-z_$]/.test(char);
216861
215368
  const hasIdentifierBoundary = (text, index, length) => !isIdentifierChar(text[index - 1]) && !isIdentifierChar(text[index + length]);
@@ -217304,7 +215811,7 @@ const fixAssignedTemplatesForToolCalls = (script) => {
217304
215811
  };
217305
215812
  const preprocessScript = (script) => fixAssignedTemplatesForToolCalls(["applyPatch", "taskComplete"].reduce((current, functionName) => fixCallTemplateArgument(current, functionName), fixTargetCallObjectPropertyTemplates(script)));
217306
215813
  //#endregion
217307
- //#region node_modules/.pnpm/clanka@0.2.21_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_09288369fd42a7055df614a5b619d2f6/node_modules/clanka/dist/AgentExecutor.js
215814
+ //#region node_modules/.pnpm/clanka@0.2.25_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_7e66a3c8cbea5282674885bea99d757a/node_modules/clanka/dist/AgentExecutor.js
217308
215815
  /**
217309
215816
  * @since 1.0.0
217310
215817
  */
@@ -217489,7 +215996,7 @@ var QueueWriteStream = class extends Writable {
217489
215996
  }
217490
215997
  };
217491
215998
  //#endregion
217492
- //#region node_modules/.pnpm/clanka@0.2.21_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_09288369fd42a7055df614a5b619d2f6/node_modules/clanka/dist/ScriptExtraction.js
215999
+ //#region node_modules/.pnpm/clanka@0.2.25_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_7e66a3c8cbea5282674885bea99d757a/node_modules/clanka/dist/ScriptExtraction.js
217493
216000
  const stripWrappingCodeFence = (script) => {
217494
216001
  const lines = script.split(/\r?\n/);
217495
216002
  if (lines.length < 2) return script;
@@ -217789,14 +216296,14 @@ const Message$1 = /* @__PURE__ */ Union$2([
217789
216296
  content: Array$1(Union$2([ToolResultPart, ToolApprovalResponsePart]))
217790
216297
  }).annotate({ identifier: "ToolMessage" })
217791
216298
  ]);
217792
- const TypeId$1 = "~effect/unstable/ai/Prompt";
216299
+ const TypeId$4 = "~effect/unstable/ai/Prompt";
217793
216300
  /**
217794
216301
  * Type guard to check if a value is a Prompt.
217795
216302
  *
217796
216303
  * @since 4.0.0
217797
216304
  * @category guards
217798
216305
  */
217799
- const isPrompt = (u) => hasProperty(u, TypeId$1);
216306
+ const isPrompt = (u) => hasProperty(u, TypeId$4);
217800
216307
  const $Prompt = /* @__PURE__ */ declare((u) => isPrompt(u), { identifier: "Prompt" });
217801
216308
  /**
217802
216309
  * Describes a schema that represents a `Prompt` instance.
@@ -217815,7 +216322,7 @@ const Prompt$1 = /* @__PURE__ */ Struct$2({ content: Array$1(toEncoded(Message$1
217815
216322
  })
217816
216323
  })));
217817
216324
  const Proto = {
217818
- [TypeId$1]: TypeId$1,
216325
+ [TypeId$4]: TypeId$4,
217819
216326
  pipe() {
217820
216327
  return pipeArguments(this, arguments);
217821
216328
  }
@@ -217864,7 +216371,7 @@ const empty = /* @__PURE__ */ makePrompt([]);
217864
216371
  * @since 4.0.0
217865
216372
  * @category constructors
217866
216373
  */
217867
- const make$9 = (input) => {
216374
+ const make$12 = (input) => {
217868
216375
  if (typeof input === "string") return makePrompt([makeMessage("user", { content: [makePart("text", { text: input })] })]);
217869
216376
  if (isIterable(input)) return makePrompt(decodeMessagesSync(fromIterable$5(input), { errors: "all" }));
217870
216377
  return input;
@@ -218013,7 +216520,7 @@ const fromResponseParts = (parts) => {
218013
216520
  * @category combinators
218014
216521
  */
218015
216522
  const concat = /* @__PURE__ */ dual(2, (self, input) => {
218016
- const other = make$9(input);
216523
+ const other = make$12(input);
218017
216524
  if (self.content.length === 0) return other;
218018
216525
  if (other.content.length === 0) return self;
218019
216526
  return fromMessages([...self.content, ...other.content]);
@@ -218147,7 +216654,7 @@ var ResponseIdTracker = class extends Service$1()("effect/ai/ResponseIdTracker")
218147
216654
  * @since 4.0.0
218148
216655
  * @category constructors
218149
216656
  */
218150
- const make$8 = /* @__PURE__ */ sync(() => {
216657
+ const make$11 = /* @__PURE__ */ sync(() => {
218151
216658
  const sentParts = /* @__PURE__ */ new Map();
218152
216659
  const none = () => {
218153
216660
  sentParts.clear();
@@ -218484,7 +216991,7 @@ var GenerateObjectResponse = class extends GenerateTextResponse {
218484
216991
  * @since 4.0.0
218485
216992
  * @category constructors
218486
216993
  */
218487
- const make$7 = /* @__PURE__ */ fnUntraced(function* (params) {
216994
+ const make$10 = /* @__PURE__ */ fnUntraced(function* (params) {
218488
216995
  const codecTransformer = params.codecTransformer ?? defaultCodecTransformer;
218489
216996
  const parentSpanTransformer = yield* serviceOption(CurrentSpanTransformer);
218490
216997
  const getSpanTransformer = serviceOption(CurrentSpanTransformer).pipe(map$9(orElse$1(() => parentSpanTransformer)));
@@ -218495,7 +217002,7 @@ const make$7 = /* @__PURE__ */ fnUntraced(function* (params) {
218495
217002
  } }, fnUntraced(function* (span) {
218496
217003
  const spanTransformer = yield* getSpanTransformer;
218497
217004
  const providerOptions = {
218498
- prompt: make$9(options.prompt),
217005
+ prompt: make$12(options.prompt),
218499
217006
  tools: [],
218500
217007
  toolChoice: "none",
218501
217008
  responseFormat: { type: "text" },
@@ -218520,7 +217027,7 @@ const make$7 = /* @__PURE__ */ fnUntraced(function* (params) {
218520
217027
  } }, fnUntraced(function* (span) {
218521
217028
  const spanTransformer = yield* getSpanTransformer;
218522
217029
  const providerOptions = {
218523
- prompt: make$9(options.prompt),
217030
+ prompt: make$12(options.prompt),
218524
217031
  tools: [],
218525
217032
  toolChoice: "none",
218526
217033
  responseFormat: {
@@ -218555,7 +217062,7 @@ const make$7 = /* @__PURE__ */ fnUntraced(function* (params) {
218555
217062
  toolChoice: options.toolChoice
218556
217063
  } });
218557
217064
  const providerOptions = {
218558
- prompt: make$9(options.prompt),
217065
+ prompt: make$12(options.prompt),
218559
217066
  tools: [],
218560
217067
  toolChoice: "none",
218561
217068
  responseFormat: { type: "text" },
@@ -219038,7 +217545,7 @@ const applySpanTransformer = (transformer, response, options) => {
219038
217545
  });
219039
217546
  };
219040
217547
  //#endregion
219041
- //#region node_modules/.pnpm/clanka@0.2.21_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_09288369fd42a7055df614a5b619d2f6/node_modules/clanka/dist/Agent.js
217548
+ //#region node_modules/.pnpm/clanka@0.2.25_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_7e66a3c8cbea5282674885bea99d757a/node_modules/clanka/dist/Agent.js
219042
217549
  /**
219043
217550
  * @since 1.0.0
219044
217551
  */
@@ -219046,7 +217553,7 @@ const applySpanTransformer = (transformer, response, options) => {
219046
217553
  * @since 1.0.0
219047
217554
  * @category Models
219048
217555
  */
219049
- const TypeId = "~clanka/Agent";
217556
+ const TypeId$3 = "~clanka/Agent";
219050
217557
  /**
219051
217558
  * @since 1.0.0
219052
217559
  * @category Service
@@ -219056,7 +217563,7 @@ const Agent = Service$1("clanka/Agent");
219056
217563
  * @since 1.0.0
219057
217564
  * @category Constructors
219058
217565
  */
219059
- const make$6 = gen(function* () {
217566
+ const make$9 = gen(function* () {
219060
217567
  const executor = yield* AgentExecutor;
219061
217568
  const singleTool = yield* SingleTools.asEffect().pipe(provide$1(SingleToolHandlers));
219062
217569
  const capabilities = yield* executor.capabilities;
@@ -219120,7 +217627,7 @@ ${content}
219120
217627
  let id = agentCounter++;
219121
217628
  const stream = spawn({
219122
217629
  agentId: id,
219123
- prompt: make$9(prompt),
217630
+ prompt: make$12(prompt),
219124
217631
  system: opts.system,
219125
217632
  disableHistory: true
219126
217633
  });
@@ -219292,10 +217799,10 @@ ${content}
219292
217799
  }, unwrap);
219293
217800
  const sendLock = makeUnsafe$9(1);
219294
217801
  return Agent.of({
219295
- [TypeId]: TypeId,
217802
+ [TypeId$3]: TypeId$3,
219296
217803
  send: (options) => spawn({
219297
217804
  agentId: agentCounter++,
219298
- prompt: make$9(options.prompt),
217805
+ prompt: make$12(options.prompt),
219299
217806
  system: options.system
219300
217807
  }).pipe(broadcast({
219301
217808
  capacity: "unbounded",
@@ -219381,14 +217888,14 @@ const SingleToolHandlers = SingleTools.toLayer({ execute: fnUntraced(function* (
219381
217888
  * @since 1.0.0
219382
217889
  * @category Layers
219383
217890
  */
219384
- const layer$5 = effect$1(Agent, make$6);
217891
+ const layer$10 = effect$1(Agent, make$9);
219385
217892
  /**
219386
217893
  * Create an Agent layer that uses a local AgentExecutor.
219387
217894
  *
219388
217895
  * @since 1.0.0
219389
217896
  * @category Layers
219390
217897
  */
219391
- const layerLocal = (options) => layer$5.pipe(provide$3(layerLocal$1(options)));
217898
+ const layerLocal = (options) => layer$10.pipe(provide$3(layerLocal$1(options)));
219392
217899
  /**
219393
217900
  * @since 1.0.0
219394
217901
  * @category Subagent model
@@ -226473,7 +224980,7 @@ const ListThreadItemsMethod200 = ThreadItemListResource;
226473
224980
  const GetThreadMethod200 = ThreadResource;
226474
224981
  const DeleteThreadMethod200 = DeletedThreadResource;
226475
224982
  const ListThreadsMethod200 = ThreadListResource;
226476
- const make$5 = (httpClient, options = {}) => {
224983
+ const make$8 = (httpClient, options = {}) => {
226477
224984
  const unexpectedStatus = (response) => flatMap$4(orElseSucceed(response.json, () => "Unexpected status code"), (description) => fail$6(new HttpClientError({ reason: new StatusCodeError({
226478
224985
  request: response.request,
226479
224986
  response,
@@ -228031,12 +226538,12 @@ const RedactedOpenAiHeaders$1 = {
228031
226538
  * @since 1.0.0
228032
226539
  * @category constructors
228033
226540
  */
228034
- const make$4 = /* @__PURE__ */ fnUntraced(function* (options) {
226541
+ const make$7 = /* @__PURE__ */ fnUntraced(function* (options) {
228035
226542
  const baseClient = yield* HttpClient;
228036
226543
  const apiUrl = options.apiUrl ?? "https://api.openai.com/v1";
228037
226544
  const httpClient = baseClient.pipe(mapRequest(flow(prependUrl(apiUrl), options.apiKey ? bearerToken(value$2(options.apiKey)) : identity, options.organizationId ? setHeader(RedactedOpenAiHeaders$1.OpenAiOrganization, value$2(options.organizationId)) : identity, options.projectId ? setHeader(RedactedOpenAiHeaders$1.OpenAiProject, value$2(options.projectId)) : identity, acceptJson)), options.transformClient ? options.transformClient : identity);
228038
226545
  const httpClientOk = filterStatusOk(httpClient);
228039
- const client = make$5(httpClient, { transformClient: fnUntraced(function* (client) {
226546
+ const client = make$8(httpClient, { transformClient: fnUntraced(function* (client) {
228040
226547
  const config = yield* OpenAiConfig$1.getOrUndefined;
228041
226548
  if (isNotUndefined(config?.transformClient)) return config.transformClient(client);
228042
226549
  return client;
@@ -228080,7 +226587,7 @@ const make$4 = /* @__PURE__ */ fnUntraced(function* (options) {
228080
226587
  * @since 1.0.0
228081
226588
  * @category layers
228082
226589
  */
228083
- const layer$4 = (options) => effect$1(OpenAiClient$1, make$4(options));
226590
+ const layer$9 = (options) => effect$1(OpenAiClient$1, make$7(options));
228084
226591
  /**
228085
226592
  * @since 1.0.0
228086
226593
  * @category Websocket mode
@@ -228088,7 +226595,7 @@ const layer$4 = (options) => effect$1(OpenAiClient$1, make$4(options));
228088
226595
  var OpenAiSocket = class extends Service$1()("@effect/ai-openai/OpenAiClient/OpenAiSocket") {};
228089
226596
  const makeSocket = /* @__PURE__ */ gen(function* () {
228090
226597
  const client = yield* OpenAiClient$1;
228091
- const tracker = yield* make$8;
226598
+ const tracker = yield* make$11;
228092
226599
  const request = yield* orDie$2(client.client.httpClient.preprocess(post("/responses")));
228093
226600
  const socket = yield* makeWebSocket(request.url.replace(/^http/, "ws")).pipe(updateService(WebSocketConstructor, (f) => (url) => f(url, { headers: request.headers })));
228094
226601
  const queueRef = yield* make$68({
@@ -228222,6 +226729,105 @@ const decodeEvent = /* @__PURE__ */ decodeUnknownSync(/* @__PURE__ */ fromJsonSt
228222
226729
  */
228223
226730
  const layerWebSocketMode = /* @__PURE__ */ effectServices(makeSocket);
228224
226731
  //#endregion
226732
+ //#region node_modules/.pnpm/effect@4.0.0-beta.36/node_modules/effect/dist/unstable/ai/EmbeddingModel.js
226733
+ /**
226734
+ * The `EmbeddingModel` module provides provider-agnostic text embedding capabilities.
226735
+ *
226736
+ * @example
226737
+ * ```ts
226738
+ * import { Effect } from "effect"
226739
+ * import { EmbeddingModel } from "effect/unstable/ai"
226740
+ *
226741
+ * const program = Effect.gen(function*() {
226742
+ * const model = yield* EmbeddingModel.EmbeddingModel
226743
+ * return yield* model.embed("hello world")
226744
+ * })
226745
+ * ```
226746
+ *
226747
+ * @since 4.0.0
226748
+ */
226749
+ /**
226750
+ * Service tag for embedding model operations.
226751
+ *
226752
+ * @since 4.0.0
226753
+ * @category services
226754
+ */
226755
+ var EmbeddingModel = class extends Service$1()("effect/unstable/ai/EmbeddingModel") {};
226756
+ /**
226757
+ * Service tag that provides the current embedding dimensions.
226758
+ *
226759
+ * @since 4.0.0
226760
+ * @category services
226761
+ */
226762
+ var Dimensions = class extends Service$1()("effect/unstable/ai/EmbeddingModel/Dimensions") {};
226763
+ /**
226764
+ * Token usage metadata for embedding operations.
226765
+ *
226766
+ * @since 4.0.0
226767
+ * @category models
226768
+ */
226769
+ var EmbeddingUsage = class extends Class$2("effect/ai/EmbeddingModel/EmbeddingUsage")({ inputTokens: /* @__PURE__ */ UndefinedOr(Finite) }) {};
226770
+ /**
226771
+ * Response for a single embedding request.
226772
+ *
226773
+ * @since 4.0.0
226774
+ * @category models
226775
+ */
226776
+ var EmbedResponse = class extends Class$2("effect/ai/EmbeddingModel/EmbedResponse")({ vector: /* @__PURE__ */ Array$1(Finite) }) {};
226777
+ /**
226778
+ * Response for multiple embeddings.
226779
+ *
226780
+ * @since 4.0.0
226781
+ * @category models
226782
+ */
226783
+ var EmbedManyResponse = class extends Class$2("effect/ai/EmbeddingModel/EmbedManyResponse")({
226784
+ embeddings: /* @__PURE__ */ Array$1(EmbedResponse),
226785
+ usage: EmbeddingUsage
226786
+ }) {};
226787
+ /**
226788
+ * Tagged request used by request resolvers for embedding operations.
226789
+ *
226790
+ * @since 4.0.0
226791
+ * @category constructors
226792
+ */
226793
+ var EmbeddingRequest = class extends TaggedClass$1("EmbeddingRequest") {};
226794
+ const invalidProviderResponse = (description) => make$15({
226795
+ module: "EmbeddingModel",
226796
+ method: "embedMany",
226797
+ reason: new InvalidOutputError({ description })
226798
+ });
226799
+ /**
226800
+ * Creates an EmbeddingModel service from a provider embedMany implementation.
226801
+ *
226802
+ * @since 4.0.0
226803
+ * @category constructors
226804
+ */
226805
+ const make$6 = /* @__PURE__ */ fnUntraced(function* (params) {
226806
+ const resolver = make$47((entries) => flatMap$4(params.embedMany({ inputs: entries.map((entry) => entry.request.input) }), (response) => map$9(mapProviderResults(entries.length, response.results), (embeddings) => {
226807
+ for (let i = 0; i < entries.length; i++) entries[i].completeUnsafe(succeed$6(embeddings[i]));
226808
+ }))).pipe(withSpan("EmbeddingModel.resolver"));
226809
+ return EmbeddingModel.of({
226810
+ resolver,
226811
+ embed: (input) => request$2(new EmbeddingRequest({ input }), resolver).pipe(withSpan$1("EmbeddingModel.embed")),
226812
+ embedMany: (input) => (input.length === 0 ? succeed$3(new EmbedManyResponse({
226813
+ embeddings: [],
226814
+ usage: new EmbeddingUsage({ inputTokens: void 0 })
226815
+ })) : params.embedMany({ inputs: input }).pipe(flatMap$4((response) => mapProviderResults(input.length, response.results).pipe(map$9((embeddings) => new EmbedManyResponse({
226816
+ embeddings,
226817
+ usage: new EmbeddingUsage({ inputTokens: response.usage.inputTokens })
226818
+ })))))).pipe(withSpan$1("EmbeddingModel.embedMany"))
226819
+ });
226820
+ });
226821
+ const mapProviderResults = (inputLength, results) => {
226822
+ const embeddings = new Array(inputLength);
226823
+ if (results.length !== inputLength) return fail$6(invalidProviderResponse(`Provider returned ${results.length} embeddings but expected ${inputLength}`));
226824
+ for (let i = 0; i < results.length; i++) {
226825
+ const vector = results[i];
226826
+ embeddings[i] = new EmbedResponse({ vector });
226827
+ }
226828
+ return succeed$3(embeddings);
226829
+ };
226830
+ //#endregion
228225
226831
  //#region node_modules/.pnpm/@effect+ai-openai@4.0.0-beta.36_effect@4.0.0-beta.36/node_modules/@effect/ai-openai/dist/OpenAiEmbeddingModel.js
228226
226832
  /**
228227
226833
  * OpenAI Embedding Model implementation.
@@ -228241,7 +226847,7 @@ var Config$2 = class extends Service$1()("@effect/ai-openai/OpenAiEmbeddingModel
228241
226847
  * @since 1.0.0
228242
226848
  * @category constructors
228243
226849
  */
228244
- const model$1 = (model, options) => make$16("openai", model, merge$6(layer$3({
226850
+ const model$1 = (model, options) => make$16("openai", model, merge$6(layer$8({
228245
226851
  model,
228246
226852
  config: {
228247
226853
  ...options.config,
@@ -228254,7 +226860,7 @@ const model$1 = (model, options) => make$16("openai", model, merge$6(layer$3({
228254
226860
  * @since 1.0.0
228255
226861
  * @category constructors
228256
226862
  */
228257
- const make$3 = /* @__PURE__ */ fnUntraced(function* ({ model, config: providerConfig }) {
226863
+ const make$5 = /* @__PURE__ */ fnUntraced(function* ({ model, config: providerConfig }) {
228258
226864
  const client = yield* OpenAiClient$1;
228259
226865
  const makeConfig = gen(function* () {
228260
226866
  const services$5 = yield* services();
@@ -228264,7 +226870,7 @@ const make$3 = /* @__PURE__ */ fnUntraced(function* ({ model, config: providerCo
228264
226870
  ...services$5.mapUnsafe.get(Config$2.key)
228265
226871
  };
228266
226872
  });
228267
- return yield* make$11({ embedMany: fnUntraced(function* ({ inputs }) {
226873
+ return yield* make$6({ embedMany: fnUntraced(function* ({ inputs }) {
228268
226874
  const config = yield* makeConfig;
228269
226875
  const response = yield* client.createEmbedding({
228270
226876
  ...config,
@@ -228279,7 +226885,7 @@ const make$3 = /* @__PURE__ */ fnUntraced(function* ({ model, config: providerCo
228279
226885
  * @since 1.0.0
228280
226886
  * @category layers
228281
226887
  */
228282
- const layer$3 = (options) => effect$1(EmbeddingModel, make$3(options));
226888
+ const layer$8 = (options) => effect$1(EmbeddingModel, make$5(options));
228283
226889
  const mapProviderResponse = (inputLength, response) => {
228284
226890
  if (response.data.length !== inputLength) return fail$6(invalidOutput("Provider returned " + response.data.length + " embeddings but expected " + inputLength));
228285
226891
  const results = new Array(inputLength);
@@ -228678,7 +227284,7 @@ var Config$1 = class extends Service$1()("@effect/ai-openai/OpenAiLanguageModel/
228678
227284
  * @since 1.0.0
228679
227285
  * @category constructors
228680
227286
  */
228681
- const make$2 = /* @__PURE__ */ fnUntraced(function* ({ model, config: providerConfig }) {
227287
+ const make$4 = /* @__PURE__ */ fnUntraced(function* ({ model, config: providerConfig }) {
228682
227288
  const client = yield* OpenAiClient$1;
228683
227289
  const makeConfig = gen(function* () {
228684
227290
  const services$4 = yield* services();
@@ -228719,7 +227325,7 @@ const make$2 = /* @__PURE__ */ fnUntraced(function* ({ model, config: providerCo
228719
227325
  ...options.previousResponseId ? { previous_response_id: options.previousResponseId } : void 0
228720
227326
  };
228721
227327
  });
228722
- return yield* make$7({
227328
+ return yield* make$10({
228723
227329
  codecTransformer: toCodecOpenAI,
228724
227330
  generateText: fnUntraced(function* (options) {
228725
227331
  const config = yield* makeConfig;
@@ -228768,7 +227374,7 @@ const make$2 = /* @__PURE__ */ fnUntraced(function* ({ model, config: providerCo
228768
227374
  * @since 1.0.0
228769
227375
  * @category layers
228770
227376
  */
228771
- const layer$2 = (options) => effect$1(LanguageModel, make$2(options));
227377
+ const layer$7 = (options) => effect$1(LanguageModel, make$4(options));
228772
227378
  /**
228773
227379
  * Provides config overrides for OpenAI language model operations.
228774
227380
  *
@@ -230422,7 +229028,7 @@ const transformToolCallParams = /* @__PURE__ */ fnUntraced(function* (tools, too
230422
229028
  })));
230423
229029
  });
230424
229030
  //#endregion
230425
- //#region node_modules/.pnpm/clanka@0.2.21_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_09288369fd42a7055df614a5b619d2f6/node_modules/clanka/dist/CodexAuth.js
229031
+ //#region node_modules/.pnpm/clanka@0.2.25_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_7e66a3c8cbea5282674885bea99d757a/node_modules/clanka/dist/CodexAuth.js
230426
229032
  /**
230427
229033
  * @since 1.0.0
230428
229034
  */
@@ -230642,7 +229248,7 @@ var CodexAuth = class CodexAuth extends Service$1()("clanka/CodexAuth") {
230642
229248
  static layerClient = this.layerClientNoDeps.pipe(provide$3(CodexAuth.layer));
230643
229249
  };
230644
229250
  //#endregion
230645
- //#region node_modules/.pnpm/clanka@0.2.21_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_09288369fd42a7055df614a5b619d2f6/node_modules/clanka/dist/Codex.js
229251
+ //#region node_modules/.pnpm/clanka@0.2.25_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_7e66a3c8cbea5282674885bea99d757a/node_modules/clanka/dist/Codex.js
230646
229252
  /**
230647
229253
  * @since 1.0.0
230648
229254
  */
@@ -230650,13 +229256,13 @@ var CodexAuth = class CodexAuth extends Service$1()("clanka/CodexAuth") {
230650
229256
  * @since 1.0.0
230651
229257
  * @category Layers
230652
229258
  */
230653
- const layerClient$1 = layer$4({ apiUrl: "https://chatgpt.com/backend-api/codex" }).pipe(provide$3(CodexAuth.layerClient));
229259
+ const layerClient$1 = layer$9({ apiUrl: "https://chatgpt.com/backend-api/codex" }).pipe(provide$3(CodexAuth.layerClient));
230654
229260
  /**
230655
229261
  * @since 1.0.0
230656
229262
  * @category Layers
230657
229263
  */
230658
229264
  const modelWebSocket = (model, options) => make$16("openai", model, layerModel(model, options).pipe(merge$6(fresh(layerWebSocketMode))));
230659
- const layerModel = (model, options) => layer$2({
229265
+ const layerModel = (model, options) => layer$7({
230660
229266
  model,
230661
229267
  config: {
230662
229268
  ...omit$2(options ?? {}, ["reasoning"]),
@@ -230668,6 +229274,480 @@ const layerModel = (model, options) => layer$2({
230668
229274
  }
230669
229275
  }).pipe(merge$6(AgentModelConfig.layer({ systemPromptTransform: (system, effect) => withConfigOverride(effect, { instructions: system }) })));
230670
229276
  //#endregion
229277
+ //#region node_modules/.pnpm/clanka@0.2.25_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_7e66a3c8cbea5282674885bea99d757a/node_modules/clanka/dist/CodeChunker.js
229278
+ /**
229279
+ * @since 1.0.0
229280
+ */
229281
+ /**
229282
+ * @since 1.0.0
229283
+ * @category Services
229284
+ */
229285
+ var CodeChunker = class extends Service$1()("clanka/CodeChunker") {};
229286
+ const sourceExtensions = new Set([
229287
+ "c",
229288
+ "cc",
229289
+ "cpp",
229290
+ "cs",
229291
+ "css",
229292
+ "cts",
229293
+ "cxx",
229294
+ "go",
229295
+ "gql",
229296
+ "graphql",
229297
+ "h",
229298
+ "hpp",
229299
+ "html",
229300
+ "ini",
229301
+ "java",
229302
+ "js",
229303
+ "jsx",
229304
+ "kt",
229305
+ "kts",
229306
+ "less",
229307
+ "lua",
229308
+ "mjs",
229309
+ "mts",
229310
+ "php",
229311
+ "py",
229312
+ "rb",
229313
+ "rs",
229314
+ "sass",
229315
+ "scala",
229316
+ "scss",
229317
+ "sh",
229318
+ "sql",
229319
+ "svelte",
229320
+ "swift",
229321
+ "ts",
229322
+ "tsx",
229323
+ "vue",
229324
+ "xml",
229325
+ "zsh"
229326
+ ]);
229327
+ const documentationExtensions = new Set([
229328
+ "adoc",
229329
+ "asciidoc",
229330
+ "md",
229331
+ "mdx",
229332
+ "rst",
229333
+ "txt"
229334
+ ]);
229335
+ const ignoredDirectories = new Set([
229336
+ ".git",
229337
+ ".next",
229338
+ ".nuxt",
229339
+ ".svelte-kit",
229340
+ ".turbo",
229341
+ "build",
229342
+ "coverage",
229343
+ "dist",
229344
+ "node_modules",
229345
+ "target"
229346
+ ]);
229347
+ const normalizePath$1 = (path) => path.replace(/\\/g, "/");
229348
+ const normalizeText = (content) => content.replace(/\r\n/g, "\n").replace(/\r/g, "\n");
229349
+ const meaningfulLinePattern = /[^\s\p{P}]/u;
229350
+ const isMeaningfulLine = (line) => meaningfulLinePattern.test(line);
229351
+ const languageByExtension = new Map([
229352
+ ["js", TreeSitterJavaScript],
229353
+ ["jsx", TreeSitterJavaScript],
229354
+ ["ts", TreeSitterTypeScript.typescript],
229355
+ ["tsx", TreeSitterTypeScript.tsx]
229356
+ ]);
229357
+ /**
229358
+ * @since 1.0.0
229359
+ * @category Predicates
229360
+ */
229361
+ const isProbablyMinified = (content) => {
229362
+ const normalized = normalizeText(content);
229363
+ if (normalized.length < 2e3) return false;
229364
+ const lines = normalized.split("\n");
229365
+ if (lines.length <= 2) return true;
229366
+ let longLines = 0;
229367
+ for (const line of lines) if (line.length >= 300) longLines++;
229368
+ return lines.length <= 20 && longLines / lines.length >= .8;
229369
+ };
229370
+ /**
229371
+ * @since 1.0.0
229372
+ * @category Predicates
229373
+ */
229374
+ const isMeaningfulFile = (path) => {
229375
+ const parts = normalizePath$1(path).toLowerCase().split("/");
229376
+ const fileName = parts.at(-1);
229377
+ if (fileName === void 0 || fileName.length === 0) return false;
229378
+ if (parts.some((part) => ignoredDirectories.has(part))) return false;
229379
+ if (/\.min\.(?:css|js)$/i.test(fileName)) return false;
229380
+ const extensionIndex = fileName.lastIndexOf(".");
229381
+ if (extensionIndex === -1) return false;
229382
+ const extension = fileName.slice(extensionIndex + 1);
229383
+ return sourceExtensions.has(extension) || documentationExtensions.has(extension);
229384
+ };
229385
+ const resolveChunkSettings = (options) => {
229386
+ const chunkSize = Math.max(1, options.chunkSize);
229387
+ return {
229388
+ chunkSize,
229389
+ chunkOverlap: Math.max(0, Math.min(chunkSize - 1, options.chunkOverlap)),
229390
+ chunkMaxCharacters: options.chunkMaxCharacters !== void 0 && Number.isFinite(options.chunkMaxCharacters) ? Math.max(1, Math.floor(options.chunkMaxCharacters)) : Number.POSITIVE_INFINITY
229391
+ };
229392
+ };
229393
+ const getPathExtension = (path) => {
229394
+ const fileName = path.split("/").at(-1);
229395
+ if (fileName === void 0) return;
229396
+ const extensionIndex = fileName.lastIndexOf(".");
229397
+ if (extensionIndex === -1) return;
229398
+ return fileName.slice(extensionIndex + 1).toLowerCase();
229399
+ };
229400
+ const resolveAstLanguage = (path) => {
229401
+ const extension = getPathExtension(path);
229402
+ if (extension === void 0) return;
229403
+ return languageByExtension.get(extension);
229404
+ };
229405
+ const lineRangeFromNode = (node) => {
229406
+ const startLine = node.startPosition.row + 1;
229407
+ return {
229408
+ startLine,
229409
+ endLine: Math.max(startLine, node.endPosition.row + 1)
229410
+ };
229411
+ };
229412
+ const hasOnlyWhitespaceLines = (lines, startLine, endLine) => {
229413
+ if (startLine > endLine) return true;
229414
+ for (let lineIndex = startLine; lineIndex <= endLine; lineIndex++) if ((lines[lineIndex - 1] ?? "").trim().length > 0) return false;
229415
+ return true;
229416
+ };
229417
+ const lineRangeWithLeadingComments = (node, siblings, nodeIndex, lines) => {
229418
+ const baseRange = lineRangeFromNode(node);
229419
+ let startLine = baseRange.startLine;
229420
+ for (let index = nodeIndex - 1; index >= 0; index--) {
229421
+ const sibling = siblings[index];
229422
+ if (sibling.type !== "comment") break;
229423
+ const commentRange = lineRangeFromNode(sibling);
229424
+ if (!hasOnlyWhitespaceLines(lines, commentRange.endLine + 1, startLine - 1)) break;
229425
+ startLine = commentRange.startLine;
229426
+ }
229427
+ return {
229428
+ startLine,
229429
+ endLine: baseRange.endLine
229430
+ };
229431
+ };
229432
+ const normalizeLineRange = (range, lineCount) => {
229433
+ const startLine = Math.max(1, Math.min(lineCount, range.startLine));
229434
+ const endLine = Math.max(1, Math.min(lineCount, range.endLine));
229435
+ if (endLine < startLine) return;
229436
+ return {
229437
+ startLine,
229438
+ endLine
229439
+ };
229440
+ };
229441
+ const lineLengthPrefixSums = (lines) => {
229442
+ const sums = [0];
229443
+ for (let index = 0; index < lines.length; index++) sums.push(sums[index] + lines[index].length);
229444
+ return sums;
229445
+ };
229446
+ const lineRangeCharacterLength = (prefixSums, range) => prefixSums[range.endLine] - prefixSums[range.startLine - 1] + (range.endLine - range.startLine);
229447
+ const resolveSegmentEndLine = (options) => {
229448
+ if (options.settings.chunkMaxCharacters === Number.POSITIVE_INFINITY) return options.maxEndLine;
229449
+ let endLine = options.maxEndLine;
229450
+ while (endLine > options.startLine && lineRangeCharacterLength(options.prefixSums, {
229451
+ startLine: options.startLine,
229452
+ endLine
229453
+ }) > options.settings.chunkMaxCharacters) endLine--;
229454
+ return endLine;
229455
+ };
229456
+ const splitRange = (range, settings, prefixSums) => {
229457
+ if (range.endLine - range.startLine + 1 <= settings.chunkSize && lineRangeCharacterLength(prefixSums, range) <= settings.chunkMaxCharacters) return [range];
229458
+ const out = [];
229459
+ for (let startLine = range.startLine; startLine <= range.endLine;) {
229460
+ const maxEndLine = Math.min(range.endLine, startLine + settings.chunkSize - 1);
229461
+ const endLine = resolveSegmentEndLine({
229462
+ startLine,
229463
+ maxEndLine,
229464
+ settings,
229465
+ prefixSums
229466
+ });
229467
+ out.push({
229468
+ startLine,
229469
+ endLine
229470
+ });
229471
+ if (endLine >= range.endLine) break;
229472
+ startLine = Math.max(startLine + 1, endLine - settings.chunkOverlap + 1);
229473
+ }
229474
+ return out;
229475
+ };
229476
+ const nodeText = (node) => {
229477
+ if (node === null) return;
229478
+ const value = node.text.trim().replace(/\s+/g, " ");
229479
+ return value.length === 0 ? void 0 : value;
229480
+ };
229481
+ const nodeFieldText = (node, fieldName) => nodeText(node.childForFieldName(fieldName));
229482
+ const isNamespaceNode = (node) => node.type === "internal_module" || node.type === "module";
229483
+ const unwrapDeclarationNode = (node) => {
229484
+ let current = node;
229485
+ while (true) {
229486
+ if (current.type === "export_statement") {
229487
+ const declaration = current.childForFieldName("declaration") ?? current.namedChildren[0];
229488
+ if (declaration === void 0) return current;
229489
+ current = declaration;
229490
+ continue;
229491
+ }
229492
+ if (current.type === "ambient_declaration") {
229493
+ const declaration = current.namedChildren.find((child) => child.type.endsWith("_declaration") || isNamespaceNode(child));
229494
+ if (declaration === void 0) return current;
229495
+ current = declaration;
229496
+ continue;
229497
+ }
229498
+ return current;
229499
+ }
229500
+ };
229501
+ const variableDeclarators = (node) => node.namedChildren.filter((child) => child.type === "variable_declarator");
229502
+ const variableTypeFromDeclarator = (node) => {
229503
+ const value = node.childForFieldName("value");
229504
+ if (value !== null && value.type.includes("function")) return "function";
229505
+ return "variable";
229506
+ };
229507
+ const variableTypeFromDeclaration = (node) => {
229508
+ if (variableDeclarators(node).some((declarator) => variableTypeFromDeclarator(declarator) === "function")) return "function";
229509
+ return "variable";
229510
+ };
229511
+ const chunkTypeFromNode = (node) => {
229512
+ switch (node.type) {
229513
+ case "class_declaration": return "class";
229514
+ case "enum_declaration": return "enum";
229515
+ case "function_declaration":
229516
+ case "generator_function_declaration": return "function";
229517
+ case "internal_module":
229518
+ case "module": return "namespace";
229519
+ case "interface_declaration": return "interface";
229520
+ case "generator_method_definition":
229521
+ case "method_definition": return "method";
229522
+ case "type_alias_declaration": return "type-alias";
229523
+ case "lexical_declaration":
229524
+ case "variable_declaration": return variableTypeFromDeclaration(node);
229525
+ case "variable_declarator": return variableTypeFromDeclarator(node);
229526
+ default: return;
229527
+ }
229528
+ };
229529
+ const variableNamesFromDeclaration = (node) => {
229530
+ const names = variableDeclarators(node).map((declarator) => nodeFieldText(declarator, "name")).filter((name) => name !== void 0);
229531
+ if (names.length === 0) return;
229532
+ return names.join(", ");
229533
+ };
229534
+ const nameFromNode = (node) => {
229535
+ switch (node.type) {
229536
+ case "class_declaration":
229537
+ case "enum_declaration":
229538
+ case "function_declaration":
229539
+ case "generator_function_declaration":
229540
+ case "internal_module":
229541
+ case "interface_declaration":
229542
+ case "module":
229543
+ case "generator_method_definition":
229544
+ case "method_definition":
229545
+ case "type_alias_declaration":
229546
+ case "variable_declarator": return nodeFieldText(node, "name");
229547
+ case "lexical_declaration":
229548
+ case "variable_declaration": return variableNamesFromDeclaration(node);
229549
+ default: return;
229550
+ }
229551
+ };
229552
+ const formatParent = (type, name) => {
229553
+ if (type === void 0 && name === void 0) return;
229554
+ if (type === void 0) return name;
229555
+ if (name === void 0) return type;
229556
+ return type + " " + name;
229557
+ };
229558
+ const collectClassMethodRanges = (classNode, parent, lines) => {
229559
+ const body = classNode.childForFieldName("body");
229560
+ if (body === null) return [];
229561
+ const out = [];
229562
+ for (let index = 0; index < body.namedChildren.length; index++) {
229563
+ const child = body.namedChildren[index];
229564
+ if (!child.type.includes("method")) continue;
229565
+ out.push({
229566
+ ...lineRangeWithLeadingComments(child, body.namedChildren, index, lines),
229567
+ name: nameFromNode(child),
229568
+ type: chunkTypeFromNode(child),
229569
+ parent
229570
+ });
229571
+ }
229572
+ return out;
229573
+ };
229574
+ const collectAstRanges = (path, content, lines) => {
229575
+ const language = resolveAstLanguage(path);
229576
+ if (language === void 0) return [];
229577
+ try {
229578
+ const parser = new TreeSitter();
229579
+ parser.setLanguage(language);
229580
+ const tree = parser.parse(content, void 0, { bufferSize: 1024 * 1024 });
229581
+ const collectDeclarationRanges = (siblings, parent) => {
229582
+ const out = [];
229583
+ for (let index = 0; index < siblings.length; index++) {
229584
+ const sibling = siblings[index];
229585
+ if (sibling.type === "comment" || sibling.type.includes("import")) continue;
229586
+ const declarationNode = unwrapDeclarationNode(sibling);
229587
+ const type = chunkTypeFromNode(declarationNode);
229588
+ const name = nameFromNode(declarationNode);
229589
+ if (type === void 0 && name === void 0) continue;
229590
+ out.push({
229591
+ ...lineRangeWithLeadingComments(sibling, siblings, index, lines),
229592
+ name,
229593
+ type,
229594
+ parent
229595
+ });
229596
+ if (declarationNode.type === "class_declaration") out.push(...collectClassMethodRanges(declarationNode, formatParent(type, name), lines));
229597
+ if (isNamespaceNode(declarationNode)) {
229598
+ const body = declarationNode.childForFieldName("body");
229599
+ if (body !== null) out.push(...collectDeclarationRanges(body.namedChildren, formatParent(type, name)));
229600
+ }
229601
+ }
229602
+ return out;
229603
+ };
229604
+ return collectDeclarationRanges(tree.rootNode.namedChildren, void 0);
229605
+ } catch {
229606
+ return [];
229607
+ }
229608
+ };
229609
+ const chunksFromRanges = (path, lines, ranges, settings) => {
229610
+ const hasMethodChildRange = (classRange) => {
229611
+ const parent = formatParent("class", classRange.name);
229612
+ return ranges.some((range) => range.type === "method" && range.parent === parent && range.startLine >= classRange.startLine && range.endLine <= classRange.endLine);
229613
+ };
229614
+ const out = [];
229615
+ const seen = /* @__PURE__ */ new Set();
229616
+ const prefixSums = lineLengthPrefixSums(lines);
229617
+ for (const range of ranges) {
229618
+ const normalizedRange = normalizeLineRange(range, lines.length);
229619
+ if (normalizedRange === void 0) continue;
229620
+ const allSegments = splitRange(normalizedRange, settings, prefixSums);
229621
+ const segments = range.type === "class" && allSegments.length > 1 && hasMethodChildRange({
229622
+ ...normalizedRange,
229623
+ name: range.name
229624
+ }) ? [allSegments[0]] : allSegments;
229625
+ for (const segment of segments) {
229626
+ const key = String(segment.startLine) + ":" + String(segment.endLine) + ":" + (range.name ?? "") + ":" + (range.type ?? "") + ":" + (range.parent ?? "");
229627
+ if (seen.has(key)) continue;
229628
+ seen.add(key);
229629
+ const chunkLines = lines.slice(segment.startLine - 1, segment.endLine);
229630
+ if (!chunkLines.some(isMeaningfulLine)) continue;
229631
+ out.push({
229632
+ path,
229633
+ startLine: segment.startLine,
229634
+ endLine: segment.endLine,
229635
+ name: range.name,
229636
+ type: range.type,
229637
+ parent: range.parent,
229638
+ content: chunkLines.join("\n")
229639
+ });
229640
+ }
229641
+ }
229642
+ return out.toSorted((left, right) => left.startLine - right.startLine || left.endLine - right.endLine || (left.name ?? "").localeCompare(right.name ?? ""));
229643
+ };
229644
+ const chunkWithLineWindows = (path, lines, settings) => {
229645
+ const out = [];
229646
+ const prefixSums = lineLengthPrefixSums(lines);
229647
+ for (let index = 0; index < lines.length;) {
229648
+ if (!isMeaningfulLine(lines[index])) {
229649
+ index++;
229650
+ continue;
229651
+ }
229652
+ const startLine = index + 1;
229653
+ const endLine = resolveSegmentEndLine({
229654
+ startLine,
229655
+ maxEndLine: Math.min(lines.length, startLine + settings.chunkSize - 1),
229656
+ settings,
229657
+ prefixSums
229658
+ });
229659
+ const chunkLines = lines.slice(startLine - 1, endLine);
229660
+ out.push({
229661
+ path,
229662
+ startLine,
229663
+ endLine,
229664
+ name: void 0,
229665
+ type: void 0,
229666
+ parent: void 0,
229667
+ content: chunkLines.join("\n")
229668
+ });
229669
+ if (endLine >= lines.length) break;
229670
+ index = Math.max(startLine + 1, endLine - settings.chunkOverlap + 1) - 1;
229671
+ }
229672
+ return out;
229673
+ };
229674
+ /**
229675
+ * @since 1.0.0
229676
+ * @category Constructors
229677
+ */
229678
+ const chunkFileContent = (path, content, options) => {
229679
+ if (content.trim().length === 0 || isProbablyMinified(content)) return [];
229680
+ const normalizedPath = normalizePath$1(path);
229681
+ const normalizedContent = normalizeText(content);
229682
+ const lines = normalizedContent.split("\n");
229683
+ if (lines.at(-1) === "") lines.pop();
229684
+ if (lines.length === 0) return [];
229685
+ const settings = resolveChunkSettings(options);
229686
+ const astRanges = collectAstRanges(normalizedPath, normalizedContent, lines);
229687
+ if (astRanges.length > 0) {
229688
+ const astChunks = chunksFromRanges(normalizedPath, lines, astRanges, settings);
229689
+ if (astChunks.length > 0) return astChunks;
229690
+ }
229691
+ return chunkWithLineWindows(normalizedPath, lines, settings);
229692
+ };
229693
+ /**
229694
+ * @since 1.0.0
229695
+ * @category Layers
229696
+ */
229697
+ const layer$6 = effect$1(CodeChunker, gen(function* () {
229698
+ const spawner = yield* ChildProcessSpawner;
229699
+ const fs = yield* FileSystem;
229700
+ const pathService = yield* Path$1;
229701
+ const listFiles = fn("CodeChunker.listFiles")(function* (options) {
229702
+ const root = pathService.resolve(options.root);
229703
+ const maxFileSize = options.maxFileSize ?? "1M";
229704
+ return yield* pipe$1(spawner.streamLines(make$45("rg", [
229705
+ "--files",
229706
+ "--hidden",
229707
+ "--max-filesize",
229708
+ maxFileSize,
229709
+ "--glob",
229710
+ "!.git"
229711
+ ], {
229712
+ cwd: root,
229713
+ stdin: "ignore"
229714
+ })), runCollect, map$9(fromIterable$5), map$9((entries) => entries.map((entry) => normalizePath$1(entry.trim())).filter((entry) => entry.length > 0 && isMeaningfulFile(entry)).sort((left, right) => left.localeCompare(right))), orDie$2);
229715
+ });
229716
+ const chunkFile = fn("CodeChunker.chunkFile")(function* (options) {
229717
+ const root = pathService.resolve(options.root);
229718
+ const absolutePath = pathService.resolve(root, options.path);
229719
+ const path = normalizePath$1(pathService.relative(root, absolutePath));
229720
+ if (path.length === 0 || path === ".." || path.startsWith("../") || !isMeaningfulFile(path)) return [];
229721
+ return yield* pipe$1(fs.readFileString(absolutePath), map$9((content) => chunkFileContent(path, content, options)), catch_$2(() => succeed$3([])));
229722
+ });
229723
+ const chunkFiles = (options) => fromArray(options.paths).pipe(flatMap$2((path) => pipe$1(chunkFile({
229724
+ root: options.root,
229725
+ path,
229726
+ chunkSize: options.chunkSize,
229727
+ chunkOverlap: options.chunkOverlap,
229728
+ ...options.chunkMaxCharacters === void 0 ? {} : { chunkMaxCharacters: options.chunkMaxCharacters }
229729
+ }), fromArrayEffect), { concurrency: 5 }));
229730
+ const chunkCodebase = fnUntraced(function* (options) {
229731
+ const root = pathService.resolve(options.root);
229732
+ return chunkFiles({
229733
+ root,
229734
+ paths: yield* listFiles({
229735
+ root,
229736
+ ...options.maxFileSize === void 0 ? {} : { maxFileSize: options.maxFileSize }
229737
+ }),
229738
+ chunkSize: options.chunkSize,
229739
+ chunkOverlap: options.chunkOverlap,
229740
+ ...options.chunkMaxCharacters === void 0 ? {} : { chunkMaxCharacters: options.chunkMaxCharacters }
229741
+ });
229742
+ }, unwrap);
229743
+ return CodeChunker.of({
229744
+ listFiles,
229745
+ chunkFile,
229746
+ chunkFiles,
229747
+ chunkCodebase
229748
+ });
229749
+ }));
229750
+ //#endregion
230671
229751
  //#region node_modules/.pnpm/@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36/node_modules/@effect/ai-openai-compat/dist/internal/errors.js
230672
229752
  /** @internal */
230673
229753
  const OpenAiErrorBody = /* @__PURE__ */ Struct$2({ error: /* @__PURE__ */ Struct$2({
@@ -230908,7 +229988,7 @@ const RedactedOpenAiHeaders = {
230908
229988
  * @since 1.0.0
230909
229989
  * @category constructors
230910
229990
  */
230911
- const make$1 = /* @__PURE__ */ fnUntraced(function* (options) {
229991
+ const make$3 = /* @__PURE__ */ fnUntraced(function* (options) {
230912
229992
  const httpClient = (yield* HttpClient).pipe(mapRequest((request) => request.pipe(prependUrl(options.apiUrl ?? "https://api.openai.com/v1"), options.apiKey !== void 0 ? bearerToken(value$2(options.apiKey)) : identity, options.organizationId !== void 0 ? setHeader(RedactedOpenAiHeaders.OpenAiOrganization, value$2(options.organizationId)) : identity, options.projectId !== void 0 ? setHeader(RedactedOpenAiHeaders.OpenAiProject, value$2(options.projectId)) : identity, acceptJson)), options.transformClient !== void 0 ? options.transformClient : identity);
230913
229993
  const resolveHttpClient = map$9(OpenAiConfig.getOrUndefined, (config) => config?.transformClient !== void 0 ? config.transformClient(httpClient) : httpClient);
230914
229994
  const decodeResponse = schemaBodyJson(ChatCompletionResponse);
@@ -230946,7 +230026,7 @@ const make$1 = /* @__PURE__ */ fnUntraced(function* (options) {
230946
230026
  * @since 1.0.0
230947
230027
  * @category layers
230948
230028
  */
230949
- const layer$1 = (options) => effect$1(OpenAiClient, make$1(options));
230029
+ const layer$5 = (options) => effect$1(OpenAiClient, make$3(options));
230950
230030
  const CreateEmbeddingResponseSchema = /* @__PURE__ */ Struct$2({
230951
230031
  data: /* @__PURE__ */ Array$1(/* @__PURE__ */ Struct$2({
230952
230032
  embedding: /* @__PURE__ */ Union$2([/* @__PURE__ */ Array$1(Number$1), String$1]),
@@ -231094,7 +230174,7 @@ var Config = class extends Service$1()("@effect/ai-openai-compat/OpenAiLanguageM
231094
230174
  * @since 1.0.0
231095
230175
  * @category constructors
231096
230176
  */
231097
- const make = /* @__PURE__ */ fnUntraced(function* ({ model, config: providerConfig }) {
230177
+ const make$2 = /* @__PURE__ */ fnUntraced(function* ({ model, config: providerConfig }) {
231098
230178
  const client = yield* OpenAiClient;
231099
230179
  const makeConfig = gen(function* () {
231100
230180
  const services$3 = yield* services();
@@ -231134,7 +230214,7 @@ const make = /* @__PURE__ */ fnUntraced(function* ({ model, config: providerConf
231134
230214
  ...toolChoice !== void 0 ? { tool_choice: toolChoice } : void 0
231135
230215
  });
231136
230216
  });
231137
- return yield* make$7({
230217
+ return yield* make$10({
231138
230218
  codecTransformer: toCodecOpenAI,
231139
230219
  generateText: fnUntraced(function* (options) {
231140
230220
  const config = yield* makeConfig;
@@ -231180,7 +230260,7 @@ const make = /* @__PURE__ */ fnUntraced(function* ({ model, config: providerConf
231180
230260
  * @since 1.0.0
231181
230261
  * @category layers
231182
230262
  */
231183
- const layer = (options) => effect$1(LanguageModel, make(options));
230263
+ const layer$4 = (options) => effect$1(LanguageModel, make$2(options));
231184
230264
  const getSystemMessageMode = (model) => model.startsWith("o") || model.startsWith("gpt-5") || model.startsWith("codex-") || model.startsWith("computer-use") ? "developer" : "system";
231185
230265
  const prepareMessages = /* @__PURE__ */ fnUntraced(function* ({ config, options, capabilities, include, toolNameMapper }) {
231186
230266
  const hasConversation = isNotNullish(config.conversation);
@@ -231954,7 +231034,7 @@ const getUsageDetailNumber = (details, field) => {
231954
231034
  return typeof value === "number" ? value : void 0;
231955
231035
  };
231956
231036
  //#endregion
231957
- //#region node_modules/.pnpm/clanka@0.2.21_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_09288369fd42a7055df614a5b619d2f6/node_modules/clanka/dist/CopilotAuth.js
231037
+ //#region node_modules/.pnpm/clanka@0.2.25_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_7e66a3c8cbea5282674885bea99d757a/node_modules/clanka/dist/CopilotAuth.js
231958
231038
  /**
231959
231039
  * @since 1.0.0
231960
231040
  */
@@ -232145,7 +231225,7 @@ var GithubCopilotAuth = class GithubCopilotAuth extends Service$1()("clanka/Gith
232145
231225
  static layerClient = this.layerClientNoDeps.pipe(provide$3(GithubCopilotAuth.layer));
232146
231226
  };
232147
231227
  //#endregion
232148
- //#region node_modules/.pnpm/clanka@0.2.21_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_09288369fd42a7055df614a5b619d2f6/node_modules/clanka/dist/Copilot.js
231228
+ //#region node_modules/.pnpm/clanka@0.2.25_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_7e66a3c8cbea5282674885bea99d757a/node_modules/clanka/dist/Copilot.js
232149
231229
  /**
232150
231230
  * @since 1.0.0
232151
231231
  */
@@ -232153,12 +231233,12 @@ var GithubCopilotAuth = class GithubCopilotAuth extends Service$1()("clanka/Gith
232153
231233
  * @since 1.0.0
232154
231234
  * @category Layers
232155
231235
  */
232156
- const layerClient = layer$1({ apiUrl: API_URL }).pipe(provide$3(GithubCopilotAuth.layerClient));
231236
+ const layerClient = layer$5({ apiUrl: API_URL }).pipe(provide$3(GithubCopilotAuth.layerClient));
232157
231237
  /**
232158
231238
  * @since 1.0.0
232159
231239
  * @category Layers
232160
231240
  */
232161
- const model = (model, options) => make$16("openai", model, merge$6(layer({
231241
+ const model = (model, options) => make$16("openai", model, merge$6(layer$4({
232162
231242
  model,
232163
231243
  config: omit$2(options ?? {}, ["systemPromptTransform"])
232164
231244
  }), AgentModelConfig.layer({ systemPromptTransform: options?.systemPromptTransform })));
@@ -232565,7 +231645,7 @@ Object.defineProperties(createChalk.prototype, styles);
232565
231645
  const chalk = createChalk();
232566
231646
  createChalk({ level: stderrColor ? stderrColor.level : 0 });
232567
231647
  //#endregion
232568
- //#region node_modules/.pnpm/clanka@0.2.21_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_09288369fd42a7055df614a5b619d2f6/node_modules/clanka/dist/OutputFormatter.js
231648
+ //#region node_modules/.pnpm/clanka@0.2.25_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_7e66a3c8cbea5282674885bea99d757a/node_modules/clanka/dist/OutputFormatter.js
232569
231649
  /**
232570
231650
  * @since 1.0.0
232571
231651
  */
@@ -232639,7 +231719,7 @@ var Muxer = class extends Service$1()("clanka/OutputFormatter/Muxer") {};
232639
231719
  * @category Muxer
232640
231720
  */
232641
231721
  const layerMuxer = (formatter) => effect$1(Muxer, gen(function* () {
232642
- const scope$2 = yield* scope;
231722
+ const scope$3 = yield* scope;
232643
231723
  const output = yield* unbounded$1();
232644
231724
  let agentCount = 0;
232645
231725
  let currentAgentId = null;
@@ -232665,12 +231745,948 @@ const layerMuxer = (formatter) => effect$1(Muxer, gen(function* () {
232665
231745
  if (currentAgentId !== id) return void_$2;
232666
231746
  currentAgentId = null;
232667
231747
  return semaphore.release(1);
232668
- }), forkIn(scope$2), asVoid);
231748
+ }), forkIn(scope$3), asVoid);
232669
231749
  },
232670
231750
  output: fromPubSub(output)
232671
231751
  });
232672
231752
  }));
232673
231753
  //#endregion
231754
+ //#region node_modules/.pnpm/effect@4.0.0-beta.36/node_modules/effect/dist/unstable/schema/VariantSchema.js
231755
+ /**
231756
+ * @since 4.0.0
231757
+ * @category Type IDs
231758
+ */
231759
+ const TypeId$2 = "~effect/schema/VariantSchema";
231760
+ const cacheSymbol = /* @__PURE__ */ Symbol.for(`${TypeId$2}/cache`);
231761
+ /**
231762
+ * @since 4.0.0
231763
+ * @category guards
231764
+ */
231765
+ const isStruct = (u) => hasProperty(u, TypeId$2);
231766
+ const FieldTypeId = "~effect/schema/VariantSchema/Field";
231767
+ /**
231768
+ * @since 4.0.0
231769
+ * @category guards
231770
+ */
231771
+ const isField = (u) => hasProperty(u, FieldTypeId);
231772
+ const extract$1 = /* @__PURE__ */ dual((args) => isStruct(args[0]), (self, variant, options) => {
231773
+ const cache = self[cacheSymbol] ?? (self[cacheSymbol] = {});
231774
+ const cacheKey = options?.isDefault === true ? "__default" : variant;
231775
+ if (cache[cacheKey] !== void 0) return cache[cacheKey];
231776
+ const fields = {};
231777
+ for (const key of Object.keys(self[TypeId$2])) {
231778
+ const value = self[TypeId$2][key];
231779
+ if ("~effect/schema/VariantSchema" in value) if (options?.isDefault === true && isSchema(value)) fields[key] = value;
231780
+ else fields[key] = extract$1(value, variant);
231781
+ else if (FieldTypeId in value) {
231782
+ if (variant in value.schemas) fields[key] = value.schemas[variant];
231783
+ } else fields[key] = value;
231784
+ }
231785
+ return cache[cacheKey] = Struct$2(fields);
231786
+ });
231787
+ /**
231788
+ * @since 4.0.0
231789
+ * @category constructors
231790
+ */
231791
+ const make$1 = (options) => {
231792
+ function Class(identifier) {
231793
+ return function(fields, annotations) {
231794
+ const variantStruct = Struct$1(fields);
231795
+ const schema = extract$1(variantStruct, options.defaultVariant, { isDefault: true });
231796
+ const SClass = Class$2;
231797
+ class Base extends SClass(identifier)(schema.fields, annotations) {
231798
+ static [TypeId$2] = fields;
231799
+ }
231800
+ for (const variant of options.variants) Object.defineProperty(Base, variant, { value: extract$1(variantStruct, variant).annotate({
231801
+ id: `${identifier}.${variant}`,
231802
+ title: `${identifier}.${variant}`
231803
+ }) });
231804
+ return Base;
231805
+ };
231806
+ }
231807
+ function FieldOnly(keys) {
231808
+ return function(schema) {
231809
+ const obj = {};
231810
+ for (const key of keys) obj[key] = schema;
231811
+ return Field$1(obj);
231812
+ };
231813
+ }
231814
+ function FieldExcept(keys) {
231815
+ return function(schema) {
231816
+ const obj = {};
231817
+ for (const variant of options.variants) if (!keys.includes(variant)) obj[variant] = schema;
231818
+ return Field$1(obj);
231819
+ };
231820
+ }
231821
+ function UnionVariants(members) {
231822
+ return Union$1(members, options.variants);
231823
+ }
231824
+ return {
231825
+ Struct: Struct$1,
231826
+ Field: Field$1,
231827
+ FieldOnly,
231828
+ FieldExcept,
231829
+ Class,
231830
+ Union: UnionVariants,
231831
+ fieldEvolve: dual(2, (self, f) => {
231832
+ return Field$1(evolve((isField(self) ? self : Field$1(Object.fromEntries(options.variants.map((variant) => [variant, self])))).schemas, f));
231833
+ }),
231834
+ extract: dual(2, (self, variant) => extract$1(self, variant, { isDefault: variant === options.defaultVariant }))
231835
+ };
231836
+ };
231837
+ const StructProto = { pipe() {
231838
+ return pipeArguments(this, arguments);
231839
+ } };
231840
+ const Struct$1 = (fields) => {
231841
+ const self = Object.create(StructProto);
231842
+ self[TypeId$2] = fields;
231843
+ return self;
231844
+ };
231845
+ const FieldProto = {
231846
+ [FieldTypeId]: FieldTypeId,
231847
+ pipe() {
231848
+ return pipeArguments(this, arguments);
231849
+ }
231850
+ };
231851
+ const Field$1 = (schemas) => {
231852
+ const self = Object.create(FieldProto);
231853
+ self.schemas = schemas;
231854
+ return self;
231855
+ };
231856
+ const Union$1 = (members, variants) => {
231857
+ const VariantUnion = Union$2(members.filter((member) => isSchema(member)));
231858
+ for (const variant of variants) Object.defineProperty(VariantUnion, variant, { value: Union$2(members.map((member) => extract$1(member, variant))) });
231859
+ return VariantUnion;
231860
+ };
231861
+ //#endregion
231862
+ //#region node_modules/.pnpm/effect@4.0.0-beta.36/node_modules/effect/dist/unstable/schema/Model.js
231863
+ const { Class, Field, FieldExcept, FieldOnly, Struct, Union, extract, fieldEvolve } = /* @__PURE__ */ make$1({
231864
+ variants: [
231865
+ "select",
231866
+ "insert",
231867
+ "update",
231868
+ "json",
231869
+ "jsonCreate",
231870
+ "jsonUpdate"
231871
+ ],
231872
+ defaultVariant: "select"
231873
+ });
231874
+ /**
231875
+ * A field that represents a column that is generated by the database.
231876
+ *
231877
+ * It is available for selection and update, but not for insertion.
231878
+ *
231879
+ * @since 4.0.0
231880
+ * @category generated
231881
+ */
231882
+ const Generated = (schema) => Field({
231883
+ select: schema,
231884
+ update: schema,
231885
+ json: schema
231886
+ });
231887
+ //#endregion
231888
+ //#region node_modules/.pnpm/effect@4.0.0-beta.36/node_modules/effect/dist/unstable/sql/SqlError.js
231889
+ /**
231890
+ * @since 4.0.0
231891
+ */
231892
+ const TypeId$1 = "~effect/sql/SqlError";
231893
+ /**
231894
+ * @since 4.0.0
231895
+ */
231896
+ var SqlError = class extends TaggedErrorClass("effect/sql/SqlError")("SqlError", {
231897
+ cause: Defect,
231898
+ message: /* @__PURE__ */ optional$3(String$1)
231899
+ }) {
231900
+ /**
231901
+ * @since 4.0.0
231902
+ */
231903
+ [TypeId$1] = TypeId$1;
231904
+ };
231905
+ /**
231906
+ * @since 4.0.0
231907
+ */
231908
+ var ResultLengthMismatch = class extends TaggedErrorClass("effect/sql/ResultLengthMismatch")("ResultLengthMismatch", {
231909
+ expected: Number$1,
231910
+ actual: Number$1
231911
+ }) {
231912
+ /**
231913
+ * @since 4.0.0
231914
+ */
231915
+ [TypeId$1] = TypeId$1;
231916
+ /**
231917
+ * @since 4.0.0
231918
+ */
231919
+ get message() {
231920
+ return `Expected ${this.expected} results but got ${this.actual}`;
231921
+ }
231922
+ };
231923
+ //#endregion
231924
+ //#region node_modules/.pnpm/effect@4.0.0-beta.36/node_modules/effect/dist/unstable/sql/SqlResolver.js
231925
+ /**
231926
+ * @since 4.0.0
231927
+ */
231928
+ const SqlRequestProto = {
231929
+ ...Class$3.prototype,
231930
+ [symbol$5](that) {
231931
+ return equals$2(this.payload, that.payload);
231932
+ },
231933
+ [symbol$6]() {
231934
+ return hash(this.payload);
231935
+ }
231936
+ };
231937
+ /**
231938
+ * @since 4.0.0
231939
+ * @category requests
231940
+ */
231941
+ const request$1 = function() {
231942
+ if (arguments.length === 1) {
231943
+ const resolver = arguments[0];
231944
+ return (payload) => request$2(SqlRequest(payload), resolver);
231945
+ }
231946
+ return request$2(SqlRequest(arguments[0]), arguments[1]);
231947
+ };
231948
+ /**
231949
+ * @since 4.0.0
231950
+ * @category requests
231951
+ */
231952
+ const SqlRequest = (payload) => {
231953
+ const self = Object.create(SqlRequestProto);
231954
+ self.payload = payload;
231955
+ return self;
231956
+ };
231957
+ /**
231958
+ * Create a resolver for a sql query with a request schema and a result schema.
231959
+ *
231960
+ * The request schema is used to validate the input of the query.
231961
+ * The result schema is used to validate the output of the query.
231962
+ *
231963
+ * Results are mapped to the requests in order, so the length of the results must match the length of the requests.
231964
+ *
231965
+ * @since 4.0.0
231966
+ * @category resolvers
231967
+ */
231968
+ const ordered = (options) => {
231969
+ const decodeArray = decodeUnknownEffect(Array$1(options.Result));
231970
+ return makeGrouped({
231971
+ key: transactionKey,
231972
+ resolver: fnUntraced(function* (entries) {
231973
+ const inputs = yield* partitionRequests(entries, options.Request);
231974
+ const results = yield* options.execute(inputs).pipe(provideServices$2(entries[0].services));
231975
+ if (results.length !== inputs.length) return yield* new ResultLengthMismatch({
231976
+ expected: inputs.length,
231977
+ actual: results.length
231978
+ });
231979
+ const decodedResults = yield* decodeArray(results).pipe(provideServices$2(entries[0].services));
231980
+ for (let i = 0; i < entries.length; i++) entries[i].completeUnsafe(succeed$6(decodedResults[i]));
231981
+ })
231982
+ });
231983
+ };
231984
+ /**
231985
+ * Create a resolver that resolves results by id.
231986
+ *
231987
+ * @since 4.0.0
231988
+ * @category resolvers
231989
+ */
231990
+ const findById = (options) => {
231991
+ const decodeResults = decodeUnknownEffect(Array$1(options.Result));
231992
+ return makeGrouped({
231993
+ key(entry) {
231994
+ const conn = entry.services.mapUnsafe.get(TransactionConnection.key);
231995
+ if (!conn) return void 0;
231996
+ return byReferenceUnsafe(conn);
231997
+ },
231998
+ resolver: fnUntraced(function* (entries) {
231999
+ const [inputs, idMap] = yield* partitionRequestsById(entries, options.Id);
232000
+ const results = yield* options.execute(inputs).pipe(provideServices$2(entries[0].services));
232001
+ const decodedResults = yield* decodeResults(results).pipe(provideServices$2(entries[0].services));
232002
+ for (let i = 0; i < decodedResults.length; i++) {
232003
+ const result = decodedResults[i];
232004
+ const id = options.ResultId(result, results[i]);
232005
+ const request = get$13(idMap, id);
232006
+ if (request._tag === "None") continue;
232007
+ remove$5(idMap, id);
232008
+ request.value.completeUnsafe(succeed$6(result));
232009
+ }
232010
+ if (isEmpty$3(idMap)) return;
232011
+ forEach$2(idMap, (request) => {
232012
+ request.completeUnsafe(constNoSuchElement);
232013
+ });
232014
+ })
232015
+ });
232016
+ };
232017
+ const void_ = (options) => makeGrouped({
232018
+ key: transactionKey,
232019
+ resolver: fnUntraced(function* (entries) {
232020
+ const inputs = yield* partitionRequests(entries, options.Request);
232021
+ yield* options.execute(inputs).pipe(provideServices$2(entries[0].services));
232022
+ for (let i = 0; i < entries.length; i++) entries[i].completeUnsafe(void_$3);
232023
+ })
232024
+ });
232025
+ const constNoSuchElement = /* @__PURE__ */ fail$8(/* @__PURE__ */ new NoSuchElementError());
232026
+ const partitionRequests = function* (requests, schema) {
232027
+ const len = requests.length;
232028
+ const inputs = empty$17();
232029
+ let entry;
232030
+ const encode = encodeEffect(schema);
232031
+ const handle = matchCauseEager({
232032
+ onFailure(cause) {
232033
+ entry.completeUnsafe(failCause$4(cause));
232034
+ },
232035
+ onSuccess(value) {
232036
+ inputs.push(value);
232037
+ }
232038
+ });
232039
+ for (let i = 0; i < len; i++) {
232040
+ entry = requests[i];
232041
+ yield provideServices$2(handle(encode(entry.request.payload)), entry.services);
232042
+ }
232043
+ return inputs;
232044
+ };
232045
+ const partitionRequestsById = function* (requests, schema) {
232046
+ const len = requests.length;
232047
+ const inputs = empty$17();
232048
+ const byIdMap = empty$11();
232049
+ let entry;
232050
+ const encode = encodeEffect(schema);
232051
+ const handle = matchCauseEager({
232052
+ onFailure(cause) {
232053
+ entry.completeUnsafe(failCause$4(cause));
232054
+ },
232055
+ onSuccess(value) {
232056
+ inputs.push(value);
232057
+ }
232058
+ });
232059
+ for (let i = 0; i < len; i++) {
232060
+ entry = requests[i];
232061
+ yield provideServices$2(handle(encode(entry.request.payload)), entry.services);
232062
+ set$8(byIdMap, entry.request.payload, entry);
232063
+ }
232064
+ return [inputs, byIdMap];
232065
+ };
232066
+ function transactionKey(entry) {
232067
+ const conn = entry.services.mapUnsafe.get(TransactionConnection.key);
232068
+ if (!conn) return void 0;
232069
+ return byReferenceUnsafe(conn);
232070
+ }
232071
+ //#endregion
232072
+ //#region node_modules/.pnpm/effect@4.0.0-beta.36/node_modules/effect/dist/unstable/sql/SqlSchema.js
232073
+ /**
232074
+ * Run a sql query with a request schema and a result schema.
232075
+ *
232076
+ * @since 4.0.0
232077
+ * @category constructor
232078
+ */
232079
+ const findAll = (options) => {
232080
+ const encodeRequest = encodeEffect(options.Request);
232081
+ const decode = decodeUnknownEffect(mutable(Array$1(options.Result)));
232082
+ return (request) => flatMap$4(flatMap$4(encodeRequest(request), options.execute), decode);
232083
+ };
232084
+ //#endregion
232085
+ //#region node_modules/.pnpm/effect@4.0.0-beta.36/node_modules/effect/dist/unstable/sql/SqlModel.js
232086
+ /**
232087
+ * Create some simple data loaders from a model.
232088
+ *
232089
+ * @since 4.0.0
232090
+ * @category repository
232091
+ */
232092
+ const makeDataLoaders = (Model, options) => gen(function* () {
232093
+ const sql = yield* SqlClient;
232094
+ const idSchema = Model.fields[options.idColumn];
232095
+ const idColumn = options.idColumn;
232096
+ const setMaxBatchSize = options.maxBatchSize ? batchN(options.maxBatchSize) : identity;
232097
+ const insertExecute = request$1(ordered({
232098
+ Request: Model.insert,
232099
+ Result: Model,
232100
+ execute: (request) => sql.onDialectOrElse({
232101
+ mysql: () => forEach$4(request, (request) => sql`insert into ${sql(options.tableName)} ${sql.insert(request)};
232102
+ select * from ${sql(options.tableName)} where ${sql(idColumn)} = LAST_INSERT_ID();`.unprepared.pipe(map$9(([, results]) => results[0])), { concurrency: 10 }),
232103
+ orElse: () => sql`insert into ${sql(options.tableName)} ${sql.insert(request).returning("*")}`
232104
+ })
232105
+ }).pipe(setDelay(options.window), setMaxBatchSize, withSpan(`${options.spanPrefix}.insertResolver`)));
232106
+ const insert = (insert) => insertExecute(insert).pipe(catchTag$1("ResultLengthMismatch", die$2), withSpan$1(`${options.spanPrefix}.insert`, {}, { captureStackTrace: false }));
232107
+ const insertVoidExecute = request$1(void_({
232108
+ Request: Model.insert,
232109
+ execute: (request) => sql`insert into ${sql(options.tableName)} ${sql.insert(request)}`
232110
+ }).pipe(setDelay(options.window), setMaxBatchSize, withSpan(`${options.spanPrefix}.insertVoidResolver`)));
232111
+ const insertVoid = (insert) => insertVoidExecute(insert).pipe(withSpan$1(`${options.spanPrefix}.insertVoid`, {}, { captureStackTrace: false }));
232112
+ const findByIdExecute = request$1(findById({
232113
+ Id: idSchema,
232114
+ Result: Model,
232115
+ ResultId(request) {
232116
+ return request[idColumn];
232117
+ },
232118
+ execute: (ids) => sql`select * from ${sql(options.tableName)} where ${sql.in(idColumn, ids)}`
232119
+ }).pipe(setDelay(options.window), setMaxBatchSize, withSpan(`${options.spanPrefix}.findByIdResolver`)));
232120
+ const findById$1 = (id) => findByIdExecute(id).pipe(withSpan$1(`${options.spanPrefix}.findById`, { attributes: { id } }, { captureStackTrace: false }));
232121
+ const deleteExecute = request$1(void_({
232122
+ Request: idSchema,
232123
+ execute: (ids) => sql`delete from ${sql(options.tableName)} where ${sql.in(idColumn, ids)}`
232124
+ }).pipe(setDelay(options.window), setMaxBatchSize, withSpan(`${options.spanPrefix}.deleteResolver`)));
232125
+ const delete_ = (id) => deleteExecute(id).pipe(withSpan$1(`${options.spanPrefix}.delete`, { attributes: { id } }, { captureStackTrace: false }));
232126
+ return {
232127
+ insert,
232128
+ insertVoid,
232129
+ findById: findById$1,
232130
+ delete: delete_
232131
+ };
232132
+ });
232133
+ //#endregion
232134
+ //#region node_modules/.pnpm/clanka@0.2.25_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_7e66a3c8cbea5282674885bea99d757a/node_modules/clanka/dist/ChunkRepo.js
232135
+ /**
232136
+ * @since 1.0.0
232137
+ * @category Models
232138
+ */
232139
+ const ChunkId = Number$1.pipe(brand("ChunkRepo/ChunkId"));
232140
+ /**
232141
+ * @since 1.0.0
232142
+ * @category Models
232143
+ */
232144
+ const SyncId = String$1.pipe(brand("ChunkRepo/SyncId"));
232145
+ /**
232146
+ * @since 1.0.0
232147
+ * @category Models
232148
+ */
232149
+ const Float32ArraySchema = instanceOf(Float32Array);
232150
+ /**
232151
+ * @since 1.0.0
232152
+ * @category Models
232153
+ */
232154
+ const Float32ArrayFromArray = Array$1(Number$1).pipe(decodeTo(Float32ArraySchema, transform$3({
232155
+ decode: (arr) => new Float32Array(arr),
232156
+ encode: (array) => Array.from(array)
232157
+ })));
232158
+ /**
232159
+ * @since 1.0.0
232160
+ * @category Models
232161
+ */
232162
+ const Float32ArrayField = Field({
232163
+ insert: Float32ArraySchema,
232164
+ update: Float32ArraySchema,
232165
+ jsonCreate: Float32ArrayFromArray,
232166
+ jsonUpdate: Float32ArrayFromArray
232167
+ });
232168
+ /**
232169
+ * @since 1.0.0
232170
+ * @category Models
232171
+ */
232172
+ var Chunk = class extends Class("Chunk")({
232173
+ id: Generated(ChunkId),
232174
+ path: String$1,
232175
+ content: String$1,
232176
+ hash: String$1,
232177
+ vector: Float32ArrayField,
232178
+ syncId: SyncId
232179
+ }) {};
232180
+ /**
232181
+ * @since 1.0.0
232182
+ * @category Services
232183
+ */
232184
+ var ChunkRepo = class extends Service$1()("clanka/ChunkRepo") {};
232185
+ /**
232186
+ * @since 1.0.0
232187
+ * @category Errors
232188
+ */
232189
+ var ChunkRepoError = class extends TaggedErrorClass()("ChunkRepoError", { reason: Union$2([SqlError]) }) {
232190
+ cause = this.reason;
232191
+ message = this.reason.message;
232192
+ };
232193
+ /**
232194
+ * @since 1.0.0
232195
+ * @category Layers
232196
+ */
232197
+ const layer$3 = effect$1(ChunkRepo, gen(function* () {
232198
+ const sql = yield* SqlClient;
232199
+ const dimensions = yield* Dimensions;
232200
+ const loaders = yield* makeDataLoaders(Chunk, {
232201
+ tableName: "chunks",
232202
+ idColumn: "id",
232203
+ window: 10,
232204
+ spanPrefix: "ChunkRepo"
232205
+ });
232206
+ let needsQuantization = true;
232207
+ const maybeQuantize = gen(function* () {
232208
+ if (!needsQuantization) return;
232209
+ needsQuantization = false;
232210
+ yield* sql`select vector_init('chunks', 'vector', 'type=FLOAT32,dimension=${sql.literal(String(dimensions))}')`;
232211
+ yield* sql`select vector_quantize('chunks', 'vector')`;
232212
+ }).pipe(mapError$2((reason) => new ChunkRepoError({ reason })));
232213
+ yield* forkScoped(maybeQuantize);
232214
+ const search = findAll({
232215
+ Request: Struct$2({
232216
+ vector: Float32ArraySchema,
232217
+ limit: Number$1
232218
+ }),
232219
+ Result: Chunk,
232220
+ execute: ({ vector, limit }) => sql`
232221
+ select chunks.id, chunks.path, chunks.content, chunks.hash, chunks.syncId
232222
+ from chunks
232223
+ JOIN vector_quantize_scan('chunks', 'vector', ${vector}, CAST(${limit} AS INTEGER)) AS v
232224
+ ON chunks.id = v.rowid
232225
+ `
232226
+ });
232227
+ const exists = findById({
232228
+ Id: String$1,
232229
+ Result: Struct$2({
232230
+ id: ChunkId,
232231
+ hash: String$1
232232
+ }),
232233
+ ResultId(result) {
232234
+ return result.hash;
232235
+ },
232236
+ execute: (hashes) => sql`select id, hash from chunks where ${sql.in("hash", hashes)}`
232237
+ }).pipe(setDelay(5));
232238
+ return ChunkRepo.of({
232239
+ insert: (insert) => {
232240
+ needsQuantization = true;
232241
+ return loaders.insert(insert).pipe(catchTags$1({
232242
+ SqlError: (reason) => fail$6(new ChunkRepoError({ reason })),
232243
+ SchemaError: die$2
232244
+ }));
232245
+ },
232246
+ findById: (id) => loaders.findById(id).pipe(catchTags$1({ SchemaError: die$2 })),
232247
+ exists: (hash) => request$1(hash, exists).pipe(map$9((result) => result.id), catchNoSuchElement, catchTags$1({
232248
+ SqlError: (reason) => fail$6(new ChunkRepoError({ reason })),
232249
+ SchemaError: die$2
232250
+ })),
232251
+ search: fn("ChunkRepo.search")(function* (options) {
232252
+ yield* maybeQuantize;
232253
+ return yield* search(options).pipe(catchTags$1({
232254
+ SqlError: (reason) => fail$6(new ChunkRepoError({ reason })),
232255
+ SchemaError: die$2
232256
+ }));
232257
+ }),
232258
+ quantize: maybeQuantize,
232259
+ setSyncId: (chunkId, syncId) => sql`update chunks set syncId = ${syncId} where id = ${chunkId}`.pipe(mapError$2((reason) => new ChunkRepoError({ reason }))),
232260
+ deleteByPath: (path) => sql`delete from chunks where path = ${path}`.pipe(mapError$2((reason) => new ChunkRepoError({ reason }))),
232261
+ deleteForSyncId: (syncId) => sql`delete from chunks where syncId != ${syncId}`.pipe(mapError$2((reason) => new ChunkRepoError({ reason })))
232262
+ });
232263
+ }));
232264
+ //#endregion
232265
+ //#region node_modules/.pnpm/@effect+sql-sqlite-node@4.0.0-beta.36_effect@4.0.0-beta.36/node_modules/@effect/sql-sqlite-node/dist/SqliteClient.js
232266
+ /**
232267
+ * @since 1.0.0
232268
+ */
232269
+ const ATTR_DB_SYSTEM_NAME = "db.system.name";
232270
+ /**
232271
+ * @category type ids
232272
+ * @since 1.0.0
232273
+ */
232274
+ const TypeId = "~@effect/sql-sqlite-node/SqliteClient";
232275
+ /**
232276
+ * @category tags
232277
+ * @since 1.0.0
232278
+ */
232279
+ const SqliteClient = /* @__PURE__ */ Service$1("@effect/sql-sqlite-node/SqliteClient");
232280
+ /**
232281
+ * @category constructor
232282
+ * @since 1.0.0
232283
+ */
232284
+ const make = (options) => gen(function* () {
232285
+ const compiler = makeCompilerSqlite(options.transformQueryNames);
232286
+ const transformRows = options.transformResultNames ? defaultTransforms(options.transformResultNames).array : void 0;
232287
+ const makeConnection = gen(function* () {
232288
+ const scope$2 = yield* scope;
232289
+ const db = new Sqlite(options.filename, { readonly: options.readonly ?? false });
232290
+ yield* addFinalizer$1(scope$2, sync(() => db.close()));
232291
+ if (options.disableWAL !== true) db.pragma("journal_mode = WAL");
232292
+ const prepareCache = yield* make$57({
232293
+ capacity: options.prepareCacheSize ?? 200,
232294
+ timeToLive: options.prepareCacheTTL ?? minutes(10),
232295
+ lookup: (sql) => try_({
232296
+ try: () => db.prepare(sql),
232297
+ catch: (cause) => new SqlError({
232298
+ cause,
232299
+ message: "Failed to prepare statement "
232300
+ })
232301
+ })
232302
+ });
232303
+ const runStatement = (statement, params, raw) => withFiber((fiber) => {
232304
+ if (get$15(fiber.services, SafeIntegers)) statement.safeIntegers(true);
232305
+ try {
232306
+ if (statement.reader) return succeed$3(statement.all(...params));
232307
+ const result = statement.run(...params);
232308
+ return succeed$3(raw ? result : []);
232309
+ } catch (cause) {
232310
+ return fail$6(new SqlError({
232311
+ cause,
232312
+ message: "Failed to execute statement"
232313
+ }));
232314
+ }
232315
+ });
232316
+ const run = (sql, params, raw = false) => flatMap$4(get$7(prepareCache, sql), (s) => runStatement(s, params, raw));
232317
+ const runValues = (sql, params) => acquireUseRelease(get$7(prepareCache, sql), (statement) => try_({
232318
+ try: () => {
232319
+ if (statement.reader) {
232320
+ statement.raw(true);
232321
+ return statement.all(...params);
232322
+ }
232323
+ statement.run(...params);
232324
+ return [];
232325
+ },
232326
+ catch: (cause) => new SqlError({
232327
+ cause,
232328
+ message: "Failed to execute statement"
232329
+ })
232330
+ }), (statement) => sync(() => statement.reader && statement.raw(false)));
232331
+ return identity({
232332
+ execute(sql, params, transformRows) {
232333
+ return transformRows ? map$9(run(sql, params), transformRows) : run(sql, params);
232334
+ },
232335
+ executeRaw(sql, params) {
232336
+ return run(sql, params, true);
232337
+ },
232338
+ executeValues(sql, params) {
232339
+ return runValues(sql, params);
232340
+ },
232341
+ executeUnprepared(sql, params, transformRows) {
232342
+ const effect = runStatement(db.prepare(sql), params ?? [], false);
232343
+ return transformRows ? map$9(effect, transformRows) : effect;
232344
+ },
232345
+ executeStream(_sql, _params) {
232346
+ return die("executeStream not implemented");
232347
+ },
232348
+ export: try_({
232349
+ try: () => db.serialize(),
232350
+ catch: (cause) => new SqlError({
232351
+ cause,
232352
+ message: "Failed to export database"
232353
+ })
232354
+ }),
232355
+ backup(destination) {
232356
+ return tryPromise({
232357
+ try: () => db.backup(destination),
232358
+ catch: (cause) => new SqlError({
232359
+ cause,
232360
+ message: "Failed to backup database"
232361
+ })
232362
+ });
232363
+ },
232364
+ loadExtension(path) {
232365
+ return try_({
232366
+ try: () => db.loadExtension(path),
232367
+ catch: (cause) => new SqlError({
232368
+ cause,
232369
+ message: "Failed to load extension"
232370
+ })
232371
+ });
232372
+ }
232373
+ });
232374
+ });
232375
+ const semaphore = yield* make$72(1);
232376
+ const connection = yield* makeConnection;
232377
+ const acquirer = semaphore.withPermits(1)(succeed$3(connection));
232378
+ const transactionAcquirer = uninterruptibleMask((restore) => {
232379
+ const scope = getUnsafe$4(getCurrent().services, Scope);
232380
+ return as$1(tap$1(restore(semaphore.take(1)), () => addFinalizer$1(scope, semaphore.release(1))), connection);
232381
+ });
232382
+ return Object.assign(yield* make$49({
232383
+ acquirer,
232384
+ compiler,
232385
+ transactionAcquirer,
232386
+ spanAttributes: [...options.spanAttributes ? Object.entries(options.spanAttributes) : [], [ATTR_DB_SYSTEM_NAME, "sqlite"]],
232387
+ transformRows
232388
+ }), {
232389
+ [TypeId]: TypeId,
232390
+ config: options,
232391
+ export: flatMap$4(acquirer, (_) => _.export),
232392
+ backup: (destination) => flatMap$4(acquirer, (_) => _.backup(destination)),
232393
+ loadExtension: (path) => flatMap$4(acquirer, (_) => _.loadExtension(path))
232394
+ });
232395
+ });
232396
+ /**
232397
+ * @category layers
232398
+ * @since 1.0.0
232399
+ */
232400
+ const layer$2 = (config) => effectServices(map$9(make(config), (client) => make$87(SqliteClient, client).pipe(add$3(SqlClient, client)))).pipe(provide$3(layer$32));
232401
+ //#endregion
232402
+ //#region node_modules/.pnpm/@effect+sql-sqlite-node@4.0.0-beta.36_effect@4.0.0-beta.36/node_modules/@effect/sql-sqlite-node/dist/SqliteMigrator.js
232403
+ /**
232404
+ * @category constructor
232405
+ * @since 1.0.0
232406
+ */
232407
+ const run$1 = /* @__PURE__ */ make$25({});
232408
+ /**
232409
+ * @category constructor
232410
+ * @since 1.0.0
232411
+ */
232412
+ const layer$1 = (options) => effectDiscard(run$1(options));
232413
+ //#endregion
232414
+ //#region node_modules/.pnpm/clanka@0.2.25_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_7e66a3c8cbea5282674885bea99d757a/node_modules/clanka/dist/internal/sqlite-vector.js
232415
+ /**
232416
+ * Binary extension for each platform
232417
+ */
232418
+ const PLATFORM_EXTENSIONS = {
232419
+ darwin: ".dylib",
232420
+ linux: ".so",
232421
+ win32: ".dll"
232422
+ };
232423
+ /**
232424
+ * Detects if the system uses musl libc (Alpine Linux, etc.)
232425
+ * Uses multiple detection strategies for reliability
232426
+ */
232427
+ function isMusl() {
232428
+ if (platform() !== "linux") return false;
232429
+ for (const file of [
232430
+ "/lib/ld-musl-x86_64.so.1",
232431
+ "/lib/ld-musl-aarch64.so.1",
232432
+ "/lib/ld-musl-armhf.so.1"
232433
+ ]) if (existsSync(file)) return true;
232434
+ try {
232435
+ if (execSync("ldd --version 2>&1", {
232436
+ encoding: "utf-8",
232437
+ stdio: [
232438
+ "pipe",
232439
+ "pipe",
232440
+ "pipe"
232441
+ ]
232442
+ }).includes("musl")) return true;
232443
+ } catch {}
232444
+ try {
232445
+ if (existsSync("/etc/os-release")) {
232446
+ const osRelease = readFileSync("/etc/os-release", "utf-8");
232447
+ if (osRelease.includes("Alpine") || osRelease.includes("musl")) return true;
232448
+ }
232449
+ } catch {}
232450
+ try {
232451
+ if ((process.report?.getReport?.())?.header?.glibcVersionRuntime === "") return true;
232452
+ } catch {}
232453
+ return false;
232454
+ }
232455
+ /**
232456
+ * Gets the current platform identifier
232457
+ * @throws {Error} If the platform is unsupported
232458
+ */
232459
+ function getCurrentPlatform() {
232460
+ const platformName = platform();
232461
+ const archName = arch();
232462
+ if (platformName === "darwin") {
232463
+ if (archName === "arm64") return "darwin-arm64";
232464
+ if (archName === "x64" || archName === "ia32") return "darwin-x86_64";
232465
+ }
232466
+ if (platformName === "linux") {
232467
+ const muslSuffix = isMusl() ? "-musl" : "";
232468
+ if (archName === "arm64") return `linux-arm64${muslSuffix}`;
232469
+ if (archName === "x64" || archName === "ia32") return `linux-x86_64${muslSuffix}`;
232470
+ }
232471
+ if (platformName === "win32") {
232472
+ if (archName === "x64" || archName === "ia32") return "win32-x86_64";
232473
+ }
232474
+ throw new Error(`Unsupported platform: ${platformName}-${archName}. Supported platforms: darwin-arm64, darwin-x86_64, linux-arm64, linux-x86_64, win32-x86_64 (with glibc or musl support for Linux)`);
232475
+ }
232476
+ /**
232477
+ * Gets the package name for the current platform
232478
+ */
232479
+ function getPlatformPackageName() {
232480
+ return `@sqliteai/sqlite-vector-${getCurrentPlatform()}`;
232481
+ }
232482
+ /**
232483
+ * Gets the binary filename for the current platform
232484
+ */
232485
+ function getBinaryName() {
232486
+ const platformName = platform();
232487
+ const extension = PLATFORM_EXTENSIONS[platformName];
232488
+ if (!extension) throw new Error(`Unknown platform: ${platformName}`);
232489
+ return `vector${extension}`;
232490
+ }
232491
+ /**
232492
+ * Error thrown when the SQLite Vector extension cannot be found
232493
+ */
232494
+ var ExtensionNotFoundError = class extends Error {
232495
+ constructor(message) {
232496
+ super(message);
232497
+ this.name = "ExtensionNotFoundError";
232498
+ }
232499
+ };
232500
+ /**
232501
+ * Attempts to load the platform-specific package
232502
+ * @returns The path to the extension binary, or null if not found
232503
+ */
232504
+ function tryLoadPlatformPackage() {
232505
+ try {
232506
+ const packageName = getPlatformPackageName();
232507
+ return import.meta.resolve(packageName).replace(/\/index\.js$/, `/${getBinaryName()}`).replace("file://", "");
232508
+ } catch {}
232509
+ return null;
232510
+ }
232511
/**
 * Gets the absolute path to the SQLite Vector extension binary for the
 * current platform.
 *
 * @returns Absolute path to the extension binary (.so, .dylib, or .dll)
 * @throws {ExtensionNotFoundError} If the extension binary cannot be found
 *
 * @example
 * ```typescript
 * import { getExtensionPath } from '@sqliteai/sqlite-vector';
 *
 * const extensionPath = getExtensionPath();
 * // On macOS ARM64: /path/to/node_modules/@sqliteai/sqlite-vector-darwin-arm64/vector.dylib
 * ```
 */
function getExtensionPath() {
	const binaryPath = tryLoadPlatformPackage();
	// Guard clause: no platform package means we cannot load the extension.
	if (!binaryPath) {
		throw new ExtensionNotFoundError(`SQLite Vector extension not found for platform: ${getCurrentPlatform()}\n\nThe platform-specific package "${getPlatformPackageName()}" is not installed.\nThis usually happens when:\n 1. Your platform is not supported\n 2. npm failed to install optional dependencies\n 3. You're installing with --no-optional flag\n\nTry running: npm install --force`);
	}
	return resolve(binaryPath);
}
232530
+ //#endregion
232531
+ //#region node_modules/.pnpm/clanka@0.2.25_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_7e66a3c8cbea5282674885bea99d757a/node_modules/clanka/dist/Sqlite.js
232532
+ /**
232533
+ * @since 1.0.0
232534
+ */
232535
+ /**
232536
+ * @since 1.0.0
232537
+ * @category Layers
232538
+ */
232539
/**
 * Builds the SQLite storage layer for semantic search, backed by the file at
 * `database`.
 *
 * Composition (innermost provides run first):
 *  1. Ensures the parent directory of `database` exists (skipped when the
 *     path has no directory component, i.e. dirname === ".").
 *  2. Opens the SQLite client for `database` (layer$2) and loads the native
 *     sqlite-vector extension into the connection via getExtensionPath().
 *  3. Runs migration "0001_create_chunks": creates the `chunks` table
 *     (id, path, content, hash, vector BLOB, syncId) plus an index on `hash`.
 *
 * @param database - filesystem path of the SQLite database file
 */
const SqliteLayer = (database) => layer$1({ loader: fromRecord({ "0001_create_chunks": gen(function* () {
const sql = yield* SqlClient;
// One row per embedded code chunk; `vector` holds the embedding as a blob and
// `syncId` tags which indexing run last saw the chunk (used for cleanup).
yield* sql`CREATE TABLE IF NOT EXISTS chunks (
id INTEGER PRIMARY KEY AUTOINCREMENT,
path TEXT NOT NULL,
content TEXT NOT NULL,
hash TEXT NOT NULL,
vector BLOB NOT NULL,
syncId TEXT NOT NULL
)`;
// Fast existence lookups by content hash (see ChunkRepo.exists usage below).
yield* sql`CREATE INDEX IF NOT EXISTS idx_chunks_hash ON chunks (hash)`;
}) }) }).pipe(provide$3(effectDiscard(gen(function* () {
// Load the native vector-search extension into the freshly opened client.
yield* (yield* SqliteClient).loadExtension(getExtensionPath());
}))), provideMerge(layer$2({ filename: database })), provide$3(effectDiscard(gen(function* () {
// Create the database's parent directory before the client opens the file.
const fs = yield* FileSystem;
const directory = (yield* Path$1).dirname(database);
if (directory === ".") return;
yield* fs.makeDirectory(directory, { recursive: true });
}))));
232558
+ //#endregion
232559
+ //#region node_modules/.pnpm/clanka@0.2.25_@effect+ai-openai-compat@4.0.0-beta.36_effect@4.0.0-beta.36__@effect+ai-o_7e66a3c8cbea5282674885bea99d757a/node_modules/clanka/dist/SemanticSearch.js
232560
+ /**
232561
+ * @since 1.0.0
232562
+ */
232563
// Lazy namespace object for the bundled SemanticSearch module. Every property
// is a thunk, so the referenced bindings are only evaluated when a consumer
// reads the namespace (elsewhere in this bundle it is pulled in through a
// dynamic `Promise.resolve().then(() => SemanticSearch_exports)`).
// The @__PURE__ annotation marks the call as side-effect free for minifiers.
var SemanticSearch_exports = /* @__PURE__ */ __exportAll({
SemanticSearch: () => SemanticSearch,
chunkEmbeddingInput: () => chunkEmbeddingInput,
layer: () => layer,
makeEmbeddingResolver: () => makeEmbeddingResolver,
maybeRemoveFile: () => maybeRemoveFile,
maybeUpdateFile: () => maybeUpdateFile
});
232571
// Convert Windows-style backslash separators to forward slashes so chunk
// paths are stored in a platform-independent form.
const normalizePath = (path) => path.split("\\").join("/");
232572
// Fixed chunking parameters; only the per-chunk character cap is
// caller-configurable (defaults to 10 000, with 0 respected via `??`).
const resolveChunkConfig = (options) => {
	const maxCharacters = options.chunkMaxCharacters ?? 1e4;
	return {
		chunkSize: 30,
		chunkOverlap: 0,
		chunkMaxCharacters: maxCharacters
	};
};
232577
// Wraps an embedding request resolver with throttling (50 ms between
// requests by default) and batching (300 inputs per request by default).
const makeEmbeddingResolver = (resolver, options) => {
	const requestDelay = options.embeddingRequestDelay ?? millis(50);
	const batchSize = options.embeddingBatchSize ?? 300;
	return resolver.pipe(setDelay(requestDelay), batchN(batchSize));
};
232578
/**
 * Renders a code chunk into the text that is fed to the embedding model:
 * a "---"-delimited metadata header (file, plus name/type/parent when set)
 * followed by the chunk content with each line prefixed by its 1-based
 * line number starting at `chunk.startLine`.
 */
const chunkEmbeddingInput = (chunk) => {
	const metadata = ["---", `file: ${chunk.path}`];
	if (chunk.name !== void 0) metadata.push(`name: ${chunk.name}`);
	if (chunk.type !== void 0) metadata.push(`type: ${chunk.type}`);
	if (chunk.parent !== void 0) metadata.push(`parent: ${chunk.parent}`);
	metadata.push("---");
	const numberedContent = chunk.content
		.split("\n")
		.map((line, offset) => `${chunk.startLine + offset}: ${line}`)
		.join("\n");
	return `${metadata.join("\n")}\n\n${numberedContent}`;
};
232592
// Stable content fingerprint for a chunk's embedding input: hex SHA-256.
const hashChunkInput = (input) => {
	const hasher = createHash("sha256");
	hasher.update(input);
	return hasher.digest("hex");
};
232593
+ /**
232594
+ * @since 1.0.0
232595
+ * @category Layers
232596
+ */
232597
/**
 * Builds the SemanticSearch service layer over a code chunker, a chunk
 * repository and an embedding model.
 *
 * On construction it kicks off an initial full index of `options.directory`
 * and forks a background loop that re-runs the index with a 3-minute delay
 * between iterations. The returned service exposes `search`, `updateFile`
 * and `removeFile`; all three first wait for the initial index to finish.
 *
 * The final pipe provides the repository/chunker layers, with storage backed
 * by SqliteLayer at `options.database` (default ".clanka/search.sqlite").
 */
const layer = (options) => effect$1(SemanticSearch, gen(function* () {
const chunker = yield* CodeChunker;
const repo = yield* ChunkRepo;
const embeddings = yield* EmbeddingModel;
const pathService = yield* Path$1;
// Index root; all stored chunk paths are kept relative to this directory.
const root = pathService.resolve(options.directory);
const resolver = makeEmbeddingResolver(embeddings.resolver, options);
// Max in-flight chunk-embedding effects (default 2000).
const concurrency = options.concurrency ?? 2e3;
const chunkConfig = resolveChunkConfig(options);
const indexHandle = yield* make$56();
// Captured Console service; shadows the global `console` in this scope and
// is re-provided to the indexing effect below.
const console = yield* Console$1;
// Maps an arbitrary path to its normalized root-relative form; returns none
// when the path resolves outside the index root ("", ".." or "../…").
const resolveIndexedPath = (path) => {
const absolutePath = pathService.resolve(root, path);
const relativePath = normalizePath(pathService.relative(root, absolutePath));
if (relativePath.length === 0 || relativePath === ".." || relativePath.startsWith("../")) return none$4();
return some$2(relativePath);
};
// Embeds one chunk and stores it. With `checkExisting`, a row whose content
// hash already exists is merely re-tagged with the current syncId instead of
// being re-embedded. Failures are logged at Warn and swallowed (ignore$1),
// with the chunk's path/startLine annotated on the logs.
const processChunk = fnUntraced(function* (options) {
const input = chunkEmbeddingInput(options.chunk);
const hash = hashChunkInput(input);
if (options.checkExisting) {
const id = yield* repo.exists(hash);
if (isSome(id)) {
yield* repo.setSyncId(id.value, options.syncId);
return;
}
}
const result = yield* request$2(new EmbeddingRequest({ input }), resolver);
const vector = new Float32Array(result.vector);
yield* repo.insert(Chunk.insert.makeUnsafe({
path: options.chunk.path,
hash,
content: input,
vector,
syncId: options.syncId
}));
}, ignore$1({
log: "Warn",
message: "Failed to process chunk for embedding"
}), (effect, options) => annotateLogs(effect, { chunk: `${options.chunk.path}/${options.chunk.startLine}` }));
// Full-codebase index run: embed every chunk under a fresh syncId, then
// delete rows still carrying older syncIds (i.e. chunks that disappeared).
// `onlyIfMissing` on the handle presumably dedupes overlapping runs — TODO confirm.
const runIndex = run$4(indexHandle, gen(function* () {
const syncId = SyncId.makeUnsafe(crypto.randomUUID());
yield* logInfo("Starting SemanticSearch index");
yield* pipe$1(chunker.chunkCodebase({
root,
...chunkConfig
}), tap((chunk) => processChunk({
chunk,
syncId,
checkExisting: true
}), { concurrency }), runDrain);
yield* repo.deleteForSyncId(syncId);
yield* logInfo("Finished SemanticSearch index");
}).pipe(withSpan$1("SemanticSearch.index"), withLogSpan("SemanticSearch.index"), provideService$2(Console$1, console)), { onlyIfMissing: true });
const initialIndex = yield* runIndex;
// Background refresh: repeat the index forever with a 3-minute delay per
// iteration, forked into the layer's scope.
yield* runIndex.pipe(delay(minutes(3)), forever, forkScoped);
return SemanticSearch.of({
// Embeds the query and returns the matching chunks' contents joined by
// blank lines, capped at `options.limit`.
search: fn("SemanticSearch.search")(function* (options) {
yield* join$2(initialIndex);
yield* annotateCurrentSpan(options);
const { vector } = yield* embeddings.embed(options.query);
return (yield* repo.search({
vector: new Float32Array(vector),
limit: options.limit
})).map((r) => r.content).join("\n\n");
}, orDie$2),
// Re-indexes a single file: drop its existing rows, re-chunk it, and embed
// all chunks under a fresh syncId (no existence check — rows were deleted).
updateFile: fn("SemanticSearch.updateFile")(function* (path) {
yield* join$2(initialIndex);
const indexedPath = resolveIndexedPath(path);
if (isNone(indexedPath)) return;
yield* repo.deleteByPath(indexedPath.value);
const chunks = yield* chunker.chunkFile({
root,
path: indexedPath.value,
...chunkConfig
});
if (chunks.length === 0) return;
const syncId = SyncId.makeUnsafe(crypto.randomUUID());
yield* pipe$1(fromArray(chunks), tap((chunk) => processChunk({
chunk,
syncId,
checkExisting: false
}), { concurrency }), runDrain);
}, orDie$2),
// Drops all indexed rows for a file; no-op for paths outside the root.
removeFile: fn("SemanticSearch.removeFile")(function* (path) {
yield* join$2(initialIndex);
const indexedPath = resolveIndexedPath(path);
if (isNone(indexedPath)) return;
yield* repo.deleteByPath(indexedPath.value);
}, orDie$2)
});
})).pipe(provide$3([layer$6, layer$3.pipe(provide$3(SqliteLayer(options.database ?? ".clanka/search.sqlite")))]));
232689
+ //#endregion
232674
232690
  //#region src/ClankaModels.ts
232675
232691
  const ModelServices = layerUndici.pipe(merge$6(layerKvs));
232676
232692
  const Reasoning = Literals([
@@ -241248,10 +241264,10 @@ const SemanticSearchLayer = unwrap$3(gen(function* () {
241248
241264
  const pathService = yield* Path$1;
241249
241265
  const apiKey = yield* redacted("LALPH_OPENAI_API_KEY").pipe(option);
241250
241266
  if (isNone(apiKey)) return empty$15;
241251
- return layer$6({
241267
+ return (yield* promise(() => Promise.resolve().then(() => SemanticSearch_exports))).layer({
241252
241268
  directory: worktree.directory,
241253
241269
  database: pathService.join(worktree.directory, ".lalph", "shared", "search.sqlite")
241254
- }).pipe(orDie$3, provide$3(model$1("text-embedding-3-small", { dimensions: 1536 })), provide$3(layer$4({ apiKey: apiKey.value }).pipe(provide$3(layerUndici))), tapCause$1((cause) => logWarning(`Failed to create SemanticSearch layer`, cause)), catchCause$2(() => empty$15));
241270
+ }).pipe(orDie$3, provide$3(model$1("text-embedding-3-small", { dimensions: 1536 })), provide$3(layer$9({ apiKey: apiKey.value }).pipe(provide$3(layerUndici))), tapCause$1((cause) => logWarning(`Failed to create SemanticSearch layer`, cause)), catchCause$2(() => empty$15));
241255
241271
  }).pipe(orDie$2));
241256
241272
  const runClanka = fnUntraced(function* (options) {
241257
241273
  const muxer = yield* Muxer;
@@ -241294,7 +241310,7 @@ const agentWorker = fnUntraced(function* (options) {
241294
241310
  system: options.system,
241295
241311
  prompt: match$10(options.research, {
241296
241312
  onNone: () => options.prompt,
241297
- onSome: (research) => make$9([{
241313
+ onSome: (research) => make$12([{
241298
241314
  role: "user",
241299
241315
  content: options.prompt
241300
241316
  }, {
@@ -241924,10 +241940,8 @@ const runProject = fnUntraced(function* (options) {
241924
241940
  const gitFlowLayer = resolveGitFlowLayer();
241925
241941
  const fiber = yield* checkForWork(options.project).pipe(andThen(resolveRunEffect(startedDeferred).pipe(provide$1(gitFlowLayer, { local: true }), withWorkerState(options.project.id))), catchTags$1({
241926
241942
  ChosenTaskNotFound(_error) {
241927
- if (executionMode._tag !== "ralph") {
241928
- ralphDone = true;
241929
- return void_$2;
241930
- }
241943
+ if (executionMode._tag !== "ralph") return void_$2;
241944
+ ralphDone = true;
241931
241945
  return log$1(`No more work to process for Ralph, ending after ${currentIteration + 1} iteration(s).`);
241932
241946
  },
241933
241947
  NoMoreWork(_error) {
@@ -242291,7 +242305,7 @@ const commandEdit = make$58("edit").pipe(withDescription("Open the selected proj
242291
242305
  const commandSource = make$58("source").pipe(withDescription("Select the issue source to use (e.g. GitHub Issues or Linear). This applies to all projects."), withHandler(() => selectIssueSource), provide(Settings.layer));
242292
242306
  //#endregion
242293
242307
  //#region package.json
242294
- var version = "0.3.94";
242308
+ var version = "0.3.96";
242295
242309
  //#endregion
242296
242310
  //#region src/commands/projects/ls.ts
242297
242311
  const commandProjectsLs = make$58("ls").pipe(withDescription("List configured projects and how they run (enabled state, concurrency, branch, git flow, review agent)."), withHandler(fnUntraced(function* () {