@ubiquity-os/plugin-sdk 3.6.0 → 3.6.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/llm.d.mts CHANGED
@@ -1,5 +1,6 @@
- import { ChatCompletionMessageParam, ChatCompletion, ChatCompletionChunk } from 'openai/resources/chat/completions';
+ import { ChatCompletionMessageParam, ChatCompletionCreateParamsNonStreaming, ChatCompletion, ChatCompletionChunk } from 'openai/resources/chat/completions';
  import { C as Context } from './context-sqbr2o6i.mjs';
+ import { PluginInput } from './signature.mjs';
  import '@octokit/webhooks';
  import '@ubiquity-os/ubiquity-os-logger';
  import '@octokit/plugin-rest-endpoint-methods';
@@ -7,54 +8,16 @@ import './octokit.mjs';
  import '@octokit/core/types';
  import '@octokit/plugin-paginate-graphql';
  import '@octokit/plugin-paginate-rest';
- import '@octokit/webhooks/node_modules/@octokit/request-error';
+ import '@octokit/request-error';
  import '@octokit/core';
+ import '@sinclair/typebox';

- type LlmResponseFormat = {
-     type: "json_object" | "text";
- } | {
-     type: string;
-     [key: string]: unknown;
- };
- type LlmPayload = {
-     repository?: {
-         owner?: {
-             login?: string;
-         };
-         name?: string;
-     };
-     installation?: {
-         id?: number;
-     };
- };
- type LlmAuthContext = {
-     authToken?: string;
-     ubiquityKernelToken?: string;
-     payload?: LlmPayload;
-     eventPayload?: LlmPayload;
- };
  type LlmCallOptions = {
      baseUrl?: string;
      model?: string;
      stream?: boolean;
      messages: ChatCompletionMessageParam[];
-     max_tokens?: number;
-     max_completion_tokens?: number;
-     temperature?: number;
-     top_p?: number;
-     frequency_penalty?: number;
-     presence_penalty?: number;
-     response_format?: LlmResponseFormat;
-     stop?: string | string[];
-     n?: number;
-     logit_bias?: Record<string, number>;
-     seed?: number;
-     user?: string;
-     metadata?: Record<string, unknown>;
-     tools?: unknown[];
-     tool_choice?: string | Record<string, unknown>;
-     [key: string]: unknown;
- };
- declare function callLlm(options: LlmCallOptions, input: Context | LlmAuthContext): Promise<ChatCompletion | AsyncIterable<ChatCompletionChunk>>;
+ } & Partial<Omit<ChatCompletionCreateParamsNonStreaming, "model" | "messages" | "stream">>;
+ declare function callLlm(options: LlmCallOptions, input: PluginInput | Context): Promise<ChatCompletion | AsyncIterable<ChatCompletionChunk>>;

  export { type LlmCallOptions, callLlm };
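Reviewer note: the rewritten LlmCallOptions drops the hand-rolled option fields and instead inherits them from the OpenAI ChatCompletionCreateParamsNonStreaming type. A minimal sketch of how the new type composes, assuming the package exposes this module under a ./llm subpath export (suggested by the dist layout, but the export map is not shown in this diff):

```ts
// Sketch only: LlmCallOptions keeps baseUrl/model/stream/messages and now inherits
// the remaining request params via Partial<Omit<ChatCompletionCreateParamsNonStreaming, ...>>.
// The "@ubiquity-os/plugin-sdk/llm" subpath is an assumption based on the dist filenames.
import type { LlmCallOptions } from "@ubiquity-os/plugin-sdk/llm";

const options: LlmCallOptions = {
  model: "gpt-4o-mini", // illustrative model name; model stays optional
  messages: [{ role: "user", content: "Summarize the open issue in one sentence." }],
  // These fields are now typed by the OpenAI params type rather than hand-written members:
  temperature: 0.2,
  max_tokens: 512,
  response_format: { type: "json_object" },
};
```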
package/dist/llm.d.ts CHANGED
@@ -1,5 +1,6 @@
- import { ChatCompletionMessageParam, ChatCompletion, ChatCompletionChunk } from 'openai/resources/chat/completions';
+ import { ChatCompletionMessageParam, ChatCompletionCreateParamsNonStreaming, ChatCompletion, ChatCompletionChunk } from 'openai/resources/chat/completions';
  import { C as Context } from './context-BbEmsEct.js';
+ import { PluginInput } from './signature.js';
  import '@octokit/webhooks';
  import '@ubiquity-os/ubiquity-os-logger';
  import '@octokit/plugin-rest-endpoint-methods';
@@ -7,54 +8,16 @@ import './octokit.js';
  import '@octokit/core/types';
  import '@octokit/plugin-paginate-graphql';
  import '@octokit/plugin-paginate-rest';
- import '@octokit/webhooks/node_modules/@octokit/request-error';
+ import '@octokit/request-error';
  import '@octokit/core';
+ import '@sinclair/typebox';

- type LlmResponseFormat = {
-     type: "json_object" | "text";
- } | {
-     type: string;
-     [key: string]: unknown;
- };
- type LlmPayload = {
-     repository?: {
-         owner?: {
-             login?: string;
-         };
-         name?: string;
-     };
-     installation?: {
-         id?: number;
-     };
- };
- type LlmAuthContext = {
-     authToken?: string;
-     ubiquityKernelToken?: string;
-     payload?: LlmPayload;
-     eventPayload?: LlmPayload;
- };
  type LlmCallOptions = {
      baseUrl?: string;
      model?: string;
      stream?: boolean;
      messages: ChatCompletionMessageParam[];
-     max_tokens?: number;
-     max_completion_tokens?: number;
-     temperature?: number;
-     top_p?: number;
-     frequency_penalty?: number;
-     presence_penalty?: number;
-     response_format?: LlmResponseFormat;
-     stop?: string | string[];
-     n?: number;
-     logit_bias?: Record<string, number>;
-     seed?: number;
-     user?: string;
-     metadata?: Record<string, unknown>;
-     tools?: unknown[];
-     tool_choice?: string | Record<string, unknown>;
-     [key: string]: unknown;
- };
- declare function callLlm(options: LlmCallOptions, input: Context | LlmAuthContext): Promise<ChatCompletion | AsyncIterable<ChatCompletionChunk>>;
+ } & Partial<Omit<ChatCompletionCreateParamsNonStreaming, "model" | "messages" | "stream">>;
+ declare function callLlm(options: LlmCallOptions, input: PluginInput | Context): Promise<ChatCompletion | AsyncIterable<ChatCompletionChunk>>;

  export { type LlmCallOptions, callLlm };
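The second parameter of callLlm changes from Context | LlmAuthContext to PluginInput | Context. Per the runtime in the dist/llm.js diff below, that argument only needs a non-empty authToken, an optional ubiquityKernelToken (enforced when the token starts with "gh"), and repository/installation metadata under payload or eventPayload for the X-GitHub-* headers. A hedged sketch with an illustrative, hand-built input; the cast exists only so the sketch type-checks, real plugins pass the PluginInput or Context the SDK hands them:

```ts
// Sketch of what callLlm reads from its second argument in 3.6.3 (see dist/llm.js below).
// The "@ubiquity-os/plugin-sdk/llm" subpath import is an assumption.
import { callLlm } from "@ubiquity-os/plugin-sdk/llm";

const input = {
  authToken: "sk-example-not-a-real-token", // non-"gh" token, so no kernel token is required
  eventPayload: {
    repository: { name: "plugin-sdk", owner: { login: "ubiquity-os" } },
    installation: { id: 123456 },
  },
};

// Double cast only for this hand-built object; normally you pass the SDK-provided value.
const completion = await callLlm(
  { messages: [{ role: "user", content: "ping" }] },
  input as unknown as Parameters<typeof callLlm>[1]
);
```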
package/dist/llm.js CHANGED
@@ -23,6 +23,7 @@ __export(llm_exports, {
    callLlm: () => callLlm
  });
  module.exports = __toCommonJS(llm_exports);
+ var EMPTY_STRING = "";
  function normalizeBaseUrl(baseUrl) {
    let normalized = baseUrl.trim();
    while (normalized.endsWith("/")) {
@@ -31,54 +32,45 @@ function normalizeBaseUrl(baseUrl) {
    return normalized;
  }
  function getEnvString(name) {
-   if (typeof process === "undefined" || !process?.env) return "";
-   return String(process.env[name] ?? "").trim();
+   if (typeof process === "undefined" || !process?.env) return EMPTY_STRING;
+   return String(process.env[name] ?? EMPTY_STRING).trim();
  }
  function getAiBaseUrl(options) {
    if (typeof options.baseUrl === "string" && options.baseUrl.trim()) {
      return normalizeBaseUrl(options.baseUrl);
    }
-   const envBaseUrl = getEnvString("UBQ_AI_BASE_URL") || getEnvString("UBQ_AI_URL");
+   const envBaseUrl = getEnvString("UOS_AI_URL") || getEnvString("UOS_AI_BASE_URL");
    if (envBaseUrl) return normalizeBaseUrl(envBaseUrl);
-   return "https://ai.ubq.fi";
+   return "https://ai-ubq-fi.deno.dev";
  }
  async function callLlm(options, input) {
-   const inputPayload = input;
-   const authToken = inputPayload.authToken;
-   const ubiquityKernelToken = inputPayload.ubiquityKernelToken;
-   const payload = inputPayload.payload ?? inputPayload.eventPayload;
-   const owner = payload?.repository?.owner?.login ?? "";
-   const repo = payload?.repository?.name ?? "";
-   const installationId = payload?.installation?.id;
-   if (!authToken) throw new Error("Missing authToken in inputs");
-   const isKernelTokenRequired = authToken.trim().startsWith("gh");
-   if (isKernelTokenRequired && !ubiquityKernelToken) {
-     throw new Error("Missing ubiquityKernelToken in inputs (kernel attestation is required for GitHub auth)");
-   }
+   const authToken = String(input.authToken ?? EMPTY_STRING).trim();
+   if (!authToken) throw new Error("Missing authToken in input");
+   const kernelToken = "ubiquityKernelToken" in input ? input.ubiquityKernelToken : void 0;
+   const payload = getPayload(input);
+   const { owner, repo, installationId } = getRepoMetadata(payload);
+   ensureKernelToken(authToken, kernelToken);
    const { baseUrl, model, stream: isStream, messages, ...rest } = options;
-   const url = `${getAiBaseUrl({ ...options, baseUrl })}/v1/chat/completions`;
+   ensureMessages(messages);
+   const url = buildAiUrl(options, baseUrl);
    const body = JSON.stringify({
      ...rest,
      ...model ? { model } : {},
      messages,
      stream: isStream ?? false
    });
-   const headers = {
-     Authorization: `Bearer ${authToken}`,
-     "Content-Type": "application/json"
-   };
-   if (owner) headers["X-GitHub-Owner"] = owner;
-   if (repo) headers["X-GitHub-Repo"] = repo;
-   if (typeof installationId === "number" && Number.isFinite(installationId)) {
-     headers["X-GitHub-Installation-Id"] = String(installationId);
-   }
-   if (ubiquityKernelToken) {
-     headers["X-Ubiquity-Kernel-Token"] = ubiquityKernelToken;
-   }
+   const headers = buildHeaders(authToken, {
+     owner,
+     repo,
+     installationId,
+     ubiquityKernelToken: kernelToken
+   });
    const response = await fetch(url, { method: "POST", headers, body });
    if (!response.ok) {
      const err = await response.text();
-     throw new Error(`LLM API error: ${response.status} - ${err}`);
+     const error = new Error(`LLM API error: ${response.status} - ${err}`);
+     error.status = response.status;
+     throw error;
    }
    if (isStream) {
      if (!response.body) {
@@ -88,29 +80,93 @@ async function callLlm(options, input) {
    }
    return response.json();
  }
+ function ensureKernelToken(authToken, kernelToken) {
+   const isKernelTokenRequired = authToken.startsWith("gh");
+   if (isKernelTokenRequired && !kernelToken) {
+     throw new Error("Missing ubiquityKernelToken in input (kernel attestation is required for GitHub auth)");
+   }
+ }
+ function ensureMessages(messages) {
+   if (!Array.isArray(messages) || messages.length === 0) {
+     throw new Error("messages must be a non-empty array");
+   }
+ }
+ function buildAiUrl(options, baseUrl) {
+   return `${getAiBaseUrl({ ...options, baseUrl })}/v1/chat/completions`;
+ }
+ function getPayload(input) {
+   if ("payload" in input) {
+     return input.payload;
+   }
+   return input.eventPayload;
+ }
+ function getRepoMetadata(payload) {
+   const repoPayload = payload;
+   return {
+     owner: repoPayload?.repository?.owner?.login ?? EMPTY_STRING,
+     repo: repoPayload?.repository?.name ?? EMPTY_STRING,
+     installationId: repoPayload?.installation?.id
+   };
+ }
+ function buildHeaders(authToken, options) {
+   const headers = {
+     Authorization: `Bearer ${authToken}`,
+     "Content-Type": "application/json"
+   };
+   if (options.owner) headers["X-GitHub-Owner"] = options.owner;
+   if (options.repo) headers["X-GitHub-Repo"] = options.repo;
+   if (typeof options.installationId === "number" && Number.isFinite(options.installationId)) {
+     headers["X-GitHub-Installation-Id"] = String(options.installationId);
+   }
+   if (options.ubiquityKernelToken) {
+     headers["X-Ubiquity-Kernel-Token"] = options.ubiquityKernelToken;
+   }
+   return headers;
+ }
  async function* parseSseStream(body) {
    const reader = body.getReader();
    const decoder = new TextDecoder();
-   let buffer = "";
+   let buffer = EMPTY_STRING;
    try {
      while (true) {
        const { value, done: isDone } = await reader.read();
        if (isDone) break;
        buffer += decoder.decode(value, { stream: true });
-       const events = buffer.split("\n\n");
-       buffer = events.pop() || "";
+       const { events, remainder } = splitSseEvents(buffer);
+       buffer = remainder;
        for (const event of events) {
-         if (event.startsWith("data: ")) {
-           const data = event.slice(6);
-           if (data === "[DONE]") return;
-           yield JSON.parse(data);
-         }
+         const data = getEventData(event);
+         if (!data) continue;
+         if (data.trim() === "[DONE]") return;
+         yield parseEventData(data);
        }
      }
    } finally {
      reader.releaseLock();
    }
  }
+ function splitSseEvents(buffer) {
+   const normalized = buffer.replace(/\r\n/g, "\n").replace(/\r/g, "\n");
+   const parts = normalized.split("\n\n");
+   const remainder = parts.pop() ?? EMPTY_STRING;
+   return { events: parts, remainder };
+ }
+ function getEventData(event) {
+   if (!event.trim()) return null;
+   const dataLines = event.split("\n").filter((line) => line.startsWith("data:"));
+   if (!dataLines.length) return null;
+   const data = dataLines.map((line) => line.startsWith("data: ") ? line.slice(6) : line.slice(5).replace(/^ /, "")).join("\n");
+   return data || null;
+ }
+ function parseEventData(data) {
+   try {
+     return JSON.parse(data);
+   } catch (error) {
+     const message = error instanceof Error ? error.message : String(error);
+     const preview = data.length > 200 ? `${data.slice(0, 200)}...` : data;
+     throw new Error(`LLM stream parse error: ${message}. Data: ${preview}`);
+   }
+ }
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
    callLlm
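When stream is true, callLlm resolves to the AsyncIterable<ChatCompletionChunk> branch of its return type, fed by the reworked parseSseStream above. A consumption sketch (the ./llm subpath import is an assumption, as before):

```ts
// Streaming sketch: each SSE data event yielded by parseSseStream is a
// ChatCompletionChunk, so the reply text is assembled from the deltas.
import { callLlm } from "@ubiquity-os/plugin-sdk/llm"; // assumed subpath export
import type { ChatCompletionChunk } from "openai/resources/chat/completions";

async function streamReply(input: Parameters<typeof callLlm>[1]): Promise<string> {
  const result = await callLlm(
    { stream: true, messages: [{ role: "user", content: "Explain this change." }] },
    input
  );
  let text = "";
  // With stream: true the union resolves to the async-iterable branch at runtime.
  for await (const chunk of result as AsyncIterable<ChatCompletionChunk>) {
    text += chunk.choices[0]?.delta?.content ?? "";
  }
  return text;
}
```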
package/dist/llm.mjs CHANGED
@@ -1,4 +1,5 @@
  // src/llm/index.ts
+ var EMPTY_STRING = "";
  function normalizeBaseUrl(baseUrl) {
    let normalized = baseUrl.trim();
    while (normalized.endsWith("/")) {
@@ -7,54 +8,45 @@ function normalizeBaseUrl(baseUrl) {
    return normalized;
  }
  function getEnvString(name) {
-   if (typeof process === "undefined" || !process?.env) return "";
-   return String(process.env[name] ?? "").trim();
+   if (typeof process === "undefined" || !process?.env) return EMPTY_STRING;
+   return String(process.env[name] ?? EMPTY_STRING).trim();
  }
  function getAiBaseUrl(options) {
    if (typeof options.baseUrl === "string" && options.baseUrl.trim()) {
      return normalizeBaseUrl(options.baseUrl);
    }
-   const envBaseUrl = getEnvString("UBQ_AI_BASE_URL") || getEnvString("UBQ_AI_URL");
+   const envBaseUrl = getEnvString("UOS_AI_URL") || getEnvString("UOS_AI_BASE_URL");
    if (envBaseUrl) return normalizeBaseUrl(envBaseUrl);
-   return "https://ai.ubq.fi";
+   return "https://ai-ubq-fi.deno.dev";
  }
  async function callLlm(options, input) {
-   const inputPayload = input;
-   const authToken = inputPayload.authToken;
-   const ubiquityKernelToken = inputPayload.ubiquityKernelToken;
-   const payload = inputPayload.payload ?? inputPayload.eventPayload;
-   const owner = payload?.repository?.owner?.login ?? "";
-   const repo = payload?.repository?.name ?? "";
-   const installationId = payload?.installation?.id;
-   if (!authToken) throw new Error("Missing authToken in inputs");
-   const isKernelTokenRequired = authToken.trim().startsWith("gh");
-   if (isKernelTokenRequired && !ubiquityKernelToken) {
-     throw new Error("Missing ubiquityKernelToken in inputs (kernel attestation is required for GitHub auth)");
-   }
+   const authToken = String(input.authToken ?? EMPTY_STRING).trim();
+   if (!authToken) throw new Error("Missing authToken in input");
+   const kernelToken = "ubiquityKernelToken" in input ? input.ubiquityKernelToken : void 0;
+   const payload = getPayload(input);
+   const { owner, repo, installationId } = getRepoMetadata(payload);
+   ensureKernelToken(authToken, kernelToken);
    const { baseUrl, model, stream: isStream, messages, ...rest } = options;
-   const url = `${getAiBaseUrl({ ...options, baseUrl })}/v1/chat/completions`;
+   ensureMessages(messages);
+   const url = buildAiUrl(options, baseUrl);
    const body = JSON.stringify({
      ...rest,
      ...model ? { model } : {},
      messages,
      stream: isStream ?? false
    });
-   const headers = {
-     Authorization: `Bearer ${authToken}`,
-     "Content-Type": "application/json"
-   };
-   if (owner) headers["X-GitHub-Owner"] = owner;
-   if (repo) headers["X-GitHub-Repo"] = repo;
-   if (typeof installationId === "number" && Number.isFinite(installationId)) {
-     headers["X-GitHub-Installation-Id"] = String(installationId);
-   }
-   if (ubiquityKernelToken) {
-     headers["X-Ubiquity-Kernel-Token"] = ubiquityKernelToken;
-   }
+   const headers = buildHeaders(authToken, {
+     owner,
+     repo,
+     installationId,
+     ubiquityKernelToken: kernelToken
+   });
    const response = await fetch(url, { method: "POST", headers, body });
    if (!response.ok) {
      const err = await response.text();
-     throw new Error(`LLM API error: ${response.status} - ${err}`);
+     const error = new Error(`LLM API error: ${response.status} - ${err}`);
+     error.status = response.status;
+     throw error;
    }
    if (isStream) {
      if (!response.body) {
@@ -64,29 +56,93 @@ async function callLlm(options, input) {
    }
    return response.json();
  }
+ function ensureKernelToken(authToken, kernelToken) {
+   const isKernelTokenRequired = authToken.startsWith("gh");
+   if (isKernelTokenRequired && !kernelToken) {
+     throw new Error("Missing ubiquityKernelToken in input (kernel attestation is required for GitHub auth)");
+   }
+ }
+ function ensureMessages(messages) {
+   if (!Array.isArray(messages) || messages.length === 0) {
+     throw new Error("messages must be a non-empty array");
+   }
+ }
+ function buildAiUrl(options, baseUrl) {
+   return `${getAiBaseUrl({ ...options, baseUrl })}/v1/chat/completions`;
+ }
+ function getPayload(input) {
+   if ("payload" in input) {
+     return input.payload;
+   }
+   return input.eventPayload;
+ }
+ function getRepoMetadata(payload) {
+   const repoPayload = payload;
+   return {
+     owner: repoPayload?.repository?.owner?.login ?? EMPTY_STRING,
+     repo: repoPayload?.repository?.name ?? EMPTY_STRING,
+     installationId: repoPayload?.installation?.id
+   };
+ }
+ function buildHeaders(authToken, options) {
+   const headers = {
+     Authorization: `Bearer ${authToken}`,
+     "Content-Type": "application/json"
+   };
+   if (options.owner) headers["X-GitHub-Owner"] = options.owner;
+   if (options.repo) headers["X-GitHub-Repo"] = options.repo;
+   if (typeof options.installationId === "number" && Number.isFinite(options.installationId)) {
+     headers["X-GitHub-Installation-Id"] = String(options.installationId);
+   }
+   if (options.ubiquityKernelToken) {
+     headers["X-Ubiquity-Kernel-Token"] = options.ubiquityKernelToken;
+   }
+   return headers;
+ }
  async function* parseSseStream(body) {
    const reader = body.getReader();
    const decoder = new TextDecoder();
-   let buffer = "";
+   let buffer = EMPTY_STRING;
    try {
      while (true) {
        const { value, done: isDone } = await reader.read();
        if (isDone) break;
        buffer += decoder.decode(value, { stream: true });
-       const events = buffer.split("\n\n");
-       buffer = events.pop() || "";
+       const { events, remainder } = splitSseEvents(buffer);
+       buffer = remainder;
        for (const event of events) {
-         if (event.startsWith("data: ")) {
-           const data = event.slice(6);
-           if (data === "[DONE]") return;
-           yield JSON.parse(data);
-         }
+         const data = getEventData(event);
+         if (!data) continue;
+         if (data.trim() === "[DONE]") return;
+         yield parseEventData(data);
        }
      }
    } finally {
      reader.releaseLock();
    }
  }
+ function splitSseEvents(buffer) {
+   const normalized = buffer.replace(/\r\n/g, "\n").replace(/\r/g, "\n");
+   const parts = normalized.split("\n\n");
+   const remainder = parts.pop() ?? EMPTY_STRING;
+   return { events: parts, remainder };
+ }
+ function getEventData(event) {
+   if (!event.trim()) return null;
+   const dataLines = event.split("\n").filter((line) => line.startsWith("data:"));
+   if (!dataLines.length) return null;
+   const data = dataLines.map((line) => line.startsWith("data: ") ? line.slice(6) : line.slice(5).replace(/^ /, "")).join("\n");
+   return data || null;
+ }
+ function parseEventData(data) {
+   try {
+     return JSON.parse(data);
+   } catch (error) {
+     const message = error instanceof Error ? error.message : String(error);
+     const preview = data.length > 200 ? `${data.slice(0, 200)}...` : data;
+     throw new Error(`LLM stream parse error: ${message}. Data: ${preview}`);
+   }
+ }
  export {
    callLlm
  };
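Both builds now attach the HTTP status to the thrown error (error.status = response.status) instead of encoding it only in the message. A sketch of how a caller might branch on it; getLlmErrorStatus and callWithStatusHandling are local helpers written for this example, not SDK exports (the llm module only exports callLlm and LlmCallOptions):

```ts
// The status field is set dynamically on a plain Error, so narrow it defensively.
function getLlmErrorStatus(err: unknown): number | undefined {
  if (err instanceof Error && "status" in err && typeof (err as { status?: unknown }).status === "number") {
    return (err as { status: number }).status;
  }
  return undefined;
}

// Wrap a call (e.g. () => callLlm(options, input)) and react to the attached status.
async function callWithStatusHandling<T>(run: () => Promise<T>): Promise<T> {
  try {
    return await run();
  } catch (err) {
    const status = getLlmErrorStatus(err);
    if (status === 429) {
      // rate limited by the AI gateway: a caller could back off and retry here
    } else if (status === 401 || status === 403) {
      // auth or kernel-attestation problem: surface it rather than retrying
    }
    throw err;
  }
}
```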
@@ -3,12 +3,12 @@ import * as _octokit_plugin_paginate_graphql from '@octokit/plugin-paginate-grap
3
3
  import * as _octokit_plugin_rest_endpoint_methods from '@octokit/plugin-rest-endpoint-methods';
4
4
  export { RestEndpointMethodTypes } from '@octokit/plugin-rest-endpoint-methods';
5
5
  import * as _octokit_plugin_paginate_rest from '@octokit/plugin-paginate-rest';
6
- import * as _octokit_webhooks_node_modules__octokit_request_error from '@octokit/webhooks/node_modules/@octokit/request-error';
6
+ import * as _octokit_request_error from '@octokit/request-error';
7
7
  import { Octokit } from '@octokit/core';
8
8
 
9
9
  declare const customOctokit: typeof Octokit & _octokit_core_types.Constructor<{
10
10
  retry: {
11
- retryRequest: (error: _octokit_webhooks_node_modules__octokit_request_error.RequestError, retries: number, retryAfter: number) => _octokit_webhooks_node_modules__octokit_request_error.RequestError;
11
+ retryRequest: (error: _octokit_request_error.RequestError, retries: number, retryAfter: number) => _octokit_request_error.RequestError;
12
12
  };
13
13
  } & {
14
14
  paginate: _octokit_plugin_paginate_rest.PaginateInterface;
package/dist/octokit.d.ts CHANGED
@@ -3,12 +3,12 @@ import * as _octokit_plugin_paginate_graphql from '@octokit/plugin-paginate-grap
  import * as _octokit_plugin_rest_endpoint_methods from '@octokit/plugin-rest-endpoint-methods';
  export { RestEndpointMethodTypes } from '@octokit/plugin-rest-endpoint-methods';
  import * as _octokit_plugin_paginate_rest from '@octokit/plugin-paginate-rest';
- import * as _octokit_webhooks_node_modules__octokit_request_error from '@octokit/webhooks/node_modules/@octokit/request-error';
+ import * as _octokit_request_error from '@octokit/request-error';
  import { Octokit } from '@octokit/core';

  declare const customOctokit: typeof Octokit & _octokit_core_types.Constructor<{
      retry: {
-         retryRequest: (error: _octokit_webhooks_node_modules__octokit_request_error.RequestError, retries: number, retryAfter: number) => _octokit_webhooks_node_modules__octokit_request_error.RequestError;
+         retryRequest: (error: _octokit_request_error.RequestError, retries: number, retryAfter: number) => _octokit_request_error.RequestError;
      };
  } & {
      paginate: _octokit_plugin_paginate_rest.PaginateInterface;
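The octokit typings now reference '@octokit/request-error' directly rather than the nested '@octokit/webhooks/node_modules/@octokit/request-error' path, so the RequestError class consumers import lines up with the one the retry option is typed against. An illustrative sketch; the ./octokit subpath import and the specific request are assumptions:

```ts
// Sketch: narrowing Octokit failures with the same RequestError the retry typings use.
import { RequestError } from "@octokit/request-error";
import { customOctokit } from "@ubiquity-os/plugin-sdk/octokit"; // assumed subpath export

async function fetchRepo(owner: string, repo: string) {
  const octokit = new customOctokit({ auth: process.env.GITHUB_TOKEN });
  try {
    return await octokit.request("GET /repos/{owner}/{repo}", { owner, repo });
  } catch (error) {
    if (error instanceof RequestError && error.status === 404) {
      return null; // repository not found
    }
    throw error;
  }
}
```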