yt-embeddings-strapi-plugin 0.1.0 → 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/_chunks/{App-Cv1cdLAr.js → App-57XBOCeK.js} +10 -2
- package/dist/_chunks/{App-bN58O1bN.mjs → App-D7Dy3o44.mjs} +10 -2
- package/dist/_chunks/{index-BAfBs5PQ.js → index-CZeQ6zu2.js} +1 -1
- package/dist/_chunks/{index-K6X5FM2O.mjs → index-CuxGkc_5.mjs} +1 -1
- package/dist/admin/index.js +1 -1
- package/dist/admin/index.mjs +1 -1
- package/dist/server/index.js +11 -5
- package/dist/server/index.mjs +11 -5
- package/package.json +1 -1
|
@@ -7,7 +7,7 @@ const react = require("react");
|
|
|
7
7
|
const styled = require("styled-components");
|
|
8
8
|
const designSystem = require("@strapi/design-system");
|
|
9
9
|
const icons = require("@strapi/icons");
|
|
10
|
-
const index = require("./index-BAfBs5PQ.js");
|
|
10
|
+
const index = require("./index-CZeQ6zu2.js");
|
|
11
11
|
const qs = require("qs");
|
|
12
12
|
const ReactMarkdown = require("react-markdown");
|
|
13
13
|
const _interopDefault = (e) => e && e.__esModule ? e : { default: e };
|
|
@@ -488,7 +488,15 @@ function VideoDetails() {
|
|
|
488
488
|
get(`/${index.PLUGIN_ID}/yt/videos/${videoId}`),
|
|
489
489
|
get(`/${index.PLUGIN_ID}/yt/videos/${videoId}/chunks`)
|
|
490
490
|
]).then(([videoRes, chunksRes]) => {
|
|
491
|
-
|
|
491
|
+
const v = videoRes?.data?.data || videoRes?.data || null;
|
|
492
|
+
if (v && typeof v.key_moments === "string") {
|
|
493
|
+
try {
|
|
494
|
+
v.key_moments = JSON.parse(v.key_moments);
|
|
495
|
+
} catch {
|
|
496
|
+
v.key_moments = [];
|
|
497
|
+
}
|
|
498
|
+
}
|
|
499
|
+
setVideo(v);
|
|
492
500
|
setChunks(chunksRes?.data?.data || chunksRes?.data || []);
|
|
493
501
|
}).catch((err) => console.error("Failed to load video:", err)).finally(() => setIsLoading(false));
|
|
494
502
|
}, [videoId, get]);
|
|
@@ -5,7 +5,7 @@ import { useRef, useState, useEffect, useCallback } from "react";
|
|
|
5
5
|
import styled from "styled-components";
|
|
6
6
|
import { Button, Modal, Box, TextInput, Link, Accordion, Typography, Tr, Main, Flex, Loader, Table, Thead, Th, Tbody, Td, Badge } from "@strapi/design-system";
|
|
7
7
|
import { ArrowClockwise, ArrowLeft } from "@strapi/icons";
|
|
8
|
-
import { R as RobotIcon, P as PLUGIN_ID } from "./index-K6X5FM2O.mjs";
|
|
8
|
+
import { R as RobotIcon, P as PLUGIN_ID } from "./index-CuxGkc_5.mjs";
|
|
9
9
|
import qs from "qs";
|
|
10
10
|
import ReactMarkdown from "react-markdown";
|
|
11
11
|
const MarkdownWrapper = styled.div`
|
|
@@ -482,7 +482,15 @@ function VideoDetails() {
|
|
|
482
482
|
get(`/${PLUGIN_ID}/yt/videos/${videoId}`),
|
|
483
483
|
get(`/${PLUGIN_ID}/yt/videos/${videoId}/chunks`)
|
|
484
484
|
]).then(([videoRes, chunksRes]) => {
|
|
485
|
-
|
|
485
|
+
const v = videoRes?.data?.data || videoRes?.data || null;
|
|
486
|
+
if (v && typeof v.key_moments === "string") {
|
|
487
|
+
try {
|
|
488
|
+
v.key_moments = JSON.parse(v.key_moments);
|
|
489
|
+
} catch {
|
|
490
|
+
v.key_moments = [];
|
|
491
|
+
}
|
|
492
|
+
}
|
|
493
|
+
setVideo(v);
|
|
486
494
|
setChunks(chunksRes?.data?.data || chunksRes?.data || []);
|
|
487
495
|
}).catch((err) => console.error("Failed to load video:", err)).finally(() => setIsLoading(false));
|
|
488
496
|
}, [videoId, get]);
|
|
@@ -130,7 +130,7 @@ const index = {
|
|
|
130
130
|
defaultMessage: PLUGIN_ID
|
|
131
131
|
},
|
|
132
132
|
Component: async () => {
|
|
133
|
-
const { App } = await Promise.resolve().then(() => require("./App-Cv1cdLAr.js"));
|
|
133
|
+
const { App } = await Promise.resolve().then(() => require("./App-57XBOCeK.js"));
|
|
134
134
|
return App;
|
|
135
135
|
}
|
|
136
136
|
});
|
package/dist/admin/index.js
CHANGED
package/dist/admin/index.mjs
CHANGED
package/dist/server/index.js
CHANGED
|
@@ -427,7 +427,7 @@ const SearchYtKnowledgeSchema = zod.z.object({
|
|
|
427
427
|
videoId: zod.z.string().optional(),
|
|
428
428
|
topics: zod.z.array(zod.z.string()).optional(),
|
|
429
429
|
contextWindowSeconds: zod.z.number().min(0).optional().default(30),
|
|
430
|
-
minSimilarity: zod.z.number().min(0).max(1).optional().default(0.2)
|
|
430
|
+
minSimilarity: zod.z.number().min(0).max(1).optional().default(0.3)
|
|
431
431
|
});
|
|
432
432
|
const GetVideoTranscriptRangeSchema = zod.z.object({
|
|
433
433
|
videoId: zod.z.string().min(1, "Video ID is required"),
|
|
@@ -466,7 +466,7 @@ function formatTime$5(seconds) {
|
|
|
466
466
|
}
|
|
467
467
|
const searchYtKnowledgeMcpTool = {
|
|
468
468
|
name: "search_yt_knowledge",
|
|
469
|
-
description: "Semantically search YouTube video transcripts. Returns relevant passages with timestamps, deep links, video topics, and summary.",
|
|
469
|
+
description: "Semantically search YouTube video transcripts. Returns relevant passages with timestamps, deep links, video topics, and summary. IMPORTANT: After receiving results, use the contextText to directly answer the user's question. Cite the video title, timestamp, and deep link. Do not just list results — synthesize an answer from the transcript content.",
|
|
470
470
|
inputSchema: {
|
|
471
471
|
type: "object",
|
|
472
472
|
properties: {
|
|
@@ -502,7 +502,7 @@ const searchYtKnowledgeMcpTool = {
|
|
|
502
502
|
async function handleSearchYtKnowledge(strapi, args) {
|
|
503
503
|
const results = await strapi.plugin("yt-embeddings-strapi-plugin").service("ytEmbeddings").search(args.query, {
|
|
504
504
|
limit: args.limit ?? 5,
|
|
505
|
-
minSimilarity: args.minSimilarity ?? 0.2,
|
|
505
|
+
minSimilarity: args.minSimilarity ?? 0.3,
|
|
506
506
|
videoId: args.videoId,
|
|
507
507
|
topics: args.topics,
|
|
508
508
|
contextWindowSeconds: args.contextWindowSeconds ?? 30
|
|
@@ -1325,7 +1325,7 @@ const ytController = ({ strapi }) => ({
|
|
|
1325
1325
|
}
|
|
1326
1326
|
const result = await strapi.plugin(PLUGIN_ID).service("ytEmbeddings").search(q, {
|
|
1327
1327
|
limit: limit ? parseInt(limit, 10) : 5,
|
|
1328
|
-
minSimilarity: minSimilarity ? parseFloat(minSimilarity) : 0.2,
|
|
1328
|
+
minSimilarity: minSimilarity ? parseFloat(minSimilarity) : 0.3,
|
|
1329
1329
|
videoId,
|
|
1330
1330
|
topics: topics ? topics.split(",") : void 0,
|
|
1331
1331
|
contextWindowSeconds: contextWindowSeconds ? parseInt(contextWindowSeconds, 10) : 30
|
|
@@ -1564,7 +1564,7 @@ function formatTime$2(seconds) {
|
|
|
1564
1564
|
}
|
|
1565
1565
|
const searchYtKnowledgeTool = {
|
|
1566
1566
|
name: "searchYtKnowledge",
|
|
1567
|
-
description: "Semantically search YouTube video transcripts. Returns relevant passages with timestamps, deep links, video topics, and summary.",
|
|
1567
|
+
description: "Semantically search YouTube video transcripts. Returns relevant passages with timestamps, deep links, video topics, and summary. IMPORTANT: After receiving results, use the contextText to directly answer the user's question. Cite the video title, timestamp, and deep link. Do not just list results — synthesize an answer from the transcript content.",
|
|
1568
1568
|
schema: SearchYtKnowledgeSchema,
|
|
1569
1569
|
execute: async (args, strapi) => {
|
|
1570
1570
|
const validated = SearchYtKnowledgeSchema.parse(args);
|
|
@@ -1945,7 +1945,9 @@ const ytEmbeddings = ({ strapi }) => ({
|
|
|
1945
1945
|
throw new Error("[yt-embed] Plugin manager not initialized");
|
|
1946
1946
|
}
|
|
1947
1947
|
const { limit = 5, minSimilarity = 0.2, contextWindowSeconds = 30 } = options;
|
|
1948
|
+
console.log(`[yt-embed search] Embedding query: "${query}" with model`);
|
|
1948
1949
|
const { embedding: queryVector } = await ai.embed({ model: embeddingModel, value: query });
|
|
1950
|
+
console.log(`[yt-embed search] Got embedding vector, length: ${queryVector.length}`);
|
|
1949
1951
|
const vectorStr = `[${queryVector.join(",")}]`;
|
|
1950
1952
|
const params = [vectorStr, minSimilarity, limit * 2];
|
|
1951
1953
|
const filters = [];
|
|
@@ -1974,6 +1976,10 @@ const ytEmbeddings = ({ strapi }) => ({
|
|
|
1974
1976
|
ORDER BY vc.embedding <=> $1::vector
|
|
1975
1977
|
LIMIT $3
|
|
1976
1978
|
`, params);
|
|
1979
|
+
console.log(`[yt-embed search] Query returned ${rows.rows.length} rows (minSimilarity: ${minSimilarity})`);
|
|
1980
|
+
if (rows.rows.length > 0) {
|
|
1981
|
+
console.log(`[yt-embed search] Top similarity: ${rows.rows[0].similarity}`);
|
|
1982
|
+
}
|
|
1977
1983
|
if (!rows.rows.length) return [];
|
|
1978
1984
|
const seen = /* @__PURE__ */ new Set();
|
|
1979
1985
|
const deduped = rows.rows.filter((row) => {
|
package/dist/server/index.mjs
CHANGED
|
@@ -408,7 +408,7 @@ const SearchYtKnowledgeSchema = z.object({
|
|
|
408
408
|
videoId: z.string().optional(),
|
|
409
409
|
topics: z.array(z.string()).optional(),
|
|
410
410
|
contextWindowSeconds: z.number().min(0).optional().default(30),
|
|
411
|
-
minSimilarity: z.number().min(0).max(1).optional().default(0.2)
|
|
411
|
+
minSimilarity: z.number().min(0).max(1).optional().default(0.3)
|
|
412
412
|
});
|
|
413
413
|
const GetVideoTranscriptRangeSchema = z.object({
|
|
414
414
|
videoId: z.string().min(1, "Video ID is required"),
|
|
@@ -447,7 +447,7 @@ function formatTime$5(seconds) {
|
|
|
447
447
|
}
|
|
448
448
|
const searchYtKnowledgeMcpTool = {
|
|
449
449
|
name: "search_yt_knowledge",
|
|
450
|
-
description: "Semantically search YouTube video transcripts. Returns relevant passages with timestamps, deep links, video topics, and summary.",
|
|
450
|
+
description: "Semantically search YouTube video transcripts. Returns relevant passages with timestamps, deep links, video topics, and summary. IMPORTANT: After receiving results, use the contextText to directly answer the user's question. Cite the video title, timestamp, and deep link. Do not just list results — synthesize an answer from the transcript content.",
|
|
451
451
|
inputSchema: {
|
|
452
452
|
type: "object",
|
|
453
453
|
properties: {
|
|
@@ -483,7 +483,7 @@ const searchYtKnowledgeMcpTool = {
|
|
|
483
483
|
async function handleSearchYtKnowledge(strapi, args) {
|
|
484
484
|
const results = await strapi.plugin("yt-embeddings-strapi-plugin").service("ytEmbeddings").search(args.query, {
|
|
485
485
|
limit: args.limit ?? 5,
|
|
486
|
-
minSimilarity: args.minSimilarity ?? 0.2,
|
|
486
|
+
minSimilarity: args.minSimilarity ?? 0.3,
|
|
487
487
|
videoId: args.videoId,
|
|
488
488
|
topics: args.topics,
|
|
489
489
|
contextWindowSeconds: args.contextWindowSeconds ?? 30
|
|
@@ -1306,7 +1306,7 @@ const ytController = ({ strapi }) => ({
|
|
|
1306
1306
|
}
|
|
1307
1307
|
const result = await strapi.plugin(PLUGIN_ID).service("ytEmbeddings").search(q, {
|
|
1308
1308
|
limit: limit ? parseInt(limit, 10) : 5,
|
|
1309
|
-
minSimilarity: minSimilarity ? parseFloat(minSimilarity) : 0.2,
|
|
1309
|
+
minSimilarity: minSimilarity ? parseFloat(minSimilarity) : 0.3,
|
|
1310
1310
|
videoId,
|
|
1311
1311
|
topics: topics ? topics.split(",") : void 0,
|
|
1312
1312
|
contextWindowSeconds: contextWindowSeconds ? parseInt(contextWindowSeconds, 10) : 30
|
|
@@ -1545,7 +1545,7 @@ function formatTime$2(seconds) {
|
|
|
1545
1545
|
}
|
|
1546
1546
|
const searchYtKnowledgeTool = {
|
|
1547
1547
|
name: "searchYtKnowledge",
|
|
1548
|
-
description: "Semantically search YouTube video transcripts. Returns relevant passages with timestamps, deep links, video topics, and summary.",
|
|
1548
|
+
description: "Semantically search YouTube video transcripts. Returns relevant passages with timestamps, deep links, video topics, and summary. IMPORTANT: After receiving results, use the contextText to directly answer the user's question. Cite the video title, timestamp, and deep link. Do not just list results — synthesize an answer from the transcript content.",
|
|
1549
1549
|
schema: SearchYtKnowledgeSchema,
|
|
1550
1550
|
execute: async (args, strapi) => {
|
|
1551
1551
|
const validated = SearchYtKnowledgeSchema.parse(args);
|
|
@@ -1926,7 +1926,9 @@ const ytEmbeddings = ({ strapi }) => ({
|
|
|
1926
1926
|
throw new Error("[yt-embed] Plugin manager not initialized");
|
|
1927
1927
|
}
|
|
1928
1928
|
const { limit = 5, minSimilarity = 0.2, contextWindowSeconds = 30 } = options;
|
|
1929
|
+
console.log(`[yt-embed search] Embedding query: "${query}" with model`);
|
|
1929
1930
|
const { embedding: queryVector } = await embed({ model: embeddingModel, value: query });
|
|
1931
|
+
console.log(`[yt-embed search] Got embedding vector, length: ${queryVector.length}`);
|
|
1930
1932
|
const vectorStr = `[${queryVector.join(",")}]`;
|
|
1931
1933
|
const params = [vectorStr, minSimilarity, limit * 2];
|
|
1932
1934
|
const filters = [];
|
|
@@ -1955,6 +1957,10 @@ const ytEmbeddings = ({ strapi }) => ({
|
|
|
1955
1957
|
ORDER BY vc.embedding <=> $1::vector
|
|
1956
1958
|
LIMIT $3
|
|
1957
1959
|
`, params);
|
|
1960
|
+
console.log(`[yt-embed search] Query returned ${rows.rows.length} rows (minSimilarity: ${minSimilarity})`);
|
|
1961
|
+
if (rows.rows.length > 0) {
|
|
1962
|
+
console.log(`[yt-embed search] Top similarity: ${rows.rows[0].similarity}`);
|
|
1963
|
+
}
|
|
1958
1964
|
if (!rows.rows.length) return [];
|
|
1959
1965
|
const seen = /* @__PURE__ */ new Set();
|
|
1960
1966
|
const deduped = rows.rows.filter((row) => {
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "yt-embeddings-strapi-plugin",
|
|
3
|
-
"version": "0.1.0",
|
|
3
|
+
"version": "0.1.1",
|
|
4
4
|
"description": "Strapi v5 plugin for vector embeddings with OpenAI and Neon PostgreSQL. Enables semantic search, RAG chat, and MCP (Model Context Protocol) integration.",
|
|
5
5
|
"keywords": [
|
|
6
6
|
"strapi",
|