devskill 2.0.10 → 2.0.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/meta.ts CHANGED
@@ -26,6 +26,7 @@ export const submodules: Record<string, string> = {
26
26
  // 'react': 'https://github.com/reactjs/react.dev',
27
27
  'vue': 'https://github.com/vuejs/docs',
28
28
  'pinia': 'https://github.com/vuejs/pinia',
29
+ 'deepinfra': 'https://github.com/deepinfra/docs',
29
30
  }
30
31
 
31
32
  /**
@@ -156,6 +157,7 @@ export const collections: Record<string, string[]> = {
156
157
  'builderx_api-rabbitmq',
157
158
  'builderx_api-mongodb',
158
159
  'git-commit',
160
+ 'deepinfra',
159
161
  ]
160
162
  }
161
163
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "devskill",
3
- "version": "2.0.10",
3
+ "version": "2.0.11",
4
4
  "description": "Equip Cursor, Windsurf, Cline, Antigravity, Claude Code, Codex, GitHub Copilot and other AI Agents with expert programming superpowers via a single interactive prompt.",
5
5
  "homepage": "https://vskill.vercel.app",
6
6
  "repository": {
@@ -0,0 +1,72 @@
1
+ ---
2
+ name: deepinfra
3
+ description: Integration guidelines for DeepInfra API. Directs the agent to adapt DeepInfra REST API calls to the target repository's standard HTTP client.
4
+ metadata:
5
+ author: AI Assistant
6
+ version: "1.0.1"
7
+ source: Generated from sources/deepinfra
8
+ ---
9
+
10
+ # DeepInfra API
11
+
12
+ > This skill focuses on integrating DeepInfra's REST API into any project. **Do not use language-specific SDKs** (like the `openai` NodeJS package or LangChain) unless the project already heavily depends on them.
13
+
14
+ ## Key Integration Rules
15
+
16
+ - **Use the Repository's Native HTTP Client**: Before writing code to call the DeepInfra API, **you must analyze the current repository** to identify how external HTTP requests are typically made (e.g., a custom `useFetch` wrapper, an Axios instance, Elixir `Req`, or standard `fetch`).
17
+ - **Adapt to Existing Patterns**: Send the DeepInfra request using the exact fetching mechanism discovered in the project. Do not introduce raw `fetch` calls or new libraries if a standard API wrapper already exists.
18
+ - **Endpoint**: The endpoint for most AI inference tasks (chat completions) is `POST https://api.deepinfra.com/v1/openai/chat/completions`.
19
+ - **Authentication**: You must set the `Authorization: Bearer <API_KEY>` header. Always read the API key from environment variables (e.g., `process.env.DEEPINFRA_API_KEY` or `System.get_env("DEEPINFRA_API_KEY")`) rather than hardcoding.
20
+ - **Headers**: Always include `Content-Type: application/json`.
21
+
22
+ ## Quick Reference
23
+
24
+ ### Standard JSON Payload
25
+
26
+ Adapt this payload to the project's standard API caller:
27
+
28
+ ```json
29
+ {
30
+ "model": "meta-llama/Meta-Llama-3-70B-Instruct",
31
+ "messages": [
32
+ {
33
+ "role": "user",
34
+ "content": "Hello!"
35
+ }
36
+ ]
37
+ }
38
+ ```
39
+
40
+ ### Example Response Structure
41
+
42
+ DeepInfra returns an OpenAI-compatible JSON structure. Parse the response according to the repository's typical data-transformation layers:
43
+
44
+ ```json
45
+ {
46
+ "id": "chatcmpl-guMTxWgpFf",
47
+ "object": "chat.completion",
48
+ "created": 1694623155,
49
+ "model": "meta-llama/Meta-Llama-3-70B-Instruct",
50
+ "choices": [
51
+ {
52
+ "index": 0,
53
+ "message": {
54
+ "role": "assistant",
55
+ "content": "Hello! It's nice to meet you."
56
+ },
57
+ "finish_reason": "stop"
58
+ }
59
+ ],
60
+ "usage": {
61
+ "prompt_tokens": 15,
62
+ "completion_tokens": 16,
63
+ "total_tokens": 31
64
+ }
65
+ }
66
+ ```
67
+
68
+ ### Supported Models
69
+
70
+ Always use the exact model ID from the DeepInfra catalog, such as:
71
+ - `deepseek-ai/DeepSeek-V3`
72
+ - `meta-llama/Meta-Llama-3-70B-Instruct`