cloud-pc-templates 1.2.1 → 1.2.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -74,17 +74,46 @@ npx cloud-pc-templates ai login loginMode ollamalocal
  ```
 
  **What it does:**
- - Initializes connection to your local Ollama installation
- - Displays connection status
+ 1. Checks if Ollama is running on the default port (11434)
+ 2. If Ollama is not running, displays a warning with installation instructions
+ 3. Downloads the Ollama Offline Proxy script from GitHub
+ 4. Runs the proxy on port 3005 (no API key required)
+ 5. Validates the proxy health endpoint
+ 6. Displays "Logged in" confirmation
+
+ **Example (with Ollama running):**
+ ```bash
+ $ npx cloud-pc-templates ai login loginMode ollamalocal
+ 🔍 Checking if Ollama is running...
+ ✓ Ollama is running on localhost:11434
 
- **Example:**
+ 🚀 Starting Ollama Offline Proxy...
+ ✓ Logged in
+  - Endpoint checked: http://localhost:3005/health
+  - Ollama running on: localhost:11434
+ ```
+
+ **Example (without Ollama running):**
  ```bash
  $ npx cloud-pc-templates ai login loginMode ollamalocal
- AI Login initialized with mode: ollamalocal
-  - Connecting to Ollama Local...
-  - Initializing local connection...
+ 🔍 Checking if Ollama is running...
+ ⚠️ WARNING: Ollama is not running on localhost:11434
+    Please install Ollama and run it before using this login mode.
+    Download Ollama from: https://ollama.ai
+
+    After installation, start Ollama with:
+    ollama serve
+
+    Continuing anyway...
  ```
 
+ **Features:**
+ - No API key required
+ - Checks for local Ollama installation
+ - Helpful warnings with installation instructions
+ - Runs proxy on port 3005
+ - Provides detailed status output
+
  ### Command Discovery
 
  The CLI features intelligent command discovery. If you don't provide all required arguments, it shows available options:
@@ -116,6 +145,7 @@ cloud-pc-templates/
  ├── index.js           # Main entry point, command tree, and CLI routing
  ├── handlers/
  │   ├── ollamacloud.js # Ollama Cloud login functionality
+ │   ├── ollamalocal.js # Ollama Local login functionality
  │   └── launch.js      # Website launcher
  ├── package.json       # Project metadata and bin configuration
  └── README.md          # This file
@@ -134,6 +164,12 @@ cloud-pc-templates/
  - `downloadAndRunProxy()`: Downloads and executes proxy script with API key
  - `checkAndLoginOllamaCloud()`: Main login orchestrator
 
+ #### handlers/ollamalocal.js
+ - `checkOllamaHealth()`: Verifies Ollama is running on port 11434
+ - `downloadAndRunProxy()`: Downloads and executes offline proxy script
+ - `checkProxyHealth()`: Health check for the offline proxy
+ - `checkAndLoginOllamaLocal()`: Main login orchestrator with warning system
+
  #### handlers/launch.js
  - `openBrowser()`: Cross-platform browser launcher
  - `launchWebsite()`: Opens cloud-pc-templates.com
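The README lists `openBrowser()` as a cross-platform browser launcher, but `handlers/launch.js` is unchanged in this release, so its source does not appear in the diff. As a rough illustration only (not the package's actual code), such a launcher is typically built on the platform's URL opener:

```js
// Sketch only — NOT the package's implementation of handlers/launch.js.
// Shows the usual cross-platform pattern: 'open' on macOS, 'start' via
// cmd on Windows, 'xdg-open' on Linux desktops.
const { spawn } = require('child_process');

function openBrowser(url) {
  let command, args;
  if (process.platform === 'darwin') {        // macOS
    command = 'open'; args = [url];
  } else if (process.platform === 'win32') {  // Windows: 'start' is a cmd built-in
    command = 'cmd'; args = ['/c', 'start', '', url];
  } else {                                    // Linux/BSD desktops
    command = 'xdg-open'; args = [url];
  }
  // Detach so the CLI can exit while the browser stays open.
  spawn(command, args, { stdio: 'ignore', detached: true }).unref();
}

// e.g. what launchWebsite() plausibly does:
openBrowser('https://cloud-pc-templates.com');
```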
@@ -247,6 +283,15 @@ When entering your API key:
  curl http://localhost:3004/health
  ```
 
+ ### Ollama Local login fails
+ - Make sure Ollama is installed: https://ollama.ai
+ - Start Ollama with: `ollama serve`
+ - Check if Ollama is running on port 11434:
+   ```bash
+   curl http://localhost:11434/api/health
+   ```
+ - Ensure port 3005 is not in use by another application
+
  ### Browser won't open with `launch`
  - Ensure you have a default browser configured
  - On Linux, make sure `xdg-open` is installed: `sudo apt-get install xdg-utils`
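The port checks in the added troubleshooting section can be combined into a single probe. A minimal Node sketch that reuses the endpoints this README names (assumes localhost and the default ports; a `null` result means nothing answered on that port):

```js
// Sketch: probe the Ollama port (11434) and the proxy port (3005) in one go.
const http = require('http');

function probe(port, path) {
  return new Promise((resolve) => {
    const req = http.get({ hostname: 'localhost', port, path }, (res) => {
      res.resume();               // drain the body so the socket closes
      resolve(res.statusCode);
    });
    req.on('error', () => resolve(null));
  });
}

(async () => {
  console.log('Ollama (11434):', await probe(11434, '/api/health'));
  console.log('Proxy  (3005) :', await probe(3005, '/health'));
})();
```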
package/handlers/ollamalocal.js ADDED
@@ -0,0 +1,147 @@
+ const http = require('http');
+ const https = require('https');
+ const fs = require('fs');
+ const path = require('path');
+ const { spawn } = require('child_process');
+ const os = require('os');
+
+ const OLLAMA_PORT = 11434;
+ const PROXY_PORT = 3005;
+
+ // Function to check if Ollama is running locally
+ function checkOllamaHealth() {
+   return new Promise((resolve) => {
+     const request = http.request(
+       {
+         hostname: 'localhost',
+         port: OLLAMA_PORT,
+         path: '/api/health',
+         method: 'GET'
+       },
+       (res) => {
+         resolve(res.statusCode === 200);
+       }
+     );
+
+     request.on('error', () => {
+       resolve(false);
+     });
+
+     request.end();
+   });
+ }
+
+ // Function to download and run the offline proxy
+ async function downloadAndRunProxy() {
+   const url = 'https://raw.githubusercontent.com/devashish234073/cloud-pc-templates-marketplace/refs/heads/main/JS-PROXIES/ollamaoffline-proxy.js';
+   const tempFile = path.join(os.tmpdir(), 'ollamaoffline-proxy.js');
+
+   // Download the file
+   await new Promise((resolve, reject) => {
+     const file = fs.createWriteStream(tempFile);
+     https.get(url, (res) => {
+       res.pipe(file);
+       file.on('finish', () => {
+         file.close();
+         resolve();
+       });
+     }).on('error', reject);
+   });
+
+   // Run the proxy (no API key needed)
+   return new Promise((resolve, reject) => {
+     const child = spawn('node', [tempFile]);
+
+     let serverReady = false;
+
+     // Capture stdout to detect when server is ready
+     child.stdout.on('data', (data) => {
+       const output = data.toString();
+       console.log(output);
+
+       // Check if server indicates it's ready
+       if (output.includes('listening') || output.includes('started') || output.includes('running')) {
+         serverReady = true;
+       }
+     });
+
+     // Capture stderr for error messages
+     child.stderr.on('data', (data) => {
+       console.error(data.toString());
+     });
+
+     // Wait a bit for server to start, then validate
+     setTimeout(async () => {
+       try {
+         const endpoint = `http://localhost:${PROXY_PORT}/health`;
+         const isHealthy = await checkProxyHealth(endpoint);
+         if (isHealthy) {
+           console.log('✓ Logged in');
+           console.log(`  - Endpoint checked: ${endpoint}`);
+           console.log(`  - Ollama running on: localhost:${OLLAMA_PORT}`);
+         } else {
+           console.log('✓ Proxy started');
+           console.log(`  - Endpoint: ${endpoint}`);
+           console.log(`  - Ollama running on: localhost:${OLLAMA_PORT}`);
+         }
+         resolve();
+       } catch (error) {
+         reject(error);
+       }
+     }, 2000);
+
+     child.on('error', reject);
+   });
+ }
+
+ // Function to check health endpoint
+ function checkProxyHealth(endpoint) {
+   return new Promise((resolve) => {
+     const url = new URL(endpoint);
+     const protocol = url.protocol === 'https:' ? https : http;
+
+     const request = protocol.request(url, { method: 'GET' }, (res) => {
+       resolve(res.statusCode === 200);
+     });
+
+     request.on('error', () => {
+       resolve(false);
+     });
+
+     request.end();
+   });
+ }
+
+ // Function to check and login to Ollama Local
+ async function checkAndLoginOllamaLocal() {
+   try {
+     console.log('🔍 Checking if Ollama is running...');
+     const isOllamaRunning = await checkOllamaHealth();
+
+     if (!isOllamaRunning) {
+       console.warn('⚠️ WARNING: Ollama is not running on localhost:11434');
+       console.warn('   Please install Ollama and run it before using this login mode.');
+       console.warn('   Download Ollama from: https://ollama.ai');
+       console.warn('');
+       console.warn('   After installation, start Ollama with:');
+       console.warn('   ollama serve');
+       console.warn('');
+       console.warn('   Continuing anyway...');
+     } else {
+       console.log('✓ Ollama is running on localhost:11434');
+     }
+
+     console.log('');
+     console.log('🚀 Starting Ollama Offline Proxy...');
+     await downloadAndRunProxy();
+   } catch (error) {
+     console.error('Error during login:', error.message);
+   }
+ }
+
+ module.exports = {
+   checkAndLoginOllamaLocal,
+   checkOllamaHealth,
+   downloadAndRunProxy,
+   checkProxyHealth
+ };
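Since the module exports its helpers individually, they can also be driven from a plain script rather than through the CLI. A minimal usage sketch (the require path assumes you run it from the package root; otherwise this mirrors what the CLI does):

```js
// Hypothetical standalone usage of the new module's exports.
const {
  checkOllamaHealth,
  checkAndLoginOllamaLocal
} = require('./handlers/ollamalocal');

(async () => {
  // Same probe the CLI performs before starting the proxy.
  const running = await checkOllamaHealth();
  console.log(running ? 'Ollama detected on 11434' : 'Ollama not detected');

  // Full flow: warn if Ollama is absent, then download and start the proxy.
  await checkAndLoginOllamaLocal();
})();
```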
package/index.js CHANGED
@@ -1,6 +1,7 @@
  #!/usr/bin/env node
 
  const { checkAndLoginOllamaCloud } = require('./handlers/ollamacloud');
+ const { checkAndLoginOllamaLocal } = require('./handlers/ollamalocal');
  const { launchWebsite } = require('./handlers/launch');
 
  // Command tree structure
@@ -65,9 +66,7 @@ async function aiLogin(mode) {
    if (mode === 'ollamacloud') {
      await checkAndLoginOllamaCloud();
    } else if (mode === 'ollamalocal') {
-     console.log(`✓ AI Login initialized with mode: ${mode}`);
-     console.log('  - Connecting to Ollama Local...');
-     console.log('  - Initializing local connection...');
+     await checkAndLoginOllamaLocal();
    }
  }
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "cloud-pc-templates",
-   "version": "1.2.1",
+   "version": "1.2.2",
    "description": "",
    "main": "index.js",
    "bin": {