@tothalex/nulljs 0.0.48 → 0.0.54
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +25 -32
- package/scripts/install-server.js +0 -199
- package/src/commands/api.ts +0 -16
- package/src/commands/auth.ts +0 -54
- package/src/commands/create.ts +0 -43
- package/src/commands/deploy.ts +0 -160
- package/src/commands/dev/function/index.ts +0 -221
- package/src/commands/dev/function/utils.ts +0 -99
- package/src/commands/dev/index.tsx +0 -126
- package/src/commands/dev/logging-manager.ts +0 -87
- package/src/commands/dev/server/index.ts +0 -48
- package/src/commands/dev/server/utils.ts +0 -37
- package/src/commands/dev/ui/components/scroll-area.tsx +0 -141
- package/src/commands/dev/ui/components/tab-bar.tsx +0 -67
- package/src/commands/dev/ui/index.tsx +0 -71
- package/src/commands/dev/ui/logging-context.tsx +0 -76
- package/src/commands/dev/ui/tabs/functions-tab.tsx +0 -35
- package/src/commands/dev/ui/tabs/server-tab.tsx +0 -36
- package/src/commands/dev/ui/tabs/vite-tab.tsx +0 -35
- package/src/commands/dev/ui/use-logging.tsx +0 -34
- package/src/commands/dev/vite/index.ts +0 -54
- package/src/commands/dev/vite/utils.ts +0 -71
- package/src/commands/host.ts +0 -339
- package/src/commands/index.ts +0 -8
- package/src/commands/profile.ts +0 -189
- package/src/commands/secret.ts +0 -79
- package/src/index.ts +0 -346
- package/src/lib/api.ts +0 -189
- package/src/lib/bundle/external.ts +0 -23
- package/src/lib/bundle/function/index.ts +0 -46
- package/src/lib/bundle/index.ts +0 -2
- package/src/lib/bundle/react/index.ts +0 -2
- package/src/lib/bundle/react/spa.ts +0 -77
- package/src/lib/bundle/react/ssr/client.ts +0 -93
- package/src/lib/bundle/react/ssr/config.ts +0 -77
- package/src/lib/bundle/react/ssr/index.ts +0 -4
- package/src/lib/bundle/react/ssr/props.ts +0 -71
- package/src/lib/bundle/react/ssr/server.ts +0 -83
- package/src/lib/bundle/types.ts +0 -4
- package/src/lib/config.ts +0 -347
- package/src/lib/deployment.ts +0 -244
- package/src/lib/update-server.ts +0 -262
package/src/lib/deployment.ts
DELETED
|
@@ -1,244 +0,0 @@
|
|
|
1
|
-
import { basename } from 'path'
|
|
2
|
-
import { existsSync, readFileSync, writeFileSync } from 'fs'
|
|
3
|
-
import { join } from 'path'
|
|
4
|
-
import { build } from 'vite'
|
|
5
|
-
import { createHash } from 'crypto'
|
|
6
|
-
import type { OutputAsset, OutputChunk, RollupOutput } from 'rollup'
|
|
7
|
-
import chalk from 'chalk'
|
|
8
|
-
|
|
9
|
-
import { functionConfig, spaClientConfig, ssrConfigConfig } from './bundle'
|
|
10
|
-
import { createDeployment } from './api'
|
|
11
|
-
import { getCloudPath } from './config'
|
|
12
|
-
|
|
13
|
-
export const isReact = (file: string) => file.endsWith('.tsx')
|
|
14
|
-
export const isAsset = (file: string) => file.endsWith('.css')
|
|
15
|
-
export const isTypescript = (file: string) => file.endsWith('.ts')
|
|
16
|
-
export const isJavaScript = (file: string) => file.endsWith('.js')
|
|
17
|
-
export const getFileName = (path: string) => basename(path)
|
|
18
|
-
|
|
19
|
-
export const isOutputAsset = (output: OutputAsset | OutputChunk): output is OutputAsset => {
|
|
20
|
-
return output.type === 'asset'
|
|
21
|
-
}
|
|
22
|
-
|
|
23
|
-
export const isOutputChunk = (output: OutputAsset | OutputChunk): output is OutputChunk => {
|
|
24
|
-
return output.type === 'chunk'
|
|
25
|
-
}
|
|
26
|
-
|
|
27
|
-
/** The two kinds of artifacts the CLI can deploy. */
export type DeploymentType = 'react' | 'function'

/**
 * A named bundle of build outputs uploaded as one deployment.
 * `assets` carries each emitted file's name and its full source text
 * (the hash cache below fingerprints exactly these fields).
 */
export type Deployment = {
  name: string
  type: DeploymentType
  assets: Array<{
    fileName: string
    code: string
  }>
}
|
|
37
|
-
|
|
38
|
-
// Persistent cache utilities
|
|
39
|
-
const getCachePath = (): string => {
|
|
40
|
-
const nulljsPath = getCloudPath()
|
|
41
|
-
return join(nulljsPath, 'deployment-cache.json')
|
|
42
|
-
}
|
|
43
|
-
|
|
44
|
-
const loadDeploymentCache = (): Map<string, string> => {
|
|
45
|
-
const cachePath = getCachePath()
|
|
46
|
-
|
|
47
|
-
if (!existsSync(cachePath)) {
|
|
48
|
-
return new Map()
|
|
49
|
-
}
|
|
50
|
-
|
|
51
|
-
try {
|
|
52
|
-
const cacheContent = readFileSync(cachePath, 'utf-8')
|
|
53
|
-
const cacheObject = JSON.parse(cacheContent)
|
|
54
|
-
return new Map(Object.entries(cacheObject))
|
|
55
|
-
} catch {
|
|
56
|
-
// If cache is corrupted, start fresh
|
|
57
|
-
return new Map()
|
|
58
|
-
}
|
|
59
|
-
}
|
|
60
|
-
|
|
61
|
-
const saveDeploymentCache = (cache: Map<string, string>): void => {
|
|
62
|
-
const cachePath = getCachePath()
|
|
63
|
-
const nulljsPath = getCloudPath()
|
|
64
|
-
|
|
65
|
-
// Ensure .nulljs directory exists
|
|
66
|
-
if (!existsSync(nulljsPath)) {
|
|
67
|
-
require('fs').mkdirSync(nulljsPath, { recursive: true })
|
|
68
|
-
}
|
|
69
|
-
|
|
70
|
-
const cacheObject = Object.fromEntries(cache)
|
|
71
|
-
writeFileSync(cachePath, JSON.stringify(cacheObject, null, 2), 'utf-8')
|
|
72
|
-
}
|
|
73
|
-
|
|
74
|
-
// Load cache on startup
|
|
75
|
-
let deploymentHashCache = loadDeploymentCache()
|
|
76
|
-
|
|
77
|
-
const generateDeploymentHash = (deployment: Deployment): string => {
|
|
78
|
-
const assetsString = deployment.assets
|
|
79
|
-
.map((asset) => `${asset.fileName}:${asset.code}`)
|
|
80
|
-
.sort()
|
|
81
|
-
.join('|')
|
|
82
|
-
|
|
83
|
-
return createHash('sha256').update(assetsString).digest('hex')
|
|
84
|
-
}
|
|
85
|
-
|
|
86
|
-
const getCachedHash = (deploymentName: string, type: DeploymentType): string | null => {
|
|
87
|
-
const cacheKey = `${type}-${deploymentName}`
|
|
88
|
-
return deploymentHashCache.get(cacheKey) || null
|
|
89
|
-
}
|
|
90
|
-
|
|
91
|
-
const saveCachedHash = (deploymentName: string, type: DeploymentType, hash: string): void => {
|
|
92
|
-
const cacheKey = `${type}-${deploymentName}`
|
|
93
|
-
deploymentHashCache.set(cacheKey, hash)
|
|
94
|
-
saveDeploymentCache(deploymentHashCache)
|
|
95
|
-
}
|
|
96
|
-
|
|
97
|
-
const hasDeploymentChanged = (
|
|
98
|
-
deployment: Deployment,
|
|
99
|
-
force: boolean = false,
|
|
100
|
-
logger: any = console
|
|
101
|
-
): boolean => {
|
|
102
|
-
if (force) {
|
|
103
|
-
logger.log(`Force flag set for ${deployment.name}, deploying...`)
|
|
104
|
-
return true
|
|
105
|
-
}
|
|
106
|
-
|
|
107
|
-
const currentHash = generateDeploymentHash(deployment)
|
|
108
|
-
const cachedHash = getCachedHash(deployment.name, deployment.type)
|
|
109
|
-
|
|
110
|
-
if (!cachedHash) {
|
|
111
|
-
logger.log(`No cached build found for ${deployment.name}, deploying...`)
|
|
112
|
-
return true
|
|
113
|
-
}
|
|
114
|
-
|
|
115
|
-
const hasChanged = currentHash !== cachedHash
|
|
116
|
-
|
|
117
|
-
if (hasChanged) {
|
|
118
|
-
logger.log(`Build changed for ${deployment.name}, deploying...`)
|
|
119
|
-
} else {
|
|
120
|
-
logger.log(`No changes detected for ${deployment.name}, skipping deployment`)
|
|
121
|
-
}
|
|
122
|
-
|
|
123
|
-
return hasChanged
|
|
124
|
-
}
|
|
125
|
-
|
|
126
|
-
/**
 * Outcome of a deploy attempt.
 * `deployed`: assets were uploaded; `cached`: skipped because the hash
 * matched the previous deploy; `error`: present when the attempt failed.
 */
export type DeploymentResult = {
  deployed: boolean
  cached: boolean
  error?: Error
}
|
|
131
|
-
|
|
132
|
-
export const deployFunction = async (
|
|
133
|
-
path: string,
|
|
134
|
-
logger: any = console,
|
|
135
|
-
force: boolean = false
|
|
136
|
-
): Promise<DeploymentResult> => {
|
|
137
|
-
const file = basename(path)
|
|
138
|
-
|
|
139
|
-
const deployment: Deployment = {
|
|
140
|
-
name: file,
|
|
141
|
-
type: 'function',
|
|
142
|
-
assets: []
|
|
143
|
-
}
|
|
144
|
-
|
|
145
|
-
try {
|
|
146
|
-
const functionBuild = (await build(functionConfig(path))) as RollupOutput
|
|
147
|
-
|
|
148
|
-
const handler = functionBuild.output.find(
|
|
149
|
-
(output) => output.fileName === 'handler.js'
|
|
150
|
-
) as OutputChunk
|
|
151
|
-
|
|
152
|
-
if (!handler) {
|
|
153
|
-
logger.log(chalk.yellow(`Handler not found for ${deployment.name}`))
|
|
154
|
-
return { deployed: false, cached: false, error: new Error('Handler not found') }
|
|
155
|
-
}
|
|
156
|
-
|
|
157
|
-
deployment.assets.push({
|
|
158
|
-
fileName: handler.fileName,
|
|
159
|
-
code: handler.code
|
|
160
|
-
})
|
|
161
|
-
|
|
162
|
-
if (!hasDeploymentChanged(deployment, force, logger)) {
|
|
163
|
-
return { deployed: false, cached: true }
|
|
164
|
-
}
|
|
165
|
-
|
|
166
|
-
await createDeployment(deployment, logger)
|
|
167
|
-
|
|
168
|
-
const hash = generateDeploymentHash(deployment)
|
|
169
|
-
saveCachedHash(deployment.name, deployment.type, hash)
|
|
170
|
-
|
|
171
|
-
return { deployed: true, cached: false }
|
|
172
|
-
} catch (error) {
|
|
173
|
-
logger.error ? logger.error(error) : logger.log(`Error: ${error}`)
|
|
174
|
-
return { deployed: false, cached: false, error: error as Error }
|
|
175
|
-
}
|
|
176
|
-
}
|
|
177
|
-
|
|
178
|
-
export const deployPage = async (path: string) => {
|
|
179
|
-
const file = basename(path)
|
|
180
|
-
|
|
181
|
-
const deployment: Deployment = {
|
|
182
|
-
name: file,
|
|
183
|
-
type: 'react',
|
|
184
|
-
assets: []
|
|
185
|
-
}
|
|
186
|
-
|
|
187
|
-
// const serverBuild = (await build(ssrServerConfig(path))) as RollupOutput
|
|
188
|
-
// const server = serverBuild.output.pop() as OutputChunk
|
|
189
|
-
//
|
|
190
|
-
// deployment.assets.push({
|
|
191
|
-
// fileName: 'server.js',
|
|
192
|
-
// code: server.code
|
|
193
|
-
// })
|
|
194
|
-
|
|
195
|
-
const configBuild = (await build(ssrConfigConfig(path))) as RollupOutput
|
|
196
|
-
const config = configBuild.output.pop() as OutputChunk
|
|
197
|
-
|
|
198
|
-
deployment.assets.push({
|
|
199
|
-
fileName: 'config.js',
|
|
200
|
-
code: config.code
|
|
201
|
-
})
|
|
202
|
-
|
|
203
|
-
// const propsBuild = (await build(ssrPropsConfig(path))) as RollupOutput
|
|
204
|
-
// const props = propsBuild.output.pop() as OutputChunk
|
|
205
|
-
//
|
|
206
|
-
// deployment.assets.push({
|
|
207
|
-
// fileName: 'props.js',
|
|
208
|
-
// code: props.code
|
|
209
|
-
// })
|
|
210
|
-
|
|
211
|
-
const clientConfig = spaClientConfig(path)
|
|
212
|
-
const clientBuild = (await build(clientConfig)) as RollupOutput
|
|
213
|
-
|
|
214
|
-
for (const clientAsset of clientBuild.output) {
|
|
215
|
-
const fileName = getFileName(clientAsset.fileName)
|
|
216
|
-
|
|
217
|
-
if (isOutputChunk(clientAsset)) {
|
|
218
|
-
deployment.assets.push({
|
|
219
|
-
fileName,
|
|
220
|
-
code: clientAsset.code
|
|
221
|
-
})
|
|
222
|
-
}
|
|
223
|
-
|
|
224
|
-
if (isOutputAsset(clientAsset)) {
|
|
225
|
-
deployment.assets.push({
|
|
226
|
-
fileName,
|
|
227
|
-
code: clientAsset.source as string
|
|
228
|
-
})
|
|
229
|
-
}
|
|
230
|
-
}
|
|
231
|
-
|
|
232
|
-
if (!hasDeploymentChanged(deployment)) {
|
|
233
|
-
return // Skip deployment if no changes
|
|
234
|
-
}
|
|
235
|
-
|
|
236
|
-
try {
|
|
237
|
-
await createDeployment(deployment)
|
|
238
|
-
const hash = generateDeploymentHash(deployment)
|
|
239
|
-
saveCachedHash(deployment.name, deployment.type, hash)
|
|
240
|
-
} catch (error) {
|
|
241
|
-
console.error(error)
|
|
242
|
-
throw error // Re-throw the error so it propagates to the caller
|
|
243
|
-
}
|
|
244
|
-
}
|
package/src/lib/update-server.ts
DELETED
|
@@ -1,262 +0,0 @@
|
|
|
1
|
-
import { existsSync, createWriteStream, chmodSync, unlinkSync, mkdirSync } from 'node:fs'
|
|
2
|
-
import { join, dirname } from 'node:path'
|
|
3
|
-
import { fileURLToPath } from 'node:url'
|
|
4
|
-
import https from 'node:https'
|
|
5
|
-
import * as tar from 'tar'
|
|
6
|
-
import chalk from 'chalk'
|
|
7
|
-
|
|
8
|
-
// --- Configuration ---
// Public S3 bucket that hosts prebuilt server binaries. Release archives
// live under `releases/<version>/` (see getLatestVersionFromS3 below).
const S3_BASE_URL = 'https://nulljs.s3.eu-north-1.amazonaws.com'
const S3_PREFIX = 'releases/'
const DOWNLOAD_BASE_URL = `${S3_BASE_URL}/${S3_PREFIX}`
// ---------------------
|
|
13
|
-
|
|
14
|
-
/** Platform-specific naming pieces used to locate the right release archive. */
interface PlatformInfo {
  target: string // Rust-style target triple, e.g. 'aarch64-apple-darwin'
  extension: string // archive extension ('.tar.gz')
  binaryName: string // executable name inside the archive ('server')
}

/** Resolved location of the newest release on S3. */
interface S3DownloadInfo {
  downloadUrl: string
  version: string // version folder name, e.g. 'v1.2.3'
}
|
|
24
|
-
|
|
25
|
-
// --- Core Helper Functions (Unchanged) ---
|
|
26
|
-
|
|
27
|
-
function getPlatformInfo(): PlatformInfo {
|
|
28
|
-
const platform = process.platform
|
|
29
|
-
const arch = process.arch
|
|
30
|
-
|
|
31
|
-
const platformMap: Record<string, Record<string, string>> = {
|
|
32
|
-
linux: {
|
|
33
|
-
x64: 'x86_64-unknown-linux-gnu',
|
|
34
|
-
arm64: 'aarch64-unknown-linux-gnu'
|
|
35
|
-
},
|
|
36
|
-
darwin: {
|
|
37
|
-
x64: 'x86_64-apple-darwin',
|
|
38
|
-
arm64: 'aarch64-apple-darwin'
|
|
39
|
-
}
|
|
40
|
-
}
|
|
41
|
-
|
|
42
|
-
const target = platformMap[platform]?.[arch]
|
|
43
|
-
if (!target) {
|
|
44
|
-
throw new Error(`Unsupported platform: ${platform}-${arch}`)
|
|
45
|
-
}
|
|
46
|
-
|
|
47
|
-
const extension = '.tar.gz'
|
|
48
|
-
const binaryName = 'server'
|
|
49
|
-
|
|
50
|
-
return { target, extension, binaryName }
|
|
51
|
-
}
|
|
52
|
-
|
|
53
|
-
async function downloadFile(url: string, destination: string): Promise<void> {
|
|
54
|
-
return new Promise((resolve, reject) => {
|
|
55
|
-
const file = createWriteStream(destination)
|
|
56
|
-
|
|
57
|
-
https
|
|
58
|
-
.get(url, (response) => {
|
|
59
|
-
if (response.statusCode === 302 || response.statusCode === 301) {
|
|
60
|
-
// Handle redirect
|
|
61
|
-
return downloadFile(response.headers.location!, destination).then(resolve).catch(reject)
|
|
62
|
-
}
|
|
63
|
-
|
|
64
|
-
if (response.statusCode !== 200) {
|
|
65
|
-
reject(new Error(`Failed to download: ${response.statusCode}`))
|
|
66
|
-
return
|
|
67
|
-
}
|
|
68
|
-
|
|
69
|
-
response.pipe(file)
|
|
70
|
-
|
|
71
|
-
file.on('finish', () => {
|
|
72
|
-
file.close()
|
|
73
|
-
resolve()
|
|
74
|
-
})
|
|
75
|
-
|
|
76
|
-
file.on('error', reject)
|
|
77
|
-
})
|
|
78
|
-
.on('error', reject)
|
|
79
|
-
})
|
|
80
|
-
}
|
|
81
|
-
|
|
82
|
-
async function extractArchive(
|
|
83
|
-
archivePath: string,
|
|
84
|
-
extractPath: string,
|
|
85
|
-
binaryName: string
|
|
86
|
-
): Promise<void> {
|
|
87
|
-
// Extract tar.gz
|
|
88
|
-
await tar.x({
|
|
89
|
-
file: archivePath,
|
|
90
|
-
cwd: extractPath
|
|
91
|
-
})
|
|
92
|
-
|
|
93
|
-
// Make binary executable
|
|
94
|
-
const binaryPath = join(extractPath, binaryName)
|
|
95
|
-
chmodSync(binaryPath, '755')
|
|
96
|
-
}
|
|
97
|
-
|
|
98
|
-
function getServerBinPath(): string {
|
|
99
|
-
// Try to find the server binary relative to this module
|
|
100
|
-
const currentFile = fileURLToPath(import.meta.url)
|
|
101
|
-
const moduleRoot = join(dirname(currentFile), '../..')
|
|
102
|
-
return join(moduleRoot, 'bin', 'server')
|
|
103
|
-
}
|
|
104
|
-
|
|
105
|
-
// --- S3 LISTING Functions (New) ---
|
|
106
|
-
|
|
107
|
-
/**
|
|
108
|
-
* Fetches the raw XML listing from S3.
|
|
109
|
-
*/
|
|
110
|
-
async function fetchXml(url: string): Promise<string> {
|
|
111
|
-
return new Promise((resolve, reject) => {
|
|
112
|
-
https
|
|
113
|
-
.get(url, (res) => {
|
|
114
|
-
if (res.statusCode !== 200) {
|
|
115
|
-
// This likely indicates a 403 Access Denied error
|
|
116
|
-
reject(
|
|
117
|
-
new Error(`S3 Request Failed (${res.statusCode}): Check public 's3:ListBucket' policy.`)
|
|
118
|
-
)
|
|
119
|
-
return
|
|
120
|
-
}
|
|
121
|
-
let data = ''
|
|
122
|
-
res.on('data', (chunk) => (data += chunk))
|
|
123
|
-
res.on('end', () => resolve(data))
|
|
124
|
-
})
|
|
125
|
-
.on('error', reject)
|
|
126
|
-
})
|
|
127
|
-
}
|
|
128
|
-
|
|
129
|
-
/**
|
|
130
|
-
* Lists the S3 prefix, parses the XML, and determines the latest version tag.
|
|
131
|
-
*/
|
|
132
|
-
async function getLatestVersionFromS3(target: string, extension: string): Promise<S3DownloadInfo> {
|
|
133
|
-
// Query S3 for folders using delimiter and prefix
|
|
134
|
-
const listUrl = `${S3_BASE_URL}/?delimiter=/&prefix=${S3_PREFIX}`
|
|
135
|
-
const xmlData = await fetchXml(listUrl)
|
|
136
|
-
|
|
137
|
-
// Regex to find all CommonPrefixes entries (the version folders)
|
|
138
|
-
// Example: <Prefix>releases/v1.0.0/</Prefix>
|
|
139
|
-
const prefixRegex = /<Prefix>(releases\/v[^<]+)\/<\/Prefix>/g
|
|
140
|
-
const versionFolders: string[] = []
|
|
141
|
-
let match: RegExpExecArray | null
|
|
142
|
-
|
|
143
|
-
while ((match = prefixRegex.exec(xmlData)) !== null) {
|
|
144
|
-
// Extract version part: "releases/v1.0.0/" -> "v1.0.0"
|
|
145
|
-
const fullPrefix = match[1]
|
|
146
|
-
const version = fullPrefix.substring(S3_PREFIX.length)
|
|
147
|
-
versionFolders.push(version)
|
|
148
|
-
}
|
|
149
|
-
|
|
150
|
-
if (versionFolders.length === 0) {
|
|
151
|
-
throw new Error('No version folders found in S3 bucket. Has the CI/CD run?')
|
|
152
|
-
}
|
|
153
|
-
|
|
154
|
-
// Sort versions to find the latest (using semantic versioning logic)
|
|
155
|
-
const sortedVersions = versionFolders.sort((a, b) => {
|
|
156
|
-
// Strips 'v', splits by '.', and converts to number arrays for comparison
|
|
157
|
-
const aParts = a.replace('v', '').split('.').map(Number)
|
|
158
|
-
const bParts = b.replace('v', '').split('.').map(Number)
|
|
159
|
-
|
|
160
|
-
// Compare major, minor, and patch numbers (assuming vX.Y.Z)
|
|
161
|
-
for (let i = 0; i < 3; i++) {
|
|
162
|
-
if (aParts[i] > bParts[i]) return 1
|
|
163
|
-
if (aParts[i] < bParts[i]) return -1
|
|
164
|
-
}
|
|
165
|
-
return 0
|
|
166
|
-
})
|
|
167
|
-
|
|
168
|
-
// The latest version is the last one after sorting
|
|
169
|
-
const latestVersion = sortedVersions[sortedVersions.length - 1]
|
|
170
|
-
|
|
171
|
-
// Construct the full download URL
|
|
172
|
-
const downloadUrl = `${DOWNLOAD_BASE_URL}${latestVersion}/nulljs-server-${target}${extension}`
|
|
173
|
-
|
|
174
|
-
return { downloadUrl, version: latestVersion }
|
|
175
|
-
}
|
|
176
|
-
|
|
177
|
-
// --- Main Update Function (Modified) ---

/**
 * Downloads and installs the latest nulljs server binary from S3.
 *
 * Flow: back up any existing binary, resolve the newest release for this
 * platform via the S3 listing, download + extract it, then delete the
 * backup. If any step after the backup fails, the previous binary is
 * restored before the error is re-thrown.
 *
 * @throws on unsupported platforms, S3 listing failures, or download errors.
 */
export async function updateServer(): Promise<void> {
  try {
    console.log(chalk.blue('🔄 Updating nulljs server binary...'))

    const { target, extension, binaryName } = getPlatformInfo()

    const serverBinPath = getServerBinPath()
    const binDir = dirname(serverBinPath)
    const archivePath = join(binDir, `server${extension}`)
    const backupPath = join(binDir, 'server.backup')

    // Create bin directory if it doesn't exist
    if (!existsSync(binDir)) {
      mkdirSync(binDir, { recursive: true })
    }

    // Back up the existing binary so a failed update can be rolled back.
    if (existsSync(serverBinPath)) {
      console.log(chalk.blue('💾 Backing up existing binary...'))

      // Drop any stale backup from a previous run first.
      if (existsSync(backupPath)) {
        unlinkSync(backupPath)
      }

      const fs = await import('node:fs/promises')
      await fs.copyFile(serverBinPath, backupPath)
      console.log(chalk.green('✅ Backup created'))
    }

    console.log(chalk.blue(`🔍 Searching for latest server version...`))

    // 1. Get download URL and version from S3 listing (NEW LOGIC)
    const { downloadUrl, version } = await getLatestVersionFromS3(target, extension)
    console.log(chalk.blue(` Found latest version: ${version} for ${target}`))
    console.log(chalk.blue(` Downloading from: ${downloadUrl}`))

    try {
      // Download the archive
      await downloadFile(downloadUrl, archivePath)
      console.log(chalk.green('✅ Download completed'))

      // Remove existing binary before extracting new one
      if (existsSync(serverBinPath)) {
        unlinkSync(serverBinPath)
      }

      // Extract the binary
      console.log(chalk.blue('📂 Extracting binary...'))
      await extractArchive(archivePath, binDir, binaryName)

      // Clean up archive
      unlinkSync(archivePath)

      // Remove backup since update was successful
      if (existsSync(backupPath)) {
        unlinkSync(backupPath)
      }

      console.log(chalk.green('✅ nulljs server updated successfully'))
    } catch (error) {
      // If update failed and we have a backup, restore it
      if (existsSync(backupPath)) {
        console.log(chalk.yellow('⚠️ Update failed, restoring backup...'))

        if (existsSync(serverBinPath)) {
          unlinkSync(serverBinPath)
        }

        const fs = await import('node:fs/promises')
        await fs.copyFile(backupPath, serverBinPath)
        unlinkSync(backupPath)

        console.log(chalk.green('✅ Backup restored'))
      }

      throw error
    }
  } catch (error) {
    // Ensure the error message is available if it's not a standard Error object
    const errorMessage = error instanceof Error ? error.message : String(error)
    console.error(chalk.red('❌ Failed to update server binary:'), errorMessage)
    throw error
  }
}
|