@rharkor/caching-for-turbo 2.0.0 → 2.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,237 +0,0 @@
1
- import { TProvider } from 'src/lib/providers'
2
-
3
- import { Readable } from 'stream'
4
- import { RequestContext } from '../../server'
5
- import { TListFile } from '../../server/cleanup'
6
- import {
7
- S3Client,
8
- GetObjectCommand,
9
- DeleteObjectCommand,
10
- ListObjectsV2Command
11
- } from '@aws-sdk/client-s3'
12
- import { Upload } from '@aws-sdk/lib-storage'
13
- import { getCacheKey } from 'src/lib/constants'
14
- import { core } from 'src/lib/core'
15
-
16
- // Helper function to get input value, prioritizing environment variables for local development
17
- const getInput = (name: string, envName?: string): string | undefined => {
18
- // In GitHub Actions context, try core.getInput first
19
- if (process.env.GITHUB_ACTIONS === 'true') {
20
- const coreInput = core.getInput(name)
21
- if (coreInput) return coreInput
22
- }
23
-
24
- // Fall back to environment variable
25
- const envVar = envName || name.toUpperCase().replace(/-/g, '_')
26
- return process.env[envVar]
27
- }
28
-
29
- export const getS3Provider = (): TProvider => {
30
- const s3AccessKeyId = getInput('s3-access-key-id', 'S3_ACCESS_KEY_ID')
31
- const s3SecretAccessKey = getInput(
32
- 's3-secret-access-key',
33
- 'S3_SECRET_ACCESS_KEY'
34
- )
35
- const s3Bucket = getInput('s3-bucket', 'S3_BUCKET')
36
- const s3Region = getInput('s3-region', 'S3_REGION')
37
- const s3Endpoint =
38
- getInput('s3-endpoint', 'S3_ENDPOINT') || 'https://s3.amazonaws.com'
39
- const s3Prefix = getInput('s3-prefix', 'S3_PREFIX') || 'turbogha/'
40
-
41
- if (!s3AccessKeyId || !s3SecretAccessKey || !s3Bucket || !s3Region) {
42
- throw new Error(
43
- 'S3 provider requires s3-access-key-id, s3-secret-access-key, s3-bucket, and s3-region. Set these as environment variables or GitHub Actions inputs.'
44
- )
45
- }
46
-
47
- const s3Client = new S3Client({
48
- region: s3Region,
49
- endpoint: s3Endpoint,
50
- credentials: {
51
- accessKeyId: s3AccessKeyId,
52
- secretAccessKey: s3SecretAccessKey
53
- }
54
- })
55
-
56
- const getS3Key = (hash: string, tag?: string) => {
57
- const key = getCacheKey(hash, tag)
58
- if (s3Prefix) {
59
- return `${s3Prefix}${key}`
60
- }
61
- return key
62
- }
63
-
64
- const save = async (
65
- ctx: RequestContext,
66
- hash: string,
67
- tag: string,
68
- stream: Readable
69
- ): Promise<void> => {
70
- const objectKey = getS3Key(hash, tag)
71
- console.log({ objectKey, s3Prefix })
72
-
73
- try {
74
- // Use the S3 Upload utility which handles multipart uploads for large files
75
- const upload = new Upload({
76
- client: s3Client,
77
- params: {
78
- Bucket: s3Bucket,
79
- Key: objectKey,
80
- Body: stream,
81
- ContentType: 'application/octet-stream'
82
- }
83
- })
84
-
85
- await upload.done()
86
- ctx.log.info(`Saved artifact to S3: ${objectKey}`)
87
- } catch (error) {
88
- ctx.log.info(`Error saving artifact to S3: ${error}`)
89
- throw error
90
- }
91
- }
92
-
93
- const get = async (
94
- ctx: RequestContext,
95
- hash: string
96
- ): Promise<
97
- [number | undefined, Readable | ReadableStream, string | undefined] | null
98
- > => {
99
- // First try to get with just the hash
100
- const objectKey = getS3Key(hash)
101
-
102
- try {
103
- // Try to find the object
104
- const listCommand = new ListObjectsV2Command({
105
- Bucket: s3Bucket,
106
- Prefix: objectKey,
107
- MaxKeys: 10
108
- })
109
-
110
- const listResponse = await s3Client.send(listCommand)
111
-
112
- if (!listResponse.Contents || listResponse.Contents.length === 0) {
113
- ctx.log.info(`No cached artifact found for ${hash}`)
114
- return null
115
- }
116
-
117
- // Find the most recent object that matches the hash prefix
118
- const matchingObjects = listResponse.Contents.filter(
119
- obj => obj.Key && obj.Key.startsWith(objectKey)
120
- )
121
-
122
- if (matchingObjects.length === 0) {
123
- return null
124
- }
125
-
126
- // Sort by last modified date, newest first
127
- matchingObjects.sort((a, b) => {
128
- const dateA = a.LastModified?.getTime() || 0
129
- const dateB = b.LastModified?.getTime() || 0
130
- return dateB - dateA
131
- })
132
-
133
- const latestObject = matchingObjects[0]
134
- const key = latestObject.Key as string
135
-
136
- // Get the object
137
- const getCommand = new GetObjectCommand({
138
- Bucket: s3Bucket,
139
- Key: key
140
- })
141
-
142
- const response = await s3Client.send(getCommand)
143
-
144
- if (!response.Body) {
145
- ctx.log.info(`Failed to get artifact body from S3`)
146
- return null
147
- }
148
-
149
- const size = response.ContentLength
150
- const stream = response.Body as Readable
151
-
152
- // Extract the tag if it exists
153
- let artifactTag: string | undefined
154
- if (key.includes('#')) {
155
- const parts = key.split('#')
156
- artifactTag = parts[parts.length - 1]
157
- }
158
-
159
- ctx.log.info(`Retrieved artifact from S3: ${key}`)
160
- return [size, stream, artifactTag]
161
- } catch (error) {
162
- ctx.log.info(`Error getting artifact from S3: ${error}`)
163
- return null
164
- }
165
- }
166
-
167
- const deleteObj = async (key: string): Promise<void> => {
168
- try {
169
- const deleteCommand = new DeleteObjectCommand({
170
- Bucket: s3Bucket,
171
- Key: key
172
- })
173
-
174
- await s3Client.send(deleteCommand)
175
- } catch (error) {
176
- core.error(`Error deleting artifact from S3: ${error}`)
177
- throw error
178
- }
179
- }
180
-
181
- const list = async (): Promise<TListFile[]> => {
182
- try {
183
- const files: TListFile[] = []
184
- let continuationToken: string | undefined
185
-
186
- do {
187
- // Create a new command for each request with the current continuation token
188
- const listCommand = new ListObjectsV2Command({
189
- Bucket: s3Bucket,
190
- Prefix: s3Prefix,
191
- MaxKeys: 1000,
192
- ContinuationToken: continuationToken
193
- })
194
-
195
- core.debug(
196
- `Listing S3 objects with prefix ${s3Prefix}${continuationToken ? ' and continuation token' : ''}`
197
- )
198
-
199
- const response = await s3Client.send(listCommand)
200
-
201
- if (response.Contents && response.Contents.length > 0) {
202
- core.debug(`Found ${response.Contents.length} objects`)
203
-
204
- const objects = response.Contents.filter(obj => obj.Key).map(
205
- (obj): TListFile => {
206
- return {
207
- path: obj.Key as string,
208
- createdAt: (obj.LastModified || new Date()).toISOString(),
209
- size: obj.Size || 0
210
- }
211
- }
212
- )
213
-
214
- files.push(...objects)
215
- }
216
-
217
- continuationToken = response.NextContinuationToken
218
- if (continuationToken) {
219
- core.debug(`NextContinuationToken: ${continuationToken}`)
220
- }
221
- } while (continuationToken)
222
-
223
- core.debug(`Total files listed: ${files.length}`)
224
- return files
225
- } catch (error) {
226
- core.error(`Error listing artifacts from S3: ${error}`)
227
- throw error
228
- }
229
- }
230
-
231
- return {
232
- save,
233
- get,
234
- delete: deleteObj,
235
- list
236
- }
237
- }
@@ -1,42 +0,0 @@
1
- import { Readable } from 'stream'
2
- import { TListFile } from './server/cleanup'
3
- import { RequestContext } from './server'
4
- import { getGithubProvider } from './providers/cache'
5
- import { getS3Provider } from './providers/s3'
6
- import { core } from './core'
7
-
8
- export type TProvider = {
9
- save: (
10
- ctx: RequestContext,
11
- hash: string,
12
- tag: string,
13
- stream: Readable
14
- ) => Promise<void>
15
- get: (
16
- ctx: RequestContext,
17
- hash: string
18
- ) => Promise<
19
- [number | undefined, Readable | ReadableStream, string | undefined] | null
20
- >
21
- delete: (key: string) => Promise<void>
22
- list: () => Promise<TListFile[]>
23
- }
24
-
25
- export const getProvider = (): TProvider => {
26
- const provider = core.getInput('provider') || process.env.PROVIDER
27
-
28
- if (!provider) {
29
- throw new Error(
30
- 'Provider is required. Set PROVIDER environment variable or provider input.'
31
- )
32
- }
33
-
34
- if (provider === 'github') {
35
- return getGithubProvider()
36
- }
37
- if (provider === 's3') {
38
- return getS3Provider()
39
- }
40
-
41
- throw new Error(`Provider ${provider} not supported`)
42
- }
@@ -1,111 +0,0 @@
1
- import { RequestContext } from '.'
2
- import parse from 'parse-duration'
3
- import { getProvider } from '../providers'
4
- import { parseFileSize } from './utils'
5
- import { core } from '../core'
6
-
7
- export type TListFile = {
8
- path: string
9
- createdAt: string
10
- size: number
11
- }
12
-
13
- export async function cleanup(ctx: RequestContext) {
14
- const maxAge = core.getInput('max-age') || process.env.MAX_AGE
15
- const maxFiles = core.getInput('max-files') || process.env.MAX_FILES
16
- const maxSize = core.getInput('max-size') || process.env.MAX_SIZE
17
-
18
- if (!maxAge && !maxFiles && !maxSize) {
19
- ctx.log.info('No cleanup options provided, skipping cleanup')
20
- return
21
- }
22
-
23
- const { maxAgeParsed, maxFilesParsed, maxSizeParsed } = {
24
- maxAgeParsed: maxAge ? parse(maxAge) : undefined,
25
- maxFilesParsed: maxFiles ? parseInt(maxFiles) : undefined,
26
- maxSizeParsed: maxSize ? parseFileSize(maxSize) : undefined
27
- }
28
-
29
- if (maxAge && !maxAgeParsed) {
30
- core.error('Invalid max-age provided')
31
- throw new Error('Invalid max-age provided')
32
- }
33
-
34
- if (maxFiles && !maxFilesParsed) {
35
- core.error('Invalid max-files provided')
36
- throw new Error('Invalid max-files provided')
37
- }
38
-
39
- if (maxSize && !maxSizeParsed) {
40
- core.error('Invalid max-size provided')
41
- throw new Error('Invalid max-size provided')
42
- }
43
-
44
- const provider = getProvider()
45
-
46
- const files = await provider.list()
47
-
48
- const fileToDelete: (TListFile & {
49
- reason: 'max-age' | 'max-files' | 'max-size'
50
- })[] = []
51
- if (maxAgeParsed) {
52
- const now = new Date()
53
- const age = new Date(now.getTime() - maxAgeParsed)
54
- fileToDelete.push(
55
- ...files
56
- .filter(file => new Date(file.createdAt) < age)
57
- .map(file => ({ ...file, reason: 'max-age' as const }))
58
- )
59
- }
60
-
61
- if (maxFilesParsed && files.length > maxFilesParsed) {
62
- const sortedByDate = [...files].sort(
63
- (a, b) =>
64
- new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime()
65
- )
66
- const excessFiles = sortedByDate.slice(0, files.length - maxFilesParsed)
67
- excessFiles.forEach(file => {
68
- if (!fileToDelete.some(f => f.path === file.path)) {
69
- fileToDelete.push({ ...file, reason: 'max-files' })
70
- }
71
- })
72
- }
73
-
74
- if (maxSizeParsed) {
75
- let totalSize = files.reduce((sum, file) => sum + file.size, 0)
76
-
77
- if (totalSize > maxSizeParsed) {
78
- const sortedByDate = [...files].sort(
79
- (a, b) =>
80
- new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime()
81
- )
82
-
83
- for (const file of sortedByDate) {
84
- if (totalSize <= maxSizeParsed) break
85
-
86
- if (!fileToDelete.some(f => f.path === file.path)) {
87
- fileToDelete.push({ ...file, reason: 'max-size' })
88
- totalSize -= file.size
89
- }
90
- }
91
- }
92
- }
93
-
94
- if (fileToDelete.length > 0) {
95
- ctx.log.info(
96
- `Cleaning up ${fileToDelete.length} files (${fileToDelete.map(
97
- f => `${f.path} (${f.reason})`
98
- )})`
99
- )
100
- for (const file of fileToDelete) {
101
- try {
102
- await provider.delete(file.path)
103
- ctx.log.info(`Deleted ${file.path}`)
104
- } catch (error) {
105
- core.error(`Failed to delete ${file.path}: ${error}`)
106
- }
107
- }
108
- } else {
109
- ctx.log.info('No files to clean up')
110
- }
111
- }
@@ -1,85 +0,0 @@
1
- import Fastify from 'fastify'
2
- import { serverPort } from '../constants'
3
- import { cleanup } from './cleanup'
4
- import { getProvider } from '../providers'
5
-
6
- export type RequestContext = {
7
- log: {
8
- info: (message: string) => void
9
- }
10
- }
11
-
12
- export async function server(): Promise<void> {
13
- //* Create the server
14
- const fastify = Fastify({
15
- logger: true
16
- })
17
-
18
- //? Server status check
19
- fastify.get('/', async () => {
20
- return { ok: true }
21
- })
22
-
23
- //? Shut down the server
24
- const shutdown = async (ctx: RequestContext) => {
25
- //* Handle cleanup
26
- await cleanup(ctx)
27
-
28
- // Exit the server after responding (100ms)
29
- setTimeout(() => process.exit(0), 100)
30
- return { ok: true }
31
- }
32
- fastify.delete('/shutdown', async request => {
33
- return shutdown(request)
34
- })
35
-
36
- //? Handle streaming requests body
37
- // https://www.fastify.io/docs/latest/Reference/ContentTypeParser/#catch-all
38
- fastify.addContentTypeParser(
39
- 'application/octet-stream',
40
- (_req, _payload, done) => {
41
- done(null)
42
- }
43
- )
44
-
45
- //? Upload cache
46
- fastify.put('/v8/artifacts/:hash', async request => {
47
- const hash = (request.params as { hash: string }).hash
48
- request.log.info(`Received artifact for ${hash}`)
49
- const provider = getProvider()
50
- await provider.save(
51
- request,
52
- hash,
53
- String(request.headers['x-artifact-tag'] || ''),
54
- request.raw
55
- )
56
- request.log.info(`Saved artifact for ${hash}`)
57
- return { ok: true }
58
- })
59
-
60
- //? Download cache
61
- fastify.get('/v8/artifacts/:hash', async (request, reply) => {
62
- const hash = (request.params as { hash: string }).hash
63
- request.log.info(`Requested artifact for ${hash}`)
64
- const provider = getProvider()
65
- const result = await provider.get(request, hash)
66
- if (result === null) {
67
- request.log.info(`Artifact for ${hash} not found`)
68
- reply.code(404)
69
- return { ok: false }
70
- }
71
- const [size, stream, artifactTag] = result
72
- if (size) {
73
- reply.header('Content-Length', size)
74
- }
75
- reply.header('Content-Type', 'application/octet-stream')
76
- if (artifactTag) {
77
- reply.header('x-artifact-tag', artifactTag)
78
- }
79
- request.log.info(`Sending artifact for ${hash}`)
80
- return reply.send(stream)
81
- })
82
-
83
- //* Start the server
84
- await fastify.listen({ port: serverPort })
85
- }
@@ -1,91 +0,0 @@
1
- import waitOn from 'wait-on'
2
- import { cachePrefix, cachePath, serverLogFile, serverPort } from '../constants'
3
- import { openSync } from 'fs'
4
- import { spawn } from 'child_process'
5
- import { core } from '../core'
6
-
7
- export const waitForServer = async (): Promise<void> => {
8
- await waitOn({
9
- resources: [`http-get://localhost:${serverPort}`],
10
- timeout: 5000
11
- })
12
- }
13
-
14
- export const exportVariable = (name: string, value: string): void => {
15
- core.exportVariable(name, value)
16
- core.log(` ${name}=${value}`)
17
- }
18
-
19
- export async function launchServer(devRun?: boolean): Promise<void> {
20
- if (!devRun) {
21
- //* Launch a detached child process to run the server
22
- // See: https://nodejs.org/docs/latest-v16.x/api/child_process.html#optionsdetached
23
- const out = openSync(serverLogFile, 'a')
24
- const err = openSync(serverLogFile, 'a')
25
- const child = spawn(process.argv[0], [process.argv[1], '--server'], {
26
- detached: true,
27
- stdio: ['ignore', out, err]
28
- })
29
- child.unref()
30
- core.log(`Cache version: ${cachePath}`)
31
- core.log(`Cache prefix: ${cachePrefix}`)
32
- core.log(`Launched child process: ${child.pid}`)
33
- core.log(`Server log file: ${serverLogFile}`)
34
- }
35
-
36
- //* Wait for server
37
- await waitForServer()
38
- core.info(`Server is now up and running.`)
39
-
40
- //* Export the environment variables for Turbo
41
- if (devRun) {
42
- console.log('Execute:')
43
- console.log(`export TURBOGHA_PORT=${serverPort}`)
44
- console.log(`export TURBO_API=http://localhost:${serverPort}`)
45
- console.log(`export TURBO_TOKEN=turbogha`)
46
- console.log(`export TURBO_TEAM=turbogha`)
47
- } else {
48
- if (core.isCI) {
49
- core.info('The following environment variables are exported:')
50
- } else {
51
- core.info(
52
- 'You need to use the following environment variables for turbo to work:'
53
- )
54
- }
55
- exportVariable('TURBOGHA_PORT', `${serverPort}`)
56
- exportVariable('TURBO_API', `http://localhost:${serverPort}`)
57
- exportVariable('TURBO_TOKEN', 'turbogha')
58
- exportVariable('TURBO_TEAM', 'turbogha')
59
- }
60
- }
61
-
62
- export async function killServer() {
63
- //* Kill the server
64
- await fetch(`http://localhost:${serverPort}/shutdown`, {
65
- method: 'DELETE'
66
- })
67
- }
68
-
69
- export const parseFileSize = (size: string): number => {
70
- const units: { [key: string]: number } = {
71
- b: 1,
72
- kb: 1024,
73
- mb: 1024 * 1024,
74
- gb: 1024 * 1024 * 1024,
75
- tb: 1024 * 1024 * 1024 * 1024
76
- }
77
-
78
- const match = size.toLowerCase().match(/^(\d+)\s*([a-z]+)$/)
79
- if (!match) {
80
- throw new Error(`Invalid file size format: ${size}`)
81
- }
82
-
83
- const [, value, unit] = match
84
- const multiplier = units[unit]
85
-
86
- if (!multiplier) {
87
- throw new Error(`Invalid file size unit: ${unit}`)
88
- }
89
-
90
- return parseInt(value) * multiplier
91
- }
package/src/main.ts DELETED
@@ -1,21 +0,0 @@
1
- import { core } from './lib/core'
2
- import { server } from './lib/server'
3
- import { launchServer } from './lib/server/utils'
4
-
5
- /**
6
- * The main function for the action.
7
- * @returns {Promise<void>} Resolves when the action is complete.
8
- */
9
- export async function run(): Promise<void> {
10
- try {
11
- //* Daemon process
12
- if (process.argv[2] === '--server') {
13
- return server()
14
- }
15
- //* Base process
16
- return launchServer()
17
- } catch (error) {
18
- // Fail the workflow run if an error occurs
19
- if (error instanceof Error) core.setFailed(error.message)
20
- }
21
- }
package/src/post.ts DELETED
@@ -1,26 +0,0 @@
1
- import { readFile } from 'fs/promises'
2
- import { serverLogFile, serverPort } from './lib/constants'
3
- import { core } from './lib/core'
4
-
5
- /**
6
- * The out script for the action.
7
- */
8
- async function post(): Promise<void> {
9
- try {
10
- //* Kill the server
11
- await fetch(`http://localhost:${serverPort}/shutdown`, {
12
- method: 'DELETE'
13
- })
14
-
15
- //* Read the logs
16
- const logs = await readFile(serverLogFile, 'utf-8')
17
- //* Print the logs
18
- core.info(logs)
19
- } catch (error) {
20
- // Fail the workflow run if an error occurs
21
- if (error instanceof Error) core.setFailed(error.message)
22
- }
23
- }
24
-
25
- // Run the out script
26
- post()
package/tsconfig.json DELETED
@@ -1,19 +0,0 @@
1
- {
2
- "$schema": "https://json.schemastore.org/tsconfig",
3
- "compilerOptions": {
4
- "target": "ES2022",
5
- "module": "ESNext",
6
- "rootDir": "./src",
7
- "moduleResolution": "bundler",
8
- "baseUrl": "./",
9
- "sourceMap": true,
10
- "outDir": "./dist",
11
- "noImplicitAny": true,
12
- "esModuleInterop": true,
13
- "forceConsistentCasingInFileNames": true,
14
- "strict": true,
15
- "skipLibCheck": true,
16
- "newLine": "lf"
17
- },
18
- "exclude": ["./dist", "./node_modules", "./__tests__", "./coverage"]
19
- }
package/turbo.json DELETED
@@ -1,9 +0,0 @@
1
- {
2
- "$schema": "https://turbo.build/schema.json",
3
- "tasks": {
4
- "test-build": {
5
- "inputs": ["src"],
6
- "outputs": ["dist"]
7
- }
8
- }
9
- }