houdini-core 2.0.0-go.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json ADDED
@@ -0,0 +1,24 @@
1
+ {
2
+ "name": "houdini-core",
3
+ "version": "2.0.0-go.0",
4
+ "scripts": {
5
+ "compile": "scripts build-go",
6
+ "typedefs": "scripts typedefs --go-package",
7
+ "postinstall": "node postInstall.js"
8
+ },
9
+ "devDependencies": {},
10
+ "dependencies": {
11
+ "@whatwg-node/server": "^0.9.14",
12
+ "graphql-yoga": "^4.0.4",
13
+ "minimatch": "^5.1.0"
14
+ },
15
+ "optionalDependencies": {
16
+ "houdini-core-darwin-amd64": "2.0.0-go.0",
17
+ "houdini-core-darwin-arm64": "2.0.0-go.0",
18
+ "houdini-core-linux-amd64": "2.0.0-go.0",
19
+ "houdini-core-linux-arm64": "2.0.0-go.0",
20
+ "houdini-core-windows-amd64": "2.0.0-go.0",
21
+ "houdini-core-windows-arm64": "2.0.0-go.0"
22
+ },
23
+ "bin": "./shim.cjs"
24
+ }
package/postInstall.js ADDED
@@ -0,0 +1,117 @@
1
+ const fs = require('fs')
2
+ const path = require('path')
3
+ const zlib = require('zlib')
4
+ const https = require('https')
5
+
6
// Adjust the version you want to install. You can also make this dynamic.
const BINARY_DISTRIBUTION_VERSION = '2.0.0-go.0'

// Windows binaries end with .exe so we need to special case them.
const binaryName = process.platform === 'win32' ? 'houdini-core.exe' : 'houdini-core'

// The platform packages listed in optionalDependencies are published under
// Go-style names (houdini-core-windows-amd64, houdini-core-linux-amd64, ...),
// but node reports 'win32' (not 'windows') and 'x64' (not 'amd64'). Without
// this translation the computed package name never matches a published
// package, so both the optionalDependency lookup and the registry fallback
// download fail. Map node's values to the published naming scheme.
const PLATFORM_NAMES = {
	darwin: 'darwin',
	linux: 'linux',
	win32: 'windows',
}
const ARCH_NAMES = {
	x64: 'amd64',
	arm64: 'arm64',
}

// Determine package name for this platform.
// null when the platform/arch combination has no published binary package —
// which is what the later `!platformSpecificPackageName` guard checks for
// (previously the template literal always produced a truthy string, making
// that guard dead code).
const platformSpecificPackageName =
	PLATFORM_NAMES[process.platform] && ARCH_NAMES[process.arch]
		? `houdini-core-${PLATFORM_NAMES[process.platform]}-${ARCH_NAMES[process.arch]}`
		: null
14
+
15
// Compute the path we want to emit the fallback binary to (a sibling of this
// file; downloadBinaryFromNpm writes the extracted binary here when the
// platform-specific optionalDependency was not installed)
const fallbackBinaryPath = path.join(__dirname, binaryName)
17
+
18
// Perform a GET request against `url` and resolve with the full response body
// as a Buffer. 3xx responses with a Location header are followed recursively;
// any other non-2xx status rejects the promise.
function makeRequest(url) {
	return new Promise((resolve, reject) => {
		const request = https.get(url, (response) => {
			const status = response.statusCode
			if (status >= 200 && status < 300) {
				// buffer the whole body before resolving
				const parts = []
				response.on('data', (part) => parts.push(part))
				response.on('end', () => resolve(Buffer.concat(parts)))
			} else if (status >= 300 && status < 400 && response.headers.location) {
				// Follow redirects
				makeRequest(response.headers.location).then(resolve, reject)
			} else {
				reject(
					new Error(
						`npm responded with status code ${status} when downloading the package!`
					)
				)
			}
		})
		request.on('error', (error) => {
			reject(error)
		})
	})
}
48
+
49
// Extract a single file's contents from an (uncompressed) tarball buffer.
// Returns a Buffer with the file's bytes, or undefined when `filepath` is not
// present in the archive.
//
// Tar archives are organized in 512 byte blocks.
// Blocks can either be header blocks or data blocks.
// Header blocks contain file names of the archive in the first 100 bytes, terminated by a null byte.
// The size of a file is contained in bytes 124-135 of a header block and in octal format.
// The following blocks will be data blocks containing the file.
function extractFileFromTarball(tarballBuffer, filepath) {
	let offset = 0
	while (offset < tarballBuffer.length) {
		const header = tarballBuffer.subarray(offset, offset + 512)
		offset += 512

		const fileName = header.toString('utf-8', 0, 100).replace(/\0.*/g, '')
		const fileSize = parseInt(header.toString('utf-8', 124, 136).replace(/\0.*/g, ''), 8)

		// A zero-filled block (empty name, unparseable size) marks the end of
		// the archive. The original code kept scanning here: parseInt('') is
		// NaN and `(offset + NaN + 511) & ~511` evaluates to 0, resetting the
		// offset and looping forever whenever `filepath` wasn't found.
		if (fileName === '' || Number.isNaN(fileSize)) {
			break
		}

		if (fileName === filepath) {
			return tarballBuffer.subarray(offset, offset + fileSize)
		}

		// Clamp offset to the upper multiple of 512
		offset = (offset + fileSize + 511) & ~511
	}
}
71
+
72
// Download the platform-specific package's tarball straight from the npm
// registry, pull the binary out of it and write it next to this file as an
// executable fallback (used when the optionalDependency was not installed).
async function downloadBinaryFromNpm() {
	// Download the tarball of the right binary distribution package
	const tarballUrl = `https://registry.npmjs.org/${platformSpecificPackageName}/-/${platformSpecificPackageName}-${BINARY_DISTRIBUTION_VERSION}.tgz`
	const tarballDownloadBuffer = await makeRequest(tarballUrl)

	// the tarball is gzip-compressed
	const tarballBuffer = zlib.unzipSync(tarballDownloadBuffer)

	// Extract binary from package and write to disk
	const binary = extractFileFromTarball(tarballBuffer, `package/bin/${binaryName}`)
	fs.writeFileSync(fallbackBinaryPath, binary, {
		mode: 0o755, // Make binary file executable
	})
}
87
+
88
// Returns true when the platform-specific optionalDependency (and therefore
// its bundled binary) is present in node_modules.
function isPlatformSpecificPackageInstalled() {
	// Resolving will fail if the optionalDependency was not installed
	try {
		require.resolve(`${platformSpecificPackageName}/bin/${binaryName}`)
	} catch (error) {
		return false
	}
	return true
}
97
+
98
// bail out early when there is no published binary for this platform/arch
if (!platformSpecificPackageName) {
	throw new Error('Platform not supported!')
}

// once we've confirmed where the binary lives we overwrite the bin entry of
// our package.json to point at it
function overwriteBinary(binPath) {
	const packageJsonPath = path.join(__dirname, 'package.json')
	const packageJson = require(packageJsonPath)
	packageJson.bin = binPath
	fs.writeFileSync(packageJsonPath, JSON.stringify(packageJson, null, 2))
}

// Skip downloading the binary if it was already installed via optionalDependencies
if (!isPlatformSpecificPackageInstalled()) {
	console.log('Platform specific package not found. Will manually download binary.')
	downloadBinaryFromNpm()
		// The downloaded fallback binary is written next to this package.json
		// (fallbackBinaryPath), not inside the missing platform package — the
		// original always pointed bin at the platform package here, which
		// doesn't exist on this code path.
		.then(() => overwriteBinary(`./${binaryName}`))
		// Don't leave the promise floating: a failed download previously
		// surfaced only as an unhandled rejection while the install succeeded.
		.catch((error) => {
			console.error('Failed to download the houdini-core binary:', error)
			process.exit(1)
		})
} else {
	overwriteBinary(path.join('..', platformSpecificPackageName, 'bin', binaryName))
}
@@ -0,0 +1,5 @@
1
+ import { Cache } from 'houdini/runtime/cache'
2
+
3
+ import { getCurrentConfig } from './config'
4
+
5
// the single shared cache instance for the runtime, built from the resolved config
export default new Cache(getCurrentConfig())
@@ -0,0 +1,181 @@
1
+ import type { Cache } from 'houdini/runtime/cache'
2
+ import { HoudiniClient as BaseClient, type ObserveParams } from 'houdini/runtime/client'
3
+ import { createPluginHooks } from 'houdini/runtime/client'
4
+ import { DocumentStore, type ClientPlugin } from 'houdini/runtime/documentStore'
5
+ import { flatten } from 'houdini/runtime/flatten'
6
+ import type {
7
+ NestedList,
8
+ DocumentArtifact,
9
+ GraphQLObject,
10
+ GraphQLVariables,
11
+ } from 'houdini/runtime/types'
12
+
13
+ import cacheRef from './cache'
14
+ import { getCurrentConfig, localApiEndpoint } from './config'
15
+ import type { FetchParamFn, ThrowOnErrorParams } from './plugins'
16
+ import {
17
+ fetch as fetchPlugin,
18
+ fetchParams as fetchParamsPlugin,
19
+ fragment as fragmentPlugin,
20
+ mutation as mutationPlugin,
21
+ query as queryPlugin,
22
+ throwOnError as throwOnErrorPlugin,
23
+ optimisticKeys,
24
+ cachePolicy,
25
+ } from './plugins'
26
+ import pluginsFromPlugins from './plugins/injectedPlugins'
27
+
28
+ // export the plugin constructors
29
+ export { fetch, mutation, query, subscription } from './plugins'
30
+ export { DocumentStore, type ClientPlugin, type SendParams } from 'houdini/runtime/documentStore'
31
+
32
export type HoudiniClientConstructorArgs = {
	/** endpoint requests are sent to; when omitted it is derived from the local api endpoint */
	url?: string
	/** hook that computes the fetch parameters for every request */
	fetchParams?: FetchParamFn
	/** extra plugins added to the standard pipeline (mutually exclusive with `pipeline`) */
	plugins?: NestedList<ClientPlugin>
	/** full replacement for the plugin pipeline (mutually exclusive with `plugins`) */
	pipeline?: NestedList<ClientPlugin>
	/** configures which operations throw on error */
	throwOnError?: ThrowOnErrorParams
	/** cache instance backing the cache plugins (defaults to the module-level cache) */
	cache?: Cache
}
40
+
41
/**
 * Application-facing Houdini client. Extends the runtime's base client by
 * assembling the standard plugin pipeline (throwOnError, fetchParams, the
 * cache-backed query/mutation/fragment/optimisticKeys plugins, plugin-provided
 * middlewares and the fetch plugin) and by overriding `observe` so that each
 * document store gets its own cachePolicy plugin when caching is enabled.
 */
export class HoudiniClient extends BaseClient {
	// this is modified by page entries when they load in order to register the components source
	componentCache: Record<string, any> = {}

	// store cache configuration for use in document stores
	private _cache?: Cache
	private _enableCache: boolean = false

	// store throwOnError operations for access by stores
	throwOnError_operations: string[] = []

	/**
	 * @throws Error when both `plugins` and `pipeline` are provided — the two
	 *   options are mutually exclusive.
	 */
	constructor({
		url,
		fetchParams,
		plugins,
		pipeline,
		throwOnError,
		cache = cacheRef,
	}: HoudiniClientConstructorArgs = {}) {
		// if we were given plugins and pipeline there's an error
		if (plugins && pipeline) {
			throw new Error(
				'A client cannot be given a pipeline and a list of plugins at the same time.'
			)
		}

		// port of the local dev server, overridable via the HOUDINI_PORT env var
		let serverPort = globalThis.process?.env?.HOUDINI_PORT ?? '5173'

		super({
			config: getCurrentConfig,
			// in the browser the url is relative (empty host); on the server it
			// targets the local dev server.
			// NOTE(review): the server-side URL uses https for localhost — confirm
			// the dev server actually serves TLS on HOUDINI_PORT
			url:
				url ??
				(globalThis.window ? '' : `https://localhost:${serverPort}`) +
					localApiEndpoint(getCurrentConfig()),
			plugins: flatten(
				([] as NestedList<ClientPlugin>).concat(
					// if they specified a throw behavior
					throwOnError ? [throwOnErrorPlugin(throwOnError)] : [],
					fetchParamsPlugin(fetchParams),
					// if the user wants to specify the entire pipeline, let them do so
					pipeline ??
						// the user doesn't have a specific pipeline so we should just add their desired plugins
						// to the standard set
						(
							[
								optimisticKeys(cache ?? cacheRef),
								// make sure that documents always work
								queryPlugin(cache ?? cacheRef),
								mutationPlugin(cache ?? cacheRef),
								fragmentPlugin(cache ?? cacheRef),
							] as NestedList<ClientPlugin>
						).concat(
							// add the specified middlewares
							plugins ?? [],
							// and any middlewares we got from plugins
							pluginsFromPlugins,
							// if they provided a fetch function, use it as the body for the fetch middleware
							fetchPlugin()
						)
				)
			),
		})

		// Set cache properties after super call
		this._cache = cache
		this._enableCache = !!cache

		// Set throwOnError operations for access by stores
		this.throwOnError_operations = throwOnError?.operations ?? []
	}

	/**
	 * Create a DocumentStore for the given artifact.
	 *
	 * When this client has a cache and `enableCache` is true, a cachePolicy
	 * plugin is prepended whose `setFetching` callback updates the store that
	 * is created a few lines later — the `storeRef` variable bridges that
	 * chicken-and-egg dependency (it is assigned after construction, and the
	 * callback guards on it being set).
	 */
	// Override observe to properly handle cachePolicy plugin
	observe<_Data extends GraphQLObject, _Input extends GraphQLVariables | undefined>({
		enableCache = true,
		fetching = false,
		...rest
	}: ObserveParams<_Data, DocumentArtifact, _Input>): DocumentStore<_Data, _Input> {
		// Create plugins with cachePolicy if cache is enabled
		const plugins: ClientPlugin[] = []

		// Add cachePolicy first if cache is enabled
		if (this._enableCache && enableCache) {
			// We need to create a placeholder for the setFetching callback
			// that will be set after the store is created
			let storeRef: DocumentStore<_Data, _Input> | null = null

			plugins.push(
				cachePolicy({
					cache: this._cache,
					enabled: true,
					setFetching: (fetching, data) => {
						if (storeRef) {
							storeRef.update((state) => {
								const newState = { ...state, fetching }

								// when we set the fetching state to true, we should also generate the appropriate
								// loading state for the document
								if (fetching && data) {
									newState.data = data
								}

								return newState
							})
						}
					},
				})
			)

			// Create the document store with the plugins
			// (drop any null / non-function entries from the client's plugin list)
			const clientPlugins = (this.plugins as ClientPlugin[]).filter(
				(p): p is ClientPlugin => p !== null && typeof p === 'function'
			)
			const store = new DocumentStore<_Data, _Input>({
				client: this,
				plugins: createPluginHooks([...plugins, ...clientPlugins]),
				fetching,
				enableCache,
				config: this.config,
				...rest,
			})

			// Set the store reference for the setFetching callback
			storeRef = store

			return store
		} else {
			// No cache, use the base implementation
			// (drop any null / non-function entries from the client's plugin list)
			const clientPlugins = (this.plugins as ClientPlugin[]).filter(
				(p): p is ClientPlugin => p !== null && typeof p === 'function'
			)
			return new DocumentStore<_Data, _Input>({
				client: this,
				plugins: createPluginHooks(clientPlugins),
				fetching,
				enableCache,
				config: this.config,
				...rest,
			})
		}
	}
}
@@ -0,0 +1,79 @@
1
+ import type { ConfigFile } from 'houdini'
2
+
3
+ import config from './imports/config'
4
+ import pluginConfigs from './imports/pluginConfig'
5
+
6
+ let mockConfig: ConfigFile | null = null
7
+
8
+ export function getMockConfig() {
9
+ return mockConfig
10
+ }
11
+
12
+ export function setMockConfig(config: ConfigFile | null) {
13
+ mockConfig = config
14
+ }
15
+
16
+ export function defaultConfigValues(file: ConfigFile): ConfigFile {
17
+ return {
18
+ defaultKeys: ['id'],
19
+ ...file,
20
+ types: {
21
+ Node: {
22
+ keys: ['id'],
23
+ resolve: {
24
+ queryField: 'node',
25
+ arguments: (node) => ({ id: node.id }),
26
+ },
27
+ },
28
+ ...file.types,
29
+ },
30
+ }
31
+ }
32
+
33
+ export function keyFieldsForType(configFile: ConfigFile, type: string) {
34
+ const withDefault = defaultConfigValues(configFile)
35
+ return withDefault.types?.[type]?.keys || withDefault.defaultKeys!
36
+ }
37
+
38
+ export function computeID(configFile: ConfigFile, type: string, data: any): string {
39
+ const fields = keyFieldsForType(configFile, type)
40
+ let id = ''
41
+
42
+ for (const field of fields) {
43
+ id += data[field] + '__'
44
+ }
45
+
46
+ return id.slice(0, -2)
47
+ }
48
+
49
// only compute the config file once — getCurrentConfig memoizes its result here
let _configFile: ConfigFile | null = null
51
+
52
+ export function localApiEndpoint(configFile: ConfigFile) {
53
+ // @ts-ignore
54
+ return configFile.router?.apiEndpoint ?? '/_api'
55
+ }
56
+
57
+ export function getCurrentConfig(): ConfigFile {
58
+ const mockConfig = getMockConfig()
59
+ if (mockConfig) {
60
+ return mockConfig
61
+ }
62
+
63
+ if (_configFile) {
64
+ return _configFile
65
+ }
66
+
67
+ // we have to compute the config file. start with the default values
68
+ // iterate over every plugin config value and merge the result
69
+ let configFile = defaultConfigValues(config)
70
+ for (const pluginConfig of pluginConfigs) {
71
+ configFile = pluginConfig(configFile)
72
+ }
73
+
74
+ // save the result for later
75
+ _configFile = configFile
76
+
77
+ // we're done
78
+ return configFile
79
+ }
@@ -0,0 +1 @@
1
+ export type { CacheTypeDef } from 'houdini/runtime/types'
@@ -0,0 +1,3 @@
1
+ import type { ConfigFile } from 'houdini'
2
+
3
// empty stub cast to ConfigFile — NOTE(review): presumably replaced/overwritten
// by codegen with the project's real config file; confirm against the generator
export default {} as ConfigFile
@@ -0,0 +1,5 @@
1
+ import type { ConfigFile } from 'houdini'
2
+
3
// ordered list of plugin-provided config transforms; getCurrentConfig applies
// each one to the resolved config in sequence.
// NOTE(review): empty stub here — appears to be populated by codegen; confirm
const configs: ((old: ConfigFile) => ConfigFile)[] = []

export default configs
@@ -0,0 +1,38 @@
1
+ import type { Cache as InternalCache } from 'houdini/runtime/cache'
2
+
3
+ import _cache from './cache'
4
+ import type { CacheTypeDef } from './generated'
5
+ import { Cache } from './public'
6
+
7
+ export { CachePolicy, PendingValue } from 'houdini/runtime'
8
+
9
+ export * from './client'
10
+ export { getCurrentConfig } from './config'
11
+
12
+ // this template tag gets removed by the preprocessor so it should never be invoked.
13
+ // this function must return any so that we can assign it a type in a variable declaration (ie an inline store)
14
+ // ideally we would be able to parse the input for values but typescript does not yet support that kind of matches in template args
15
+ export function graphql<_Payload, _Result = _Payload>(str: string): _Result
16
+ export function graphql(str: string): never {
17
+ // if we are executing this function as part of the plugin, we need to return
18
+ // the query instead of throwing an error. We don't want to bundle the graphql
19
+ // module into the runtime so all we can do is return the query string
20
+ if (globalThis?.process?.env?.HOUDINI_PLUGIN) {
21
+ // @ts-ignore: this is a totally internal/hidden value. user will never see it and we won't
22
+ // and ever get a typed value of this since it's only used in the result of a dynamic
23
+ // import from the plugin which gives Record<string, any>
24
+ return str
25
+ }
26
+
27
+ // if this is executed, the preprocessor is not enabled
28
+ throw new Error(`! graphql template was invoked at runtime. This should never happen and usually means that your project isn't properly configured.
29
+
30
+ Please make sure you have the appropriate plugin/preprocessor enabled. For more information, visit this link: https://houdinigraphql.com/guides/setting-up-your-project
31
+ `)
32
+ }
33
+
34
// the public, typed wrapper around the internal cache instance
export const cache = new Cache<CacheTypeDef>(_cache)

// escape hatch: direct access to the underlying internal cache
export function getCache(): InternalCache {
	return _cache
}
@@ -0,0 +1 @@
1
+ {"type":"module"}
@@ -0,0 +1,178 @@
1
+ import { Cache } from 'houdini/runtime/cache'
2
+ import type { ClientPlugin } from 'houdini/runtime/documentStore'
3
+ import { ArtifactKind, CachePolicy, DataSource, type GraphQLObject } from 'houdini/runtime/types'
4
+
5
+ import cache from '../cache'
6
+
7
// true when we're running outside of a browser (no window global)
const serverSide = typeof globalThis.window === 'undefined'

/**
 * Client plugin that enforces a document's cache policy.
 *
 * beforeNetwork: for queries/fragments, try to satisfy the request from the
 * cache according to the policy (CacheOnly / NetworkOnly / NoCache / partial
 * and stale rules) and only fall through to the network when needed.
 *
 * afterNetwork: write fresh network data into the cache and read it back so
 * fragment-context embedding and masking transforms are applied to the value.
 *
 * @param enabled - when false, cache reads/writes are skipped entirely
 * @param setFetching - callback that drives the store's fetching/loading state
 * @param cache - cache instance to read/write (defaults to the module cache)
 * @param serverSideFallback - on the server, write into a fresh throwaway
 *   Cache instead of the shared one; can be overridden per-request through
 *   ctx.cacheParams.serverSideFallback
 */
export const cachePolicy =
	({
		enabled,
		setFetching,
		cache: localCache = cache,
		serverSideFallback = true,
	}: {
		enabled: boolean
		setFetching: (val: boolean, data?: any) => void
		cache?: Cache
		serverSideFallback?: boolean
	}): ClientPlugin =>
	() => {
		return {
			beforeNetwork(ctx, { initialValue, next, resolve, marshalVariables }) {
				const { policy, artifact } = ctx
				// tracks whether we resolved this request with a cached value
				let useCache = false
				// enforce cache policies for queries
				if (
					enabled &&
					(artifact.kind === ArtifactKind.Query ||
						artifact.kind === ArtifactKind.Fragment) &&
					!ctx.cacheParams?.disableRead
				) {
					// this function is called as the first step in requesting data. If the policy prefers
					// cached data, we need to load data from the cache (if its available). If the policy
					// prefers network data we need to send a request (the onLoad of the component will
					// resolve the next data)

					// if the cache policy allows for cached data, look at the cache's value first
					const policyAllowsCache =
						policy !== CachePolicy.NetworkOnly && policy !== CachePolicy.NoCache
					if (policyAllowsCache) {
						// look up the current value in the cache
						const value = localCache.read({
							selection: artifact.selection,
							variables: marshalVariables(ctx),
							fullCheck: true,
						})

						// we can only use the result if its not a partial result
						const allowed =
							!value.partial ||
							// or the artifact allows for partial responses
							(artifact.kind === ArtifactKind.Query && artifact.partial)

						// if the policy is cacheOnly and we got this far, we need to return null (no network request will be sent)
						if (policy === CachePolicy.CacheOnly) {
							return resolve(ctx, {
								fetching: false,
								variables: ctx.variables ?? null,
								data: allowed ? value.data : initialValue.data,
								errors: null,
								source: DataSource.Cache,
								partial: allowed ? value.partial : false,
								stale: value.stale,
							})
						}

						// if we have data, use that unless it's partial data and we don't allow that
						useCache = !!(value.data !== null && allowed)

						if (useCache) {
							resolve(ctx, {
								fetching: false,
								variables: ctx.variables ?? null,
								data: value.data,
								errors: null,
								source: DataSource.Cache,
								partial: value.partial,
								stale: value.stale,
							})
						}

						// if we used the cache data and there's no followup necessary, we're done
						if (
							useCache &&
							!value.partial &&
							!value.stale &&
							// if the policy is CacheAndNetwork then we don't want to stop here regardless
							ctx.policy !== 'CacheAndNetwork'
						) {
							return
						}
					}
				}

				// we're not using the cached data which means there will be a network request
				// tick the garbage collector asynchronously
				if (enabled) {
					setTimeout(() => {
						localCache._internal_unstable.collectGarbage()
					}, 0)
				}

				// if we got this far, we are resolving something against the network
				// don't set the fetching state to true if we accepted a cache value
				if (!ctx.stuff?.silenceLoading) {
					// don't set the fetching state to true if we accepted a cache value
					let fetchingState: GraphQLObject | null = null
					if (
						!useCache &&
						'enableLoadingState' in artifact &&
						artifact.enableLoadingState
					) {
						// read the artifact's generated loading state so the store can
						// show it while the network request is in flight
						fetchingState = localCache.read({
							selection: artifact.selection,
							variables: marshalVariables(ctx),
							loading: true,
						}).data
					}
					setFetching(!useCache, fetchingState)
				}
				// move on
				return next(ctx)
			},
			afterNetwork(ctx, { resolve, value, marshalVariables }) {
				// if we have data coming in from the cache, we should write it and move on
				if (
					ctx.policy !== CachePolicy.NoCache &&
					value.source !== DataSource.Cache &&
					enabled &&
					value.data &&
					!ctx.cacheParams?.disableWrite
				) {
					// if the cache params specify a fallback behavior, use that
					if (ctx.cacheParams && 'serverSideFallback' in ctx.cacheParams) {
						serverSideFallback =
							ctx.cacheParams?.serverSideFallback ?? serverSideFallback
					}

					// on the server (with the fallback enabled) write into a fresh,
					// request-local cache instead of the shared module cache
					const targetCache =
						serverSide && serverSideFallback
							? new Cache({ disabled: false, ...ctx.config })
							: localCache

					layer is only honored in the browser
					let layer
					if (!serverSide && ctx.cacheParams?.layer) {
						layer = ctx.cacheParams.layer.id
					}

					// write the result of the mutation to the cache
					targetCache.write({
						...ctx.cacheParams,
						layer,
						selection: ctx.artifact.selection,
						data: value.data,
						variables: marshalVariables(ctx),
					})

					// we need to embed the fragment context values in our response
					// and apply masking other value transforms. In order to do that,
					// we're going to read back what we just wrote. This only incurs
					// extra computation on the server-side since we have to write the values
					// before we can read them (instead of just transforming the value directly)
					value = {
						...value,
						data: targetCache.read({
							selection: ctx.artifact.selection,
							variables: marshalVariables(ctx),
							ignoreMasking: serverSide,
						}).data,
					}
				}

				// we're done. don't change the result value
				resolve(ctx, value)
			},
		}
	}