@tinacms/graphql 1.5.16 → 1.5.18

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -3019,7 +3019,7 @@ var validateField = async (field) => {
3019
3019
  // package.json
3020
3020
  var package_default = {
3021
3021
  name: "@tinacms/graphql",
3022
- version: "1.5.16",
3022
+ version: "1.5.18",
3023
3023
  main: "dist/index.js",
3024
3024
  module: "dist/index.mjs",
3025
3025
  typings: "dist/index.d.ts",
@@ -3045,7 +3045,6 @@ var package_default = {
3045
3045
  types: "pnpm tsc",
3046
3046
  build: "tinacms-scripts build",
3047
3047
  docs: "pnpm typedoc",
3048
- serve: "pnpm nodemon dist/server.js",
3049
3048
  test: "vitest run",
3050
3049
  "test-watch": "vitest"
3051
3050
  },
@@ -3100,7 +3099,6 @@ var package_default = {
3100
3099
  "@types/yup": "^0.29.14",
3101
3100
  "jest-file-snapshot": "^0.5.0",
3102
3101
  "memory-level": "^1.0.0",
3103
- nodemon: "3.1.4",
3104
3102
  typescript: "^5.7.3",
3105
3103
  vite: "^4.5.9",
3106
3104
  vitest: "^0.32.4",
@@ -6986,13 +6984,14 @@ var Database = class {
6986
6984
  documentPaths,
6987
6985
  async (collection, documentPaths2) => {
6988
6986
  if (collection && !collection.isDetached) {
6989
- await _indexContent(
6990
- this,
6991
- this.contentLevel,
6992
- documentPaths2,
6987
+ await _indexContent({
6988
+ database: this,
6989
+ level: this.contentLevel,
6990
+ documentPaths: documentPaths2,
6993
6991
  enqueueOps,
6994
- collection
6995
- );
6992
+ collection,
6993
+ isPartialReindex: true
6994
+ });
6996
6995
  }
6997
6996
  }
6998
6997
  );
@@ -7099,26 +7098,26 @@ var Database = class {
7099
7098
  );
7100
7099
  const doc = await level2.keys({ limit: 1 }).next();
7101
7100
  if (!doc) {
7102
- await _indexContent(
7103
- this,
7104
- level2,
7105
- contentPaths,
7101
+ await _indexContent({
7102
+ database: this,
7103
+ level: level2,
7104
+ documentPaths: contentPaths,
7106
7105
  enqueueOps,
7107
7106
  collection,
7108
- userFields.map((field) => [
7107
+ passwordFields: userFields.map((field) => [
7109
7108
  ...field.path,
7110
7109
  field.passwordFieldName
7111
7110
  ])
7112
- );
7111
+ });
7113
7112
  }
7114
7113
  } else {
7115
- await _indexContent(
7116
- this,
7114
+ await _indexContent({
7115
+ database: this,
7117
7116
  level,
7118
- contentPaths,
7117
+ documentPaths: contentPaths,
7119
7118
  enqueueOps,
7120
7119
  collection
7121
- );
7120
+ });
7122
7121
  }
7123
7122
  }
7124
7123
  );
@@ -7257,7 +7256,15 @@ var hashPasswordValues = async (data, passwordFields) => Promise.all(
7257
7256
  )
7258
7257
  );
7259
7258
  var isGitKeep = (filepath, collection) => filepath.endsWith(`.gitkeep.${collection?.format || "md"}`);
7260
- var _indexContent = async (database, level, documentPaths, enqueueOps, collection, passwordFields) => {
7259
+ var _indexContent = async ({
7260
+ database,
7261
+ level,
7262
+ documentPaths,
7263
+ enqueueOps,
7264
+ collection,
7265
+ passwordFields,
7266
+ isPartialReindex
7267
+ }) => {
7261
7268
  let collectionIndexDefinitions;
7262
7269
  let collectionPath;
7263
7270
  if (collection) {
@@ -7298,40 +7305,42 @@ var _indexContent = async (database, level, documentPaths, enqueueOps, collectio
7298
7305
  normalizedPath,
7299
7306
  collectionPath || ""
7300
7307
  );
7301
- const item = await rootSublevel.get(normalizedPath);
7302
- if (item) {
7303
- await database.contentLevel.batch([
7304
- ...makeRefOpsForDocument(
7305
- normalizedPath,
7306
- collection?.name,
7307
- collectionReferences,
7308
- item,
7309
- "del",
7310
- level
7311
- ),
7312
- ...makeIndexOpsForDocument(
7313
- normalizedPath,
7314
- collection.name,
7315
- collectionIndexDefinitions,
7316
- item,
7317
- "del",
7318
- level
7319
- ),
7320
- // folder indices
7321
- ...makeIndexOpsForDocument(
7322
- normalizedPath,
7323
- `${collection.name}_${folderKey}`,
7324
- collectionIndexDefinitions,
7325
- item,
7326
- "del",
7327
- level
7328
- ),
7329
- {
7330
- type: "del",
7331
- key: normalizedPath,
7332
- sublevel: rootSublevel
7333
- }
7334
- ]);
7308
+ if (isPartialReindex) {
7309
+ const item = await rootSublevel.get(normalizedPath);
7310
+ if (item) {
7311
+ await database.contentLevel.batch([
7312
+ ...makeRefOpsForDocument(
7313
+ normalizedPath,
7314
+ collection?.name,
7315
+ collectionReferences,
7316
+ item,
7317
+ "del",
7318
+ level
7319
+ ),
7320
+ ...makeIndexOpsForDocument(
7321
+ normalizedPath,
7322
+ collection.name,
7323
+ collectionIndexDefinitions,
7324
+ item,
7325
+ "del",
7326
+ level
7327
+ ),
7328
+ // folder indices
7329
+ ...makeIndexOpsForDocument(
7330
+ normalizedPath,
7331
+ `${collection.name}_${folderKey}`,
7332
+ collectionIndexDefinitions,
7333
+ item,
7334
+ "del",
7335
+ level
7336
+ ),
7337
+ {
7338
+ type: "del",
7339
+ key: normalizedPath,
7340
+ sublevel: rootSublevel
7341
+ }
7342
+ ]);
7343
+ }
7335
7344
  }
7336
7345
  if (!isGitKeep(filepath, collection)) {
7337
7346
  await enqueueOps([
@@ -7567,8 +7576,8 @@ import path6 from "path";
7567
7576
  import normalize from "normalize-path";
7568
7577
  var FilesystemBridge = class {
7569
7578
  constructor(rootPath, outputPath) {
7570
- this.rootPath = rootPath || "";
7571
- this.outputPath = outputPath || rootPath;
7579
+ this.rootPath = path6.resolve(rootPath);
7580
+ this.outputPath = outputPath ? path6.resolve(outputPath) : this.rootPath;
7572
7581
  }
7573
7582
  async glob(pattern, extension) {
7574
7583
  const basePath = path6.join(this.outputPath, ...pattern.split("/"));
@@ -7580,19 +7589,19 @@ var FilesystemBridge = class {
7580
7589
  }
7581
7590
  );
7582
7591
  const posixRootPath = normalize(this.outputPath);
7583
- return items.map((item) => {
7584
- return item.replace(posixRootPath, "").replace(/^\/|\/$/g, "");
7585
- });
7592
+ return items.map(
7593
+ (item) => item.substring(posixRootPath.length).replace(/^\/|\/$/g, "")
7594
+ );
7586
7595
  }
7587
7596
  async delete(filepath) {
7588
7597
  await fs2.remove(path6.join(this.outputPath, filepath));
7589
7598
  }
7590
7599
  async get(filepath) {
7591
- return fs2.readFileSync(path6.join(this.outputPath, filepath)).toString();
7600
+ return (await fs2.readFile(path6.join(this.outputPath, filepath))).toString();
7592
7601
  }
7593
7602
  async put(filepath, data, basePathOverride) {
7594
7603
  const basePath = basePathOverride || this.outputPath;
7595
- await fs2.outputFileSync(path6.join(basePath, filepath), data);
7604
+ await fs2.outputFile(path6.join(basePath, filepath), data);
7596
7605
  }
7597
7606
  };
7598
7607
  var AuditFileSystemBridge = class extends FilesystemBridge {
@@ -74,10 +74,10 @@ export declare class Resolver {
74
74
  name: string;
75
75
  }[];
76
76
  }[];
77
- format?: "json" | "md" | "markdown" | "mdx" | "yaml" | "yml" | "toml";
77
+ format?: import("@tinacms/schema-tools").ContentFormat;
78
78
  ui?: import("@tinacms/schema-tools").UICollection;
79
79
  defaultItem?: import("@tinacms/schema-tools").DefaultItem<Record<string, any>>;
80
- frontmatterFormat?: "yaml" | "toml" | "json";
80
+ frontmatterFormat?: import("@tinacms/schema-tools").ContentFrontmatterFormat;
81
81
  frontmatterDelimiters?: [string, string] | string;
82
82
  match?: {
83
83
  include?: string;
@@ -102,10 +102,10 @@ export declare class Resolver {
102
102
  name: string;
103
103
  }[];
104
104
  }[];
105
- format?: "json" | "md" | "markdown" | "mdx" | "yaml" | "yml" | "toml";
105
+ format?: import("@tinacms/schema-tools").ContentFormat;
106
106
  ui?: import("@tinacms/schema-tools").UICollection;
107
107
  defaultItem?: import("@tinacms/schema-tools").DefaultItem<Record<string, any>>;
108
- frontmatterFormat?: "yaml" | "toml" | "json";
108
+ frontmatterFormat?: import("@tinacms/schema-tools").ContentFrontmatterFormat;
109
109
  frontmatterDelimiters?: [string, string] | string;
110
110
  match?: {
111
111
  include?: string;
@@ -1,6 +1,3 @@
1
- /**
2
-
3
- */
4
1
  import { TinaSchema, Schema } from '@tinacms/schema-tools';
5
2
  export declare const createSchema: ({ schema, flags, }: {
6
3
  schema: Schema;
@@ -1,6 +1,3 @@
1
- /**
2
-
3
- */
4
1
  import { type Schema, type Collection } from '@tinacms/schema-tools';
5
2
  export declare const validateSchema: (schema: Schema) => Promise<{
6
3
  collections: Collection<true>[];
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@tinacms/graphql",
3
- "version": "1.5.16",
3
+ "version": "1.5.18",
4
4
  "main": "dist/index.js",
5
5
  "module": "dist/index.mjs",
6
6
  "typings": "dist/index.d.ts",
@@ -44,8 +44,8 @@
44
44
  "readable-stream": "^4.7.0",
45
45
  "scmp": "^2.1.0",
46
46
  "yup": "^0.32.11",
47
- "@tinacms/mdx": "1.6.2",
48
- "@tinacms/schema-tools": "1.7.3"
47
+ "@tinacms/mdx": "1.6.3",
48
+ "@tinacms/schema-tools": "1.7.4"
49
49
  },
50
50
  "publishConfig": {
51
51
  "registry": "https://registry.npmjs.org"
@@ -71,19 +71,17 @@
71
71
  "@types/yup": "^0.29.14",
72
72
  "jest-file-snapshot": "^0.5.0",
73
73
  "memory-level": "^1.0.0",
74
- "nodemon": "3.1.4",
75
74
  "typescript": "^5.7.3",
76
75
  "vite": "^4.5.9",
77
76
  "vitest": "^0.32.4",
78
77
  "zod": "^3.24.2",
79
- "@tinacms/schema-tools": "1.7.3",
80
- "@tinacms/scripts": "1.3.4"
78
+ "@tinacms/schema-tools": "1.7.4",
79
+ "@tinacms/scripts": "1.3.5"
81
80
  },
82
81
  "scripts": {
83
82
  "types": "pnpm tsc",
84
83
  "build": "tinacms-scripts build",
85
84
  "docs": "pnpm typedoc",
86
- "serve": "pnpm nodemon dist/server.js",
87
85
  "test": "vitest run",
88
86
  "test-watch": "vitest"
89
87
  }
package/readme.md DELETED
@@ -1,194 +0,0 @@
1
- ## Getting started
2
-
3
- There's a serve and watch command, which are separate for now:
4
-
5
- ```sh
6
- // terminal 1
7
- cd packages/tina-graphql
8
- yarn watch
9
-
10
- // terminal 2
11
- cd packages/tina-graphql
12
- yarn serve
13
- ```
14
-
15
- You can consume this from the `graphiql` app:
16
-
17
- ```sh
18
- // terminal 3
19
- cd apps/graphiql
20
- yarn start
21
- ```
22
-
23
- Note that this app doesn't use anything from the `client` package right now, it's just an interactive tool to see how things move from the graphql server into tina. That process has been improved in this package but will need to be merged back into the `client` package before this is usable.
24
-
25
- ### Running queries
26
-
27
- By default the app will redirect to `project1` and display the default query generated from the `graphql-helpers` library - which consumes the fixtures from the `project1` folder in the `gql` package, any number of fixtures can be used if you want to add your own, just ensure the `server.ts` file knows about them.
28
-
29
- When you run the initial query, you should see the result along with the Tina sidebar toggle, this indicates that the Tina form has now been populated with the query values. If you change some values around and hit submit, the `onSubmit` function will populate the GraphiQL editor instead of sending it off to the server, you can play around with the mutation before sending it off if you'd like.
30
-
31
- ### Tests
32
-
33
- The most valuable test right now is the `builder.spec.ts`, it's sort of an integration of all the field-level builders. There are also field-level build tests, but not resolvers ones just yet. If you're making changes to the builder just run `yarn test-watch` and hit `p` to provide a pattern, then type "builder", this will isolate that test and if it's passing you probably didn't break anything.
34
-
35
- ## Architecture
36
-
37
- ### Builder
38
-
39
- The builder service is responsible for building out the entire GraphQL schema for a given `.tina` config. This service can run at any time (but needs to be re-run on each schema change) and its output is a GraphQL schema which can be stored in the schema definition language (SDL) as a string in a database record or as a `.graphql` file. At the top of the schema is a `document` query, this query returns the document, which can be one of any number of templates defined in the `.tina` config. From there, each field in the given template is used to build out the rest of the schema, so each template field is built by the `type` in its definition
40
-
41
- #### Field-level builders
42
-
43
- Field-level builders take a field definition and produce 4 different GraphQL types:
44
-
45
- ##### `field`
46
-
47
- Builds the type which fits into Tina's field definition shape:
48
-
49
- Given:
50
-
51
- ```yaml
52
- name: Title
53
- label: title
54
- type: text
55
- ```
56
-
57
- ```js
58
- text.build.field({ cache, field });
59
- ```
60
-
61
- Produces
62
-
63
- ```graphql
64
- type TextField {
65
- name: String
66
- label: String
67
- component: String
68
- description: String
69
- }
70
- ```
71
-
72
- ##### `initialValue`
73
-
74
- Tina fields need an initial value when editing existing data. This builder is responsible for providing the shape of that value.
75
-
76
- For most fields this is the same value as `value` - but if you picture the schema as a "graph" - you can see how the "value" of a document reference (ie. a Post has an Author) is not helpful to Tina. Tina only cares about the stored document value of the reference (in this case `/path/to/author.md`) so it's the `initialValue`'s role to provide what makes sense to Tina, regardless of the schema's relationships.
77
-
78
- ##### `value`
79
-
80
- The value of the field, it's the role of this function to provide the shape of the data we should expect for a fully resolved graph.
81
-
82
- For `block` fields, this looks like an array of different shapes, which means it's the `blocks.build.value` function's responsibility to return a `union` array.
83
-
84
- ##### `input`
85
-
86
- When a mutation is made, the shape of this mutation needs to fit the shape created by this function.
87
-
88
- ### Resolvers
89
-
90
- `resolvers` can be thought of as the runtime siblings to `builders`. While it's the job of builders to define the "graph", the resolvers are responsible for taking raw values (like those from a `.md` file) and shaping them so they fit the schema.
91
-
92
- #### Field-level resolvers
93
-
94
- Again, similar to field-level builders, most of the work for resolving the data is passed on to the appropriate field to handle. So if you have a document like so:
95
-
96
- ```md
97
- ---
98
- title: Hello, World!
99
- author: /authors/homer.md
100
- ---
101
- ```
102
-
103
- It's template definition might look like:
104
-
105
- ```yaml
106
- label: Post
107
- ---
108
- fields:
109
- - name: title
110
- label: Title
111
- type: text
112
- - name: author
113
- label: Author
114
- type: select
115
- config:
116
- source:
117
- type: pages
118
- section: authors
119
- ```
120
-
121
- The `text.resolver` object will be responsible for resolving the values related to `title`:
122
-
123
- ##### `field`
124
-
125
- The `field` resolver provides the appropriate values for its `field` builder counterpart. In the example above the `text.resolve.field` function would return:
126
-
127
- ```json
128
- {
129
- "name": "title",
130
- "label": "Title",
131
- "component": "text"
132
- }
133
- ```
134
-
135
- This would then be passed on to Tina for rendering on the client.
136
-
137
- ##### `initialValue`
138
-
139
- In the example above the `text.resolve.initialValue` would return "Hello, World!"
140
-
141
- For blocks we need to return the object along with a `_template` key, this is used downstream to disambiguate which template the value comes from.
142
-
143
- ##### `value`
144
-
145
- In the example above the `text.resolve.value` would return "Hello, World!", and again, for document references this would return the entire document being referenced, which may or may not be used depending on the graph fields requested
146
-
147
- ##### `input`
148
-
149
- Input resolvers don't do much (except in the case of blocks described later), since the GraphQL mutation payload has all the necessary information, we just pass the value into these resolvers as a runtime type-check. In the future, this is where field-level validations can take place.
150
-
151
- **Caveats with `blocks`**: `blocks` values are an array of unlike objects, meaning in order to enforce type-safe requests coming into the server, we need to use a somewhat awkward pattern ([read more about the trade-offs here](https://github.com/graphql/graphql-spec/blob/master/rfcs/InputUnion.md#-5-one-of-tagged-union)) which we sort of need to rearrange once it hits the server.
152
-
153
- ## Architecture Diagram
154
-
155
- <iframe style="border:none" width="700" height="350" src="https://whimsical.com/embed/Kh28ULaAYKPRpeCLm3VG63@2Ux7TurymMtzhxz2sLxX"></iframe>
156
-
157
- ## Caveats
158
-
159
- ### Why do we use `GraphQLUnion` instead of `GraphQLInterface` for fields?
160
-
161
- Since `component`, `label`, & `name` are common across all fields, we'd only use a fragment to gather what's unique to that field type, so field definitions using an interface would allow our queries to look like this:
162
-
163
- ```graphql
164
- fields {
165
- component
166
- label
167
- name
168
- ...on SelectField {
169
- options
170
- }
171
- }
172
- ```
173
-
174
- Instead, we use a union - which requires us to load each key inside its respective fragment:
175
-
176
- ```graphql
177
- fields {
178
- ... on TextareaField {
179
- name
180
- label
181
- component
182
- }
183
- ... on SelectField {
184
- name
185
- label
186
- component
187
- options
188
- }
189
- }
190
- ```
191
-
192
- A GraphQL interface allows you to define a heterogeneous set of types, which have some fields in common. This is a textbook usecase for interfaces, and it's something that could change in the future. But the current reason we're using unions is because unions are exhaustive, and they allow us to scope down the possible field types for a given set of fields.
193
-
194
- An interface would be too broad for our needs, a collection of fields should only contain the types which are possible for that given template config. So while an `interface` would allow us to present **all** possible field types, a `union` gives us the ability to scope down the field list to only allow what the template defines. Using `unions` forces us to be explicit about that in a way that's clear (note: it may be possible to do this with interfaces but there would end up being an interface for each collection of possible fields - making the `interface` term somewhat misleading). Using unions also allows our auto-querybuilder to know that they have populated all possible types of a field, something that seems like it might be more difficult with interfaces.