@project-ajax/create 0.0.27 → 0.0.30

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -94,14 +94,12 @@ function printNextSteps(directoryName) {
94
94
  if (directoryName === ".") {
95
95
  console.log(`
96
96
  ${chalk.bold("npm install")}
97
- ${chalk.bold("npx workers auth login")}
98
97
  ${chalk.bold("npx workers deploy")}
99
98
  `);
100
99
  } else {
101
100
  console.log(`
102
101
  ${chalk.bold(`cd ${directoryName}`)}
103
102
  ${chalk.bold("npm install")}
104
- ${chalk.bold("npx workers auth login")}
105
103
  ${chalk.bold("npx workers deploy")}
106
104
  `);
107
105
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@project-ajax/create",
3
- "version": "0.0.27",
3
+ "version": "0.0.30",
4
4
  "description": "Initialize a new Notion Project Ajax extensions repo.",
5
5
  "bin": {
6
6
  "create-ajax": "dist/index.js"
@@ -54,7 +54,7 @@ worker.sync("googleCalendarSync", {
54
54
  // Use token to fetch from Google Calendar API
55
55
  console.log("Using Google token:", `${token.slice(0, 10)}...`);
56
56
 
57
- return { objects: [], done: true };
57
+ return { changes: [], hasMore: false };
58
58
  },
59
59
  });
60
60
 
@@ -5,6 +5,41 @@ import * as Schema from "@project-ajax/sdk/schema";
5
5
  const worker = new Worker();
6
6
  export default worker;
7
7
 
8
+ const projectId = "project-1";
9
+ const projectName = "Example Project";
10
+
11
+ worker.sync("projectsSync", {
12
+ // Which field to use in each object as the primary key. Must be unique.
13
+ primaryKeyProperty: "Project ID",
14
+ // The schema of the collection to create in Notion.
15
+ schema: {
16
+ // Name of the collection to create in Notion.
17
+ defaultName: "Projects",
18
+ properties: {
19
+ // See `Schema` for the full list of possible column types.
20
+ "Project Name": Schema.title(),
21
+ "Project ID": Schema.richText(),
22
+ },
23
+ },
24
+ execute: async () => {
25
+ // Fetch and return data
26
+ return {
27
+ changes: [
28
+ // Each change must match the shape of `properties` above.
29
+ {
30
+ type: "upsert" as const,
31
+ key: projectId,
32
+ properties: {
33
+ "Project Name": Builder.title(projectName),
34
+ "Project ID": Builder.richText(projectId),
35
+ },
36
+ },
37
+ ],
38
+ hasMore: false,
39
+ };
40
+ },
41
+ });
42
+
8
43
  worker.sync("mySync", {
9
44
  // Which field to use in each object as the primary key. Must be unique.
10
45
  primaryKeyProperty: "ID",
@@ -16,22 +51,25 @@ worker.sync("mySync", {
16
51
  // See `Schema` for the full list of possible column types.
17
52
  Title: Schema.title(),
18
53
  ID: Schema.richText(),
54
+ Project: Schema.relation("projectsSync"),
19
55
  },
20
56
  },
21
57
  execute: async () => {
22
58
  // Fetch and return data
23
59
  return {
24
- objects: [
25
- // Each object must match the shape of `properties` above.
60
+ changes: [
61
+ // Each change must match the shape of `properties` above.
26
62
  {
63
+ type: "upsert" as const,
27
64
  key: "1",
28
65
  properties: {
29
66
  Title: Builder.title("Item 1"),
30
67
  ID: Builder.richText("1"),
68
+ Project: [Builder.relation(projectId)],
31
69
  },
32
70
  },
33
71
  ],
34
- done: true,
72
+ hasMore: false,
35
73
  };
36
74
  },
37
75
  });
@@ -20,18 +20,10 @@ export default worker;
20
20
  worker.sync("tasksSync", {
21
21
  primaryKeyProperty: "ID",
22
22
  schema: { defaultName: "Tasks", properties: { Name: Schema.title(), ID: Schema.richText() } },
23
- execute: async (context?: { cursor?: string }) => {
24
- const pageSize = 100;
25
- const { items, nextCursor } = await fetchItems({ cursor: context?.cursor, limit: pageSize });
26
- return {
27
- objects: items.map((item) => ({
28
- key: item.id,
29
- properties: { Name: Builder.title(item.name), ID: Builder.richText(item.id) },
30
- })),
31
- done: !nextCursor,
32
- nextContext: nextCursor ? { cursor: nextCursor } : undefined,
33
- };
34
- },
23
+ execute: async () => ({
24
+ changes: [{ type: "upsert", key: "1", properties: { Name: Builder.title("Write docs"), ID: Builder.richText("1") } }],
25
+ hasMore: false,
26
+ }),
35
27
  });
36
28
 
37
29
  worker.tool("sayHello", {
@@ -52,17 +44,30 @@ worker.oauth("googleAuth", { name: "my-google-auth", provider: "google" });
52
44
 
53
45
  - For user-managed OAuth, supply `name`, `authorizationEndpoint`, `tokenEndpoint`, `clientId`, `clientSecret`, and `scope` (optional: `authorizationParams`, `callbackUrl`, `accessTokenExpireMs`).
54
46
 
55
- ### Sync Pagination
47
+ ### Sync
48
+ #### Strategy and Pagination
49
+
50
+ Syncs run in a "sync cycle": a back-to-back chain of `execute` calls that starts at a scheduled trigger and ends when an execution returns `hasMore: false`.
56
51
 
57
- Implement pagination in syncs to avoid exceeding maximum output size limits. Returning too many objects in a single execution can cause the output JSON to exceed size limits and fail.
52
+ - Always use pagination when available. Returning too many changes in one execution will fail. Start with batch sizes of ~100 changes.
53
+ - `mode=replace` is simpler, and fine for smaller syncs (<10k)
54
+ - Use `mode=incremental` when the sync could return a lot of data (>10k), eg for SaaS tools like Salesforce or Stripe
55
+ - When using `mode=incremental`, emit delete markers as needed if easy to do (below)
56
+
57
+ **Sync strategy (`mode`):**
58
+ - `replace`: each sync cycle must return the full dataset. After the final `hasMore: false`, any records not seen during that cycle are deleted.
59
+ - `incremental`: each sync cycle returns a subset of the full dataset (usually the changes since the last run). Deletions must be explicit via `{ type: "delete", key: "..." }`. Records not mentioned are left unchanged.
58
60
 
59
61
  **How pagination works:**
60
- 1. Return a batch of objects with `done: false` and a `nextContext` value
62
+ 1. Return a batch of changes with `hasMore: true` and a `nextContext` value
61
63
  2. The runtime calls `execute` again with that context
62
- 3. Continue until you return `done: true`
64
+ 3. Continue until you return `hasMore: false`
65
+
66
+ **Example replace sync:**
63
67
 
64
68
  ```ts
65
69
  worker.sync("paginatedSync", {
70
+ mode: "replace",
66
71
  primaryKeyProperty: "ID",
67
72
  schema: { defaultName: "Records", properties: { Name: Schema.title(), ID: Schema.richText() } },
68
73
  execute: async (context?: { page: number }) => {
@@ -70,11 +75,12 @@ worker.sync("paginatedSync", {
70
75
  const pageSize = 100;
71
76
  const { items, hasMore } = await fetchPage(page, pageSize);
72
77
  return {
73
- objects: items.map((item) => ({
78
+ changes: items.map((item) => ({
79
+ type: "upsert",
74
80
  key: item.id,
75
81
  properties: { Name: Builder.title(item.name), ID: Builder.richText(item.id) },
76
82
  })),
77
- done: !hasMore,
83
+ hasMore,
78
84
  nextContext: hasMore ? { page: page + 1 } : undefined,
79
85
  };
80
86
  },
@@ -83,6 +89,72 @@ worker.sync("paginatedSync", {
83
89
 
84
90
  **Context types:** The `nextContext` can be any serializable value—a cursor string, page number, timestamp, or complex object. Type your execute function's context parameter to match.
85
91
 
92
+ **Incremental example (changes only, with deletes):**
93
+ ```ts
94
+ worker.sync("incrementalSync", {
95
+ primaryKeyProperty: "ID",
96
+ mode: "incremental",
97
+ schema: { defaultName: "Records", properties: { Name: Schema.title(), ID: Schema.richText() } },
98
+ execute: async (context?: { cursor?: string }) => {
99
+ const { upserts, deletes, nextCursor } = await fetchChanges(context?.cursor);
100
+ return {
101
+ changes: [
102
+ ...upserts.map((item) => ({
103
+ type: "upsert",
104
+ key: item.id,
105
+ properties: { Name: Builder.title(item.name), ID: Builder.richText(item.id) },
106
+ })),
107
+ ...deletes.map((id) => ({ type: "delete", key: id })),
108
+ ],
109
+ hasMore: Boolean(nextCursor),
110
+ nextContext: nextCursor ? { cursor: nextCursor } : undefined,
111
+ };
112
+ },
113
+ });
114
+ ```
115
+
116
+ #### Relations
117
+
118
+ Two syncs can relate to one another using `Schema.relation(relatedSyncKey)` and `Builder.relation(primaryKey)` entries inside an array.
119
+
120
+ ```ts
121
+ worker.sync("projectsSync", {
122
+ primaryKeyProperty: "Project ID",
123
+ ...
124
+ });
125
+
126
+ // Example sync worker that syncs sample tasks to a database
127
+ worker.sync("tasksSync", {
128
+ primaryKeyProperty: "Task ID",
129
+ ...
130
+ schema: {
131
+ ...
132
+ properties: {
133
+ ...
134
+ Project: Schema.relation("projectsSync"),
135
+ },
136
+ },
137
+
138
+ execute: async () => {
139
+ // Return sample tasks as database entries
140
+ const tasks = fetchTasks()
141
+ const changes = tasks.map((task) => ({
142
+ type: "upsert" as const,
143
+ key: task.id,
144
+ properties: {
145
+ ...
146
+ Project: [Builder.relation(task.projectId)],
147
+ },
148
+ }));
149
+
150
+ return {
151
+ changes,
152
+ hasMore: false,
153
+ };
154
+ },
155
+ });
156
+ ```
157
+
86
158
  ## Build, Test, and Development Commands
87
159
  - Node >= 22 and npm >= 10.9.2 (see `package.json` engines).
88
160
  - `npm run dev`: run `src/index.ts` with live reload.
@@ -22,10 +22,9 @@ npm install
22
22
  Connect to a Notion workspace and deploy the sample worker:
23
23
 
24
24
  ```shell
25
- npx workers auth login
26
- # or target a specific environment:
27
- npx workers auth login --env=dev
28
25
  npx workers deploy
26
+ # or target a specific environment:
27
+ npx workers deploy --env=dev
29
28
  ```
30
29
 
31
30
  Run the sample sync to create a database:
@@ -56,6 +55,8 @@ export default worker;
56
55
 
57
56
  Syncs create or update a Notion database from your source data.
58
57
 
58
+ The most basic sync returns all data that should be copied to the Notion database on each run:
59
+
59
60
  ```ts
60
61
  import * as Builder from "@project-ajax/sdk/builder";
61
62
  import * as Schema from "@project-ajax/sdk/schema";
@@ -73,8 +74,9 @@ worker.sync("tasksSync", {
73
74
  },
74
75
  },
75
76
  execute: async () => ({
76
- objects: [
77
+ changes: [
77
78
  {
79
+ type: "upsert",
78
80
  key: "1",
79
81
  properties: {
80
82
  Name: Builder.title("Write docs"),
@@ -82,11 +84,92 @@ worker.sync("tasksSync", {
82
84
  },
83
85
  },
84
86
  ],
85
- done: true,
87
+ hasMore: false,
86
88
  }),
87
89
  });
88
90
  ```
89
91
 
92
+ Notion will delete stale rows after each sync cycle completes (i.e., after the final execution returns `hasMore: false`). A stale row is a row that was in the database but that your function did not return during that cycle.
93
+
94
+ #### Write a sync that paginates
95
+
96
+ When your sync is pulling in many rows of data (>1k), you'll want to use pagination. Breaking down pages to ~100 is a good starting point.
97
+
98
+ You can use **context** to persist things like pagination tokens between `execute` runs. Notion passes `context` as the first argument to `execute`. Return `nextContext` to set the `context` for the next run:
99
+
100
+ ```ts
101
+ worker.sync("fullSync", {
102
+ primaryKeyProperty: "ID",
103
+ mode: "replace",
104
+ schema: { defaultName: "Records", properties: { Name: Schema.title(), ID: Schema.richText() } },
105
+ execute: async (context?: { cursor?: string }) => {
106
+ const { items, nextCursor } = await fetchPage(context?.cursor);
107
+ return {
108
+ changes: items.map((item) => ({
109
+ type: "upsert",
110
+ key: item.id,
111
+ properties: { Name: Builder.title(item.name), ID: Builder.richText(item.id) },
112
+ })),
113
+ hasMore: Boolean(nextCursor),
114
+ nextContext: nextCursor ? { cursor: nextCursor } : undefined,
115
+ };
116
+ },
117
+ });
118
+ ```
119
+
120
+ Return `hasMore=true` for each run until you reach the end. On the last run, return `hasMore=false`. At the start of the next cycle, Notion will start anew and call `execute` with `context=null`.
121
+
122
+ #### Write a sync that syncs incrementally
123
+
124
+ When your sync is working with a lot of data (10k+), you'll want to use the `incremental` sync mode. With incremental syncs, you can for example backfill all the data from an API into Notion, and then sync only incremental updates from that point forward.
125
+
126
+ Set the sync's `mode` to `incremental` and use pagination as above:
127
+
128
+ ```ts
129
+ worker.sync("incrementalSync", {
130
+ primaryKeyProperty: "ID",
131
+ mode: "incremental",
132
+ schema: { defaultName: "Records", properties: { Name: Schema.title(), ID: Schema.richText() } },
133
+ execute: async (context?: { cursor?: string }) => {
134
+ const { upserts, deletes, nextCursor } = await fetchChanges(context?.cursor);
135
+ return {
136
+ changes: [
137
+ ...upserts.map((item) => ({
138
+ type: "upsert",
139
+ key: item.id,
140
+ properties: { Name: Builder.title(item.name), ID: Builder.richText(item.id) },
141
+ })),
142
+ ...deletes.map((id) => ({ type: "delete", key: id })),
143
+ ],
144
+ hasMore: Boolean(nextCursor),
145
+ nextContext: nextCursor ? { cursor: nextCursor } : undefined,
146
+ };
147
+ },
148
+ });
149
+ ```
150
+
151
+ Unlike in the `replace` sync mode, Notion will not drop "stale" rows, and `context` will persist between sync cycles.
152
+
153
+ **Deletes**
154
+
155
+ With incremental syncs, you can delete rows by returning a delete marker, like so:
156
+
157
+ ```ts
158
+ changes: [
159
+ // this is an upsert
160
+ {
161
+ type: "upsert",
162
+ key: item.id,
163
+ properties: { Name: Builder.title(item.name), ID: Builder.richText(item.id) },
164
+ },
165
+ // this is a delete
166
+ {
167
+ type: "delete",
168
+ key: item.id
169
+ }
170
+ ]
171
+ ```
172
+
90
173
  ### Tool
91
174
 
92
175
  Tools are callable by Notion custom agents.
@@ -155,6 +238,9 @@ Log in to Notion (use `--env=dev` for dev):
155
238
  npx workers auth login --env=dev
156
239
  ```
157
240
 
241
+ Login is automatically handled by `npx workers deploy`, so this command is
242
+ typically not needed.
243
+
158
244
  ### `npx workers auth show`
159
245
  Show the active auth token:
160
246
 
@@ -5,6 +5,9 @@ import * as Schema from "@project-ajax/sdk/schema";
5
5
  const worker = new Worker();
6
6
  export default worker;
7
7
 
8
+ const projectId = "project-123";
9
+ const projectName = "Project 1";
10
+
8
11
  // Sample data for demonstration
9
12
  const sampleTasks = [
10
13
  {
@@ -12,21 +15,52 @@ const sampleTasks = [
12
15
  title: "Welcome to Project Ajax",
13
16
  status: "Completed",
14
17
  description: "This is a simple hello world example",
18
+ projectId,
15
19
  },
16
20
  {
17
21
  id: "task-2",
18
22
  title: "Build your first worker",
19
23
  status: "In Progress",
20
24
  description: "Create a sync or tool worker",
25
+ projectId,
21
26
  },
22
27
  {
23
28
  id: "task-3",
24
29
  title: "Deploy to production",
25
30
  status: "Todo",
26
31
  description: "Share your worker with your team",
32
+ projectId,
27
33
  },
28
34
  ];
29
35
 
36
+ worker.sync("projectsSync", {
37
+ primaryKeyProperty: "Project ID",
38
+ schema: {
39
+ defaultName: "Projects",
40
+ databaseIcon: Builder.notionIcon("activity"),
41
+ properties: {
42
+ "Project Name": Schema.title(),
43
+ "Project ID": Schema.richText(),
44
+ },
45
+ },
46
+ execute: async () => {
47
+ return {
48
+ changes: [
49
+ {
50
+ type: "upsert" as const,
51
+ key: projectId,
52
+ icon: Builder.notionIcon("activity"),
53
+ properties: {
54
+ "Project Name": Builder.title(projectName),
55
+ "Project ID": Builder.richText(projectId),
56
+ },
57
+ },
58
+ ],
59
+ hasMore: false,
60
+ };
61
+ },
62
+ });
63
+
30
64
  // Example sync worker that syncs sample tasks to a database
31
65
  worker.sync("tasksSync", {
32
66
  primaryKeyProperty: "Task ID",
@@ -35,9 +69,13 @@ worker.sync("tasksSync", {
35
69
  // Use intervals like "30m", "1h", "1d" (min: 1m, max: 7d)
36
70
  schedule: "continuous",
37
71
 
38
- // Optional: Set to true to delete pages that are not returned from sync executions.
39
- // By default (false), sync only creates and updates pages, never deletes them.
40
- // deleteUnreturnedPages: true,
72
+ // Sync mode:
73
+ // - "replace": Each sync cycle returns the complete dataset. After hasMore:false,
74
+ // pages not seen in this sync run are deleted.
75
+ // - "incremental": Each sync cycle returns a subset of the complete dataset. Use delete markers
76
+ // (e.g., { type: "delete", key: "task-1" }) to remove pages.
77
+ // Defaults to "replace".
78
+ // mode: "replace",
41
79
 
42
80
  schema: {
43
81
  defaultName: "Sample Tasks",
@@ -51,6 +89,7 @@ worker.sync("tasksSync", {
51
89
  { name: "In Progress", color: "blue" },
52
90
  { name: "Todo", color: "default" },
53
91
  ]),
92
+ Project: Schema.relation("projectsSync"),
54
93
  },
55
94
  },
56
95
 
@@ -68,7 +107,8 @@ worker.sync("tasksSync", {
68
107
  }
69
108
  };
70
109
  // Return sample tasks as database entries
71
- const objects = sampleTasks.map((task) => ({
110
+ const changes = sampleTasks.map((task) => ({
111
+ type: "upsert" as const,
72
112
  key: task.id,
73
113
  icon: emojiForStatus(task.status),
74
114
  properties: {
@@ -76,13 +116,19 @@ worker.sync("tasksSync", {
76
116
  "Task ID": Builder.richText(task.id),
77
117
  Description: Builder.richText(task.description),
78
118
  Status: Builder.select(task.status),
119
+ Project: [Builder.relation(projectId)],
79
120
  },
80
121
  pageContentMarkdown: `## ${task.title}\n\n${task.description}`,
81
122
  }));
82
123
 
83
124
  return {
84
- objects,
85
- done: true,
125
+ // List of changes to apply to the Notion database.
126
+ changes,
127
+ // Indicates whether there is more data to fetch this sync cycle. If true, the runtime will call `execute` again with the nextContext.
128
+ hasMore: false,
129
+ // Optional context data Notion will pass back in the next execution.
130
+ // This can be any type of data (cursor, page number, timestamp, etc.).
131
+ nextContext: undefined,
86
132
  };
87
133
  },
88
134
  });