create-pulsekit 0.0.1 → 0.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2) hide show
  1. package/dist/index.js +53 -144
  2. package/package.json +7 -12
package/dist/index.js CHANGED
@@ -69,67 +69,6 @@ function getAppDir() {
69
69
  return import_node_path.default.join(cwd, "app");
70
70
  }
71
71
 
72
- // src/prompts.ts
73
- var import_promises = __toESM(require("readline/promises"));
74
- var import_node_process = require("process");
75
- var import_node_fs2 = __toESM(require("fs"));
76
- var import_node_path2 = __toESM(require("path"));
77
- function readEnvFile() {
78
- const envPath = import_node_path2.default.join(process.cwd(), ".env.local");
79
- if (!import_node_fs2.default.existsSync(envPath)) return {};
80
- const content = import_node_fs2.default.readFileSync(envPath, "utf8");
81
- const env = {};
82
- for (const line of content.split("\n")) {
83
- const trimmed = line.trim();
84
- if (!trimmed || trimmed.startsWith("#")) continue;
85
- const eqIndex = trimmed.indexOf("=");
86
- if (eqIndex === -1) continue;
87
- env[trimmed.slice(0, eqIndex).trim()] = trimmed.slice(eqIndex + 1).trim();
88
- }
89
- return env;
90
- }
91
- async function promptForConfig() {
92
- const env = readEnvFile();
93
- const rl = import_promises.default.createInterface({ input: import_node_process.stdin, output: import_node_process.stdout });
94
- try {
95
- const detectedUrl = env["NEXT_PUBLIC_SUPABASE_URL"] || "";
96
- let supabaseUrl;
97
- if (detectedUrl) {
98
- const answer = await rl.question(` Supabase URL [${detectedUrl}]: `);
99
- supabaseUrl = answer.trim() || detectedUrl;
100
- } else {
101
- supabaseUrl = (await rl.question(" Supabase URL: ")).trim();
102
- if (!supabaseUrl) throw new Error("Supabase URL is required.");
103
- }
104
- const detectedKey = env["NEXT_PUBLIC_SUPABASE_ANON_KEY"] || env["NEXT_PUBLIC_SUPABASE_PUBLISHABLE_DEFAULT_KEY"] || "";
105
- let supabaseAnonKey;
106
- if (detectedKey) {
107
- const masked = detectedKey.slice(0, 10) + "..." + detectedKey.slice(-4);
108
- const answer = await rl.question(` Supabase anon key [${masked}]: `);
109
- supabaseAnonKey = answer.trim() || detectedKey;
110
- } else {
111
- supabaseAnonKey = (await rl.question(" Supabase anon key: ")).trim();
112
- if (!supabaseAnonKey) throw new Error("Supabase anon key is required.");
113
- }
114
- const detectedDb = env["DATABASE_URL"] || "";
115
- let databaseUrl;
116
- if (detectedDb) {
117
- const masked = detectedDb.slice(0, 15) + "..." + detectedDb.slice(-10);
118
- const answer = await rl.question(` DATABASE_URL [${masked}]: `);
119
- databaseUrl = answer.trim() || detectedDb;
120
- } else {
121
- databaseUrl = (await rl.question(" DATABASE_URL (postgres://...): ")).trim();
122
- if (!databaseUrl)
123
- throw new Error("DATABASE_URL is required for migrations.");
124
- }
125
- const siteIdAnswer = await rl.question(" Site ID [default]: ");
126
- const siteId = siteIdAnswer.trim() || "default";
127
- return { supabaseUrl, supabaseAnonKey, databaseUrl, siteId };
128
- } finally {
129
- rl.close();
130
- }
131
- }
132
-
133
72
  // src/install.ts
134
73
  var import_node_child_process = require("child_process");
135
74
  var PACKAGES = [
@@ -159,37 +98,10 @@ async function installPackages(pm) {
159
98
  }
160
99
  }
161
100
 
162
- // src/migrate.ts
163
- var import_postgres = __toESM(require("postgres"));
164
- var SQL_MAP = JSON.parse(
165
- `{"001_init_pulse.sql":"create schema if not exists analytics;\\n\\n-- Add analytics to the schemas exposed by PostgREST\\nalter role authenticator set pgrst.db_schemas = 'public, graphql_public, analytics';\\n\\n-- Schema-level access\\ngrant usage on schema analytics to anon, authenticated, service_role;\\nalter default privileges in schema analytics grant all on tables to anon, authenticated, service_role;\\n\\ncreate table if not exists analytics.pulse_events (\\n id bigserial primary key,\\n site_id text not null,\\n session_id text,\\n path text not null,\\n event_type text not null,\\n meta jsonb,\\n created_at timestamptz not null default now()\\n);\\n\\ncreate index if not exists idx_pulse_events_site_created_at\\n on analytics.pulse_events (site_id, created_at);\\n\\ncreate index if not exists idx_pulse_events_site_path_created_at\\n on analytics.pulse_events (site_id, path, created_at);\\n\\nalter table analytics.pulse_events enable row level security; \\n \\n-- Allow the anon key (API route) to insert events\\ndrop policy if exists \\"Allow anon insert on pulse_events\\" on analytics.pulse_events;\\ncreate policy \\"Allow anon insert on pulse_events\\"\\n on analytics.pulse_events\\n for insert\\n to anon\\n with check (true);\\n\\n-- Only authenticated users (dashboard) can read events\\ndrop policy if exists \\"Allow authenticated select on pulse_events\\" on analytics.pulse_events;\\ncreate policy \\"Allow authenticated select on pulse_events\\"\\n on analytics.pulse_events\\n for select\\n to authenticated\\n using (true);\\n\\ncreate table if not exists analytics.pulse_aggregates (\\n date date not null,\\n site_id text not null,\\n path text not null,\\n total_views integer not null default 0,\\n unique_visitors integer not null default 0,\\n primary key (date, site_id, path)\\n);\\n\\n-- Grant table-level access (must be after table creation)\\ngrant all on all tables in schema analytics to anon, authenticated, service_role;\\ngrant all on all 
sequences in schema analytics to anon, authenticated, service_role;\\n\\nalter table analytics.pulse_aggregates enable row level security;\\n\\n-- Allow reading aggregates (dashboard)\\ndrop policy if exists \\"Allow authenticated select on pulse_aggregates\\" on analytics.pulse_aggregates;\\ncreate policy \\"Allow authenticated select on pulse_aggregates\\"\\n on analytics.pulse_aggregates\\n for select\\n to authenticated\\n using (true);\\n\\ndrop policy if exists \\"Allow anon select on pulse_aggregates\\" on analytics.pulse_aggregates;\\ncreate policy \\"Allow anon select on pulse_aggregates\\"\\n on analytics.pulse_aggregates\\n for select\\n to anon\\n using (true);\\n\\n-- Reload PostgREST config and schema cache (must be last)\\nnotify pgrst, 'reload config';\\nnotify pgrst, 'reload schema';\\n","002_aggregation_function.sql":"-- Aggregation function: rolls up raw events into daily aggregates\\ncreate or replace function analytics.pulse_refresh_aggregates(days_back integer default 7)\\nreturns void\\nlanguage sql\\nsecurity definer\\nas $$\\n insert into analytics.pulse_aggregates (date, site_id, path, total_views, unique_visitors)\\n select\\n date_trunc('day', created_at)::date as date,\\n site_id,\\n path,\\n count(*) as total_views,\\n count(distinct session_id) as unique_visitors\\n from analytics.pulse_events\\n where created_at >= now() - (days_back || ' days')::interval\\n group by 1, 2, 3\\n on conflict (date, site_id, path) do update\\n set\\n total_views = excluded.total_views,\\n unique_visitors = excluded.unique_visitors;\\n$$;\\n\\n-- Allow all roles to execute the aggregation function\\n-- security definer ensures it runs with the owner's privileges regardless of caller\\ngrant execute on function analytics.pulse_refresh_aggregates(integer) to anon, authenticated, service_role;\\n","003_geo_and_timezone.sql":"-- Add geo columns to pulse_events\\nalter table analytics.pulse_events\\n add column if not exists country text,\\n add column if not 
exists region text,\\n add column if not exists city text,\\n add column if not exists timezone text,\\n add column if not exists latitude double precision,\\n add column if not exists longitude double precision;\\n\\n-- Timezone-aware stats: queries raw events with AT TIME ZONE\\n-- so the dashboard can display data bucketed by the viewer's local day.\\ncreate or replace function analytics.pulse_stats_by_timezone(\\n p_site_id text,\\n p_timezone text default 'UTC',\\n p_days_back integer default 7\\n)\\nreturns table (\\n date date,\\n path text,\\n total_views bigint,\\n unique_visitors bigint\\n)\\nlanguage sql\\nsecurity definer\\nstable\\nas $$\\n select\\n date_trunc('day', created_at at time zone p_timezone)::date as date,\\n path,\\n count(*) as total_views,\\n count(distinct session_id) as unique_visitors\\n from analytics.pulse_events\\n where site_id = p_site_id\\n and created_at >= now() - make_interval(days => p_days_back + 1)\\n group by 1, 2;\\n$$;\\n\\ngrant execute on function analytics.pulse_stats_by_timezone(text, text, integer)\\n to anon, authenticated, service_role;\\n\\n-- Drop first so return type can change (CREATE OR REPLACE cannot alter return columns)\\ndrop function if exists analytics.pulse_location_stats(text, integer);\\n\\n-- Location stats: visitor counts grouped by country + city, with averaged coordinates\\ncreate or replace function analytics.pulse_location_stats(\\n p_site_id text,\\n p_days_back integer default 7\\n)\\nreturns table (\\n country text,\\n city text,\\n latitude double precision,\\n longitude double precision,\\n total_views bigint,\\n unique_visitors bigint\\n)\\nlanguage sql\\nsecurity definer\\nstable\\nas $$\\n select\\n country,\\n city,\\n avg(latitude) as latitude,\\n avg(longitude) as longitude,\\n count(*) as total_views,\\n count(distinct session_id) as unique_visitors\\n from analytics.pulse_events\\n where site_id = p_site_id\\n and created_at >= now() - make_interval(days => p_days_back)\\n and 
country is not null\\n group by 1, 2\\n order by total_views desc;\\n$$;\\n\\ngrant execute on function analytics.pulse_location_stats(text, integer)\\n to anon, authenticated, service_role;\\n","004_web_vitals.sql":"-- 004_web_vitals.sql\\n-- Partial index + RPC for Web Vitals p75 aggregation\\n\\n-- Partial index: only covers vitals events, stays small\\nCREATE INDEX IF NOT EXISTS idx_pulse_events_vitals\\n ON analytics.pulse_events (site_id, created_at)\\n WHERE event_type = 'vitals';\\n\\n-- RPC: returns per-metric p75 for each page + site-wide (__overall__)\\nCREATE OR REPLACE FUNCTION analytics.pulse_vitals_stats(\\n p_site_id TEXT,\\n p_days_back INT DEFAULT 7\\n)\\nRETURNS TABLE (\\n path TEXT,\\n metric TEXT,\\n p75 DOUBLE PRECISION,\\n sample_count BIGINT\\n)\\nLANGUAGE sql SECURITY DEFINER STABLE\\nAS $$\\n WITH vitals_raw AS (\\n SELECT\\n e.path,\\n kv.key AS metric,\\n kv.value::double precision AS val\\n FROM analytics.pulse_events e,\\n LATERAL jsonb_each_text(e.meta) AS kv(key, value)\\n WHERE e.site_id = p_site_id\\n AND e.event_type = 'vitals'\\n AND e.created_at >= NOW() - (p_days_back || ' days')::interval\\n AND kv.key IN ('lcp', 'inp', 'cls', 'fcp', 'ttfb')\\n )\\n -- Per-page stats\\n SELECT\\n vr.path,\\n vr.metric,\\n percentile_cont(0.75) WITHIN GROUP (ORDER BY vr.val) AS p75,\\n count(*)::bigint AS sample_count\\n FROM vitals_raw vr\\n GROUP BY vr.path, vr.metric\\n\\n UNION ALL\\n\\n -- Site-wide stats\\n SELECT\\n '__overall__'::text AS path,\\n vr.metric,\\n percentile_cont(0.75) WITHIN GROUP (ORDER BY vr.val) AS p75,\\n count(*)::bigint AS sample_count\\n FROM vitals_raw vr\\n GROUP BY vr.metric;\\n$$;\\n\\nGRANT EXECUTE ON FUNCTION analytics.pulse_vitals_stats(TEXT, INT)\\n TO anon, authenticated, service_role;\\n"}`
166
- );
167
- async function runMigrations(databaseUrl) {
168
- console.log(" Running SQL migrations...\n");
169
- const sql = (0, import_postgres.default)(databaseUrl, { max: 1 });
170
- const files = Object.keys(SQL_MAP).sort();
171
- for (const file of files) {
172
- try {
173
- await sql.unsafe(SQL_MAP[file]);
174
- console.log(` Applied: ${file}`);
175
- } catch (err) {
176
- const msg = err?.message || "";
177
- if (msg.includes("already exists") || msg.includes("duplicate key")) {
178
- console.log(` Skipped (already applied): ${file}`);
179
- } else {
180
- await sql.end();
181
- throw new Error(`Migration ${file} failed: ${msg}`);
182
- }
183
- }
184
- }
185
- await sql.end();
186
- console.log("\n Migrations complete.\n");
187
- }
188
-
189
101
  // src/scaffold.ts
190
- var import_node_fs3 = __toESM(require("fs"));
191
- var import_node_path3 = __toESM(require("path"));
192
- async function scaffoldFiles(siteId) {
102
+ var import_node_fs2 = __toESM(require("fs"));
103
+ var import_node_path2 = __toESM(require("path"));
104
+ function scaffoldFiles() {
193
105
  console.log(" Scaffolding files...\n");
194
106
  const appDir = getAppDir();
195
107
  const pulseRoute = `import { createPulseHandler } from "@pulsekit/next";
@@ -197,14 +109,14 @@ import { createClient } from "@supabase/supabase-js";
197
109
 
198
110
  const supabase = createClient(
199
111
  process.env.NEXT_PUBLIC_SUPABASE_URL!,
200
- process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY!
112
+ process.env.NEXT_PUBLIC_SUPABASE_PUBLISHABLE_KEY!
201
113
  );
202
114
 
203
115
  export const POST = createPulseHandler({
204
116
  supabase,
205
117
  config: {
206
118
  allowLocalhost: true,
207
- siteId: ${JSON.stringify(siteId)},
119
+ siteId: "default",
208
120
  },
209
121
  });
210
122
  `;
@@ -213,7 +125,7 @@ import { createClient } from "@supabase/supabase-js";
213
125
 
214
126
  const supabase = createClient(
215
127
  process.env.NEXT_PUBLIC_SUPABASE_URL!,
216
- process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY!
128
+ process.env.NEXT_PUBLIC_SUPABASE_PUBLISHABLE_KEY!
217
129
  );
218
130
 
219
131
  export const POST = createRefreshHandler({ supabase });
@@ -224,7 +136,7 @@ import { getPulseTimezone } from "@pulsekit/next";
224
136
 
225
137
  const supabase = createClient(
226
138
  process.env.NEXT_PUBLIC_SUPABASE_URL!,
227
- process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY!
139
+ process.env.NEXT_PUBLIC_SUPABASE_PUBLISHABLE_KEY!
228
140
  );
229
141
 
230
142
  export default async function AnalyticsPage() {
@@ -233,7 +145,7 @@ export default async function AnalyticsPage() {
233
145
  return (
234
146
  <PulseDashboard
235
147
  supabase={supabase}
236
- siteId={${JSON.stringify(siteId)}}
148
+ siteId="default"
237
149
  timeframe="7d"
238
150
  timezone={timezone}
239
151
  />
@@ -246,32 +158,32 @@ export default async function AnalyticsPage() {
246
158
  { rel: "admin/analytics/page.tsx", content: dashboardPage }
247
159
  ];
248
160
  for (const { rel, content } of files) {
249
- const fullPath = import_node_path3.default.join(appDir, rel);
250
- import_node_fs3.default.mkdirSync(import_node_path3.default.dirname(fullPath), { recursive: true });
251
- if (import_node_fs3.default.existsSync(fullPath)) {
161
+ const fullPath = import_node_path2.default.join(appDir, rel);
162
+ import_node_fs2.default.mkdirSync(import_node_path2.default.dirname(fullPath), { recursive: true });
163
+ if (import_node_fs2.default.existsSync(fullPath)) {
252
164
  console.log(` Skipped (already exists): ${rel}`);
253
165
  continue;
254
166
  }
255
- import_node_fs3.default.writeFileSync(fullPath, content, "utf8");
167
+ import_node_fs2.default.writeFileSync(fullPath, content, "utf8");
256
168
  console.log(` Created: ${rel}`);
257
169
  }
258
170
  console.log("");
259
171
  }
260
172
 
261
173
  // src/inject.ts
262
- var import_node_fs4 = __toESM(require("fs"));
263
- var import_node_path4 = __toESM(require("path"));
174
+ var import_node_fs3 = __toESM(require("fs"));
175
+ var import_node_path3 = __toESM(require("path"));
264
176
  async function injectPulseTracker() {
265
177
  console.log(" Injecting PulseTracker into layout...\n");
266
178
  const appDir = getAppDir();
267
179
  const candidates = [
268
- import_node_path4.default.join(appDir, "layout.tsx"),
269
- import_node_path4.default.join(appDir, "layout.jsx"),
270
- import_node_path4.default.join(appDir, "layout.js")
180
+ import_node_path3.default.join(appDir, "layout.tsx"),
181
+ import_node_path3.default.join(appDir, "layout.jsx"),
182
+ import_node_path3.default.join(appDir, "layout.js")
271
183
  ];
272
184
  let foundPath = null;
273
185
  for (const candidate of candidates) {
274
- if (import_node_fs4.default.existsSync(candidate)) {
186
+ if (import_node_fs3.default.existsSync(candidate)) {
275
187
  foundPath = candidate;
276
188
  break;
277
189
  }
@@ -280,7 +192,7 @@ async function injectPulseTracker() {
280
192
  printManualInstructions();
281
193
  return;
282
194
  }
283
- let content = import_node_fs4.default.readFileSync(foundPath, "utf8");
195
+ let content = import_node_fs3.default.readFileSync(foundPath, "utf8");
284
196
  if (content.includes("PulseTracker")) {
285
197
  console.log(" PulseTracker already present in layout. Skipping.\n");
286
198
  return;
@@ -307,9 +219,9 @@ async function injectPulseTracker() {
307
219
  const trackerJsx = `${indent} <PulseTracker excludePaths={["/admin/analytics"]} />
308
220
  `;
309
221
  content = content.slice(0, bodyCloseIndex) + trackerJsx + content.slice(bodyCloseIndex);
310
- import_node_fs4.default.writeFileSync(foundPath, content, "utf8");
222
+ import_node_fs3.default.writeFileSync(foundPath, content, "utf8");
311
223
  console.log(
312
- ` Modified: ${import_node_path4.default.relative(process.cwd(), foundPath)}
224
+ ` Modified: ${import_node_path3.default.relative(process.cwd(), foundPath)}
313
225
  `
314
226
  );
315
227
  }
@@ -325,34 +237,29 @@ function printManualInstructions() {
325
237
  );
326
238
  }
327
239
 
328
- // src/env.ts
329
- var import_node_fs5 = __toESM(require("fs"));
330
- var import_node_path5 = __toESM(require("path"));
331
- function writeEnvVars(config) {
332
- console.log(" Updating .env.local...\n");
333
- const envPath = import_node_path5.default.join(process.cwd(), ".env.local");
334
- let existing = "";
335
- if (import_node_fs5.default.existsSync(envPath)) {
336
- existing = import_node_fs5.default.readFileSync(envPath, "utf8");
337
- }
338
- const lines = [];
339
- if (!existing.includes("NEXT_PUBLIC_SUPABASE_URL")) {
340
- lines.push(`NEXT_PUBLIC_SUPABASE_URL=${config.supabaseUrl}`);
341
- }
342
- if (!existing.includes("NEXT_PUBLIC_SUPABASE_ANON_KEY") && !existing.includes("NEXT_PUBLIC_SUPABASE_PUBLISHABLE_DEFAULT_KEY")) {
343
- lines.push(`NEXT_PUBLIC_SUPABASE_ANON_KEY=${config.supabaseAnonKey}`);
344
- }
345
- if (!existing.includes("DATABASE_URL")) {
346
- lines.push(`DATABASE_URL=${config.databaseUrl}`);
347
- }
348
- if (lines.length === 0) {
349
- console.log(" .env.local already has all required variables.\n");
240
+ // src/migration.ts
241
+ var import_node_fs4 = __toESM(require("fs"));
242
+ var import_node_path4 = __toESM(require("path"));
243
+ var SQL_MAP = JSON.parse(
244
+ `{"001_init_pulse.sql":"create schema if not exists analytics;\\n\\n-- Add analytics to the schemas exposed by PostgREST\\nalter role authenticator set pgrst.db_schemas = 'public, graphql_public, analytics';\\n\\n-- Schema-level access\\ngrant usage on schema analytics to anon, authenticated, service_role;\\nalter default privileges in schema analytics grant all on tables to anon, authenticated, service_role;\\n\\ncreate table if not exists analytics.pulse_events (\\n id bigserial primary key,\\n site_id text not null,\\n session_id text,\\n path text not null,\\n event_type text not null,\\n meta jsonb,\\n created_at timestamptz not null default now()\\n);\\n\\ncreate index if not exists idx_pulse_events_site_created_at\\n on analytics.pulse_events (site_id, created_at);\\n\\ncreate index if not exists idx_pulse_events_site_path_created_at\\n on analytics.pulse_events (site_id, path, created_at);\\n\\nalter table analytics.pulse_events enable row level security; \\n \\n-- Allow the anon key (API route) to insert events\\ndrop policy if exists \\"Allow anon insert on pulse_events\\" on analytics.pulse_events;\\ncreate policy \\"Allow anon insert on pulse_events\\"\\n on analytics.pulse_events\\n for insert\\n to anon\\n with check (true);\\n\\n-- Only authenticated users (dashboard) can read events\\ndrop policy if exists \\"Allow authenticated select on pulse_events\\" on analytics.pulse_events;\\ncreate policy \\"Allow authenticated select on pulse_events\\"\\n on analytics.pulse_events\\n for select\\n to authenticated\\n using (true);\\n\\ncreate table if not exists analytics.pulse_aggregates (\\n date date not null,\\n site_id text not null,\\n path text not null,\\n total_views integer not null default 0,\\n unique_visitors integer not null default 0,\\n primary key (date, site_id, path)\\n);\\n\\n-- Grant table-level access (must be after table creation)\\ngrant all on all tables in schema analytics to anon, authenticated, service_role;\\ngrant all on all 
sequences in schema analytics to anon, authenticated, service_role;\\n\\nalter table analytics.pulse_aggregates enable row level security;\\n\\n-- Allow reading aggregates (dashboard)\\ndrop policy if exists \\"Allow authenticated select on pulse_aggregates\\" on analytics.pulse_aggregates;\\ncreate policy \\"Allow authenticated select on pulse_aggregates\\"\\n on analytics.pulse_aggregates\\n for select\\n to authenticated\\n using (true);\\n\\ndrop policy if exists \\"Allow anon select on pulse_aggregates\\" on analytics.pulse_aggregates;\\ncreate policy \\"Allow anon select on pulse_aggregates\\"\\n on analytics.pulse_aggregates\\n for select\\n to anon\\n using (true);\\n\\n-- Reload PostgREST config and schema cache (must be last)\\nnotify pgrst, 'reload config';\\nnotify pgrst, 'reload schema';\\n","002_aggregation_function.sql":"-- Aggregation function: rolls up raw events into daily aggregates\\ncreate or replace function analytics.pulse_refresh_aggregates(days_back integer default 7)\\nreturns void\\nlanguage sql\\nsecurity definer\\nas $$\\n insert into analytics.pulse_aggregates (date, site_id, path, total_views, unique_visitors)\\n select\\n date_trunc('day', created_at)::date as date,\\n site_id,\\n path,\\n count(*) as total_views,\\n count(distinct session_id) as unique_visitors\\n from analytics.pulse_events\\n where created_at >= now() - (days_back || ' days')::interval\\n group by 1, 2, 3\\n on conflict (date, site_id, path) do update\\n set\\n total_views = excluded.total_views,\\n unique_visitors = excluded.unique_visitors;\\n$$;\\n\\n-- Allow all roles to execute the aggregation function\\n-- security definer ensures it runs with the owner's privileges regardless of caller\\ngrant execute on function analytics.pulse_refresh_aggregates(integer) to anon, authenticated, service_role;\\n","003_geo_and_timezone.sql":"-- Add geo columns to pulse_events\\nalter table analytics.pulse_events\\n add column if not exists country text,\\n add column if not 
exists region text,\\n add column if not exists city text,\\n add column if not exists timezone text,\\n add column if not exists latitude double precision,\\n add column if not exists longitude double precision;\\n\\n-- Timezone-aware stats: queries raw events with AT TIME ZONE\\n-- so the dashboard can display data bucketed by the viewer's local day.\\ncreate or replace function analytics.pulse_stats_by_timezone(\\n p_site_id text,\\n p_timezone text default 'UTC',\\n p_days_back integer default 7\\n)\\nreturns table (\\n date date,\\n path text,\\n total_views bigint,\\n unique_visitors bigint\\n)\\nlanguage sql\\nsecurity definer\\nstable\\nas $$\\n select\\n date_trunc('day', created_at at time zone p_timezone)::date as date,\\n path,\\n count(*) as total_views,\\n count(distinct session_id) as unique_visitors\\n from analytics.pulse_events\\n where site_id = p_site_id\\n and created_at >= now() - make_interval(days => p_days_back + 1)\\n group by 1, 2;\\n$$;\\n\\ngrant execute on function analytics.pulse_stats_by_timezone(text, text, integer)\\n to anon, authenticated, service_role;\\n\\n-- Drop first so return type can change (CREATE OR REPLACE cannot alter return columns)\\ndrop function if exists analytics.pulse_location_stats(text, integer);\\n\\n-- Location stats: visitor counts grouped by country + city, with averaged coordinates\\ncreate or replace function analytics.pulse_location_stats(\\n p_site_id text,\\n p_days_back integer default 7\\n)\\nreturns table (\\n country text,\\n city text,\\n latitude double precision,\\n longitude double precision,\\n total_views bigint,\\n unique_visitors bigint\\n)\\nlanguage sql\\nsecurity definer\\nstable\\nas $$\\n select\\n country,\\n city,\\n avg(latitude) as latitude,\\n avg(longitude) as longitude,\\n count(*) as total_views,\\n count(distinct session_id) as unique_visitors\\n from analytics.pulse_events\\n where site_id = p_site_id\\n and created_at >= now() - make_interval(days => p_days_back)\\n and 
country is not null\\n group by 1, 2\\n order by total_views desc;\\n$$;\\n\\ngrant execute on function analytics.pulse_location_stats(text, integer)\\n to anon, authenticated, service_role;\\n","004_web_vitals.sql":"-- 004_web_vitals.sql\\n-- Partial index + RPC for Web Vitals p75 aggregation\\n\\n-- Partial index: only covers vitals events, stays small\\nCREATE INDEX IF NOT EXISTS idx_pulse_events_vitals\\n ON analytics.pulse_events (site_id, created_at)\\n WHERE event_type = 'vitals';\\n\\n-- RPC: returns per-metric p75 for each page + site-wide (__overall__)\\nCREATE OR REPLACE FUNCTION analytics.pulse_vitals_stats(\\n p_site_id TEXT,\\n p_days_back INT DEFAULT 7\\n)\\nRETURNS TABLE (\\n path TEXT,\\n metric TEXT,\\n p75 DOUBLE PRECISION,\\n sample_count BIGINT\\n)\\nLANGUAGE sql SECURITY DEFINER STABLE\\nAS $$\\n WITH vitals_raw AS (\\n SELECT\\n e.path,\\n kv.key AS metric,\\n kv.value::double precision AS val\\n FROM analytics.pulse_events e,\\n LATERAL jsonb_each_text(e.meta) AS kv(key, value)\\n WHERE e.site_id = p_site_id\\n AND e.event_type = 'vitals'\\n AND e.created_at >= NOW() - (p_days_back || ' days')::interval\\n AND kv.key IN ('lcp', 'inp', 'cls', 'fcp', 'ttfb')\\n )\\n -- Per-page stats\\n SELECT\\n vr.path,\\n vr.metric,\\n percentile_cont(0.75) WITHIN GROUP (ORDER BY vr.val) AS p75,\\n count(*)::bigint AS sample_count\\n FROM vitals_raw vr\\n GROUP BY vr.path, vr.metric\\n\\n UNION ALL\\n\\n -- Site-wide stats\\n SELECT\\n '__overall__'::text AS path,\\n vr.metric,\\n percentile_cont(0.75) WITHIN GROUP (ORDER BY vr.val) AS p75,\\n count(*)::bigint AS sample_count\\n FROM vitals_raw vr\\n GROUP BY vr.metric;\\n$$;\\n\\nGRANT EXECUTE ON FUNCTION analytics.pulse_vitals_stats(TEXT, INT)\\n TO anon, authenticated, service_role;\\n"}`
245
+ );
246
+ function writeMigration() {
247
+ console.log(" Writing database migration...\n");
248
+ const supabaseDir = import_node_path4.default.join(process.cwd(), "supabase", "migrations");
249
+ import_node_fs4.default.mkdirSync(supabaseDir, { recursive: true });
250
+ const files = Object.keys(SQL_MAP).sort();
251
+ const combined = files.map((file) => `-- ${file}
252
+ ${SQL_MAP[file]}`).join("\n\n");
253
+ const filename = "20250101000000_pulse_analytics.sql";
254
+ const fullPath = import_node_path4.default.join(supabaseDir, filename);
255
+ if (import_node_fs4.default.existsSync(fullPath)) {
256
+ console.log(` Skipped (already exists): supabase/migrations/${filename}
257
+ `);
350
258
  return;
351
259
  }
352
- const separator = existing.endsWith("\n") || existing === "" ? "" : "\n";
353
- const addition = separator + "\n# PulseKit Analytics\n" + lines.join("\n") + "\n";
354
- import_node_fs5.default.appendFileSync(envPath, addition, "utf8");
355
- console.log(" Updated .env.local with PulseKit variables.\n");
260
+ import_node_fs4.default.writeFileSync(fullPath, combined, "utf8");
261
+ console.log(` Created: supabase/migrations/${filename}
262
+ `);
356
263
  }
357
264
 
358
265
  // src/index.ts
@@ -363,17 +270,19 @@ async function main() {
363
270
  console.log(` Detected package manager: ${pm}
364
271
  `);
365
272
  validateNextJsProject();
366
- const config = await promptForConfig();
367
- writeEnvVars(config);
368
273
  await installPackages(pm);
369
- await runMigrations(config.databaseUrl);
370
- await scaffoldFiles(config.siteId);
274
+ scaffoldFiles();
371
275
  await injectPulseTracker();
372
- console.log("\n Done! PulseKit analytics is ready.\n");
373
- console.log(" Next steps:");
374
- console.log(" 1. Start your dev server");
375
- console.log(" 2. Visit any page to generate pageview events");
376
- console.log(" 3. Go to /admin/analytics to see your dashboard");
276
+ writeMigration();
277
+ console.log("\n Done! PulseKit has been added to your project.\n");
278
+ console.log(" To finish setup:");
279
+ console.log(" 1. Add your Supabase credentials to .env.local:");
280
+ console.log(" NEXT_PUBLIC_SUPABASE_URL=<your-supabase-url>");
281
+ console.log(" NEXT_PUBLIC_SUPABASE_PUBLISHABLE_KEY=<your-anon-key>");
282
+ console.log(" 2. Run the database migration:");
283
+ console.log(" npx supabase link");
284
+ console.log(" npx supabase db push");
285
+ console.log(" 3. Start your dev server and visit /admin/analytics");
377
286
  }
378
287
  main().catch((err) => {
379
288
  console.error("\n Error:", err.message || err);
package/package.json CHANGED
@@ -1,21 +1,16 @@
1
1
  {
2
2
  "name": "create-pulsekit",
3
- "version": "0.0.1",
3
+ "version": "0.0.2",
4
4
  "description": "Set up PulseKit analytics in your Next.js project",
5
5
  "bin": "./dist/index.js",
6
- "files": [
7
- "dist"
8
- ],
9
- "dependencies": {
10
- "postgres": "^3.4.0"
11
- },
12
- "devDependencies": {
13
- "tsup": "^8.0.0",
14
- "typescript": "^5.7.0"
15
- },
6
+ "files": ["dist"],
16
7
  "scripts": {
17
8
  "prebuild": "cp ../core/sql/*.sql src/sql/",
18
9
  "build": "tsup",
19
10
  "clean": "rm -rf dist"
11
+ },
12
+ "devDependencies": {
13
+ "tsup": "^8.0.0",
14
+ "typescript": "^5.7.0"
20
15
  }
21
- }
16
+ }