markdown-notes-engine 1.0.2 → 2.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +554 -119
- package/lib/backend/db/connection.js +127 -0
- package/lib/backend/db/schema.sql +144 -0
- package/lib/backend/github.js +2 -4
- package/lib/backend/index.js +69 -28
- package/lib/backend/markdown.js +4 -6
- package/lib/backend/routes/notes.js +35 -4
- package/lib/backend/routes/search.js +2 -2
- package/lib/backend/routes/upload.js +33 -6
- package/lib/backend/storage.js +2 -4
- package/lib/backend/version-control.js +458 -0
- package/lib/frontend/index.js +3 -6
- package/lib/index.js +5 -16
- package/package.json +20 -30
- package/lib/backend/github.mjs +0 -316
- package/lib/backend/index.mjs +0 -74
- package/lib/backend/markdown.mjs +0 -60
- package/lib/backend/routes/notes.mjs +0 -197
- package/lib/backend/routes/search.mjs +0 -28
- package/lib/backend/routes/upload.mjs +0 -122
- package/lib/backend/storage.mjs +0 -119
- package/lib/frontend/index.mjs +0 -15
- package/lib/index.mjs +0 -17
|
@@ -0,0 +1,127 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Database Connection Manager
|
|
3
|
+
* Handles PostgreSQL connection pooling using the postgres package
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import postgres from 'postgres';
|
|
7
|
+
import { readFileSync } from 'fs';
|
|
8
|
+
|
|
9
|
+
export class DatabaseConnection {
|
|
10
|
+
constructor(config) {
|
|
11
|
+
// Support both connection string and individual parameters
|
|
12
|
+
let connectionString;
|
|
13
|
+
|
|
14
|
+
if (config.connectionString) {
|
|
15
|
+
// Use provided connection string
|
|
16
|
+
connectionString = config.connectionString;
|
|
17
|
+
} else {
|
|
18
|
+
// Build connection string from individual parameters
|
|
19
|
+
const user = config.user;
|
|
20
|
+
const password = config.password;
|
|
21
|
+
const host = config.host || 'localhost';
|
|
22
|
+
const port = config.port || 5432;
|
|
23
|
+
const database = config.database;
|
|
24
|
+
|
|
25
|
+
connectionString = `postgres://${user}:${password}@${host}:${port}/${database}`;
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
// postgres package options
|
|
29
|
+
const options = {
|
|
30
|
+
max: config.maxConnections || 20,
|
|
31
|
+
idle_timeout: config.idleTimeout || 30,
|
|
32
|
+
connect_timeout: config.connectionTimeout || 10,
|
|
33
|
+
fetch_types: false,
|
|
34
|
+
};
|
|
35
|
+
|
|
36
|
+
// SSL configuration
|
|
37
|
+
if (config.ssl !== undefined) {
|
|
38
|
+
if (typeof config.ssl === 'boolean') {
|
|
39
|
+
options.ssl = config.ssl ? 'prefer' : false;
|
|
40
|
+
} else {
|
|
41
|
+
options.ssl = config.ssl;
|
|
42
|
+
}
|
|
43
|
+
} else if (config.connectionString || (config.host && config.host !== 'localhost' && !config.host.startsWith('127.'))) {
|
|
44
|
+
// Auto-enable SSL for remote connections (use 'prefer' for better compatibility)
|
|
45
|
+
options.ssl = 'prefer';
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
// Create postgres connection
|
|
49
|
+
this.sql = postgres(connectionString, options);
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
/**
|
|
53
|
+
* Execute a query using tagged template
|
|
54
|
+
* This wrapper provides compatibility with the old API
|
|
55
|
+
* @param {string} text - SQL query text
|
|
56
|
+
* @param {Array} [params] - Query parameters
|
|
57
|
+
* @returns {Promise<Object>} Query result with rows property
|
|
58
|
+
*/
|
|
59
|
+
async query(text, params = []) {
|
|
60
|
+
// Convert parameterized query to postgres format
|
|
61
|
+
// Replace $1, $2, etc. with actual values
|
|
62
|
+
const result = await this.sql.unsafe(text, params);
|
|
63
|
+
|
|
64
|
+
// Return in pg-compatible format
|
|
65
|
+
return { rows: result };
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
/**
|
|
69
|
+
* Get a client for transactions
|
|
70
|
+
* @returns {Promise<Object>} Transaction client
|
|
71
|
+
*/
|
|
72
|
+
async getClient() {
|
|
73
|
+
// Reserve a connection from the pool for transaction safety
|
|
74
|
+
const reserved = await this.sql.reserve();
|
|
75
|
+
|
|
76
|
+
return {
|
|
77
|
+
query: async (text, params = []) => {
|
|
78
|
+
const result = await reserved.unsafe(text, params);
|
|
79
|
+
return { rows: result };
|
|
80
|
+
},
|
|
81
|
+
release: () => {
|
|
82
|
+
// Release the reserved connection back to the pool
|
|
83
|
+
reserved.release();
|
|
84
|
+
}
|
|
85
|
+
};
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
/**
|
|
89
|
+
* Initialize database schema
|
|
90
|
+
* @param {string} schemaPath - Path to schema SQL file
|
|
91
|
+
* @returns {Promise<void>}
|
|
92
|
+
*/
|
|
93
|
+
async initializeSchema(schemaPath) {
|
|
94
|
+
const schema = readFileSync(schemaPath, 'utf-8');
|
|
95
|
+
await this.query(schema);
|
|
96
|
+
|
|
97
|
+
// Initialize default branch if it doesn't exist
|
|
98
|
+
const result = await this.query(
|
|
99
|
+
'SELECT name FROM branches WHERE name = $1',
|
|
100
|
+
['main']
|
|
101
|
+
);
|
|
102
|
+
|
|
103
|
+
if (result.rows.length === 0) {
|
|
104
|
+
// Create initial commit
|
|
105
|
+
const commitResult = await this.query(
|
|
106
|
+
`INSERT INTO commits (message, author)
|
|
107
|
+
VALUES ($1, $2)
|
|
108
|
+
RETURNING id`,
|
|
109
|
+
['Initial commit', 'system']
|
|
110
|
+
);
|
|
111
|
+
|
|
112
|
+
// Create main branch pointing to initial commit
|
|
113
|
+
await this.query(
|
|
114
|
+
'INSERT INTO branches (name, commit_id) VALUES ($1, $2)',
|
|
115
|
+
['main', commitResult.rows[0].id]
|
|
116
|
+
);
|
|
117
|
+
}
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
/**
|
|
121
|
+
* Close all connections
|
|
122
|
+
* @returns {Promise<void>}
|
|
123
|
+
*/
|
|
124
|
+
async close() {
|
|
125
|
+
await this.sql.end();
|
|
126
|
+
}
|
|
127
|
+
}
|
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
-- Git-like Version Control Schema for PostgreSQL
-- NOTE(review): gen_random_uuid() is built in on PostgreSQL 13+; older
-- servers need the pgcrypto extension — confirm minimum supported version.

-- Content-addressable blob storage
-- Each unique file content is stored once, identified by its SHA-256 hash
CREATE TABLE IF NOT EXISTS blobs (
    hash TEXT PRIMARY KEY,
    content TEXT NOT NULL,
    size INTEGER NOT NULL,  -- content size; INTEGER caps at ~2GB, fine for text notes
    created_at TIMESTAMP DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_blobs_created_at ON blobs(created_at);

-- Commits represent snapshots in time
-- Similar to Git commits, they have a parent and point to a tree
CREATE TABLE IF NOT EXISTS commits (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    parent_id UUID REFERENCES commits(id),  -- NULL for the root commit
    message TEXT NOT NULL,
    author TEXT NOT NULL,
    created_at TIMESTAMP DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_commits_parent_id ON commits(parent_id);
CREATE INDEX IF NOT EXISTS idx_commits_created_at ON commits(created_at);

-- Trees map file paths to blob hashes for each commit
-- This allows us to reconstruct the file system state at any commit
CREATE TABLE IF NOT EXISTS trees (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    commit_id UUID NOT NULL REFERENCES commits(id) ON DELETE CASCADE,
    path TEXT NOT NULL,
    blob_hash TEXT NOT NULL REFERENCES blobs(hash),
    created_at TIMESTAMP DEFAULT NOW(),
    UNIQUE(commit_id, path)  -- one blob per path per commit
);

CREATE INDEX IF NOT EXISTS idx_trees_commit_id ON trees(commit_id);
CREATE INDEX IF NOT EXISTS idx_trees_path ON trees(path);
CREATE INDEX IF NOT EXISTS idx_trees_blob_hash ON trees(blob_hash);

-- Branches are named pointers to commits
-- Similar to Git branches, they move as new commits are made
CREATE TABLE IF NOT EXISTS branches (
    name TEXT PRIMARY KEY,
    commit_id UUID NOT NULL REFERENCES commits(id),
    created_at TIMESTAMP DEFAULT NOW(),
    updated_at TIMESTAMP DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_branches_commit_id ON branches(commit_id);

-- Media references (for tracking S3/R2 uploaded files)
-- Links media files to markdown content for cleanup and tracking
CREATE TABLE IF NOT EXISTS media (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    url TEXT NOT NULL UNIQUE,
    type TEXT NOT NULL CHECK (type IN ('image', 'video')),
    s3_key TEXT NOT NULL,
    blob_hash TEXT REFERENCES blobs(hash),  -- nullable: media may be unreferenced by any note
    created_at TIMESTAMP DEFAULT NOW(),
    updated_at TIMESTAMP DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_media_blob_hash ON media(blob_hash);
CREATE INDEX IF NOT EXISTS idx_media_type ON media(type);

-- Full-text search index for markdown content
-- Enables fast search across all note content
CREATE TABLE IF NOT EXISTS search_index (
    blob_hash TEXT PRIMARY KEY REFERENCES blobs(hash) ON DELETE CASCADE,
    content_tsvector TSVECTOR NOT NULL,
    created_at TIMESTAMP DEFAULT NOW(),
    updated_at TIMESTAMP DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_search_content ON search_index USING GIN(content_tsvector);

-- Migration: Add timestamp columns to existing tables
-- These statements handle adding columns to tables that already exist
-- (i.e. databases created by a pre-2.x schema without timestamps).

-- Add created_at to trees if it doesn't exist
DO $$
BEGIN
    IF NOT EXISTS (
        SELECT 1 FROM information_schema.columns
        WHERE table_name = 'trees' AND column_name = 'created_at'
    ) THEN
        ALTER TABLE trees ADD COLUMN created_at TIMESTAMP DEFAULT NOW();
        UPDATE trees SET created_at = NOW() WHERE created_at IS NULL;
        CREATE INDEX IF NOT EXISTS idx_trees_created_at ON trees(created_at);
    END IF;
END $$;

-- Add created_at to branches if it doesn't exist
DO $$
BEGIN
    IF NOT EXISTS (
        SELECT 1 FROM information_schema.columns
        WHERE table_name = 'branches' AND column_name = 'created_at'
    ) THEN
        ALTER TABLE branches ADD COLUMN created_at TIMESTAMP DEFAULT NOW();
        UPDATE branches SET created_at = NOW() WHERE created_at IS NULL;
    END IF;
END $$;

-- Create index on branches.updated_at (this column should exist in new or old schemas)
CREATE INDEX IF NOT EXISTS idx_branches_updated_at ON branches(updated_at);

-- Add updated_at to media if it doesn't exist
DO $$
BEGIN
    IF NOT EXISTS (
        SELECT 1 FROM information_schema.columns
        WHERE table_name = 'media' AND column_name = 'updated_at'
    ) THEN
        ALTER TABLE media ADD COLUMN updated_at TIMESTAMP DEFAULT NOW();
        UPDATE media SET updated_at = created_at WHERE updated_at IS NULL;
    END IF;
END $$;

-- Create index on media.created_at (this column should exist in new or old schemas)
CREATE INDEX IF NOT EXISTS idx_media_created_at ON media(created_at);

-- Add created_at and updated_at to search_index if they don't exist
DO $$
BEGIN
    IF NOT EXISTS (
        SELECT 1 FROM information_schema.columns
        WHERE table_name = 'search_index' AND column_name = 'created_at'
    ) THEN
        ALTER TABLE search_index ADD COLUMN created_at TIMESTAMP DEFAULT NOW();
        UPDATE search_index SET created_at = NOW() WHERE created_at IS NULL;
    END IF;

    IF NOT EXISTS (
        SELECT 1 FROM information_schema.columns
        WHERE table_name = 'search_index' AND column_name = 'updated_at'
    ) THEN
        ALTER TABLE search_index ADD COLUMN updated_at TIMESTAMP DEFAULT NOW();
        UPDATE search_index SET updated_at = NOW() WHERE updated_at IS NULL;
        CREATE INDEX IF NOT EXISTS idx_search_updated_at ON search_index(updated_at);
    END IF;
END $$;
package/lib/backend/github.js
CHANGED
|
@@ -3,9 +3,9 @@
|
|
|
3
3
|
* Handles all GitHub API operations for note management
|
|
4
4
|
*/
|
|
5
5
|
|
|
6
|
-
|
|
6
|
+
import { Octokit } from '@octokit/rest';
|
|
7
7
|
|
|
8
|
-
class GitHubClient {
|
|
8
|
+
export class GitHubClient {
|
|
9
9
|
constructor({ token, owner, repo, branch = 'main' }) {
|
|
10
10
|
this.octokit = new Octokit({ auth: token });
|
|
11
11
|
this.owner = owner;
|
|
@@ -314,5 +314,3 @@ class GitHubClient {
|
|
|
314
314
|
return root;
|
|
315
315
|
}
|
|
316
316
|
}
|
|
317
|
-
|
|
318
|
-
module.exports = { GitHubClient };
|
package/lib/backend/index.js
CHANGED
|
@@ -4,22 +4,36 @@
|
|
|
4
4
|
* Creates an Express router with all note-taking API endpoints
|
|
5
5
|
*/
|
|
6
6
|
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
7
|
+
import express from 'express';
|
|
8
|
+
import path from 'path';
|
|
9
|
+
import { fileURLToPath } from 'url';
|
|
10
|
+
import { StorageClient } from './storage.js';
|
|
11
|
+
import { MarkdownRenderer } from './markdown.js';
|
|
12
|
+
import { DatabaseConnection } from './db/connection.js';
|
|
13
|
+
import { VersionControlClient } from './version-control.js';
|
|
14
|
+
import notesRoutes from './routes/notes.js';
|
|
15
|
+
import uploadRoutes from './routes/upload.js';
|
|
16
|
+
import searchRoutes from './routes/search.js';
|
|
17
|
+
|
|
18
|
+
const __filename = fileURLToPath(import.meta.url);
|
|
19
|
+
const __dirname = path.dirname(__filename);
|
|
14
20
|
|
|
15
21
|
/**
|
|
16
22
|
* Creates a configured notes router
|
|
17
23
|
* @param {Object} config - Configuration object
|
|
18
|
-
* @param {Object} config.
|
|
19
|
-
* @param {string} config.
|
|
20
|
-
* @param {
|
|
21
|
-
* @param {string} config.
|
|
22
|
-
* @param {string}
|
|
24
|
+
* @param {Object} [config.database] - Database configuration (recommended)
|
|
25
|
+
* @param {string} [config.database.host] - PostgreSQL host
|
|
26
|
+
* @param {number} [config.database.port] - PostgreSQL port
|
|
27
|
+
* @param {string} config.database.database - Database name
|
|
28
|
+
* @param {string} config.database.user - Database user
|
|
29
|
+
* @param {string} config.database.password - Database password
|
|
30
|
+
* @param {string} [config.database.branch='main'] - Version control branch
|
|
31
|
+
* @param {string} [config.database.author='user'] - Default commit author
|
|
32
|
+
* @param {Object} [config.github] - GitHub configuration (legacy)
|
|
33
|
+
* @param {string} [config.github.token] - GitHub personal access token
|
|
34
|
+
* @param {string} [config.github.owner] - Repository owner
|
|
35
|
+
* @param {string} [config.github.repo] - Repository name
|
|
36
|
+
* @param {string} [config.github.branch='main'] - Repository branch
|
|
23
37
|
* @param {Object} config.storage - Storage configuration (R2 or S3)
|
|
24
38
|
* @param {string} config.storage.type - 'r2' or 's3'
|
|
25
39
|
* @param {string} config.storage.accountId - Account ID (R2) or region (S3)
|
|
@@ -29,26 +43,55 @@ const searchRoutes = require('./routes/search');
|
|
|
29
43
|
* @param {string} config.storage.publicUrl - Public URL for accessing files
|
|
30
44
|
* @param {Object} [config.options] - Optional configuration
|
|
31
45
|
* @param {boolean} [config.options.autoUpdateReadme=true] - Auto-update README on note save
|
|
32
|
-
* @
|
|
46
|
+
* @param {boolean} [config.options.autoInitSchema=true] - Auto-initialize database schema
|
|
47
|
+
* @returns {Promise<express.Router>} Configured Express router
|
|
33
48
|
*/
|
|
34
|
-
function createNotesRouter(config) {
|
|
35
|
-
if (!config.github || !config.github.token || !config.github.owner || !config.github.repo) {
|
|
36
|
-
throw new Error('GitHub configuration is required: token, owner, and repo');
|
|
37
|
-
}
|
|
38
|
-
|
|
49
|
+
export async function createNotesRouter(config) {
|
|
39
50
|
if (!config.storage) {
|
|
40
51
|
throw new Error('Storage configuration is required');
|
|
41
52
|
}
|
|
42
53
|
|
|
43
54
|
const router = express.Router();
|
|
44
55
|
|
|
45
|
-
//
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
56
|
+
// Add JSON body parser middleware to the router
|
|
57
|
+
router.use(express.json());
|
|
58
|
+
|
|
59
|
+
let versionControlClient;
|
|
60
|
+
|
|
61
|
+
// Choose between database-backed version control or GitHub
|
|
62
|
+
if (config.database) {
|
|
63
|
+
// Use PostgreSQL-backed version control
|
|
64
|
+
const db = new DatabaseConnection(config.database);
|
|
65
|
+
|
|
66
|
+
// Initialize schema if needed
|
|
67
|
+
const options = config.options || {};
|
|
68
|
+
if (options.autoInitSchema !== false) {
|
|
69
|
+
const schemaPath = path.join(__dirname, 'db', 'schema.sql');
|
|
70
|
+
await db.initializeSchema(schemaPath);
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
versionControlClient = new VersionControlClient(
|
|
74
|
+
db,
|
|
75
|
+
config.database.branch || 'main',
|
|
76
|
+
config.database.author || 'user'
|
|
77
|
+
);
|
|
78
|
+
} else if (config.github) {
|
|
79
|
+
// Legacy: Use GitHub for version control
|
|
80
|
+
if (!config.github.token || !config.github.owner || !config.github.repo) {
|
|
81
|
+
throw new Error('GitHub configuration requires: token, owner, and repo');
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
// Lazy load GitHubClient to avoid requiring @octokit/rest when using database mode
|
|
85
|
+
const { GitHubClient } = await import('./github.js');
|
|
86
|
+
versionControlClient = new GitHubClient({
|
|
87
|
+
token: config.github.token,
|
|
88
|
+
owner: config.github.owner,
|
|
89
|
+
repo: config.github.repo,
|
|
90
|
+
branch: config.github.branch || 'main'
|
|
91
|
+
});
|
|
92
|
+
} else {
|
|
93
|
+
throw new Error('Either database or github configuration is required');
|
|
94
|
+
}
|
|
52
95
|
|
|
53
96
|
const storageClient = new StorageClient(config.storage);
|
|
54
97
|
const markdownRenderer = new MarkdownRenderer();
|
|
@@ -57,7 +100,7 @@ function createNotesRouter(config) {
|
|
|
57
100
|
// Middleware to attach clients to request
|
|
58
101
|
router.use((req, res, next) => {
|
|
59
102
|
req.notesEngine = {
|
|
60
|
-
githubClient,
|
|
103
|
+
githubClient: versionControlClient, // Keep name for backward compatibility
|
|
61
104
|
storageClient,
|
|
62
105
|
markdownRenderer,
|
|
63
106
|
options
|
|
@@ -72,5 +115,3 @@ function createNotesRouter(config) {
|
|
|
72
115
|
|
|
73
116
|
return router;
|
|
74
117
|
}
|
|
75
|
-
|
|
76
|
-
module.exports = { createNotesRouter };
|
package/lib/backend/markdown.js
CHANGED
|
@@ -3,11 +3,11 @@
|
|
|
3
3
|
* Renders markdown to HTML with syntax highlighting
|
|
4
4
|
*/
|
|
5
5
|
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
6
|
+
import { marked } from 'marked';
|
|
7
|
+
import { markedHighlight } from 'marked-highlight';
|
|
8
|
+
import hljs from 'highlight.js';
|
|
9
9
|
|
|
10
|
-
class MarkdownRenderer {
|
|
10
|
+
export class MarkdownRenderer {
|
|
11
11
|
constructor() {
|
|
12
12
|
// Configure marked with syntax highlighting
|
|
13
13
|
marked.use(markedHighlight({
|
|
@@ -58,5 +58,3 @@ class MarkdownRenderer {
|
|
|
58
58
|
return marked(markdown);
|
|
59
59
|
}
|
|
60
60
|
}
|
|
61
|
-
|
|
62
|
-
module.exports = { MarkdownRenderer };
|
|
@@ -3,15 +3,18 @@
|
|
|
3
3
|
* Handles note CRUD operations
|
|
4
4
|
*/
|
|
5
5
|
|
|
6
|
-
|
|
6
|
+
import express from 'express';
|
|
7
7
|
const router = express.Router();
|
|
8
8
|
|
|
9
|
+
// JSON body parser middleware
|
|
10
|
+
router.use(express.json());
|
|
11
|
+
|
|
9
12
|
// Get file structure
|
|
10
13
|
router.get('/structure', async (req, res) => {
|
|
11
14
|
try {
|
|
12
15
|
const { githubClient } = req.notesEngine;
|
|
13
16
|
const structure = await githubClient.getFileStructure();
|
|
14
|
-
res.json(
|
|
17
|
+
res.json(structure);
|
|
15
18
|
} catch (error) {
|
|
16
19
|
console.error('Error fetching structure:', error);
|
|
17
20
|
res.status(500).json({ error: 'Failed to fetch file structure' });
|
|
@@ -40,6 +43,33 @@ router.get('/note', async (req, res) => {
|
|
|
40
43
|
}
|
|
41
44
|
});
|
|
42
45
|
|
|
46
|
+
// Get file history (commits)
// Returns the commit history for a single note path. Only the
// database-backed client implements getFileHistory; the legacy GitHub
// client gets a 501 so callers can feature-detect.
router.get('/history', async (req, res) => {
  try {
    const notePath = req.query.path;
    if (!notePath) {
      return res.status(400).json({ error: 'Path is required' });
    }

    const client = req.notesEngine.githubClient;

    // Feature-detect: GitHub mode doesn't support file history yet
    if (typeof client.getFileHistory !== 'function') {
      return res.status(501).json({
        error: 'File history not supported with GitHub backend',
        message: 'Use database backend for file history support'
      });
    }

    const history = await client.getFileHistory(notePath);
    res.json(history);
  } catch (error) {
    console.error('Error fetching file history:', error);
    res.status(500).json({ error: 'Failed to fetch file history' });
  }
});
|
|
72
|
+
|
|
43
73
|
// Save note
|
|
44
74
|
router.post('/note', async (req, res) => {
|
|
45
75
|
try {
|
|
@@ -83,7 +113,8 @@ router.post('/note', async (req, res) => {
|
|
|
83
113
|
// Delete note
|
|
84
114
|
router.delete('/note', async (req, res) => {
|
|
85
115
|
try {
|
|
86
|
-
|
|
116
|
+
// Support both query params and body for flexibility
|
|
117
|
+
const path = req.query.path || req.body?.path;
|
|
87
118
|
|
|
88
119
|
if (!path) {
|
|
89
120
|
return res.status(400).json({ error: 'Path is required' });
|
|
@@ -194,4 +225,4 @@ async function updateReadme(githubClient) {
|
|
|
194
225
|
await githubClient.saveFile('README.md', readmeContent, sha);
|
|
195
226
|
}
|
|
196
227
|
|
|
197
|
-
|
|
228
|
+
export default router;
|
|
@@ -3,7 +3,7 @@
|
|
|
3
3
|
* Handles note searching
|
|
4
4
|
*/
|
|
5
5
|
|
|
6
|
-
|
|
6
|
+
import express from 'express';
|
|
7
7
|
const router = express.Router();
|
|
8
8
|
|
|
9
9
|
// Search notes
|
|
@@ -25,4 +25,4 @@ router.get('/search', async (req, res) => {
|
|
|
25
25
|
}
|
|
26
26
|
});
|
|
27
27
|
|
|
28
|
-
|
|
28
|
+
export default router;
|
|
@@ -3,8 +3,8 @@
|
|
|
3
3
|
* Handles image and video uploads
|
|
4
4
|
*/
|
|
5
5
|
|
|
6
|
-
|
|
7
|
-
|
|
6
|
+
import express from 'express';
|
|
7
|
+
import fileUpload from 'express-fileupload';
|
|
8
8
|
const router = express.Router();
|
|
9
9
|
|
|
10
10
|
// File upload middleware
|
|
@@ -27,7 +27,16 @@ router.post('/upload-image', async (req, res) => {
|
|
|
27
27
|
folder
|
|
28
28
|
);
|
|
29
29
|
|
|
30
|
-
|
|
30
|
+
// Extract path and filename from URL
|
|
31
|
+
const path = imageUrl.replace(storageClient.publicUrl + '/', '');
|
|
32
|
+
const filename = path.split('/').pop();
|
|
33
|
+
|
|
34
|
+
res.json({
|
|
35
|
+
success: true,
|
|
36
|
+
url: imageUrl,
|
|
37
|
+
path,
|
|
38
|
+
filename
|
|
39
|
+
});
|
|
31
40
|
} catch (error) {
|
|
32
41
|
console.error('Error uploading image:', error);
|
|
33
42
|
res.status(500).json({ error: 'Failed to upload image' });
|
|
@@ -50,7 +59,16 @@ router.post('/upload-image-base64', async (req, res) => {
|
|
|
50
59
|
const { storageClient } = req.notesEngine;
|
|
51
60
|
const imageUrl = await storageClient.uploadImage(buffer, filename, folder);
|
|
52
61
|
|
|
53
|
-
|
|
62
|
+
// Extract path and filename from URL
|
|
63
|
+
const path = imageUrl.replace(storageClient.publicUrl + '/', '');
|
|
64
|
+
const uploadedFilename = path.split('/').pop();
|
|
65
|
+
|
|
66
|
+
res.json({
|
|
67
|
+
success: true,
|
|
68
|
+
url: imageUrl,
|
|
69
|
+
path,
|
|
70
|
+
filename: uploadedFilename
|
|
71
|
+
});
|
|
54
72
|
} catch (error) {
|
|
55
73
|
console.error('Error uploading image:', error);
|
|
56
74
|
res.status(500).json({ error: 'Failed to upload image' });
|
|
@@ -74,7 +92,16 @@ router.post('/upload-video', async (req, res) => {
|
|
|
74
92
|
folder
|
|
75
93
|
);
|
|
76
94
|
|
|
77
|
-
|
|
95
|
+
// Extract path and filename from URL
|
|
96
|
+
const path = videoUrl.replace(storageClient.publicUrl + '/', '');
|
|
97
|
+
const filename = path.split('/').pop();
|
|
98
|
+
|
|
99
|
+
res.json({
|
|
100
|
+
success: true,
|
|
101
|
+
url: videoUrl,
|
|
102
|
+
path,
|
|
103
|
+
filename
|
|
104
|
+
});
|
|
78
105
|
} catch (error) {
|
|
79
106
|
console.error('Error uploading video:', error);
|
|
80
107
|
res.status(500).json({ error: 'Failed to upload video' });
|
|
@@ -119,4 +146,4 @@ router.delete('/video', async (req, res) => {
|
|
|
119
146
|
}
|
|
120
147
|
});
|
|
121
148
|
|
|
122
|
-
|
|
149
|
+
export default router;
|
package/lib/backend/storage.js
CHANGED
|
@@ -3,9 +3,9 @@
|
|
|
3
3
|
* Handles file uploads to R2 or S3
|
|
4
4
|
*/
|
|
5
5
|
|
|
6
|
-
|
|
6
|
+
import { S3Client, PutObjectCommand, DeleteObjectCommand } from '@aws-sdk/client-s3';
|
|
7
7
|
|
|
8
|
-
class StorageClient {
|
|
8
|
+
export class StorageClient {
|
|
9
9
|
constructor(config) {
|
|
10
10
|
this.config = config;
|
|
11
11
|
this.publicUrl = config.publicUrl;
|
|
@@ -117,5 +117,3 @@ class StorageClient {
|
|
|
117
117
|
return contentTypes[ext] || 'application/octet-stream';
|
|
118
118
|
}
|
|
119
119
|
}
|
|
120
|
-
|
|
121
|
-
module.exports = { StorageClient };
|