@tenjuu99/blog 0.1.2 → 0.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -29,11 +29,16 @@ jobs:
29
29
  - name: Checkout
30
30
  uses: actions/checkout@master
31
31
 
32
+ - name: Use nodejs
33
+ uses: actions/setup-node@v4
34
+ with:
35
+ node-version: '22.x'
36
+
32
37
  - name: npm install
33
38
  run: npm install --omit=dev
34
39
 
35
40
  - name: Build
36
- run: cp .env.prod.sample .env && npm run generate
41
+ run: cp blog.json.prod blog.json && npm run generate
37
42
 
38
43
  - name: Setup Pages
39
44
  uses: actions/configure-pages@v4
package/.node-version ADDED
@@ -0,0 +1 @@
1
+ 22.7.0
package/bin/generate ADDED
@@ -0,0 +1,4 @@
1
+ #!/usr/bin/env node
2
+
3
+ import generate from '../lib/generate.js'
4
+ generate()
package/bin/new ADDED
@@ -0,0 +1,28 @@
1
+ #!/usr/bin/env bash
2
+
3
+ mkdir -p "$(pwd)/src/pages"
4
+ echo "create src/pages"
5
+ mkdir -p "$(pwd)/src/template"
6
+ echo "create src/template"
7
+ mkdir -p "$(pwd)/src/css"
8
+ echo "create src/css"
9
+ mkdir -p "$(pwd)/src/image"
10
+ echo "create src/image"
11
+
12
+ mkdir "$(pwd)/.cache"
13
+ echo "{}" > "$(pwd)/.cache/index.json"
14
+
15
+ echo 'dist/*
16
+ node_modules/
17
+ .env
18
+ .cache
19
+ ' >> .gitignore
20
+
21
+ echo '{
22
+ "site_name": "rewrite here",
23
+ "url_base": "http://localhost:8000",
24
+ "src_dir": "src",
25
+ "dist_dir": "dist",
26
+ "distribute_raw": "image"
27
+ }' > "$(pwd)/blog.json"
28
+ echo "create blog.json"
package/bin/server ADDED
@@ -0,0 +1,26 @@
1
+ #!/usr/bin/env node
2
+
3
+ import server from '../lib/server.js'
4
+ import { srcDir, pageDir } from '../lib/dir.js'
5
+ import { watchers, watch } from '../lib/watcher.js'
6
+ import generate from '../lib/generate.js'
7
+
8
+ watchers.push({
9
+ paths: srcDir,
10
+ watchOptions: {
11
+ ignored: pageDir
12
+ },
13
+ callback: generate
14
+ })
15
+ watchers.push({
16
+ paths: pageDir,
17
+ callback: generate,
18
+ watchOptions: {
19
+ ignoreInitial: true
20
+ },
21
+ event: ['change', 'unlink', 'add']
22
+ })
23
+ watch()
24
+ generate()
25
+
26
+ server().listen(process.env.PORT || 8000)
package/blog.json ADDED
@@ -0,0 +1,8 @@
1
+ {
2
+ "site_name": "test",
3
+ "url_base": "http://localhost:8000",
4
+ "src_dir": "src-sample",
5
+ "dist_dir": "dist",
6
+ "distribute_raw": "image",
7
+ "helper": "helper/index.js"
8
+ }
package/blog.json.prod ADDED
@@ -0,0 +1,9 @@
1
+ {
2
+ "site_name": "blog/sample",
3
+ "url_base": "https://amashigeseiji.github.io/tenjuu99-blog",
4
+ "relative_path": "/tenjuu99-blog",
5
+ "src_dir": "src-sample",
6
+ "dist_dir": "dist",
7
+ "distribute_raw": "image",
8
+ "helper": "helper/index.js"
9
+ }
@@ -1,15 +1,9 @@
1
1
  "use strict"
2
2
  import fs from "node:fs/promises";
3
- import { applyCss, watchCss } from './cssGenerator.js'
4
- import {
5
- replaceIfFilter,
6
- replaceScriptFilter,
7
- replaceVariablesFilter,
8
- includeFilter
9
- } from './filter.js'
10
- import { marked } from "marked";
3
+ import applyCss from './cssGenerator.js'
4
+ import { includeFilter } from './filter.js'
11
5
  import { templateDir, cssDir } from './dir.js'
12
- import chokidar from 'chokidar'
6
+ import { watchers } from './watcher.js'
13
7
 
14
8
  let templates = {}
15
9
 
@@ -24,26 +18,9 @@ const applyTemplate = async (name = 'default.html') => {
24
18
  return templateContent
25
19
  }
26
20
 
27
- const render = async (templateName, data) => {
28
- let template = await applyTemplate(templateName)
29
- template = replaceIfFilter(template, data)
30
- template = await replaceScriptFilter(template, data)
21
+ watchers.push({
22
+ paths: [cssDir, templateDir],
23
+ callback: () => { templates = {} }
24
+ })
31
25
 
32
- let markdown = data.markdown
33
- markdown = await includeFilter(markdown)
34
- markdown = await replaceIfFilter(markdown, data)
35
- markdown = await replaceScriptFilter(markdown, data)
36
- data.markdown = data.__filetype === 'md' ? marked.parse(markdown) : markdown
37
-
38
-
39
- return replaceVariablesFilter(template, data)
40
- }
41
-
42
- const watchTemplate = () => {
43
- chokidar.watch([cssDir, templateDir]).on('change', () => {
44
- templates = {}
45
- })
46
- watchCss()
47
- }
48
-
49
- export { render, watchTemplate }
26
+ export default applyTemplate
package/lib/config.js ADDED
@@ -0,0 +1,32 @@
1
+ import { readFileSync, existsSync } from 'node:fs'
2
+
3
+ const rootDir = process.cwd()
4
+ const config = {
5
+ "site_name": "default",
6
+ "url_base": "http://localhost:8000",
7
+ "src_dir": "src",
8
+ "dist_dir": "dist",
9
+ "distribute_raw": "image",
10
+ "relative_path": "",
11
+ "helper": ""
12
+ }
13
+ try {
14
+ const file = rootDir + '/blog.json'
15
+ if (existsSync(file)) {
16
+ const configOverride = JSON.parse(readFileSync(file, 'utf8'))
17
+ for (const item in configOverride) {
18
+ config[item] = configOverride[item]
19
+ }
20
+ }
21
+ const keys = Object.keys(process.env)
22
+ for (const item in config) {
23
+ const upper = item.toUpperCase()
24
+ if (keys.includes(upper)) {
25
+ config[item] = process.env[upper]
26
+ }
27
+ }
28
+ } catch (e) {
29
+ console.log(e)
30
+ }
31
+
32
+ export default config
@@ -4,7 +4,9 @@ import { minifyCss } from './minify.js'
4
4
  import { createHash } from 'crypto'
5
5
  import path from 'path'
6
6
  import { distDir as distRoot, cssDir } from './dir.js'
7
- import chokidar from 'chokidar'
7
+ import { watchers } from './watcher.js'
8
+ import { styleText } from 'node:util'
9
+ import config from './config.js'
8
10
 
9
11
  let cacheBuster = {}
10
12
  const cacheBusterQuery = 't'
@@ -29,16 +31,16 @@ const cssGenerator = async (src, dist) => {
29
31
  css = minifyCss(css)
30
32
  cacheBuster[key] = createHash('md5').update(css).digest('hex')
31
33
 
32
- return await fs.mkdir(`${distRoot}${path.dirname(dist)}`, { recursive: true }).then(async () => {
33
- await fs.writeFile(`${distRoot}${dist}`, css)
34
- console.log(`generate ${src} => ${distRoot}${dist}`)
34
+ return await fs.mkdir(`${distRoot}${path.dirname(dist)}`, { recursive: true }).then(() => {
35
+ fs.writeFile(`${distRoot}${dist}`, css)
36
+ console.log(styleText('green', '[generate]'), `${src} => ${distRoot}${dist}`)
35
37
  return cacheBuster[key]
36
38
  })
37
39
  }
38
40
 
39
41
  /**
40
42
  * 次のような記述を想定している。
41
- * <link rel="stylesheet" href="${/css/layout.css:/css/base.css,/css/page.css}">
43
+ * <link rel="stylesheet" href="${/css/layout.css<<base.css,page.css}">
42
44
  * href の記述は ${dist:src} の関係になっている。
43
45
  *
44
46
  * これを
@@ -54,15 +56,14 @@ const applyCss = async (text) => {
54
56
  })
55
57
  for (const cssDist of target) {
56
58
  const cacheBuster = await cssGenerator(cssDist.src, cssDist.dist)
57
- text = text.replace(cssDist.matched, `${process.env.RELATIVE_PATH || ''}${cssDist.dist}?${cacheBusterQuery}=${cacheBuster}`)
59
+ text = text.replace(cssDist.matched, `${config.relative_path}${cssDist.dist}?${cacheBusterQuery}=${cacheBuster}`)
58
60
  }
59
61
  return text
60
62
  }
61
63
 
62
- const watchCss = () => {
63
- chokidar.watch([cssDir]).on('change', (path) => {
64
- cacheBuster = {}
65
- })
66
- }
64
+ watchers.push({
65
+ paths: cssDir,
66
+ callback: () => { cacheBuster = {} }
67
+ })
67
68
 
68
- export { applyCss, watchCss }
69
+ export default applyCss
package/lib/dir.js CHANGED
@@ -1,6 +1,9 @@
1
+ import config from './config.js'
2
+
1
3
  const rootDir = process.cwd()
2
- const srcDir = `${rootDir}/${process.env.SRC_DIR}`
3
- const distDir = `${rootDir}/${process.env.DIST_DIR}`
4
+ const srcDir = `${rootDir}/${config.src_dir}`
5
+ const distDir = `${rootDir}/${config.dist_dir}`
6
+ const pageDir = `${srcDir}/pages`
4
7
  const templateDir = `${srcDir}/template`
5
8
  const cssDir = `${srcDir}/css`
6
9
  const cacheDir = `${rootDir}/.cache`
@@ -9,6 +12,7 @@ export {
9
12
  rootDir,
10
13
  srcDir,
11
14
  distDir,
15
+ pageDir,
12
16
  templateDir,
13
17
  cssDir,
14
18
  cacheDir,
package/lib/distribute.js CHANGED
@@ -2,13 +2,15 @@
2
2
  import fs from "node:fs/promises";
3
3
  import path from 'path'
4
4
  import { minifyHtml } from './minify.js'
5
- import { render, watchTemplate } from './applyTemplate.js'
5
+ import render from './render.js'
6
+ import { styleText } from 'node:util'
7
+ import config from './config.js'
6
8
 
7
- const distribute = async (data, srcDir, distDir) => {
8
- if (data['__deleted']) {
9
- for (const i in data['__deleted']) {
10
- console.log(`unlink ${distDir}${data['__deleted'][i].__output}`)
11
- fs.unlink(`${distDir}${data['__deleted'][i].__output}`)
9
+ const distribute = async (data, deleted, srcDir, distDir) => {
10
+ if (deleted) {
11
+ for (const obj of deleted) {
12
+ console.log(styleText('red', '[unlink]'), `${distDir}${obj.__output}`)
13
+ fs.unlink(`${distDir}${obj.__output}`)
12
14
  }
13
15
  delete data['__deleted']
14
16
  }
@@ -18,18 +20,19 @@ const distribute = async (data, srcDir, distDir) => {
18
20
  let writeTo = `${distDir}${data[name].__output}`
19
21
  fs.mkdir(path.dirname(writeTo), { recursive: true}).then(() => {
20
22
  fs.writeFile(writeTo, minifyHtml(rendered))
21
- console.log(`generate ${writeTo}`)
23
+ console.log(styleText('green', '[generate]'), writeTo)
22
24
  })
23
25
  }
24
- const distributeRaw = process.env.DISTRIBUTE_RAW.split(',')
26
+ const distributeRaw = config.distribute_raw.split(',')
25
27
  distributeRaw.forEach((copyDir) => {
26
28
  fs.readdir(`${srcDir}/${copyDir}/`).then(async files => {
27
29
  await fs.stat(`${distDir}/${copyDir}/`).catch(async err => await fs.mkdir(`${distDir}/${copyDir}/`))
28
30
  files.forEach(file => {
29
31
  fs.copyFile(`${srcDir}/${copyDir}/${file}`, `${distDir}/${copyDir}/${file}`)
32
+ console.log(styleText('green', '[copy]'), `${srcDir}/${copyDir}/${file} => ${distDir}/${copyDir}/${file}`)
30
33
  })
31
34
  })
32
35
  })
33
36
  }
34
37
 
35
- export { distribute, watchTemplate }
38
+ export default distribute
package/lib/filter.js CHANGED
@@ -1,6 +1,8 @@
1
1
  import * as helper from '../helper/index.js'
2
2
  import includeFilter from './includeFilter.js'
3
3
  import { srcDir } from './dir.js'
4
+ import config from './config.js'
5
+ console.log(config)
4
6
 
5
7
  /**
6
8
  * @param {string} text
@@ -129,8 +131,8 @@ const replaceScriptFilter = async (text, variables) => {
129
131
  for (const script of scripts) {
130
132
  let helperMerged = {...helper}
131
133
  // env.HELPER が定義されていれば追加ヘルパーとして扱う
132
- if (process.env.HELPER) {
133
- const additional = await import(`${srcDir}/${process.env.HELPER}`)
134
+ if (config.helper) {
135
+ const additional = await import(`${srcDir}/${config.helper}`)
134
136
  helperMerged = Object.assign(helperMerged, additional)
135
137
  }
136
138
  let result = new Function('helper', 'variables', script.script)(helperMerged, variables)
package/lib/generate.js CHANGED
@@ -1,15 +1,16 @@
1
1
  "use strict"
2
- import { distribute } from './distribute.js'
3
- import { indexing } from './indexer.js'
2
+ import distribute from './distribute.js'
3
+ import { indexing, allData, deleted } from './indexer.js'
4
4
  import { srcDir, distDir } from './dir.js'
5
+ import { styleText } from 'node:util'
5
6
 
6
7
  const generate = async () => {
7
8
  const start = performance.now()
8
- const data = await indexing(srcDir + '/pages/')
9
+ await indexing()
9
10
 
10
- await distribute(data, srcDir, distDir)
11
+ await distribute(allData, deleted, srcDir, distDir)
11
12
  const end = performance.now()
12
- console.log('build: ' + (end - start) + "ms")
13
+ console.log(styleText('blue', '[build: ' + (end - start) + "ms]"))
13
14
  }
14
15
 
15
16
  export default generate
package/lib/indexer.js CHANGED
@@ -1,82 +1,45 @@
1
1
  "use strict"
2
- import fs from "node:fs/promises";
3
- import { cacheDir } from './dir.js'
4
-
5
- const parseMetaData = (markdown, filename) => {
6
- const regexp = new RegExp(/^(<!|-)--(?<variables>[\s\S]*?)--(-|>)/)
7
- const matched = markdown.match(regexp)
8
- const markdownReplaced = markdown.replace(regexp, '')
9
- const metaDataDefault = {
10
- name: filename,
11
- title: filename,
12
- url: `/${filename}`,
13
- description: '',
14
- og_description: '',
15
- published: '1970-01-01',
16
- index: true,
17
- noindex: false,
18
- lang: 'ja',
19
- site_name: process.env.SITE_NAME,
20
- url_base: process.env.URL_BASE,
21
- gtag_id: process.env.GTAG_ID,
22
- markdown: markdownReplaced,
23
- relative_path: process.env.RELATIVE_PATH || '',
24
- template: 'default.html',
25
- ext: 'html',
26
- __output: `/${filename}.html`
27
- }
28
- if (!matched) {
29
- return metaDataDefault
30
- }
31
- const metaData = Object.fromEntries(
32
- matched.groups.variables.split('\n').filter(line => line.includes(':'))
33
- .map(line => {
34
- const index = line.indexOf(':')
35
- const key = line.slice(0, index)
36
- let value = line.slice(index + 1).trim()
37
- if (value === 'true' || value === 'false') {
38
- value = JSON.parse(value)
39
- }
40
- return [key, value]
41
- })
42
- )
43
- const metaDataMerged = Object.assign(metaDataDefault, metaData)
44
- if (!metaDataMerged.description) {
45
- metaDataMerged.description = markdownReplaced.replace(/(<([^>]+)>)/gi, '').slice(0, 200).replaceAll("\n", '') + '...'
46
- }
47
- if (!metaDataMerged.og_description) {
48
- metaDataMerged.og_description = metaDataMerged.og_description
49
- }
50
- metaDataMerged['__output'] = filename === 'index' ? '/index.html' : `${metaDataMerged.url}.${metaDataMerged.ext}`
51
-
52
- return metaDataMerged
53
- }
2
+ import { writeFile, readFile } from "node:fs/promises";
3
+ import { readdirSync, existsSync, mkdirSync } from "node:fs";
4
+ import { pageDir, cacheDir } from './dir.js'
5
+ import makePageData from './pageData.js'
54
6
 
55
7
  const indexFile = `${cacheDir}/index.json`
56
8
 
57
- const newIndex = []
58
- const allData = {}
9
+ let newIndex = []
10
+ let allData = {}
11
+ let deleted = []
59
12
 
60
- const indexing = async (targetDir) => {
61
- const targets = await fs.readdir(targetDir).then(files => {
62
- return files.filter(fileName => fileName.match('\.(md|html)$'))
13
+ const collect = (dir, files = {}, namePrefix = '') => {
14
+ const dirents = readdirSync(dir, { withFileTypes: true })
15
+ dirents.forEach((dirent) => {
16
+ if (dirent.isDirectory()) {
17
+ collect(`${dirent.path}/${dirent.name}`, files, namePrefix + dirent.name + '/')
18
+ } else {
19
+ if (dirent.name.match(/\.(md|html)$/)) {
20
+ const pageData = makePageData(`${namePrefix}${dirent.name}`)
21
+ allData[pageData.name] = pageData
22
+ const { name, url, __output } = pageData
23
+ newIndex.push({ name, url, __output })
24
+ }
25
+ }
63
26
  })
64
- for (const file of targets) {
65
- const markdownText = await fs.readFile(`${targetDir}/${file}`, 'utf8')
66
- const [target, ext] = file.split('.')
67
- const metaData = parseMetaData(markdownText, target)
68
- metaData.__filetype = ext
69
- allData[target] = metaData
70
- let { name, title, index, url, published, modified, __output } = metaData
71
- newIndex.push({ name, title, index, url, published, modified, __output })
27
+ }
28
+
29
+ const indexing = async () => {
30
+ newIndex = []
31
+ allData = {}
32
+ deleted = []
33
+ if (!existsSync(cacheDir)) {
34
+ mkdirSync(cacheDir)
72
35
  }
73
- await fs.writeFile(indexFile, JSON.stringify(newIndex))
36
+ const oldIndex = await readFile(indexFile, 'utf8').then(text => JSON.parse(text)).catch(error => [])
37
+
38
+ collect(pageDir)
39
+ writeFile(indexFile, JSON.stringify(newIndex))
74
40
 
75
41
  // 旧インデックスから差分を計算して削除対象をピックアップする
76
- const oldIndex = await fs.readFile(indexFile, 'utf8').then(text => JSON.parse(text)).catch(error => [])
77
- const deleted = oldIndex.filter(oi => !newIndex.map(ni => ni.__output).includes(oi.__output))
78
- allData['__deleted'] = deleted
79
- return allData
42
+ deleted = oldIndex.filter(oi => !newIndex.map(ni => ni.__output).includes(oi.__output))
80
43
  }
81
44
 
82
- export { indexing, allData }
45
+ export { indexing, allData, deleted }
@@ -0,0 +1,67 @@
1
+ "use strict"
2
+ import fs from "node:fs";
3
+ import { pageDir } from './dir.js'
4
+ import config from './config.js'
5
+
6
+ const load = (path) => {
7
+ return fs.readFileSync(path, 'utf8')
8
+ }
9
+
10
+ const makePageData = (filename) => {
11
+ const content = load(`${pageDir}/${filename}`)
12
+ const [name, ext] = filename.split('.')
13
+ return parse(content, name, ext)
14
+ }
15
+
16
+ const parse = (content, name, ext) => {
17
+ const regexp = new RegExp(/^(<!|-)--(?<variables>[\s\S]*?)--(-|>)/)
18
+ const matched = content.match(regexp)
19
+ const markdownReplaced = content.replace(regexp, '')
20
+ const metaDataDefault = {
21
+ name,
22
+ title: name,
23
+ url: `/${name}`,
24
+ description: '',
25
+ og_description: '',
26
+ published: '1970-01-01',
27
+ index: true,
28
+ noindex: false,
29
+ lang: 'ja',
30
+ site_name: config.site_name,
31
+ url_base: config.url_base,
32
+ gtag_id: config.gtag_id,
33
+ markdown: markdownReplaced,
34
+ relative_path: config.relative_path || '',
35
+ template: 'default.html',
36
+ ext: 'html',
37
+ __output: `/${name}.html`,
38
+ __filetype: ext,
39
+ }
40
+ if (!matched) {
41
+ return metaDataDefault
42
+ }
43
+ const metaData = Object.fromEntries(
44
+ matched.groups.variables.split('\n').filter(line => line.includes(':'))
45
+ .map(line => {
46
+ const index = line.indexOf(':')
47
+ const key = line.slice(0, index)
48
+ let value = line.slice(index + 1).trim()
49
+ if (value === 'true' || value === 'false') {
50
+ value = JSON.parse(value)
51
+ }
52
+ return [key, value]
53
+ })
54
+ )
55
+ const metaDataMerged = Object.assign(metaDataDefault, metaData)
56
+ if (!metaDataMerged.description) {
57
+ metaDataMerged.description = markdownReplaced.replace(/(<([^>]+)>)/gi, '').slice(0, 200).replaceAll("\n", '') + '...'
58
+ }
59
+ if (!metaDataMerged.og_description) {
60
+ metaDataMerged.og_description = metaDataMerged.og_description
61
+ }
62
+ metaDataMerged['__output'] = name === 'index' ? '/index.html' : `${metaDataMerged.url}.${metaDataMerged.ext}`
63
+
64
+ return metaDataMerged
65
+ }
66
+
67
+ export default makePageData
package/lib/render.js ADDED
@@ -0,0 +1,24 @@
1
+ import {
2
+ replaceIfFilter,
3
+ replaceScriptFilter,
4
+ replaceVariablesFilter,
5
+ includeFilter
6
+ } from './filter.js'
7
+ import { marked } from "marked";
8
+ import applyTemplate from './applyTemplate.js'
9
+
10
+ const render = async (templateName, data) => {
11
+ let template = await applyTemplate(templateName)
12
+ template = replaceIfFilter(template, data)
13
+ template = await replaceScriptFilter(template, data)
14
+
15
+ let markdown = data.markdown
16
+ markdown = await includeFilter(markdown)
17
+ markdown = await replaceIfFilter(markdown, data)
18
+ markdown = await replaceScriptFilter(markdown, data)
19
+ data.markdown = data.__filetype === 'md' ? marked.parse(markdown) : markdown
20
+
21
+ return replaceVariablesFilter(template, data)
22
+ }
23
+
24
+ export default render
package/lib/server.js ADDED
@@ -0,0 +1,64 @@
1
+ import http from 'http'
2
+ import url from 'url'
3
+ import fs from 'node:fs'
4
+ import { distDir } from './dir.js'
5
+ import { styleText } from 'node:util'
6
+
7
+ const contentType = (ext) => {
8
+ switch (ext) {
9
+ case 'html':
10
+ case 'css':
11
+ return `text/${ext}`
12
+ case 'js':
13
+ return 'text/javascript'
14
+ case 'jpeg':
15
+ case 'png':
16
+ case 'webp':
17
+ case 'avif':
18
+ return `image/${ext}`
19
+ case 'jpg':
20
+ return 'image/jpeg'
21
+ case 'svg':
22
+ return 'image/svg+xml'
23
+ case 'xml':
24
+ case 'json':
25
+ return `application/${ext}`
26
+ case 'rdf':
27
+ return 'application/rdf+xml.rdf'
28
+ default:
29
+ return 'application/octet-stream'
30
+ }
31
+ }
32
+
33
+ const server = () => {
34
+ return http.createServer((request, response) => {
35
+ const url = new URL(`http://${request.headers.host}${request.url}`)
36
+ let path = url.pathname === '/' ? '/index.html' : decodeURIComponent(url.pathname)
37
+ if (!path.includes('.')) {
38
+ path += '.html'
39
+ }
40
+ if (!fs.existsSync(`${distDir}${path}`)) {
41
+ console.log(styleText('red', `[${request.method}] 404`), request.url)
42
+ const errorContent = fs.readFileSync(`${distDir}/404.html`)
43
+ response.writeHead(404)
44
+ response.end(errorContent)
45
+ return
46
+ }
47
+ try {
48
+ const content = fs.readFileSync(`${distDir}${path}`, 'binary')
49
+
50
+ const ext = path.split('.')[1]
51
+ console.log(styleText('green', `[${request.method}] 200`), request.url)
52
+ response.writeHead(200, { 'Content-Type': `${contentType(ext)}; charset=utf-8` })
53
+ response.end(content, 'binary')
54
+ } catch (e) {
55
+ console.log(e)
56
+ console.log(styleText('red', `[${request.method}] 500`), request.url)
57
+ const errorContent = fs.readFileSync(`${distDir}/404.html`)
58
+ response.writeHead(500)
59
+ response.end(errorContent)
60
+ }
61
+ })
62
+ }
63
+
64
+ export default server
package/lib/watcher.js ADDED
@@ -0,0 +1,28 @@
1
+ import chokidar from 'chokidar'
2
+
3
+ const container = []
4
+
5
+ const watchers = {
6
+ push({ paths, event = 'change', callback, watchOptions }) {
7
+ if (!paths || !callback || typeof callback !== 'function') {
8
+ throw new Error('Invalid object type for watcher.')
9
+ }
10
+ container.push({ paths, event, callback, watchOptions })
11
+ }
12
+ }
13
+
14
+ const watch = () => {
15
+ container.forEach((watcher) => {
16
+ const { paths, event, callback, watchOptions } = watcher
17
+ const cwatcher = chokidar.watch(paths, watchOptions)
18
+ if (Array.isArray(event)) {
19
+ event.forEach(e => cwatcher.on(e, callback))
20
+ } else if (typeof event === 'string') {
21
+ cwatcher.on(event, callback)
22
+ } else {
23
+ throw new Error('Invalid event for watcher.')
24
+ }
25
+ })
26
+ }
27
+
28
+ export { watchers , watch }
package/package.json CHANGED
@@ -1,16 +1,17 @@
1
1
  {
2
2
  "name": "@tenjuu99/blog",
3
- "version": "0.1.2",
3
+ "version": "0.1.4",
4
4
  "description": "blog template",
5
5
  "main": "index.js",
6
6
  "scripts": {
7
- "serve": "eval $(cat .env | tr \"\n\" \" \") node server.js",
8
- "generate": "eval $(cat .env | tr \"\n\" \" \") node generate.js",
7
+ "dev": "node bin/server",
8
+ "generate": "node bin/generate",
9
9
  "test": "echo \"Error: no test specified\" && exit 1"
10
10
  },
11
11
  "bin": {
12
- "generate": "generate.js",
13
- "server": "server.js"
12
+ "generate": "bin/generate",
13
+ "server": "bin/server",
14
+ "new": "bin/new"
14
15
  },
15
16
  "author": "AmashigeSeiji",
16
17
  "repository": {
@@ -22,5 +23,8 @@
22
23
  "chokidar": "^3.6.0",
23
24
  "marked": "^13.x"
24
25
  },
25
- "type": "module"
26
+ "type": "module",
27
+ "engines": {
28
+ "node": ">=21.7"
29
+ }
26
30
  }
package/performance.js CHANGED
@@ -1,22 +1,22 @@
1
1
  "use strict"
2
2
  import distribute from './lib/distribute.js'
3
- import { indexing } from './lib/indexer.js'
3
+ import { indexing, allData, deleted } from './lib/indexer.js'
4
4
  import { srcDir, distDir } from './lib/dir.js'
5
5
 
6
6
  process.env.FORCE_BUILD = true
7
- const execute = async () => {
7
+ const doBuild = async () => {
8
8
  const start = performance.now()
9
- const data = await indexing(srcDir)
10
- await distribute(data, srcDir, distDir)
9
+ await indexing()
10
+ await distribute(allData, deleted, srcDir, distDir)
11
11
  const end = performance.now()
12
12
  return end - start
13
13
  }
14
14
  const times = 100
15
15
  let executed = 0
16
- //for (let i = 0; i < times; i++) {
17
- //console.log(await executed())
18
- const buildTime = await execute()
19
- //executed += (buildTime)
16
+ for (let i = 0; i < times; i++) {
17
+ // console.log(await executed())
18
+ const buildTime = await doBuild()
19
+ executed += (buildTime)
20
20
  console.log(buildTime)
21
- //}
22
- //console.log('build average: 100 times: ' + (executed/times) + "ms")
21
+ }
22
+ console.log('build average: 100 times: ' + (executed/times) + "ms")
File without changes
File without changes
File without changes
File without changes
@@ -4,3 +4,8 @@ url: /abc
4
4
  published: 2024-08-31
5
5
  ---
6
6
  hoge
7
+ hoge
8
+
9
+
10
+ abc
11
+ dc
File without changes
File without changes
@@ -0,0 +1 @@
1
+ これは [post](/post) 配下にあります
File without changes
package/generate.js DELETED
@@ -1,4 +0,0 @@
1
- #!/usr/bin/env node
2
-
3
- import generate from './lib/generate.js'
4
- generate()
package/server.js DELETED
@@ -1,58 +0,0 @@
1
- #!/usr/bin/env node
2
-
3
- import http from 'http'
4
- import url from 'url'
5
- import fs from 'node:fs/promises'
6
- import { srcDir, distDir } from './lib/dir.js'
7
- import chokidar from 'chokidar'
8
- import generate from './lib/generate.js'
9
- import { watchTemplate } from './lib/applyTemplate.js'
10
-
11
- chokidar.watch(srcDir).on('change', (event, path) => {
12
- generate()
13
- })
14
- watchTemplate()
15
- generate()
16
- const contentType = (ext) => {
17
- switch (ext) {
18
- case 'html':
19
- return 'text/html'
20
- case 'css':
21
- return 'text/css'
22
- case 'js':
23
- case 'javascript':
24
- return 'text/javascript'
25
- case 'json':
26
- return 'application/json'
27
- case 'jpg':
28
- case 'jpeg':
29
- return 'image/jpeg'
30
- case 'svg':
31
- return 'image/svg+xml'
32
- case 'xml':
33
- return 'application/xml'
34
- case 'rdf':
35
- return 'application/rdf+xml.rdf'
36
- default:
37
- return 'application/octet-stream'
38
- }
39
- }
40
-
41
- http.createServer((request, response) => {
42
- console.log(request.method, request.url)
43
- const url = new URL(`http://${request.headers.host}${request.url}`)
44
- let path = url.pathname === '/' ? '/index.html' : decodeURIComponent(url.pathname)
45
- if (!path.includes('.')) {
46
- path += '.html'
47
- }
48
- fs.readFile(`${distDir}${path}`, 'binary').catch(async (error) => {
49
- const errorContent = await fs.readFile(`${distDir}/404.html`)
50
- console.log(error)
51
- response.writeHead(404)
52
- response.end(errorContent)
53
- }).then(file => {
54
- const ext = path.split('.')[1]
55
- response.writeHead(200, { 'Content-Type': `${contentType(ext)}; charset=utf-8` })
56
- response.end(file, 'binary')
57
- })
58
- }).listen(process.env.PORT || 8000)