@adobe/helix-html-pipeline 1.3.0 → 1.3.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,3 +1,25 @@
1
+ ## [1.3.3](https://github.com/adobe/helix-html-pipeline/compare/v1.3.2...v1.3.3) (2022-04-06)
2
+
3
+
4
+ ### Bug Fixes
5
+
6
+ * correct last-modified handling for json ([#33](https://github.com/adobe/helix-html-pipeline/issues/33)) ([c9bb4fd](https://github.com/adobe/helix-html-pipeline/commit/c9bb4fd8e0974788cf0e963567db76685fcba231))
7
+
8
+ ## [1.3.2](https://github.com/adobe/helix-html-pipeline/compare/v1.3.1...v1.3.2) (2022-03-19)
9
+
10
+
11
+ ### Bug Fixes
12
+
13
+ * expect spec-compliant URL ([d65428a](https://github.com/adobe/helix-html-pipeline/commit/d65428a2f0e68471f8eed785706766744a7f168b))
14
+
15
+ ## [1.3.1](https://github.com/adobe/helix-html-pipeline/compare/v1.3.0...v1.3.1) (2022-03-18)
16
+
17
+
18
+ ### Bug Fixes
19
+
20
+ * **deps:** update dependency @adobe/helix-shared-utils to v2.0.6 ([#26](https://github.com/adobe/helix-html-pipeline/issues/26)) ([186c376](https://github.com/adobe/helix-html-pipeline/commit/186c376d0252b0c96ee461670cf45a711aa93f4f))
21
+ * preserve formatting of script tags ([#25](https://github.com/adobe/helix-html-pipeline/issues/25)) ([7009f20](https://github.com/adobe/helix-html-pipeline/commit/7009f20d37190f5704b7f9363c59912b4272c0bf)), closes [#23](https://github.com/adobe/helix-html-pipeline/issues/23)
22
+
1
23
  # [1.3.0](https://github.com/adobe/helix-html-pipeline/compare/v1.2.1...v1.3.0) (2022-03-17)
2
24
 
3
25
 
package/README.md CHANGED
@@ -3,8 +3,8 @@
3
3
  This package contains the common code for `helix-pipeline-service` and `helix-cloudflare-pipeline` for rendering the html response for helix3. it has the following design goals:
4
4
 
5
5
  - be platform neutral, i.e. not using node or browser specific modules or dependencies.
6
- - +/-0 runtime dependencies
7
- - offer extension interfaces where platform abstraction is required (e.g. reading from s3)
6
 + - +/-0 runtime dependencies (e.g. node [crypto](https://nodejs.org/api/crypto.html))
7
+ - offer extension interfaces where platform abstraction is required (e.g. reading from S3, sending to SQS)
8
8
 
9
9
  ## Status
10
10
  [![codecov](https://img.shields.io/codecov/c/github/adobe/helix-html-pipeline.svg)](https://codecov.io/gh/adobe/helix-html-pipeline)
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@adobe/helix-html-pipeline",
3
- "version": "1.3.0",
3
+ "version": "1.3.3",
4
4
  "description": "Helix HTML Pipeline",
5
5
  "main": "src/index.js",
6
6
  "types": "src/index.d.ts",
@@ -33,7 +33,7 @@
33
33
  },
34
34
  "dependencies": {
35
35
  "@adobe/helix-markdown-support": "3.1.2",
36
- "@adobe/helix-shared-utils": "2.0.5",
36
+ "@adobe/helix-shared-utils": "2.0.6",
37
37
  "github-slugger": "1.4.0",
38
38
  "hast-util-raw": "7.2.1",
39
39
  "hast-util-select": "5.0.1",
@@ -42,11 +42,11 @@
42
42
  "hastscript": "7.0.2",
43
43
  "mdast-util-gfm-footnote": "1.0.1",
44
44
  "mdast-util-gfm-strikethrough": "1.0.1",
45
- "mdast-util-gfm-table": "1.0.3",
45
+ "mdast-util-gfm-table": "1.0.4",
46
46
  "mdast-util-gfm-task-list-item": "1.0.1",
47
47
  "mdast-util-to-hast": "12.1.1",
48
48
  "mdast-util-to-string": "3.1.0",
49
- "micromark-extension-gfm-footnote": "1.0.3",
49
+ "micromark-extension-gfm-footnote": "1.0.4",
50
50
  "micromark-extension-gfm-strikethrough": "1.0.4",
51
51
  "micromark-extension-gfm-table": "1.0.5",
52
52
  "micromark-extension-gfm-tagfilter": "1.0.1",
@@ -54,12 +54,12 @@
54
54
  "micromark-util-combine-extensions": "1.0.0",
55
55
  "mime": "3.0.0",
56
56
  "rehype-format": "4.0.1",
57
- "rehype-minify-whitespace": "5.0.0",
57
+ "rehype-minify-whitespace": "5.0.1",
58
58
  "rehype-parse": "8.0.4",
59
59
  "remark-parse": "10.0.1",
60
60
  "strip-markdown": "5.0.0",
61
61
  "unified": "10.1.2",
62
- "unist-util-map": "3.0.0",
62
+ "unist-util-map": "3.0.1",
63
63
  "unist-util-remove": "3.1.0",
64
64
  "unist-util-remove-position": "4.0.1",
65
65
  "unist-util-select": "4.0.1",
@@ -76,16 +76,16 @@
76
76
  "codecov": "3.8.3",
77
77
  "commitizen": "4.2.4",
78
78
  "cz-conventional-changelog": "3.3.0",
79
- "eslint": "8.11.0",
79
+ "eslint": "8.12.0",
80
80
  "eslint-plugin-header": "3.1.1",
81
81
  "eslint-plugin-import": "2.25.4",
82
- "esmock": "1.7.4",
82
+ "esmock": "1.7.5",
83
83
  "husky": "7.0.4",
84
84
  "js-yaml": "4.1.0",
85
85
  "jsdoc-to-markdown": "7.1.1",
86
86
  "jsdom": "19.0.0",
87
87
  "junit-report-builder": "3.0.0",
88
- "lint-staged": "12.3.5",
88
+ "lint-staged": "12.3.7",
89
89
  "mocha": "9.2.2",
90
90
  "mocha-multi-reporters": "1.5.1",
91
91
  "remark-gfm": "3.0.1",
@@ -12,6 +12,12 @@
12
12
  import {PathInfo, S3Loader, FormsMessageDispatcher, PipelineTimer} from "./index";
13
13
  import {PipelineContent} from "./PipelineContent";
14
14
 
15
+ declare enum PipelineType {
16
+ html = 'html',
17
+ json = 'json',
18
+ form = 'form',
19
+ }
20
+
15
21
  declare interface PipelineOptions {
16
22
  log: Console;
17
23
  s3Loader: S3Loader;
@@ -70,5 +76,10 @@ declare class PipelineState {
70
76
  * optional timer that is used to measure the timing
71
77
  */
72
78
  timer?: PipelineTimer;
79
+
80
+ /**
81
 + * pipeline type: 'html', 'json', or 'form'
82
+ */
83
+ type: PipelineType;
73
84
  }
74
85
 
@@ -38,6 +38,7 @@ export class PipelineState {
38
38
  s3Loader: opts.s3Loader,
39
39
  messageDispatcher: opts.messageDispatcher,
40
40
  timer: opts.timer,
41
+ type: 'html',
41
42
  });
42
43
  }
43
44
  }
package/src/forms-pipe.js CHANGED
@@ -88,6 +88,7 @@ export async function extractBodyData(request) {
88
88
  */
89
89
  export async function formsPipe(state, request) {
90
90
  const { log } = state;
91
+ state.type = 'form';
91
92
 
92
93
  // todo: improve
93
94
  const response = new PipelineResponse('', {
package/src/html-pipe.js CHANGED
@@ -44,6 +44,7 @@ import { validatePathInfo } from './utils/path.js';
44
44
  */
45
45
  export async function htmlPipe(state, req) {
46
46
  const { log } = state;
47
+ state.type = 'html';
47
48
 
48
49
  if (!validatePathInfo(state.info)) {
49
50
  return new PipelineResponse('', {
package/src/json-pipe.js CHANGED
@@ -13,6 +13,7 @@ import fetchMetadata from './steps/fetch-metadata.js';
13
13
  import setCustomResponseHeaders from './steps/set-custom-response-headers.js';
14
14
  import { PipelineResponse } from './PipelineResponse.js';
15
15
  import jsonFilter from './utils/json-filter.js';
16
+ import { updateLastModified } from './utils/last-modified.js';
16
17
 
17
18
  /**
18
19
  * Runs the default pipeline and returns the response.
@@ -22,6 +23,7 @@ import jsonFilter from './utils/json-filter.js';
22
23
  */
23
24
  export async function jsonPipe(state, req) {
24
25
  const { log } = state;
26
+ state.type = 'json';
25
27
  const {
26
28
  owner, repo, ref, contentBusId, partition, s3Loader,
27
29
  } = state;
@@ -70,11 +72,8 @@ export async function jsonPipe(state, req) {
70
72
  raw: limit === undefined && offset === undefined && sheet === undefined,
71
73
  });
72
74
 
73
- // set last-modified (note, that it is not influenced by metadata or helix-config.json)
74
- const lastModified = dataResponse.headers.get('last-modified');
75
- if (lastModified) {
76
- response.headers.set('last-modified', lastModified);
77
- }
75
+ // set last-modified
76
+ updateLastModified(state, response, dataResponse.headers.get('last-modified'));
78
77
 
79
78
  // set surrogate key
80
79
  response.headers.set('x-surrogate-key', `${contentBusId}${path}`.replace(/\//g, '_'));
@@ -39,8 +39,10 @@ export default async function fetchMetadata(state, req, res) {
39
39
  }
40
40
  state.metadata = data;
41
41
 
42
- // also update last-modified
43
- updateLastModified(state, res, extractLastModified(ret.headers));
42
+ if (state.type === 'html') {
43
+ // also update last-modified (only for html pipeline)
44
+ updateLastModified(state, res, extractLastModified(ret.headers));
45
+ }
44
46
  return;
45
47
  }
46
48
 
@@ -12,6 +12,8 @@
12
12
  import { toHtml } from 'hast-util-to-html';
13
13
  // import rehypeFormat from 'rehype-format';
14
14
  import rehypeMinifyWhitespace from 'rehype-minify-whitespace';
15
+ import { visit } from 'unist-util-visit';
16
+
15
17
  /**
16
18
  * Serializes the response document to HTML
17
19
  * @param {PipelineState} state
@@ -32,7 +34,25 @@ export default function stringify(state, req, res) {
32
34
  // TODO: for the next breaking release, pretty print the HTML with rehypeFormat.
33
35
  // TODO: but for backward compatibility, output all on 1 line.
34
36
  // rehypeFormat()(doc);
37
+
38
+ // due to a bug in rehype-minify-whitespace, script content is also minified to 1 line, which
39
+ // can result in errors https://github.com/rehypejs/rehype-minify/issues/44
40
+ // so we 'save' all text first and revert it afterwards
41
+ visit(doc, (node) => {
42
+ if (node.tagName === 'script' && node.children[0]?.type === 'text') {
43
+ node.children[0].savedValue = node.children[0].value;
44
+ }
45
+ });
46
+
35
47
  rehypeMinifyWhitespace()(doc);
48
+
49
+ visit(doc, (node) => {
50
+ if (node.tagName === 'script' && node.children[0]?.type === 'text') {
51
+ node.children[0].value = node.children[0].savedValue;
52
+ delete node.children[0].savedValue;
53
+ }
54
+ });
55
+
36
56
  res.body = toHtml(doc, {
37
57
  upperDoctype: true,
38
58
  });
@@ -125,8 +125,9 @@ export function optimizeImageURL(src, width, format = 'webply', optimize = 'medi
125
125
  * @returns {string} resolved url
126
126
  */
127
127
  export function resolveUrl(from, to) {
128
- const resolvedUrl = new URL(to, new URL(from, 'resolve://'));
129
- if (resolvedUrl.protocol === 'resolve:') {
128
+ const DUMMY_BASE = 'https://__dummmy__';
129
+ const resolvedUrl = new URL(to, new URL(from, DUMMY_BASE));
130
+ if (resolvedUrl.origin === DUMMY_BASE) {
130
131
  // `from` is a relative URL.
131
132
  const { pathname, search, hash } = resolvedUrl;
132
133
  return pathname + search + hash;