@digicatapult/dtdl-parser 0.0.3 → 0.0.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1 +1,29 @@
1
- # dtdl-parser
1
+ # dtdl-parser
2
+
3
+ A library for parsing and validating [DTDL](https://learn.microsoft.com/en-us/azure/digital-twins/concepts-models) ontologies.
4
+
5
+ ## Prerequisites
6
+
7
+ `node` >= 20
8
+ `dotnet` [CLI](https://learn.microsoft.com/en-us/dotnet/core/install/)
9
+ Run `dotnet workload install wasm-tools` to install `wasm-tools`
10
+
11
+ ## Getting started
12
+
13
+ Install dependencies
14
+
15
+ `npm install`
16
+
17
+ Build javascript files
18
+
19
+ `npm run build`
20
+
21
+ Basic usage
22
+
23
+ ```javascript
24
+ import { parseDirectories, validateDirectories} from "dtdl-parser"
25
+ import { getInterop } from 'dtdl-parser/src/interop'
26
+
27
+ const parser = await getInterop()
28
+ parseDirectories('../dtdl/simple', parser)
29
+ ```
@@ -0,0 +1,62 @@
1
+ {
2
+ "name": "@digicatapult/dtdl-parser",
3
+ "version": "0.0.6",
4
+ "description": "JS tool to parse DTDL defined Ontologies",
5
+ "main": "src/index.ts",
6
+ "type": "module",
7
+ "scripts": {
8
+ "test": "NODE_ENV=test ./node_modules/.bin/mocha --config ./test/mocharc.json ./src/**/*.test.ts",
9
+ "build": "swc ./src ./package.json -d ./build --copy-files && npm run interop:build",
10
+ "interop:debug": "dotnet build interop",
11
+ "interop:build": "dotnet build interop --configuration Release",
12
+ "clean": "rimraf -rf ./build",
13
+ "lint": "eslint .",
14
+ "depcheck": "depcheck"
15
+ },
16
+ "repository": {
17
+ "type": "git",
18
+ "url": "git+https://github.com/digicatapult/dtdl-parser.git"
19
+ },
20
+ "files": [
21
+ "/build"
22
+ ],
23
+ "keywords": [
24
+ "dtdl",
25
+ "digital",
26
+ "twins",
27
+ "parsing",
28
+ "tool"
29
+ ],
30
+ "author": "Digital Catapult",
31
+ "license": "Apache-2.0",
32
+ "bugs": {
33
+ "url": "https://github.com/digicatapult/dtdl-parser/issues"
34
+ },
35
+ "homepage": "https://github.com/digicatapult/dtdl-parser#readme",
36
+ "devDependencies": {
37
+ "@eslint/eslintrc": "^3.1.0",
38
+ "@eslint/js": "^9.12.0",
39
+ "@swc-node/register": "^1.10.9",
40
+ "@swc/cli": "^0.4.1-nightly.20240914",
41
+ "@swc/core": "^1.7.35",
42
+ "@types/chai": "^5.0.0",
43
+ "@types/mocha": "^10.0.9",
44
+ "@types/node": "^22.7.5",
45
+ "@typescript-eslint/eslint-plugin": "^8.8.1",
46
+ "@typescript-eslint/parser": "^8.8.1",
47
+ "chai": "^5.1.1",
48
+ "depcheck": "^1.4.7",
49
+ "eslint": "^9.12.0",
50
+ "eslint-config-prettier": "^9.1.0",
51
+ "eslint-plugin-prettier": "^5.2.1",
52
+ "globals": "^15.11.0",
53
+ "mocha": "^10.7.3",
54
+ "prettier": "^3.3.3",
55
+ "prettier-plugin-organize-imports": "^4.1.0",
56
+ "rimraf": "^6.0.1",
57
+ "typescript": "^5.6.3"
58
+ },
59
+ "engines": {
60
+ "node": ">= 20"
61
+ }
62
+ }
@@ -0,0 +1,2 @@
1
+ let{error:e}=console,n=e=>"Parsing"===e.ExceptionKind,o=e=>"Resolution"===e.ExceptionKind;export const isResolutionException=e=>e instanceof Error&&o(JSON.parse(e.message));export const errorHandler=r=>{if(!(r instanceof Error))return e(`Unexpected error: ${r}`);let t=JSON.parse(r.message);n(t)||o(t)||e("Unknown exception type"),e(t)};
2
+ //# sourceMappingURL=error.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../src/error.ts"],"sourcesContent":["import { ModelingException, ParsingException, ResolutionException } from '../interop/DtdlErr.js'\n\nconst { error } = console\n\nconst isParsingEx = (exception: ModelingException): exception is ParsingException => {\n return exception.ExceptionKind === 'Parsing'\n}\n\nconst isResolutionEx = (exception: ModelingException): exception is ResolutionException => {\n return exception.ExceptionKind === 'Resolution'\n}\n\nexport const isResolutionException = (err: unknown) => {\n if (!(err instanceof Error)) return false\n return isResolutionEx(JSON.parse(err.message))\n}\n\nexport const errorHandler = (err: unknown) => {\n if (!(err instanceof Error)) return error(`Unexpected error: ${err}`)\n\n const exception = JSON.parse(err.message) as ModelingException\n\n if (!(isParsingEx(exception) || isResolutionEx(exception))) error('Unknown exception type')\n error(exception)\n}\n"],"names":["error","console","isParsingEx","exception","ExceptionKind","isResolutionEx","isResolutionException","err","Error","JSON","parse","message","errorHandler"],"mappings":"AAEA,GAAM,CAAEA,MAAAA,CAAK,CAAE,CAAGC,QAEZC,EAAc,AAACC,GACZA,AAA4B,YAA5BA,EAAUC,aAAa,CAG1BC,EAAiB,AAACF,GACfA,AAA4B,eAA5BA,EAAUC,aAAa,AAGhC,QAAO,MAAME,sBAAwB,AAACC,GACpC,AAAMA,aAAeC,OACdH,EAAeI,KAAKC,KAAK,CAACH,EAAII,OAAO,EAC7C,AAED,QAAO,MAAMC,aAAe,AAACL,IAC3B,GAAI,CAAEA,CAAAA,aAAeC,KAAI,EAAI,OAAOR,EAAM,CAAC,kBAAkB,EAAEO,EAAI,CAAC,EAEpE,IAAMJ,EAAYM,KAAKC,KAAK,CAACH,EAAII,OAAO,EAElCT,EAAYC,IAAcE,EAAeF,IAAaH,EAAM,0BAClEA,EAAMG,EACR,CAAC"}
@@ -0,0 +1,6 @@
1
+ import e from"fs";import r from"path";import{errorHandler as t,isResolutionException as l}from"./error.js";let{log:n,error:s}=console;export const searchForJsonFiles=t=>e.existsSync(t)?e.readdirSync(t).map(e=>r.join(t,e)).reduce((t,l)=>e.statSync(l).isDirectory()?t.concat(searchForJsonFiles(l)):".json"===r.extname(l)?t.concat(l):t,[]):(s(`'${t}' not a valid filepath`),[]);let i=r=>{try{let t=e.readFileSync(r,"utf-8");return JSON.parse(t)}catch(e){return s(`Invalid JSON at '${r}'`),s(e),null}},a=e=>{let r=[];for(let t of e){let e=i(t);if(null===e)return null;r.push(e)}return r},o=(r,i,a)=>{try{let t=e.readFileSync(r,"utf-8");return i.parse(t),n(`Successfully validated '${r}'`),!0}catch(e){if(!a&&l(e))return n(`Successfully validated '${r}'`),!0;return s(`Error parsing '${r}'`),t(e),!1}},u=(e,r)=>{try{let t=JSON.parse(r.parse(JSON.stringify(e)));return n("Successfully parsed"),t}catch(e){return s("Error parsing"),t(e),null}};export const validateDirectories=(e,r,t)=>{n(`${r.parserVersion()}
2
+ `),n(`Validating DTDL at: '${e}'`);let l=searchForJsonFiles(e);if(l.length<1)return!1;for(let e of(n(`Found ${l.length} files:`),n(l),l))if(!o(e,r,t))return!1;return n(`All files validated!
3
+ `),!0};export const parseDirectories=(e,r)=>{n(`${r.parserVersion()}
4
+ `),n(`Parsing DTDL at: '${e}'`);let t=searchForJsonFiles(e);if(t.length<1)return null;n(`Found ${t.length} files:`),n(t);let l=a(t);if(null===l)return null;let s=u(l,r);if(null===s)return null;n(`All files parsed!
5
+ `),n("Entities:"),n(Object.keys(s));let i=Object.values(s).filter(e=>"Interface"===e.EntityKind);return n(`Number of interfaces: ${i.length}`),s};
6
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../src/index.ts"],"sourcesContent":["import fs from 'fs'\nimport path from 'path'\nimport { DtdlObjectModel, InterfaceInfo } from '../interop/DtdlOm.js'\nimport { errorHandler, isResolutionException } from './error.js'\nimport { Parser } from './interop.js'\n\nconst { log, error } = console\n\nexport const searchForJsonFiles = (directory: string): string[] => {\n if (!fs.existsSync(directory)) {\n error(`'${directory}' not a valid filepath`)\n return []\n }\n\n return fs\n .readdirSync(directory)\n .map((file) => path.join(directory, file))\n .reduce((jsonFiles, fullPath) => {\n if (fs.statSync(fullPath).isDirectory()) {\n return jsonFiles.concat(searchForJsonFiles(fullPath)) //recursive\n } else if (path.extname(fullPath) === '.json') {\n return jsonFiles.concat(fullPath)\n }\n return jsonFiles\n }, [] as string[])\n}\n\nconst readJsonFile = (filepath: string): unknown | null => {\n try {\n const file = fs.readFileSync(filepath, 'utf-8')\n const json = JSON.parse(file)\n return json\n } catch (err) {\n error(`Invalid JSON at '${filepath}'`)\n error(err)\n return null\n }\n}\n\nconst combineJson = (filepaths: string[]) => {\n const combinedJson: unknown[] = []\n\n for (const filepath of filepaths) {\n const json = readJsonFile(filepath)\n if (json === null) {\n return null // exit on any error\n }\n combinedJson.push(json)\n }\n\n return combinedJson\n}\n\nconst validateFile = (filepath: string, parserModule: Parser, incResolutionException: boolean): boolean => {\n try {\n const file = fs.readFileSync(filepath, 'utf-8')\n parserModule.parse(file)\n log(`Successfully validated '${filepath}'`)\n return true\n } catch (err) {\n if (!incResolutionException && isResolutionException(err)) {\n // ignore resolution exception\n log(`Successfully validated '${filepath}'`)\n return true\n }\n error(`Error parsing '${filepath}'`)\n errorHandler(err)\n return false\n }\n}\n\nconst parseDtdl = (json: unknown[], parserModule: Parser): DtdlObjectModel 
| null => {\n try {\n const model = JSON.parse(parserModule.parse(JSON.stringify(json))) as DtdlObjectModel\n log(`Successfully parsed`)\n return model\n } catch (err) {\n error(`Error parsing`)\n errorHandler(err)\n return null\n }\n}\n\nexport const validateDirectories = (directory: string, parser: Parser, incResolutionException: boolean): boolean => {\n log(`${parser.parserVersion()}\\n`)\n log(`Validating DTDL at: '${directory}'`)\n\n const filepaths = searchForJsonFiles(directory)\n if (filepaths.length < 1) return false\n\n log(`Found ${filepaths.length} files:`)\n log(filepaths)\n\n for (const filepath of filepaths) {\n const isValid = validateFile(filepath, parser, incResolutionException)\n if (!isValid) return false // stop validating if error\n }\n\n log(`All files validated!\\n`)\n return true\n}\n\nexport const parseDirectories = (directory: string, parser: Parser): DtdlObjectModel | null => {\n log(`${parser.parserVersion()}\\n`)\n log(`Parsing DTDL at: '${directory}'`)\n\n const filepaths = searchForJsonFiles(directory)\n if (filepaths.length < 1) return null\n\n log(`Found ${filepaths.length} files:`)\n log(filepaths)\n\n const fullJson = combineJson(filepaths)\n if (fullJson === null) return null\n\n const fullModel = parseDtdl(fullJson, parser)\n if (fullModel === null) return null\n\n log(`All files parsed!\\n`)\n log(`Entities:`)\n log(Object.keys(fullModel))\n\n // Example type guard\n const interfaces: InterfaceInfo[] = Object.values(fullModel).filter(\n (value): value is InterfaceInfo => value.EntityKind === 'Interface'\n )\n log(`Number of interfaces: ${interfaces.length}`)\n\n return 
fullModel\n}\n"],"names":["fs","path","errorHandler","isResolutionException","log","error","console","searchForJsonFiles","directory","existsSync","readdirSync","map","file","join","reduce","jsonFiles","fullPath","statSync","isDirectory","concat","extname","readJsonFile","filepath","readFileSync","JSON","parse","err","combineJson","filepaths","combinedJson","json","push","validateFile","parserModule","incResolutionException","parseDtdl","model","stringify","validateDirectories","parser","parserVersion","length","parseDirectories","fullJson","fullModel","Object","keys","interfaces","values","filter","value","EntityKind"],"mappings":"AAAA,OAAOA,MAAQ,IAAI,AACnB,QAAOC,MAAU,MAAM,AAEvB,QAASC,gBAAAA,CAAY,CAAEC,yBAAAA,CAAqB,KAAQ,YAAY,CAGhE,GAAM,CAAEC,IAAAA,CAAG,CAAEC,MAAAA,CAAK,CAAE,CAAGC,OAEvB,QAAO,MAAMC,mBAAqB,AAACC,GACjC,AAAKR,EAAGS,UAAU,CAACD,GAKZR,EACJU,WAAW,CAACF,GACZG,GAAG,CAAC,AAACC,GAASX,EAAKY,IAAI,CAACL,EAAWI,IACnCE,MAAM,CAAC,CAACC,EAAWC,IAClB,AAAIhB,EAAGiB,QAAQ,CAACD,GAAUE,WAAW,GAC5BH,EAAUI,MAAM,CAACZ,mBAAmBS,IAClCf,AAA2B,UAA3BA,EAAKmB,OAAO,CAACJ,GACfD,EAAUI,MAAM,CAACH,GAEnBD,EACN,EAAE,GAdLV,EAAM,CAAC,CAAC,EAAEG,EAAU,sBAAsB,CAAC,EACpC,EAAE,CAcZ,CAED,IAAMa,EAAe,AAACC,IACpB,GAAI,CACF,IAAMV,EAAOZ,EAAGuB,YAAY,CAACD,EAAU,SAEvC,OADaE,KAAKC,KAAK,CAACb,EAE1B,CAAE,MAAOc,EAAK,CAGZ,OAFArB,EAAM,CAAC,iBAAiB,EAAEiB,EAAS,CAAC,CAAC,EACrCjB,EAAMqB,GACC,IACT,CACF,EAEMC,EAAc,AAACC,IACnB,IAAMC,EAA0B,EAAE,CAElC,IAAK,IAAMP,KAAYM,EAAW,CAChC,IAAME,EAAOT,EAAaC,GAC1B,GAAIQ,AAAS,OAATA,EACF,OAAO,KAETD,EAAaE,IAAI,CAACD,EACpB,CAEA,OAAOD,CACT,EAEMG,EAAe,CAACV,EAAkBW,EAAsBC,KAC5D,GAAI,CACF,IAAMtB,EAAOZ,EAAGuB,YAAY,CAACD,EAAU,SAGvC,OAFAW,EAAaR,KAAK,CAACb,GACnBR,EAAI,CAAC,wBAAwB,EAAEkB,EAAS,CAAC,CAAC,EACnC,CAAA,CACT,CAAE,MAAOI,EAAK,CACZ,GAAI,CAACQ,GAA0B/B,EAAsBuB,GAGnD,OADAtB,EAAI,CAAC,wBAAwB,EAAEkB,EAAS,CAAC,CAAC,EACnC,CAAA,EAIT,OAFAjB,EAAM,CAAC,eAAe,EAAEiB,EAAS,CAAC,CAAC,EACnCpB,EAAawB,GACN,CAAA,CACT,CACF,EAEMS,EAAY,CAACL,EAAiBG,KAClC,GAAI,CACF,IAAMG,EAAQZ,KAAKC,KAAK,CAACQ,EAAaR,KAAK,CAACD,KAAK
a,SAAS,CAACP,KAE3D,OADA1B,EAAI,uBACGgC,CACT,CAAE,MAAOV,EAAK,CAGZ,OAFArB,EAAM,iBACNH,EAAawB,GACN,IACT,CACF,CAEA,QAAO,MAAMY,oBAAsB,CAAC9B,EAAmB+B,EAAgBL,KACrE9B,EAAI,CAAC,EAAEmC,EAAOC,aAAa;AAAK,CAAC,EACjCpC,EAAI,CAAC,qBAAqB,EAAEI,EAAU,CAAC,CAAC,EAExC,IAAMoB,EAAYrB,mBAAmBC,GACrC,GAAIoB,EAAUa,MAAM,CAAG,EAAG,MAAO,CAAA,EAKjC,IAAK,IAAMnB,KAHXlB,EAAI,CAAC,MAAM,EAAEwB,EAAUa,MAAM,CAAC,OAAO,CAAC,EACtCrC,EAAIwB,GAEmBA,GAErB,GAAI,CADYI,EAAaV,EAAUiB,EAAQL,GACjC,MAAO,CAAA,EAIvB,OADA9B,EAAI,CAAC;AAAsB,CAAC,EACrB,CAAA,CACT,CAAC,AAED,QAAO,MAAMsC,iBAAmB,CAAClC,EAAmB+B,KAClDnC,EAAI,CAAC,EAAEmC,EAAOC,aAAa;AAAK,CAAC,EACjCpC,EAAI,CAAC,kBAAkB,EAAEI,EAAU,CAAC,CAAC,EAErC,IAAMoB,EAAYrB,mBAAmBC,GACrC,GAAIoB,EAAUa,MAAM,CAAG,EAAG,OAAO,KAEjCrC,EAAI,CAAC,MAAM,EAAEwB,EAAUa,MAAM,CAAC,OAAO,CAAC,EACtCrC,EAAIwB,GAEJ,IAAMe,EAAWhB,EAAYC,GAC7B,GAAIe,AAAa,OAAbA,EAAmB,OAAO,KAE9B,IAAMC,EAAYT,EAAUQ,EAAUJ,GACtC,GAAIK,AAAc,OAAdA,EAAoB,OAAO,KAE/BxC,EAAI,CAAC;AAAmB,CAAC,EACzBA,EAAI,aACJA,EAAIyC,OAAOC,IAAI,CAACF,IAGhB,IAAMG,EAA8BF,OAAOG,MAAM,CAACJ,GAAWK,MAAM,CACjE,AAACC,GAAkCA,AAAqB,cAArBA,EAAMC,UAAU,EAIrD,OAFA/C,EAAI,CAAC,sBAAsB,EAAE2C,EAAWN,MAAM,CAAC,CAAC,EAEzCG,CACT,CAAC"}
@@ -0,0 +1,2 @@
1
+ export const getInterop=async()=>await import("../interop/modelParser.js");
2
+ //# sourceMappingURL=interop.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../src/interop.ts"],"sourcesContent":["export const getInterop = async (): Promise<Parser> => {\n const module = await import('../interop/modelParser.js')\n return module as Parser\n}\n\nexport interface Parser {\n parse: (file: string) => string\n parserVersion: () => string\n}\n"],"names":["getInterop"],"mappings":"AAAA,OAAO,MAAMA,WAAa,SACT,MAAM,MAAM,CAAC,4BAE7B"}
package/package.json CHANGED
@@ -1,12 +1,14 @@
1
1
  {
2
2
  "name": "@digicatapult/dtdl-parser",
3
- "version": "0.0.3",
3
+ "version": "0.0.6",
4
4
  "description": "JS tool to parse DTDL defined Ontologies",
5
5
  "main": "src/index.ts",
6
6
  "type": "module",
7
7
  "scripts": {
8
8
  "test": "NODE_ENV=test ./node_modules/.bin/mocha --config ./test/mocharc.json ./src/**/*.test.ts",
9
- "build": "npx swc ./src ./package.json -d ./build --copy-files",
9
+ "build": "swc ./src ./package.json -d ./build --copy-files && npm run interop:build",
10
+ "interop:debug": "dotnet build interop",
11
+ "interop:build": "dotnet build interop --configuration Release",
10
12
  "clean": "rimraf -rf ./build",
11
13
  "lint": "eslint .",
12
14
  "depcheck": "depcheck"
@@ -15,6 +17,9 @@
15
17
  "type": "git",
16
18
  "url": "git+https://github.com/digicatapult/dtdl-parser.git"
17
19
  },
20
+ "files": [
21
+ "/build"
22
+ ],
18
23
  "keywords": [
19
24
  "dtdl",
20
25
  "digital",
package/.depcheckrc DELETED
@@ -1,6 +0,0 @@
1
- ignores: [
2
- '@swc-node/register',
3
- 'prettier-plugin-organize-imports',
4
- 'rimraf',
5
- 'typescript'
6
- ]
@@ -1,4 +0,0 @@
1
- # Github CODEOWNERS file
2
-
3
- # Global code-owners for this repository
4
- * @digicatapult/software-engineering
@@ -1,211 +0,0 @@
1
- name: Release
2
-
3
- on:
4
- push:
5
- branches: ['main', "init-commit"]
6
- jobs:
7
- preconditions:
8
- runs-on: ubuntu-latest
9
- outputs:
10
- repo_name: ${{ steps.repo_ids.outputs.REPO_NAME }}
11
- org_name: ${{ steps.repo_ids.outputs.ORG_NAME }}
12
- steps:
13
- - name: Checkout
14
- uses: actions/checkout@v4
15
- with:
16
- fetch-depth: 0
17
- - name: Check Github token
18
- run: |
19
- if [ -z "${{ secrets.GITHUB_TOKEN }}"]; then
20
- echo "Must provide a GITHUB_TOKEN secret in order to run release workflow"
21
- exit 1
22
- fi
23
- - name: Check npmjs token
24
- run: |
25
- if [ -z "${{ secrets.NPMJS_TOKEN }}"]; then
26
- echo "Must provide a NPMJS_TOKEN secret in order to run release workflow"
27
- exit 1
28
- fi
29
- - name: Get repository identifiers
30
- id: repo_ids
31
- run: |
32
- REPO_NAME=$(echo "${{ github.event.repository.name }}" | tr '[:upper:]' '[:lower:]')
33
- ORG_NAME=$(echo "${{ github.event.repository.owner.name }}" | tr '[:upper:]' '[:lower:]')
34
- echo "REPO_NAME=$REPO_NAME" >> $GITHUB_OUTPUT
35
- echo "ORG_NAME=$ORG_NAME" >> $GITHUB_OUTPUT
36
- lint:
37
- name: Run lint
38
- runs-on: ubuntu-latest
39
- steps:
40
- - uses: actions/checkout@v4
41
- - uses: actions/setup-node@master
42
- with:
43
- node-version: 20.x
44
- - name: Use npm latest
45
- run: npm install -g npm@latest
46
- - name: Cache Node.js modules
47
- uses: actions/cache@v4
48
- with:
49
- path: ~/.npm # npm cache files are stored in `~/.npm` on Linux/macOS
50
- key: ${{ runner.OS }}-node-${{ hashFiles('**/package-lock.json') }}
51
- restore-keys: |
52
- ${{ runner.OS }}-node-
53
- ${{ runner.OS }}-
54
- - name: Install Packages
55
- run: npm ci
56
- - name: Lint
57
- run: npm run lint
58
-
59
- dependency-check:
60
- name: Run dependency check
61
- runs-on: ubuntu-latest
62
- steps:
63
- - uses: actions/checkout@v4
64
- - uses: actions/setup-node@master
65
- with:
66
- node-version: 20.x
67
- - name: Use npm latest
68
- run: npm install -g npm@latest
69
- - name: Cache Node.js modules
70
- uses: actions/cache@v4
71
- with:
72
- path: ~/.npm # npm cache files are stored in `~/.npm` on Linux/macOS
73
- key: ${{ runner.OS }}-node-${{ hashFiles('**/package-lock.json') }}
74
- restore-keys: |
75
- ${{ runner.OS }}-node-
76
- ${{ runner.OS }}-
77
- - name: Install Packages
78
- run: npm ci
79
- - name: Dependency Check
80
- run: npm run depcheck
81
- tests:
82
- name: Run tests
83
- strategy:
84
- fail-fast: false
85
- matrix:
86
- command: ['test']
87
- runs-on: ubuntu-latest
88
- steps:
89
- - uses: actions/checkout@v4
90
- - uses: actions/setup-node@v4
91
- with:
92
- node-version: 20.x
93
- - name: Cache Node.js modules
94
- uses: actions/cache@v4
95
- with:
96
- path: ~/.npm # npm cache files are stored in `~/.npm` on Linux/macOS
97
- key: ${{ runner.OS }}-node-${{ hashFiles('**/package-lock.json') }}
98
- restore-keys: |
99
- ${{ runner.OS }}-node-
100
- ${{ runner.OS }}-
101
- - name: Install Packages
102
- run: npm ci
103
- - name: Build
104
- run: npm run build
105
- - name: Run tests
106
- run: npm run ${{ matrix.command }}
107
- check-version:
108
- name: 'Check version'
109
- runs-on: ubuntu-latest
110
- outputs:
111
- is_new_version: ${{ steps.get_version.outputs.IS_NEW_VERSION }}
112
- version: ${{ steps.get_version.outputs.VERSION }}
113
- build_date: ${{ steps.get_version.outputs.BUILD_DATE }}
114
- is_prerelease: ${{ steps.get_version.outputs.IS_PRERELEASE }}
115
-
116
- steps:
117
- - uses: actions/checkout@v4
118
- - name: Check version
119
- id: get_version
120
- uses: digicatapult/check-version@v1
121
- with:
122
- token: ${{ secrets.GITHUB_TOKEN }}
123
- publish-gh:
124
- name: 'Publish Github package'
125
- needs:
126
- - preconditions
127
- - lint
128
- - dependency-check
129
- - tests
130
- - check-version
131
- runs-on: ubuntu-latest
132
- if: ${{ needs.check-version.outputs.is_new_version == 'true' }}
133
-
134
- steps:
135
- - uses: actions/checkout@v4
136
- - uses: actions/setup-node@v4
137
- with:
138
- node-version: '16.x'
139
- registry-url: 'https://npm.pkg.github.com'
140
- scope: '@digicatapult'
141
- - name: Install packages
142
- run: npm ci
143
- - name: Publish to github packages
144
- run: npm adduser && npm publish --access public
145
- env:
146
- NODE_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
147
-
148
- publish-npm:
149
- name: 'Publish package to NPMJS'
150
- needs:
151
- - preconditions
152
- - lint
153
- - dependency-check
154
- - tests
155
- - check-version
156
- runs-on: ubuntu-latest
157
- if: ${{ needs.check-version.outputs.is_new_version == 'true' }}
158
-
159
- steps:
160
- - uses: actions/checkout@v4
161
- - uses: actions/setup-node@v4
162
- with:
163
- node-version: '18.x'
164
- registry-url: 'https://registry.npmjs.org'
165
- scope: '@digicatapult'
166
- - name: Install packages
167
- run: npm ci
168
- - name: Build
169
- run: npm run build
170
- - name: Publish to npmjs packages
171
- run: npm publish --access public
172
- env:
173
- NODE_AUTH_TOKEN: ${{ secrets.NPMJS_TOKEN }}
174
-
175
- publish:
176
- name: 'Publish release'
177
- needs: [preconditions, lint, dependency-check, tests, check-version]
178
- runs-on: ubuntu-latest
179
- if: ${{ needs.check-version.outputs.is_new_version == 'true' }}
180
-
181
- steps:
182
- - uses: actions/checkout@v4
183
-
184
- - name: Build release version
185
- uses: softprops/action-gh-release@v2
186
- with:
187
- token: '${{ secrets.GITHUB_TOKEN }}'
188
- tag_name: ${{ needs.check-version.outputs.version }}
189
- prerelease: false
190
- name: ${{ needs.check-version.outputs.version }}
191
- generate_release_notes: true
192
- - name: Delete release latest
193
- uses: actions/github-script@v7
194
- with:
195
- github-token: ${{secrets.GITHUB_TOKEN}}
196
- script: |
197
- const { owner, repo } = context.repo
198
- try {
199
- await github.rest.git.deleteRef({ owner, repo, ref: 'tags/latest' })
200
- }
201
- catch (err) {
202
- if (err.status !== 422) throw err
203
- }
204
- - name: Build release latest
205
- uses: softprops/action-gh-release@v2
206
- with:
207
- token: '${{ secrets.GITHUB_TOKEN }}'
208
- tag_name: latest
209
- prerelease: false
210
- name: Latest ${{ needs.check-version.outputs.version }}
211
- generate_release_notes: true
@@ -1,87 +0,0 @@
1
- name: Lint and Test
2
-
3
- on:
4
- push:
5
- branches-ignore: ['main']
6
-
7
- jobs:
8
- repo_ids:
9
- runs-on: ubuntu-latest
10
- outputs:
11
- repo_name: ${{ steps.repo_ids.outputs.REPO_NAME }}
12
- org_name: ${{ steps.repo_ids.outputs.ORG_NAME }}
13
- steps:
14
- - name: Checkout
15
- uses: actions/checkout@v4
16
- with:
17
- fetch-depth: 0
18
- - name: Get repository identifiers
19
- id: repo_ids
20
- run: |
21
- REPO_NAME=$(echo "${{ github.event.repository.name }}" | tr '[:upper:]' '[:lower:]')
22
- ORG_NAME=$(echo "${{ github.event.repository.owner.name }}" | tr '[:upper:]' '[:lower:]')
23
- echo "REPO_NAME=$REPO_NAME" >> $GITHUB_OUTPUT
24
- echo "ORG_NAME=$ORG_NAME" >> $GITHUB_OUTPUT
25
-
26
- static-checks:
27
- name: Run Static Analysis Checks
28
- strategy:
29
- fail-fast: false
30
- matrix:
31
- command: [lint, depcheck]
32
- runs-on: ubuntu-latest
33
- steps:
34
- - uses: actions/checkout@v4
35
- - uses: actions/setup-node@v4
36
- with:
37
- node-version: 20.x
38
- - name: Cache Node.js modules
39
- uses: actions/cache@v4
40
- with:
41
- path: ~/.npm # npm cache files are stored in `~/.npm` on Linux/macOS
42
- key: ${{ runner.OS }}-node-${{ hashFiles('**/package-lock.json') }}
43
- restore-keys: |
44
- ${{ runner.OS }}-node-
45
- ${{ runner.OS }}-
46
- - name: Install Packages
47
- run: npm ci
48
- - name: Lint
49
- run: npm run ${{ matrix.command }}
50
-
51
- tests:
52
- name: Run tests
53
- strategy:
54
- fail-fast: false
55
- matrix:
56
- command: ['test']
57
- runs-on: ubuntu-latest
58
- steps:
59
- - uses: actions/checkout@v4
60
- - uses: actions/setup-node@v4
61
- with:
62
- node-version: 20.x
63
- - name: Cache Node.js modules
64
- uses: actions/cache@v4
65
- with:
66
- path: ~/.npm # npm cache files are stored in `~/.npm` on Linux/macOS
67
- key: ${{ runner.OS }}-node-${{ hashFiles('**/package-lock.json') }}
68
- restore-keys: |
69
- ${{ runner.OS }}-node-
70
- ${{ runner.OS }}-
71
- - name: Install Packages
72
- run: npm ci
73
- - name: Build
74
- run: npm run build
75
- - name: Run tests
76
- run: npm run ${{ matrix.command }}
77
-
78
- check-version:
79
- name: 'Check version'
80
- runs-on: ubuntu-latest
81
- steps:
82
- - uses: actions/checkout@v4
83
- - name: Check version
84
- id: get_version
85
- uses: digicatapult/check-version@v1
86
- with:
87
- token: ${{ secrets.GITHUB_TOKEN }}
package/.prettierrc DELETED
@@ -1,9 +0,0 @@
1
- {
2
- "singleQuote": true,
3
- "semi": false,
4
- "trailingComma": "es5",
5
- "printWidth": 120,
6
- "plugins": [
7
- "prettier-plugin-organize-imports"
8
- ]
9
- }
package/.swcrc DELETED
@@ -1,32 +0,0 @@
1
- {
2
- "$schema": "https://swc.rs/schema.json",
3
- "jsc": {
4
- "parser": {
5
- "syntax": "typescript",
6
- "decorators": true,
7
- "topLevelAwait": true,
8
- "importMeta": true
9
- },
10
- "transform": {
11
- "legacyDecorator": true,
12
- "decoratorMetadata": true,
13
- "useDefineForClassFields": true
14
- },
15
- "minify": {
16
- "compress": {
17
- "unused": true
18
- },
19
- "mangle": true
20
- },
21
- "target": "esnext",
22
- "experimental": {
23
- "keepImportAttributes": true
24
- }
25
- },
26
- "exclude": [
27
- "/__tests__/"
28
- ],
29
- "sourceMaps": true,
30
- "isModule": true,
31
- "minify": true
32
- }
@@ -1,11 +0,0 @@
1
- {
2
- "@type": "Interface",
3
- "@id": "dtmi:com:example:Thermostat",
4
- "contents": [
5
- {
6
- "@type": "Telemetry",
7
- "name": "temperature",
8
- "schema": "double"
9
- }
10
- ]
11
- }
@@ -1,9 +0,0 @@
1
- {
2
- "@context": ["dtmi:dtdl:context;3"],
3
- "@id": "dtmi:com:example;1",
4
- "@type": "Interface",
5
- "extends": {
6
- "@type": "Interface",
7
- "@id": "dtmi:com:example:base;1"
8
- }
9
- }
package/eslint.config.mjs DELETED
@@ -1,52 +0,0 @@
1
- import { FlatCompat } from '@eslint/eslintrc'
2
- import js from '@eslint/js'
3
- import typescriptEslint from '@typescript-eslint/eslint-plugin'
4
- import tsParser from '@typescript-eslint/parser'
5
- import prettier from 'eslint-plugin-prettier'
6
- import globals from 'globals'
7
- import path from 'node:path'
8
- import { fileURLToPath } from 'node:url'
9
-
10
- const __filename = fileURLToPath(import.meta.url)
11
- const __dirname = path.dirname(__filename)
12
- const compat = new FlatCompat({
13
- baseDirectory: __dirname,
14
- recommendedConfig: js.configs.recommended,
15
- allConfig: js.configs.all,
16
- })
17
-
18
- export default [
19
- ...compat.extends('eslint:recommended', 'plugin:@typescript-eslint/recommended', 'prettier'),
20
- {
21
- plugins: {
22
- prettier,
23
- '@typescript-eslint': typescriptEslint,
24
- },
25
-
26
- languageOptions: {
27
- globals: {
28
- ...globals.node,
29
- },
30
-
31
- parser: tsParser,
32
- ecmaVersion: 12,
33
- sourceType: 'module',
34
- },
35
-
36
- rules: {
37
- 'prettier/prettier': 'error',
38
- '@typescript-eslint/no-unused-vars': [
39
- 'warn',
40
- {
41
- ignoreRestSiblings: true,
42
- },
43
- ],
44
-
45
- 'no-console': 2,
46
- 'no-duplicate-imports': 'warn',
47
- },
48
- },
49
- {
50
- ignores: ['build/', 'node_modules/', 'interop/'],
51
- },
52
- ]
@@ -1,26 +0,0 @@
1
- <Project Sdk="Microsoft.NET.Sdk">
2
- <PropertyGroup>
3
- <TargetFramework>net8.0</TargetFramework>
4
- <RuntimeIdentifier>browser-wasm</RuntimeIdentifier>
5
- <OutputType>Exe</OutputType>
6
- <WasmMainJSPath>modelParser.js</WasmMainJSPath>
7
- <AllowUnsafeBlocks>true</AllowUnsafeBlocks>
8
- <RunAOTCompilation>false</RunAOTCompilation>
9
- <PublishTrimmed>true</PublishTrimmed>
10
- <InvariantGlobalization>true</InvariantGlobalization>
11
- <EmccInitialHeapSize>16mb</EmccInitialHeapSize>
12
- <DebuggerSupport>false</DebuggerSupport>
13
- <UseSystemResourceKeys>true</UseSystemResourceKeys>
14
- <EventSourceSupport>false</EventSourceSupport>
15
- <WasmEmitSymbolMap>false</WasmEmitSymbolMap>
16
- <WasmAppDir>../build/interop</WasmAppDir>
17
- </PropertyGroup>
18
-
19
- <ItemGroup>
20
- <WasmExtraFilesToDeploy Include="modelParser.js" />
21
- </ItemGroup>
22
-
23
- <ItemGroup>
24
- <PackageReference Include="DTDLParser" Version="1.0.52" />
25
- </ItemGroup>
26
- </Project>
@@ -1,26 +0,0 @@
1
- export type ModelingException = ParsingException | ResolutionException
2
-
3
- export interface ParsingException {
4
- ExceptionKind: 'Parsing'
5
- Errors: ParsingError[]
6
- }
7
-
8
- export interface ParsingError {
9
- PrimaryID?: string
10
- SecondaryID?: string
11
- Property?: string
12
- AuxProperty?: string
13
- Type?: string
14
- Value?: string
15
- Restriction?: string
16
- Transformation?: string
17
- Violations?: string[]
18
- Cause: string
19
- Action: string
20
- ValidationID: string
21
- }
22
-
23
- export interface ResolutionException {
24
- ExceptionKind: 'Resolution'
25
- UndefinedIdentifiers: string[]
26
- }
@@ -1,372 +0,0 @@
1
- export type DtdlObjectModel = { [entityId: string]: EntityType }
2
-
3
- export interface ArrayInfo extends ComplexSchemaInfo {
4
- EntityKind: 'Array'
5
- elementSchema: string
6
- }
7
-
8
- export type ArrayType = ArrayInfo
9
-
10
- export interface BooleanInfo extends PrimitiveSchemaInfo {
11
- EntityKind: 'Boolean'
12
- }
13
-
14
- export type BooleanType = BooleanInfo
15
-
16
- export interface CommandInfo extends ContentInfo {
17
- EntityKind: 'Command'
18
- commandType?: string
19
- request?: string
20
- response?: string
21
- }
22
-
23
- export type CommandType = CommandInfo
24
-
25
- export interface CommandPayloadInfo extends SchemaFieldInfo {
26
- EntityKind: 'CommandPayload' | 'CommandRequest' | 'CommandResponse'
27
- }
28
-
29
- export type CommandPayloadType = CommandPayloadInfo | CommandRequestType | CommandResponseType
30
-
31
- export interface CommandRequestInfo extends CommandPayloadInfo {
32
- EntityKind: 'CommandRequest'
33
- }
34
-
35
- export type CommandRequestType = CommandRequestInfo
36
-
37
- export interface CommandResponseInfo extends CommandPayloadInfo {
38
- EntityKind: 'CommandResponse'
39
- }
40
-
41
- export type CommandResponseType = CommandResponseInfo
42
-
43
- export interface CommandTypeInfo extends EntityInfo {
44
- EntityKind: 'CommandType'
45
- }
46
-
47
- export type CommandTypeType = CommandTypeInfo
48
-
49
- export interface ComplexSchemaInfo extends SchemaInfo {
50
- EntityKind: 'Array' | 'Enum' | 'Map' | 'Object'
51
- }
52
-
53
- export type ComplexSchemaType = ComplexSchemaInfo | ArrayType | EnumType | MapType | ObjectType
54
-
55
- export interface ComponentInfo extends ContentInfo {
56
- EntityKind: 'Component'
57
- schema: string
58
- }
59
-
60
- export type ComponentType = ComponentInfo
61
-
62
- export interface ContentInfo extends NamedEntityInfo {
63
- EntityKind: 'Command' | 'Component' | 'Property' | 'Relationship' | 'Telemetry'
64
- }
65
-
66
- export type ContentType = ContentInfo | CommandType | ComponentType | PropertyType | RelationshipType | TelemetryType
67
-
68
- export interface DateInfo extends TemporalSchemaInfo {
69
- EntityKind: 'Date'
70
- }
71
-
72
- export type DateType = DateInfo
73
-
74
- export interface DateTimeInfo extends TemporalSchemaInfo {
75
- EntityKind: 'DateTime'
76
- }
77
-
78
- export type DateTimeType = DateTimeInfo
79
-
80
- export interface DoubleInfo extends NumericSchemaInfo {
81
- EntityKind: 'Double'
82
- }
83
-
84
- export type DoubleType = DoubleInfo
85
-
86
- export interface DurationInfo extends TemporalSchemaInfo {
87
- EntityKind: 'Duration'
88
- }
89
-
90
- export type DurationType = DurationInfo
91
-
92
- export interface EntityInfo {
93
- EntityKind:
94
- | 'Array'
95
- | 'Boolean'
96
- | 'Command'
97
- | 'CommandPayload'
98
- | 'CommandType'
99
- | 'Component'
100
- | 'Date'
101
- | 'DateTime'
102
- | 'Double'
103
- | 'Duration'
104
- | 'Enum'
105
- | 'EnumValue'
106
- | 'Field'
107
- | 'Float'
108
- | 'Integer'
109
- | 'Interface'
110
- | 'Long'
111
- | 'Map'
112
- | 'MapKey'
113
- | 'MapValue'
114
- | 'Object'
115
- | 'Property'
116
- | 'Relationship'
117
- | 'String'
118
- | 'Telemetry'
119
- | 'Time'
120
- | 'CommandRequest'
121
- | 'CommandResponse'
122
- | 'Unit'
123
- | 'UnitAttribute'
124
- | 'LatentType'
125
- | 'NamedLatentType'
126
- SupplementalTypes: string[]
127
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
128
- SupplementalProperties: { [property: string]: any }
129
- UndefinedTypes: string[]
130
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
131
- UndefinedProperties: { [property: string]: any }
132
- ClassId: string
133
- comment?: string
134
- description: { [languageCode: string]: string }
135
- displayName: { [languageCode: string]: string }
136
- languageMajorVersion: number
137
- Id: string
138
- ChildOf?: string
139
- DefinedIn?: string
140
- }
141
-
142
- export type EntityType =
143
- | EntityInfo
144
- | CommandTypeType
145
- | InterfaceType
146
- | LatentTypeType
147
- | NamedEntityType
148
- | SchemaType
149
- | UnitType
150
-
151
- export interface EnumInfo extends ComplexSchemaInfo {
152
- EntityKind: 'Enum'
153
- enumValues: string[]
154
- valueSchema: string
155
- }
156
-
157
- export type EnumType = EnumInfo
158
-
159
- export interface EnumValueInfo extends NamedEntityInfo {
160
- EntityKind: 'EnumValue'
161
- enumValue: string | number | boolean
162
- }
163
-
164
- export type EnumValueType = EnumValueInfo
165
-
166
- export interface FieldInfo extends SchemaFieldInfo {
167
- EntityKind: 'Field'
168
- }
169
-
170
- export type FieldType = FieldInfo
171
-
172
- export interface FloatInfo extends NumericSchemaInfo {
173
- EntityKind: 'Float'
174
- }
175
-
176
- export type FloatType = FloatInfo
177
-
178
- export interface IntegerInfo extends NumericSchemaInfo {
179
- EntityKind: 'Integer'
180
- }
181
-
182
- export type IntegerType = IntegerInfo
183
-
184
- export interface InterfaceInfo extends EntityInfo {
185
- EntityKind: 'Interface'
186
- contents: { [name: string]: string }
187
- commands: { [name: string]: string }
188
- components: { [name: string]: string }
189
- properties: { [name: string]: string }
190
- relationships: { [name: string]: string }
191
- telemetries: { [name: string]: string }
192
- extends: string[]
193
- extendedBy: string[]
194
- schemas: string[]
195
- }
196
-
197
- export type InterfaceType = InterfaceInfo
198
-
199
- export interface LatentTypeInfo extends EntityInfo {
200
- EntityKind: 'LatentType'
201
- }
202
-
203
- export type LatentTypeType = LatentTypeInfo
204
-
205
- export interface LongInfo extends NumericSchemaInfo {
206
- EntityKind: 'Long'
207
- }
208
-
209
- export type LongType = LongInfo
210
-
211
- export interface MapInfo extends ComplexSchemaInfo {
212
- EntityKind: 'Map'
213
- mapKey: string
214
- mapValue: string
215
- }
216
-
217
- export type MapType = MapInfo
218
-
219
- export interface MapKeyInfo extends NamedEntityInfo {
220
- EntityKind: 'MapKey'
221
- schema: string
222
- }
223
-
224
- export type MapKeyType = MapKeyInfo
225
-
226
- export interface MapValueInfo extends SchemaFieldInfo {
227
- EntityKind: 'MapValue'
228
- }
229
-
230
- export type MapValueType = MapValueInfo
231
-
232
- export interface NamedEntityInfo extends EntityInfo {
233
- EntityKind:
234
- | 'Command'
235
- | 'CommandPayload'
236
- | 'Component'
237
- | 'EnumValue'
238
- | 'Field'
239
- | 'MapKey'
240
- | 'MapValue'
241
- | 'Property'
242
- | 'Relationship'
243
- | 'Telemetry'
244
- | 'CommandRequest'
245
- | 'CommandResponse'
246
- | 'UnitAttribute'
247
- | 'NamedLatentType'
248
- name: string
249
- }
250
-
251
- export type NamedEntityType =
252
- | NamedEntityInfo
253
- | ContentType
254
- | EnumValueType
255
- | MapKeyType
256
- | NamedLatentTypeType
257
- | SchemaFieldType
258
- | UnitAttributeType
259
-
260
- export interface NamedLatentTypeInfo extends NamedEntityInfo {
261
- EntityKind: 'NamedLatentType'
262
- }
263
-
264
- export type NamedLatentTypeType = NamedLatentTypeInfo
265
-
266
- export interface NumericSchemaInfo extends PrimitiveSchemaInfo {
267
- EntityKind: 'Double' | 'Float' | 'Integer' | 'Long'
268
- }
269
-
270
- export type NumericSchemaType = NumericSchemaInfo | DoubleType | FloatType | IntegerType | LongType
271
-
272
- export interface ObjectInfo extends ComplexSchemaInfo {
273
- EntityKind: 'Object'
274
- fields: string[]
275
- }
276
-
277
- export type ObjectType = ObjectInfo
278
-
279
- export interface PrimitiveSchemaInfo extends SchemaInfo {
280
- EntityKind: 'Boolean' | 'Date' | 'DateTime' | 'Double' | 'Duration' | 'Float' | 'Integer' | 'Long' | 'String' | 'Time'
281
- }
282
-
283
- export type PrimitiveSchemaType =
284
- | PrimitiveSchemaInfo
285
- | BooleanType
286
- | NumericSchemaType
287
- | StringType
288
- | TemporalSchemaType
289
-
290
- export interface PropertyInfo extends ContentInfo {
291
- EntityKind: 'Property'
292
- schema: string
293
- writable: boolean
294
- }
295
-
296
- export type PropertyType = PropertyInfo
297
-
298
- export interface RelationshipInfo extends ContentInfo {
299
- EntityKind: 'Relationship'
300
- maxMultiplicity?: number
301
- minMultiplicity?: number
302
- properties: string[]
303
- target?: string
304
- writable: boolean
305
- }
306
-
307
- export type RelationshipType = RelationshipInfo
308
-
309
- export interface SchemaInfo extends EntityInfo {
310
- EntityKind:
311
- | 'Array'
312
- | 'Boolean'
313
- | 'Date'
314
- | 'DateTime'
315
- | 'Double'
316
- | 'Duration'
317
- | 'Enum'
318
- | 'Float'
319
- | 'Integer'
320
- | 'Long'
321
- | 'Map'
322
- | 'Object'
323
- | 'String'
324
- | 'Time'
325
- }
326
-
327
- export type SchemaType = SchemaInfo | ComplexSchemaType | PrimitiveSchemaType
328
-
329
- export interface SchemaFieldInfo extends NamedEntityInfo {
330
- EntityKind: 'CommandPayload' | 'Field' | 'MapValue' | 'CommandRequest' | 'CommandResponse'
331
- schema: string
332
- }
333
-
334
- export type SchemaFieldType = SchemaFieldInfo | CommandPayloadType | FieldType | MapValueType
335
-
336
- export interface StringInfo extends PrimitiveSchemaInfo {
337
- EntityKind: 'String'
338
- }
339
-
340
- export type StringType = StringInfo
341
-
342
- export interface TelemetryInfo extends ContentInfo {
343
- EntityKind: 'Telemetry'
344
- schema: string
345
- }
346
-
347
- export type TelemetryType = TelemetryInfo
348
-
349
- export interface TemporalSchemaInfo extends PrimitiveSchemaInfo {
350
- EntityKind: 'Date' | 'DateTime' | 'Duration' | 'Time'
351
- }
352
-
353
- export type TemporalSchemaType = TemporalSchemaInfo | DateType | DateTimeType | DurationType | TimeType
354
-
355
- export interface TimeInfo extends TemporalSchemaInfo {
356
- EntityKind: 'Time'
357
- }
358
-
359
- export type TimeType = TimeInfo
360
-
361
- export interface UnitInfo extends EntityInfo {
362
- EntityKind: 'Unit'
363
- symbol?: string
364
- }
365
-
366
- export type UnitType = UnitInfo
367
-
368
- export interface UnitAttributeInfo extends NamedEntityInfo {
369
- EntityKind: 'UnitAttribute'
370
- }
371
-
372
- export type UnitAttributeType = UnitAttributeInfo
package/interop/LICENSE DELETED
@@ -1,21 +0,0 @@
1
- MIT License
2
-
3
- Copyright (c) Digital Twin Consortium and contributors.
4
-
5
- Permission is hereby granted, free of charge, to any person obtaining a copy
6
- of this software and associated documentation files (the "Software"), to deal
7
- in the Software without restriction, including without limitation the rights
8
- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
- copies of the Software, and to permit persons to whom the Software is
10
- furnished to do so, subject to the following conditions:
11
-
12
- The above copyright notice and this permission notice shall be included in all
13
- copies or substantial portions of the Software.
14
-
15
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
- SOFTWARE
@@ -1,19 +0,0 @@
1
- using DTDLParser;
2
- using System;
3
- using System.Runtime.InteropServices.JavaScript;
4
- using System.Runtime.Versioning;
5
-
6
- namespace DtdlParserJSInterop;
7
-
8
- [SupportedOSPlatform("browser")]
9
- public partial class ModelParserInterop
10
- {
11
- public static void Main() => Console.WriteLine("dotnet loaded");
12
-
13
- [JSExport]
14
- public static string ParserVersion() => typeof(ModelParser).Assembly.FullName;
15
-
16
- [JSExport]
17
- public static string Parse(string dtdl) => new ModelParser().ParseToJson(dtdl);
18
-
19
- }
package/interop/README.md DELETED
@@ -1 +0,0 @@
1
- This directory modifies code from `DTDLParser for .NET`, licensed under an [MIT License](./LICENSE). You can find the original project [here](https://github.com/digitaltwinconsortium/DTDLParser).
@@ -1,11 +0,0 @@
1
- import { dotnet } from './_framework/dotnet.js'
2
-
3
- const { getAssemblyExports, getConfig } = await dotnet.withDiagnosticTracing(false).create()
4
-
5
- const config = getConfig()
6
- const assemblyExports = await getAssemblyExports(config.mainAssemblyName)
7
-
8
- const parserVersion = () => assemblyExports.DtdlParserJSInterop.ModelParserInterop.ParserVersion()
9
- const parse = (dtdl) => assemblyExports.DtdlParserJSInterop.ModelParserInterop.Parse(dtdl)
10
-
11
- export { parse, parserVersion }
@@ -1,11 +0,0 @@
1
- {
2
- "wasmHostProperties": {
3
- "perHostConfig": [
4
- {
5
- "name": "node",
6
- "js-path": "modelParser.js",
7
- "host": "nodejs"
8
- }
9
- ]
10
- }
11
- }
@@ -1,119 +0,0 @@
1
- import { expect } from 'chai'
2
- import { describe, it } from 'mocha'
3
- import path from 'path'
4
- import { parseDirectories, searchForJsonFiles, validateDirectories } from '../index'
5
- import { Parser } from '../interop'
6
-
7
- const fixturesFilepath = path.resolve('src/__tests__/fixtures')
8
-
9
- const exampleModel = {
10
- 'dtmi:com:example:base;1': {
11
- languageMajorVersion: 3,
12
- Id: 'dtmi:com:example:base;1',
13
- ChildOf: 'dtmi:com:example;1',
14
- DefinedIn: 'dtmi:com:example;1',
15
- EntityKind: 'Interface',
16
- ClassId: 'dtmi:dtdl:class:Interface;3',
17
- extendedBy: ['dtmi:com:example;1'],
18
- },
19
- 'dtmi:com:example;1': {
20
- languageMajorVersion: 3,
21
- Id: 'dtmi:com:example;1',
22
- EntityKind: 'Interface',
23
- ClassId: 'dtmi:dtdl:class:Interface;3',
24
- extends: ['dtmi:com:example:base;1'],
25
- },
26
- }
27
-
28
- const mockParser: Parser = {
29
- parse: () => JSON.stringify(exampleModel),
30
- parserVersion: () => '1.0.0',
31
- }
32
-
33
- const mockParserWithParsingException: Parser = {
34
- parse: () => {
35
- throw new Error(
36
- JSON.stringify({
37
- ExceptionKind: 'Parsing',
38
- Errors: [{ Cause: '', Action: '', ValidationID: '' }],
39
- })
40
- )
41
- },
42
- parserVersion: () => '1.0.0',
43
- }
44
-
45
- const mockParserWithResolutionException: Parser = {
46
- parse: () => {
47
- throw new Error(
48
- JSON.stringify({
49
- ExceptionKind: 'Resolution',
50
- })
51
- )
52
- },
53
- parserVersion: () => '1.0.0',
54
- }
55
-
56
- describe('parse', () => {
57
- describe('search for files', () => {
58
- it('should return nested json filepaths', async () => {
59
- const filepaths = searchForJsonFiles(fixturesFilepath)
60
- expect(filepaths.map((fp) => path.basename(fp))).to.deep.equal(['empty.json', 'nested.json'])
61
- })
62
- })
63
-
64
- describe('valid parse', () => {
65
- it('should return model', async () => {
66
- const model = parseDirectories(fixturesFilepath, mockParser)
67
- expect(model).to.deep.equal(exampleModel)
68
- })
69
- })
70
-
71
- describe('invalid directory path', () => {
72
- it('should return null', async () => {
73
- const model = parseDirectories('invalid', mockParser)
74
- expect(model).to.equal(null)
75
- })
76
- })
77
-
78
- describe('parsing exception thrown by interop parser', () => {
79
- it('should return null', async () => {
80
- const model = parseDirectories(fixturesFilepath, mockParserWithParsingException)
81
- expect(model).to.equal(null)
82
- })
83
- })
84
- })
85
-
86
- describe('parse', () => {
87
- describe('valid validate', () => {
88
- it('should returned validated true', async () => {
89
- const isValid = validateDirectories(fixturesFilepath, mockParser, false)
90
- expect(isValid).to.equal(true)
91
- })
92
- })
93
-
94
- describe('invalid directory path', () => {
95
- it('should return null', async () => {
96
- const isValid = validateDirectories('invalid', mockParser, false)
97
- expect(isValid).to.equal(false)
98
- })
99
- })
100
-
101
- describe('parsing exception thrown by interop validate', () => {
102
- it('should return false', async () => {
103
- const isValid = validateDirectories(fixturesFilepath, mockParserWithParsingException, false)
104
- expect(isValid).to.equal(false)
105
- })
106
- })
107
-
108
- describe('resolution exception thrown by interop validate', () => {
109
- it('should return false if including resolution check', async () => {
110
- const isValid = validateDirectories(fixturesFilepath, mockParserWithResolutionException, true)
111
- expect(isValid).to.equal(false)
112
- })
113
-
114
- it('should return true if NOT including resolution check', async () => {
115
- const isValid = validateDirectories(fixturesFilepath, mockParserWithResolutionException, false)
116
- expect(isValid).to.equal(true)
117
- })
118
- })
119
- })
package/src/error.ts DELETED
@@ -1,25 +0,0 @@
1
- import { ModelingException, ParsingException, ResolutionException } from '../interop/DtdlErr.js'
2
-
3
- const { error } = console
4
-
5
- const isParsingEx = (exception: ModelingException): exception is ParsingException => {
6
- return exception.ExceptionKind === 'Parsing'
7
- }
8
-
9
- const isResolutionEx = (exception: ModelingException): exception is ResolutionException => {
10
- return exception.ExceptionKind === 'Resolution'
11
- }
12
-
13
- export const isResolutionException = (err: unknown) => {
14
- if (!(err instanceof Error)) return false
15
- return isResolutionEx(JSON.parse(err.message))
16
- }
17
-
18
- export const errorHandler = (err: unknown) => {
19
- if (!(err instanceof Error)) return error(`Unexpected error: ${err}`)
20
-
21
- const exception = JSON.parse(err.message) as ModelingException
22
-
23
- if (!(isParsingEx(exception) || isResolutionEx(exception))) error('Unknown exception type')
24
- error(exception)
25
- }
package/src/interop.ts DELETED
@@ -1,9 +0,0 @@
1
- export const getInterop = async (): Promise<Parser> => {
2
- const module = await import('../interop/modelParser.js')
3
- return module as Parser
4
- }
5
-
6
- export interface Parser {
7
- parse: (file: string) => string
8
- parserVersion: () => string
9
- }
package/test/mocharc.json DELETED
@@ -1,6 +0,0 @@
1
- {
2
- "timeout": 5000,
3
- "exit": true,
4
- "extension": "ts",
5
- "node-option": ["import=@swc-node/register/esm-register"]
6
- }
package/tsconfig.json DELETED
@@ -1,21 +0,0 @@
1
- {
2
- "compilerOptions": {
3
- "experimentalDecorators": true,
4
- "emitDecoratorMetadata": true,
5
- "target": "ESNext",
6
- "module": "ESNext",
7
- "moduleResolution": "node",
8
- "forceConsistentCasingInFileNames": true,
9
- "strict": true,
10
- "sourceMap": true,
11
- "noUnusedLocals": true,
12
- "noUnusedParameters": true,
13
- "skipLibCheck": true,
14
- "esModuleInterop": true,
15
- "allowSyntheticDefaultImports": true,
16
- "outDir": "build",
17
- "resolveJsonModule": true,
18
- "noImplicitAny": false
19
- },
20
- "exclude": ["node_modules", "**/__tests__", "test"]
21
- }