@rainbow-o23/n3 1.0.37 → 1.0.38
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.cjs +56 -4
- package/index.js +56 -4
- package/lib/step/snowflake-step.d.ts +4 -2
- package/lib/typeorm-step/typeorm-save-entity-step.d.ts +1 -0
- package/lib/utils/index.d.ts +1 -0
- package/lib/utils/snowflake.d.ts +14 -0
- package/package.json +3 -4
- package/rollup.config.base.js +1 -2
- package/src/lib/step/abstract-fragmentary-pipeline-step.ts +8 -1
- package/src/lib/step/snowflake-step.ts +11 -5
- package/src/lib/typeorm-step/typeorm-save-entity-step.ts +5 -2
- package/src/lib/utils/index.ts +1 -0
- package/src/lib/utils/snowflake.ts +58 -0
- package/test/step/typeorm-by-sql-transactional.test.ts +1 -1
package/index.cjs
CHANGED
|
@@ -2,7 +2,6 @@
|
|
|
2
2
|
|
|
3
3
|
var typeorm = require('typeorm');
|
|
4
4
|
var n1 = require('@rainbow-o23/n1');
|
|
5
|
-
var snowflake = require('@theinternetfolks/snowflake');
|
|
6
5
|
var fetch = require('node-fetch');
|
|
7
6
|
|
|
8
7
|
const ERR_PIPELINE_STEP_SNIPPET_NOT_EMPTY = 'O03-00001';
|
|
@@ -703,7 +702,12 @@ class AbstractFragmentaryPipelineStep extends n1.AbstractPipelineStep {
|
|
|
703
702
|
}
|
|
704
703
|
catch (e) {
|
|
705
704
|
const result = await this.handleError(fragment, request, e);
|
|
706
|
-
|
|
705
|
+
try {
|
|
706
|
+
return this.setToOutput(result, request);
|
|
707
|
+
}
|
|
708
|
+
catch (e) {
|
|
709
|
+
return (await this.handleError(fragment, request, e));
|
|
710
|
+
}
|
|
707
711
|
}
|
|
708
712
|
}
|
|
709
713
|
async perform(request) {
|
|
@@ -1061,9 +1065,54 @@ class SnippetPipelineStep extends AbstractFragmentaryPipelineStep {
|
|
|
1061
1065
|
}
|
|
1062
1066
|
}
|
|
1063
1067
|
|
|
1068
|
+
class Snowflake {
|
|
1069
|
+
static EPOCH = Date.UTC(1970, 0, 1).valueOf();
|
|
1070
|
+
static SHARD_ID = 1;
|
|
1071
|
+
static MAX_SEQUENCE = 4095;
|
|
1072
|
+
static sequence = 0;
|
|
1073
|
+
static lastTimestamp = -1;
|
|
1074
|
+
static generate(options = {}) {
|
|
1075
|
+
let { timestamp = Date.now(), shardId } = options;
|
|
1076
|
+
if (timestamp instanceof Date) {
|
|
1077
|
+
timestamp = timestamp.getTime();
|
|
1078
|
+
}
|
|
1079
|
+
else {
|
|
1080
|
+
timestamp = new Date(timestamp).getTime();
|
|
1081
|
+
}
|
|
1082
|
+
shardId = shardId || Snowflake.SHARD_ID;
|
|
1083
|
+
if (timestamp === Snowflake.lastTimestamp) {
|
|
1084
|
+
Snowflake.sequence = (Snowflake.sequence + 1) % Snowflake.MAX_SEQUENCE;
|
|
1085
|
+
if (Snowflake.sequence === 0) {
|
|
1086
|
+
timestamp = Snowflake.waitNextMillis(timestamp);
|
|
1087
|
+
}
|
|
1088
|
+
}
|
|
1089
|
+
else {
|
|
1090
|
+
Snowflake.sequence = 0;
|
|
1091
|
+
}
|
|
1092
|
+
Snowflake.lastTimestamp = timestamp;
|
|
1093
|
+
const result = (BigInt(timestamp - Snowflake.EPOCH) << BigInt(22)) |
|
|
1094
|
+
(BigInt(shardId % 1024) << BigInt(12)) |
|
|
1095
|
+
BigInt(Snowflake.sequence);
|
|
1096
|
+
return result.toString();
|
|
1097
|
+
}
|
|
1098
|
+
static waitNextMillis(currentTimestamp) {
|
|
1099
|
+
let timestamp = Date.now();
|
|
1100
|
+
while (timestamp <= currentTimestamp) {
|
|
1101
|
+
timestamp = Date.now();
|
|
1102
|
+
}
|
|
1103
|
+
return timestamp;
|
|
1104
|
+
}
|
|
1105
|
+
}
|
|
1106
|
+
|
|
1064
1107
|
class SnowflakePipelineStep extends AbstractFragmentaryPipelineStep {
|
|
1108
|
+
_shardId;
|
|
1109
|
+
constructor(options) {
|
|
1110
|
+
super(options);
|
|
1111
|
+
const config = this.getConfig();
|
|
1112
|
+
this._shardId = config.getNumber(`snowflake.shard.id`);
|
|
1113
|
+
}
|
|
1065
1114
|
async doPerform(_data, _request) {
|
|
1066
|
-
return Snowflake.generate();
|
|
1115
|
+
return Snowflake.generate({ shardId: this._shardId });
|
|
1067
1116
|
}
|
|
1068
1117
|
}
|
|
1069
1118
|
|
|
@@ -1602,12 +1651,15 @@ class TypeOrmLoadEntityByIdPipelineStep extends AbstractTypeOrmPipelineStep {
|
|
|
1602
1651
|
class TypeOrmSaveEntityPipelineStep extends AbstractTypeOrmPipelineStep {
|
|
1603
1652
|
_entityName;
|
|
1604
1653
|
_fillIdBySnowflake;
|
|
1654
|
+
_snowflakeShardId;
|
|
1605
1655
|
_uniquenessCheckSnippet;
|
|
1606
1656
|
_uniquenessCheckFunc;
|
|
1607
1657
|
constructor(options) {
|
|
1608
1658
|
super(options);
|
|
1659
|
+
const config = this.getConfig();
|
|
1609
1660
|
this._entityName = options.entityName;
|
|
1610
1661
|
this._fillIdBySnowflake = options.fillIdBySnowflake ?? false;
|
|
1662
|
+
this._snowflakeShardId = config.getNumber(`snowflake.shard.id`);
|
|
1611
1663
|
this._uniquenessCheckSnippet = options.uniquenessCheckSnippet;
|
|
1612
1664
|
this._uniquenessCheckFunc = Utils.createSyncFunction(this.getUniquenessCheckSnippet(), {
|
|
1613
1665
|
createDefault: () => (void 0),
|
|
@@ -1650,7 +1702,7 @@ class TypeOrmSaveEntityPipelineStep extends AbstractTypeOrmPipelineStep {
|
|
|
1650
1702
|
if (this.isFillIdBySnowflake()) {
|
|
1651
1703
|
const id = entity[column.propertyName];
|
|
1652
1704
|
if (id == null || `${id ?? ''}`.trim().length === 0) {
|
|
1653
|
-
entity[column.propertyName] = Snowflake.generate();
|
|
1705
|
+
entity[column.propertyName] = Snowflake.generate({ shardId: this._snowflakeShardId });
|
|
1654
1706
|
}
|
|
1655
1707
|
}
|
|
1656
1708
|
return await repository.save(entity);
|
package/index.js
CHANGED
|
@@ -1,6 +1,5 @@
|
|
|
1
1
|
import { DataSource } from 'typeorm';
|
|
2
2
|
import { UncatchableError, AbstractPipelineStep, CatchableError, ExposedUncatchableError, ERR_UNKNOWN, PipelineRepository, StepHelpersUtils } from '@rainbow-o23/n1';
|
|
3
|
-
import { Snowflake } from '@theinternetfolks/snowflake';
|
|
4
3
|
import fetch from 'node-fetch';
|
|
5
4
|
|
|
6
5
|
const ERR_PIPELINE_STEP_SNIPPET_NOT_EMPTY = 'O03-00001';
|
|
@@ -701,7 +700,12 @@ class AbstractFragmentaryPipelineStep extends AbstractPipelineStep {
|
|
|
701
700
|
}
|
|
702
701
|
catch (e) {
|
|
703
702
|
const result = await this.handleError(fragment, request, e);
|
|
704
|
-
|
|
703
|
+
try {
|
|
704
|
+
return this.setToOutput(result, request);
|
|
705
|
+
}
|
|
706
|
+
catch (e) {
|
|
707
|
+
return (await this.handleError(fragment, request, e));
|
|
708
|
+
}
|
|
705
709
|
}
|
|
706
710
|
}
|
|
707
711
|
async perform(request) {
|
|
@@ -1059,9 +1063,54 @@ class SnippetPipelineStep extends AbstractFragmentaryPipelineStep {
|
|
|
1059
1063
|
}
|
|
1060
1064
|
}
|
|
1061
1065
|
|
|
1066
|
+
class Snowflake {
|
|
1067
|
+
static EPOCH = Date.UTC(1970, 0, 1).valueOf();
|
|
1068
|
+
static SHARD_ID = 1;
|
|
1069
|
+
static MAX_SEQUENCE = 4095;
|
|
1070
|
+
static sequence = 0;
|
|
1071
|
+
static lastTimestamp = -1;
|
|
1072
|
+
static generate(options = {}) {
|
|
1073
|
+
let { timestamp = Date.now(), shardId } = options;
|
|
1074
|
+
if (timestamp instanceof Date) {
|
|
1075
|
+
timestamp = timestamp.getTime();
|
|
1076
|
+
}
|
|
1077
|
+
else {
|
|
1078
|
+
timestamp = new Date(timestamp).getTime();
|
|
1079
|
+
}
|
|
1080
|
+
shardId = shardId || Snowflake.SHARD_ID;
|
|
1081
|
+
if (timestamp === Snowflake.lastTimestamp) {
|
|
1082
|
+
Snowflake.sequence = (Snowflake.sequence + 1) % Snowflake.MAX_SEQUENCE;
|
|
1083
|
+
if (Snowflake.sequence === 0) {
|
|
1084
|
+
timestamp = Snowflake.waitNextMillis(timestamp);
|
|
1085
|
+
}
|
|
1086
|
+
}
|
|
1087
|
+
else {
|
|
1088
|
+
Snowflake.sequence = 0;
|
|
1089
|
+
}
|
|
1090
|
+
Snowflake.lastTimestamp = timestamp;
|
|
1091
|
+
const result = (BigInt(timestamp - Snowflake.EPOCH) << BigInt(22)) |
|
|
1092
|
+
(BigInt(shardId % 1024) << BigInt(12)) |
|
|
1093
|
+
BigInt(Snowflake.sequence);
|
|
1094
|
+
return result.toString();
|
|
1095
|
+
}
|
|
1096
|
+
static waitNextMillis(currentTimestamp) {
|
|
1097
|
+
let timestamp = Date.now();
|
|
1098
|
+
while (timestamp <= currentTimestamp) {
|
|
1099
|
+
timestamp = Date.now();
|
|
1100
|
+
}
|
|
1101
|
+
return timestamp;
|
|
1102
|
+
}
|
|
1103
|
+
}
|
|
1104
|
+
|
|
1062
1105
|
class SnowflakePipelineStep extends AbstractFragmentaryPipelineStep {
|
|
1106
|
+
_shardId;
|
|
1107
|
+
constructor(options) {
|
|
1108
|
+
super(options);
|
|
1109
|
+
const config = this.getConfig();
|
|
1110
|
+
this._shardId = config.getNumber(`snowflake.shard.id`);
|
|
1111
|
+
}
|
|
1063
1112
|
async doPerform(_data, _request) {
|
|
1064
|
-
return Snowflake.generate();
|
|
1113
|
+
return Snowflake.generate({ shardId: this._shardId });
|
|
1065
1114
|
}
|
|
1066
1115
|
}
|
|
1067
1116
|
|
|
@@ -1600,12 +1649,15 @@ class TypeOrmLoadEntityByIdPipelineStep extends AbstractTypeOrmPipelineStep {
|
|
|
1600
1649
|
class TypeOrmSaveEntityPipelineStep extends AbstractTypeOrmPipelineStep {
|
|
1601
1650
|
_entityName;
|
|
1602
1651
|
_fillIdBySnowflake;
|
|
1652
|
+
_snowflakeShardId;
|
|
1603
1653
|
_uniquenessCheckSnippet;
|
|
1604
1654
|
_uniquenessCheckFunc;
|
|
1605
1655
|
constructor(options) {
|
|
1606
1656
|
super(options);
|
|
1657
|
+
const config = this.getConfig();
|
|
1607
1658
|
this._entityName = options.entityName;
|
|
1608
1659
|
this._fillIdBySnowflake = options.fillIdBySnowflake ?? false;
|
|
1660
|
+
this._snowflakeShardId = config.getNumber(`snowflake.shard.id`);
|
|
1609
1661
|
this._uniquenessCheckSnippet = options.uniquenessCheckSnippet;
|
|
1610
1662
|
this._uniquenessCheckFunc = Utils.createSyncFunction(this.getUniquenessCheckSnippet(), {
|
|
1611
1663
|
createDefault: () => (void 0),
|
|
@@ -1648,7 +1700,7 @@ class TypeOrmSaveEntityPipelineStep extends AbstractTypeOrmPipelineStep {
|
|
|
1648
1700
|
if (this.isFillIdBySnowflake()) {
|
|
1649
1701
|
const id = entity[column.propertyName];
|
|
1650
1702
|
if (id == null || `${id ?? ''}`.trim().length === 0) {
|
|
1651
|
-
entity[column.propertyName] = Snowflake.generate();
|
|
1703
|
+
entity[column.propertyName] = Snowflake.generate({ shardId: this._snowflakeShardId });
|
|
1652
1704
|
}
|
|
1653
1705
|
}
|
|
1654
1706
|
return await repository.save(entity);
|
|
@@ -1,6 +1,8 @@
|
|
|
1
1
|
import { PipelineStepData, PipelineStepPayload } from '@rainbow-o23/n1';
|
|
2
|
-
import {
|
|
3
|
-
|
|
2
|
+
import { SnowflakeId } from '../utils';
|
|
3
|
+
import { AbstractFragmentaryPipelineStep, FragmentaryPipelineStepOptions } from './abstract-fragmentary-pipeline-step';
|
|
4
4
|
export declare class SnowflakePipelineStep<In = PipelineStepPayload, Out = PipelineStepPayload, InFragment = In> extends AbstractFragmentaryPipelineStep<In, Out, InFragment, SnowflakeId> {
|
|
5
|
+
private readonly _shardId;
|
|
6
|
+
constructor(options: FragmentaryPipelineStepOptions<In, Out, InFragment, SnowflakeId>);
|
|
5
7
|
protected doPerform(_data: InFragment, _request: PipelineStepData<In>): Promise<SnowflakeId>;
|
|
6
8
|
}
|
|
@@ -20,6 +20,7 @@ export interface TypeOrmSaveEntityPipelineStepOptions<In = PipelineStepPayload,
|
|
|
20
20
|
export declare class TypeOrmSaveEntityPipelineStep<In = PipelineStepPayload, Out = PipelineStepPayload, InFragment = EntityToSave, OutFragment = EntityToSave> extends AbstractTypeOrmPipelineStep<In, Out, InFragment, OutFragment> {
|
|
21
21
|
private readonly _entityName;
|
|
22
22
|
private readonly _fillIdBySnowflake;
|
|
23
|
+
private readonly _snowflakeShardId;
|
|
23
24
|
private readonly _uniquenessCheckSnippet?;
|
|
24
25
|
private readonly _uniquenessCheckFunc?;
|
|
25
26
|
constructor(options: TypeOrmSaveEntityPipelineStepOptions<In, Out, InFragment, OutFragment>);
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export * from './snowflake';
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
export type SnowflakeId = string;
|
|
2
|
+
export type SnowflakeGenerateOptions = {
|
|
3
|
+
timestamp?: Date | number;
|
|
4
|
+
shardId?: number;
|
|
5
|
+
};
|
|
6
|
+
export declare class Snowflake {
|
|
7
|
+
private static EPOCH;
|
|
8
|
+
private static SHARD_ID;
|
|
9
|
+
private static readonly MAX_SEQUENCE;
|
|
10
|
+
private static sequence;
|
|
11
|
+
private static lastTimestamp;
|
|
12
|
+
static generate(options?: SnowflakeGenerateOptions): SnowflakeId;
|
|
13
|
+
private static waitNextMillis;
|
|
14
|
+
}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@rainbow-o23/n3",
|
|
3
|
-
"version": "1.0.37",
|
|
3
|
+
"version": "1.0.38",
|
|
4
4
|
"description": "o23 pipelines",
|
|
5
5
|
"main": "index.cjs",
|
|
6
6
|
"module": "index.js",
|
|
@@ -21,8 +21,7 @@
|
|
|
21
21
|
"url": "https://github.com/InsureMO/rainbow-o23/issues"
|
|
22
22
|
},
|
|
23
23
|
"dependencies": {
|
|
24
|
-
"@rainbow-o23/n1": "1.0.37",
|
|
25
|
-
"@theinternetfolks/snowflake": "^1.3.0",
|
|
24
|
+
"@rainbow-o23/n1": "1.0.38",
|
|
26
25
|
"node-fetch": "2.6.7",
|
|
27
26
|
"typeorm": "^0.3.17"
|
|
28
27
|
},
|
|
@@ -41,7 +40,7 @@
|
|
|
41
40
|
"better-sqlite3": "^9.0.0",
|
|
42
41
|
"eslint": "^8.29.0",
|
|
43
42
|
"mssql": "^10.0.1",
|
|
44
|
-
"mysql2": "^3.9.
|
|
43
|
+
"mysql2": "^3.9.8",
|
|
45
44
|
"oracledb": "^6.2.0",
|
|
46
45
|
"pg": "^8.11.3",
|
|
47
46
|
"pg-query-stream": "^4.5.3",
|
package/rollup.config.base.js
CHANGED
|
@@ -25,8 +25,7 @@ export const buildConfig = (lint) => {
|
|
|
25
25
|
external(id) {
|
|
26
26
|
return ["@rainbow-o23/", "typeorm/"].some(scope => id.startsWith(scope))
|
|
27
27
|
|| [
|
|
28
|
-
"typeorm", "reflect-metadata",
|
|
29
|
-
"@theinternetfolks/snowflake", "node-fetch"
|
|
28
|
+
"typeorm", "reflect-metadata", "node-fetch"
|
|
30
29
|
].includes(id);
|
|
31
30
|
}
|
|
32
31
|
};
|
|
@@ -365,7 +365,14 @@ export abstract class AbstractFragmentaryPipelineStep<In = PipelineStepPayload,
|
|
|
365
365
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
366
366
|
} catch (e: any) {
|
|
367
367
|
const result = await this.handleError(fragment, request, e);
|
|
368
|
-
|
|
368
|
+
try {
|
|
369
|
+
return this.setToOutput(result, request);
|
|
370
|
+
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
371
|
+
} catch (e: any) {
|
|
372
|
+
// since exception raised during set to output,
|
|
373
|
+
// therefore never call this function on error handling
|
|
374
|
+
return (await this.handleError(fragment, request, e)) as PipelineStepData<Out>;
|
|
375
|
+
}
|
|
369
376
|
}
|
|
370
377
|
}
|
|
371
378
|
|
|
@@ -1,16 +1,22 @@
|
|
|
1
1
|
import {PipelineStepData, PipelineStepPayload} from '@rainbow-o23/n1';
|
|
2
|
-
import {Snowflake} from '@theinternetfolks/snowflake';
|
|
3
|
-
import {AbstractFragmentaryPipelineStep} from './abstract-fragmentary-pipeline-step';
|
|
4
|
-
|
|
5
|
-
export type SnowflakeId = string;
|
|
2
|
+
import {Snowflake, SnowflakeId} from '../utils';
|
|
3
|
+
import {AbstractFragmentaryPipelineStep, FragmentaryPipelineStepOptions} from './abstract-fragmentary-pipeline-step';
|
|
6
4
|
|
|
7
5
|
/**
|
|
8
6
|
* create a snowflake id as out fragment
|
|
9
7
|
*/
|
|
10
8
|
export class SnowflakePipelineStep<In = PipelineStepPayload, Out = PipelineStepPayload, InFragment = In>
|
|
11
9
|
extends AbstractFragmentaryPipelineStep<In, Out, InFragment, SnowflakeId> {
|
|
10
|
+
private readonly _shardId: number;
|
|
11
|
+
|
|
12
|
+
public constructor(options: FragmentaryPipelineStepOptions<In, Out, InFragment, SnowflakeId>) {
|
|
13
|
+
super(options);
|
|
14
|
+
const config = this.getConfig();
|
|
15
|
+
this._shardId = config.getNumber(`snowflake.shard.id`);
|
|
16
|
+
}
|
|
17
|
+
|
|
12
18
|
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
|
13
19
|
protected async doPerform(_data: InFragment, _request: PipelineStepData<In>): Promise<SnowflakeId> {
|
|
14
|
-
return Snowflake.generate();
|
|
20
|
+
return Snowflake.generate({shardId: this._shardId});
|
|
15
21
|
}
|
|
16
22
|
}
|
|
@@ -5,9 +5,9 @@ import {
|
|
|
5
5
|
PipelineStepPayload,
|
|
6
6
|
Undefinable
|
|
7
7
|
} from '@rainbow-o23/n1';
|
|
8
|
-
import {Snowflake} from '@theinternetfolks/snowflake';
|
|
9
8
|
import {DeepPartial, ObjectLiteral} from 'typeorm';
|
|
10
9
|
import {ScriptFuncOrBody, Utils} from '../step';
|
|
10
|
+
import {Snowflake} from '../utils';
|
|
11
11
|
import {AbstractTypeOrmPipelineStep, TypeOrmPipelineStepOptions} from './abstract-typeorm-step';
|
|
12
12
|
import {TypeOrmEntityName} from './types';
|
|
13
13
|
|
|
@@ -33,13 +33,16 @@ export class TypeOrmSaveEntityPipelineStep<In = PipelineStepPayload, Out = Pipel
|
|
|
33
33
|
extends AbstractTypeOrmPipelineStep<In, Out, InFragment, OutFragment> {
|
|
34
34
|
private readonly _entityName: TypeOrmEntityName;
|
|
35
35
|
private readonly _fillIdBySnowflake: boolean;
|
|
36
|
+
private readonly _snowflakeShardId: number;
|
|
36
37
|
private readonly _uniquenessCheckSnippet?: ScriptFuncOrBody<UniquenessCheckFunc<InFragment>>;
|
|
37
38
|
private readonly _uniquenessCheckFunc?: UniquenessCheckFunc<InFragment>;
|
|
38
39
|
|
|
39
40
|
public constructor(options: TypeOrmSaveEntityPipelineStepOptions<In, Out, InFragment, OutFragment>) {
|
|
40
41
|
super(options);
|
|
42
|
+
const config = this.getConfig();
|
|
41
43
|
this._entityName = options.entityName;
|
|
42
44
|
this._fillIdBySnowflake = options.fillIdBySnowflake ?? false;
|
|
45
|
+
this._snowflakeShardId = config.getNumber(`snowflake.shard.id`);
|
|
43
46
|
this._uniquenessCheckSnippet = options.uniquenessCheckSnippet;
|
|
44
47
|
this._uniquenessCheckFunc = Utils.createSyncFunction(this.getUniquenessCheckSnippet(), {
|
|
45
48
|
createDefault: () => (void 0),
|
|
@@ -100,7 +103,7 @@ export class TypeOrmSaveEntityPipelineStep<In = PipelineStepPayload, Out = Pipel
|
|
|
100
103
|
// fill id if not exists
|
|
101
104
|
const id = entity[column.propertyName];
|
|
102
105
|
if (id == null || `${id ?? ''}`.trim().length === 0) {
|
|
103
|
-
entity[column.propertyName] = Snowflake.generate() as unknown as number;
|
|
106
|
+
entity[column.propertyName] = Snowflake.generate({shardId: this._snowflakeShardId}) as unknown as number;
|
|
104
107
|
}
|
|
105
108
|
}
|
|
106
109
|
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export * from './snowflake';
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
export type SnowflakeId = string;
|
|
2
|
+
export type SnowflakeGenerateOptions = { timestamp?: Date | number, shardId?: number };
|
|
3
|
+
|
|
4
|
+
export class Snowflake {
|
|
5
|
+
/** The generators epoch timestamp in milliseconds. Defaults to "1st of January 1970, 00:00". */
|
|
6
|
+
private static EPOCH: number = Date.UTC(1970, 0, 1).valueOf();
|
|
7
|
+
|
|
8
|
+
/** The id of the shard running this generator. Defaults to "1". */
|
|
9
|
+
private static SHARD_ID = 1;
|
|
10
|
+
|
|
11
|
+
/**
|
|
12
|
+
* Max sequence, 2^12 - 1
|
|
13
|
+
*/
|
|
14
|
+
private static readonly MAX_SEQUENCE = 4095;
|
|
15
|
+
/** current sequence */
|
|
16
|
+
private static sequence = 0;
|
|
17
|
+
private static lastTimestamp = -1;
|
|
18
|
+
|
|
19
|
+
/**
|
|
20
|
+
* Generates a single snowflake.
|
|
21
|
+
*/
|
|
22
|
+
static generate(options: SnowflakeGenerateOptions = {}): SnowflakeId {
|
|
23
|
+
let {timestamp = Date.now(), shardId} = options;
|
|
24
|
+
|
|
25
|
+
if (timestamp instanceof Date) {
|
|
26
|
+
timestamp = timestamp.getTime();
|
|
27
|
+
} else {
|
|
28
|
+
timestamp = new Date(timestamp).getTime();
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
shardId = shardId || Snowflake.SHARD_ID;
|
|
32
|
+
|
|
33
|
+
if (timestamp === Snowflake.lastTimestamp) {
|
|
34
|
+
Snowflake.sequence = (Snowflake.sequence + 1) % Snowflake.MAX_SEQUENCE;
|
|
35
|
+
if (Snowflake.sequence === 0) {
|
|
36
|
+
timestamp = Snowflake.waitNextMillis(timestamp);
|
|
37
|
+
}
|
|
38
|
+
} else {
|
|
39
|
+
Snowflake.sequence = 0;
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
Snowflake.lastTimestamp = timestamp;
|
|
43
|
+
|
|
44
|
+
const result = (BigInt(timestamp - Snowflake.EPOCH) << BigInt(22)) |
|
|
45
|
+
(BigInt(shardId % 1024) << BigInt(12)) |
|
|
46
|
+
BigInt(Snowflake.sequence);
|
|
47
|
+
|
|
48
|
+
return result.toString();
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
private static waitNextMillis(currentTimestamp: number) {
|
|
52
|
+
let timestamp = Date.now();
|
|
53
|
+
while (timestamp <= currentTimestamp) {
|
|
54
|
+
timestamp = Date.now();
|
|
55
|
+
}
|
|
56
|
+
return timestamp;
|
|
57
|
+
}
|
|
58
|
+
}
|
|
@@ -187,7 +187,7 @@ describe('TypeORM SQL Transactional Suite', () => {
|
|
|
187
187
|
await new TypeOrmDataSourceHelper(config).create({
|
|
188
188
|
'TEST': [TestTable]
|
|
189
189
|
});
|
|
190
|
-
const repo = (await TypeOrmDataSourceManager.findDataSource('TEST')).getDataSource().getRepository(TestTable);
|
|
190
|
+
const repo = (await TypeOrmDataSourceManager.findDataSource('TEST', config)).getDataSource().getRepository(TestTable);
|
|
191
191
|
await repo.insert({id: 1, content: 'hello world!'});
|
|
192
192
|
await repo.insert({id: 2, content: 'good-bye world!'});
|
|
193
193
|
});
|