@rainbow-o23/n3 1.0.37 → 1.0.39
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.cjs +50 -3
- package/index.js +50 -3
- package/lib/step/snowflake-step.d.ts +4 -2
- package/lib/typeorm-step/typeorm-save-entity-step.d.ts +1 -0
- package/lib/utils/index.d.ts +1 -0
- package/lib/utils/snowflake.d.ts +14 -0
- package/package.json +3 -4
- package/rollup.config.base.js +1 -2
- package/src/lib/step/snowflake-step.ts +11 -5
- package/src/lib/typeorm-step/typeorm-save-entity-step.ts +5 -2
- package/src/lib/utils/index.ts +1 -0
- package/src/lib/utils/snowflake.ts +58 -0
- package/test/step/typeorm-by-sql-autonomous.test.ts +1 -1
- package/test/step/typeorm-by-sql-transactional.test.ts +1 -1
- package/test/step/typeorm-entity.test.ts +1 -1
package/index.cjs
CHANGED
|
@@ -2,7 +2,6 @@
|
|
|
2
2
|
|
|
3
3
|
var typeorm = require('typeorm');
|
|
4
4
|
var n1 = require('@rainbow-o23/n1');
|
|
5
|
-
var snowflake = require('@theinternetfolks/snowflake');
|
|
6
5
|
var fetch = require('node-fetch');
|
|
7
6
|
|
|
8
7
|
const ERR_PIPELINE_STEP_SNIPPET_NOT_EMPTY = 'O03-00001';
|
|
@@ -1061,9 +1060,54 @@ class SnippetPipelineStep extends AbstractFragmentaryPipelineStep {
|
|
|
1061
1060
|
}
|
|
1062
1061
|
}
|
|
1063
1062
|
|
|
1063
|
+
class Snowflake {
|
|
1064
|
+
static EPOCH = Date.UTC(1970, 0, 1).valueOf();
|
|
1065
|
+
static SHARD_ID = 1;
|
|
1066
|
+
static MAX_SEQUENCE = 4095;
|
|
1067
|
+
static sequence = 0;
|
|
1068
|
+
static lastTimestamp = -1;
|
|
1069
|
+
static generate(options = {}) {
|
|
1070
|
+
let { timestamp = Date.now(), shardId } = options;
|
|
1071
|
+
if (timestamp instanceof Date) {
|
|
1072
|
+
timestamp = timestamp.getTime();
|
|
1073
|
+
}
|
|
1074
|
+
else {
|
|
1075
|
+
timestamp = new Date(timestamp).getTime();
|
|
1076
|
+
}
|
|
1077
|
+
shardId = shardId || Snowflake.SHARD_ID;
|
|
1078
|
+
if (timestamp === Snowflake.lastTimestamp) {
|
|
1079
|
+
Snowflake.sequence = (Snowflake.sequence + 1) % Snowflake.MAX_SEQUENCE;
|
|
1080
|
+
if (Snowflake.sequence === 0) {
|
|
1081
|
+
timestamp = Snowflake.waitNextMillis(timestamp);
|
|
1082
|
+
}
|
|
1083
|
+
}
|
|
1084
|
+
else {
|
|
1085
|
+
Snowflake.sequence = 0;
|
|
1086
|
+
}
|
|
1087
|
+
Snowflake.lastTimestamp = timestamp;
|
|
1088
|
+
const result = (BigInt(timestamp - Snowflake.EPOCH) << BigInt(22)) |
|
|
1089
|
+
(BigInt(shardId % 1024) << BigInt(12)) |
|
|
1090
|
+
BigInt(Snowflake.sequence);
|
|
1091
|
+
return result.toString();
|
|
1092
|
+
}
|
|
1093
|
+
static waitNextMillis(currentTimestamp) {
|
|
1094
|
+
let timestamp = Date.now();
|
|
1095
|
+
while (timestamp <= currentTimestamp) {
|
|
1096
|
+
timestamp = Date.now();
|
|
1097
|
+
}
|
|
1098
|
+
return timestamp;
|
|
1099
|
+
}
|
|
1100
|
+
}
|
|
1101
|
+
|
|
1064
1102
|
class SnowflakePipelineStep extends AbstractFragmentaryPipelineStep {
|
|
1103
|
+
_shardId;
|
|
1104
|
+
constructor(options) {
|
|
1105
|
+
super(options);
|
|
1106
|
+
const config = this.getConfig();
|
|
1107
|
+
this._shardId = config.getNumber(`snowflake.shard.id`);
|
|
1108
|
+
}
|
|
1065
1109
|
async doPerform(_data, _request) {
|
|
1066
|
-
return Snowflake.generate();
|
|
1110
|
+
return Snowflake.generate({ shardId: this._shardId });
|
|
1067
1111
|
}
|
|
1068
1112
|
}
|
|
1069
1113
|
|
|
@@ -1602,12 +1646,15 @@ class TypeOrmLoadEntityByIdPipelineStep extends AbstractTypeOrmPipelineStep {
|
|
|
1602
1646
|
class TypeOrmSaveEntityPipelineStep extends AbstractTypeOrmPipelineStep {
|
|
1603
1647
|
_entityName;
|
|
1604
1648
|
_fillIdBySnowflake;
|
|
1649
|
+
_snowflakeShardId;
|
|
1605
1650
|
_uniquenessCheckSnippet;
|
|
1606
1651
|
_uniquenessCheckFunc;
|
|
1607
1652
|
constructor(options) {
|
|
1608
1653
|
super(options);
|
|
1654
|
+
const config = this.getConfig();
|
|
1609
1655
|
this._entityName = options.entityName;
|
|
1610
1656
|
this._fillIdBySnowflake = options.fillIdBySnowflake ?? false;
|
|
1657
|
+
this._snowflakeShardId = config.getNumber(`snowflake.shard.id`);
|
|
1611
1658
|
this._uniquenessCheckSnippet = options.uniquenessCheckSnippet;
|
|
1612
1659
|
this._uniquenessCheckFunc = Utils.createSyncFunction(this.getUniquenessCheckSnippet(), {
|
|
1613
1660
|
createDefault: () => (void 0),
|
|
@@ -1650,7 +1697,7 @@ class TypeOrmSaveEntityPipelineStep extends AbstractTypeOrmPipelineStep {
|
|
|
1650
1697
|
if (this.isFillIdBySnowflake()) {
|
|
1651
1698
|
const id = entity[column.propertyName];
|
|
1652
1699
|
if (id == null || `${id ?? ''}`.trim().length === 0) {
|
|
1653
|
-
entity[column.propertyName] = Snowflake.generate();
|
|
1700
|
+
entity[column.propertyName] = Snowflake.generate({ shardId: this._snowflakeShardId });
|
|
1654
1701
|
}
|
|
1655
1702
|
}
|
|
1656
1703
|
return await repository.save(entity);
|
package/index.js
CHANGED
|
@@ -1,6 +1,5 @@
|
|
|
1
1
|
import { DataSource } from 'typeorm';
|
|
2
2
|
import { UncatchableError, AbstractPipelineStep, CatchableError, ExposedUncatchableError, ERR_UNKNOWN, PipelineRepository, StepHelpersUtils } from '@rainbow-o23/n1';
|
|
3
|
-
import { Snowflake } from '@theinternetfolks/snowflake';
|
|
4
3
|
import fetch from 'node-fetch';
|
|
5
4
|
|
|
6
5
|
const ERR_PIPELINE_STEP_SNIPPET_NOT_EMPTY = 'O03-00001';
|
|
@@ -1059,9 +1058,54 @@ class SnippetPipelineStep extends AbstractFragmentaryPipelineStep {
|
|
|
1059
1058
|
}
|
|
1060
1059
|
}
|
|
1061
1060
|
|
|
1061
|
+
class Snowflake {
|
|
1062
|
+
static EPOCH = Date.UTC(1970, 0, 1).valueOf();
|
|
1063
|
+
static SHARD_ID = 1;
|
|
1064
|
+
static MAX_SEQUENCE = 4095;
|
|
1065
|
+
static sequence = 0;
|
|
1066
|
+
static lastTimestamp = -1;
|
|
1067
|
+
static generate(options = {}) {
|
|
1068
|
+
let { timestamp = Date.now(), shardId } = options;
|
|
1069
|
+
if (timestamp instanceof Date) {
|
|
1070
|
+
timestamp = timestamp.getTime();
|
|
1071
|
+
}
|
|
1072
|
+
else {
|
|
1073
|
+
timestamp = new Date(timestamp).getTime();
|
|
1074
|
+
}
|
|
1075
|
+
shardId = shardId || Snowflake.SHARD_ID;
|
|
1076
|
+
if (timestamp === Snowflake.lastTimestamp) {
|
|
1077
|
+
Snowflake.sequence = (Snowflake.sequence + 1) % Snowflake.MAX_SEQUENCE;
|
|
1078
|
+
if (Snowflake.sequence === 0) {
|
|
1079
|
+
timestamp = Snowflake.waitNextMillis(timestamp);
|
|
1080
|
+
}
|
|
1081
|
+
}
|
|
1082
|
+
else {
|
|
1083
|
+
Snowflake.sequence = 0;
|
|
1084
|
+
}
|
|
1085
|
+
Snowflake.lastTimestamp = timestamp;
|
|
1086
|
+
const result = (BigInt(timestamp - Snowflake.EPOCH) << BigInt(22)) |
|
|
1087
|
+
(BigInt(shardId % 1024) << BigInt(12)) |
|
|
1088
|
+
BigInt(Snowflake.sequence);
|
|
1089
|
+
return result.toString();
|
|
1090
|
+
}
|
|
1091
|
+
static waitNextMillis(currentTimestamp) {
|
|
1092
|
+
let timestamp = Date.now();
|
|
1093
|
+
while (timestamp <= currentTimestamp) {
|
|
1094
|
+
timestamp = Date.now();
|
|
1095
|
+
}
|
|
1096
|
+
return timestamp;
|
|
1097
|
+
}
|
|
1098
|
+
}
|
|
1099
|
+
|
|
1062
1100
|
class SnowflakePipelineStep extends AbstractFragmentaryPipelineStep {
|
|
1101
|
+
_shardId;
|
|
1102
|
+
constructor(options) {
|
|
1103
|
+
super(options);
|
|
1104
|
+
const config = this.getConfig();
|
|
1105
|
+
this._shardId = config.getNumber(`snowflake.shard.id`);
|
|
1106
|
+
}
|
|
1063
1107
|
async doPerform(_data, _request) {
|
|
1064
|
-
return Snowflake.generate();
|
|
1108
|
+
return Snowflake.generate({ shardId: this._shardId });
|
|
1065
1109
|
}
|
|
1066
1110
|
}
|
|
1067
1111
|
|
|
@@ -1600,12 +1644,15 @@ class TypeOrmLoadEntityByIdPipelineStep extends AbstractTypeOrmPipelineStep {
|
|
|
1600
1644
|
class TypeOrmSaveEntityPipelineStep extends AbstractTypeOrmPipelineStep {
|
|
1601
1645
|
_entityName;
|
|
1602
1646
|
_fillIdBySnowflake;
|
|
1647
|
+
_snowflakeShardId;
|
|
1603
1648
|
_uniquenessCheckSnippet;
|
|
1604
1649
|
_uniquenessCheckFunc;
|
|
1605
1650
|
constructor(options) {
|
|
1606
1651
|
super(options);
|
|
1652
|
+
const config = this.getConfig();
|
|
1607
1653
|
this._entityName = options.entityName;
|
|
1608
1654
|
this._fillIdBySnowflake = options.fillIdBySnowflake ?? false;
|
|
1655
|
+
this._snowflakeShardId = config.getNumber(`snowflake.shard.id`);
|
|
1609
1656
|
this._uniquenessCheckSnippet = options.uniquenessCheckSnippet;
|
|
1610
1657
|
this._uniquenessCheckFunc = Utils.createSyncFunction(this.getUniquenessCheckSnippet(), {
|
|
1611
1658
|
createDefault: () => (void 0),
|
|
@@ -1648,7 +1695,7 @@ class TypeOrmSaveEntityPipelineStep extends AbstractTypeOrmPipelineStep {
|
|
|
1648
1695
|
if (this.isFillIdBySnowflake()) {
|
|
1649
1696
|
const id = entity[column.propertyName];
|
|
1650
1697
|
if (id == null || `${id ?? ''}`.trim().length === 0) {
|
|
1651
|
-
entity[column.propertyName] = Snowflake.generate();
|
|
1698
|
+
entity[column.propertyName] = Snowflake.generate({ shardId: this._snowflakeShardId });
|
|
1652
1699
|
}
|
|
1653
1700
|
}
|
|
1654
1701
|
return await repository.save(entity);
|
|
@@ -1,6 +1,8 @@
|
|
|
1
1
|
import { PipelineStepData, PipelineStepPayload } from '@rainbow-o23/n1';
|
|
2
|
-
import {
|
|
3
|
-
|
|
2
|
+
import { SnowflakeId } from '../utils';
|
|
3
|
+
import { AbstractFragmentaryPipelineStep, FragmentaryPipelineStepOptions } from './abstract-fragmentary-pipeline-step';
|
|
4
4
|
export declare class SnowflakePipelineStep<In = PipelineStepPayload, Out = PipelineStepPayload, InFragment = In> extends AbstractFragmentaryPipelineStep<In, Out, InFragment, SnowflakeId> {
|
|
5
|
+
private readonly _shardId;
|
|
6
|
+
constructor(options: FragmentaryPipelineStepOptions<In, Out, InFragment, SnowflakeId>);
|
|
5
7
|
protected doPerform(_data: InFragment, _request: PipelineStepData<In>): Promise<SnowflakeId>;
|
|
6
8
|
}
|
|
@@ -20,6 +20,7 @@ export interface TypeOrmSaveEntityPipelineStepOptions<In = PipelineStepPayload,
|
|
|
20
20
|
export declare class TypeOrmSaveEntityPipelineStep<In = PipelineStepPayload, Out = PipelineStepPayload, InFragment = EntityToSave, OutFragment = EntityToSave> extends AbstractTypeOrmPipelineStep<In, Out, InFragment, OutFragment> {
|
|
21
21
|
private readonly _entityName;
|
|
22
22
|
private readonly _fillIdBySnowflake;
|
|
23
|
+
private readonly _snowflakeShardId;
|
|
23
24
|
private readonly _uniquenessCheckSnippet?;
|
|
24
25
|
private readonly _uniquenessCheckFunc?;
|
|
25
26
|
constructor(options: TypeOrmSaveEntityPipelineStepOptions<In, Out, InFragment, OutFragment>);
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export * from './snowflake';
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
export type SnowflakeId = string;
|
|
2
|
+
export type SnowflakeGenerateOptions = {
|
|
3
|
+
timestamp?: Date | number;
|
|
4
|
+
shardId?: number;
|
|
5
|
+
};
|
|
6
|
+
export declare class Snowflake {
|
|
7
|
+
private static EPOCH;
|
|
8
|
+
private static SHARD_ID;
|
|
9
|
+
private static readonly MAX_SEQUENCE;
|
|
10
|
+
private static sequence;
|
|
11
|
+
private static lastTimestamp;
|
|
12
|
+
static generate(options?: SnowflakeGenerateOptions): SnowflakeId;
|
|
13
|
+
private static waitNextMillis;
|
|
14
|
+
}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@rainbow-o23/n3",
|
|
3
|
-
"version": "1.0.37",
|
|
3
|
+
"version": "1.0.39",
|
|
4
4
|
"description": "o23 pipelines",
|
|
5
5
|
"main": "index.cjs",
|
|
6
6
|
"module": "index.js",
|
|
@@ -21,8 +21,7 @@
|
|
|
21
21
|
"url": "https://github.com/InsureMO/rainbow-o23/issues"
|
|
22
22
|
},
|
|
23
23
|
"dependencies": {
|
|
24
|
-
"@rainbow-o23/n1": "1.0.37",
|
|
25
|
-
"@theinternetfolks/snowflake": "^1.3.0",
|
|
24
|
+
"@rainbow-o23/n1": "1.0.39",
|
|
26
25
|
"node-fetch": "2.6.7",
|
|
27
26
|
"typeorm": "^0.3.17"
|
|
28
27
|
},
|
|
@@ -41,7 +40,7 @@
|
|
|
41
40
|
"better-sqlite3": "^9.0.0",
|
|
42
41
|
"eslint": "^8.29.0",
|
|
43
42
|
"mssql": "^10.0.1",
|
|
44
|
-
"mysql2": "^3.9.
|
|
43
|
+
"mysql2": "^3.9.8",
|
|
45
44
|
"oracledb": "^6.2.0",
|
|
46
45
|
"pg": "^8.11.3",
|
|
47
46
|
"pg-query-stream": "^4.5.3",
|
package/rollup.config.base.js
CHANGED
|
@@ -25,8 +25,7 @@ export const buildConfig = (lint) => {
|
|
|
25
25
|
external(id) {
|
|
26
26
|
return ["@rainbow-o23/", "typeorm/"].some(scope => id.startsWith(scope))
|
|
27
27
|
|| [
|
|
28
|
-
"typeorm", "reflect-metadata",
|
|
29
|
-
"@theinternetfolks/snowflake", "node-fetch"
|
|
28
|
+
"typeorm", "reflect-metadata", "node-fetch"
|
|
30
29
|
].includes(id);
|
|
31
30
|
}
|
|
32
31
|
};
|
|
@@ -1,16 +1,22 @@
|
|
|
1
1
|
import {PipelineStepData, PipelineStepPayload} from '@rainbow-o23/n1';
|
|
2
|
-
import {Snowflake} from '@theinternetfolks/snowflake';
|
|
3
|
-
import {AbstractFragmentaryPipelineStep} from './abstract-fragmentary-pipeline-step';
|
|
4
|
-
|
|
5
|
-
export type SnowflakeId = string;
|
|
2
|
+
import {Snowflake, SnowflakeId} from '../utils';
|
|
3
|
+
import {AbstractFragmentaryPipelineStep, FragmentaryPipelineStepOptions} from './abstract-fragmentary-pipeline-step';
|
|
6
4
|
|
|
7
5
|
/**
|
|
8
6
|
* create a snowflake id as out fragment
|
|
9
7
|
*/
|
|
10
8
|
export class SnowflakePipelineStep<In = PipelineStepPayload, Out = PipelineStepPayload, InFragment = In>
|
|
11
9
|
extends AbstractFragmentaryPipelineStep<In, Out, InFragment, SnowflakeId> {
|
|
10
|
+
private readonly _shardId: number;
|
|
11
|
+
|
|
12
|
+
public constructor(options: FragmentaryPipelineStepOptions<In, Out, InFragment, SnowflakeId>) {
|
|
13
|
+
super(options);
|
|
14
|
+
const config = this.getConfig();
|
|
15
|
+
this._shardId = config.getNumber(`snowflake.shard.id`);
|
|
16
|
+
}
|
|
17
|
+
|
|
12
18
|
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
|
13
19
|
protected async doPerform(_data: InFragment, _request: PipelineStepData<In>): Promise<SnowflakeId> {
|
|
14
|
-
return Snowflake.generate();
|
|
20
|
+
return Snowflake.generate({shardId: this._shardId});
|
|
15
21
|
}
|
|
16
22
|
}
|
|
@@ -5,9 +5,9 @@ import {
|
|
|
5
5
|
PipelineStepPayload,
|
|
6
6
|
Undefinable
|
|
7
7
|
} from '@rainbow-o23/n1';
|
|
8
|
-
import {Snowflake} from '@theinternetfolks/snowflake';
|
|
9
8
|
import {DeepPartial, ObjectLiteral} from 'typeorm';
|
|
10
9
|
import {ScriptFuncOrBody, Utils} from '../step';
|
|
10
|
+
import {Snowflake} from '../utils';
|
|
11
11
|
import {AbstractTypeOrmPipelineStep, TypeOrmPipelineStepOptions} from './abstract-typeorm-step';
|
|
12
12
|
import {TypeOrmEntityName} from './types';
|
|
13
13
|
|
|
@@ -33,13 +33,16 @@ export class TypeOrmSaveEntityPipelineStep<In = PipelineStepPayload, Out = Pipel
|
|
|
33
33
|
extends AbstractTypeOrmPipelineStep<In, Out, InFragment, OutFragment> {
|
|
34
34
|
private readonly _entityName: TypeOrmEntityName;
|
|
35
35
|
private readonly _fillIdBySnowflake: boolean;
|
|
36
|
+
private readonly _snowflakeShardId: number;
|
|
36
37
|
private readonly _uniquenessCheckSnippet?: ScriptFuncOrBody<UniquenessCheckFunc<InFragment>>;
|
|
37
38
|
private readonly _uniquenessCheckFunc?: UniquenessCheckFunc<InFragment>;
|
|
38
39
|
|
|
39
40
|
public constructor(options: TypeOrmSaveEntityPipelineStepOptions<In, Out, InFragment, OutFragment>) {
|
|
40
41
|
super(options);
|
|
42
|
+
const config = this.getConfig();
|
|
41
43
|
this._entityName = options.entityName;
|
|
42
44
|
this._fillIdBySnowflake = options.fillIdBySnowflake ?? false;
|
|
45
|
+
this._snowflakeShardId = config.getNumber(`snowflake.shard.id`);
|
|
43
46
|
this._uniquenessCheckSnippet = options.uniquenessCheckSnippet;
|
|
44
47
|
this._uniquenessCheckFunc = Utils.createSyncFunction(this.getUniquenessCheckSnippet(), {
|
|
45
48
|
createDefault: () => (void 0),
|
|
@@ -100,7 +103,7 @@ export class TypeOrmSaveEntityPipelineStep<In = PipelineStepPayload, Out = Pipel
|
|
|
100
103
|
// fill id if not exists
|
|
101
104
|
const id = entity[column.propertyName];
|
|
102
105
|
if (id == null || `${id ?? ''}`.trim().length === 0) {
|
|
103
|
-
entity[column.propertyName] = Snowflake.generate() as unknown as number;
|
|
106
|
+
entity[column.propertyName] = Snowflake.generate({shardId: this._snowflakeShardId}) as unknown as number;
|
|
104
107
|
}
|
|
105
108
|
}
|
|
106
109
|
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export * from './snowflake';
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
export type SnowflakeId = string;
|
|
2
|
+
export type SnowflakeGenerateOptions = { timestamp?: Date | number, shardId?: number };
|
|
3
|
+
|
|
4
|
+
export class Snowflake {
|
|
5
|
+
/** The generators epoch timestamp in milliseconds. Defaults to "1st of January 1970, 00:00". */
|
|
6
|
+
private static EPOCH: number = Date.UTC(1970, 0, 1).valueOf();
|
|
7
|
+
|
|
8
|
+
/** The id of the shard running this generator. Defaults to "1". */
|
|
9
|
+
private static SHARD_ID = 1;
|
|
10
|
+
|
|
11
|
+
/**
|
|
12
|
+
* Max sequence, 2^12 - 1
|
|
13
|
+
*/
|
|
14
|
+
private static readonly MAX_SEQUENCE = 4095;
|
|
15
|
+
/** current sequence */
|
|
16
|
+
private static sequence = 0;
|
|
17
|
+
private static lastTimestamp = -1;
|
|
18
|
+
|
|
19
|
+
/**
|
|
20
|
+
* Generates a single snowflake.
|
|
21
|
+
*/
|
|
22
|
+
static generate(options: SnowflakeGenerateOptions = {}): SnowflakeId {
|
|
23
|
+
let {timestamp = Date.now(), shardId} = options;
|
|
24
|
+
|
|
25
|
+
if (timestamp instanceof Date) {
|
|
26
|
+
timestamp = timestamp.getTime();
|
|
27
|
+
} else {
|
|
28
|
+
timestamp = new Date(timestamp).getTime();
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
shardId = shardId || Snowflake.SHARD_ID;
|
|
32
|
+
|
|
33
|
+
if (timestamp === Snowflake.lastTimestamp) {
|
|
34
|
+
Snowflake.sequence = (Snowflake.sequence + 1) % Snowflake.MAX_SEQUENCE;
|
|
35
|
+
if (Snowflake.sequence === 0) {
|
|
36
|
+
timestamp = Snowflake.waitNextMillis(timestamp);
|
|
37
|
+
}
|
|
38
|
+
} else {
|
|
39
|
+
Snowflake.sequence = 0;
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
Snowflake.lastTimestamp = timestamp;
|
|
43
|
+
|
|
44
|
+
const result = (BigInt(timestamp - Snowflake.EPOCH) << BigInt(22)) |
|
|
45
|
+
(BigInt(shardId % 1024) << BigInt(12)) |
|
|
46
|
+
BigInt(Snowflake.sequence);
|
|
47
|
+
|
|
48
|
+
return result.toString();
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
private static waitNextMillis(currentTimestamp: number) {
|
|
52
|
+
let timestamp = Date.now();
|
|
53
|
+
while (timestamp <= currentTimestamp) {
|
|
54
|
+
timestamp = Date.now();
|
|
55
|
+
}
|
|
56
|
+
return timestamp;
|
|
57
|
+
}
|
|
58
|
+
}
|
|
@@ -36,7 +36,7 @@ describe('TypeORM SQL Autonomous Suite', () => {
|
|
|
36
36
|
await new TypeOrmDataSourceHelper(config).create({
|
|
37
37
|
'TEST': [TestTable]
|
|
38
38
|
});
|
|
39
|
-
const repo = (await TypeOrmDataSourceManager.findDataSource('TEST')).getDataSource().getRepository(TestTable);
|
|
39
|
+
const repo = (await TypeOrmDataSourceManager.findDataSource('TEST', config)).getDataSource().getRepository(TestTable);
|
|
40
40
|
await repo.insert({id: 1, content: 'hello world!'});
|
|
41
41
|
await repo.insert({id: 2, content: 'good-bye world!'});
|
|
42
42
|
});
|
|
@@ -187,7 +187,7 @@ describe('TypeORM SQL Transactional Suite', () => {
|
|
|
187
187
|
await new TypeOrmDataSourceHelper(config).create({
|
|
188
188
|
'TEST': [TestTable]
|
|
189
189
|
});
|
|
190
|
-
const repo = (await TypeOrmDataSourceManager.findDataSource('TEST')).getDataSource().getRepository(TestTable);
|
|
190
|
+
const repo = (await TypeOrmDataSourceManager.findDataSource('TEST', config)).getDataSource().getRepository(TestTable);
|
|
191
191
|
await repo.insert({id: 1, content: 'hello world!'});
|
|
192
192
|
await repo.insert({id: 2, content: 'good-bye world!'});
|
|
193
193
|
});
|
|
@@ -28,7 +28,7 @@ describe('TypeORM Entity Suite', () => {
|
|
|
28
28
|
await new TypeOrmDataSourceHelper(config).create({
|
|
29
29
|
'TEST': [TestTable]
|
|
30
30
|
});
|
|
31
|
-
const repo = (await TypeOrmDataSourceManager.findDataSource('TEST')).getDataSource().getRepository(TestTable);
|
|
31
|
+
const repo = (await TypeOrmDataSourceManager.findDataSource('TEST', config)).getDataSource().getRepository(TestTable);
|
|
32
32
|
await repo.insert({id: 1, content: 'hello world!'});
|
|
33
33
|
});
|
|
34
34
|
|