@hot-updater/postgres 0.1.5 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +121 -119
- package/dist/index.d.ts +1 -8
- package/dist/index.js +90 -95
- package/dist/postgres.d.ts +5 -0
- package/dist/types.d.ts +10 -0
- package/package.json +9 -5
- package/sql/bundles.sql +13 -0
- package/sql/get_update_info.sql +79 -0
- package/sql/get_update_info.test.ts +74 -0
- package/sql/prepareSql.ts +11 -0
- package/sql/semver.sql +97 -0
- package/sql/semver_satisfies.test.ts +26 -0
- package/dist/index.d.cts +0 -8
package/dist/index.cjs
CHANGED

@@ -1,122 +1,124 @@
 "use strict";
-… (old lines 2-23 not shown in this diff view)
+var __webpack_require__ = {};
+(()=>{
+    __webpack_require__.d = function(exports1, definition) {
+        for(var key in definition)if (__webpack_require__.o(definition, key) && !__webpack_require__.o(exports1, key)) Object.defineProperty(exports1, key, {
+            enumerable: true,
+            get: definition[key]
+        });
+    };
+})();
+(()=>{
+    __webpack_require__.o = function(obj, prop) {
+        return Object.prototype.hasOwnProperty.call(obj, prop);
+    };
+})();
+(()=>{
+    __webpack_require__.r = function(exports1) {
+        if ('undefined' != typeof Symbol && Symbol.toStringTag) Object.defineProperty(exports1, Symbol.toStringTag, {
+            value: 'Module'
+        });
+        Object.defineProperty(exports1, '__esModule', {
+            value: true
+        });
+    };
+})();
+var __webpack_exports__ = {};
+__webpack_require__.r(__webpack_exports__);
+__webpack_require__.d(__webpack_exports__, {
+    postgres: ()=>postgres
 });
-… (old lines 25-117 not shown in this diff view)
-};
-// Annotate the CommonJS export names for ESM import in node:
-0 && (module.exports = {
-    postgres
+const external_kysely_namespaceObject = require("kysely");
+const external_pg_namespaceObject = require("pg");
+const postgres = (config, hooks)=>(_)=>{
+    const pool = new external_pg_namespaceObject.Pool(config);
+    const dialect = new external_kysely_namespaceObject.PostgresDialect({
+        pool
+    });
+    const db = new external_kysely_namespaceObject.Kysely({
+        dialect
+    });
+    let bundles = [];
+    let isUnmount = false;
+    return {
+        name: "postgres",
+        async onUnmount () {
+            if (isUnmount) return;
+            isUnmount = true;
+            await pool.end();
+        },
+        async commitBundle () {
+            await db.transaction().execute(async (tx)=>{
+                for (const bundle of bundles)await tx.insertInto("bundles").values({
+                    id: bundle.id,
+                    enabled: bundle.enabled,
+                    file_url: bundle.fileUrl,
+                    force_update: bundle.forceUpdate,
+                    file_hash: bundle.fileHash,
+                    git_commit_hash: bundle.gitCommitHash,
+                    message: bundle.message,
+                    platform: bundle.platform,
+                    target_app_version: bundle.targetAppVersion
+                }).onConflict((oc)=>oc.column("id").doUpdateSet({
+                    enabled: bundle.enabled,
+                    file_url: bundle.fileUrl,
+                    force_update: bundle.forceUpdate,
+                    file_hash: bundle.fileHash,
+                    git_commit_hash: bundle.gitCommitHash,
+                    message: bundle.message,
+                    platform: bundle.platform,
+                    target_app_version: bundle.targetAppVersion
+                })).execute();
+            });
+            hooks?.onDatabaseUpdated?.();
+        },
+        async updateBundle (targetBundleId, newBundle) {
+            bundles = await this.getBundles();
+            const targetIndex = bundles.findIndex((u)=>u.id === targetBundleId);
+            if (-1 === targetIndex) throw new Error("target bundle version not found");
+            Object.assign(bundles[targetIndex], newBundle);
+        },
+        async appendBundle (inputBundle) {
+            bundles = await this.getBundles();
+            bundles.unshift(inputBundle);
+        },
+        async setBundles (inputBundles) {
+            bundles = inputBundles;
+        },
+        async getBundleById (bundleId) {
+            const data = await db.selectFrom("bundles").selectAll().where("id", "=", bundleId).executeTakeFirst();
+            if (!data) return null;
+            return {
+                enabled: data.enabled,
+                fileUrl: data.file_url,
+                forceUpdate: data.force_update,
+                fileHash: data.file_hash,
+                gitCommitHash: data.git_commit_hash,
+                id: data.id,
+                message: data.message,
+                platform: data.platform,
+                targetAppVersion: data.target_app_version
+            };
+        },
+        async getBundles (refresh = false) {
+            if (bundles.length > 0 && !refresh) return bundles;
+            const data = await db.selectFrom("bundles").orderBy("id", "desc").selectAll().execute();
+            return data.map((bundle)=>({
+                enabled: bundle.enabled,
+                fileUrl: bundle.file_url,
+                forceUpdate: bundle.force_update,
+                fileHash: bundle.file_hash,
+                gitCommitHash: bundle.git_commit_hash,
+                id: bundle.id,
+                message: bundle.message,
+                platform: bundle.platform,
+                targetAppVersion: bundle.target_app_version
+            }));
+        }
+    };
+};
+var __webpack_export_target__ = exports;
+for(var __webpack_i__ in __webpack_exports__)__webpack_export_target__[__webpack_i__] = __webpack_exports__[__webpack_i__];
+if (__webpack_exports__.__esModule) Object.defineProperty(__webpack_export_target__, '__esModule', {
+    value: true
 });

package/dist/index.d.ts
CHANGED

@@ -1,8 +1 @@
-… (old line 1 not shown in this diff view)
-import { PoolConfig } from 'pg';
-
-interface PostgresConfig extends PoolConfig {
-}
-declare const postgres: (config: PostgresConfig, hooks?: DatabasePluginHooks) => (_: BasePluginArgs) => DatabasePlugin;
-
-export { type PostgresConfig, postgres };
+export * from "./postgres";

package/dist/index.js
CHANGED

@@ -1,95 +1,90 @@
-… (old lines 1-90 not shown in this diff view)
-};
-};
-export {
-    postgres
-};
+import * as __WEBPACK_EXTERNAL_MODULE_kysely__ from "kysely";
+import * as __WEBPACK_EXTERNAL_MODULE_pg__ from "pg";
+const postgres = (config, hooks)=>(_)=>{
+    const pool = new __WEBPACK_EXTERNAL_MODULE_pg__.Pool(config);
+    const dialect = new __WEBPACK_EXTERNAL_MODULE_kysely__.PostgresDialect({
+        pool
+    });
+    const db = new __WEBPACK_EXTERNAL_MODULE_kysely__.Kysely({
+        dialect
+    });
+    let bundles = [];
+    let isUnmount = false;
+    return {
+        name: "postgres",
+        async onUnmount () {
+            if (isUnmount) return;
+            isUnmount = true;
+            await pool.end();
+        },
+        async commitBundle () {
+            await db.transaction().execute(async (tx)=>{
+                for (const bundle of bundles)await tx.insertInto("bundles").values({
+                    id: bundle.id,
+                    enabled: bundle.enabled,
+                    file_url: bundle.fileUrl,
+                    force_update: bundle.forceUpdate,
+                    file_hash: bundle.fileHash,
+                    git_commit_hash: bundle.gitCommitHash,
+                    message: bundle.message,
+                    platform: bundle.platform,
+                    target_app_version: bundle.targetAppVersion
+                }).onConflict((oc)=>oc.column("id").doUpdateSet({
+                    enabled: bundle.enabled,
+                    file_url: bundle.fileUrl,
+                    force_update: bundle.forceUpdate,
+                    file_hash: bundle.fileHash,
+                    git_commit_hash: bundle.gitCommitHash,
+                    message: bundle.message,
+                    platform: bundle.platform,
+                    target_app_version: bundle.targetAppVersion
+                })).execute();
+            });
+            hooks?.onDatabaseUpdated?.();
+        },
+        async updateBundle (targetBundleId, newBundle) {
+            bundles = await this.getBundles();
+            const targetIndex = bundles.findIndex((u)=>u.id === targetBundleId);
+            if (-1 === targetIndex) throw new Error("target bundle version not found");
+            Object.assign(bundles[targetIndex], newBundle);
+        },
+        async appendBundle (inputBundle) {
+            bundles = await this.getBundles();
+            bundles.unshift(inputBundle);
+        },
+        async setBundles (inputBundles) {
+            bundles = inputBundles;
+        },
+        async getBundleById (bundleId) {
+            const data = await db.selectFrom("bundles").selectAll().where("id", "=", bundleId).executeTakeFirst();
+            if (!data) return null;
+            return {
+                enabled: data.enabled,
+                fileUrl: data.file_url,
+                forceUpdate: data.force_update,
+                fileHash: data.file_hash,
+                gitCommitHash: data.git_commit_hash,
+                id: data.id,
+                message: data.message,
+                platform: data.platform,
+                targetAppVersion: data.target_app_version
+            };
+        },
+        async getBundles (refresh = false) {
+            if (bundles.length > 0 && !refresh) return bundles;
+            const data = await db.selectFrom("bundles").orderBy("id", "desc").selectAll().execute();
+            return data.map((bundle)=>({
+                enabled: bundle.enabled,
+                fileUrl: bundle.file_url,
+                forceUpdate: bundle.force_update,
+                fileHash: bundle.file_hash,
+                gitCommitHash: bundle.git_commit_hash,
+                id: bundle.id,
+                message: bundle.message,
+                platform: bundle.platform,
+                targetAppVersion: bundle.target_app_version
+            }));
+        }
+    };
+};
+export { postgres };

package/dist/postgres.d.ts
ADDED

@@ -0,0 +1,5 @@
+import type { BasePluginArgs, DatabasePlugin, DatabasePluginHooks } from "@hot-updater/plugin-core";
+import { type PoolConfig } from "pg";
+export interface PostgresConfig extends PoolConfig {
+}
+export declare const postgres: (config: PostgresConfig, hooks?: DatabasePluginHooks) => (_: BasePluginArgs) => DatabasePlugin;

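For orientation, here is a minimal sketch of driving the factory declared above directly, based only on the surfaces visible in this diff (postgres.d.ts plus the compiled dist output). The connection string, the empty BasePluginArgs object, and the bundle values are illustrative assumptions, not part of the package; a real project would normally wire the plugin through hot-updater's own configuration instead.

import { postgres } from "@hot-updater/postgres";
import type { BasePluginArgs } from "@hot-updater/plugin-core";

// PostgresConfig extends pg's PoolConfig, so any pg connection option works here.
const createPlugin = postgres(
  { connectionString: "postgres://user:pass@localhost:5432/hot_updater" }, // assumed DSN
  { onDatabaseUpdated: () => console.log("bundles table updated") },
);

// The compiled factory ignores its BasePluginArgs parameter ("(_) => { ... }"),
// so an empty object is enough for this sketch.
const plugin = createPlugin({} as BasePluginArgs);

// Stage a bundle in memory, then flush it with the INSERT ... ON CONFLICT (id)
// upsert shown in dist/index.js.
await plugin.appendBundle({
  id: "00000000-0000-0000-0000-000000000001", // illustrative UUID
  platform: "ios",
  targetAppVersion: "1.x.x",
  enabled: true,
  forceUpdate: false,
  fileUrl: "https://example.com/bundle.zip",
  fileHash: "abc123",
  gitCommitHash: null,
  message: null,
});
await plugin.commitBundle();
await plugin.onUnmount(); // closes the pg pool
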
package/dist/types.d.ts
ADDED

@@ -0,0 +1,10 @@
+import type { Bundle } from "@hot-updater/core";
+type SnakeCase<S extends string> = S extends `${infer T}${infer U}` ? `${T extends Capitalize<T> ? "_" : ""}${Lowercase<T>}${SnakeCase<U>}` : S;
+type SnakeKeyObject<T> = T extends Record<string, any> ? {
+    [K in keyof T as SnakeCase<Extract<K, string>>]: T[K] extends object ? SnakeKeyObject<T[K]> : T[K];
+} : T;
+export type BundlesTable = SnakeKeyObject<Bundle>;
+export interface Database {
+    bundles: BundlesTable;
+}
+export {};

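As a standalone illustration of what the SnakeCase/SnakeKeyObject helpers above compute, the following sketch restates the SnakeCase type from this file and applies it to key names used elsewhere in the diff; the resulting names match the columns created in sql/bundles.sql.

// Same template-literal mapping as in types.d.ts, repeated here so it can be
// checked in isolation in a scratch file.
type SnakeCase<S extends string> = S extends `${infer T}${infer U}`
  ? `${T extends Capitalize<T> ? "_" : ""}${Lowercase<T>}${SnakeCase<U>}`
  : S;

type A = SnakeCase<"targetAppVersion">; // "target_app_version"
type B = SnakeCase<"fileUrl">;          // "file_url"
type C = SnakeCase<"gitCommitHash">;    // "git_commit_hash"
type D = SnakeCase<"id">;               // "id" (no uppercase letters, unchanged)
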
package/package.json
CHANGED

@@ -1,7 +1,7 @@
 {
   "name": "@hot-updater/postgres",
   "type": "module",
-  "version": "0.1.5",
+  "version": "0.2.0",
   "description": "React Native OTA solution for self-hosted",
   "main": "dist/index.cjs",
   "module": "dist/index.js",
@@ -18,18 +18,22 @@
   },
   "files": [
     "dist",
+    "sql",
     "package.json"
   ],
   "dependencies": {
-    "@hot-updater/…
-    "…
+    "@hot-updater/core": "0.2.0",
+    "@hot-updater/plugin-core": "0.2.0",
+    "kysely": "^0.27.5",
     "pg": "^8.13.1"
   },
   "devDependencies": {
-    "@…
+    "@electric-sql/pglite": "^0.2.15",
+    "@types/pg": "^8.11.10",
+    "camelcase-keys": "^9.1.3"
   },
   "scripts": {
-    "build": "…
+    "build": "rslib build",
     "test:type": "tsc --noEmit"
   }
 }

package/sql/bundles.sql
ADDED

@@ -0,0 +1,13 @@
+CREATE TYPE platforms AS ENUM ('ios', 'android');
+
+CREATE TABLE bundles (
+  id uuid PRIMARY KEY,
+  platform platforms NOT NULL,
+  target_app_version text NOT NULL,
+  force_update boolean NOT NULL,
+  enabled boolean NOT NULL,
+  file_url text NOT NULL,
+  file_hash text NOT NULL,
+  git_commit_hash text,
+  message text
+);

package/sql/get_update_info.sql
ADDED

@@ -0,0 +1,79 @@
+CREATE OR REPLACE FUNCTION get_update_info (
+  app_platform platforms,
+  app_version  text,
+  bundle_id    uuid
+)
+RETURNS TABLE (
+  id           uuid,
+  force_update boolean,
+  file_url     text,
+  file_hash    text,
+  status       text
+)
+LANGUAGE plpgsql
+AS
+$$
+DECLARE
+  NIL_UUID CONSTANT uuid := '00000000-0000-0000-0000-000000000000';
+BEGIN
+  RETURN QUERY
+  WITH rollback_candidate AS (
+    SELECT
+      b.id,
+      -- If status is 'ROLLBACK', force_update is always TRUE
+      TRUE AS force_update,
+      b.file_url,
+      b.file_hash,
+      'ROLLBACK' AS status
+    FROM bundles b
+    WHERE b.enabled = TRUE
+      AND b.platform = app_platform
+      AND b.id < bundle_id
+    ORDER BY b.id DESC
+    LIMIT 1
+  ),
+  update_candidate AS (
+    SELECT
+      b.id,
+      b.force_update,
+      b.file_url,
+      b.file_hash,
+      'UPDATE' AS status
+    FROM bundles b
+    WHERE b.enabled = TRUE
+      AND b.platform = app_platform
+      AND b.id >= bundle_id
+      AND semver_satisfies(b.target_app_version, app_version)
+    ORDER BY b.id DESC
+    LIMIT 1
+  ),
+  final_result AS (
+    SELECT *
+    FROM update_candidate
+
+    UNION ALL
+
+    SELECT *
+    FROM rollback_candidate
+    WHERE NOT EXISTS (SELECT 1 FROM update_candidate)
+  )
+  SELECT *
+  FROM final_result WHERE final_result.id != bundle_id
+
+  UNION ALL
+  /*
+    When there are no final results and bundle_id != NIL_UUID,
+    add one fallback row.
+    This fallback row is also ROLLBACK so forceUpdate = TRUE.
+  */
+  SELECT
+    NIL_UUID AS id,
+    TRUE AS force_update, -- Always TRUE
+    NULL AS file_url,
+    NULL AS file_hash,
+    'ROLLBACK' AS status
+  WHERE (SELECT COUNT(*) FROM final_result) = 0
+    AND bundle_id != NIL_UUID;
+
+END;
+$$;

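A small, hypothetical driver script (mirroring the PGlite setup used in the test file that follows) showing how get_update_info is called and what its possible outcomes mean; the platform, version, and UUID values are illustrative.

import { PGlite } from "@electric-sql/pglite";
import { prepareSql } from "./prepareSql";

// Load bundles.sql, semver.sql, and get_update_info.sql into an in-memory Postgres.
const db = new PGlite();
await db.exec(await prepareSql());

const NIL_UUID = "00000000-0000-0000-0000-000000000000";

// Outcomes, following the CTEs above:
// - status 'UPDATE': the newest enabled bundle whose id is >= the device's bundle id
//   and whose target_app_version satisfies the reported app version;
// - status 'ROLLBACK': otherwise, the newest enabled bundle older than the device's,
//   with force_update forced to TRUE;
// - nil-UUID 'ROLLBACK' row: nothing matched but the device is not on NIL_UUID,
//   telling the client to fall back to the embedded bundle;
// - no row: nothing applicable, or the device is already on the matching bundle.
const result = await db.query(
  `SELECT * FROM get_update_info('ios', '1.2.3', '${NIL_UUID}')`,
);
console.log(result.rows[0] ?? "no update");
await db.close();
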
package/sql/get_update_info.test.ts
ADDED

@@ -0,0 +1,74 @@
+import { PGlite } from "@electric-sql/pglite";
+import type { Bundle, GetBundlesArgs, UpdateInfo } from "@hot-updater/core";
+import { setupGetUpdateInfoTestSuite } from "@hot-updater/core/test-utils";
+import camelcaseKeys from "camelcase-keys";
+import { afterAll, beforeEach, describe } from "vitest";
+import { prepareSql } from "./prepareSql";
+
+const createInsertBundleQuery = (bundle: Bundle) => {
+  return `
+    INSERT INTO bundles (
+      id, file_url, file_hash, platform, target_app_version,
+      force_update, enabled, git_commit_hash, message
+    ) VALUES (
+      '${bundle.id}',
+      '${bundle.fileUrl}',
+      '${bundle.fileHash}',
+      '${bundle.platform}',
+      '${bundle.targetAppVersion}',
+      ${bundle.forceUpdate},
+      ${bundle.enabled},
+      ${bundle.gitCommitHash ? `'${bundle.gitCommitHash}'` : "null"},
+      ${bundle.message ? `'${bundle.message}'` : "null"}
+    );
+  `;
+};
+
+const createGetUpdateInfo =
+  (db: PGlite) =>
+  async (
+    bundles: Bundle[],
+    { appVersion, bundleId, platform }: GetBundlesArgs,
+  ): Promise<UpdateInfo | null> => {
+    await db.exec(createInsertBundleQuerys(bundles));
+
+    const result = await db.query<{
+      id: string;
+      force_update: boolean;
+      file_url: string;
+      file_hash: string;
+      status: string;
+    }>(
+      `
+      SELECT * FROM get_update_info('${platform}', '${appVersion}', '${bundleId}')
+      `,
+    );
+
+    return result.rows[0]
+      ? (camelcaseKeys(result.rows[0]) as UpdateInfo)
+      : null;
+  };
+
+const createInsertBundleQuerys = (bundles: Bundle[]) => {
+  return bundles.map(createInsertBundleQuery).join("\n");
+};
+
+const db = new PGlite();
+
+const sql = await prepareSql();
+await db.exec(sql);
+const getUpdateInfo = createGetUpdateInfo(db);
+
+describe("getUpdateInfo", () => {
+  beforeEach(async () => {
+    await db.exec("DELETE FROM bundles");
+  });
+
+  afterAll(async () => {
+    await db.close();
+  });
+
+  setupGetUpdateInfoTestSuite({
+    getUpdateInfo: getUpdateInfo,
+  });
+});

package/sql/prepareSql.ts
ADDED

@@ -0,0 +1,11 @@
+import path from "path";
+import fs from "fs/promises";
+
+export const prepareSql = async () => {
+  const files = await fs.readdir(__dirname);
+  const sqlFiles = files.filter((file) => file.endsWith(".sql"));
+  const contents = await Promise.all(
+    sqlFiles.map((file) => fs.readFile(path.join(__dirname, file), "utf-8")),
+  );
+  return contents.join("\n");
+};

package/sql/semver.sql
ADDED

@@ -0,0 +1,97 @@
+CREATE OR REPLACE FUNCTION semver_satisfies(range_expression TEXT, version TEXT)
+RETURNS BOOLEAN AS $$
+DECLARE
+  version_parts TEXT[];
+  version_major INT;
+  version_minor INT;
+  version_patch INT;
+  satisfies BOOLEAN := FALSE;
+BEGIN
+  -- Split the version into major, minor, and patch
+  version_parts := string_to_array(version, '.');
+  version_major := version_parts[1]::INT;
+  version_minor := version_parts[2]::INT;
+  version_patch := version_parts[3]::INT;
+
+  -- Parse range expression and evaluate
+  IF range_expression ~ '^\d+\.\d+\.\d+$' THEN
+    -- Exact match
+    satisfies := (range_expression = version);
+
+  ELSIF range_expression = '*' THEN
+    -- Matches any version
+    satisfies := TRUE;
+
+  ELSIF range_expression ~ '^\d+\.x\.x$' THEN
+    -- Matches major.x.x
+    DECLARE
+      major_range INT := split_part(range_expression, '.', 1)::INT;
+    BEGIN
+      satisfies := (version_major = major_range);
+    END;
+
+  ELSIF range_expression ~ '^\d+\.\d+\.x$' THEN
+    -- Matches major.minor.x
+    DECLARE
+      major_range INT := split_part(range_expression, '.', 1)::INT;
+      minor_range INT := split_part(range_expression, '.', 2)::INT;
+    BEGIN
+      satisfies := (version_major = major_range AND version_minor = minor_range);
+    END;
+
+  ELSIF range_expression ~ '^\d+\.\d+$' THEN
+    -- Matches major.minor
+    DECLARE
+      major_range INT := split_part(range_expression, '.', 1)::INT;
+      minor_range INT := split_part(range_expression, '.', 2)::INT;
+    BEGIN
+      satisfies := (version_major = major_range AND version_minor = minor_range);
+    END;
+
+  ELSIF range_expression ~ '^\d+\.\d+\.\d+ - \d+\.\d+\.\d+$' THEN
+    -- Matches range e.g., 1.2.3 - 1.2.7
+    DECLARE
+      lower_bound TEXT := split_part(range_expression, ' - ', 1);
+      upper_bound TEXT := split_part(range_expression, ' - ', 2);
+    BEGIN
+      satisfies := (version >= lower_bound AND version <= upper_bound);
+    END;
+
+  ELSIF range_expression ~ '^>=\d+\.\d+\.\d+ <\d+\.\d+\.\d+$' THEN
+    -- Matches range with inequalities
+    DECLARE
+      lower_bound TEXT := regexp_replace(range_expression, '>=([\d\.]+) <.*', '\1');
+      upper_bound TEXT := regexp_replace(range_expression, '.*<([\d\.]+)', '\1');
+    BEGIN
+      satisfies := (version >= lower_bound AND version < upper_bound);
+    END;
+
+  ELSIF range_expression ~ '^~\d+\.\d+\.\d+$' THEN
+    -- Matches ~1.2.3 (>=1.2.3 <1.3.0)
+    DECLARE
+      lower_bound TEXT := regexp_replace(range_expression, '~', '');
+      upper_bound_major INT := split_part(lower_bound, '.', 1)::INT;
+      upper_bound_minor INT := split_part(lower_bound, '.', 2)::INT + 1;
+      upper_bound TEXT := upper_bound_major || '.' || upper_bound_minor || '.0';
+    BEGIN
+      satisfies := (version >= lower_bound AND version < upper_bound);
+    END;
+
+  ELSIF range_expression ~ '^\^\d+\.\d+\.\d+$' THEN
+    -- Matches ^1.2.3 (>=1.2.3 <2.0.0)
+    DECLARE
+      lower_bound TEXT := regexp_replace(range_expression, '\^', '');
+      upper_bound_major INT := split_part(lower_bound, '.', 1)::INT + 1;
+      upper_bound TEXT := upper_bound_major || '.0.0';
+    BEGIN
+      satisfies := (version >= lower_bound AND version < upper_bound);
+    END;
+
+  ELSE
+    RAISE EXCEPTION 'Unsupported range expression: %', range_expression;
+  END IF;
+
+  RETURN satisfies;
+END;
+$$ LANGUAGE plpgsql;

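A quick, hypothetical scratch script (using the same PGlite + prepareSql setup as the tests in this package) exercising the range shapes the branches above accept; any other expression raises the "Unsupported range expression" error.

import { PGlite } from "@electric-sql/pglite";
import { prepareSql } from "./prepareSql";

const db = new PGlite();
await db.exec(await prepareSql());

// Thin wrapper around the SQL function.
const satisfies = async (range: string, version: string) => {
  const res = await db.query<{ ok: boolean }>(
    `SELECT semver_satisfies('${range}', '${version}') AS ok`,
  );
  return res.rows[0].ok;
};

console.log(await satisfies("1.2.3", "1.2.3"));          // exact match
console.log(await satisfies("*", "9.9.9"));              // any version
console.log(await satisfies("1.x.x", "1.5.0"));          // same major
console.log(await satisfies("1.2.x", "1.2.9"));          // same major.minor
console.log(await satisfies("1.2", "1.2.9"));            // major.minor shorthand
console.log(await satisfies("1.2.3 - 1.2.7", "1.2.5"));  // inclusive range
console.log(await satisfies(">=1.2.3 <1.3.0", "1.2.9")); // bounded range
console.log(await satisfies("~1.2.3", "1.2.4"));         // >=1.2.3 <1.3.0
console.log(await satisfies("^1.2.3", "1.9.0"));         // >=1.2.3 <2.0.0
await db.close();
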
package/sql/semver_satisfies.test.ts
ADDED

@@ -0,0 +1,26 @@
+import { PGlite } from "@electric-sql/pglite";
+import { setupSemverSatisfiesTestSuite } from "@hot-updater/core/test-utils";
+import { afterAll, describe } from "vitest";
+import { prepareSql } from "./prepareSql";
+
+const db = new PGlite();
+const sql = await prepareSql();
+await db.exec(sql);
+
+const createSemverSatisfies =
+  (db: PGlite) => async (targetAppVersion: string, currentVersion: string) => {
+    const result = await db.query<{ actual: boolean }>(`
+      SELECT semver_satisfies('${targetAppVersion}', '${currentVersion}') AS actual;
+    `);
+    return result.rows[0].actual;
+  };
+
+const semverSatisfies = createSemverSatisfies(db);
+
+describe("semverSatisfies", () => {
+  afterAll(async () => {
+    await db.close();
+  });
+
+  setupSemverSatisfiesTestSuite({ semverSatisfies });
+});

package/dist/index.d.cts
DELETED

@@ -1,8 +0,0 @@
-import { DatabasePluginHooks, BasePluginArgs, DatabasePlugin } from '@hot-updater/plugin-core';
-import { PoolConfig } from 'pg';
-
-interface PostgresConfig extends PoolConfig {
-}
-declare const postgres: (config: PostgresConfig, hooks?: DatabasePluginHooks) => (_: BasePluginArgs) => DatabasePlugin;
-
-export { type PostgresConfig, postgres };