@soga/uploader 0.2.43 → 0.2.45
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/hooks/trasform.d.ts +5 -2
- package/dist/hooks/trasform.js +1 -1
- package/dist/host-uploader/ali.js +1 -1
- package/dist/host-uploader/baidu.js +1 -1
- package/dist/host-uploader/base.d.ts +8 -4
- package/dist/host-uploader/base.js +1 -1
- package/dist/uploader.d.ts +0 -2
- package/dist/uploader.js +1 -1
- package/package.json +12 -12
package/dist/hooks/trasform.d.ts
CHANGED
@@ -1,5 +1,8 @@
-import {
+import { UploadFile, UploadPart, UploadSuccess } from '@soga/entities';
+import { Repository } from 'typeorm';
 export declare function transform(params: {
     file_id: number;
-
+    fileRepository: Repository<UploadFile>;
+    partRepository: Repository<UploadPart>;
+    successRepository: Repository<UploadSuccess>;
 }): Promise<void>;
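
In 0.2.45, transform() takes the three TypeORM repositories as explicit parameters. A minimal call-site sketch, assuming an already-initialized DataSource registered with the entities from @soga/entities; the deep import path is illustrative (the package may re-export the hook from its main entry):

import { DataSource } from 'typeorm';
import { UploadFile, UploadPart, UploadSuccess } from '@soga/entities';
import { transform } from '@soga/uploader/dist/hooks/trasform';

// Hypothetical wrapper: hand transform() the repositories it now requires.
async function finalizeUpload(dataSource: DataSource, file_id: number): Promise<void> {
  await transform({
    file_id,
    fileRepository: dataSource.getRepository(UploadFile),
    partRepository: dataSource.getRepository(UploadPart),
    successRepository: dataSource.getRepository(UploadSuccess),
  });
}
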
package/dist/hooks/trasform.js
CHANGED
@@ -1 +1 @@
-"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.transform=transform;const
+
"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.transform=transform;const types_1=require("@soga/types"),fs_extra_1=require("fs-extra"),typeorm_1=require("typeorm");async function transform(e){const{fileRepository:i,partRepository:t,successRepository:a}=e,o=await i.findOneBy({id:e.file_id}),d={root_id:o.root_id,pid:0,space_id:o.space_id,space_name:o.space_name,uid:o.uid,inputs:o.inputs.map((e=>({filename:e.filename,filepath:e.filepath,filesize:e.filesize}))),cloud_id:o.cloud_info.id,cloud_name:o.cloud_info.name,type:o.type},p=0==o.root_id;if(!p){const e=await a.findOneBy({root_id:o.root_id,pid:0});if(e)d.pid=e.id;else{const e=await i.findOneBy({id:o.root_id}),t=await a.save(a.create({root_id:o.root_id,pid:0,space_id:e.space_id,space_name:e.space_name,uid:e.uid,inputs:e.inputs?.map((e=>({filename:e.filename,filepath:e.filepath,filesize:e.filesize}))),cloud_id:e.cloud_info.id,cloud_name:e.cloud_info.name,type:e.type}));d.pid=t.id}await a.update({id:d.pid},{updated_at:new Date})}if(await a.save(a.create(d)),await i.delete({id:e.file_id}),await t.delete({file_id:e.file_id}),await(0,fs_extra_1.remove)(o.output_root),!p){await i.findOneBy({root_id:o.root_id,type:(0,typeorm_1.Not)(types_1.RecordType.FOLDER)})?await i.increment({id:o.root_id},"completed_count",1):(await i.delete({root_id:o.root_id}),await i.delete({id:o.root_id}))}}

package/dist/host-uploader/ali.js
CHANGED
@@ -1 +1 @@
-"use strict";var __importDefault=this&&this.__importDefault||function(
+
"use strict";var __importDefault=this&&this.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(exports,"__esModule",{value:!0}),exports.getAliUploader=exports.AliUploader=void 0;const base_1=require("./base"),types_1=require("@soga/types"),piscina_1=__importDefault(require("piscina")),node_worker_threads_1=require("node:worker_threads"),piscina=new piscina_1.default({filename:require.resolve("@soga/part-uploader")}),uploaders=new Map;class AliUploader extends base_1.BaseUploader{params;constructor(t){super(t,types_1.HostType.ALI),this.params=t}getThread(t){const e=new AbortController,{port1:r,port2:a}=new node_worker_threads_1.MessageChannel;return{file_id:t.file_id,part_id:t.id,uid:t.uid,start:async()=>{try{const{file_id:s,host_id:i,id:o,info:d}=t,l=await this.fileRepository.findOneBy({id:s}),{output_root:p}=l;a.on("message",(async t=>{await this.onPartProgress(t)})),a.on("error",(t=>{console.log("ali upload part error evt: "),console.log(t)}));const n={file_id:s,host_id:i,part_id:o,output_root:p,part:d,cloud_folder_id:l.cloud_info.hosts.ali?.file_id,sdk_domain:this.params.sdk_domain,port:r},_=await piscina.run(n,{name:"uploadAli",signal:e.signal,transferList:[r]});_&&(await this.partRepository.update(t.id,{upload_result:{ali:_},upload_status:types_1.UploadStatus.SUCCESS}),await this.onPartSuccess({file_id:s,host_type:types_1.HostType.ALI}))}catch(e){"AbortError"==e.name?this.partRepository.update(t.id,{upload_status:types_1.UploadStatus.NULL}):await this.onPartError({error:e,part_id:t.id,file_id:t.file_id})}finally{this.threads=this.threads.filter((e=>e.part_id!=t.id)),a.close(),await this.start()}},stop:async()=>{e.abort(),this.threads=this.threads.filter((e=>e.part_id!==t.id))}}}}exports.AliUploader=AliUploader;const getAliUploader=async t=>{const{uid:e,host_id:r}=t,a=`${e}_${r}`;let s=uploaders.get(a);return s||(s=new AliUploader(t),s.setThreads(3),uploaders.set(a,s)),s};exports.getAliUploader=getAliUploader;
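
The new compiled AliUploader runs each part through a shared Piscina worker pool over @soga/part-uploader, with an AbortController per part and a MessageChannel port transferred to the worker for progress events; instances are cached per `${uid}_${host_id}` and default to 3 threads (the BaiduUploader below follows the same pattern with 6 threads and a cloud_folder_path instead of a cloud_folder_id). A reduced sketch of that dispatch pattern, with the task payload fields left generic since the full shape lives in @soga/part-uploader:

import Piscina from 'piscina';
import { MessageChannel } from 'node:worker_threads';

// One pool per process, resolving the worker entry the same way the dist code does (CommonJS assumed).
const pool = new Piscina({ filename: require.resolve('@soga/part-uploader') });

async function runPart(task: Record<string, unknown>): Promise<unknown> {
  const controller = new AbortController();
  const { port1, port2 } = new MessageChannel();

  // The worker reports progress over the transferred port.
  port2.on('message', (msg) => console.log('progress event', msg));

  try {
    return await pool.run(
      { ...task, port: port1 },
      { name: 'uploadAli', signal: controller.signal, transferList: [port1] },
    );
  } finally {
    port2.close();
  }
}

On an AbortError the real uploader resets the part to UploadStatus.NULL so it can be retried; any other failure goes through onPartError, which marks both the part and its file as ERROR.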

package/dist/host-uploader/baidu.js
CHANGED
@@ -1 +1 @@
-"use strict";var __importDefault=this&&this.__importDefault||function(
+
"use strict";var __importDefault=this&&this.__importDefault||function(e){return e&&e.__esModule?e:{default:e}};Object.defineProperty(exports,"__esModule",{value:!0}),exports.getBaiduUploader=exports.BaiduUploader=void 0;const base_1=require("./base"),types_1=require("@soga/types"),piscina_1=__importDefault(require("piscina")),node_worker_threads_1=require("node:worker_threads"),piscina=new piscina_1.default({filename:require.resolve("@soga/part-uploader")}),uploaders=new Map;class BaiduUploader extends base_1.BaseUploader{params;constructor(e){super(e,types_1.HostType.BAIDU),this.params=e}getThread(e){const t=new AbortController,{port1:a,port2:r}=new node_worker_threads_1.MessageChannel;return{file_id:e.file_id,part_id:e.id,uid:e.uid,start:async()=>{try{const{file_id:o,host_id:s,id:i,info:d}=e,p=await this.fileRepository.findOneBy({id:o}),{output_root:l}=p;r.on("message",(async e=>{await this.onPartProgress(e)})),r.on("error",(async e=>{console.log("baidu upload part error evt: "),console.log(e)}));const u={file_id:o,host_id:s,part_id:i,output_root:l,part:d,cloud_folder_path:p.cloud_info.hosts.baidu?.path,sdk_domain:this.params.sdk_domain,port:a},n=await piscina.run(u,{name:"uploadBaidu",signal:t.signal,transferList:[a]});n&&(await this.partRepository.update(e.id,{upload_result:{baidu:n},upload_status:types_1.UploadStatus.SUCCESS}),await this.onPartSuccess({file_id:o,host_type:types_1.HostType.BAIDU}))}catch(t){console.log("error_name: ",t.name),"AbortError"==t.name?this.partRepository.update(e.id,{upload_status:types_1.UploadStatus.NULL}):(console.log("baidu upload part error: "),console.log(t),await this.onPartError({error:t,part_id:e.id,file_id:e.file_id}))}finally{this.threads=this.threads.filter((t=>t.part_id!=e.id)),r.close(),await this.start()}},stop:async()=>{t.abort(),this.threads=this.threads.filter((t=>t.part_id!==e.id))}}}}exports.BaiduUploader=BaiduUploader;const getBaiduUploader=async e=>{const{uid:t,host_id:a}=e,r=`${t}_${a}`;let o=uploaders.get(r);return o||(o=new BaiduUploader(e),o.setThreads(6),uploaders.set(r,o)),o};exports.getBaiduUploader=getBaiduUploader;

package/dist/host-uploader/base.d.ts
CHANGED
@@ -10,7 +10,6 @@ export declare abstract class BaseUploader {
     protected successRepository: Repository<UploadSuccess>;
     protected fileRepository: Repository<UploadFile>;
     protected partRepository: Repository<UploadPart>;
-    private isRunning;
     private thread_count;
     private maxThreads;
     protected threads: ThreadType[];
@@ -22,15 +21,20 @@ export declare abstract class BaseUploader {
         file_id: number;
         host_type: HostType;
     }): Promise<void>;
-
-    protected onPartError(
+    protected onFileSuccess(file: UploadFile): Promise<void>;
+    protected onPartError({ error, part_id, file_id, }: {
+        error: Error;
+        part_id: number;
+        file_id: number;
+    }): Promise<void>;
     constructor(baseParams: UploaderParams, host_type: HostType);
     setThreads(threads: number): Promise<void>;
     start(): Promise<void>;
     stopFiles(ids: number[]): Promise<void>;
     deleteFiles(ids: number[]): Promise<void>;
     stopAll(): Promise<void>;
+    protected dequeueOneFile(): Promise<void>;
     private checkProcessingFile;
     private run;
-
+    private checkAllHostsUploaded;
 }
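
The visible surface changes here: the private isRunning flag is gone, onFileSuccess and dequeueOneFile appear as protected members, checkAllHostsUploaded appears as a private member, and onPartError now takes a single destructured options object. A rough sketch of how a host-uploader subclass reports results through these hooks, with the UploaderParams and thread-handle shapes inferred from the compiled ali/baidu uploaders rather than from published typings, so treat it as illustrative only:

import { BaseUploader } from '@soga/uploader/dist/host-uploader/base'; // dist path as shown in this diff
import { UploadPart } from '@soga/entities';
import { HostType, UploadStatus } from '@soga/types';

// Stand-in for the real worker-pool transfer (hypothetical helper).
async function sendPartToHost(part: UploadPart, signal: AbortSignal): Promise<void> {
  if (signal.aborted) throw Object.assign(new Error('aborted'), { name: 'AbortError' });
  // ... actual host I/O would go here ...
}

class ExampleUploader extends BaseUploader {
  constructor(params: any) { // the UploaderParams shape is not shown in this diff
    super(params, HostType.ALI); // reuses an existing HostType purely for illustration
  }

  // The compiled uploaders return one small task handle per part.
  getThread(part: UploadPart) {
    const controller = new AbortController();
    return {
      file_id: part.file_id,
      part_id: part.id,
      uid: part.uid,
      start: async () => {
        try {
          await sendPartToHost(part, controller.signal);
          await this.partRepository.update(part.id, { upload_status: UploadStatus.SUCCESS });
          await this.onPartSuccess({ file_id: part.file_id, host_type: HostType.ALI });
        } catch (error) {
          // New 0.2.45 shape: a single destructured options object.
          await this.onPartError({ error: error as Error, part_id: part.id, file_id: part.file_id });
        }
      },
      stop: async () => controller.abort(),
    };
  }
}

The real implementations additionally store the per-host upload_result, prune the finished handle from this.threads, and call start() again in a finally block; see the compiled ali.js and baidu.js above.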

package/dist/host-uploader/base.js
CHANGED
@@ -1 +1 @@
-"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.BaseUploader=void 0;const typeorm_1=require("typeorm"),entities_1=require("@soga/entities"),types_1=require("@soga/types"),prepare_1=require("../hooks/prepare"),utils_1=require("@soga/utils");let isDequeueing=!1;class BaseUploader{hostType;onProgress=async()=>{};onSuccess=async()=>{};onError=async()=>{};successRepository;fileRepository;partRepository;
+
"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.BaseUploader=void 0;const typeorm_1=require("typeorm"),entities_1=require("@soga/entities"),types_1=require("@soga/types"),prepare_1=require("../hooks/prepare"),utils_1=require("@soga/utils"),complete_1=require("../hooks/complete"),trasform_1=require("../hooks/trasform");let isDequeueing=!1;const runningMap=new Map,successMap=new Map;class BaseUploader{hostType;onProgress=async()=>{};onSuccess=async()=>{};onError=async()=>{};successRepository;fileRepository;partRepository;thread_count=1;maxThreads=10;threads=[];baseParams;getValidThreads(t){return Math.min(Math.max(t??0,0),this.maxThreads)}async onPartProgress(t){const{id:s,data:e,type:i}=t;if("percent"!=i)return;const{part_id:a,percent:o}=e,r=await this.partRepository.findOneBy({id:a});if(!r)return;if(r.upload_percent>=o)return;if(r.upload_status==types_1.UploadStatus.ERROR)return;if(r.upload_status==types_1.UploadStatus.SUCCESS)return;await this.partRepository.update(a,{upload_percent:o});const p=await this.partRepository.findBy({file_id:s,host_type:this.hostType});let d=0,n=0;for(const t of p)d+=t.info.size,n+=t.info.size*t.upload_percent;const u=n/d,l=await this.fileRepository.findOneBy({id:s}),h=types_1.UploadProcessStep[`upload_${this.hostType}`];l.progress[h].percent=u,l.percent=(0,utils_1.getProgressPercent)(l.progress),await this.fileRepository.update(s,{progress:l.progress,percent:l.percent}),await this.onProgress(l)}async onPartSuccess({file_id:t,host_type:s}){for(;successMap.get(t);)await new Promise((t=>{setTimeout(t,100)}));try{successMap.set(t,!0);const e=await this.fileRepository.findOneBy({id:t}),i=`${s}_upload_status`;if(e[i]==types_1.UploadStatus.ERROR)return;if(e.upload_status==types_1.UploadStatus.ERROR)return;if(await this.partRepository.findOneBy({file_id:e.id,host_type:s,upload_status:(0,typeorm_1.Not)(types_1.UploadStatus.SUCCESS)}))return;const a=await this.partRepository.findBy({file_id:e.id,host_type:s}),o={};if(s==types_1.HostType.BAIDU)for(const t of a)o[t.info.file]={...t.upload_result.baidu,md4:t.info.md4};else if(s==types_1.HostType.ALI)for(const t of a)o[t.info.file]={...t.upload_result.ali,sha1:t.info.sha1};const r=`${s}_upload_result`,p=await this.fileRepository.findOneBy({id:e.id}),d=types_1.UploadProcessStep[`upload_${this.hostType}`];p.progress[d].percent=1,p.percent=(0,utils_1.getProgressPercent)(p.progress),await this.fileRepository.update(e.id,{[i]:types_1.UploadStatus.SUCCESS,[r]:o,progress:p.progress,percent:p.percent});const n=await this.fileRepository.findOneBy({id:e.id});if(!await this.checkAllHostsUploaded(n))return;await this.onFileSuccess(n)}finally{successMap.delete(t)}}async onFileSuccess(t){await(0,complete_1.complete)({file_id:t.id,fileRepository:this.fileRepository,sdk_domain:this.baseParams.sdk_domain});const s=await this.fileRepository.findOneBy({id:t.id});await this.onSuccess(s),await(0,trasform_1.transform)({file_id:t.id,fileRepository:this.fileRepository,partRepository:this.partRepository,successRepository:this.successRepository})}async onPartError({error:t,part_id:s,file_id:e}){await this.partRepository.update(s,{upload_status:types_1.UploadStatus.ERROR});const i=`${this.hostType}_upload_status`,a=await this.fileRepository.findOneBy({id:e});a[i]!=types_1.UploadStatus.ERROR&&(await this.fileRepository.update(a.id,{[i]:types_1.UploadStatus.ERROR,upload_status:types_1.UploadStatus.ERROR}),await 
this.onError(t,a))}constructor(t,s){this.hostType=s,this.baseParams=t;const{dataSource:e}=t;this.fileRepository=e.getRepository(entities_1.UploadFile),this.successRepository=e.getRepository(entities_1.UploadSuccess),this.partRepository=e.getRepository(entities_1.UploadPart),this.thread_count=this.getValidThreads(t.threads??this.thread_count),t.onProgress&&(this.onProgress=t.onProgress.bind(this)),t.onSuccess&&(this.onSuccess=t.onSuccess.bind(this)),t.onError&&(this.onError=t.onError.bind(this))}async setThreads(t){const s=this.getValidThreads(t);this.thread_count=s,await this.run()}async start(){await this.run()}async stopFiles(t){const s=this.threads.filter((s=>t.includes(s.file_id)));await Promise.all(s.map((t=>t.stop()))),await this.start()}async deleteFiles(t){const s=this.threads.filter((s=>t.includes(s.file_id)));await Promise.all(s.map((t=>t.stop())))}async stopAll(){await Promise.all(this.threads.map((t=>t.stop())))}async dequeueOneFile(){for(;isDequeueing;)await new Promise((t=>{setTimeout(t,50)}));try{isDequeueing=!0;const t=`${this.hostType}_upload_status`,s=`${this.hostType}_host_id`,e=await this.fileRepository.findOne({where:{uid:this.baseParams.uid,is_paused:!1,encode_status:types_1.EncodeStatus.SUCCESS,[t]:(0,typeorm_1.IsNull)(),upload_status:(0,typeorm_1.In)([types_1.UploadStatus.NULL,types_1.UploadStatus.PROCESS])},order:{created_at:"ASC"}});if(!e)return;await this.fileRepository.update(e.id,{[t]:types_1.UploadStatus.PROCESS,upload_status:types_1.UploadStatus.PROCESS}),await(0,prepare_1.prepare)({file_id:e.id,sdk_domain:this.baseParams.sdk_domain,fileRepository:this.fileRepository});const{encoded:i}=e,{affix:a,source:o,txt:r,img:p,media:d}=i,n=[...o?.parts??[],...r?.parts??[],...p?.parts??[],...d?.parts??[]];a?.forEach((t=>{t.parts?.forEach((t=>{n.push(t)}))}));const u=e[s],l=[],{length:h}=n;for(let t=0;t<h;t++){const s=n[t];await this.partRepository.findOneBy({file_id:e.id,part_name:s.file,host_id:u})||l.push({uid:this.baseParams.uid,file_id:e.id,part_name:s.file,info:s,output_root:e.output_root,host_id:u,host_type:this.hostType})}return void await this.partRepository.save(l)}finally{isDequeueing=!1}}async checkProcessingFile(t){if(t.baidu_host_id&&t.baidu_upload_status==types_1.UploadStatus.PROCESS){await this.partRepository.findOneBy({file_id:t.id,host_type:types_1.HostType.BAIDU,upload_status:(0,typeorm_1.Not)(types_1.UploadStatus.SUCCESS)})||await this.onPartSuccess({file_id:t.id,host_type:types_1.HostType.BAIDU})}if(t.ali_host_id&&t.ali_upload_status==types_1.UploadStatus.PROCESS){await this.partRepository.findOneBy({file_id:t.id,host_type:types_1.HostType.ALI,upload_status:(0,typeorm_1.Not)(types_1.UploadStatus.SUCCESS)})||await this.onPartSuccess({file_id:t.id,host_type:types_1.HostType.ALI})}}async run(){const t=this.baseParams.uid;for(;runningMap.get(t);)await new Promise((t=>{setTimeout(t,100)}));runningMap.set(t,!0);const s=Date.now(),e=await this.fileRepository.findBy({uid:this.baseParams.uid,is_paused:!1,upload_status:types_1.UploadStatus.PROCESS});if(e.length){const t=this.threads.map((t=>t.file_id)),s=e.filter((s=>!t.includes(s.id)));for(const t of s)await this.checkProcessingFile(t)}console.log("exception time1: ",Date.now()-s);const i=Date.now(),a=await this.partRepository.findBy({uid:this.baseParams.uid,is_paused:!1,upload_status:types_1.UploadStatus.PROCESS,host_id:this.baseParams.host_id});if(a.length){const t=this.threads.map((t=>t.part_id)),s=a.filter((s=>!t.includes(s.id)));if(s.length){const t=s.map((t=>t.id));console.log("exception ids: ",t),await 
this.partRepository.update({id:(0,typeorm_1.In)(t),upload_status:types_1.UploadStatus.PROCESS},{upload_status:types_1.UploadStatus.NULL})}}console.log("exception time2: ",Date.now()-i);const o=this.threads.length,r=this.thread_count;if(o<r){const s=r-o,e=async()=>await this.partRepository.findOne({where:{uid:this.baseParams.uid,is_paused:!1,upload_status:types_1.UploadStatus.NULL,host_id:this.baseParams.host_id},order:{created_at:"ASC"}});for(let t=0;t<s;t++){const t=await e();if(!t)break;const s=this.getThread(t);this.threads.push(s),await this.partRepository.update(t.id,{upload_status:types_1.UploadStatus.PROCESS}),s.start()}if(this.threads.length<r){await this.dequeueOneFile();await e()?(runningMap.delete(t),await this.run()):runningMap.delete(t)}else runningMap.delete(t)}else{const s=o-r,e=this.threads.slice(0,s);for(const t of e)await t.stop();runningMap.delete(t)}}async checkAllHostsUploaded(t){return(!t.baidu_host_id||t.baidu_upload_status==types_1.UploadStatus.SUCCESS)&&(!t.ali_host_id||t.ali_upload_status==types_1.UploadStatus.SUCCESS)}}exports.BaseUploader=BaseUploader;
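
The compiled base class now serializes its scheduler with a runningMap keyed by uid and a successMap keyed by file id, re-queues parts stuck in PROCESS, pulls the next pending file in via dequeueOneFile(), and gates completion on checkAllHostsUploaded(). A readable sketch of that final gate, reconstructed from the minified output (in dist it is a private async method of BaseUploader; field names are as they appear there):

import { UploadStatus } from '@soga/types';

// Subset of the UploadFile fields the check relies on.
interface HostUploadFlags {
  baidu_host_id?: number;
  baidu_upload_status?: UploadStatus;
  ali_host_id?: number;
  ali_upload_status?: UploadStatus;
}

// A file counts as fully uploaded only when every host it was actually
// assigned to (non-empty *_host_id) has reached UploadStatus.SUCCESS.
function checkAllHostsUploaded(file: HostUploadFlags): boolean {
  return (
    (!file.baidu_host_id || file.baidu_upload_status === UploadStatus.SUCCESS) &&
    (!file.ali_host_id || file.ali_upload_status === UploadStatus.SUCCESS)
  );
}

Once this predicate passes, onFileSuccess() runs the complete hook, invokes the user-supplied onSuccess callback, and hands the repositories to transform() to move the record into UploadSuccess and clean up its parts and temporary output.
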
package/dist/uploader.d.ts
CHANGED
@@ -10,8 +10,6 @@ export declare class Uploader {
     protected partRepository: Repository<UploadPart>;
     constructor(params: GetUploaderParams);
     protected processorMap: Map<number, BaseUploader>;
-    private successing;
-    private onSuccess;
     init(): Promise<void>;
     start(): Promise<void>;
     stopAll(): Promise<void>;

package/dist/uploader.js
CHANGED
@@ -1 +1 @@
-"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.Uploader=void 0;const types_1=require("@soga/types"),entities_1=require("@soga/entities"),baidu_1=require("./host-uploader/baidu"),ali_1=require("./host-uploader/ali")
+
"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.Uploader=void 0;const types_1=require("@soga/types"),entities_1=require("@soga/entities"),baidu_1=require("./host-uploader/baidu"),ali_1=require("./host-uploader/ali");class Uploader{params;uid;dataSource;fileRepository;partRepository;constructor(t){this.params=t,this.uid=t.uid,this.dataSource=t.dataSource,this.fileRepository=this.dataSource.getRepository(entities_1.UploadFile),this.partRepository=this.dataSource.getRepository(entities_1.UploadPart)}processorMap=new Map;async init(){}async start(){await Promise.all([this.startAli(),this.startBaidu()])}async stopAll(){const t=this.processorMap.values();for(const s of t)await s.stopAll()}async stopFiles(t){const s=this.processorMap.values();for(const e of s)await e.stopFiles(t)}async deleteFiles(t){const s=this.processorMap.values();for(const e of s)await e.deleteFiles(t)}async startHost(t,s){if(this.processorMap.has(t)){const s=this.processorMap.get(t);return void await s.start()}const e={uid:this.uid,sdk_domain:this.params.sdk_domain,dataSource:this.dataSource,host_id:t,onProgress:this.params.onProgress,onSuccess:this.params.onSuccess,onError:this.params.onError};if(s==types_1.HostType.BAIDU){const s=await(0,baidu_1.getBaiduUploader)(e);this.processorMap.set(t,s),await s.start()}else if(s==types_1.HostType.ALI){const s=await(0,ali_1.getAliUploader)(e);this.processorMap.set(t,s),await s.start()}}async startAli(){const t=(await this.fileRepository.createQueryBuilder("file").select("DISTINCT file.ali_host_id").where("file.uid = :uid",{uid:this.uid}).getRawMany()).map((t=>t.ali_host_id)).filter((t=>!!t));if(t.length)for(const s of t)await this.startHost(s,types_1.HostType.ALI)}async startBaidu(){const t=(await this.fileRepository.createQueryBuilder("file").select("DISTINCT file.baidu_host_id").where("file.uid = :uid",{uid:this.uid}).getRawMany()).map((t=>t.baidu_host_id)).filter((t=>!!t));if(t.length)for(const s of t)await this.startHost(s,types_1.HostType.BAIDU)}}exports.Uploader=Uploader;
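
The compiled Uploader now builds its processorMap lazily: startAli()/startBaidu() query the distinct ali_host_id/baidu_host_id values for the user's files and create one cached host uploader per host. A minimal usage sketch; the GetUploaderParams fields and callback signatures are inferred from the dist constructor and from how the base class invokes them, and the root import assumes the package entry re-exports Uploader:

import { DataSource } from 'typeorm';
import { Uploader } from '@soga/uploader';

// `dataSource` is assumed to be an initialized TypeORM DataSource registered
// with the UploadFile/UploadPart/UploadSuccess entities from @soga/entities.
async function runUploads(dataSource: DataSource, uid: number) {
  const uploader = new Uploader({
    uid,
    sdk_domain: 'https://example.invalid', // hypothetical SDK endpoint
    dataSource,
    onProgress: async (file) => console.log('progress', file.id, file.percent),
    onSuccess: async (file) => console.log('uploaded', file.id),
    onError: async (error, file) => console.error('failed', file.id, error),
  });

  await uploader.init();
  await uploader.start();   // spins up the Ali/Baidu host uploaders per host_id
  // ... later ...
  await uploader.stopAll(); // aborts every running part-upload thread
}
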
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@soga/uploader",
-  "version": "0.2.
+  "version": "0.2.45",
   "publishConfig": {
     "access": "public"
   },
@@ -25,21 +25,21 @@
     "@types/fs-extra": "^11.0.4",
     "@types/glob": "^8.1.0",
     "@types/jest": "^29.5.14",
-    "@types/node": "^
-    "@typescript-eslint/eslint-plugin": "^
-    "@typescript-eslint/parser": "^
-    "eslint": "^
-    "eslint-config-prettier": "^
-    "eslint-plugin-jest": "^
-    "eslint-plugin-prettier": "^5.
-    "glob": "^
+    "@types/node": "^22.14.1",
+    "@typescript-eslint/eslint-plugin": "^8.33.0",
+    "@typescript-eslint/parser": "^8.33.0",
+    "eslint": "^9.27.0",
+    "eslint-config-prettier": "^10.1.5",
+    "eslint-plugin-jest": "^28.11.1",
+    "eslint-plugin-prettier": "^5.4.0",
+    "glob": "^11.0.2",
     "jest": "^29.7.0",
     "prettier": "^3.0.2",
     "rimraf": "^6.0.1",
     "terser": "^5.19.2",
     "ts-jest": "^29.2.5",
     "ts-node": "^10.9.1",
-    "typescript": "^5.
+    "typescript": "^5.8.3"
   },
   "keywords": [],
   "author": "",
@@ -49,11 +49,11 @@
   },
   "dependencies": {
     "@soga/entities": "^0.2.41",
-    "@soga/part-uploader": "^0.2.
+    "@soga/part-uploader": "^0.2.44",
     "@soga/sdk": "^0.2.33",
     "@soga/types": "^0.2.33",
     "@soga/utils": "^0.2.33",
     "piscina": "^4.9.2"
   },
-  "gitHead": "
+  "gitHead": "1c1e8cb300236c3b0fbceac6e24989bac9b7209f"
 }