@soga/uploader 0.1.17 → 0.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1 +1 @@
- # uploader
+ # downloader
@@ -1,14 +1,7 @@
  import { UploadFile } from '@soga/entities';
- import { RecordDetail } from '@soga/types';
  import { Repository } from 'typeorm';
- export declare function complete({ file_id, fileRepository, updateCloudRecord, }: {
+ export declare function complete({ file_id, sdk_domain, fileRepository, }: {
  file_id: number;
+ sdk_domain: string;
  fileRepository: Repository<UploadFile>;
- updateCloudRecord: (params: Partial<{
- uid: number;
- space_id: number;
- record_id: number;
- manifest: string;
- parent_id?: number;
- }>) => Promise<RecordDetail>;
- }): Promise<RecordDetail>;
+ }): Promise<void>;
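The `complete` hook drops the injected `updateCloudRecord` callback in favor of an `sdk_domain` string and no longer resolves a `RecordDetail` to the caller. A minimal sketch of the new call shape, assuming a TypeORM `DataSource` is already set up (the import path and domain value are illustrative, inferred from the require() calls in the minified sources, not confirmed by the package):

```ts
import { DataSource } from 'typeorm';
import { UploadFile } from '@soga/entities';
// Path is an assumption based on the "../hooks/complete" require seen below.
import { complete } from './hooks/complete';

declare const dataSource: DataSource; // assumed to be initialized elsewhere

async function finishUpload(file_id: number): Promise<void> {
  // 0.1.x passed an updateCloudRecord callback and received the RecordDetail back;
  // 0.2.x passes only sdk_domain and the hook talks to the cloud via @soga/sdk itself.
  await complete({
    file_id,
    sdk_domain: 'https://example.invalid', // placeholder domain
    fileRepository: dataSource.getRepository(UploadFile),
  });
}
```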
@@ -1 +1 @@
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.complete=complete;const types_1=require("@soga/types");async function complete({file_id:e,fileRepository:i,updateCloudRecord:t}){const a=await i.findOneBy({id:e});let s=a.task_record_id;if(0!==a.pid){s=(await i.findOneBy({id:a.pid})).cloud_info.id}const r=await getManifest(a);return await t({space_id:a.space_id,record_id:a.cloud_info.id,uid:a.uid,parent_id:s,manifest:JSON.stringify(r)})}async function getManifest(e){const{type:i,source_data:t,media_data:a,img_data:s,txt_data:r}=e,d={};if(t?.parts){let e=!0;i!=types_1.RecordType.VIDEO&&i!=types_1.RecordType.AUDIO||(e=!1),i==types_1.RecordType.TXT&&(e=!1),t.parts.forEach((i=>{d[i.file]={md5:i.md5,size:i.size,source:!0,preview:e}}))}a?.parts&&a.parts.forEach((e=>{d[e.file]={md5:e.md5,size:e.size,source:!1,preview:!0}})),s?.parts&&s.parts.forEach((e=>{d[e.file]={md5:e.md5,size:e.size,source:!1,preview:!0}})),r?.parts&&r.parts.forEach((e=>{d[e.file]={md5:e.md5,size:e.size,source:!1,preview:!0}}));const o=Object.values(d),p={host:0,preview:0,source:0};o.forEach((e=>{p.host+=e.size,e.preview&&(p.preview+=e.size),e.source&&(p.source+=e.size)}));const c={meta:{host_size:p.host,preview_size:p.preview,source_size:p.source,filesize:e.size,filename:e.filename,birthtime:e.local_birthtime,mtime:e.local_mtime,ctime:e.local_ctime},parts:d};if(t?.parts&&(c.source={head:t.head,parts:t.parts.map((e=>e.file))}),a?.parts){const{parts:e,...i}=a;c.media={...i,parts:e.map((e=>e.file))}}if(s?.parts){const{meta:e,parts:i}=s;c.img={meta:e,preview:i.map((e=>({file:e.file,start:e.start,end:e.end}))),parts:i.map((e=>e.file))}}if(r?.parts){const{map:e,pad:i,pages:t,parts:a}=r;c.txt={entrance:e,pad:i,pages:t,parts:a.map((e=>e.file))}}if(e.ali_host_id){c.ali={drive_id:"",host_id:e.ali_host_id,info:{}};const i=e.upload_data.ali;Object.keys(d).forEach((e=>{const t=i[e];c.ali.info[e]=t.file_id,c.ali.drive_id||(c.ali.drive_id=t.drive_id)}))}if(e.baidu_host_id){c.baidu={host_id:e.baidu_host_id,info:{}};const i=e.upload_data.baidu;Object.keys(d).forEach((e=>{const t=i[e];c.baidu.info[e]=t.fs_id}))}return c}
+ "use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.complete=complete;const sdk_1=require("@soga/sdk"),types_1=require("@soga/types");async function complete({file_id:e,sdk_domain:i,fileRepository:t}){const s=await t.findOneBy({id:e});let a=s.task_record_id;const o=(0,sdk_1.getSdk)(i),r={};let d;if(s.type==types_1.RecordType.AFFIX)a=0,await async function(e,i){e.encoded.affix?.forEach((e=>{e.parts.forEach((e=>{r[e.file]={md5:e.md5,size:e.size,source:!0,preview:!0}}))}));const t=await i.getRecordInfo({space_id:e.space_id,record_id:e.task_record_id,refresh:!0});t.manifest||(t.manifest={});d=t.manifest,d.meta||(d.meta={host_size:0});d.parts||(d.parts={});d.affix||(d.affix=[]);e.encoded.affix?.forEach((e=>{const{parts:i,...t}=e;d.affix.push({...t,parts:i.map((e=>e.file))})})),Object.assign(d.parts,r),p(),c()}(s,o);else{if(0!==s.pid){a=(await t.findOneBy({id:s.pid})).cloud_info.id}await async function(e){const{type:i,inputs:t,encoded:s}=e;if(s.source?.parts){let e=!0;i!=types_1.RecordType.VIDEO&&i!=types_1.RecordType.AUDIO||(e=!1),i==types_1.RecordType.TXT&&(e=!1),s.source.parts.forEach((i=>{r[i.file]={md5:i.md5,size:i.size,source:!0,preview:e}}))}s.media?.parts&&s.media.parts.forEach((e=>{r[e.file]={md5:e.md5,size:e.size,source:!1,preview:!0}}));s.img?.parts&&s.img.parts.forEach((e=>{r[e.file]={md5:e.md5,size:e.size,source:!1,preview:!0}}));s.txt?.parts&&s.txt.parts.forEach((e=>{r[e.file]={md5:e.md5,size:e.size,source:!1,preview:!0}}));const a=t[0],o={host_size:0,filesize:a.filesize,filename:a.filename,btime:a.local_btime,ctime:a.local_ctime,mtime:a.local_mtime};d={meta:o,parts:r},s.source?.parts&&(d.source={head:s.source.head,parts:s.source.parts.map((e=>e.file))});if(s.media?.parts){const{parts:e,...i}=s.media;d.media={...i,parts:e.map((e=>e.file))}}if(s.img?.parts){const{meta:e,parts:i}=s.img;d.img={meta:e,preview:i.map((e=>({file:e.file,start:e.start,end:e.end}))),parts:i.map((e=>e.file))}}if(s.txt?.parts){const{map:e,pad:i,pages:t,parts:a}=s.txt;d.txt={entrance:e,pad:i,pages:t,parts:a.map((e=>e.file))}}return c(),p(),d}(s)}function c(){const e=Object.values(r),i={host:0,preview:0,source:0};e.forEach((e=>{i.host+=e.size})),d.meta.host_size=i.host}function p(){if(s.ali_host_id&&!d.ali){d.ali={drive_id:"",host_id:s.ali_host_id,info:{}};const e=s.uploaded.ali;Object.keys(r).forEach((i=>{const t=e[i];d.ali.info[i]=t.file_id,d.ali.drive_id||(d.ali.drive_id=t.drive_id)}))}if(s.baidu_host_id){d.baidu={host_id:s.baidu_host_id,info:{}};const e=s.uploaded.baidu;Object.keys(r).forEach((i=>{const t=e[i];d.baidu.info[i]=t.fs_id}))}}await o.updateRecord({space_id:s.space_id,record_id:s.cloud_info.id,parent_id:a,manifest:JSON.stringify(d)})}
@@ -1,15 +1,7 @@
  import { UploadFile } from '@soga/entities';
- import { RecordDetail, RecordFtype, RecordType } from '@soga/types';
  import { Repository } from 'typeorm';
- export declare function prepare({ file_id, fileRepository, createCloudRecord, }: {
+ export declare function prepare({ file_id, sdk_domain, fileRepository, }: {
  file_id: number;
+ sdk_domain: string;
  fileRepository: Repository<UploadFile>;
- createCloudRecord: ({ uid, space_id, name, parent_id, type, ftype, }: Partial<{
- uid: number;
- space_id: number;
- name: string;
- parent_id: number;
- type: RecordType;
- ftype: RecordFtype;
- }>) => Promise<RecordDetail>;
  }): Promise<void>;
@@ -1 +1 @@
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.prepare=prepare;const types_1=require("@soga/types");async function prepare({file_id:i,fileRepository:a,createCloudRecord:d}){await async function(){const d=await a.findOneBy({id:i});if(!d)return;const e=await t(d.pid,[]);for(const i of e)await n(i);return e}();const e=await a.findOneBy({id:i});async function t(i,d=[]){if(0===i)return d;const e=await a.findOneBy({id:i});return e?e.cloud_info?.id?d:(d.unshift(e),0!==e.pid?await t(e.pid,d):d):d}async function n(i){if(i.cloud_info?.id)return;const{filename:e,uid:t,space_id:n,task_record_id:o}=i;let s=o;if(0!==i.pid){s=(await a.findOneBy({id:i.pid})).cloud_info.id}const p=await d({uid:t,space_id:n,name:e,parent_id:s,type:i.type,ftype:i.type==types_1.RecordType.FOLDER?types_1.RecordFtype.NONE:i.ftype}),{id:r,cloud_info:u}=p,f={id:r,hosts:{}};if(u.ali&&(f.hosts.ali={id:u.ali.id,name:u.ali.name,drive_id:u.ali.drive_id,file_id:u.ali.file_id}),u.baidu&&(f.hosts.baidu={id:u.baidu.id,name:u.baidu.name,fs_id:u.baidu.fs_id,path:u.baidu.path},!f.hosts.baidu.path)){const d=await a.findOneBy({id:i.pid}),{path:e}=d.cloud_info.hosts.baidu;e&&(f.hosts.baidu.path=`${e}/${u.baidu.name}`)}await a.update({id:i.id},{cloud_info:f})}e&&await n(e)}
+ "use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.prepare=prepare;const types_1=require("@soga/types"),sdk_1=require("@soga/sdk");async function prepare({file_id:i,sdk_domain:a,fileRepository:d}){const e=await d.findOneBy({id:i});if(e.type!=types_1.RecordType.AFFIX)await async function(){const a=await d.findOneBy({id:i});if(!a)return;const e=await t(a.pid,[]);for(const i of e)await s(i);return e}(),await s(e);else{const{task_record_id:i}=e,t=(0,sdk_1.getSdk)(a),s=await t.getRecordInfo({space_id:e.space_id,record_id:i,refresh:!0}),o=await n(s);await d.update(e.id,{cloud_info:o})}async function t(i,a=[]){if(0===i)return a;const e=await d.findOneBy({id:i});return e?e.cloud_info?.id?a:(a.unshift(e),0!==e.pid?await t(e.pid,a):a):a}async function n(i){const{id:a,cloud_info:t}=i,n={id:a,hosts:{}};if(t.ali&&(n.hosts.ali={id:t.ali.id,name:t.ali.name,drive_id:t.ali.drive_id,file_id:t.ali.file_id}),t.baidu&&(n.hosts.baidu={id:t.baidu.id,name:t.baidu.name,fs_id:t.baidu.fs_id,path:t.baidu.path},!n.hosts.baidu.path)){const i=await d.findOneBy({id:e.pid}),{path:a}=i.cloud_info.hosts.baidu;a&&(n.hosts.baidu.path=`${a}/${t.baidu.name}`)}return n}async function s(i){if(i.cloud_info?.id)return;const{inputs:e,space_id:t,task_record_id:s}=i;let o=s;if(0!==i.pid){o=(await d.findOneBy({id:i.pid})).cloud_info.id}const r=(0,sdk_1.getSdk)(a),c=await r.createRecord({space_id:t,name:e[0].filename,parent_id:o,type:i.type,ftype:types_1.RecordFtype.NONE}),p=await n(c);await d.update({id:i.id},{cloud_info:p})}}
@@ -0,0 +1,9 @@
+ import { ThreadType, UploaderParams } from '../types/main';
+ import { BaseUploader } from './base';
+ import { UploadPart } from '@soga/entities';
+ export declare class AliUploader extends BaseUploader {
+ private params;
+ constructor(params: UploaderParams);
+ getThread(part: UploadPart): ThreadType;
+ }
+ export declare const getAliUploader: (params: UploaderParams) => Promise<AliUploader>;
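These per-host uploaders replace the old `AliProcessor`/`getAliProcessor` pair, and `dist/main` re-exports them. A sketch of obtaining one directly, with placeholder values for everything not defined by the diff:

```ts
import { DataSource } from 'typeorm';
import { getAliUploader } from '@soga/uploader'; // re-exported from dist/main in 0.2.x

declare const dataSource: DataSource; // assumed TypeORM DataSource, initialized elsewhere

async function run() {
  const uploader = await getAliUploader({
    uid: 1,                                // placeholder user id
    host_id: 42,                           // placeholder Aliyun host id
    sdk_domain: 'https://example.invalid', // placeholder domain
    dataSource,
    onProgress: async (file) => console.log(file.percent),
    onError: async (err) => console.error(err),
  });
  await uploader.start(); // instances are cached per `${uid}_${host_id}` key
}
```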
@@ -0,0 +1 @@
+ "use strict";var __importDefault=this&&this.__importDefault||function(i){return i&&i.__esModule?i:{default:i}};Object.defineProperty(exports,"__esModule",{value:!0}),exports.getAliUploader=exports.AliUploader=void 0;const base_1=require("./base"),types_1=require("@soga/types"),piscina_1=__importDefault(require("piscina")),piscina=new piscina_1.default({filename:require.resolve("@soga/part-uploader")}),uploaders=new Map;class AliUploader extends base_1.BaseUploader{params;constructor(i){super(i,types_1.HostType.ALI),this.params=i}getThread(i){return{file_id:i.file_id,part_id:i.id,uid:i.uid,start:async()=>{try{const t=new AbortController,{file_id:e,host_id:a,id:s,info:r}=i,o=await this.fileRepository.findOneBy({id:e}),{output_root:d}=o;piscina.on("message",(async i=>{await this.onPartProgress(i)}));const l={file_id:e,host_id:a,part_id:s,output_root:d,part:r,cloud_folder_id:o.cloud_info.hosts.ali?.file_id,sdk_domain:this.params.sdk_domain},p=await piscina.run(l,{name:"uploadAli",signal:t.signal});if(p){await this.partRepository.update(i.id,{upload_result:{ali:p},upload_status:types_1.UploadStatus.SUCCESS});const t=await this.partRepository.findOneBy({id:s}),a=await this.fileRepository.findOneBy({id:e});await this.onPartSuccess(t,a)}}catch(t){await this.onPartError(t,await this.fileRepository.findOneBy({id:i.file_id}))}finally{this.threads=this.threads.filter((t=>t.part_id!=i.id)),await this.start()}},stop:async()=>{this.threads=this.threads.filter((t=>t.part_id!==i.id))}}}}exports.AliUploader=AliUploader;const getAliUploader=async i=>{const{uid:t,host_id:e}=i,a=`${t}_${e}`;let s=uploaders.get(a);if(s)for(;s.is_initing;)await new Promise((i=>setTimeout(i,20)));else s=new AliUploader(i),s.setThreads(3),uploaders.set(a,s),await s.init();return s};exports.getAliUploader=getAliUploader;
@@ -0,0 +1,9 @@
+ import { ThreadType, UploaderParams } from '../types/main';
+ import { BaseUploader } from './base';
+ import { UploadPart } from '@soga/entities';
+ export declare class BaiduUploader extends BaseUploader {
+ private params;
+ constructor(params: UploaderParams);
+ getThread(part: UploadPart): ThreadType;
+ }
+ export declare const getBaiduUploader: (params: UploaderParams) => Promise<BaiduUploader>;
@@ -0,0 +1 @@
+ "use strict";var __importDefault=this&&this.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(exports,"__esModule",{value:!0}),exports.getBaiduUploader=exports.BaiduUploader=void 0;const base_1=require("./base"),types_1=require("@soga/types"),piscina_1=__importDefault(require("piscina")),piscina=new piscina_1.default({filename:require.resolve("@soga/part-uploader")}),uploaders=new Map;class BaiduUploader extends base_1.BaseUploader{params;constructor(t){super(t,types_1.HostType.BAIDU),this.params=t}getThread(t){return{file_id:t.file_id,part_id:t.id,uid:t.uid,start:async()=>{try{const i=new AbortController,{file_id:a,host_id:e,id:s,info:r}=t,o=await this.fileRepository.findOneBy({id:a}),{output_root:d}=o;piscina.on("message",(async t=>{await this.onPartProgress(t)}));const p={file_id:a,host_id:e,part_id:s,output_root:d,part:r,cloud_folder_path:o.cloud_info.hosts.baidu?.path,sdk_domain:this.params.sdk_domain},n=await piscina.run(p,{name:"uploadBaidu",signal:i.signal});if(n){await this.partRepository.update(t.id,{upload_result:{baidu:n},upload_status:types_1.UploadStatus.SUCCESS});const i=await this.partRepository.findOneBy({id:s}),e=await this.fileRepository.findOneBy({id:a});await this.onPartSuccess(i,e)}}catch(i){await this.onPartError(i,await this.fileRepository.findOneBy({id:t.file_id}))}finally{this.threads=this.threads.filter((i=>i.part_id!=t.id)),await this.start()}},stop:async()=>{this.threads=this.threads.filter((i=>i.part_id!==t.id))}}}}exports.BaiduUploader=BaiduUploader;const getBaiduUploader=async t=>{const{uid:i,host_id:a}=t,e=`${i}_${a}`;let s=uploaders.get(e);if(s)for(;s.is_initing;)await new Promise((t=>setTimeout(t,20)));else s=new BaiduUploader(t),s.setThreads(3),uploaders.set(e,s),await s.init();return s};exports.getBaiduUploader=getBaiduUploader;
@@ -1,9 +1,8 @@
  import { Repository } from 'typeorm';
  import { UploadFile, UploadPart } from '@soga/entities';
- import { HostType } from '@soga/types';
- import { ProcessorParams, ThreadType } from '../types/main';
- import { PartUplodProgress } from '@soga/single-uploader';
- export declare abstract class BaseProcessor {
+ import { HostType, UploadWorkerPercent } from '@soga/types';
+ import { UploaderParams, ThreadType } from '../types/main';
+ export declare abstract class BaseUploader {
  protected hostType: HostType;
  protected onProgress: (file: UploadFile) => Promise<void>;
  protected onSuccess: (file: UploadFile) => Promise<void>;
@@ -14,13 +13,14 @@ export declare abstract class BaseProcessor {
  private thread_count;
  private maxThreads;
  protected threads: ThreadType[];
- baseParams: ProcessorParams;
+ baseParams: UploaderParams;
  abstract getThread(part: UploadPart): ThreadType;
  protected getValidThreads(threads: number): number;
- protected onPartProgress(params: PartUplodProgress): Promise<void>;
- protected onPartSuccess(file: UploadFile): Promise<void>;
+ protected onPartProgress(params: UploadWorkerPercent): Promise<void>;
+ protected onSomePartSuccess(file: UploadFile): Promise<void>;
+ protected onPartSuccess(part: UploadPart, file: UploadFile): Promise<void>;
  protected onPartError(err: Error, file: UploadFile): Promise<void>;
- constructor(baseParams: ProcessorParams);
+ constructor(baseParams: UploaderParams, host_type: HostType);
  setThreads(threads: number): Promise<void>;
  is_initing: boolean;
  init(): Promise<void>;
@@ -0,0 +1 @@
+ "use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.BaseUploader=void 0;const typeorm_1=require("typeorm"),entities_1=require("@soga/entities"),types_1=require("@soga/types"),prepare_1=require("../hooks/prepare"),complete_1=require("../hooks/complete"),utils_1=require("@soga/utils");let isDequeueing=!1;class BaseUploader{hostType;onProgress=async()=>{};onSuccess=async()=>{};onError=async()=>{};fileRepository;partRepository;isRunning=!1;thread_count=1;maxThreads=10;threads=[];baseParams;getValidThreads(t){return Math.min(Math.max(t??0,0),this.maxThreads)}async onPartProgress(t){const{id:s,data:e,type:i}=t;if("percent"!=i)return;const{part_id:a,percent:o}=e;await this.partRepository.update(a,{upload_percent:o});const r=await this.partRepository.findBy({file_id:s});let p=0,d=0;for(const t of r)p+=t.info.size,d+=t.info.size*t.upload_percent;const n=d/p,u=await this.fileRepository.findOneBy({id:s}),h=types_1.UploadProcessStep[`upload_${this.hostType}`];u.progress[h].percent=n,u.percent=(0,utils_1.getProgressPercent)(u.progress),await this.fileRepository.update(s,{progress:u.progress,percent:u.percent}),await this.onProgress(u)}async onSomePartSuccess(t){const{ali_host_id:s,baidu_host_id:e,ali_upload_status:i,baidu_upload_status:a}=t;if(e&&a!=types_1.UploadStatus.SUCCESS)return;if(s&&i!=types_1.UploadStatus.SUCCESS)return;const o={ali:{},baidu:{}},r=await this.partRepository.findBy({file_id:t.id});if(!r.length)return;for(const t of r)t.host_type==types_1.HostType.BAIDU&&(o.baidu[t.info.file]=t.upload_result.baidu),t.host_type==types_1.HostType.ALI&&(o.ali[t.info.file]=t.upload_result.ali);await this.fileRepository.update(t.id,{uploaded:o}),await(0,complete_1.complete)({file_id:t.id,sdk_domain:this.baseParams.sdk_domain,fileRepository:this.fileRepository});const p=await this.fileRepository.findOneBy({id:t.id});p.progress.END.percent=1,p.percent=1,await this.fileRepository.update(t.id,{progress:p.progress,percent:p.percent,upload_status:types_1.UploadStatus.SUCCESS}),await this.partRepository.delete({file_id:t.id}),await this.onSuccess(await this.fileRepository.findOneBy({id:t.id}))}async onPartSuccess(t,s){const{host_type:e}=t,i=`${e}_upload_status`;if(s[i]==types_1.UploadStatus.ERROR)return;if(await this.partRepository.findOneBy({id:(0,typeorm_1.Not)(t.id),file_id:s.id,host_type:e,upload_status:(0,typeorm_1.Not)(types_1.UploadStatus.SUCCESS)}))return;await this.fileRepository.update(s.id,{[i]:types_1.UploadStatus.SUCCESS,upload_status:types_1.UploadStatus.SOME_PART_SUCCESS});const a=await this.fileRepository.findOneBy({id:s.id});await this.onSomePartSuccess(a)}async onPartError(t,s){const e=`${this.hostType}_upload_status`;s[e]!=types_1.UploadStatus.ERROR&&(await this.fileRepository.update(s.id,{[e]:types_1.UploadStatus.ERROR}),await this.onError(t,s))}constructor(t,s){this.hostType=s,this.baseParams=t;const{dataSource:e}=t;this.fileRepository=e.getRepository(entities_1.UploadFile),this.partRepository=e.getRepository(entities_1.UploadPart),this.thread_count=this.getValidThreads(t.threads??this.thread_count),t.onProgress&&(this.onProgress=t.onProgress.bind(this)),t.onSuccess&&(this.onSuccess=t.onSuccess.bind(this)),t.onError&&(this.onError=t.onError.bind(this))}async setThreads(t){const s=this.getValidThreads(t);this.thread_count=s,await this.run()}is_initing=!1;async init(){this.is_initing=!0,await this.partRepository.update({uid:this.baseParams.uid,upload_status:types_1.UploadStatus.PROCESS},{upload_status:types_1.UploadStatus.NULL});const t=await 
this.fileRepository.findBy({uid:this.baseParams.uid,upload_status:types_1.UploadStatus.SOME_PART_SUCCESS});for(const s of t)await this.onSomePartSuccess(s);this.is_initing=!1}async start(){await this.run()}async stopFiles(t,s=!1){s&&(await this.partRepository.update({file_id:(0,typeorm_1.In)(t),is_paused:!1},{is_paused:!0}),await this.fileRepository.update({id:(0,typeorm_1.In)(t),is_paused:!1},{is_paused:!0}));const e=this.threads.filter((s=>t.includes(s.file_id)));await Promise.all(e.map((t=>t.stop()))),await this.start()}async stopAll(t=!1){t&&(await this.partRepository.update({uid:this.baseParams.uid},{is_paused:!0}),await this.fileRepository.update({uid:this.baseParams.uid,type:(0,typeorm_1.Not)(types_1.RecordType.FOLDER)},{is_paused:!0})),await Promise.all(this.threads.map((t=>t.stop())))}async run(){for(;this.isRunning;)await new Promise((t=>{setTimeout(t,200)}));this.isRunning=!0;const t=this.threads.length,s=this.thread_count;if(t<s){const e=s-t,i=async()=>await this.partRepository.findOne({where:{uid:this.baseParams.uid,is_paused:!1,upload_status:types_1.UploadStatus.NULL,host_id:this.baseParams.host_id},order:{created_at:"ASC"}});for(let t=0;t<e;t++){const t=await i();if(!t)break;const s=this.getThread(t);this.threads.push(s),await this.partRepository.update(t.id,{upload_status:types_1.UploadStatus.PROCESS}),s.start()}if(this.threads.length<s){await this.dequeueOneFile();await i()&&(this.isRunning=!1,await this.run())}}else if(t>s){const e=t-s,i=this.threads.slice(0,e);for(const t of i)await t.stop()}this.isRunning=!1}async dequeueOneFile(){for(;isDequeueing;)await new Promise((t=>{setTimeout(t,50)}));isDequeueing=!0;try{const t=`${this.hostType}_upload_status`,s=`${this.hostType}_host_id`,e=await this.fileRepository.findOne({where:{uid:this.baseParams.uid,is_paused:!1,encode_status:types_1.EncodeStatus.SUCCESS,[t]:(0,typeorm_1.IsNull)(),upload_status:(0,typeorm_1.In)([types_1.UploadStatus.NULL,types_1.UploadStatus.PROCESS])},order:{created_at:"ASC"}});if(!e)return;await this.fileRepository.update(e.id,{[t]:types_1.UploadStatus.PROCESS,upload_status:types_1.UploadStatus.PROCESS}),await(0,prepare_1.prepare)({file_id:e.id,sdk_domain:this.baseParams.sdk_domain,fileRepository:this.fileRepository});const{encoded:i}=e,{affix:a,source:o,txt:r,img:p,media:d}=i,n=[...o?.parts??[],...r?.parts??[],...p?.parts??[],...d?.parts??[]];a?.forEach((t=>{t.parts?.forEach((t=>{n.push(t)}))}));const u=e[s],h=n.map((t=>({uid:this.baseParams.uid,file_id:e.id,info:t,output_root:e.output_root,host_id:u,host_type:this.hostType})));return void await this.partRepository.save(h)}finally{isDequeueing=!1}}}exports.BaseUploader=BaseUploader;
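For readability, a sketch of the contract a concrete uploader fulfils: `getThread` hands `BaseUploader` a start/stop pair for one part, and the Ali/Baidu implementations run the actual transfer in a piscina worker pool backed by `@soga/part-uploader`. The shape below is inferred from the declarations and minified bodies above; names outside the diff are placeholders, and the full `ThreadType` declaration is cut off in the diff, so extra fields may exist.

```ts
import { UploadPart } from '@soga/entities';

// ThreadType as used by BaseUploader.run(): one start/stop handle per upload part.
type ThreadTypeSketch = {
  file_id: number;
  part_id: number;
  uid: number;
  start: () => Promise<void>; // kicks off the piscina task ("uploadAli" / "uploadBaidu")
  stop: () => Promise<void>;  // removes the handle so run() stops scheduling it
};

// Rough outline of a concrete getThread(), per the minified AliUploader/BaiduUploader:
declare function runInWorkerPool(task: unknown): Promise<unknown>; // stand-in for piscina.run

function getThreadSketch(part: UploadPart): ThreadTypeSketch {
  return {
    file_id: part.file_id,
    part_id: part.id,
    uid: part.uid,
    start: async () => {
      // 1. load the owning UploadFile, 2. run the part upload in the worker pool,
      // 3. persist upload_result/upload_status, 4. notify onPartSuccess, 5. reschedule.
      await runInWorkerPool({ part_id: part.id });
    },
    stop: async () => {
      // drop this handle from the active thread list
    },
  };
}
```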
package/dist/main.d.ts CHANGED
@@ -1,5 +1,6 @@
- import { UploaderParams } from './types/main';
- import { Uploader } from './uploader/uploader';
- export declare const getUploader: (params: UploaderParams) => Promise<Uploader>;
- export declare const getUploaderByUid: (uid: number) => Promise<Uploader>;
+ export * from './host-uploader/baidu';
+ export * from './host-uploader/ali';
+ import { GetUploaderParams } from './types/main';
+ import { Uploader } from './uploader';
+ export declare const getUploader: (params: GetUploaderParams) => Promise<Uploader>;
  export { UploaderParams } from './types/main';
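The entry point keeps its name, but its params type changes: the 0.1.x access-token and cloud-record callbacks are gone, and `sdk_domain` is the only new required field. A minimal sketch of the new call, with placeholder values for anything not defined in the diff:

```ts
import { DataSource } from 'typeorm';
import { getUploader } from '@soga/uploader';

declare const dataSource: DataSource; // assumed TypeORM DataSource, initialized elsewhere

async function bootUploader() {
  const uploader = await getUploader({
    uid: 1,                                // placeholder user id
    sdk_domain: 'https://example.invalid', // placeholder; resolved via @soga/sdk
    dataSource,
    onSuccess: async (file) => console.log('uploaded', file.id),
  });
  await uploader.start(); // scans distinct ali/baidu host ids and starts a host uploader per id
  // Note: getUploaderByUid() from 0.1.x is no longer exported.
}
```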
package/dist/main.js CHANGED
@@ -1 +1 @@
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.getUploaderByUid=exports.getUploader=void 0;const uploader_1=require("./uploader/uploader"),instanceMap=new Map,getUploader=async e=>{for(;instanceMap.get(e.uid)?.initing;)await new Promise((e=>setTimeout(e,100)));if(instanceMap.get(e.uid)?.uploader)return instanceMap.get(e.uid).uploader;const t={uploader:new uploader_1.Uploader(e),initing:!0};return instanceMap.set(e.uid,t),await t.uploader.init(),t.initing=!1,instanceMap.set(e.uid,t),t.uploader};exports.getUploader=getUploader;const getUploaderByUid=async e=>{if(instanceMap.has(e)){for(;instanceMap.get(e)?.initing;)await new Promise((e=>setTimeout(e,100)));return instanceMap.get(e)?.uploader}return null};exports.getUploaderByUid=getUploaderByUid;
+ "use strict";var __createBinding=this&&this.__createBinding||(Object.create?function(e,t,r,i){void 0===i&&(i=r);var a=Object.getOwnPropertyDescriptor(t,r);a&&!("get"in a?!t.__esModule:a.writable||a.configurable)||(a={enumerable:!0,get:function(){return t[r]}}),Object.defineProperty(e,i,a)}:function(e,t,r,i){void 0===i&&(i=r),e[i]=t[r]}),__exportStar=this&&this.__exportStar||function(e,t){for(var r in e)"default"===r||Object.prototype.hasOwnProperty.call(t,r)||__createBinding(t,e,r)};Object.defineProperty(exports,"__esModule",{value:!0}),exports.getUploader=void 0,__exportStar(require("./host-uploader/baidu"),exports),__exportStar(require("./host-uploader/ali"),exports);const uploader_1=require("./uploader"),instanceMap=new Map,getUploader=async e=>{for(;instanceMap.get(e.uid)?.initing;)await new Promise((e=>setTimeout(e,100)));if(instanceMap.get(e.uid)?.uploader)return instanceMap.get(e.uid).uploader;const t={uploader:new uploader_1.Uploader(e),initing:!0};return instanceMap.set(e.uid,t),await t.uploader.init(),t.initing=!1,instanceMap.set(e.uid,t),t.uploader};exports.getUploader=getUploader;
@@ -1,46 +1,19 @@
  import { UploadFile } from '@soga/entities';
- import { RecordDetail, RecordFtype, RecordType } from '@soga/types';
  import { DataSource } from 'typeorm';
  interface CommonParams {
  uid: number;
+ sdk_domain: string;
  dataSource: DataSource;
  onProgress?: (file: UploadFile) => Promise<void>;
  onSuccess?: (file: UploadFile) => Promise<void>;
  onError?: (error: Error, file: UploadFile) => Promise<void>;
- createCloudRecord: (params: Partial<{
- uid: number;
- space_id: number;
- name: string;
- parent_id: number;
- type: RecordType;
- ftype: RecordFtype;
- }>) => Promise<RecordDetail>;
- updateCloudRecord: (params: {
- uid: number;
- space_id: number;
- record_id: number;
- manifest: string;
- parent_id?: number;
- }) => Promise<RecordDetail>;
  }
  export interface UploaderParams extends CommonParams {
- getBaiduAccessToken: (baidu_host_id: number) => Promise<string>;
- getAliAccessToken: (ali_host_id: number) => Promise<string>;
- }
- export interface ProcessorParams extends CommonParams {
  host_id: number;
  threads?: number;
  debug?: boolean;
  }
- export interface BaiduProcessorParams extends ProcessorParams {
- getAccessToken: (baidu_host_id: number) => Promise<string>;
- }
- export interface GetBaiduProcessorParams extends BaiduProcessorParams {
- }
- export interface AliProcessorParams extends ProcessorParams {
- getAccessToken: (ali_host_id: number) => Promise<string>;
- }
- export interface GetAliProcessorParams extends AliProcessorParams {
+ export interface GetUploaderParams extends CommonParams {
  }
  export type ThreadType = {
  file_id: number;
@@ -1,15 +1,15 @@
  import { DataSource, Repository } from 'typeorm';
  import { UploadFile, UploadPart } from '@soga/entities';
- import { UploaderParams } from '../types/main';
- import { BaseProcessor } from '../processor/base';
+ import { GetUploaderParams } from './types/main';
+ import { BaseUploader } from './host-uploader/base';
  export declare class Uploader {
- protected params: UploaderParams;
+ protected params: GetUploaderParams;
  protected uid: number;
  protected dataSource: DataSource;
  protected fileRepository: Repository<UploadFile>;
  protected partRepository: Repository<UploadPart>;
- constructor(params: UploaderParams);
- protected processorMap: Map<number, BaseProcessor>;
+ constructor(params: GetUploaderParams);
+ protected processorMap: Map<number, BaseUploader>;
  init(): Promise<void>;
  start(): Promise<void>;
  stopAll(change_db?: boolean): Promise<void>;
@@ -0,0 +1 @@
+ "use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.Uploader=void 0;const types_1=require("@soga/types"),typeorm_1=require("typeorm"),entities_1=require("@soga/entities"),baidu_1=require("./host-uploader/baidu"),ali_1=require("./host-uploader/ali");class Uploader{params;uid;dataSource;fileRepository;partRepository;constructor(t){this.params=t,this.uid=t.uid,this.dataSource=t.dataSource,this.fileRepository=this.dataSource.getRepository(entities_1.UploadFile),this.partRepository=this.dataSource.getRepository(entities_1.UploadPart)}processorMap=new Map;async init(){}async start(){await Promise.all([this.startAli(),this.startBaidu()])}async stopAll(t=!1){const s=this.processorMap.values();for(const a of s)await a.stopAll(t)}async stopFiles(t,s=!1){const a=this.processorMap.values();for(const i of a)await i.stopFiles(t,s)}async repairAllParts(){await this.partRepository.update({uid:this.uid,upload_status:types_1.UploadStatus.ERROR},{upload_status:types_1.UploadStatus.NULL})}async repairParts(t){await this.partRepository.update({file_id:(0,typeorm_1.In)(t),upload_status:types_1.UploadStatus.ERROR},{upload_status:types_1.UploadStatus.NULL})}async startHost(t,s){if(this.processorMap.has(t)){const s=this.processorMap.get(t);return void await s.start()}const a={uid:this.uid,sdk_domain:this.params.sdk_domain,dataSource:this.dataSource,host_id:t,onProgress:this.params.onProgress,onSuccess:this.params.onSuccess,onError:this.params.onError};if(s==types_1.HostType.BAIDU){const s=await(0,baidu_1.getBaiduUploader)(a);this.processorMap.set(t,s),await s.start()}else if(s==types_1.HostType.ALI){const s=await(0,ali_1.getAliUploader)(a);this.processorMap.set(t,s),await s.start()}}async startAli(){const t=(await this.fileRepository.createQueryBuilder("file").select("DISTINCT file.ali_host_id").where("file.uid = :uid",{uid:this.uid}).getRawMany()).map((t=>t.ali_host_id)).filter((t=>!!t));if(t.length)for(const s of t)await this.startHost(s,types_1.HostType.ALI)}async startBaidu(){const t=(await this.fileRepository.createQueryBuilder("file").select("DISTINCT file.baidu_host_id").where("file.uid = :uid",{uid:this.uid}).getRawMany()).map((t=>t.baidu_host_id)).filter((t=>!!t));if(t.length)for(const s of t)await this.startHost(s,types_1.HostType.BAIDU)}}exports.Uploader=Uploader;
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@soga/uploader",
- "version": "0.1.17",
+ "version": "0.2.1",
  "publishConfig": {
  "access": "public"
  },
@@ -15,49 +15,51 @@
  "minify": "ts-node ./scripts/minify",
  "demo_backup": "ts-node ./demo/demo.ts",
  "demo": "ts-node ./demo/demo.ts",
- "worker": "tsc && ts-node ./demo/worker.ts",
  "test": "jest",
  "dev": "ts-node ./src/main.ts",
  "lint": "eslint . --ext .ts",
  "prepublishOnly": "npm run build"
  },
  "devDependencies": {
+ "@soga/entities": "file:../entities",
+ "@soga/part-uploader": "file:../part-uploader",
+ "@soga/sdk": "file:../sdk",
+ "@soga/test": "^0.2.0",
+ "@soga/types": "file:../types",
+ "@soga/utils": "file:../utils",
  "@types/fs-extra": "^11.0.4",
  "@types/glob": "^8.1.0",
- "@types/jest": "^29.5.4",
+ "@types/jest": "^29.5.14",
  "@types/node": "^20.8.7",
  "@typescript-eslint/eslint-plugin": "^6.4.1",
  "@typescript-eslint/parser": "^6.4.1",
  "eslint": "^8.47.0",
- "eslint-config-prettier": "^9.0.0",
- "eslint-plugin-jest": "^27.2.3",
- "eslint-plugin-prettier": "^5.0.0",
- "glob": "^10.3.3",
- "jest": "^29.6.3",
+ "eslint-config-prettier": "^9.1.0",
+ "eslint-plugin-jest": "^27.9.0",
+ "eslint-plugin-prettier": "^5.2.3",
+ "glob": "^10.4.5",
+ "jest": "^29.7.0",
  "prettier": "^3.0.2",
  "rimraf": "^6.0.1",
  "terser": "^5.19.2",
- "ts-jest": "^29.1.1",
+ "ts-jest": "^29.2.5",
  "ts-node": "^10.9.1",
  "typescript": "^5.1.6"
  },
  "keywords": [],
  "author": "",
  "license": "ISC",
- "dependencies": {
- "@soga/baidu-ua": "^0.1.16",
- "@soga/entities": "^0.1.17",
- "@soga/single-uploader": "^0.1.13",
- "@soga/types": "^0.1.13",
- "@soga/utils": "^0.1.13",
- "axios": "^1.7.9",
- "fs-extra": "^11.2.0",
- "piscina": "^5.0.0-alpha.0"
- },
  "peerDependencies": {
- "better-sqlite3": "*",
  "level": "*",
  "typeorm": "*"
  },
- "gitHead": "8413c83d57b0d94e87ee82e63685efd4d810d640"
+ "dependencies": {
+ "@soga/entities": "^0.2.1",
+ "@soga/part-uploader": "^0.2.1",
+ "@soga/sdk": "^0.2.1",
+ "@soga/types": "^0.2.1",
+ "@soga/utils": "^0.2.1",
+ "piscina": "^4.9.2"
+ },
+ "gitHead": "92e93cbd89d863f10407c93b88db5ec5029ed426"
  }
@@ -1,11 +0,0 @@
- import { AliProcessorParams, GetAliProcessorParams, ThreadType } from '../types/main';
- import { BaseProcessor } from './base';
- import { UploadPart } from '@soga/entities';
- import { HostType } from '@soga/types';
- export declare class AliProcessor extends BaseProcessor {
- protected hostType: HostType;
- private params;
- constructor(params: AliProcessorParams);
- getThread(part: UploadPart): ThreadType;
- }
- export declare const getAliProcessor: (params: GetAliProcessorParams) => Promise<AliProcessor>;
@@ -1 +0,0 @@
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.getAliProcessor=exports.AliProcessor=void 0;const single_uploader_1=require("@soga/single-uploader"),base_1=require("./base"),types_1=require("@soga/types"),processors=new Map;class AliProcessor extends base_1.BaseProcessor{hostType=types_1.HostType.ALI;params;constructor(s){super(s),this.params=s}getThread(s){let i,t=null;return{file_id:s.file_id,part_id:s.id,uid:s.uid,start:async()=>{try{const{filepath:e,output_root:r,info:o}=s,a=await this.fileRepository.findOneBy({id:s.file_id}),d={file_id:s.file_id,filePath:e,part:o,uid:s.uid,part_id:s.id,outputRoot:r,drive_id:a.cloud_info.hosts.ali.drive_id,cloud_parent_id:a.cloud_info.hosts.ali.file_id,getAccessToken:async()=>await this.params.getAccessToken(a.ali_host_id),onProgress:async s=>{await this.onPartProgress(s)}};i=(0,single_uploader_1.getAliUploader)(d),t=await i.start(),t&&(await this.partRepository.update(s.id,{ali_result:t,ali_status:types_1.UploadStatus.SUCCESS}),await this.onPartSuccess(await this.fileRepository.findOneBy({id:s.file_id})))}catch(i){await this.onPartError(i,await this.fileRepository.findOneBy({id:s.file_id}))}finally{t&&(this.threads=this.threads.filter((i=>i.part_id!==s.id))),await this.start()}},stop:async()=>{i&&(await i.stop(),this.threads=this.threads.filter((i=>i.part_id!==s.id)))}}}}exports.AliProcessor=AliProcessor;const getAliProcessor=async s=>{const{uid:i,host_id:t}=s,e=`${i}_${t}`;let r=processors.get(e);if(r)for(;r.is_initing;)await new Promise((s=>setTimeout(s,20)));else r=new AliProcessor(s),r.setThreads(3),processors.set(e,r),await r.init();return r};exports.getAliProcessor=getAliProcessor;
@@ -1,11 +0,0 @@
- import { BaiduProcessorParams, GetBaiduProcessorParams, ThreadType } from '../types/main';
- import { BaseProcessor } from './base';
- import { UploadPart } from '@soga/entities';
- import { HostType } from '@soga/types';
- export declare class BaiduProcessor extends BaseProcessor {
- protected hostType: HostType;
- private params;
- constructor(params: BaiduProcessorParams);
- getThread(part: UploadPart): ThreadType;
- }
- export declare const getBaiduProcessor: (params: GetBaiduProcessorParams) => Promise<BaiduProcessor>;
@@ -1 +0,0 @@
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.getBaiduProcessor=exports.BaiduProcessor=void 0;const single_uploader_1=require("@soga/single-uploader"),base_1=require("./base"),types_1=require("@soga/types"),processors=new Map;class BaiduProcessor extends base_1.BaseProcessor{hostType=types_1.HostType.BAIDU;params;constructor(s){super(s),this.params=s}getThread(s){let t,e=null;return{file_id:s.file_id,part_id:s.id,uid:s.uid,start:async()=>{try{const{filepath:i,output_root:a,info:r}=s,o=await this.fileRepository.findOneBy({id:s.file_id}),d={file_id:s.file_id,filePath:i,part:r,uid:s.uid,part_id:s.id,outputRoot:a,cloudRoot:o.cloud_info.hosts.baidu.path,getAccessToken:async()=>await this.params.getAccessToken(o.baidu_host_id),onProgress:async s=>{await this.onPartProgress(s)}};t=(0,single_uploader_1.getBaiduUploader)(d),e=await t.start(),e&&(await this.partRepository.update(s.id,{baidu_result:e,baidu_status:types_1.UploadStatus.SUCCESS}),await this.onPartSuccess(await this.fileRepository.findOneBy({id:s.file_id})))}catch(t){await this.onPartError(t,await this.fileRepository.findOneBy({id:s.file_id}))}finally{e&&(this.threads=this.threads.filter((t=>t.part_id!==s.id))),await this.start()}},stop:async()=>{t&&(await t.stop(),this.threads=this.threads.filter((t=>t.part_id!==s.id)))}}}}exports.BaiduProcessor=BaiduProcessor;const getBaiduProcessor=async s=>{const{uid:t,host_id:e}=s,i=`${t}_${e}`;let a=processors.get(i);if(a)for(;a.is_initing;)await new Promise((s=>setTimeout(s,20)));else a=new BaiduProcessor(s),a.setThreads(5),processors.set(i,a),await a.init();return a};exports.getBaiduProcessor=getBaiduProcessor;
@@ -1 +0,0 @@
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.BaseProcessor=void 0;const typeorm_1=require("typeorm"),entities_1=require("@soga/entities"),types_1=require("@soga/types"),prepare_1=require("../hooks/prepare"),utils_1=require("@soga/utils"),complete_1=require("../hooks/complete");let isDequeueing=!1;class BaseProcessor{hostType;onProgress=async()=>{};onSuccess=async()=>{};onError=async()=>{};fileRepository;partRepository;isRunning=!1;thread_count=3;maxThreads=10;threads=[];baseParams;getValidThreads(t){return Math.min(Math.max(t??0,0),this.maxThreads)}async onPartProgress(t){const{file_id:s,part_id:i,host_type:e,percent:a,type:o}=t,r=`${e}_percent`;await this.partRepository.update(i,{[r]:a});const p=await this.partRepository.findBy({file_id:s});let d=0,u=0;for(const t of p)d+=t.size,u+=t.size*t[r];const n=u/d,h=await this.fileRepository.findOneBy({id:s});h.progress[o].percent=n,h.percent=(0,utils_1.getProgressPercent)(h.progress),await this.fileRepository.update(s,{progress:h.progress,percent:h.percent}),await this.onProgress(h)}async onPartSuccess(t){const{ali_host_id:s,baidu_host_id:i}=t;if(s){if((await this.partRepository.findBy({file_id:t.id,ali_status:(0,typeorm_1.Not)(types_1.UploadStatus.SUCCESS)})).length)return}if(i){if((await this.partRepository.findBy({file_id:t.id,baidu_status:(0,typeorm_1.Not)(types_1.UploadStatus.SUCCESS)})).length)return}const e={ali:{},baidu:{}},a=await this.partRepository.findBy({file_id:t.id});for(const t of a)t.ali_status===types_1.UploadStatus.SUCCESS&&(e.ali[t.info.file]=t.ali_result),t.baidu_status===types_1.UploadStatus.SUCCESS&&(e.baidu[t.info.file]=t.baidu_result);await this.fileRepository.update(t.id,{upload_data:e}),await(0,complete_1.complete)({file_id:t.id,fileRepository:this.fileRepository,updateCloudRecord:this.baseParams.updateCloudRecord}),t.progress.END.percent=1,t.percent=1,await this.fileRepository.update(t.id,{progress:t.progress,percent:t.percent,upload_status:types_1.UploadStatus.SUCCESS}),await this.partRepository.delete({file_id:t.id});const o=await this.fileRepository.findOneBy({id:t.id});await this.onSuccess(o)}async onPartError(t,s){s.upload_status!==types_1.UploadStatus.ERROR&&(await this.fileRepository.update(s.id,{upload_status:types_1.UploadStatus.ERROR}),await this.onError(t,s))}constructor(t){this.baseParams=t,this.fileRepository=t.dataSource.getRepository(entities_1.UploadFile),this.partRepository=t.dataSource.getRepository(entities_1.UploadPart),this.thread_count=this.getValidThreads(t.threads??this.thread_count),t.onProgress&&(this.onProgress=t.onProgress.bind(this)),t.onSuccess&&(this.onSuccess=t.onSuccess.bind(this)),t.onError&&(this.onError=t.onError.bind(this))}async setThreads(t){const s=this.getValidThreads(t);this.thread_count=s,await this.run()}is_initing=!1;async init(){this.is_initing=!0;const t=`${this.hostType}_status`;this.partRepository.update({uid:this.baseParams.uid,[t]:types_1.UploadStatus.PROCESS},{[t]:types_1.UploadStatus.NULL}),this.is_initing=!1}async start(){await this.run()}async stopFiles(t,s=!1){s&&(await this.partRepository.update({file_id:(0,typeorm_1.In)(t),is_paused:!1},{is_paused:!0}),await this.fileRepository.update({id:(0,typeorm_1.In)(t),is_paused:!1},{is_paused:!0}));const i=this.threads.filter((s=>t.includes(s.file_id)));await Promise.all(i.map((t=>t.stop()))),await this.start()}async stopAll(t=!1){t&&(await this.partRepository.update({uid:this.baseParams.uid},{is_paused:!0}),await 
this.fileRepository.update({uid:this.baseParams.uid,is_folder:!1},{is_paused:!0})),await Promise.all(this.threads.map((t=>t.stop())))}async run(){for(;this.isRunning;)await new Promise((t=>{setTimeout(t,200)}));this.isRunning=!0;const t=`${this.hostType}_status`,s=`${this.hostType}_host_id`,i=this.threads.length,e=this.thread_count;if(i<e){const a=e-i;for(let i=0;i<a;i++){const i=await this.partRepository.findOne({where:{uid:this.baseParams.uid,is_paused:!1,[t]:types_1.UploadStatus.NULL,[s]:this.baseParams.host_id},order:{created_at:"ASC"}});if(!i)break;const e=this.getThread(i);this.threads.push(e),await this.partRepository.update(i.id,{[t]:types_1.UploadStatus.PROCESS}),e.start()}if(this.threads.length<e){await this.dequeueOneFile();const i=await this.partRepository.findOneBy({uid:this.baseParams.uid,is_paused:!1,[t]:types_1.UploadStatus.NULL,[s]:this.baseParams.host_id});this.isRunning=!1,i&&await this.run()}}else if(i>e){const t=i-e,s=this.threads.slice(0,t);for(const t of s)await t.stop()}this.isRunning=!1}async dequeueOneFile(){for(;isDequeueing;)await new Promise((t=>{setTimeout(t,50)}));isDequeueing=!0;try{const t=await this.fileRepository.findOne({where:{uid:this.baseParams.uid,is_paused:!1,is_folder:!1,encode_status:types_1.EncodeStatus.SUCCESS,upload_status:types_1.UploadStatus.NULL},order:{created_at:"ASC"}});if(!t)return;await this.fileRepository.update(t.id,{upload_status:types_1.UploadStatus.PROCESS}),await(0,prepare_1.prepare)({file_id:t.id,fileRepository:this.fileRepository,createCloudRecord:this.baseParams.createCloudRecord});const{source_data:s,txt_data:i,img_data:e,media_data:a}=t,o=[...s.parts??[],...i.parts??[],...e.parts??[],...a.parts??[]].map((s=>({uid:this.baseParams.uid,file_id:t.id,info:s,filepath:t.path,output_root:t.output_root,size:s.size,ali_host_id:t.ali_host_id||null,baidu_host_id:t.baidu_host_id||null})));return void await this.partRepository.save(o)}finally{isDequeueing=!1}}}exports.BaseProcessor=BaseProcessor;
@@ -1 +0,0 @@
- "use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.Uploader=void 0;const types_1=require("@soga/types"),typeorm_1=require("typeorm"),entities_1=require("@soga/entities"),baidu_1=require("../processor/baidu"),ali_1=require("../processor/ali");class Uploader{params;uid;dataSource;fileRepository;partRepository;constructor(t){this.params=t,this.uid=t.uid,this.dataSource=t.dataSource,this.fileRepository=this.dataSource.getRepository(entities_1.UploadFile),this.partRepository=this.dataSource.getRepository(entities_1.UploadPart)}processorMap=new Map;async init(){}async start(){await Promise.all([this.startAli(),this.startBaidu()])}async stopAll(t=!1){const s=this.processorMap.values();for(const a of s)await a.stopAll(t)}async stopFiles(t,s=!1){const a=this.processorMap.values();for(const e of a)await e.stopFiles(t,s)}async repairAllParts(){await Promise.all([this.partRepository.update({uid:this.uid,baidu_status:types_1.UploadStatus.ERROR},{baidu_status:types_1.UploadStatus.NULL}),this.partRepository.update({uid:this.uid,ali_status:types_1.UploadStatus.ERROR},{ali_status:types_1.UploadStatus.NULL})])}async repairParts(t){await Promise.all([this.partRepository.update({file_id:(0,typeorm_1.In)(t),baidu_status:types_1.UploadStatus.ERROR},{baidu_status:types_1.UploadStatus.NULL}),this.partRepository.update({file_id:(0,typeorm_1.In)(t),ali_status:types_1.UploadStatus.ERROR},{ali_status:types_1.UploadStatus.NULL})])}async startHost(t,s){if(this.processorMap.has(t)){const s=this.processorMap.get(t);return void await s.start()}const a={uid:this.uid,dataSource:this.dataSource,host_id:t,createCloudRecord:this.params.createCloudRecord,updateCloudRecord:this.params.updateCloudRecord,onProgress:this.params.onProgress,onSuccess:this.params.onSuccess,onError:this.params.onError};if(s==types_1.HostType.BAIDU){const s=await(0,baidu_1.getBaiduProcessor)({...a,getAccessToken:this.params.getBaiduAccessToken});this.processorMap.set(t,s),await s.start()}else if(s==types_1.HostType.ALI){const s=await(0,ali_1.getAliProcessor)({...a,getAccessToken:this.params.getAliAccessToken});this.processorMap.set(t,s),await s.start()}}async startAli(){const t=(await this.fileRepository.createQueryBuilder("file").select("DISTINCT file.ali_host_id").where("file.uid = :uid",{uid:this.uid}).getRawMany()).map((t=>t.ali_host_id));if(t.length)for(const s of t)await this.startHost(s,types_1.HostType.ALI)}async startBaidu(){const t=(await this.fileRepository.createQueryBuilder("file").select("DISTINCT file.baidu_host_id").where("file.uid = :uid",{uid:this.uid}).getRawMany()).map((t=>t.baidu_host_id));if(t.length)for(const s of t)await this.startHost(s,types_1.HostType.BAIDU)}}exports.Uploader=Uploader;