@soga/fetcher 0.3.0 → 1.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/main.d.mts +103 -0
- package/dist/main.d.ts +103 -7
- package/dist/main.js +1 -34
- package/dist/main.mjs +1 -0
- package/package.json +15 -13
- package/README.md +0 -1
- package/dist/ali/fetch-buffer.d.ts +0 -2
- package/dist/ali/fetch-buffer.js +0 -83
- package/dist/ali/fetcher.d.ts +0 -28
- package/dist/ali/fetcher.js +0 -145
- package/dist/ali/get-buffer.d.ts +0 -3
- package/dist/ali/get-buffer.js +0 -42
- package/dist/baidu/fetch-buffer.d.ts +0 -2
- package/dist/baidu/fetch-buffer.js +0 -109
- package/dist/baidu/fetcher.d.ts +0 -35
- package/dist/baidu/fetcher.js +0 -243
- package/dist/baidu/get-buffer.d.ts +0 -3
- package/dist/baidu/get-buffer.js +0 -41
- package/dist/common/common.d.ts +0 -9
- package/dist/common/common.js +0 -8
- package/dist/common/low-store.d.ts +0 -19
- package/dist/common/low-store.js +0 -91
- package/dist/common/types.d.ts +0 -29
- package/dist/common/types.js +0 -2
package/dist/main.d.mts
ADDED
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
import { AxiosInstance } from 'axios';
|
|
2
|
+
import { Repository, DataSource } from 'typeorm';
|
|
3
|
+
import { FetchUrl } from '@soga/entities';
|
|
4
|
+
|
|
5
|
+
declare class BaiduFetcher {
|
|
6
|
+
axios: AxiosInstance;
|
|
7
|
+
private domain;
|
|
8
|
+
protected urlRepository: Repository<FetchUrl>;
|
|
9
|
+
constructor(dataSource: DataSource);
|
|
10
|
+
getDownloadUrl({ access_token, fs_id, ua, }: {
|
|
11
|
+
access_token: string;
|
|
12
|
+
fs_id: number;
|
|
13
|
+
ua?: string;
|
|
14
|
+
}): Promise<string>;
|
|
15
|
+
getArrayBuffer(params: {
|
|
16
|
+
access_token: string;
|
|
17
|
+
fs_id: number;
|
|
18
|
+
start: number;
|
|
19
|
+
end: number;
|
|
20
|
+
abort_controller?: AbortController;
|
|
21
|
+
ua?: string;
|
|
22
|
+
}): Promise<ArrayBuffer>;
|
|
23
|
+
getDownloadUrlByRange({ access_token, fs_id, start, end, ua, }: {
|
|
24
|
+
access_token: string;
|
|
25
|
+
fs_id: number;
|
|
26
|
+
start: number;
|
|
27
|
+
end: number;
|
|
28
|
+
ua?: string;
|
|
29
|
+
}): Promise<string>;
|
|
30
|
+
private cleanExpired;
|
|
31
|
+
private fetchBestDlink;
|
|
32
|
+
private fetchDownloadUrl;
|
|
33
|
+
private multipleGetArrayBuffer;
|
|
34
|
+
private getKey;
|
|
35
|
+
}
|
|
36
|
+
declare const getBaiduFetcher: (dataSource: DataSource) => BaiduFetcher;
|
|
37
|
+
|
|
38
|
+
declare class AliFetcher {
|
|
39
|
+
axios: AxiosInstance;
|
|
40
|
+
private domain;
|
|
41
|
+
protected urlRepository: Repository<FetchUrl>;
|
|
42
|
+
constructor(dataSource: DataSource);
|
|
43
|
+
getDownloadUrl({ access_token, drive_id, file_id, ua, }: {
|
|
44
|
+
access_token: string;
|
|
45
|
+
drive_id: string;
|
|
46
|
+
file_id: string;
|
|
47
|
+
ua?: string;
|
|
48
|
+
}): Promise<string>;
|
|
49
|
+
getArrayBuffer(params: {
|
|
50
|
+
access_token: string;
|
|
51
|
+
drive_id: string;
|
|
52
|
+
file_id: string;
|
|
53
|
+
start: number;
|
|
54
|
+
end: number;
|
|
55
|
+
ua?: string;
|
|
56
|
+
abort_controller?: AbortController;
|
|
57
|
+
}): Promise<ArrayBuffer>;
|
|
58
|
+
private multipleGetArrayBuffer;
|
|
59
|
+
private fetchDownloadInfo;
|
|
60
|
+
private getKey;
|
|
61
|
+
}
|
|
62
|
+
declare const getAliFetcher: (dataSource: DataSource) => AliFetcher;
|
|
63
|
+
|
|
64
|
+
interface CommonBufferParams {
|
|
65
|
+
id?: string;
|
|
66
|
+
dataSource: DataSource;
|
|
67
|
+
record_id: number;
|
|
68
|
+
part_md5: string;
|
|
69
|
+
start: number;
|
|
70
|
+
end: number;
|
|
71
|
+
total: number;
|
|
72
|
+
host_id: number;
|
|
73
|
+
host_vip_type: number;
|
|
74
|
+
access_token: string;
|
|
75
|
+
abort_controller?: AbortController;
|
|
76
|
+
ua?: string;
|
|
77
|
+
}
|
|
78
|
+
interface BaiduBufferParams extends CommonBufferParams {
|
|
79
|
+
fs_id: number;
|
|
80
|
+
}
|
|
81
|
+
interface AliBufferParams extends CommonBufferParams {
|
|
82
|
+
drive_id: string;
|
|
83
|
+
file_id: string;
|
|
84
|
+
}
|
|
85
|
+
interface BaiduPreviewBufferParams extends BaiduBufferParams {
|
|
86
|
+
cache_folder: string;
|
|
87
|
+
}
|
|
88
|
+
interface AliPreviewBufferParams extends AliBufferParams {
|
|
89
|
+
cache_folder: string;
|
|
90
|
+
}
|
|
91
|
+
type CacheType = 'preview' | 'download';
|
|
92
|
+
|
|
93
|
+
declare function fetchBaiduBuffer(params: BaiduBufferParams): Promise<Buffer<ArrayBuffer>>;
|
|
94
|
+
|
|
95
|
+
declare function fetchAliBuffer(params: AliBufferParams): Promise<Buffer<ArrayBuffer>>;
|
|
96
|
+
|
|
97
|
+
declare const getBaiduPreviewBuffer: (params: BaiduPreviewBufferParams) => Promise<any>;
|
|
98
|
+
declare const getBaiduDownloadBuffer: (params: BaiduPreviewBufferParams) => Promise<any>;
|
|
99
|
+
|
|
100
|
+
declare const getAliPreviewBuffer: (params: AliPreviewBufferParams) => Promise<any>;
|
|
101
|
+
declare const getAliDownloadBuffer: (params: AliPreviewBufferParams) => Promise<any>;
|
|
102
|
+
|
|
103
|
+
export { type AliBufferParams, AliFetcher, type AliPreviewBufferParams, type BaiduBufferParams, BaiduFetcher, type BaiduPreviewBufferParams, type CacheType, fetchAliBuffer, fetchBaiduBuffer, getAliDownloadBuffer, getAliFetcher, getAliPreviewBuffer, getBaiduDownloadBuffer, getBaiduFetcher, getBaiduPreviewBuffer };
|
package/dist/main.d.ts
CHANGED
|
@@ -1,7 +1,103 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
1
|
+
import { AxiosInstance } from 'axios';
|
|
2
|
+
import { Repository, DataSource } from 'typeorm';
|
|
3
|
+
import { FetchUrl } from '@soga/entities';
|
|
4
|
+
|
|
5
|
+
declare class BaiduFetcher {
|
|
6
|
+
axios: AxiosInstance;
|
|
7
|
+
private domain;
|
|
8
|
+
protected urlRepository: Repository<FetchUrl>;
|
|
9
|
+
constructor(dataSource: DataSource);
|
|
10
|
+
getDownloadUrl({ access_token, fs_id, ua, }: {
|
|
11
|
+
access_token: string;
|
|
12
|
+
fs_id: number;
|
|
13
|
+
ua?: string;
|
|
14
|
+
}): Promise<string>;
|
|
15
|
+
getArrayBuffer(params: {
|
|
16
|
+
access_token: string;
|
|
17
|
+
fs_id: number;
|
|
18
|
+
start: number;
|
|
19
|
+
end: number;
|
|
20
|
+
abort_controller?: AbortController;
|
|
21
|
+
ua?: string;
|
|
22
|
+
}): Promise<ArrayBuffer>;
|
|
23
|
+
getDownloadUrlByRange({ access_token, fs_id, start, end, ua, }: {
|
|
24
|
+
access_token: string;
|
|
25
|
+
fs_id: number;
|
|
26
|
+
start: number;
|
|
27
|
+
end: number;
|
|
28
|
+
ua?: string;
|
|
29
|
+
}): Promise<string>;
|
|
30
|
+
private cleanExpired;
|
|
31
|
+
private fetchBestDlink;
|
|
32
|
+
private fetchDownloadUrl;
|
|
33
|
+
private multipleGetArrayBuffer;
|
|
34
|
+
private getKey;
|
|
35
|
+
}
|
|
36
|
+
declare const getBaiduFetcher: (dataSource: DataSource) => BaiduFetcher;
|
|
37
|
+
|
|
38
|
+
declare class AliFetcher {
|
|
39
|
+
axios: AxiosInstance;
|
|
40
|
+
private domain;
|
|
41
|
+
protected urlRepository: Repository<FetchUrl>;
|
|
42
|
+
constructor(dataSource: DataSource);
|
|
43
|
+
getDownloadUrl({ access_token, drive_id, file_id, ua, }: {
|
|
44
|
+
access_token: string;
|
|
45
|
+
drive_id: string;
|
|
46
|
+
file_id: string;
|
|
47
|
+
ua?: string;
|
|
48
|
+
}): Promise<string>;
|
|
49
|
+
getArrayBuffer(params: {
|
|
50
|
+
access_token: string;
|
|
51
|
+
drive_id: string;
|
|
52
|
+
file_id: string;
|
|
53
|
+
start: number;
|
|
54
|
+
end: number;
|
|
55
|
+
ua?: string;
|
|
56
|
+
abort_controller?: AbortController;
|
|
57
|
+
}): Promise<ArrayBuffer>;
|
|
58
|
+
private multipleGetArrayBuffer;
|
|
59
|
+
private fetchDownloadInfo;
|
|
60
|
+
private getKey;
|
|
61
|
+
}
|
|
62
|
+
declare const getAliFetcher: (dataSource: DataSource) => AliFetcher;
|
|
63
|
+
|
|
64
|
+
interface CommonBufferParams {
|
|
65
|
+
id?: string;
|
|
66
|
+
dataSource: DataSource;
|
|
67
|
+
record_id: number;
|
|
68
|
+
part_md5: string;
|
|
69
|
+
start: number;
|
|
70
|
+
end: number;
|
|
71
|
+
total: number;
|
|
72
|
+
host_id: number;
|
|
73
|
+
host_vip_type: number;
|
|
74
|
+
access_token: string;
|
|
75
|
+
abort_controller?: AbortController;
|
|
76
|
+
ua?: string;
|
|
77
|
+
}
|
|
78
|
+
interface BaiduBufferParams extends CommonBufferParams {
|
|
79
|
+
fs_id: number;
|
|
80
|
+
}
|
|
81
|
+
interface AliBufferParams extends CommonBufferParams {
|
|
82
|
+
drive_id: string;
|
|
83
|
+
file_id: string;
|
|
84
|
+
}
|
|
85
|
+
interface BaiduPreviewBufferParams extends BaiduBufferParams {
|
|
86
|
+
cache_folder: string;
|
|
87
|
+
}
|
|
88
|
+
interface AliPreviewBufferParams extends AliBufferParams {
|
|
89
|
+
cache_folder: string;
|
|
90
|
+
}
|
|
91
|
+
type CacheType = 'preview' | 'download';
|
|
92
|
+
|
|
93
|
+
declare function fetchBaiduBuffer(params: BaiduBufferParams): Promise<Buffer<ArrayBuffer>>;
|
|
94
|
+
|
|
95
|
+
declare function fetchAliBuffer(params: AliBufferParams): Promise<Buffer<ArrayBuffer>>;
|
|
96
|
+
|
|
97
|
+
declare const getBaiduPreviewBuffer: (params: BaiduPreviewBufferParams) => Promise<any>;
|
|
98
|
+
declare const getBaiduDownloadBuffer: (params: BaiduPreviewBufferParams) => Promise<any>;
|
|
99
|
+
|
|
100
|
+
declare const getAliPreviewBuffer: (params: AliPreviewBufferParams) => Promise<any>;
|
|
101
|
+
declare const getAliDownloadBuffer: (params: AliPreviewBufferParams) => Promise<any>;
|
|
102
|
+
|
|
103
|
+
export { type AliBufferParams, AliFetcher, type AliPreviewBufferParams, type BaiduBufferParams, BaiduFetcher, type BaiduPreviewBufferParams, type CacheType, fetchAliBuffer, fetchBaiduBuffer, getAliDownloadBuffer, getAliFetcher, getAliPreviewBuffer, getBaiduDownloadBuffer, getBaiduFetcher, getBaiduPreviewBuffer };
|
package/dist/main.js
CHANGED
|
@@ -1,34 +1 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
-
if (k2 === undefined) k2 = k;
|
|
4
|
-
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
-
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
-
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
-
}
|
|
8
|
-
Object.defineProperty(o, k2, desc);
|
|
9
|
-
}) : (function(o, m, k, k2) {
|
|
10
|
-
if (k2 === undefined) k2 = k;
|
|
11
|
-
o[k2] = m[k];
|
|
12
|
-
}));
|
|
13
|
-
var __exportStar = (this && this.__exportStar) || function(m, exports) {
|
|
14
|
-
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
|
|
15
|
-
};
|
|
16
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
17
|
-
exports.getAliDownloadBuffer = exports.getAliPreviewBuffer = exports.getBaiduDownloadBuffer = exports.getBaiduPreviewBuffer = exports.fetchAliBuffer = exports.fetchBaiduBuffer = exports.getAliFetcher = exports.AliFetcher = exports.getBaiduFetcher = exports.BaiduFetcher = void 0;
|
|
18
|
-
var fetcher_1 = require("./baidu/fetcher");
|
|
19
|
-
Object.defineProperty(exports, "BaiduFetcher", { enumerable: true, get: function () { return fetcher_1.BaiduFetcher; } });
|
|
20
|
-
Object.defineProperty(exports, "getBaiduFetcher", { enumerable: true, get: function () { return fetcher_1.getBaiduFetcher; } });
|
|
21
|
-
var fetcher_2 = require("./ali/fetcher");
|
|
22
|
-
Object.defineProperty(exports, "AliFetcher", { enumerable: true, get: function () { return fetcher_2.AliFetcher; } });
|
|
23
|
-
Object.defineProperty(exports, "getAliFetcher", { enumerable: true, get: function () { return fetcher_2.getAliFetcher; } });
|
|
24
|
-
var fetch_buffer_1 = require("./baidu/fetch-buffer");
|
|
25
|
-
Object.defineProperty(exports, "fetchBaiduBuffer", { enumerable: true, get: function () { return fetch_buffer_1.fetchBaiduBuffer; } });
|
|
26
|
-
var fetch_buffer_2 = require("./ali/fetch-buffer");
|
|
27
|
-
Object.defineProperty(exports, "fetchAliBuffer", { enumerable: true, get: function () { return fetch_buffer_2.fetchAliBuffer; } });
|
|
28
|
-
var get_buffer_1 = require("./baidu/get-buffer");
|
|
29
|
-
Object.defineProperty(exports, "getBaiduPreviewBuffer", { enumerable: true, get: function () { return get_buffer_1.getBaiduPreviewBuffer; } });
|
|
30
|
-
Object.defineProperty(exports, "getBaiduDownloadBuffer", { enumerable: true, get: function () { return get_buffer_1.getBaiduDownloadBuffer; } });
|
|
31
|
-
var get_buffer_2 = require("./ali/get-buffer");
|
|
32
|
-
Object.defineProperty(exports, "getAliPreviewBuffer", { enumerable: true, get: function () { return get_buffer_2.getAliPreviewBuffer; } });
|
|
33
|
-
Object.defineProperty(exports, "getAliDownloadBuffer", { enumerable: true, get: function () { return get_buffer_2.getAliDownloadBuffer; } });
|
|
34
|
-
__exportStar(require("./common/types"), exports);
|
|
1
|
+
var e,t=Object.create,a=Object.defineProperty,r=Object.getOwnPropertyDescriptor,i=Object.getOwnPropertyNames,s=Object.getPrototypeOf,n=Object.prototype.hasOwnProperty,o=(e,t,s,o)=>{if(t&&"object"==typeof t||"function"==typeof t)for(let c of i(t))n.call(e,c)||c===s||a(e,c,{get:()=>t[c],enumerable:!(o=r(t,c))||o.enumerable});return e},c=(e,r,i)=>(i=null!=e?t(s(e)):{},o(!r&&e&&e.__esModule?i:a(i,"default",{value:e,enumerable:!0}),e)),d={};((e,t)=>{for(var r in t)a(e,r,{get:t[r],enumerable:!0})})(d,{AliFetcher:()=>v,BaiduFetcher:()=>p,fetchAliBuffer:()=>M,fetchBaiduBuffer:()=>U,getAliDownloadBuffer:()=>K,getAliFetcher:()=>D,getAliPreviewBuffer:()=>I,getBaiduDownloadBuffer:()=>C,getBaiduFetcher:()=>y,getBaiduPreviewBuffer:()=>F}),module.exports=(e=d,o(a({},"__esModule",{value:!0}),e));var l=c(require("axios")),u=require("typeorm"),f=require("@soga/entities"),h={},w=null,_=0,p=class{axios;domain="https://pan.baidu.com";urlRepository;constructor(e){this.axios=l.default.create({baseURL:this.domain}),this.axios.interceptors.response.use(e=>e.data,e=>Promise.reject(e)),this.urlRepository=e.getRepository(f.FetchUrl)}async getDownloadUrl({access_token:e,fs_id:t,ua:a}){return await this.getDownloadUrlByRange({access_token:e,fs_id:t,start:0,end:0,ua:a})}async getArrayBuffer(e){return await this.cleanExpired(),await this.multipleGetArrayBuffer({...e,times:0})}async getDownloadUrlByRange({access_token:e,fs_id:t,start:a=0,end:r=0,ua:i}){const s=this.getKey({fs_id:t,access_token:e,ua:i});try{for(;h[s];)await new Promise(e=>setTimeout(e,100));h[s]=!0;const n=await this.urlRepository.findOne({where:{key:s,speed:(0,u.MoreThan)(30),expire:(0,u.MoreThan)(Date.now())},order:{speed:"DESC",fail:"ASC"}});if(n)return n.fail>10?(await this.urlRepository.delete({key:s}),""):n.dlink;const{dlink:o,speed:c}=await this.fetchBestDlink({access_token:e,fs_id:t,start:a,end:r,ua:i});if(o)return await this.urlRepository.save({dlink:o,key:s,speed:c,expire:Date.now()+252e5}),o;throw new Error("baidu fetch 
download url failed")}finally{delete h[s]}}async cleanExpired(){Date.now()-_>18e5&&(await this.urlRepository.delete({expire:(0,u.LessThan)(Date.now())}),_=Date.now())}async fetchBestDlink({fs_id:e,start:t=0,end:a=0,ua:r,access_token:i}){const s=[];let n="";const o=await this.fetchDownloadUrl({fs_id:e,access_token:i,ua:r});if(!o)return{dlink:"",speed:0};const c=Math.min(t+127,a),d={"User-Agent":r,Range:`bytes=${t}-${c}`},u=c-t+1,f={start:0,end:0};for(let e=0;e<50;e++){const e=(await l.default.get(o,{headers:d,maxRedirects:0,validateStatus:e=>e>=200&&e<303})).headers.location;if(!e)return{dlink:"",speed:0};const t=new URL(e),{hostname:a}=t;if(!s.includes(a)){n=e;try{return f.start=Date.now(),await l.default.get(e,{headers:d,timeout:2e3}),f.end=Date.now(),{dlink:e,speed:1e3*u/(f.end-f.start)}}catch(e){s.push(a),await new Promise(e=>setTimeout(e,200));continue}}}return{dlink:n,speed:1e3*u/(f.end-f.start)}}async fetchDownloadUrl({fs_id:e,ua:t="pan.baidu.com",access_token:a}){const r=await this.axios.get("/rest/2.0/xpan/multimedia",{params:{access_token:a,dlink:"1",method:"filemetas",fsids:JSON.stringify([e])},headers:{"User-Agent":t}}),{errno:i,list:s}=r||{};if(0!=i)throw new Error(`获取下载链接失败: ${i}`);if(!s||!s.length)throw new Error("获取下载链接失败");return`${s[0].dlink}&access_token=${a}`}async multipleGetArrayBuffer({access_token:e,fs_id:t,start:a,end:r,abort_controller:i,times:s=0,ua:n}){const o=await this.getDownloadUrlByRange({access_token:e,fs_id:t,start:a,end:r,ua:n});if(!o)throw new Error("获取下载链接失败");try{const e=r-a+1,t=Math.max(1e3*Math.ceil(e/1024/10),3e3);return(await l.default.get(o,{headers:{Range:`bytes=${a}-${r}`,"User-Agent":n,Accept:"application/octet-stream"},signal:i?.signal,responseType:"arraybuffer",timeout:t})).data}catch(c){if("AbortError"===c.name)return;const d=this.getKey({fs_id:t,access_token:e,ua:n});if(await this.urlRepository.increment({key:d,dlink:o},"fail",1),await new Promise(e=>setTimeout(e,1e3)),(s||0)<3)return await 
this.multipleGetArrayBuffer({access_token:e,fs_id:t,start:a,end:r,times:(s||0)+1,abort_controller:i,ua:n});throw c}}getKey({access_token:e,fs_id:t,ua:a}){const r=a?encodeURIComponent(a):"ua";return`baidu-${encodeURIComponent(e)}-${r}-${t}`}},y=e=>(w||(w=new p(e)),w),m=c(require("axios")),g=require("typeorm"),k=require("@soga/entities"),b={},B=null,v=class{axios;domain="https://openapi.alipan.com";urlRepository;constructor(e){this.axios=m.default.create({baseURL:this.domain}),this.axios.interceptors.response.use(e=>e.data,e=>Promise.reject(e)),this.urlRepository=e.getRepository(k.FetchUrl)}async getDownloadUrl({access_token:e,drive_id:t,file_id:a,ua:r}){const i=this.getKey({access_token:e,file_id:a,ua:r});try{for(;b[i];)await new Promise(e=>setTimeout(e,100));b[i]=!0;const s=await this.urlRepository.findOne({where:{key:i,fail:(0,g.LessThan)(2),expire:(0,g.MoreThan)(Date.now())},order:{fail:"ASC"}});if(s)return s.fail>1?(await this.urlRepository.delete({key:i}),""):s.dlink;const{url:n,expiration:o}=await this.fetchDownloadInfo({access_token:e,drive_id:t,file_id:a,ua:r});if(n)return await this.urlRepository.save({dlink:n,key:i,speed:100,expire:new Date(o).getTime()}),n;throw new Error("ali fetch download url failed")}finally{delete b[i]}}async getArrayBuffer(e){return await this.multipleGetArrayBuffer({...e,times:0})}async multipleGetArrayBuffer({access_token:e,drive_id:t,file_id:a,start:r,end:i,abort_controller:s,times:n=0,ua:o}){const c=await this.getDownloadUrl({access_token:e,drive_id:t,file_id:a,ua:o});if(!c)throw new Error("获取下载链接失败");try{const e=i-r+1,t=Math.max(1e3*Math.ceil(e/1024/50),3e3);return(await m.default.get(c,{headers:{Range:`bytes=${r}-${i}`,"User-Agent":o,Accept:"application/octet-stream"},responseType:"arraybuffer",timeout:t,signal:s?.signal})).data}catch(s){if("AbortError"===s.name)return;const d=this.getKey({access_token:e,file_id:a,ua:o});if(await this.urlRepository.increment({key:d,dlink:c},"fail",1),await new 
Promise(e=>setTimeout(e,1e3)),(n||0)<3)return await this.multipleGetArrayBuffer({access_token:e,drive_id:t,file_id:a,start:r,end:i,times:(n||0)+1,ua:o});throw s}}async fetchDownloadInfo({access_token:e,drive_id:t,file_id:a,ua:r}){const i={Authorization:`Bearer ${e}`,"User-Agent":r},{url:s,expiration:n}=await this.axios.post("/adrive/v1.0/openFile/getDownloadUrl",{drive_id:t,file_id:a},{headers:i});return{url:s,expiration:n}}getKey({access_token:e,file_id:t,ua:a}){const r=a?encodeURIComponent(a):"ua";return`ali-${encodeURIComponent(e)}-${r}-${t}`}},D=e=>(B||(B=new v(e)),B),R=require("@soga/baidu-ua"),x={},A=({part_md5:e,start:t,end:a})=>`${e}-${t}-${a}`;async function U(e){const t=function(e){const{start:t,end:a,total:r}=e,i=Math.max(a-t+1,0);if(2==e.host_vip_type){const r=Math.ceil(i/1572864),s=Math.ceil(i/r),n=[];for(let i=0;i<r;i++){const r=t+i*s,o=Math.min(a,r+s-1);n.push({...e,start:r,end:o,auto_ua:!1})}return{ranges:n,threads:2}}{const s=Math.ceil(i/131072),n=Math.ceil(i/s),o=[];for(let r=0;r<s;r++){const i=t+r*n,s=Math.min(a,i+n-1);o.push({...e,start:i,end:s,auto_ua:!0})}return{ranges:o,threads:Math.min(r<52428800?8:6,s)}}}(e),a=await async function(e,t){const{ranges:a,threads:r}=e,i=[...a],s=async()=>{if(!i.length)return;const e=i.shift();if(!e)return;const a=A(e);if(!x[a]?.data){const r=y(t),{access_token:i,fs_id:s,host_id:n}=e,{ua:o,finish:c}=await(0,R.getDownloadBaiduUA)(n,e.total),d=await r.getArrayBuffer({access_token:i,fs_id:s,start:e.start,end:e.end,ua:e.auto_ua?o:e.ua});if(c(),!d||0===d.byteLength)return;x[a]={updated:Date.now(),data:Buffer.from(d)}}await s()},n=[];for(let e=0;e<r;e++)n.push(s());await Promise.all(n);let o=!0;const c=a.map(e=>{const t=A(e);return x[t]?.data?x[t].data:(o=!1,null)});if(o){const e=Buffer.concat(c);return a.forEach(e=>{const t=A(e);delete x[t]}),e}return null}(t,e.dataSource);return a}async function M(e){const t=function(e){const{start:t,end:a}=e,r=Math.max(a-t+1,0),i=Math.ceil(r/1572864),s=Math.ceil(r/i),n=[];for(let 
r=0;r<i;r++){const i=t+r*s,o=Math.min(a,i+s-1);n.push({...e,start:i,end:o})}return{ranges:n,threads:2}}(e),a=await async function(e,t){const{ranges:a,threads:r}=e,i=[...a],s=async()=>{if(!i.length)return;const e=i.shift();if(!e)return;const a=A(e);if(!x[a]?.data){const r=D(t),{access_token:i,drive_id:s,file_id:n}=e,o=await r.getArrayBuffer({access_token:i,drive_id:s,file_id:n,start:e.start,end:e.end,ua:e.ua,abort_controller:e.abort_controller});x[a]={updated:Date.now(),data:Buffer.from(o)}}await s()},n=[];for(let e=0;e<r;e++)n.push(s());await Promise.all(n);let o=!0;const c=a.map(e=>{const t=A(e);return x[t]?.data?x[t].data:(o=!1,null)});if(o){const e=Buffer.concat(c);return a.forEach(e=>{const t=A(e);delete x[t]}),e}return null}(t,e.dataSource);return a}var $=require("@soga/lowdb"),P=require("fs-extra"),E=require("path"),O=class{folder_path;cache_path;db;cache_type;constructor(e,t){this.folder_path=e,this.cache_type=t,this.cache_path=(0,E.resolve)(e,`${t}_cache`)}async init(){const e=(0,E.resolve)(this.cache_path,"cache.json");this.db=await(0,$.getDb)(e,{})}async read(e){return this.db.data[e]}async write(e,t){this.db.data[e]=t,await this.db.write()}async delete(e){delete this.db.data[e],await this.db.write()}async writeBuffer(e,t){const a=(0,E.resolve)(this.cache_path,e);await(0,P.writeFile)(a,t),this.db.data[e]=!0,await this.db.write()}async readBuffer(e){if(!this.db.data[e])return null;const t=(0,E.resolve)(this.cache_path,e);return await(0,P.readFile)(t)}async deleteBuffer(e){if(!this.db.data[e])return;delete this.db.data[e],await this.db.write();const t=(0,E.resolve)(this.cache_path,e);try{await(0,P.remove)(t)}catch(e){}}},T={},j=0,S=async(e,t)=>{if(T[e]){for(;!T[e].inited;)await new Promise(e=>setTimeout(e,20));T[e].updated_at=Date.now();const t=Date.now();return t-j>18e5&&(j=t,Object.keys(T).forEach(e=>{t-T[e].updated_at>288e5&&delete T[e]})),T[e].store}const a=new O(e,t);return T[e]={updated_at:Date.now(),store:a,inited:!1},await 
a.init(),T[e].inited=!0,a},q=async({id:e,dataSource:t,record_id:a,start:r,end:i,total:s,part_md5:n,access_token:o,host_id:c,host_vip_type:d,fs_id:l,cache_folder:u,abort_controller:f},h)=>{let w=null;const _=e||A({part_md5:n,start:r,end:i});if(u){w=await S(u,h);const e=await w.readBuffer(_);if(e)return e}const p=await U({dataSource:t,record_id:a,start:r,end:i,total:s,access_token:o,host_id:c,host_vip_type:d,fs_id:l,part_md5:n,abort_controller:f});return p&&w&&await w.writeBuffer(_,p),p},F=async e=>await q(e,"preview"),C=async e=>await q(e,"download"),G=async({id:e,dataSource:t,record_id:a,start:r,end:i,total:s,part_md5:n,access_token:o,host_id:c,host_vip_type:d,drive_id:l,file_id:u,cache_folder:f,abort_controller:h},w)=>{const _=e||A({part_md5:n,start:r,end:i});let p=null;if(f){p=await S(f,w);const e=await p.readBuffer(_);if(e)return e}const y=await M({dataSource:t,record_id:a,start:r,end:i,total:s,access_token:o,host_id:c,host_vip_type:d,drive_id:l,file_id:u,part_md5:n,abort_controller:h});return y&&p&&await p.writeBuffer(_,y),y},I=async e=>await G(e,"preview"),K=async e=>await G(e,"download");
|
package/dist/main.mjs
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
import t from"axios";import{LessThan as e,MoreThan as a}from"typeorm";import{FetchUrl as r}from"@soga/entities";var i={},s=null,n=0,o=class{axios;domain="https://pan.baidu.com";urlRepository;constructor(e){this.axios=t.create({baseURL:this.domain}),this.axios.interceptors.response.use(t=>t.data,t=>Promise.reject(t)),this.urlRepository=e.getRepository(r)}async getDownloadUrl({access_token:t,fs_id:e,ua:a}){return await this.getDownloadUrlByRange({access_token:t,fs_id:e,start:0,end:0,ua:a})}async getArrayBuffer(t){return await this.cleanExpired(),await this.multipleGetArrayBuffer({...t,times:0})}async getDownloadUrlByRange({access_token:t,fs_id:e,start:r=0,end:s=0,ua:n}){const o=this.getKey({fs_id:e,access_token:t,ua:n});try{for(;i[o];)await new Promise(t=>setTimeout(t,100));i[o]=!0;const c=await this.urlRepository.findOne({where:{key:o,speed:a(30),expire:a(Date.now())},order:{speed:"DESC",fail:"ASC"}});if(c)return c.fail>10?(await this.urlRepository.delete({key:o}),""):c.dlink;const{dlink:d,speed:l}=await this.fetchBestDlink({access_token:t,fs_id:e,start:r,end:s,ua:n});if(d)return await this.urlRepository.save({dlink:d,key:o,speed:l,expire:Date.now()+252e5}),d;throw new Error("baidu fetch download url failed")}finally{delete i[o]}}async cleanExpired(){Date.now()-n>18e5&&(await this.urlRepository.delete({expire:e(Date.now())}),n=Date.now())}async fetchBestDlink({fs_id:e,start:a=0,end:r=0,ua:i,access_token:s}){const n=[];let o="";const c=await this.fetchDownloadUrl({fs_id:e,access_token:s,ua:i});if(!c)return{dlink:"",speed:0};const d=Math.min(a+127,r),l={"User-Agent":i,Range:`bytes=${a}-${d}`},u=d-a+1,f={start:0,end:0};for(let e=0;e<50;e++){const e=(await t.get(c,{headers:l,maxRedirects:0,validateStatus:t=>t>=200&&t<303})).headers.location;if(!e)return{dlink:"",speed:0};const a=new URL(e),{hostname:r}=a;if(!n.includes(r)){o=e;try{return f.start=Date.now(),await 
t.get(e,{headers:l,timeout:2e3}),f.end=Date.now(),{dlink:e,speed:1e3*u/(f.end-f.start)}}catch(t){n.push(r),await new Promise(t=>setTimeout(t,200));continue}}}return{dlink:o,speed:1e3*u/(f.end-f.start)}}async fetchDownloadUrl({fs_id:t,ua:e="pan.baidu.com",access_token:a}){const r=await this.axios.get("/rest/2.0/xpan/multimedia",{params:{access_token:a,dlink:"1",method:"filemetas",fsids:JSON.stringify([t])},headers:{"User-Agent":e}}),{errno:i,list:s}=r||{};if(0!=i)throw new Error(`获取下载链接失败: ${i}`);if(!s||!s.length)throw new Error("获取下载链接失败");return`${s[0].dlink}&access_token=${a}`}async multipleGetArrayBuffer({access_token:e,fs_id:a,start:r,end:i,abort_controller:s,times:n=0,ua:o}){const c=await this.getDownloadUrlByRange({access_token:e,fs_id:a,start:r,end:i,ua:o});if(!c)throw new Error("获取下载链接失败");try{const e=i-r+1,a=Math.max(1e3*Math.ceil(e/1024/10),3e3);return(await t.get(c,{headers:{Range:`bytes=${r}-${i}`,"User-Agent":o,Accept:"application/octet-stream"},signal:s?.signal,responseType:"arraybuffer",timeout:a})).data}catch(t){if("AbortError"===t.name)return;const d=this.getKey({fs_id:a,access_token:e,ua:o});if(await this.urlRepository.increment({key:d,dlink:c},"fail",1),await new Promise(t=>setTimeout(t,1e3)),(n||0)<3)return await this.multipleGetArrayBuffer({access_token:e,fs_id:a,start:r,end:i,times:(n||0)+1,abort_controller:s,ua:o});throw t}}getKey({access_token:t,fs_id:e,ua:a}){const r=a?encodeURIComponent(a):"ua";return`baidu-${encodeURIComponent(t)}-${r}-${e}`}},c=t=>(s||(s=new o(t)),s);import d from"axios";import{LessThan as l,MoreThan as u}from"typeorm";import{FetchUrl as f}from"@soga/entities";var h={},w=null,_=class{axios;domain="https://openapi.alipan.com";urlRepository;constructor(t){this.axios=d.create({baseURL:this.domain}),this.axios.interceptors.response.use(t=>t.data,t=>Promise.reject(t)),this.urlRepository=t.getRepository(f)}async getDownloadUrl({access_token:t,drive_id:e,file_id:a,ua:r}){const 
i=this.getKey({access_token:t,file_id:a,ua:r});try{for(;h[i];)await new Promise(t=>setTimeout(t,100));h[i]=!0;const s=await this.urlRepository.findOne({where:{key:i,fail:l(2),expire:u(Date.now())},order:{fail:"ASC"}});if(s)return s.fail>1?(await this.urlRepository.delete({key:i}),""):s.dlink;const{url:n,expiration:o}=await this.fetchDownloadInfo({access_token:t,drive_id:e,file_id:a,ua:r});if(n)return await this.urlRepository.save({dlink:n,key:i,speed:100,expire:new Date(o).getTime()}),n;throw new Error("ali fetch download url failed")}finally{delete h[i]}}async getArrayBuffer(t){return await this.multipleGetArrayBuffer({...t,times:0})}async multipleGetArrayBuffer({access_token:t,drive_id:e,file_id:a,start:r,end:i,abort_controller:s,times:n=0,ua:o}){const c=await this.getDownloadUrl({access_token:t,drive_id:e,file_id:a,ua:o});if(!c)throw new Error("获取下载链接失败");try{const t=i-r+1,e=Math.max(1e3*Math.ceil(t/1024/50),3e3);return(await d.get(c,{headers:{Range:`bytes=${r}-${i}`,"User-Agent":o,Accept:"application/octet-stream"},responseType:"arraybuffer",timeout:e,signal:s?.signal})).data}catch(s){if("AbortError"===s.name)return;const d=this.getKey({access_token:t,file_id:a,ua:o});if(await this.urlRepository.increment({key:d,dlink:c},"fail",1),await new Promise(t=>setTimeout(t,1e3)),(n||0)<3)return await this.multipleGetArrayBuffer({access_token:t,drive_id:e,file_id:a,start:r,end:i,times:(n||0)+1,ua:o});throw s}}async fetchDownloadInfo({access_token:t,drive_id:e,file_id:a,ua:r}){const i={Authorization:`Bearer ${t}`,"User-Agent":r},{url:s,expiration:n}=await this.axios.post("/adrive/v1.0/openFile/getDownloadUrl",{drive_id:e,file_id:a},{headers:i});return{url:s,expiration:n}}getKey({access_token:t,file_id:e,ua:a}){const r=a?encodeURIComponent(a):"ua";return`ali-${encodeURIComponent(t)}-${r}-${e}`}},p=t=>(w||(w=new _(t)),w);import{getDownloadBaiduUA as y}from"@soga/baidu-ua";var m={},k=({part_md5:t,start:e,end:a})=>`${t}-${e}-${a}`;async function g(t){const 
e=function(t){const{start:e,end:a,total:r}=t,i=Math.max(a-e+1,0);if(2==t.host_vip_type){const r=Math.ceil(i/1572864),s=Math.ceil(i/r),n=[];for(let i=0;i<r;i++){const r=e+i*s,o=Math.min(a,r+s-1);n.push({...t,start:r,end:o,auto_ua:!1})}return{ranges:n,threads:2}}{const s=Math.ceil(i/131072),n=Math.ceil(i/s),o=[];for(let r=0;r<s;r++){const i=e+r*n,s=Math.min(a,i+n-1);o.push({...t,start:i,end:s,auto_ua:!0})}return{ranges:o,threads:Math.min(r<52428800?8:6,s)}}}(t),a=await async function(t,e){const{ranges:a,threads:r}=t,i=[...a],s=async()=>{if(!i.length)return;const t=i.shift();if(!t)return;const a=k(t);if(!m[a]?.data){const r=c(e),{access_token:i,fs_id:s,host_id:n}=t,{ua:o,finish:d}=await y(n,t.total),l=await r.getArrayBuffer({access_token:i,fs_id:s,start:t.start,end:t.end,ua:t.auto_ua?o:t.ua});if(d(),!l||0===l.byteLength)return;m[a]={updated:Date.now(),data:Buffer.from(l)}}await s()},n=[];for(let t=0;t<r;t++)n.push(s());await Promise.all(n);let o=!0;const d=a.map(t=>{const e=k(t);return m[e]?.data?m[e].data:(o=!1,null)});if(o){const t=Buffer.concat(d);return a.forEach(t=>{const e=k(t);delete m[e]}),t}return null}(e,t.dataSource);return a}async function b(t){const e=function(t){const{start:e,end:a}=t,r=Math.max(a-e+1,0),i=Math.ceil(r/1572864),s=Math.ceil(r/i),n=[];for(let r=0;r<i;r++){const i=e+r*s,o=Math.min(a,i+s-1);n.push({...t,start:i,end:o})}return{ranges:n,threads:2}}(t),a=await async function(t,e){const{ranges:a,threads:r}=t,i=[...a],s=async()=>{if(!i.length)return;const t=i.shift();if(!t)return;const a=k(t);if(!m[a]?.data){const r=p(e),{access_token:i,drive_id:s,file_id:n}=t,o=await r.getArrayBuffer({access_token:i,drive_id:s,file_id:n,start:t.start,end:t.end,ua:t.ua,abort_controller:t.abort_controller});m[a]={updated:Date.now(),data:Buffer.from(o)}}await s()},n=[];for(let t=0;t<r;t++)n.push(s());await Promise.all(n);let o=!0;const c=a.map(t=>{const e=k(t);return m[e]?.data?m[e].data:(o=!1,null)});if(o){const t=Buffer.concat(c);return a.forEach(t=>{const 
e=k(t);delete m[e]}),t}return null}(e,t.dataSource);return a}import{getDb as R}from"@soga/lowdb";import{readFile as x,remove as D,writeFile as v}from"fs-extra";import{resolve as B}from"path";var A=class{folder_path;cache_path;db;cache_type;constructor(t,e){this.folder_path=t,this.cache_type=e,this.cache_path=B(t,`${e}_cache`)}async init(){const t=B(this.cache_path,"cache.json");this.db=await R(t,{})}async read(t){return this.db.data[t]}async write(t,e){this.db.data[t]=e,await this.db.write()}async delete(t){delete this.db.data[t],await this.db.write()}async writeBuffer(t,e){const a=B(this.cache_path,t);await v(a,e),this.db.data[t]=!0,await this.db.write()}async readBuffer(t){if(!this.db.data[t])return null;const e=B(this.cache_path,t);return await x(e)}async deleteBuffer(t){if(!this.db.data[t])return;delete this.db.data[t],await this.db.write();const e=B(this.cache_path,t);try{await D(e)}catch(t){}}},U={},$=0,M=async(t,e)=>{if(U[t]){for(;!U[t].inited;)await new Promise(t=>setTimeout(t,20));U[t].updated_at=Date.now();const e=Date.now();return e-$>18e5&&($=e,Object.keys(U).forEach(t=>{e-U[t].updated_at>288e5&&delete U[t]})),U[t].store}const a=new A(t,e);return U[t]={updated_at:Date.now(),store:a,inited:!1},await a.init(),U[t].inited=!0,a},E=async({id:t,dataSource:e,record_id:a,start:r,end:i,total:s,part_md5:n,access_token:o,host_id:c,host_vip_type:d,fs_id:l,cache_folder:u,abort_controller:f},h)=>{let w=null;const _=t||k({part_md5:n,start:r,end:i});if(u){w=await M(u,h);const t=await w.readBuffer(_);if(t)return t}const p=await g({dataSource:e,record_id:a,start:r,end:i,total:s,access_token:o,host_id:c,host_vip_type:d,fs_id:l,part_md5:n,abort_controller:f});return p&&w&&await w.writeBuffer(_,p),p},S=async t=>await E(t,"preview"),T=async t=>await E(t,"download"),P=async({id:t,dataSource:e,record_id:a,start:r,end:i,total:s,part_md5:n,access_token:o,host_id:c,host_vip_type:d,drive_id:l,file_id:u,cache_folder:f,abort_controller:h},w)=>{const 
_=t||k({part_md5:n,start:r,end:i});let p=null;if(f){p=await M(f,w);const t=await p.readBuffer(_);if(t)return t}const y=await b({dataSource:e,record_id:a,start:r,end:i,total:s,access_token:o,host_id:c,host_vip_type:d,drive_id:l,file_id:u,part_md5:n,abort_controller:h});return y&&p&&await p.writeBuffer(_,y),y},C=async t=>await P(t,"preview"),G=async t=>await P(t,"download");export{_ as AliFetcher,o as BaiduFetcher,b as fetchAliBuffer,g as fetchBaiduBuffer,G as getAliDownloadBuffer,p as getAliFetcher,C as getAliPreviewBuffer,T as getBaiduDownloadBuffer,c as getBaiduFetcher,S as getBaiduPreviewBuffer};
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@soga/fetcher",
|
|
3
|
-
"version": "
|
|
3
|
+
"version": "1.3.1",
|
|
4
4
|
"publishConfig": {
|
|
5
5
|
"access": "public"
|
|
6
6
|
},
|
|
@@ -14,20 +14,22 @@
|
|
|
14
14
|
"author": "",
|
|
15
15
|
"license": "ISC",
|
|
16
16
|
"dependencies": {
|
|
17
|
-
"@soga/baidu-ua": "^0.3.0",
|
|
18
|
-
"@soga/bridge": "^0.3.0",
|
|
19
|
-
"@soga/entities": "^0.3.0",
|
|
20
17
|
"axios": "^1.9.0",
|
|
21
|
-
"
|
|
18
|
+
"fs-extra": "^11.3.2",
|
|
19
|
+
"typeorm": "^0.3.24",
|
|
20
|
+
"@soga/baidu-ua": "^1.0.0",
|
|
21
|
+
"@soga/entities": "^1.0.0",
|
|
22
|
+
"@soga/lowdb": "^1.0.2"
|
|
23
|
+
},
|
|
24
|
+
"devDependencies": {
|
|
25
|
+
"@types/node": "^24.5.2",
|
|
26
|
+
"rimraf": "^6.0.1",
|
|
27
|
+
"terser": "^5.43.1",
|
|
28
|
+
"tsup": "^8.5.0",
|
|
29
|
+
"typescript": "^5.8.3",
|
|
30
|
+
"@soga/typescript-config": "^1.0.0"
|
|
22
31
|
},
|
|
23
32
|
"scripts": {
|
|
24
|
-
"build": "rimraf dist &&
|
|
25
|
-
"minify": "ts-node ./scripts/minify",
|
|
26
|
-
"demo_backup": "ts-node ./demo/demo.ts",
|
|
27
|
-
"demo": "ts-node ./demo/demo.ts",
|
|
28
|
-
"worker": "tsc && ts-node ./demo/worker.ts",
|
|
29
|
-
"test": "jest",
|
|
30
|
-
"dev": "ts-node ./src/main.ts",
|
|
31
|
-
"lint": "eslint . --ext .ts"
|
|
33
|
+
"build": "rimraf dist && tsup src/main.ts --format cjs,esm --dts --minify terser"
|
|
32
34
|
}
|
|
33
35
|
}
|
package/README.md
DELETED
|
@@ -1 +0,0 @@
|
|
|
1
|
-
# downloader
|
package/dist/ali/fetch-buffer.js
DELETED
|
@@ -1,83 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.fetchAliBuffer = fetchAliBuffer;
|
|
4
|
-
const fetcher_1 = require("./fetcher");
|
|
5
|
-
const common_1 = require("../common/common");
|
|
6
|
-
async function fetchAliBuffer(params) {
|
|
7
|
-
const rangeData = calculateRangeData(params);
|
|
8
|
-
const buffer = await parallelFetchBuffer(rangeData, params.dataSource);
|
|
9
|
-
return buffer;
|
|
10
|
-
}
|
|
11
|
-
async function parallelFetchBuffer(params, dataSource) {
|
|
12
|
-
const { ranges, threads } = params;
|
|
13
|
-
const queues = [...ranges];
|
|
14
|
-
const loop = async () => {
|
|
15
|
-
if (!queues.length)
|
|
16
|
-
return;
|
|
17
|
-
const item = queues.shift();
|
|
18
|
-
if (!item)
|
|
19
|
-
return;
|
|
20
|
-
const key = (0, common_1.getKey)(item);
|
|
21
|
-
if (!common_1.bufferCache[key]?.data) {
|
|
22
|
-
const fetcher = (0, fetcher_1.getAliFetcher)(dataSource);
|
|
23
|
-
const { access_token, drive_id, file_id } = item;
|
|
24
|
-
const ab = await fetcher.getArrayBuffer({
|
|
25
|
-
access_token,
|
|
26
|
-
drive_id,
|
|
27
|
-
file_id,
|
|
28
|
-
start: item.start,
|
|
29
|
-
end: item.end,
|
|
30
|
-
ua: item.ua,
|
|
31
|
-
abort_controller: item.abort_controller,
|
|
32
|
-
});
|
|
33
|
-
common_1.bufferCache[key] = {
|
|
34
|
-
updated: Date.now(),
|
|
35
|
-
data: Buffer.from(ab),
|
|
36
|
-
};
|
|
37
|
-
}
|
|
38
|
-
await loop();
|
|
39
|
-
};
|
|
40
|
-
const arr = [];
|
|
41
|
-
for (let i = 0; i < threads; i++) {
|
|
42
|
-
arr.push(loop());
|
|
43
|
-
}
|
|
44
|
-
await Promise.all(arr);
|
|
45
|
-
let completed = true;
|
|
46
|
-
const buffers = ranges.map((item) => {
|
|
47
|
-
const key = (0, common_1.getKey)(item);
|
|
48
|
-
if (common_1.bufferCache[key]?.data) {
|
|
49
|
-
return common_1.bufferCache[key].data;
|
|
50
|
-
}
|
|
51
|
-
completed = false;
|
|
52
|
-
return null;
|
|
53
|
-
});
|
|
54
|
-
if (completed) {
|
|
55
|
-
const buffer = Buffer.concat(buffers);
|
|
56
|
-
ranges.forEach((item) => {
|
|
57
|
-
const key = (0, common_1.getKey)(item);
|
|
58
|
-
delete common_1.bufferCache[key];
|
|
59
|
-
});
|
|
60
|
-
return buffer;
|
|
61
|
-
}
|
|
62
|
-
return null;
|
|
63
|
-
}
|
|
64
|
-
function calculateRangeData(params) {
|
|
65
|
-
const { start, end } = params;
|
|
66
|
-
const size = Math.max(end - start + 1, 0);
|
|
67
|
-
const segments = Math.ceil(size / 1572864);
|
|
68
|
-
const bytes = Math.ceil(size / segments);
|
|
69
|
-
const ranges = [];
|
|
70
|
-
for (let i = 0; i < segments; i++) {
|
|
71
|
-
const the_start = start + i * bytes;
|
|
72
|
-
const the_end = Math.min(end, the_start + bytes - 1);
|
|
73
|
-
ranges.push({
|
|
74
|
-
...params,
|
|
75
|
-
start: the_start,
|
|
76
|
-
end: the_end,
|
|
77
|
-
});
|
|
78
|
-
}
|
|
79
|
-
return {
|
|
80
|
-
ranges,
|
|
81
|
-
threads: 2,
|
|
82
|
-
};
|
|
83
|
-
}
|
package/dist/ali/fetcher.d.ts
DELETED
|
@@ -1,28 +0,0 @@
|
|
|
1
|
-
import { AxiosInstance } from 'axios';
|
|
2
|
-
import { DataSource, Repository } from 'typeorm';
|
|
3
|
-
import { FetchUrl } from '@soga/entities';
|
|
4
|
-
export declare class AliFetcher {
|
|
5
|
-
axios: AxiosInstance;
|
|
6
|
-
private domain;
|
|
7
|
-
protected urlRepository: Repository<FetchUrl>;
|
|
8
|
-
constructor(dataSource: DataSource);
|
|
9
|
-
getDownloadUrl({ access_token, drive_id, file_id, ua, }: {
|
|
10
|
-
access_token: string;
|
|
11
|
-
drive_id: string;
|
|
12
|
-
file_id: string;
|
|
13
|
-
ua?: string;
|
|
14
|
-
}): Promise<string>;
|
|
15
|
-
getArrayBuffer(params: {
|
|
16
|
-
access_token: string;
|
|
17
|
-
drive_id: string;
|
|
18
|
-
file_id: string;
|
|
19
|
-
start: number;
|
|
20
|
-
end: number;
|
|
21
|
-
ua?: string;
|
|
22
|
-
abort_controller?: AbortController;
|
|
23
|
-
}): Promise<ArrayBuffer>;
|
|
24
|
-
private multipleGetArrayBuffer;
|
|
25
|
-
private fetchDownloadInfo;
|
|
26
|
-
private getKey;
|
|
27
|
-
}
|
|
28
|
-
export declare const getAliFetcher: (dataSource: DataSource) => AliFetcher;
|
package/dist/ali/fetcher.js
DELETED
|
@@ -1,145 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
-
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
-
};
|
|
5
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
-
exports.getAliFetcher = exports.AliFetcher = void 0;
|
|
7
|
-
const axios_1 = __importDefault(require("axios"));
|
|
8
|
-
const typeorm_1 = require("typeorm");
|
|
9
|
-
const entities_1 = require("@soga/entities");
|
|
10
|
-
const processMap = {};
|
|
11
|
-
let instance = null;
|
|
12
|
-
class AliFetcher {
|
|
13
|
-
axios;
|
|
14
|
-
domain = 'https://openapi.alipan.com';
|
|
15
|
-
urlRepository;
|
|
16
|
-
constructor(dataSource) {
|
|
17
|
-
this.axios = axios_1.default.create({
|
|
18
|
-
baseURL: this.domain,
|
|
19
|
-
});
|
|
20
|
-
this.axios.interceptors.response.use((response) => {
|
|
21
|
-
return response.data;
|
|
22
|
-
}, (error) => {
|
|
23
|
-
return Promise.reject(error);
|
|
24
|
-
});
|
|
25
|
-
this.urlRepository = dataSource.getRepository(entities_1.FetchUrl);
|
|
26
|
-
}
|
|
27
|
-
async getDownloadUrl({ access_token, drive_id, file_id, ua, }) {
|
|
28
|
-
const key = this.getKey({ access_token, file_id, ua });
|
|
29
|
-
try {
|
|
30
|
-
while (processMap[key]) {
|
|
31
|
-
await new Promise((resolve) => setTimeout(resolve, 100));
|
|
32
|
-
}
|
|
33
|
-
processMap[key] = true;
|
|
34
|
-
const row = await this.urlRepository.findOne({
|
|
35
|
-
where: { key, fail: (0, typeorm_1.LessThan)(2), expire: (0, typeorm_1.MoreThan)(Date.now()) },
|
|
36
|
-
order: {
|
|
37
|
-
fail: 'ASC',
|
|
38
|
-
},
|
|
39
|
-
});
|
|
40
|
-
if (row) {
|
|
41
|
-
if (row.fail > 1) {
|
|
42
|
-
await this.urlRepository.delete({ key });
|
|
43
|
-
return '';
|
|
44
|
-
}
|
|
45
|
-
return row.dlink;
|
|
46
|
-
}
|
|
47
|
-
const { url, expiration } = await this.fetchDownloadInfo({
|
|
48
|
-
access_token,
|
|
49
|
-
drive_id,
|
|
50
|
-
file_id,
|
|
51
|
-
ua,
|
|
52
|
-
});
|
|
53
|
-
if (url) {
|
|
54
|
-
await this.urlRepository.save({
|
|
55
|
-
dlink: url,
|
|
56
|
-
key,
|
|
57
|
-
speed: 100,
|
|
58
|
-
expire: new Date(expiration).getTime(),
|
|
59
|
-
});
|
|
60
|
-
return url;
|
|
61
|
-
}
|
|
62
|
-
throw new Error('ali fetch download url failed');
|
|
63
|
-
}
|
|
64
|
-
finally {
|
|
65
|
-
delete processMap[key];
|
|
66
|
-
}
|
|
67
|
-
}
|
|
68
|
-
async getArrayBuffer(params) {
|
|
69
|
-
return await this.multipleGetArrayBuffer({
|
|
70
|
-
...params,
|
|
71
|
-
times: 0,
|
|
72
|
-
});
|
|
73
|
-
}
|
|
74
|
-
async multipleGetArrayBuffer({ access_token, drive_id, file_id, start, end, abort_controller, times = 0, ua, }) {
|
|
75
|
-
const dlink = await this.getDownloadUrl({
|
|
76
|
-
access_token,
|
|
77
|
-
drive_id,
|
|
78
|
-
file_id,
|
|
79
|
-
ua,
|
|
80
|
-
});
|
|
81
|
-
if (!dlink) {
|
|
82
|
-
throw new Error('获取下载链接失败');
|
|
83
|
-
}
|
|
84
|
-
try {
|
|
85
|
-
const length = end - start + 1;
|
|
86
|
-
const duration = Math.max(Math.ceil(length / 1024 / 50) * 1000, 3000);
|
|
87
|
-
const res = await axios_1.default.get(dlink, {
|
|
88
|
-
headers: {
|
|
89
|
-
Range: `bytes=${start}-${end}`,
|
|
90
|
-
'User-Agent': ua,
|
|
91
|
-
Accept: 'application/octet-stream',
|
|
92
|
-
},
|
|
93
|
-
responseType: 'arraybuffer',
|
|
94
|
-
timeout: duration,
|
|
95
|
-
signal: abort_controller?.signal,
|
|
96
|
-
});
|
|
97
|
-
return res.data;
|
|
98
|
-
}
|
|
99
|
-
catch (err) {
|
|
100
|
-
if (err.name === 'AbortError')
|
|
101
|
-
return;
|
|
102
|
-
const key = this.getKey({ access_token, file_id, ua });
|
|
103
|
-
await this.urlRepository.increment({ key, dlink }, 'fail', 1);
|
|
104
|
-
await new Promise((resolve) => setTimeout(resolve, 1000));
|
|
105
|
-
if ((times || 0) < 3) {
|
|
106
|
-
return await this.multipleGetArrayBuffer({
|
|
107
|
-
access_token,
|
|
108
|
-
drive_id,
|
|
109
|
-
file_id,
|
|
110
|
-
start,
|
|
111
|
-
end,
|
|
112
|
-
times: (times || 0) + 1,
|
|
113
|
-
ua,
|
|
114
|
-
});
|
|
115
|
-
}
|
|
116
|
-
throw err;
|
|
117
|
-
}
|
|
118
|
-
}
|
|
119
|
-
async fetchDownloadInfo({ access_token, drive_id, file_id, ua, }) {
|
|
120
|
-
const headers = {
|
|
121
|
-
Authorization: `Bearer ${access_token}`,
|
|
122
|
-
'User-Agent': ua,
|
|
123
|
-
};
|
|
124
|
-
const { url, expiration } = await this.axios.post('/adrive/v1.0/openFile/getDownloadUrl', {
|
|
125
|
-
drive_id,
|
|
126
|
-
file_id,
|
|
127
|
-
}, {
|
|
128
|
-
headers,
|
|
129
|
-
});
|
|
130
|
-
return { url, expiration };
|
|
131
|
-
}
|
|
132
|
-
getKey({ access_token, file_id, ua, }) {
|
|
133
|
-
const userAgent = ua ? encodeURIComponent(ua) : 'ua';
|
|
134
|
-
const accessToken = encodeURIComponent(access_token);
|
|
135
|
-
return `ali-${accessToken}-${userAgent}-${file_id}`;
|
|
136
|
-
}
|
|
137
|
-
}
|
|
138
|
-
exports.AliFetcher = AliFetcher;
|
|
139
|
-
const getAliFetcher = (dataSource) => {
|
|
140
|
-
if (!instance) {
|
|
141
|
-
instance = new AliFetcher(dataSource);
|
|
142
|
-
}
|
|
143
|
-
return instance;
|
|
144
|
-
};
|
|
145
|
-
exports.getAliFetcher = getAliFetcher;
|
package/dist/ali/get-buffer.d.ts
DELETED
|
@@ -1,3 +0,0 @@
|
|
|
1
|
-
import { AliPreviewBufferParams } from '../common/types';
|
|
2
|
-
export declare const getAliPreviewBuffer: (params: AliPreviewBufferParams) => Promise<Buffer<ArrayBuffer>>;
|
|
3
|
-
export declare const getAliDownloadBuffer: (params: AliPreviewBufferParams) => Promise<Buffer<ArrayBuffer>>;
|
package/dist/ali/get-buffer.js
DELETED
|
@@ -1,42 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.getAliDownloadBuffer = exports.getAliPreviewBuffer = void 0;
|
|
4
|
-
const common_1 = require("../common/common");
|
|
5
|
-
const fetch_buffer_1 = require("./fetch-buffer");
|
|
6
|
-
const low_store_1 = require("../common/low-store");
|
|
7
|
-
const getAliBuffer = async ({ dataSource, record_id, start, end, total, part_md5, access_token, host_id, host_vip_type, drive_id, file_id, cache_folder, abort_controller, }, cache_type) => {
|
|
8
|
-
const key = (0, common_1.getKey)({ part_md5, start, end });
|
|
9
|
-
let store = null;
|
|
10
|
-
if (cache_folder) {
|
|
11
|
-
store = await (0, low_store_1.getLowStore)(cache_folder, cache_type);
|
|
12
|
-
const bufferCache = await store.readBuffer(key);
|
|
13
|
-
if (bufferCache) {
|
|
14
|
-
return bufferCache;
|
|
15
|
-
}
|
|
16
|
-
}
|
|
17
|
-
const buffer = await (0, fetch_buffer_1.fetchAliBuffer)({
|
|
18
|
-
dataSource,
|
|
19
|
-
record_id,
|
|
20
|
-
start,
|
|
21
|
-
end,
|
|
22
|
-
total,
|
|
23
|
-
access_token,
|
|
24
|
-
host_id,
|
|
25
|
-
host_vip_type,
|
|
26
|
-
drive_id,
|
|
27
|
-
file_id,
|
|
28
|
-
part_md5,
|
|
29
|
-
abort_controller,
|
|
30
|
-
});
|
|
31
|
-
if (buffer && store) {
|
|
32
|
-
await store.writeBuffer(key, buffer);
|
|
33
|
-
}
|
|
34
|
-
return buffer;
|
|
35
|
-
};
|
|
36
|
-
const getAliPreviewBuffer = async (params) => {
|
|
37
|
-
console.log('getAliPreviewBuffer params:', params);
|
|
38
|
-
return await getAliBuffer(params, 'preview');
|
|
39
|
-
};
|
|
40
|
-
exports.getAliPreviewBuffer = getAliPreviewBuffer;
|
|
41
|
-
const getAliDownloadBuffer = async (params) => await getAliBuffer(params, 'download');
|
|
42
|
-
exports.getAliDownloadBuffer = getAliDownloadBuffer;
|
|
@@ -1,109 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.fetchBaiduBuffer = fetchBaiduBuffer;
|
|
4
|
-
const baidu_ua_1 = require("@soga/baidu-ua");
|
|
5
|
-
const fetcher_1 = require("./fetcher");
|
|
6
|
-
const common_1 = require("../common/common");
|
|
7
|
-
async function fetchBaiduBuffer(params) {
|
|
8
|
-
const rangeData = calculateRangeData(params);
|
|
9
|
-
const buffer = await parallelFetchBuffer(rangeData, params.dataSource);
|
|
10
|
-
return buffer;
|
|
11
|
-
}
|
|
12
|
-
async function parallelFetchBuffer(params, dataSource) {
|
|
13
|
-
const { ranges, threads } = params;
|
|
14
|
-
const queues = [...ranges];
|
|
15
|
-
const loop = async () => {
|
|
16
|
-
if (!queues.length)
|
|
17
|
-
return;
|
|
18
|
-
const item = queues.shift();
|
|
19
|
-
if (!item)
|
|
20
|
-
return;
|
|
21
|
-
const key = (0, common_1.getKey)(item);
|
|
22
|
-
if (!common_1.bufferCache[key]?.data) {
|
|
23
|
-
const fetcher = (0, fetcher_1.getBaiduFetcher)(dataSource);
|
|
24
|
-
const { access_token, fs_id, host_id } = item;
|
|
25
|
-
const { ua, finishBaiduUA } = await (0, baidu_ua_1.getBaiduUA)(host_id, item.total);
|
|
26
|
-
const ab = await fetcher.getArrayBuffer({
|
|
27
|
-
access_token,
|
|
28
|
-
fs_id,
|
|
29
|
-
start: item.start,
|
|
30
|
-
end: item.end,
|
|
31
|
-
ua: item.auto_ua ? ua : item.ua,
|
|
32
|
-
});
|
|
33
|
-
finishBaiduUA();
|
|
34
|
-
if (!ab || ab.byteLength === 0) {
|
|
35
|
-
return;
|
|
36
|
-
}
|
|
37
|
-
common_1.bufferCache[key] = {
|
|
38
|
-
updated: Date.now(),
|
|
39
|
-
data: Buffer.from(ab),
|
|
40
|
-
};
|
|
41
|
-
}
|
|
42
|
-
await loop();
|
|
43
|
-
};
|
|
44
|
-
const arr = [];
|
|
45
|
-
for (let i = 0; i < threads; i++) {
|
|
46
|
-
arr.push(loop());
|
|
47
|
-
}
|
|
48
|
-
await Promise.all(arr);
|
|
49
|
-
let completed = true;
|
|
50
|
-
const buffers = ranges.map((item) => {
|
|
51
|
-
const key = (0, common_1.getKey)(item);
|
|
52
|
-
if (common_1.bufferCache[key]?.data) {
|
|
53
|
-
return common_1.bufferCache[key].data;
|
|
54
|
-
}
|
|
55
|
-
completed = false;
|
|
56
|
-
return null;
|
|
57
|
-
});
|
|
58
|
-
if (completed) {
|
|
59
|
-
const buffer = Buffer.concat(buffers);
|
|
60
|
-
ranges.forEach((item) => {
|
|
61
|
-
const key = (0, common_1.getKey)(item);
|
|
62
|
-
delete common_1.bufferCache[key];
|
|
63
|
-
});
|
|
64
|
-
return buffer;
|
|
65
|
-
}
|
|
66
|
-
return null;
|
|
67
|
-
}
|
|
68
|
-
function calculateRangeData(params) {
|
|
69
|
-
const { start, end, total } = params;
|
|
70
|
-
const size = Math.max(end - start + 1, 0);
|
|
71
|
-
if (params.host_vip_type == 2) {
|
|
72
|
-
const segments = Math.ceil(size / 1572864);
|
|
73
|
-
const bytes = Math.ceil(size / segments);
|
|
74
|
-
const ranges = [];
|
|
75
|
-
for (let i = 0; i < segments; i++) {
|
|
76
|
-
const the_start = start + i * bytes;
|
|
77
|
-
const the_end = Math.min(end, the_start + bytes - 1);
|
|
78
|
-
ranges.push({
|
|
79
|
-
...params,
|
|
80
|
-
start: the_start,
|
|
81
|
-
end: the_end,
|
|
82
|
-
auto_ua: false,
|
|
83
|
-
});
|
|
84
|
-
}
|
|
85
|
-
return {
|
|
86
|
-
ranges,
|
|
87
|
-
threads: 2,
|
|
88
|
-
};
|
|
89
|
-
}
|
|
90
|
-
else {
|
|
91
|
-
const segments = Math.ceil(size / 131072);
|
|
92
|
-
const bytes = Math.ceil(size / segments);
|
|
93
|
-
const ranges = [];
|
|
94
|
-
for (let i = 0; i < segments; i++) {
|
|
95
|
-
const the_start = start + i * bytes;
|
|
96
|
-
const the_end = Math.min(end, the_start + bytes - 1);
|
|
97
|
-
ranges.push({
|
|
98
|
-
...params,
|
|
99
|
-
start: the_start,
|
|
100
|
-
end: the_end,
|
|
101
|
-
auto_ua: true,
|
|
102
|
-
});
|
|
103
|
-
}
|
|
104
|
-
return {
|
|
105
|
-
ranges,
|
|
106
|
-
threads: Math.min(total < 50 * 1024 * 1024 ? 8 : 6, segments),
|
|
107
|
-
};
|
|
108
|
-
}
|
|
109
|
-
}
|
package/dist/baidu/fetcher.d.ts
DELETED
|
@@ -1,35 +0,0 @@
|
|
|
1
|
-
import { AxiosInstance } from 'axios';
|
|
2
|
-
import { DataSource, Repository } from 'typeorm';
|
|
3
|
-
import { FetchUrl } from '@soga/entities';
|
|
4
|
-
export declare class BaiduFetcher {
|
|
5
|
-
axios: AxiosInstance;
|
|
6
|
-
private domain;
|
|
7
|
-
protected urlRepository: Repository<FetchUrl>;
|
|
8
|
-
constructor(dataSource: DataSource);
|
|
9
|
-
getDownloadUrl({ access_token, fs_id, ua, }: {
|
|
10
|
-
access_token: string;
|
|
11
|
-
fs_id: number;
|
|
12
|
-
ua?: string;
|
|
13
|
-
}): Promise<string>;
|
|
14
|
-
getArrayBuffer(params: {
|
|
15
|
-
access_token: string;
|
|
16
|
-
fs_id: number;
|
|
17
|
-
start: number;
|
|
18
|
-
end: number;
|
|
19
|
-
abort_controller?: AbortController;
|
|
20
|
-
ua?: string;
|
|
21
|
-
}): Promise<ArrayBuffer>;
|
|
22
|
-
getDownloadUrlByRange({ access_token, fs_id, start, end, ua, }: {
|
|
23
|
-
access_token: string;
|
|
24
|
-
fs_id: number;
|
|
25
|
-
start: number;
|
|
26
|
-
end: number;
|
|
27
|
-
ua?: string;
|
|
28
|
-
}): Promise<string>;
|
|
29
|
-
private cleanExpired;
|
|
30
|
-
private fetchBestDlink;
|
|
31
|
-
private fetchDownloadUrl;
|
|
32
|
-
private multipleGetArrayBuffer;
|
|
33
|
-
private getKey;
|
|
34
|
-
}
|
|
35
|
-
export declare const getBaiduFetcher: (dataSource: DataSource) => BaiduFetcher;
|
package/dist/baidu/fetcher.js
DELETED
|
@@ -1,243 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
-
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
-
};
|
|
5
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
-
exports.getBaiduFetcher = exports.BaiduFetcher = void 0;
|
|
7
|
-
const axios_1 = __importDefault(require("axios"));
|
|
8
|
-
const typeorm_1 = require("typeorm");
|
|
9
|
-
const entities_1 = require("@soga/entities");
|
|
10
|
-
const processMap = {};
|
|
11
|
-
let instance = null;
|
|
12
|
-
let lastCleanTime = 0;
|
|
13
|
-
class BaiduFetcher {
|
|
14
|
-
axios;
|
|
15
|
-
domain = 'https://pan.baidu.com';
|
|
16
|
-
urlRepository;
|
|
17
|
-
constructor(dataSource) {
|
|
18
|
-
this.axios = axios_1.default.create({
|
|
19
|
-
baseURL: this.domain,
|
|
20
|
-
});
|
|
21
|
-
this.axios.interceptors.response.use((response) => {
|
|
22
|
-
return response.data;
|
|
23
|
-
}, (error) => {
|
|
24
|
-
return Promise.reject(error);
|
|
25
|
-
});
|
|
26
|
-
this.urlRepository = dataSource.getRepository(entities_1.FetchUrl);
|
|
27
|
-
}
|
|
28
|
-
async getDownloadUrl({ access_token, fs_id, ua, }) {
|
|
29
|
-
const url = await this.getDownloadUrlByRange({
|
|
30
|
-
access_token,
|
|
31
|
-
fs_id,
|
|
32
|
-
start: 0,
|
|
33
|
-
end: 0,
|
|
34
|
-
ua,
|
|
35
|
-
});
|
|
36
|
-
return url;
|
|
37
|
-
}
|
|
38
|
-
async getArrayBuffer(params) {
|
|
39
|
-
await this.cleanExpired();
|
|
40
|
-
return await this.multipleGetArrayBuffer({
|
|
41
|
-
...params,
|
|
42
|
-
times: 0,
|
|
43
|
-
});
|
|
44
|
-
}
|
|
45
|
-
async getDownloadUrlByRange({ access_token, fs_id, start = 0, end = 0, ua, }) {
|
|
46
|
-
const key = this.getKey({ fs_id, access_token, ua });
|
|
47
|
-
try {
|
|
48
|
-
while (processMap[key]) {
|
|
49
|
-
await new Promise((resolve) => setTimeout(resolve, 100));
|
|
50
|
-
}
|
|
51
|
-
processMap[key] = true;
|
|
52
|
-
const row = await this.urlRepository.findOne({
|
|
53
|
-
where: {
|
|
54
|
-
key,
|
|
55
|
-
speed: (0, typeorm_1.MoreThan)(30),
|
|
56
|
-
expire: (0, typeorm_1.MoreThan)(Date.now()),
|
|
57
|
-
},
|
|
58
|
-
order: {
|
|
59
|
-
speed: 'DESC',
|
|
60
|
-
fail: 'ASC',
|
|
61
|
-
},
|
|
62
|
-
});
|
|
63
|
-
if (row) {
|
|
64
|
-
if (row.fail > 10) {
|
|
65
|
-
await this.urlRepository.delete({ key });
|
|
66
|
-
return '';
|
|
67
|
-
}
|
|
68
|
-
return row.dlink;
|
|
69
|
-
}
|
|
70
|
-
const { dlink, speed } = await this.fetchBestDlink({
|
|
71
|
-
access_token,
|
|
72
|
-
fs_id,
|
|
73
|
-
start,
|
|
74
|
-
end,
|
|
75
|
-
ua,
|
|
76
|
-
});
|
|
77
|
-
if (dlink) {
|
|
78
|
-
await this.urlRepository.save({
|
|
79
|
-
dlink,
|
|
80
|
-
key,
|
|
81
|
-
speed,
|
|
82
|
-
expire: Date.now() + 7 * 3600000,
|
|
83
|
-
});
|
|
84
|
-
return dlink;
|
|
85
|
-
}
|
|
86
|
-
throw new Error('baidu fetch download url failed');
|
|
87
|
-
}
|
|
88
|
-
finally {
|
|
89
|
-
delete processMap[key];
|
|
90
|
-
}
|
|
91
|
-
}
|
|
92
|
-
async cleanExpired() {
|
|
93
|
-
if (Date.now() - lastCleanTime > 1800000) {
|
|
94
|
-
await this.urlRepository.delete({
|
|
95
|
-
expire: (0, typeorm_1.LessThan)(Date.now()),
|
|
96
|
-
});
|
|
97
|
-
lastCleanTime = Date.now();
|
|
98
|
-
}
|
|
99
|
-
}
|
|
100
|
-
async fetchBestDlink({ fs_id, start = 0, end = 0, ua, access_token, }) {
|
|
101
|
-
const errorDomains = [];
|
|
102
|
-
let download_link = '';
|
|
103
|
-
const url = await this.fetchDownloadUrl({ fs_id, access_token, ua });
|
|
104
|
-
if (!url) {
|
|
105
|
-
return {
|
|
106
|
-
dlink: '',
|
|
107
|
-
speed: 0,
|
|
108
|
-
};
|
|
109
|
-
}
|
|
110
|
-
const to = Math.min(start + 127, end);
|
|
111
|
-
const headers = {
|
|
112
|
-
'User-Agent': ua,
|
|
113
|
-
Range: `bytes=${start}-${to}`,
|
|
114
|
-
};
|
|
115
|
-
const bytes = to - start + 1;
|
|
116
|
-
const times = {
|
|
117
|
-
start: 0,
|
|
118
|
-
end: 0,
|
|
119
|
-
};
|
|
120
|
-
for (let i = 0; i < 50; i++) {
|
|
121
|
-
const res = await axios_1.default.get(url, {
|
|
122
|
-
headers,
|
|
123
|
-
maxRedirects: 0,
|
|
124
|
-
validateStatus: (status) => status >= 200 && status < 303,
|
|
125
|
-
});
|
|
126
|
-
const dlink = res.headers.location;
|
|
127
|
-
if (!dlink) {
|
|
128
|
-
return {
|
|
129
|
-
dlink: '',
|
|
130
|
-
speed: 0,
|
|
131
|
-
};
|
|
132
|
-
}
|
|
133
|
-
const $url = new URL(dlink);
|
|
134
|
-
const { hostname } = $url;
|
|
135
|
-
if (errorDomains.includes(hostname)) {
|
|
136
|
-
continue;
|
|
137
|
-
}
|
|
138
|
-
download_link = dlink;
|
|
139
|
-
try {
|
|
140
|
-
times.start = Date.now();
|
|
141
|
-
await axios_1.default.get(dlink, {
|
|
142
|
-
headers,
|
|
143
|
-
timeout: 2000,
|
|
144
|
-
});
|
|
145
|
-
times.end = Date.now();
|
|
146
|
-
return {
|
|
147
|
-
dlink,
|
|
148
|
-
speed: (1000 * bytes) / (times.end - times.start),
|
|
149
|
-
};
|
|
150
|
-
}
|
|
151
|
-
catch (err) {
|
|
152
|
-
errorDomains.push(hostname);
|
|
153
|
-
await new Promise((resolve) => setTimeout(resolve, 200));
|
|
154
|
-
continue;
|
|
155
|
-
}
|
|
156
|
-
}
|
|
157
|
-
return {
|
|
158
|
-
dlink: download_link,
|
|
159
|
-
speed: (1000 * bytes) / (times.end - times.start),
|
|
160
|
-
};
|
|
161
|
-
}
|
|
162
|
-
async fetchDownloadUrl({ fs_id, ua = 'pan.baidu.com', access_token, }) {
|
|
163
|
-
const res = await this.axios.get('/rest/2.0/xpan/multimedia', {
|
|
164
|
-
params: {
|
|
165
|
-
access_token,
|
|
166
|
-
dlink: '1',
|
|
167
|
-
method: 'filemetas',
|
|
168
|
-
fsids: JSON.stringify([fs_id]),
|
|
169
|
-
},
|
|
170
|
-
headers: {
|
|
171
|
-
'User-Agent': ua,
|
|
172
|
-
},
|
|
173
|
-
});
|
|
174
|
-
const { errno, list } = res || {};
|
|
175
|
-
if (errno != 0) {
|
|
176
|
-
throw new Error(`获取下载链接失败: ${errno}`);
|
|
177
|
-
}
|
|
178
|
-
if (!list || !list.length) {
|
|
179
|
-
throw new Error('获取下载链接失败');
|
|
180
|
-
}
|
|
181
|
-
const url = `${list[0].dlink}&access_token=${access_token}`;
|
|
182
|
-
return url;
|
|
183
|
-
}
|
|
184
|
-
async multipleGetArrayBuffer({ access_token, fs_id, start, end, abort_controller, times = 0, ua, }) {
    // Download bytes [start, end] of the file, retrying up to 3 times with a
    // (possibly cached) download URL; `times` tracks the recursion depth.
    const dlink = await this.getDownloadUrlByRange({
        access_token,
        fs_id,
        start,
        end,
        ua,
    });
    if (!dlink) {
        throw new Error('获取下载链接失败');
    }
    try {
        const length = end - start + 1;
        // Timeout scales with size (~1 s per 10 KiB), minimum 3 s.
        const duration = Math.max(Math.ceil(length / 1024 / 10) * 1000, 3000);
        const res = await axios_1.default.get(dlink, {
            headers: {
                Range: `bytes=${start}-${end}`,
                'User-Agent': ua,
                Accept: 'application/octet-stream',
            },
            signal: abort_controller?.signal,
            responseType: 'arraybuffer',
            timeout: duration,
        });
        return res.data;
    }
    catch (err) {
        // Caller-initiated aborts resolve to undefined instead of throwing.
        // NOTE(review): recent axios versions surface cancellation as
        // CanceledError (code ERR_CANCELED), not name 'AbortError' —
        // confirm this check matches the axios version in use.
        if (err.name === 'AbortError')
            return;
        // Penalise the failing cached link; getDownloadUrlByRange evicts
        // rows once fail exceeds 10.
        const key = this.getKey({ fs_id, access_token, ua });
        await this.urlRepository.increment({ key, dlink }, 'fail', 1);
        await new Promise((resolve) => setTimeout(resolve, 1000));
        if ((times || 0) < 3) {
            return await this.multipleGetArrayBuffer({
                access_token,
                fs_id,
                start,
                end,
                times: (times || 0) + 1,
                abort_controller,
                ua,
            });
        }
        throw err;
    }
}
|
|
230
|
-
getKey({ access_token, fs_id, ua, }) {
|
|
231
|
-
const userAgent = ua ? encodeURIComponent(ua) : 'ua';
|
|
232
|
-
const accessToken = encodeURIComponent(access_token);
|
|
233
|
-
return `baidu-${accessToken}-${userAgent}-${fs_id}`;
|
|
234
|
-
}
|
|
235
|
-
}
|
|
236
|
-
exports.BaiduFetcher = BaiduFetcher;
|
|
237
|
-
// Lazily create and reuse a single BaiduFetcher for the whole process.
const getBaiduFetcher = (dataSource) => {
    if (instance) {
        return instance;
    }
    instance = new BaiduFetcher(dataSource);
    return instance;
};
exports.getBaiduFetcher = getBaiduFetcher;
|
|
@@ -1,3 +0,0 @@
|
|
|
1
|
-
import { BaiduPreviewBufferParams } from '../common/types';
/** Fetch a byte-range chunk of a Baidu file, disk-cached under `cache_folder` in the 'preview' bucket. */
export declare const getBaiduPreviewBuffer: (params: BaiduPreviewBufferParams) => Promise<Buffer<ArrayBuffer>>;
/** Same as the preview variant, but cached in the 'download' bucket. */
export declare const getBaiduDownloadBuffer: (params: BaiduPreviewBufferParams) => Promise<Buffer<ArrayBuffer>>;
|
package/dist/baidu/get-buffer.js
DELETED
|
@@ -1,41 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.getBaiduDownloadBuffer = exports.getBaiduPreviewBuffer = void 0;
|
|
4
|
-
const common_1 = require("../common/common");
|
|
5
|
-
const fetch_buffer_1 = require("./fetch-buffer");
|
|
6
|
-
const low_store_1 = require("../common/low-store");
|
|
7
|
-
const entities_1 = require("@soga/entities");
|
|
8
|
-
// Fetch one byte-range chunk of a Baidu-hosted file, with an optional
// on-disk cache keyed by (part_md5, start, end) under `cache_folder`.
// `cache_type` selects the cache bucket ('preview' or 'download').
//
// Fix (review): the original obtained the FetchUrl repository and ran
// `rep.find({})` — loading the entire table — without ever using the result.
// Dead code plus a wasteful full-table query; removed. (The now-unused
// file-level require of @soga/entities is left in place.)
const getBaiduBuffer = async ({ dataSource, record_id, start, end, total, part_md5, access_token, host_id, host_vip_type, fs_id, cache_folder, abort_controller, }, cache_type) => {
    let store = null;
    const key = (0, common_1.getKey)({ part_md5, start, end });
    if (cache_folder) {
        // Cache hit short-circuits the network fetch entirely.
        store = await (0, low_store_1.getLowStore)(cache_folder, cache_type);
        const bufferCache = await store.readBuffer(key);
        if (bufferCache) {
            return bufferCache;
        }
    }
    const buffer = await (0, fetch_buffer_1.fetchBaiduBuffer)({
        dataSource,
        record_id,
        start,
        end,
        total,
        access_token,
        host_id,
        host_vip_type,
        fs_id,
        part_md5,
        abort_controller,
    });
    // Only persist non-empty results into the cache.
    if (buffer && store) {
        await store.writeBuffer(key, buffer);
    }
    return buffer;
};
|
|
38
|
-
// Thin wrappers selecting the cache bucket for getBaiduBuffer.
const getBaiduPreviewBuffer = async (params) => {
    return getBaiduBuffer(params, 'preview');
};
exports.getBaiduPreviewBuffer = getBaiduPreviewBuffer;
const getBaiduDownloadBuffer = async (params) => {
    return getBaiduBuffer(params, 'download');
};
exports.getBaiduDownloadBuffer = getBaiduDownloadBuffer;
|
package/dist/common/common.d.ts
DELETED
package/dist/common/common.js
DELETED
|
@@ -1,8 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.getKey = exports.bufferCache = void 0;
|
|
4
|
-
exports.bufferCache = {};
|
|
5
|
-
// Cache key for a byte-range chunk of a file part: "<md5>-<start>-<end>".
const getKey = ({ part_md5, start, end, }) => `${part_md5}-${start}-${end}`;
|
|
8
|
-
exports.getKey = getKey;
|
|
@@ -1,19 +0,0 @@
|
|
|
1
|
-
import { LowType } from '@soga/bridge';
import { CacheType } from './types';
/**
 * Disk-backed chunk cache: buffers are written as individual files under
 * `<folder_path>/<cache_type>_cache`, with a JSON db (`cache.json`) tracking
 * which keys are present.
 */
export declare class LowStore {
    folder_path: string;
    cache_path: string;
    db: LowType<DbDataType>;
    cache_type: CacheType;
    constructor(folder_path: string, cache_type: CacheType);
    /** Open/create the backing `cache.json` db. Must run before other methods. */
    init(): Promise<void>;
    /** Read the presence flag stored for `key`. */
    read(key: string): Promise<boolean>;
    /** Set the flag for `key` and flush the db to disk. */
    write(key: string, value: boolean): Promise<void>;
    /** Remove `key` from the db and flush. */
    delete(key: string): Promise<void>;
    /** Persist `buffer` to disk and mark `key` present. */
    writeBuffer(key: string, buffer: Buffer<ArrayBuffer | ArrayBufferLike>): Promise<void>;
    /** Read the cached buffer for `key`; resolves null when absent. */
    readBuffer(key: string): Promise<NonSharedBuffer>;
    /** Remove the key from the db and best-effort delete the backing file. */
    deleteBuffer(key: string): Promise<void>;
}
/** Get (or lazily create and initialise) a cached LowStore instance. */
export declare const getLowStore: (folder_path: string, cache_type: CacheType) => Promise<LowStore>;
type DbDataType = Record<string, boolean>;
export {};
|
package/dist/common/low-store.js
DELETED
|
@@ -1,91 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.getLowStore = exports.LowStore = void 0;
|
|
4
|
-
const bridge_1 = require("@soga/bridge");
|
|
5
|
-
const fs_extra_1 = require("fs-extra");
|
|
6
|
-
const path_1 = require("path");
|
|
7
|
-
// Disk-backed chunk cache: each buffer is a file under
// `<folder_path>/<cache_type>_cache`, with a JSON "low" db recording which
// keys exist.
class LowStore {
    folder_path;
    cache_path;
    db;
    cache_type;
    constructor(folder_path, cache_type) {
        this.folder_path = folder_path;
        this.cache_type = cache_type;
        this.cache_path = (0, path_1.resolve)(folder_path, `${cache_type}_cache`);
    }
    // Open/create the backing cache.json db; must run before other methods.
    async init() {
        this.db = await (0, bridge_1.getDb)((0, path_1.resolve)(this.cache_path, 'cache.json'), {});
    }
    async read(key) {
        return this.db.data[key];
    }
    async write(key, value) {
        this.db.data[key] = value;
        await this.db.write();
    }
    async delete(key) {
        delete this.db.data[key];
        await this.db.write();
    }
    // Write the file first, then record the key so the db never points at a
    // file that was not fully written.
    async writeBuffer(key, buffer) {
        await (0, fs_extra_1.writeFile)((0, path_1.resolve)(this.cache_path, key), buffer);
        this.db.data[key] = true;
        await this.db.write();
    }
    // Resolves null when the key is not recorded in the db.
    async readBuffer(key) {
        if (!this.db.data[key]) {
            return null;
        }
        return await (0, fs_extra_1.readFile)((0, path_1.resolve)(this.cache_path, key));
    }
    // Drop the db entry, then best-effort delete the backing file
    // (removal errors are deliberately swallowed).
    async deleteBuffer(key) {
        if (!this.db.data[key])
            return;
        delete this.db.data[key];
        await this.db.write();
        try {
            await (0, fs_extra_1.remove)((0, path_1.resolve)(this.cache_path, key));
        }
        catch (e) {
        }
    }
}
exports.LowStore = LowStore;
|
|
62
|
-
const storeMap = {};
let lastCleanTime = 0;
// Get (or lazily create and initialise) a shared LowStore; entries unused
// for 8 hours are swept at most once per 30 minutes.
//
// Fixes (review):
// 1) The map was keyed by folder_path alone, so a second call with the same
//    folder but a different cache_type silently got the wrong store. The key
//    now includes cache_type.
// 2) If store.init() threw, the entry stayed with inited:false forever and
//    every later caller for that key spun in the wait loop. The entry is now
//    removed on init failure and the error rethrown.
const getLowStore = async (folder_path, cache_type) => {
    const cacheKey = `${folder_path}:${cache_type}`;
    // Wait out an in-flight init; the entry disappears if that init failed.
    while (storeMap[cacheKey] && !storeMap[cacheKey].inited) {
        await new Promise((resolve) => setTimeout(resolve, 20));
    }
    const existing = storeMap[cacheKey];
    if (existing) {
        existing.updated_at = Date.now();
        const now = Date.now();
        if (now - lastCleanTime > 30 * 60000) {
            lastCleanTime = now;
            Object.keys(storeMap).forEach((key) => {
                if (now - storeMap[key].updated_at > 8 * 60 * 60 * 1000) {
                    delete storeMap[key];
                }
            });
        }
        return existing.store;
    }
    const store = new LowStore(folder_path, cache_type);
    storeMap[cacheKey] = {
        updated_at: Date.now(),
        store,
        inited: false,
    };
    try {
        await store.init();
    }
    catch (err) {
        // Don't strand waiters on a dead entry.
        delete storeMap[cacheKey];
        throw err;
    }
    storeMap[cacheKey].inited = true;
    return store;
};
exports.getLowStore = getLowStore;
|
package/dist/common/types.d.ts
DELETED
|
@@ -1,29 +0,0 @@
|
|
|
1
|
-
import { DataSource } from 'typeorm';
/** Fields shared by all provider byte-range fetch requests. */
interface CommonBufferParams {
    dataSource: DataSource;
    record_id: number;
    part_md5: string;
    start: number;
    end: number;
    total: number;
    host_id: number;
    host_vip_type: number;
    access_token: string;
    abort_controller?: AbortController;
    ua?: string;
}
/** Baidu identifies a file by its numeric fs_id. */
export interface BaiduBufferParams extends CommonBufferParams {
    fs_id: number;
}
/** Aliyun identifies a file by drive_id + file_id. */
export interface AliBufferParams extends CommonBufferParams {
    drive_id: string;
    file_id: string;
}
/** Baidu request that additionally enables the on-disk chunk cache. */
export interface BaiduPreviewBufferParams extends BaiduBufferParams {
    cache_folder: string;
}
/** Aliyun request that additionally enables the on-disk chunk cache. */
export interface AliPreviewBufferParams extends AliBufferParams {
    cache_folder: string;
}
/** Which cache bucket a chunk belongs to. */
export type CacheType = 'preview' | 'download';
export {};
|
package/dist/common/types.js
DELETED