@donotdev/firebase 0.0.4 → 0.0.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/client/imageProcessing.d.ts +95 -0
- package/dist/client/imageProcessing.d.ts.map +1 -0
- package/dist/client/imageProcessing.js +1 -0
- package/dist/client/index.d.ts +2 -0
- package/dist/client/index.d.ts.map +1 -1
- package/dist/client/index.js +1 -1
- package/dist/client/storage.d.ts +167 -0
- package/dist/client/storage.d.ts.map +1 -0
- package/dist/client/storage.js +1 -0
- package/dist/server/batch.js +1 -1
- package/dist/server/index.d.ts +1 -0
- package/dist/server/index.d.ts.map +1 -1
- package/dist/server/index.js +1 -1
- package/dist/server/init.js +1 -1
- package/dist/server/subscription.js +1 -1
- package/dist/server/uniqueness.js +1 -1
- package/dist/server/utils.js +1 -1
- package/dist/server/validation.js +1 -1
- package/dist/shared/transform.d.ts.map +1 -1
- package/dist/shared/transform.js +1 -1
- package/dist/shared/utils.d.ts.map +1 -1
- package/dist/shared/utils.js +1 -1
- package/package.json +3 -2
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Standard image dimensions for processing
|
|
3
|
+
*/
|
|
4
|
+
export interface ImageDimensions {
|
|
5
|
+
width: number;
|
|
6
|
+
height: number;
|
|
7
|
+
}
|
|
8
|
+
/**
|
|
9
|
+
* Default dimensions for images
|
|
10
|
+
*/
|
|
11
|
+
export declare const DEFAULT_DIMENSIONS: {
|
|
12
|
+
/** Full-size image dimensions (4:3 aspect ratio) */
|
|
13
|
+
readonly full: {
|
|
14
|
+
readonly width: 1600;
|
|
15
|
+
readonly height: 1200;
|
|
16
|
+
};
|
|
17
|
+
/** Thumbnail dimensions (4:3 aspect ratio) */
|
|
18
|
+
readonly thumbnail: {
|
|
19
|
+
readonly width: 320;
|
|
20
|
+
readonly height: 240;
|
|
21
|
+
};
|
|
22
|
+
/** Medium dimensions for cards/previews */
|
|
23
|
+
readonly medium: {
|
|
24
|
+
readonly width: 800;
|
|
25
|
+
readonly height: 600;
|
|
26
|
+
};
|
|
27
|
+
};
|
|
28
|
+
/**
|
|
29
|
+
* Options for image processing
|
|
30
|
+
*/
|
|
31
|
+
export interface ImageProcessingOptions {
|
|
32
|
+
/** Target dimensions */
|
|
33
|
+
dimensions: ImageDimensions;
|
|
34
|
+
/** WebP quality (0-1, default: 0.9) */
|
|
35
|
+
quality?: number;
|
|
36
|
+
/** Maximum file size in MB before compression (default: 2) */
|
|
37
|
+
maxSizeMB?: number;
|
|
38
|
+
/** Fill background color (default: transparent) */
|
|
39
|
+
backgroundColor?: string;
|
|
40
|
+
/** Whether to maintain aspect ratio (default: true) */
|
|
41
|
+
maintainAspectRatio?: boolean;
|
|
42
|
+
}
|
|
43
|
+
/**
|
|
44
|
+
* Result of image processing
|
|
45
|
+
*/
|
|
46
|
+
export interface ProcessedImage {
|
|
47
|
+
/** Blob of the processed image */
|
|
48
|
+
blob: Blob;
|
|
49
|
+
/** MIME type (always image/webp) */
|
|
50
|
+
mimeType: 'image/webp';
|
|
51
|
+
/** Final dimensions */
|
|
52
|
+
dimensions: ImageDimensions;
|
|
53
|
+
}
|
|
54
|
+
/**
|
|
55
|
+
* Creates a standardized WebP image from a source file
|
|
56
|
+
* @param file - Source file to process
|
|
57
|
+
* @param options - Processing options
|
|
58
|
+
* @returns Processed image as WebP blob
|
|
59
|
+
*/
|
|
60
|
+
export declare function processImage(file: File, options: ImageProcessingOptions): Promise<ProcessedImage>;
|
|
61
|
+
/**
|
|
62
|
+
* Creates full-size and thumbnail versions of an image
|
|
63
|
+
* @param file - Source file to process
|
|
64
|
+
* @param options - Optional custom dimensions
|
|
65
|
+
* @returns Object with full and thumbnail processed images
|
|
66
|
+
*/
|
|
67
|
+
export declare function processImageWithThumbnail(file: File, options?: {
|
|
68
|
+
fullDimensions?: ImageDimensions;
|
|
69
|
+
thumbnailDimensions?: ImageDimensions;
|
|
70
|
+
quality?: number;
|
|
71
|
+
maxSizeMB?: number;
|
|
72
|
+
}): Promise<{
|
|
73
|
+
full: ProcessedImage;
|
|
74
|
+
thumbnail: ProcessedImage;
|
|
75
|
+
}>;
|
|
76
|
+
/**
|
|
77
|
+
* Validates an image file before processing
|
|
78
|
+
* @param file - File to validate
|
|
79
|
+
* @param options - Validation options
|
|
80
|
+
* @returns Validation result
|
|
81
|
+
*/
|
|
82
|
+
export declare function validateImageFile(file: File, options?: {
|
|
83
|
+
maxSizeMB?: number;
|
|
84
|
+
allowedTypes?: string[];
|
|
85
|
+
}): {
|
|
86
|
+
valid: boolean;
|
|
87
|
+
error?: string;
|
|
88
|
+
};
|
|
89
|
+
/**
|
|
90
|
+
* Creates a data URL preview of an image file
|
|
91
|
+
* @param file - File to preview
|
|
92
|
+
* @returns Data URL string
|
|
93
|
+
*/
|
|
94
|
+
export declare function createImagePreview(file: File): Promise<string>;
|
|
95
|
+
//# sourceMappingURL=imageProcessing.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"imageProcessing.d.ts","sourceRoot":"","sources":["../../src/client/imageProcessing.ts"],"names":[],"mappings":"AAYA;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;CAChB;AAED;;GAEG;AACH,eAAO,MAAM,kBAAkB;IAC7B,oDAAoD;;;;;IAEpD,8CAA8C;;;;;IAE9C,2CAA2C;;;;;CAEnC,CAAC;AAEX;;GAEG;AACH,MAAM,WAAW,sBAAsB;IACrC,wBAAwB;IACxB,UAAU,EAAE,eAAe,CAAC;IAC5B,uCAAuC;IACvC,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,8DAA8D;IAC9D,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,mDAAmD;IACnD,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,uDAAuD;IACvD,mBAAmB,CAAC,EAAE,OAAO,CAAC;CAC/B;AAED;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B,kCAAkC;IAClC,IAAI,EAAE,IAAI,CAAC;IACX,oCAAoC;IACpC,QAAQ,EAAE,YAAY,CAAC;IACvB,uBAAuB;IACvB,UAAU,EAAE,eAAe,CAAC;CAC7B;AAED;;;;;GAKG;AACH,wBAAsB,YAAY,CAChC,IAAI,EAAE,IAAI,EACV,OAAO,EAAE,sBAAsB,GAC9B,OAAO,CAAC,cAAc,CAAC,CAiGzB;AAED;;;;;GAKG;AACH,wBAAsB,yBAAyB,CAC7C,IAAI,EAAE,IAAI,EACV,OAAO,CAAC,EAAE;IACR,cAAc,CAAC,EAAE,eAAe,CAAC;IACjC,mBAAmB,CAAC,EAAE,eAAe,CAAC;IACtC,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB,GACA,OAAO,CAAC;IACT,IAAI,EAAE,cAAc,CAAC;IACrB,SAAS,EAAE,cAAc,CAAC;CAC3B,CAAC,CAmBD;AAED;;;;;GAKG;AACH,wBAAgB,iBAAiB,CAC/B,IAAI,EAAE,IAAI,EACV,OAAO,CAAC,EAAE;IACR,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,YAAY,CAAC,EAAE,MAAM,EAAE,CAAC;CACzB,GACA;IAAE,KAAK,EAAE,OAAO,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAAE,CAwBpC;AAED;;;;GAIG;AACH,wBAAgB,kBAAkB,CAAC,IAAI,EAAE,IAAI,GAAG,OAAO,CAAC,MAAM,CAAC,CAa9D"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
import v from"browser-image-compression";const f={full:{width:1600,height:1200},thumbnail:{width:320,height:240},medium:{width:800,height:600}};async function p(a,t){const{dimensions:e,quality:i=.9,maxSizeMB:o=2,backgroundColor:d,maintainAspectRatio:y=!0}=t;return new Promise(async(x,l)=>{try{const b=await v(a,{maxSizeMB:o,useWebWorker:!0}),n=new Image;n.onload=()=>{try{const r=document.createElement("canvas");r.width=e.width,r.height=e.height;const s=r.getContext("2d");if(!s){l(new Error("Could not get canvas context"));return}d?(s.fillStyle=d,s.fillRect(0,0,r.width,r.height)):s.clearRect(0,0,r.width,r.height);let g=0,w=0,h=e.width,c=e.height;if(y){const m=Math.min(e.width/n.width,e.height/n.height);h=n.width*m,c=n.height*m,g=(e.width-h)/2,w=(e.height-c)/2}s.drawImage(n,g,w,h,c),r.toBlob(m=>{m?x({blob:m,mimeType:"image/webp",dimensions:e}):l(new Error("Failed to convert canvas to blob"))},"image/webp",i)}catch(r){l(r)}},n.onerror=()=>{l(new Error("Failed to load image"))},n.src=URL.createObjectURL(b)}catch(u){l(u)}})}async function M(a,t){const e=t?.fullDimensions??f.full,i=t?.thumbnailDimensions??f.thumbnail,[o,d]=await Promise.all([p(a,{dimensions:e,quality:t?.quality,maxSizeMB:t?.maxSizeMB}),p(a,{dimensions:i,quality:t?.quality??.8,maxSizeMB:1})]);return{full:o,thumbnail:d}}function B(a,t){const{maxSizeMB:e=10,allowedTypes:i=["image/jpeg","image/png","image/webp","image/gif"]}=t??{};if(!i.includes(a.type))return{valid:!1,error:`Invalid file type. Allowed: ${i.join(", ")}`};const o=a.size/(1024*1024);return o>e?{valid:!1,error:`File too large (${o.toFixed(2)}MB). Maximum: ${e}MB`}:{valid:!0}}function I(a){return new Promise((t,e)=>{const i=new FileReader;i.onload=()=>{typeof i.result=="string"?t(i.result):e(new Error("Failed to read file"))},i.onerror=()=>e(new Error("Failed to read file")),i.readAsDataURL(a)})}export{f as DEFAULT_DIMENSIONS,I as createImagePreview,p as processImage,M as processImageWithThumbnail,B as validateImageFile};
|
package/dist/client/index.d.ts
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/client/index.ts"],"names":[],"mappings":"AAEA;;;;;;GAMG;AAGH,cAAc,OAAO,CAAC;AAGtB,cAAc,aAAa,CAAC;AAG5B,cAAc,aAAa,CAAC"}
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/client/index.ts"],"names":[],"mappings":"AAEA;;;;;;GAMG;AAGH,cAAc,OAAO,CAAC;AAGtB,cAAc,aAAa,CAAC;AAG5B,cAAc,aAAa,CAAC;AAG5B,cAAc,WAAW,CAAC;AAG1B,cAAc,mBAAmB,CAAC"}
|
package/dist/client/index.js
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
export*from"./sdk";export*from"./firestore";export*from"./functions";
|
|
1
|
+
export*from"./sdk";export*from"./firestore";export*from"./functions";export*from"./storage";export*from"./imageProcessing";
|
|
@@ -0,0 +1,167 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @fileoverview Firebase Storage client SDK utilities
|
|
3
|
+
* @description Browser-safe Storage operations for file uploads and downloads
|
|
4
|
+
* @version 0.0.1
|
|
5
|
+
* @since 0.0.1
|
|
6
|
+
* @author AMBROISE PARK Consulting
|
|
7
|
+
*/
|
|
8
|
+
import { type StorageReference, type UploadTask, type UploadMetadata } from 'firebase/storage';
|
|
9
|
+
/**
|
|
10
|
+
* Options for file upload
|
|
11
|
+
*/
|
|
12
|
+
export interface UploadOptions {
|
|
13
|
+
/** Base path in storage (e.g., 'images', 'documents') */
|
|
14
|
+
basePath: string;
|
|
15
|
+
/** Optional subfolder (e.g., entity ID) */
|
|
16
|
+
subfolder?: string;
|
|
17
|
+
/** Custom filename (default: timestamp + original name) */
|
|
18
|
+
filename?: string;
|
|
19
|
+
/** Maximum file size in bytes (default: 10MB) */
|
|
20
|
+
maxSize?: number;
|
|
21
|
+
}
|
|
22
|
+
/**
|
|
23
|
+
* Result of a file upload
|
|
24
|
+
*/
|
|
25
|
+
export interface UploadResult {
|
|
26
|
+
/** Download URL of the uploaded file */
|
|
27
|
+
url: string;
|
|
28
|
+
/** Full storage path */
|
|
29
|
+
path: string;
|
|
30
|
+
/** Original filename */
|
|
31
|
+
filename: string;
|
|
32
|
+
}
|
|
33
|
+
/**
|
|
34
|
+
* Progress callback for resumable uploads
|
|
35
|
+
*/
|
|
36
|
+
export interface UploadProgressCallback {
|
|
37
|
+
(progress: {
|
|
38
|
+
bytesTransferred: number;
|
|
39
|
+
totalBytes: number;
|
|
40
|
+
progress: number;
|
|
41
|
+
}): void;
|
|
42
|
+
}
|
|
43
|
+
/**
|
|
44
|
+
* Options for resumable file upload with progress tracking
|
|
45
|
+
*/
|
|
46
|
+
export interface UploadResumableOptions extends UploadOptions {
|
|
47
|
+
/** Progress callback */
|
|
48
|
+
onProgress?: UploadProgressCallback;
|
|
49
|
+
/** Upload metadata (content type, cache control, etc.) */
|
|
50
|
+
metadata?: UploadMetadata;
|
|
51
|
+
}
|
|
52
|
+
/**
|
|
53
|
+
* Uploads a single file to Firebase Storage with progress tracking
|
|
54
|
+
* @param file - File or Blob to upload
|
|
55
|
+
* @param options - Upload options with progress callback
|
|
56
|
+
* @returns Upload task and promise resolving to upload result
|
|
57
|
+
*/
|
|
58
|
+
export declare function uploadFileResumable(file: File | Blob, options: UploadResumableOptions): {
|
|
59
|
+
task: UploadTask;
|
|
60
|
+
promise: Promise<UploadResult>;
|
|
61
|
+
};
|
|
62
|
+
/**
|
|
63
|
+
* Uploads a single file to Firebase Storage
|
|
64
|
+
* @param file - File to upload
|
|
65
|
+
* @param options - Upload options
|
|
66
|
+
* @returns Upload result with URL and path
|
|
67
|
+
* @throws Error if file exceeds max size
|
|
68
|
+
*/
|
|
69
|
+
export declare function uploadFile(file: File, options: UploadOptions): Promise<UploadResult>;
|
|
70
|
+
/**
|
|
71
|
+
* Uploads multiple files to Firebase Storage
|
|
72
|
+
* @param files - Array of files to upload
|
|
73
|
+
* @param options - Upload options (same for all files)
|
|
74
|
+
* @returns Array of upload results
|
|
75
|
+
*/
|
|
76
|
+
export declare function uploadFiles(files: File[], options: UploadOptions): Promise<UploadResult[]>;
|
|
77
|
+
/**
|
|
78
|
+
* Deletes a file from Firebase Storage by URL
|
|
79
|
+
* @param url - Full download URL of the file
|
|
80
|
+
*/
|
|
81
|
+
export declare function deleteFileByUrl(url: string): Promise<void>;
|
|
82
|
+
/**
|
|
83
|
+
* Deletes a file from Firebase Storage by path
|
|
84
|
+
* @param path - Storage path of the file
|
|
85
|
+
*/
|
|
86
|
+
export declare function deleteFileByPath(path: string): Promise<void>;
|
|
87
|
+
/**
|
|
88
|
+
* Deletes multiple files by their URLs
|
|
89
|
+
* @param urls - Array of download URLs
|
|
90
|
+
*/
|
|
91
|
+
export declare function deleteFilesByUrl(urls: string[]): Promise<void>;
|
|
92
|
+
/**
|
|
93
|
+
* Lists all files in a storage path
|
|
94
|
+
* @param path - Storage path to list
|
|
95
|
+
* @returns Array of storage references
|
|
96
|
+
*/
|
|
97
|
+
export declare function listFiles(path: string): Promise<StorageReference[]>;
|
|
98
|
+
/**
|
|
99
|
+
* Gets download URLs for all files in a path
|
|
100
|
+
* @param path - Storage path to list
|
|
101
|
+
* @returns Array of download URLs
|
|
102
|
+
*/
|
|
103
|
+
export declare function getFileUrls(path: string): Promise<string[]>;
|
|
104
|
+
/**
|
|
105
|
+
* Gets download URL for a file at a specific storage path
|
|
106
|
+
* @param path - Storage path of the file
|
|
107
|
+
* @returns Download URL
|
|
108
|
+
*/
|
|
109
|
+
export declare function getFileUrl(path: string): Promise<string>;
|
|
110
|
+
/**
|
|
111
|
+
* Creates a storage reference for a given path
|
|
112
|
+
* @param path - Storage path
|
|
113
|
+
* @returns Storage reference
|
|
114
|
+
*/
|
|
115
|
+
export declare function createStorageRef(path: string): StorageReference;
|
|
116
|
+
/**
|
|
117
|
+
* Deletes all files in a storage path
|
|
118
|
+
* @param path - Storage path to clear
|
|
119
|
+
*/
|
|
120
|
+
export declare function deleteAllFiles(path: string): Promise<void>;
|
|
121
|
+
/**
|
|
122
|
+
* Uploads an image file with common defaults
|
|
123
|
+
* @param file - Image file to upload
|
|
124
|
+
* @param entityType - Type of entity (e.g., 'cars', 'products')
|
|
125
|
+
* @param entityId - Optional entity ID for organization
|
|
126
|
+
* @returns Upload result
|
|
127
|
+
*/
|
|
128
|
+
export declare function uploadImage(file: File, entityType: string, entityId?: string): Promise<UploadResult>;
|
|
129
|
+
/**
|
|
130
|
+
* Uploads multiple images
|
|
131
|
+
* @param files - Image files to upload
|
|
132
|
+
* @param entityType - Type of entity (e.g., 'cars', 'products')
|
|
133
|
+
* @param entityId - Optional entity ID for organization
|
|
134
|
+
* @returns Array of upload results
|
|
135
|
+
*/
|
|
136
|
+
export declare function uploadImages(files: File[], entityType: string, entityId?: string): Promise<UploadResult[]>;
|
|
137
|
+
/**
|
|
138
|
+
* Result of a processed image upload (full + thumbnail)
|
|
139
|
+
*/
|
|
140
|
+
export interface ProcessedUploadResult {
|
|
141
|
+
/** Full-size image URL */
|
|
142
|
+
fullUrl: string;
|
|
143
|
+
/** Thumbnail image URL */
|
|
144
|
+
thumbUrl: string;
|
|
145
|
+
/** Full-size image path */
|
|
146
|
+
fullPath: string;
|
|
147
|
+
/** Thumbnail image path */
|
|
148
|
+
thumbPath: string;
|
|
149
|
+
}
|
|
150
|
+
/**
|
|
151
|
+
* Uploads an image with WebP conversion and thumbnail generation
|
|
152
|
+
* Requires browser-image-compression package
|
|
153
|
+
* @param file - Image file to upload
|
|
154
|
+
* @param entityType - Type of entity (e.g., 'cars', 'products')
|
|
155
|
+
* @param entityId - Optional entity ID for organization
|
|
156
|
+
* @returns Object with full and thumbnail URLs
|
|
157
|
+
*/
|
|
158
|
+
export declare function uploadProcessedImage(file: File, entityType: string, entityId?: string): Promise<ProcessedUploadResult>;
|
|
159
|
+
/**
|
|
160
|
+
* Uploads multiple images with WebP conversion and thumbnails
|
|
161
|
+
* @param files - Image files to upload
|
|
162
|
+
* @param entityType - Type of entity
|
|
163
|
+
* @param entityId - Optional entity ID
|
|
164
|
+
* @returns Array of processed upload results
|
|
165
|
+
*/
|
|
166
|
+
export declare function uploadProcessedImages(files: File[], entityType: string, entityId?: string): Promise<ProcessedUploadResult[]>;
|
|
167
|
+
//# sourceMappingURL=storage.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"storage.d.ts","sourceRoot":"","sources":["../../src/client/storage.ts"],"names":[],"mappings":"AAEA;;;;;;GAMG;AAEH,OAAO,EAQL,KAAK,gBAAgB,EACrB,KAAK,UAAU,EACf,KAAK,cAAc,EACpB,MAAM,kBAAkB,CAAC;AAE1B;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B,yDAAyD;IACzD,QAAQ,EAAE,MAAM,CAAC;IACjB,2CAA2C;IAC3C,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,2DAA2D;IAC3D,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,iDAAiD;IACjD,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B,wCAAwC;IACxC,GAAG,EAAE,MAAM,CAAC;IACZ,wBAAwB;IACxB,IAAI,EAAE,MAAM,CAAC;IACb,wBAAwB;IACxB,QAAQ,EAAE,MAAM,CAAC;CAClB;AA8BD;;GAEG;AACH,MAAM,WAAW,sBAAsB;IACrC,CAAC,QAAQ,EAAE;QACT,gBAAgB,EAAE,MAAM,CAAC;QACzB,UAAU,EAAE,MAAM,CAAC;QACnB,QAAQ,EAAE,MAAM,CAAC;KAClB,GAAG,IAAI,CAAC;CACV;AAED;;GAEG;AACH,MAAM,WAAW,sBAAuB,SAAQ,aAAa;IAC3D,wBAAwB;IACxB,UAAU,CAAC,EAAE,sBAAsB,CAAC;IACpC,0DAA0D;IAC1D,QAAQ,CAAC,EAAE,cAAc,CAAC;CAC3B;AAED;;;;;GAKG;AACH,wBAAgB,mBAAmB,CACjC,IAAI,EAAE,IAAI,GAAG,IAAI,EACjB,OAAO,EAAE,sBAAsB,GAC9B;IACD,IAAI,EAAE,UAAU,CAAC;IACjB,OAAO,EAAE,OAAO,CAAC,YAAY,CAAC,CAAC;CAChC,CAuCA;AAED;;;;;;GAMG;AACH,wBAAsB,UAAU,CAC9B,IAAI,EAAE,IAAI,EACV,OAAO,EAAE,aAAa,GACrB,OAAO,CAAC,YAAY,CAAC,CAsBvB;AAED;;;;;GAKG;AACH,wBAAsB,WAAW,CAC/B,KAAK,EAAE,IAAI,EAAE,EACb,OAAO,EAAE,aAAa,GACrB,OAAO,CAAC,YAAY,EAAE,CAAC,CAGzB;AAED;;;GAGG;AACH,wBAAsB,eAAe,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAgBhE;AAED;;;GAGG;AACH,wBAAsB,gBAAgB,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAQlE;AAED;;;GAGG;AACH,wBAAsB,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAGpE;AAED;;;;GAIG;AACH,wBAAsB,SAAS,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,gBAAgB,EAAE,CAAC,CAKzE;AAED;;;;GAIG;AACH,wBAAsB,WAAW,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,CAIjE;AAED;;;;GAIG;AACH,wBAAsB,UAAU,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAI9D;AAED;;;;GAIG;AACH,wBAAgB,gBAAgB,CAAC,IAAI,EAAE,MAAM,GAAG,gBAAgB,CAG/D;AAED;;;GAGG;AACH,wBAAsB,cAAc,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAIhE;AAMD;;;;;;GAMG;AACH,wBAAsB,WAAW,CAC/B,IAAI,EAAE,IAAI,EACV,UAAU,EAAE,MAAM,EAClB,QAA
Q,CAAC,EAAE,MAAM,GAChB,OAAO,CAAC,YAAY,CAAC,CAMvB;AAED;;;;;;GAMG;AACH,wBAAsB,YAAY,CAChC,KAAK,EAAE,IAAI,EAAE,EACb,UAAU,EAAE,MAAM,EAClB,QAAQ,CAAC,EAAE,MAAM,GAChB,OAAO,CAAC,YAAY,EAAE,CAAC,CAMzB;AAMD;;GAEG;AACH,MAAM,WAAW,qBAAqB;IACpC,0BAA0B;IAC1B,OAAO,EAAE,MAAM,CAAC;IAChB,0BAA0B;IAC1B,QAAQ,EAAE,MAAM,CAAC;IACjB,2BAA2B;IAC3B,QAAQ,EAAE,MAAM,CAAC;IACjB,2BAA2B;IAC3B,SAAS,EAAE,MAAM,CAAC;CACnB;AAED;;;;;;;GAOG;AACH,wBAAsB,oBAAoB,CACxC,IAAI,EAAE,IAAI,EACV,UAAU,EAAE,MAAM,EAClB,QAAQ,CAAC,EAAE,MAAM,GAChB,OAAO,CAAC,qBAAqB,CAAC,CAuChC;AAED;;;;;;GAMG;AACH,wBAAsB,qBAAqB,CACzC,KAAK,EAAE,IAAI,EAAE,EACb,UAAU,EAAE,MAAM,EAClB,QAAQ,CAAC,EAAE,MAAM,GAChB,OAAO,CAAC,qBAAqB,EAAE,CAAC,CAKlC"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
import{getStorage as i,ref as r,uploadBytes as g,uploadBytesResumable as B,getDownloadURL as f,deleteObject as w,listAll as S}from"firebase/storage";const h=10*1024*1024;function U(t){return t.replace(/[^a-zA-Z0-9.-]/g,"_")}function P(t,e){const a=Date.now(),o=U(e||t);return`${a}_${o}`}function F(t,e){const a=[e.basePath];return e.subfolder&&a.push(e.subfolder),a.push(t),a.join("/")}function D(t,e){const a=e.maxSize??h,o=t instanceof File?t.name:"blob";if(t.size>a)throw new Error(`File size (${(t.size/1024/1024).toFixed(2)}MB) exceeds maximum allowed (${(a/1024/1024).toFixed(2)}MB)`);const s=i(),n=P(o,e.filename),l=F(n,e),d=r(s,l),u=e.metadata||{},m=B(d,t,u);e.onProgress&&m.on("state_changed",c=>{const b=c.bytesTransferred/c.totalBytes*100;e.onProgress({bytesTransferred:c.bytesTransferred,totalBytes:c.totalBytes,progress:b})});const p=m.then(async()=>({url:await f(d),path:l,filename:o}));return{task:m,promise:p}}async function y(t,e){const a=e.maxSize??h;if(t.size>a)throw new Error(`File size (${(t.size/1024/1024).toFixed(2)}MB) exceeds maximum allowed (${(a/1024/1024).toFixed(2)}MB)`);const o=i(),s=P(t.name,e.filename),n=F(s,e),l=r(o,n);return await g(l,t),{url:await f(l),path:n,filename:t.name}}async function _(t,e){const a=t.map(o=>y(o,e));return Promise.all(a)}async function M(t){try{const e=i(),o=new URL(t).pathname.match(/\/o\/(.+)\?/);if(o&&o[1]){const s=decodeURIComponent(o[1]),n=r(e,s);await w(n)}}catch{}}async function E(t){try{const e=i(),a=r(e,t);await w(a)}catch{}}async function I(t){const e=t.map(a=>M(a));await Promise.all(e)}async function $(t){const e=i(),a=r(e,t);return(await S(a)).items}async function L(t){const a=(await $(t)).map(o=>f(o));return Promise.all(a)}async function N(t){const e=i(),a=r(e,t);return f(a)}function O(t){const e=i();return r(e,t)}async function T(t){const a=(await $(t)).map(o=>w(o));await Promise.all(a)}async function C(t,e,a){return y(t,{basePath:e,subfolder:a,maxSize:5*1024*1024})}async function Z(t,e,a){return 
_(t,{basePath:e,subfolder:a,maxSize:5*1024*1024})}async function A(t,e,a){const{processImageWithThumbnail:o}=await import("./imageProcessing"),{full:s,thumbnail:n}=await o(t),l=i(),u=`${Date.now()}_${t.name.replace(/\.[^/.]+$/,"")}`,m=a?`${e}/${a}/${u}_full.webp`:`${e}/${u}_full.webp`,p=a?`${e}/${a}/${u}_thumb.webp`:`${e}/${u}_thumb.webp`,c={contentType:"image/webp",cacheControl:"public, max-age=2592000"},b=r(l,m),x=r(l,p);await Promise.all([g(b,s.blob,c),g(x,n.blob,c)]);const[z,R]=await Promise.all([f(b),f(x)]);return{fullUrl:z,thumbUrl:R,fullPath:m,thumbPath:p}}async function k(t,e,a){const o=t.map(s=>A(s,e,a));return Promise.all(o)}export{O as createStorageRef,T as deleteAllFiles,E as deleteFileByPath,M as deleteFileByUrl,I as deleteFilesByUrl,N as getFileUrl,L as getFileUrls,$ as listFiles,y as uploadFile,D as uploadFileResumable,_ as uploadFiles,C as uploadImage,Z as uploadImages,A as uploadProcessedImage,k as uploadProcessedImages};
|
package/dist/server/batch.js
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
import*as E from"valibot";import{
|
|
1
|
+
import*as E from"valibot";import{handleError as m}from"@donotdev/core/server";import{getServerFirestore as x}from"./utils";import{prepareForFirestore as W,transformFirestoreData as $}from"../shared/transform";const p=typeof window<"u",i={async batchWrite(e,t,r,n,w={}){if(!t||!t.length)return{successes:0,failures:0};const{idField:s="id",generateIds:y=!0,maxBatchSize:I=450,removeFields:l=[],validate:v=!!n}=w,c={successes:0,failures:0,failedItems:[]};try{const f=[],d=await x();let a=d.batch(),h=0;for(let b=0;b<t.length;b++){let u=t[b];if(!u){c.failures++,c.failedItems?.push({index:b,error:new Error("Item is null or undefined")});continue}try{if(r==="delete"){if(!u[s])throw m(new Error(`Missing ID for delete operation (${s})`),{userMessage:"Unable to delete item: missing ID",severity:"error",context:{collection:e,idField:s,operation:r}});const o=d.collection(e).doc(u[s]);a.delete(o)}else{if(v&&n)try{E.parse(n,u)}catch(F){throw m(F,{userMessage:"Validation failed for item",severity:"warning",context:{collection:e,operation:r,idField:s,itemId:u[s]}})}let o=u[s];if(!o&&r==="create"&&y)o=d.collection(e).doc().id,u={...u,[s]:o};else if(!o)throw m(new Error(`Missing ID for ${r} operation (${s})`),{userMessage:`Unable to ${r} item: missing ID`,severity:"error",context:{collection:e,idField:s,operation:r}});const M=[s,...l],C=W(u,M),D=d.collection(e).doc(o);r==="create"?a.create(D,C):r==="update"&&a.update(D,C)}c.successes++,h++,h>=I&&(f.push(a),a=d.batch(),h=0)}catch(o){c.failures++,c.failedItems?.push({index:b,error:o instanceof Error?o:new Error(String(o))})}}return h>0&&f.push(a),f.length>0&&await Promise.all(f.map(b=>b.commit())),c}catch(f){throw m(f,{userMessage:`Batch ${r} operation failed for collection ${e}`,context:{collection:e,operation:r,itemCount:t.length,processedCount:c.successes+c.failures,successCount:c.successes,failureCount:c.failures}})}},async batchCreate(e,t,r,n={}){return i.batchWrite(e,t,"create",r,n)},async batchUpdate(e,t,r,n={}){return 
i.batchWrite(e,t,"update",r,n)},async batchDelete(e,t,r={}){if(t.length>0&&typeof t[0]=="string"){const n=r.idField||"id",w=t.map(s=>({[n]:s}));return i.batchWrite(e,w,"delete",void 0,r)}return i.batchWrite(e,t,"delete",void 0,r)},async runTransaction(e){try{return await(await x()).runTransaction(e)}catch(t){throw m(t,{userMessage:"Transaction failed"})}},async bulkGet(e,t,r={}){if(!t.length)return[];const{maxBatchSize:n=450,transform:w=!0}=r;try{const s=[];for(let l=0;l<t.length;l+=n)s.push(t.slice(l,l+n));const y=s.map(async l=>{const f=await(await x()).collection(e).where("__name__","in",l).get(),d=new Map;return f.forEach(a=>{d.set(a.id,a.data())}),l.map(a=>{const h=d.get(a);return h?w?$({id:a,...h}):{id:a,...h}:null})});return(await Promise.all(y)).flat()}catch(s){throw m(s,{userMessage:`Bulk get operation failed for collection ${e}`,context:{collection:e,idCount:t.length}})}}},g=e=>()=>{throw new Error(`Firebase Admin function '${e}' is not available in browser environments`)},k=p?g("batchWrite"):i.batchWrite,R=p?g("batchCreate"):i.batchCreate,S=p?g("batchUpdate"):i.batchUpdate,G=p?g("batchDelete"):i.batchDelete,_=p?g("runTransaction"):i.runTransaction,z=p?g("bulkGet"):i.bulkGet;export{R as batchCreate,G as batchDelete,S as batchUpdate,k as batchWrite,z as bulkGet,_ as runTransaction};
|
package/dist/server/index.d.ts
CHANGED
|
@@ -11,5 +11,6 @@ export * from './batch';
|
|
|
11
11
|
export * from './subscription';
|
|
12
12
|
export * from './validation';
|
|
13
13
|
export * from './uniqueness';
|
|
14
|
+
export * from '../shared/transform';
|
|
14
15
|
export { FieldValue, Query, Timestamp } from 'firebase-admin/firestore';
|
|
15
16
|
//# sourceMappingURL=index.d.ts.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/server/index.ts"],"names":[],"mappings":"AAEA;;;;;;GAMG;AAEH,cAAc,QAAQ,CAAC;AACvB,cAAc,SAAS,CAAC;AACxB,cAAc,SAAS,CAAC;AACxB,cAAc,gBAAgB,CAAC;AAC/B,cAAc,cAAc,CAAC;AAC7B,cAAc,cAAc,CAAC;AAG7B,OAAO,EAAE,UAAU,EAAE,KAAK,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC"}
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/server/index.ts"],"names":[],"mappings":"AAEA;;;;;;GAMG;AAEH,cAAc,QAAQ,CAAC;AACvB,cAAc,SAAS,CAAC;AACxB,cAAc,SAAS,CAAC;AACxB,cAAc,gBAAgB,CAAC;AAC/B,cAAc,cAAc,CAAC;AAC7B,cAAc,cAAc,CAAC;AAG7B,cAAc,qBAAqB,CAAC;AAGpC,OAAO,EAAE,UAAU,EAAE,KAAK,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC"}
|
package/dist/server/index.js
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
export*from"./init";export*from"./utils";export*from"./batch";export*from"./subscription";export*from"./validation";export*from"./uniqueness";import{FieldValue as
|
|
1
|
+
export*from"./init";export*from"./utils";export*from"./batch";export*from"./subscription";export*from"./validation";export*from"./uniqueness";export*from"../shared/transform";import{FieldValue as i,Query as l,Timestamp as u}from"firebase-admin/firestore";export{i as FieldValue,l as Query,u as Timestamp};
|
package/dist/server/init.js
CHANGED
|
@@ -1,2 +1,2 @@
|
|
|
1
|
-
import{initializeApp as n,getApps as t,cert as o}from"firebase-admin/app";import{getAuth as s}from"firebase-admin/auth";import{getFirestore as c}from"firebase-admin/firestore";import{getFunctions as a}from"firebase-admin/functions";import{
|
|
1
|
+
import{initializeApp as n,getApps as t,cert as o}from"firebase-admin/app";import{getAuth as s}from"firebase-admin/auth";import{getFirestore as c}from"firebase-admin/firestore";import{getFunctions as a}from"firebase-admin/functions";import{handleError as p,createSingleton as A}from"@donotdev/core/server";function E(){const e=t();let i=null;if(e.length===0?process.env.FIREBASE_ADMIN_PRIVATE_KEY&&process.env.FIREBASE_PROJECT_ID&&process.env.FIREBASE_CLIENT_EMAIL?i=n({credential:o({projectId:process.env.FIREBASE_PROJECT_ID,clientEmail:process.env.FIREBASE_CLIENT_EMAIL,privateKey:process.env.FIREBASE_ADMIN_PRIVATE_KEY.replace(/\\n/g,`
|
|
2
2
|
`)}),projectId:process.env.FIREBASE_PROJECT_ID}):i=n():i=e[0]||null,!i)throw new Error("Failed to initialize Firebase Admin app");return{app:i,auth:s(i),firestore:c(i),functions:a(i)}}const r=A(E);async function I(){try{return r()}catch(e){throw p(e,{userMessage:"Failed to initialize Firebase Admin",context:{provider:"FirebaseAdmin",operation:"initFirebaseAdmin"},severity:"error"})}}async function v(){const e=r();if(!e.app)throw new Error("Firebase Admin not initialized");return e.app}function _(){const e=r();if(!e.auth)throw new Error("Firebase Admin auth not initialized");return e.auth}function b(){const e=r();if(!e.firestore)throw new Error("Firebase Admin firestore not initialized");return e.firestore}function h(){const e=r();if(!e.functions)throw new Error("Firebase Admin functions not initialized");return e.functions}export{v as getFirebaseAdminApp,_ as getFirebaseAdminAuth,b as getFirebaseAdminFirestore,h as getFirebaseAdminFunctions,I as initFirebaseAdmin};
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{
|
|
1
|
+
import{handleError as l}from"@donotdev/core/server";import{getServerFirestore as i}from"./utils";import{prepareForFirestore as f,transformFirestoreData as w}from"../shared/transform";import{handleFirebaseError as u}from"../shared/utils";async function m(t,a={}){const{collection:r="subscriptions",userIdField:e="customerId"}=a;try{const n=await(await i()).collection(r).where(e,"==",t).where("status","==","active").limit(1).get();if(n.empty)return null;const c=n.docs[0];if(!c)return null;const d=c.data();return w({id:c.id,...d})}catch(o){throw u(o,"getActiveSubscription")}}async function I(t,a={}){const{collection:r="subscriptions",userIdField:e="customerId"}=a;try{return(await(await i()).collection(r).where(e,"==",t).orderBy("createdAt","desc").get()).docs.map(c=>w({id:c.id,...c.data()}))}catch(o){throw u(o,"getUserSubscriptions")}}async function p(t,a={}){const{collection:r="subscriptions"}=a;try{const e=await i(),o=new Date().toISOString();t.createdAt||(t.createdAt=o),t.updatedAt=o;const s=f(t),{id:n,...c}=s,d=n?e.collection(r).doc(n):e.collection(r).doc();return await d.set(c,{merge:!0}),{...t,id:d.id}}catch(e){throw u(e,"saveSubscription")}}async function h(t,a,r={}){const{collection:e="subscriptions"}=r;try{await(await i()).collection(e).doc(t).update({status:a,updatedAt:new Date().toISOString()})}catch(o){throw u(o,"updateSubscriptionStatus")}}async function v(t,a,r={}){try{const e=await m(t,r);return e?e.features&&Array.isArray(e.features)?e.features.includes(a):e.metadata&&e.metadata[a]?e.metadata[a]==="true":!1:!1}catch(e){return l(e,{context:{function:"hasFeatureAccess"}}),!1}}async function F(t,a={}){const r=t.type,e=t.data.object;try{switch(r){case"customer.subscription.created":case"customer.subscription.updated":await y(e,a);break;case"customer.subscription.deleted":await h(e.id,"canceled",a);break;default:}}catch(o){throw l(o,{userMessage:`Failed to handle Stripe webhook event: ${r}`,severity:"error",context:{eventType:r,eventId:t.id}})}}async function 
y(t,a={}){const r={id:t.id,customerId:t.customer,status:t.status,planId:t.items.data[0]?.plan.id||"",currentPeriodStart:new Date(t.current_period_start*1e3).toISOString(),currentPeriodEnd:new Date(t.current_period_end*1e3).toISOString(),cancelAtPeriodEnd:t.cancel_at_period_end,metadata:t.metadata||{},createdAt:new Date(t.created*1e3).toISOString(),updatedAt:new Date().toISOString()};t.canceled_at&&(r.canceledAt=new Date(t.canceled_at*1e3).toISOString()),await p(r,a)}export{m as getActiveSubscription,I as getUserSubscriptions,F as handleStripeWebhookEvent,v as hasFeatureAccess,p as saveSubscription,h as updateSubscriptionStatus};
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{
|
|
1
|
+
import{handleError as l}from"@donotdev/core/server";import{getServerFirestore as d}from"./utils";const u={};function f(t){u.uniqueConstraintValidator=t}function h(){return u.uniqueConstraintValidator}function k(){return!!u.uniqueConstraintValidator}async function p(){try{const t=await d(),i={checkDuplicate:async(a,r,e,s)=>{if(e==null)return!1;try{return(await t.collection(a).where(r,"==",e).get()).docs.some(o=>o.id!==s&&o.data()[r]===e)}catch(n){throw l(n,{userMessage:`Failed to check uniqueness for field "${r}"`,severity:"error",context:{collection:a,field:r,value:e}})}}};return f(i),i}catch{return{checkDuplicate:async()=>!1}}}async function y(){try{const t=await d(),i={checkDuplicate:async(a,r,e,s)=>{if(e==null)return!1;try{return(await t.collection(a).where(r,"==",e).get()).docs.some(o=>o.id!==s&&o.data()[r]===e)}catch(n){throw l(n,{userMessage:`Failed to check uniqueness for field "${r}"`,severity:"error",context:{collection:a,field:r,value:e}})}}};return f(i),i}catch{return{checkDuplicate:async()=>!1}}}async function m(t,i,a,r){if(!i.length)return;let e=h();if(e||(typeof window<"u"?e=await y():e=await p()),!e)return;const s=[];if(await Promise.all(i.map(async({field:n,errorMessage:c})=>{const o=a[n];if(o==null)return;await e.checkDuplicate(t,n,o,r)&&s.push({field:n,message:c||`The ${n} must be unique`})})),s.length>0)throw l(new Error("Uniqueness validation failed"),{userMessage:"Uniqueness validation failed",severity:"error",context:{validationErrors:s,collection:t}})}export{y as createFirestoreClientValidator,p as createFirestoreValidator,h as getUniqueConstraintValidator,k as hasUniqueConstraintValidator,f as registerUniqueConstraintValidator,m as validateUniqueness};
|
package/dist/server/utils.js
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
import{
|
|
1
|
+
import { handleError } from "@donotdev/core/server";

/**
 * Lazily loads the firebase-admin module, normalizing both default-export
 * and namespace-export shapes.
 * @throws a wrapped error when firebase-admin cannot be imported.
 */
async function getServerAdmin() {
  try {
    const mod = await import("firebase-admin");
    return mod.default || mod;
  } catch (err) {
    throw handleError(err, "Error getting Firebase Admin");
  }
}

/**
 * Lazily obtains the Admin SDK Firestore instance.
 * @throws a wrapped error when firebase-admin/firestore cannot be imported.
 */
async function getServerFirestore() {
  try {
    const firestoreModule = await import("firebase-admin/firestore");
    return firestoreModule.getFirestore();
  } catch (err) {
    throw handleError(err, "Error getting Admin Firestore");
  }
}

/**
 * Generates a fresh Firestore-style document id without writing anything,
 * using a throwaway document reference in the "_" collection.
 */
async function generateServerId() {
  const db = await getServerFirestore();
  return db.collection("_").doc().id;
}

export { generateServerId, getServerAdmin, getServerFirestore };
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import*as n from"valibot";import{
|
|
1
|
+
import * as valibot from "valibot";
import { handleError } from "@donotdev/core/server";
import {
  validateUniqueness,
  registerUniqueConstraintValidator,
  getUniqueConstraintValidator,
  hasUniqueConstraintValidator,
  createFirestoreValidator,
  createFirestoreClientValidator,
} from "./uniqueness";

/**
 * Validates `data` against an enhanced valibot `schema` in three stages:
 *   1. structural validation via valibot's parse,
 *   2. uniqueness checks when schema.metadata.uniqueFields is populated,
 *   3. an optional schema.metadata.customValidate(data, context) hook.
 *
 * @param schema    A valibot schema, optionally carrying a `metadata` object
 *                  (see enhanceSchema).
 * @param data      The document data to validate.
 * @param context   Opaque value forwarded to customValidate.
 * @param excludeId Optional document id ignored during uniqueness checks.
 * @throws a wrapped "Document validation failed" error; schema/uniqueness
 *         issues are reported with severity "warning" and the issue list,
 *         anything else with severity "error".
 */
async function validateFirestoreDocument(schema, data, context, excludeId) {
  try {
    valibot.parse(schema, data);
    if (schema.metadata?.uniqueFields?.length) {
      await validateUniqueness(
        schema.metadata.collection,
        schema.metadata.uniqueFields,
        data,
        excludeId
      );
    }
    if (schema.metadata?.customValidate) {
      await schema.metadata.customValidate(data, context);
    }
  } catch (err) {
    // Validation-shaped errors (valibot or uniqueness) carry an issue list.
    const isValidationError = err.name === "ValibotError" || err.issues;
    if (isValidationError) {
      throw handleError(err, {
        userMessage: "Document validation failed",
        context: { validationErrors: err.issues || [], data },
        severity: "warning",
      });
    }
    throw handleError(err, {
      userMessage: "Document validation failed",
      context: { collection: schema.metadata?.collection || "unknown" },
      severity: "error",
    });
  }
}

/** Attaches `metadata` to `schema` in place and returns the same schema. */
function enhanceSchema(schema, metadata) {
  schema.metadata = metadata;
  return schema;
}

export {
  createFirestoreClientValidator,
  createFirestoreValidator,
  enhanceSchema,
  getUniqueConstraintValidator,
  hasUniqueConstraintValidator,
  registerUniqueConstraintValidator,
  validateFirestoreDocument,
  validateUniqueness,
};
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"transform.d.ts","sourceRoot":"","sources":["../../src/shared/transform.ts"],"names":[],"mappings":"AAEA;;;;;;GAMG;AAEH,OAAO,KAAK,EAAE,SAAS,EAAE,kBAAkB,EAAE,MAAM,gBAAgB,CAAC;
|
|
1
|
+
{"version":3,"file":"transform.d.ts","sourceRoot":"","sources":["../../src/shared/transform.ts"],"names":[],"mappings":"AAEA;;;;;;GAMG;AAEH,OAAO,KAAK,EAAE,SAAS,EAAE,kBAAkB,EAAE,MAAM,gBAAgB,CAAC;AAGpE;;;;;;;;;GASG;AACH,wBAAgB,eAAe,CAAC,IAAI,EAAE,IAAI,GAAG,kBAAkB,CAY9D;AAED;;;;;;;;;GASG;AACH,wBAAgB,WAAW,CAAC,IAAI,EAAE,MAAM,GAAG,IAAI,GAAG,kBAAkB,CAyBnE;AAED;;;;;;;;;GASG;AACH,wBAAgB,oBAAoB,CAAC,IAAI,EAAE,SAAS,GAAG,MAAM,CAoC5D;AAED;;;;;;;;GAQG;AACH,wBAAgB,WAAW,CAAC,KAAK,EAAE,GAAG,GAAG,KAAK,IAAI,kBAAkB,CASnE;AAmBD;;;;;;;;;GASG;AACH,wBAAgB,wBAAwB,CAAC,CAAC,GAAG,GAAG,EAAE,IAAI,EAAE,CAAC,GAAG,CAAC,CAoB5D;AAED;;;;;;;;;GASG;AACH,wBAAgB,sBAAsB,CAAC,CAAC,GAAG,GAAG,EAC5C,IAAI,EAAE,CAAC,EACP,kBAAkB,GAAE,OAAe,GAClC,CAAC,CA0CH;AAED;;;;;;;;;GASG;AACH,wBAAgB,mBAAmB,CAAC,CAAC,GAAG,GAAG,EACzC,IAAI,EAAE,CAAC,EACP,YAAY,GAAE,MAAM,EAAO,GAC1B,CAAC,CAsDH;AAED;;;;;;;;;GASG;AACH,wBAAgB,4BAA4B,CAAC,CAAC,SAAS,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,EACxE,QAAQ,EAAE,CAAC,EACX,OAAO,EAAE,OAAO,CAAC,CAAC,CAAC,GAClB,OAAO,CAAC,CAAC,CAAC,CAsBZ"}
|
package/dist/shared/transform.js
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
import{handleError as
|
|
1
|
+
import { handleError } from "@donotdev/core/server";

// Strict ISO-8601 UTC pattern used throughout: YYYY-MM-DDTHH:MM:SS[.mmm]Z.
// NOTE(review): offsets (+02:00) and other fraction lengths are rejected —
// confirm this narrowness is intentional.
const ISO_DATE_RE = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d{3})?Z$/;

/**
 * Builds a Firestore-Timestamp-like plain object from a Date.
 * `seconds`/`nanoseconds` are captured at creation time; the methods
 * re-read the Date on each call, matching the original behavior.
 */
function createTimestamp(date) {
  const millis = date.getTime();
  return {
    seconds: Math.floor(millis / 1e3),
    nanoseconds: (millis % 1e3) * 1e6,
    toDate: () => new Date(date),
    toMillis: () => date.getTime(),
    isEqual: (other) =>
      other.seconds === Math.floor(date.getTime() / 1e3) &&
      other.nanoseconds === (date.getTime() % 1e3) * 1e6,
    valueOf: () =>
      `Timestamp(seconds=${Math.floor(date.getTime() / 1e3)}, nanoseconds=${(date.getTime() % 1e3) * 1e6})`,
  };
}

/**
 * Converts a date string or Date into a Timestamp-like object.
 * @throws a wrapped error for invalid strings, invalid Dates, or other types.
 */
function toTimestamp(value) {
  try {
    if (typeof value === "string") {
      const parsed = new Date(value);
      if (isNaN(parsed.getTime())) {
        throw handleError(new Error("Invalid date string format"), {
          context: { value },
        });
      }
      return createTimestamp(parsed);
    }
    if (value instanceof Date) {
      if (isNaN(value.getTime())) {
        throw handleError(new Error("Invalid Date object"), {
          context: { value },
        });
      }
      return createTimestamp(value);
    }
    throw handleError(new Error("Invalid date value type"), {
      context: { valueType: typeof value },
    });
  } catch (err) {
    throw handleError(err, "Failed to convert to Timestamp");
  }
}

/**
 * Converts a Date, Timestamp-like object (anything with toDate()), or date
 * string into an ISO-8601 string.
 * @throws a wrapped error for invalid or unsupported values.
 */
function firestoreToISOString(value) {
  try {
    if (value instanceof Date) {
      if (isNaN(value.getTime())) {
        throw handleError(new Error("Invalid Date object"), {
          context: { value },
        });
      }
      return value.toISOString();
    }
    if (
      typeof value === "object" &&
      value !== null &&
      "toDate" in value &&
      typeof value.toDate === "function"
    ) {
      return value.toDate().toISOString();
    }
    if (typeof value === "string") {
      const parsed = new Date(value);
      if (isNaN(parsed.getTime())) {
        throw handleError(new Error("Invalid date string format"), {
          context: { value },
        });
      }
      return parsed.toISOString();
    }
    throw handleError(new Error("Invalid date value type"), {
      context: { valueType: typeof value },
    });
  } catch (err) {
    throw handleError(err, "Failed to convert to ISO string");
  }
}

/** Duck-types a Firestore Timestamp: toDate() plus seconds/nanoseconds. */
function isTimestamp(value) {
  return (
    typeof value === "object" &&
    value !== null &&
    "toDate" in value &&
    typeof value.toDate === "function" &&
    "seconds" in value &&
    "nanoseconds" in value
  );
}

/** True when `value` is a parseable strict ISO-8601 UTC string. */
function isISODateString(value) {
  if (typeof value !== "string" || !ISO_DATE_RE.test(value)) return false;
  return !isNaN(new Date(value).getTime());
}

/**
 * Recursively converts ISO date strings inside a value to Date objects.
 * Arrays and plain objects are walked; other values pass through.
 */
function convertISOStringsToDates(value) {
  if (!value) return value;
  if (Array.isArray(value)) {
    return value.map((item) => convertISOStringsToDates(item));
  }
  if (typeof value === "string" && isISODateString(value)) {
    return new Date(value);
  }
  // FIX: Date instances previously fell into the plain-object branch below
  // and were flattened to {} (Dates have no enumerable own properties).
  if (value instanceof Date) return value;
  if (typeof value === "object" && value !== null) {
    const out = {};
    for (const [key, child] of Object.entries(value)) {
      out[key] = convertISOStringsToDates(child);
    }
    return out;
  }
  return value;
}

/**
 * Recursively transforms Firestore data into JSON-safe values: Timestamps
 * become ISO strings; when `isDocumentSnapshot` is true, a snapshot-shaped
 * value (id/ref/data()) is unwrapped to { id, ...data }.
 */
function transformFirestoreData(data, isDocumentSnapshot = false) {
  if (!data) return data;
  if (
    isDocumentSnapshot &&
    typeof data === "object" &&
    data !== null &&
    "id" in data &&
    "ref" in data &&
    "data" in data &&
    typeof data.data === "function"
  ) {
    const snapshot = data;
    const contents = snapshot.data();
    return { id: snapshot.id, ...transformFirestoreData(contents) };
  }
  if (Array.isArray(data)) {
    return data.map((item) => transformFirestoreData(item));
  }
  if (isTimestamp(data)) return firestoreToISOString(data);
  // FIX: raw Date values previously hit the plain-object branch and became
  // {}; serialize them like Timestamps for a JSON-safe result.
  if (data instanceof Date) return data.toISOString();
  if (typeof data === "object" && data !== null) {
    const out = {};
    for (const [key, child] of Object.entries(data)) {
      out[key] = transformFirestoreData(child);
    }
    return out;
  }
  return data;
}

/**
 * Recursively prepares data for writing to Firestore: ISO strings and Dates
 * become Timestamp-like objects; `excludeFields` keys and undefined values
 * are dropped from objects.
 */
function prepareForFirestore(data, excludeFields = []) {
  if (!data) return data;
  if (Array.isArray(data)) {
    return data.map((item) => prepareForFirestore(item, excludeFields));
  }
  if (typeof data === "string" && ISO_DATE_RE.test(data)) {
    try {
      return toTimestamp(data);
    } catch {
      return data; // best-effort: keep the original string on failure
    }
  }
  if (data instanceof Date) {
    try {
      return toTimestamp(data);
    } catch {
      return data;
    }
  }
  if (typeof data === "object" && data !== null) {
    const out = {};
    for (const [key, child] of Object.entries(data)) {
      if (excludeFields.includes(key)) continue;
      if (child !== void 0) out[key] = prepareForFirestore(child, excludeFields);
    }
    return out;
  }
  return data;
}

/**
 * Computes a partial-update object: keys of `updates` that are new or whose
 * JSON serialization differs from `original`, plus a fresh `updatedAt`.
 * NOTE: JSON.stringify comparison is order-sensitive for object values.
 */
function createFirestorePartialUpdate(original, updates) {
  const changed = {};
  for (const [key, value] of Object.entries(updates)) {
    if (!(key in original)) {
      changed[key] = value;
      continue;
    }
    if (JSON.stringify(original[key]) !== JSON.stringify(value)) {
      changed[key] = value;
    }
  }
  return { ...changed, updatedAt: new Date().toISOString() };
}

export {
  convertISOStringsToDates,
  createFirestorePartialUpdate,
  createTimestamp,
  firestoreToISOString,
  isTimestamp,
  prepareForFirestore,
  toTimestamp,
  transformFirestoreData,
};
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../../src/shared/utils.ts"],"names":[],"mappings":"AAEA;;;;;;;GAOG;AAEH,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,gBAAgB,CAAC;AAsB1D;;;;;;GAMG;AACH,eAAO,MAAM,KAAK,sBAAmD,CAAC;AAEtE;;;;;;GAMG;AACH,eAAO,MAAM,iBAAiB,aAEgB,CAAC;AAE/C;;;;;;GAMG;AACH,eAAO,MAAM,UAAU,aAEgB,CAAC;AAExC;;;;;;;;GAQG;AACH,wBAAgB,qBAAqB,CACnC,OAAO,CAAC,EAAE,mBAAmB,GAC5B,eAAe,
|
|
1
|
+
{"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../../src/shared/utils.ts"],"names":[],"mappings":"AAEA;;;;;;;GAOG;AAEH,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,gBAAgB,CAAC;AAsB1D;;;;;;GAMG;AACH,eAAO,MAAM,KAAK,sBAAmD,CAAC;AAEtE;;;;;;GAMG;AACH,eAAO,MAAM,iBAAiB,aAEgB,CAAC;AAE/C;;;;;;GAMG;AACH,eAAO,MAAM,UAAU,aAEgB,CAAC;AAExC;;;;;;;;GAQG;AACH,wBAAgB,qBAAqB,CACnC,OAAO,CAAC,EAAE,mBAAmB,GAC5B,eAAe,CA+BjB;AAED;;;;;;;;;GASG;AACH,wBAAgB,mBAAmB,CAAC,KAAK,EAAE,GAAG,EAAE,OAAO,CAAC,EAAE,MAAM,GAAG,KAAK,CAKvE;AAED;;;;;;;;;;GAUG;AACH,wBAAsB,wBAAwB,CAAC,CAAC,EAC9C,SAAS,EAAE,MAAM,EACjB,EAAE,EAAE,MAAM,OAAO,CAAC,CAAC,CAAC,EACpB,OAAO,GAAE;IACP,KAAK,CAAC,EAAE,OAAO,CAAC;IAChB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,UAAU,CAAC,EAAE,MAAM,CAAC;CAChB,GACL,OAAO,CAAC,CAAC,CAAC,CA+BZ"}
|
package/dist/shared/utils.js
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
import{handleError as
|
|
1
|
+
import { handleError } from "@donotdev/core/server";

// True when running in a browser (window is defined).
const isBrowser = typeof window !== "undefined";

/**
 * Factory for stubs of server-only Firebase Admin helpers: calling the stub
 * always throws a descriptive wrapped error.
 */
const createServerOnlyStub = (fnName) => () => {
  throw handleError(
    new Error(
      `Firebase Admin function '${fnName}' is not available in browser environments`
    ),
    {
      userMessage: "This operation requires server-side Firebase access",
      context: { function: fnName },
    }
  );
};

// NOTE(review): this ternary yields null in the browser and the throwing stub
// on the server, which looks inverted relative to the stub's error message —
// confirm intended behavior. (Behavior preserved here.)
const admin = isBrowser ? null : createServerOnlyStub("admin");
const getAdminFirestore = createServerOnlyStub("getAdminFirestore");
const generateId = createServerOnlyStub("generateId");

/**
 * Creates an AbortController that can abort on a timeout and/or mirror an
 * external AbortSignal. The timeout timer is cleared whenever the controller
 * aborts, whatever the cause.
 *
 * FIX: if options.externalSignal is already aborted, the returned controller
 * now aborts immediately; previously the "abort" listener was attached after
 * the event had fired, so the abort was silently lost.
 *
 * @param options optional { timeout?, abortKey?, externalSignal? }
 */
function createAbortController(options) {
  const controller = new AbortController();
  let timeoutId;

  if (options?.timeout) {
    timeoutId = setTimeout(() => {
      controller.abort(
        handleError(new Error("Request timeout exceeded"), {
          userMessage: "The operation timed out",
          context: { timeoutMs: options.timeout, abortKey: options.abortKey },
        })
      );
    }, options.timeout);
  }

  // Register cleanup before any possible immediate abort so the timer never
  // leaks.
  controller.signal.addEventListener("abort", () => {
    if (timeoutId !== void 0) clearTimeout(timeoutId);
  });

  const external = options?.externalSignal;
  if (external) {
    if (external.aborted) {
      controller.abort(external.reason || "External abort");
    } else {
      external.addEventListener("abort", () => {
        controller.abort(external.reason || "External abort");
      });
    }
  }

  return controller;
}

/** Wraps an error with a contextual "Error during <operation>" message. */
function handleFirebaseError(error, operation) {
  return handleError(error, {
    userMessage: operation ? `Error during ${operation}` : void 0,
    context: { operation },
  });
}

/**
 * Runs `fn`, optionally retrying transient failures with exponential backoff
 * (delay = retryDelay * 2^(attempt-1)).
 *
 * @param operationName label used in error reporting.
 * @param fn async operation to execute.
 * @param options { retry=false, maxRetries=3, retryDelay=300 }
 * @throws a wrapped error after the final failed attempt.
 */
async function executeFirebaseOperation(operationName, fn, options = {}) {
  const { retry = false, maxRetries = 3, retryDelay = 300 } = options;
  let attempt = 0;
  for (;;) {
    try {
      return await fn();
    } catch (err) {
      attempt++;
      if (isRetryableError(err) && retry && attempt <= maxRetries) {
        const backoff = retryDelay * Math.pow(2, attempt - 1);
        await new Promise((resolve) => setTimeout(resolve, backoff));
        continue;
      }
      throw handleError(err, {
        userMessage: `Firebase operation "${operationName}" failed`,
        context: { operation: operationName, attempt, maxRetries, options },
      });
    }
  }
}

/**
 * Heuristic retry classifier: known-permanent DoNotDevError codes are never
 * retried; errors whose code mentions a transient condition are retried;
 * errors with no code default to retryable.
 */
function isRetryableError(error) {
  const permanentCodes = [
    "permission-denied",
    "invalid-argument",
    "already-exists",
    "not-found",
    "unauthenticated",
  ];
  if (error && error.name === "DoNotDevError" && permanentCodes.includes(error.code)) {
    return false;
  }
  if (error && error.code) {
    const transientHints = [
      "unavailable",
      "deadline",
      "cancel",
      "network",
      "timeout",
      "internal",
      "resource",
      "exhausted",
    ];
    return transientHints.some((hint) =>
      error.code.toLowerCase().includes(hint)
    );
  }
  return true;
}

export {
  admin,
  createAbortController,
  executeFirebaseOperation,
  generateId,
  getAdminFirestore,
  handleFirebaseError,
};
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@donotdev/firebase",
|
|
3
|
-
"version": "0.0.
|
|
3
|
+
"version": "0.0.6",
|
|
4
4
|
"private": false,
|
|
5
5
|
"type": "module",
|
|
6
6
|
"license": "SEE LICENSE IN LICENSE.md",
|
|
@@ -25,10 +25,11 @@
|
|
|
25
25
|
"type-check": "tsc --noEmit"
|
|
26
26
|
},
|
|
27
27
|
"dependencies": {
|
|
28
|
+
"browser-image-compression": "^2.0.2",
|
|
28
29
|
"valibot": "^1.2.0"
|
|
29
30
|
},
|
|
30
31
|
"peerDependencies": {
|
|
31
|
-
"@donotdev/core": "0.0.
|
|
32
|
+
"@donotdev/core": "^0.0.12",
|
|
32
33
|
"firebase": "^12.5.0",
|
|
33
34
|
"firebase-admin": "^13.6.0"
|
|
34
35
|
},
|