@bitblit/ratchet-aws 4.0.305-alpha → 4.0.307-alpha
- package/lib/index.mjs +1 -1
- package/package.json +5 -5
package/lib/index.mjs
CHANGED
@@ -1,2 +1,2 @@
-
import{DateTime as e}from"luxon";import{RequireRatchet as t,StringRatchet as a,ErrorRatchet as s,Logger as n,StopWatch as i,PromiseRatchet as r,WebStreamRatchet as o,MapRatchet as c,DurationRatchet as l,NumberRatchet as h,Base64Ratchet as u}from"@bitblit/ratchet-common";import{SubmitJobCommand as d,ListJobsCommand as m}from"@aws-sdk/client-batch";import{CloudWatchLogsClient as f,DescribeLogStreamsCommand as g,FilterLogEventsCommand as p,OrderBy as y,DeleteLogStreamCommand as w,DescribeLogGroupsCommand as b,DeleteLogGroupCommand as E,StartQueryCommand as T,GetQueryResultsCommand as S,StopQueryCommand as N}from"@aws-sdk/client-cloudwatch-logs";import{CloudWatchClient as C,PutMetricDataCommand as x,StandardUnit as v}from"@aws-sdk/client-cloudwatch";import{PutObjectCommand as D,GetObjectCommand as O,NoSuchKey as k,DeleteObjectCommand as F,NotFound as A,HeadObjectCommand as P,CopyObjectCommand as I,ListObjectsCommand as M,S3Client as U,ListObjectsV2Command as R}from"@aws-sdk/client-s3";import{Upload as j}from"@aws-sdk/lib-storage";import{getSignedUrl as B}from"@aws-sdk/s3-request-presigner";import{ScanCommand as K,QueryCommand as L,BatchWriteCommand as V,BatchGetCommand as _,PutCommand as G,GetCommand as W,UpdateCommand as q,DeleteCommand as z}from"@aws-sdk/lib-dynamodb";import{ConditionalCheckFailedException as J,ProvisionedThroughputExceededException as $,DeleteTableCommand as Q,CreateTableCommand as H,ListTablesCommand as Y,DescribeTableCommand as Z,ResourceNotFoundException as X,ReturnConsumedCapacity as ee}from"@aws-sdk/client-dynamodb";import{EC2Client as te,StopInstancesCommand as ae,StartInstancesCommand as se,DescribeInstancesCommand as ne}from"@aws-sdk/client-ec2";import{EC2InstanceConnectClient as ie,SendSSHPublicKeyCommand as re}from"@aws-sdk/client-ec2-instance-connect";import{DescribeRegistryCommand as oe,DescribeImagesCommand as ce,BatchDeleteImageCommand as le}from"@aws-sdk/client-ecr";import he from"lodash";import{SSMClient as ue,GetParameterCommand as de,ParameterNotFound as me}from"@aws-sdk/client-ssm";import{ChangeResourceRecordSetsCommand as fe,waitUntilResourceRecordSetsChanged as ge}from"@aws-sdk/client-route-53";import{WaiterState as pe}from"@smithy/util-waiter";import{SendRawEmailCommand as ye}from"@aws-sdk/client-ses";import{SNSClient as we,PublishCommand as be}from"@aws-sdk/client-sns";class Ee{batchRatchet;validTaskNames;constructor(e,a){this.batchRatchet=e,this.validTaskNames=a,t.notNullOrUndefined(this.batchRatchet,"batchRatchet"),t.notNullOrUndefined(this.batchRatchet.batchClient,"batchRatchet.batchClient"),t.notNullOrUndefined(this.batchRatchet.defaultJobDefinition,"batchRatchet.defaultJobDefinition"),t.notNullOrUndefined(this.batchRatchet.defaultQueueName,"batchRatchet.defaultQueueName")}async scheduleBackgroundTask(t,i={},r={}){!this.validTaskNames||!this.validTaskNames.length||a.trimToNull(t)&&this.validTaskNames.includes(t)||s.throwFormattedErr("Cannot start task %s - not found in valid task list",t),n.info("Submitting background task to AWS batch: %s %j %s",t,i,this.batchRatchet.defaultQueueName);let o=null;const c=`${this.batchRatchet.defaultJobDefinition}-${t}_${e.utc().toFormat("yyyy-MM-dd-HH-mm")}`,l={jobName:c,jobDefinition:this.batchRatchet.defaultJobDefinition,jobQueue:this.batchRatchet.defaultQueueName,parameters:{taskName:t,taskData:JSON.stringify(i),taskMetadata:JSON.stringify(r)}};try{o=await this.batchRatchet.scheduleJob(l),n.info("Job %s(%s) submitted",o.jobName,o.jobId)}catch(e){n.error("Cannot submit batch job taskName: %s jobDef: %s queue: %s 
jobName: %s data: %j",t,this.batchRatchet.defaultJobDefinition,this.batchRatchet.defaultQueueName,c,i,e)}return o}}class Te{_batchClient;_defaultQueueName;_defaultJobDefinition;constructor(e,t,a){this._batchClient=e,this._defaultQueueName=t,this._defaultJobDefinition=a}get batchClient(){return this._batchClient}get defaultQueueName(){return this._defaultQueueName}get defaultJobDefinition(){return this._defaultJobDefinition}async scheduleJob(e){n.info("Submitting batch job %s",e.jobName);try{const t=await this._batchClient.send(new d(e));return n.info("Job %s(%s) submitted",t.jobName,t.jobId),t}catch(t){n.error("Cannot submit batch job %s: %s",e.jobName,t)}return null}async jobCountInState(e,t=this.defaultQueueName){return(await this.listJobs(t,e)).length}async listJobs(e=this.defaultQueueName,a=null){t.notNullOrUndefined(e,"queueName");let s=[];const i={jobQueue:e,jobStatus:a,nextToken:null};n.info("Fetching %j",i);do{n.info("Pulling page...");const e=await this._batchClient.send(new m(i));s=s.concat(e.jobSummaryList),i.nextToken=e.nextToken}while(i.nextToken);return s}}class Se{constructor(){}static buildInformation(){return{version:"305",hash:"09159fd087a2abb029edd8f20abbf8b9cbf223b2",branch:"alpha-2024-01-18-4",tag:"alpha-2024-01-18-4",timeBuiltISO:"2024-01-18T22:18:29-0800",notes:"No notes"}}}class Ne{dynamo;opts;constructor(e,a){this.dynamo=e,this.opts=a,t.notNullOrUndefined(this.dynamo,"dynamo"),t.notNullOrUndefined(this.opts,"opts"),t.notNullOrUndefined(this.opts.tableName,"opts.tableName"),t.notNullOrUndefined(this.opts.hashKeyName,"opts.hashKeyName"),t.true(!this.opts.useRangeKeys||!!this.opts.rangeKeyName&&!!this.opts.hashKeyValue,"invalid range configuration")}static createDefaultOptions(){return{tableName:"simple-cache",useRangeKeys:!1,hashKeyName:"cache-key",rangeKeyName:null,hashKeyValue:null}}createKeyObject(e){const t={};return this.opts.useRangeKeys?(t[this.opts.hashKeyName]=this.opts.hashKeyValue,t[this.opts.rangeKeyName]=e):t[this.opts.hashKeyName]=e,t}cleanDynamoFieldsFromObjectInPlace(e){e&&(delete e[this.opts.hashKeyName],this.opts.rangeKeyName&&delete e[this.opts.rangeKeyName],this.opts.dynamoExpiresColumnName&&delete e[this.opts.dynamoExpiresColumnName])}extractKeysFromObject(e){let t=null;return e&&(t={},this.opts.useRangeKeys?(t[this.opts.hashKeyName]=this.opts.hashKeyValue,t[this.opts.rangeKeyName]=e.cacheKey):t[this.opts.hashKeyName]=e.cacheKey),t}async readFromCache(e){const t=this.createKeyObject(e),a=await this.dynamo.simpleGet(this.opts.tableName,t);return this.cleanDynamoFieldsFromObjectInPlace(a),a}async storeInCache(e){t.notNullOrUndefined(e,"value"),t.notNullOrUndefined(e.cacheKey,"value.cacheKey");const a=Object.assign({},e,this.createKeyObject(e.cacheKey));this.opts.dynamoExpiresColumnName&&e.expiresEpochMS&&(a[this.opts.dynamoExpiresColumnName]=Math.floor(e.expiresEpochMS/1e3));return!!await this.dynamo.simplePut(this.opts.tableName,a)}async removeFromCache(e){await this.dynamo.simpleDelete(this.opts.tableName,this.createKeyObject(e))}async clearCache(){const e=(await this.readAll()).map((e=>this.extractKeysFromObject(e)));return await this.dynamo.deleteAllInBatches(this.opts.tableName,e,25)}async readAll(){let e=null;if(this.opts.useRangeKeys){const t={TableName:this.opts.tableName,KeyConditionExpression:"#cacheKey = :cacheKey",ExpressionAttributeNames:{"#cacheKey":this.opts.hashKeyName},ExpressionAttributeValues:{":cacheKey":this.opts.hashKeyValue}};e=await this.dynamo.fullyExecuteQuery(t)}else{const t={TableName:this.opts.tableName};e=await 
this.dynamo.fullyExecuteScan(t)}return e.forEach((e=>this.cleanDynamoFieldsFromObjectInPlace(e))),e}}class Ce{s3CacheRatchet;prefix;constructor(e,a){this.s3CacheRatchet=e,this.prefix=a,t.notNullOrUndefined(this.s3CacheRatchet,"s3CacheRatchet"),t.notNullOrUndefined(this.s3CacheRatchet.getDefaultBucket(),"s3CacheRatchet.defaultBucket")}keyToPath(e){let t=a.trimToEmpty(this.prefix);return t.length>0&&!t.endsWith("/")&&(t+="/"),t+=e,t}async readFromCache(e){return await this.s3CacheRatchet.fetchCacheFileAsObject(this.keyToPath(e))}async storeInCache(e){t.notNullOrUndefined(e,"value"),t.notNullOrUndefined(e.cacheKey,"value.cacheKey");return!!await this.s3CacheRatchet.writeObjectToCacheFile(this.keyToPath(e.cacheKey),e)}async removeFromCache(e){await this.s3CacheRatchet.removeCacheFile(this.keyToPath(e))}async clearCache(){const e=await this.s3CacheRatchet.directChildrenOfPrefix(this.keyToPath(""));return await Promise.all(e.map((e=>this.removeFromCache(e)))),e.length}async readAll(){const e=await this.s3CacheRatchet.directChildrenOfPrefix(this.keyToPath(""));return await Promise.all(e.map((e=>this.readFromCache(e))))}}class xe{provider;defaultTimeToLiveMS;constructor(e,t=6e4){this.provider=e,this.defaultTimeToLiveMS=t}createDefaultReadOptions(){return{maxStalenessMS:null,timeToLiveMS:this.defaultTimeToLiveMS,cacheNullValues:!1}}async fetchWrapper(e,t,a=this.createDefaultReadOptions()){n.silly("Fetching %s",e);const s=(new Date).getTime();let i=await this.provider.readFromCache(e);if(i&&i.expiresEpochMS<s&&(n.debug("Object found, but expired - removing"),i=null),i&&a&&a.maxStalenessMS&&s-i.createdEpochMS>a.maxStalenessMS&&(n.debug("Object found by too stale - removing"),i=null),!i){n.debug("%s not found in cache, generating",e);const r=await t();(r||a?.cacheNullValues)&&(n.debug("Writing %j to cache"),i={cacheKey:e,createdEpochMS:s,expiresEpochMS:a&&a.timeToLiveMS?s+a.timeToLiveMS:null,value:r,generated:!1},await this.provider.storeInCache(i),i.generated=!0)}return i}async fetch(e,t,a=null){const s=await this.fetchWrapper(e,t,a);return s?s.value:null}async removeFromCache(e,t){let a=null;return t&&(a=await this.fetchWrapper(e,(()=>null))),await this.provider.removeFromCache(e),a}async clearCache(){return this.provider.clearCache()}async readAll(){return this.provider.readAll()}}class ve{logGroup;awsCWLogs;constructor(e,t=new f({region:"us-east-1"})){this.logGroup=e,this.awsCWLogs=t}get cloudWatchLogsClient(){return this.awsCWLogs}async readLogStreams(e=null,t=null){const a={logGroupName:this.logGroup,orderBy:"LastEventTime"},s=[];do{n.debug("Pulling more log streams (%d found so far)",s.length);const i=await this.awsCWLogs.send(new g(a));i.logStreams.forEach((a=>{null!==a.lastEventTimestamp&&(!e||a.lastEventTimestamp>=e)&&(!t||a.firstEventTimestamp<=t)&&s.push(a)})),a.nextToken=i.nextToken}while(a.nextToken);return n.debug("Found %d total, returning",s.length),s}async readLogStreamNames(e=null,t=null){return(await this.readLogStreams(e,t)).map((e=>e.logStreamName))}async readEvents(e,t=null,a=null,s=!0,r=null){const o=new i,c={logGroupName:this.logGroup,endTime:a,startTime:t};e&&(c.filterPattern=e),n.debug("Reading log events matching : %j",c);let l=[];do{n.debug("Pulling more log events (%d found so far) : %s",l.length,o.dump());const e=await this.awsCWLogs.send(new p(c));l=l.concat(e.events),c.nextToken=e.nextToken}while(c.nextToken&&(!r||l.length<r));return n.debug("Found %d total in %s",l.length,o.dump()),s&&(n.debug("Sorting events by timestamp"),l=l.sort(((e,t)=>{let 
a=e.timestamp-t.timestamp;return 0===a&&(a=e.message.localeCompare(t.message)),a}))),o.log(),l}}class De{static MAX_DELETE_RETRIES=5;cwLogs;constructor(e=null){this.cwLogs=e||new f({region:"us-east-1"})}get cloudWatchLogsClient(){return this.cwLogs}async removeEmptyOrOldLogStreams(e,t=1e3,a=null){n.info("Removing empty streams from %s, oldest event epoch MS : %d",e,a);const s={logGroupName:e,orderBy:y.LastEventTime},i=a||1;let o=0;const c=[],l=[];let h=10;do{n.debug("Executing search for streams");try{const e=await this.cwLogs.send(new g(s));o+=e.logStreams.length,n.debug("Found %d streams (%d so far, %d to delete)",e.logStreams.length,o,c.length);for(let a=0;a<e.logStreams.length&&c.length<t;a++){const t=e.logStreams[a];t.firstEventTimestamp?t.lastEventTimestamp<i&&c.push(t):c.push(t)}s.nextToken=e.nextToken}catch(e){const t=h;h=Math.min(1e3,1.5*h),n.info("Caught while describing %s, increasing wait between deletes (was %d, now %d)",e,t,h)}}while(s.nextToken&&c.length<t);n.info("Found %d streams to delete",c.length);let u=10;for(let t=0;t<c.length;t++){const a={logGroupName:e,logStreamName:c[t].logStreamName},s=0===c[t].storedBytes?"empty":"old";n.info("Removing %s stream %s",s,c[t].logStreamName);let i=!1,o=0;for(;!i&&o<De.MAX_DELETE_RETRIES;)try{await this.cwLogs.send(new w(a)),i=!0,await r.wait(u)}catch(e){o++;const t=u;u=Math.min(1e3,1.5*u),n.info("Caught %s, increasing wait between deletes and retrying (wait was %d, now %d) (Retry %d of %d)",e,t,u,o,De.MAX_DELETE_RETRIES)}i||l.push(c[t])}return n.warn("Failed to remove streams : %j",l),c}async findOldestEventTimestampInGroup(e){const t=await this.findStreamWithOldestEventInGroup(e);return t?t.firstEventTimestamp:null}async findStreamWithOldestEventInGroup(e){n.info("Finding oldest event in : %s",e);let t=null;try{const a={logGroupName:e,orderBy:y.LastEventTime};let s=0;do{n.debug("Executing search for streams");const e=await this.cwLogs.send(new g(a));s+=e.logStreams.length,n.debug("Found %d streams (%d so far)",e.logStreams.length,s),e.logStreams.forEach((e=>{e.firstEventTimestamp&&(null===t||e.firstEventTimestamp<t.firstEventTimestamp)&&(t=e)})),a.nextToken=e.nextToken}while(a.nextToken)}catch(t){n.error("Error attempting to find oldest event in group : %s : %s",e,t,t)}return t}async findLogGroups(e){t.notNullOrUndefined(e);const a={logGroupNamePrefix:e};let s=[];do{n.info("%d found, pulling log groups : %j",s.length,a);const e=await this.cwLogs.send(new b(a));s=s.concat(e.logGroups),a.nextToken=e.nextToken}while(a.nextToken);return s}async removeLogGroups(e){t.notNullOrUndefined(e);const a=[];for(let t=0;t<e.length;t++)try{n.info("Deleting %j",e[t]);const s={logGroupName:e[t].logGroupName};await this.cwLogs.send(new E(s)),a.push(!0)}catch(s){n.error("Failure to delete %j : %s",e[t],s),a.push(!1)}return a}async removeLogGroupsWithPrefix(e){t.notNullOrUndefined(e),t.true(a.trimToEmpty(e).length>0),n.info("Removing log groups with prefix %s",e);const s=await this.findLogGroups(e);return await this.removeLogGroups(s)}async fullyExecuteInsightsQuery(e){t.notNullOrUndefined(e),n.debug("Starting insights query : %j",e);const a=await this.cwLogs.send(new T(e));n.debug("Got query id %j",a);let s=null,i=100;for(;!s||["Running","Scheduled"].includes(s.status);)s=await this.cwLogs.send(new S({queryId:a.queryId})),await r.wait(i),i*=2,n.info("Got : %j",s);return s}async abortInsightsQuery(e){let t=null;return e&&(t=await this.cwLogs.send(new N({queryId:e}))),t}}class Oe{cw;constructor(e=null){this.cw=e||new 
C({region:"us-east-1",apiVersion:"2010-08-01"})}get cloudWatchClient(){return this.cw}async writeSingleMetric(e,t,a,s=v.None,i,r=new Date,o=!1){const c=[];a&&a.length>0&&a.forEach((e=>{c.push({Name:e.key,Value:e.value})}));const l={Namespace:e,MetricData:[{MetricName:t,Dimensions:c,Unit:s,Value:i,Timestamp:r,StorageResolution:o?1:60}]};n.silly("Writing metric to cw : %j",l);const h=await this.cw.send(new x(l));return n.silly("Result: %j",h),h}async writeDynamoCountAsMinuteLevelMetric(t){if(n.info("Publishing %s / %s metric for %s UTC",t.namespace,t.metric,t.minuteUTC),t.scan&&t.query)throw new Error("Must send query or scan, but not both");if(!t.scan&&!t.query)throw new Error("You must specify either a scan or a query");const a=t.query?await t.dynamoRatchet.fullyExecuteQueryCount(t.query):await t.dynamoRatchet.fullyExecuteScanCount(t.scan);n.debug("%s / %s for %s are %j",t.namespace,t.metric,t.minuteUTC,a);const s=t.minuteUTC.split(" ").join("T")+":00Z",i=e.fromISO(s).toJSDate(),r=await this.writeSingleMetric(t.namespace,t.metric,t.dims,v.Count,a.count,i,!1);return n.debug("Metrics response: %j",r),a.count}}class ke{static DEFAULT_CONTENT=Buffer.from("DAEMON_PLACEHOLDER");static DAEMON_METADATA_KEY="daemon_meta";static async start(e,t,a,s){try{s.meta=s.meta||{},n.info("Starting daemon, key: %s, options: %j",a,s);const i=(new Date).getTime(),r={id:t,title:s.title,lastUpdatedEpochMS:i,lastUpdatedMessage:"Created",targetFileName:s.targetFileName,startedEpochMS:i,completedEpochMS:null,meta:s.meta,error:null,link:null,contentType:s.contentType};return await ke.writeState(e,a,r,ke.DEFAULT_CONTENT)}catch(e){throw n.error("Error while trying to start a daemon: %j %s",s,e),e}}static async writeState(e,t,a,s){try{const i={};a.lastUpdatedEpochMS=(new Date).getTime(),i[ke.DAEMON_METADATA_KEY]=JSON.stringify(a);const r={Bucket:e.getDefaultBucket(),Key:t,ContentType:a.contentType,Metadata:i,Body:s};a.targetFileName&&(r.ContentDisposition='attachment;filename="'+a.targetFileName+'"');const o=await e.getS3Client().send(new D(r));return n.silly("Daemon wrote : %s",o),ke.stat(e,t)}catch(e){throw n.error("Error while trying to write a daemon stat: %j %s",a,e),e}}static async streamDataAndFinish(e,t,s,i){n.debug("Streaming data to %s",t);const r=await ke.updateMessage(e,t,"Streaming data");r.completedEpochMS=(new Date).getTime(),r.lastUpdatedMessage="Complete";const o={};o[ke.DAEMON_METADATA_KEY]=JSON.stringify(r);const c={Bucket:e.getDefaultBucket(),Key:t,ContentType:r.contentType,Metadata:o,Body:s},l=a.trimToNull(i?.overrideTargetFileName)||a.trimToNull(r?.targetFileName);l&&(c.ContentDisposition='attachment;filename="'+l+'"');const h=new j({client:e.getS3Client(),params:c,tags:[],queueSize:4,partSize:5242880,leavePartsOnError:!1});i?.progressFn&&h.on("httpUploadProgress",i.progressFn);const u=await h.done();return n.silly("Daemon wrote : %s",u),ke.stat(e,t)}static async updateMessage(e,t,a){try{const s=await ke.stat(e,t);return s.lastUpdatedMessage=a,ke.writeState(e,t,s,ke.DEFAULT_CONTENT)}catch(e){throw n.error("Error while trying to update a daemon message: %j %s",t,e),e}}static async stat(e,t){try{n.debug("Daemon stat for path %s / %s",e.getDefaultBucket(),t);let a=null;const s=await e.fetchMetaForCacheFile(t);n.debug("Daemon: Meta is %j",s);const i=s&&s.Metadata?s.Metadata[ke.DAEMON_METADATA_KEY]:null;return i?(a=JSON.parse(i),a.completedEpochMS&&!a.error&&(a.link=await e.preSignedDownloadUrlForCacheFile(t))):n.warn("No metadata found! 
(Head was %j)",s),a}catch(e){throw n.error("Error while trying to fetch a daemon state: %j %s",t,e),e}}static async abort(e,t){return ke.error(e,t,"Aborted")}static async error(e,t,a){try{const s=await ke.stat(e,t);return s.error=a,s.completedEpochMS=(new Date).getTime(),ke.writeState(e,t,s,ke.DEFAULT_CONTENT)}catch(e){throw n.error("Error while trying to write a daemon error: %j %s",t,e),e}}static async finalize(e,t,a){try{n.info("Finalizing daemon %s with %d bytes",t,a.length);const s=await ke.stat(e,t);return s.completedEpochMS=(new Date).getTime(),s.lastUpdatedMessage="Complete",ke.writeState(e,t,s,a)}catch(e){throw n.error("Error while trying to finalize a daemon: %j %s",t,e),e}}}class Fe{s3;defaultBucket;constructor(e,a=null){this.s3=e,this.defaultBucket=a,t.notNullOrUndefined(this.s3,"s3")}get s3Client(){return this.s3}static applyCacheControlMaxAge(e,t){return e&&t&&(e.CacheControl="max-age="+t),e}static applyUserMetaData(e,t,s){return e&&a.trimToNull(t)&&a.trimToNull(s)&&(e.Metadata=e.Metadata||{},e.Metadata[t]=s),e}getDefaultBucket(){return this.defaultBucket}getS3Client(){return this.s3}async fileExists(e,t=null){try{return!!await this.fetchMetaForCacheFile(e,this.bucketVal(t))}catch(e){return n.silly("Error calling file exists (as expected) %s",e),!1}}async fetchCacheFileAsS3GetObjectCommandOutput(e,t=null){let a=null;try{const s={Bucket:this.bucketVal(t),Key:e};a=await this.s3.send(new O(s))}catch(t){if(!(t instanceof k))throw t;n.debug("Key %s not found - returning null",e),a=null}return a}async fetchCacheFileAsReadableStream(e,t=null){return(await this.fetchCacheFileAsS3GetObjectCommandOutput(e,t)).Body.transformToWebStream()}async fetchCacheFileAsBuffer(e,t=null){let a=null;const s=await this.fetchCacheFileAsS3GetObjectCommandOutput(e,t);if(s?.Body){const e=await s.Body.transformToByteArray();a=Buffer.from(e)}return a}async fetchCacheFileAsString(e,t=null){let a=null;const s=await this.fetchCacheFileAsS3GetObjectCommandOutput(e,t);return s?.Body&&(a=await s.Body.transformToString()),a}async fetchCacheFileAsObject(e,t=null){const a=await this.fetchCacheFileAsString(e,t);return a?JSON.parse(a):null}async removeCacheFile(e,t=null){let a=null;const s={Bucket:this.bucketVal(t),Key:e};try{a=await this.s3.send(new F(s))}catch(s){if(!(s&&s instanceof A))throw s;n.info("Swallowing 404 deleting missing object %s %s",t,e),a=null}return a}async writeObjectToCacheFile(e,t,a,s){const n=JSON.stringify(t);return this.writeStringToCacheFile(e,n,a,s)}async writeStringToCacheFile(e,t,a,s){const n=o.stringToWebReadableStream(t);return this.writeStreamToCacheFile(e,n,a,s)}async writeStreamToCacheFile(e,t,a,s,i=(e=>{n.debug("Uploading : %s",e)})){const r=Object.assign({},a||{},{Bucket:this.bucketVal(s),Key:e,Body:t}),o=new j({client:this.s3,params:r,tags:[],queueSize:4,partSize:5242880,leavePartsOnError:!1});i&&o.on("httpUploadProgress",i);return await o.done()}async synchronize(e,a,s=this,r=!1){t.notNullOrUndefined(e,"srcPrefix"),t.notNullOrUndefined(a,"targetPrefix"),t.true(e.endsWith("/"),"srcPrefix must end in /"),t.true(a.endsWith("/"),"targetPrefix must end in /");let o=[];const c=await this.directChildrenOfPrefix(e),l=await s.directChildrenOfPrefix(a),h=new i;for(let t=0;t<c.length;t++){const i=c[t];if(n.info("Processing %s : %s",i,h.dumpExpected(t/c.length)),i.endsWith("/"))if(r){n.info("%s is a subfolder - recursing");const t=await this.synchronize(e+i,a+i,s,r);n.info("Got %d back from %s",t.length,i),o=o.concat(t)}else n.info("%s is a subfolder and recurse not specified - 
skipping",i);else{let t=!0;const r=await this.fetchMetaForCacheFile(e+i);if(l.includes(i)){const e=await s.fetchMetaForCacheFile(a+i);r.ETag===e.ETag&&(n.debug("Skipping - identical"),t=!1)}if(t){n.debug("Copying...");const t=await this.fetchCacheFileAsReadableStream(e+i);try{const e=await s.writeStreamToCacheFile(a+i,t,r,void 0);n.silly("Write result : %j",e),o.push(i)}catch(e){n.error("Failed to sync : %s : %s",i,e)}}}}return n.info("Found %d files, copied %d",c.length,o.length),h.log(),o}async fetchMetaForCacheFile(e,t=null){let a=null;try{a=await this.s3.send(new P({Bucket:this.bucketVal(t),Key:e}))}catch(s){if(!(s&&s instanceof A))throw n.error("Unrecognized error, rethrowing : %s",s,s),s;n.info("Cache file %s %s not found returning null",this.bucketVal(t),e),a=null}return a}async cacheFileAgeInSeconds(e,t=null){try{const a=await this.fetchMetaForCacheFile(e,t);return a&&a.LastModified?Math.floor(((new Date).getTime()-a.LastModified.getTime())/1e3):(n.warn("Cache file %s %s had no last modified returning null",this.bucketVal(t),e),null)}catch(a){if(a&&a instanceof A)return n.warn("Cache file %s %s not found returning null",this.bucketVal(t),e),null;throw a}}async copyFile(e,t,a=null,s=null){const n={CopySource:"/"+this.bucketVal(a)+"/"+e,Bucket:this.bucketVal(s),Key:t,MetadataDirective:"COPY"};return await this.s3.send(new I(n))}async quietCopyFile(e,t,a=null,s=null){let i=!1;try{await this.copyFile(e,t,a,s);i=!0}catch(e){n.silly("Failed to copy file in S3 : %s",e)}return i}async preSignedDownloadUrlForCacheFile(e,t=3600,a=null){const s={Bucket:this.bucketVal(a),Key:e};return await B(this.s3,new O(s),{expiresIn:t})}async directChildrenOfPrefix(e,t=!1,a=null,s=null){const n=[],i={Bucket:this.bucketVal(a),Prefix:e,Delimiter:"/"};let r=null;do{r=await this.s3.send(new M(i));const o=e.length;r.CommonPrefixes&&r.CommonPrefixes.forEach((e=>{if(!s||n.length<s){const t=e.Prefix.substring(o);n.push(t)}})),r.Contents&&await Promise.all(r.Contents.map((async e=>{if(!s||n.length<s)if(t){const t={link:await this.preSignedDownloadUrlForCacheFile(e.Key,3600,a),name:e.Key.substring(o),size:e.Size};n.push(t)}else n.push(e.Key.substring(o))}))),i.Marker=r.NextMarker}while(i.Marker&&(!s||n.length<s));return n}async allSubFoldersOfPrefix(e,t=null){const a=[e];let s=0;for(;s<a.length;){const e=a[s++];n.debug("Pulling %s (%d remaining)",e,a.length-s);const i={Bucket:this.bucketVal(t),Prefix:e,Delimiter:"/"};let r=null;do{i.ContinuationToken=r?r.NextContinuationToken:null,r=await this.s3.send(new M(i)),r.CommonPrefixes.forEach((e=>{a.push(e.Prefix)})),n.debug("g:%j",r)}while(r.NextContinuationToken)}return a}bucketVal(e){const t=e||this.defaultBucket;if(!t)throw"You must set either the default bucket or pass it explicitly";return t}}class Ae{s3;bucket;prefix;_defaultGroup;jwtRatchet;static DEFAULT_DEFAULT_GROUP="DEFAULT";cache;constructor(e,t,a="",s=Ae.DEFAULT_DEFAULT_GROUP,n){this.s3=e,this.bucket=t,this.prefix=a,this._defaultGroup=s,this.jwtRatchet=n,this.cache=new Fe(this.s3,this.bucket)}get defaultGroup(){return this._defaultGroup}async keyToPublicToken(e,a){t.notNullOrUndefined(this.jwtRatchet,"You must set jwtRatchet if you wish to use public tokens"),t.notNullOrUndefined(e,"key"),t.true(a>0,"Expiration seconds must be larger than 0");const s={daemonKey:e};return await this.jwtRatchet.createTokenString(s,a)}keyToPath(e){return Buffer.from(e,"base64").toString()}pathToKey(e){return Buffer.from(e).toString("base64")}generatePath(e=this._defaultGroup){return 
this.generatePrefix(e)+a.createType4Guid()}generatePrefix(e=this._defaultGroup){return this.prefix+e+"/"}async start(e){e.group=e.group||this._defaultGroup;const t=this.generatePath(e.group),a=this.pathToKey(t);return ke.start(this.cache,a,t,e)}async writeState(e,t){const a=this.keyToPath(e.id);return ke.writeState(this.cache,a,e,t)}async clean(e=this._defaultGroup,t=604800){try{n.info("Daemon removing items older than %d seconds from group %s",t,e);const a=await this.list(e),s=(new Date).getTime(),i=[];for(let e=0;e<a.length;e++){const n=a[e];if((s-n.startedEpochMS)/1e3>t){await this.cache.removeCacheFile(this.keyToPath(n.id));i.push(n)}}return n.debug("Removed %d of %d items",i.length,a.length),i}catch(t){throw n.error("Error while trying to clean a daemon: %j %s",e,t),t}}async listKeys(e=this._defaultGroup){try{const t=this.generatePrefix(e);n.info("Fetching children of %s",t);const a=await this.cache.directChildrenOfPrefix(t);return n.debug("Found : %j",a),a}catch(t){throw n.error("Error while trying to list daemon keys: %j %s",e,t),t}}async list(e=this._defaultGroup){try{const t=this.generatePrefix(e);n.info("Fetching children of %s",t);const a=(await this.listKeys(e)).map((t=>this.stat(this.pathToKey(this.generatePrefix(e)+t))));return await Promise.all(a)}catch(t){throw n.error("Error while trying to list daemon states: %j %s",e,t),t}}async updateMessage(e,t){const a=this.keyToPath(e);return ke.updateMessage(this.cache,a,t)}async stat(e){const t=this.keyToPath(e);return ke.stat(this.cache,t)}async statFromPublicToken(e){t.notNullOrUndefined(this.jwtRatchet,"You must set jwtRatchet if you wish to use public tokens"),t.notNullOrUndefined(e,"publicToken");const a=await this.jwtRatchet.decodeToken(e),s=a?.daemonKey;return s?this.stat(s):null}async abort(e){return ke.abort(this.cache,this.keyToPath(e))}async error(e,t){return ke.error(this.cache,this.keyToPath(e),t)}async finalize(e,t){return ke.finalize(this.cache,this.keyToPath(e),t)}}class Pe{provider;cfg;static defaultDaoConfig(){return{guidCreateFunction:a.createType4Guid,guidFieldName:"guid",createdEpochMSFieldName:"createdEpochMS",updatedEpochMSFieldName:"updatedEpochMS",createdUtcTimestampFieldName:null,updatedUtcTimestampFieldName:null}}constructor(e,a=Pe.defaultDaoConfig()){this.provider=e,this.cfg=a,t.notNullOrUndefined(e,"provider"),t.notNullOrUndefined(a,"cfg"),t.notNullOrUndefined(a.guidCreateFunction,"cfg.guidCreateFunction"),t.notNullOrUndefined(a.guidFieldName,"cfg.guidFieldName")}async fetchAll(){return(await this.provider.loadDatabase()).items||[]}async resetDatabase(){await this.provider.storeDatabase({items:[],lastModifiedEpochMS:Date.now()})}async removeItems(e){let t=await this.fetchAll();return e&&(t=t.filter((t=>!e.includes(t[this.cfg.guidFieldName]))),await this.provider.storeDatabase({items:t,lastModifiedEpochMS:Date.now()})),t}async store(t){let a=await this.fetchAll();return t&&(t[this.cfg.guidFieldName]=t[this.cfg.guidFieldName]||this.cfg.guidCreateFunction(),this.cfg.createdEpochMSFieldName&&(t[this.cfg.createdEpochMSFieldName]=t[this.cfg.createdEpochMSFieldName]||Date.now()),this.cfg.createdUtcTimestampFieldName&&(t[this.cfg.createdUtcTimestampFieldName]=t[this.cfg.createdUtcTimestampFieldName]||e.utc().toISO()),this.cfg.updatedEpochMSFieldName&&(t[this.cfg.updatedEpochMSFieldName]=Date.now()),this.cfg.updatedUtcTimestampFieldName&&(t[this.cfg.updatedUtcTimestampFieldName]=e.utc().toISO()),a=a.filter((e=>e[this.cfg.guidFieldName]!==t[this.cfg.guidFieldName])),a.push(t),await 
this.provider.storeDatabase({items:a,lastModifiedEpochMS:Date.now()})),t}async fetchById(e){return(await this.fetchAll()).find((t=>t[this.cfg.guidFieldName]===e))}async searchByField(e,a){t.notNullOrUndefined(e,"fieldDotPath"),t.notNullOrUndefined(a,"fieldValue");const s={};return s[e]=a,this.searchByFieldMap(s)}async searchByFieldMap(e){t.notNullOrUndefined(e,"input");let a=await this.fetchAll();return a=a.filter((t=>{let a=!0;return Object.keys(e).forEach((s=>{const n=c.findValueDotPath(t,s),i=e[s];a=a&&n===i})),a})),a}}class Ie{s3CacheRatchet;keyName;constructor(e,a){this.s3CacheRatchet=e,this.keyName=a,t.notNullOrUndefined(e,"s3CacheRatchet"),t.notNullUndefinedOrOnlyWhitespaceString(e.getDefaultBucket(),"s3CacheRatchet.defaultBucket"),t.notNullUndefinedOrOnlyWhitespaceString(a,"keyName")}async storeDatabase(e){const t=e||{items:[],lastModifiedEpochMS:null};t.lastModifiedEpochMS=Date.now();return!!await this.s3CacheRatchet.writeObjectToCacheFile(this.keyName,t)}async loadDatabase(){return await this.s3CacheRatchet.fetchCacheFileAsObject(this.keyName)||{items:[],lastModifiedEpochMS:Date.now()}}}class Me{cache;prefix;constructor(e,t){if(this.cache=e,this.prefix=t,!e)throw new Error("cache object may not be null");if(!e.getDefaultBucket())throw new Error("Supplied cache must have default bucket set")}buildPathPrefix(e){let t="";return this.prefix&&(t+=this.prefix),e&&(t+=e),t}buildFullPath(e,t){let a=this.buildPathPrefix(t);return a.length>0&&(a+="/"),a+=e+".json",a}async exists(e,t){const a=this.buildFullPath(e,t);return n.debug("Check file existence : %s",a),this.cache.fileExists(a)}async fetch(e,t){const a=this.buildFullPath(e,t);n.debug("Fetching : %s",a);const s=await this.cache.fetchCacheFileAsObject(a);return s.id=e,s.path=t,s}async store(e,t){e.id=e.id||a.createType4Guid(),e.lastModifiedEpochMS=(new Date).getTime();const s=this.buildFullPath(e.id,t);n.debug("Storing : %s",s),await this.cache.writeObjectToCacheFile(s,e);return await this.fetch(e.id,t)}async listItems(e){const t=this.buildPathPrefix(e);n.debug("Listing : %s",t);return await this.cache.directChildrenOfPrefix(t)}async fetchItemsInPath(e){const t=this.buildPathPrefix(e);n.debug("Full fetch of : %s",t);const a=(await this.listItems(e)).map((t=>this.fetch(t,e)));return await Promise.all(a)}async delete(e,t){const a=this.buildFullPath(e,t);n.debug("Deleting : %s",a);return null!=await this.cache.removeCacheFile(a)}}class Ue{awsDDB;constructor(e){if(this.awsDDB=e,!e)throw"awsDDB may not be null"}get dynamoDBDocumentClient(){return this.awsDDB}getDDB(){return this.awsDDB}async tableIsEmpty(e){const t={TableName:e,Limit:1};return 0===(await this.throughputSafeScanOrQuery((e=>this.scanPromise(e)),t)).Items.length}async scanPromise(e){return this.awsDDB.send(new K(e))}async queryPromise(e){return this.awsDDB.send(new L(e))}async throughputSafeScanOrQuery(e,t,a,i){let o=null;if(t){let c=i??0;do{c++;try{o=await e(t)}catch(e){if(!Ue.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e))throw e;{const e=1e3*Math.pow(2,c);n.debug("Exceeded scan throughput for %j : Try %d of %d (Waiting %d ms)",t,c,a,e),await r.wait(e),c++}}}while(!o&&(!a||c<a));o||s.throwFormattedErr("throughputSafeScan failed - tried %d times, kept running into throughput exceeded : %j",a,t)}return o}async fullyExecuteQueryCount(e,t=0){try{e.Select="COUNT",n.debug("Executing count query : %j",e);const a={count:0,scannedCount:0,pages:0},s=(new Date).getTime();let i=null;const o=e.Limit;e.Limit=null;do{i=await 
this.throughputSafeScanOrQuery((e=>this.queryPromise(e)),e),a.count+=i.Count,a.scannedCount+=i.ScannedCount,a.pages++,e.ExclusiveStartKey=i.LastEvaluatedKey,await r.wait(t),n.silly("Rval is now %j",a),o&&a.count>=o&&e.ExclusiveStartKey&&(n.info("Aborting query since hit limit of %d",o),e.ExclusiveStartKey=null)}while(e.ExclusiveStartKey);const c=(new Date).getTime();return n.debug("Finished, returned %j in %s for %j",a,l.formatMsDuration(c-s,!0),e),a}catch(t){return n.error("Failed with %s, q: %j",t,e,t),null}}async fullyExecuteQuery(e,t=0,a=null){const s=[];return await this.fullyExecuteProcessOverQuery(e,(async e=>{s.push(e)}),t,a),s}async fullyExecuteProcessOverQuery(e,t,a=0,s=null){let i=0;try{n.debug("Executing query : %j",e);const o=(new Date).getTime();n.debug("Pulling %j",e);let c=await this.throughputSafeScanOrQuery((e=>this.queryPromise(e)),e);for(let e=0;e<c.Items.length;e++)await t(c.Items[e]),i++;let h=0,u=0;for(;c.LastEvaluatedKey&&(null===s||i<s)&&!e.Limit;){n.silly("Found more rows - requery with key %j",c.LastEvaluatedKey),e.ExclusiveStartKey=c.LastEvaluatedKey,c=await this.throughputSafeScanOrQuery((e=>this.queryPromise(e)),e);for(let e=0;e<c.Items.length;e++)await t(c.Items[e]),i++;n.silly("Have processed %d items",i),h++,u+=0===c.Count?1:0,await r.wait(a)}const d=(new Date).getTime();n.debug("Finished, processed %d rows in %s for %j (%d blank pages, %d total pages)",i,l.formatMsDuration(d-o,!0),e,u,h)}catch(t){n.error("Failed with %s, q: %j",t,e,t)}return i}async fullyExecuteScanCount(e,t=0){try{e.Select="COUNT";const a={count:0,scannedCount:0,pages:0};n.debug("Executing scan count : %j",e);const s=(new Date).getTime();let i=null;const o=e.Limit;e.Limit=null;do{i=await this.throughputSafeScanOrQuery((e=>this.scanPromise(e)),e),a.count+=i.Count,a.scannedCount+=i.ScannedCount,a.pages++,e.ExclusiveStartKey=i?.LastEvaluatedKey,await r.wait(t),n.silly("Rval is now %j",a),o&&a.count>=o&&e.ExclusiveStartKey&&(n.info("Aborting scan since hit limit of %d",o),e.ExclusiveStartKey=null)}while(e.ExclusiveStartKey);const c=(new Date).getTime();return n.debug("Finished, returned %j in %s for %j",a,l.formatMsDuration(c-s,!0),e),a}catch(t){return n.error("Failed with %s, q: %j",t,e,t),null}}async fullyExecuteScan(e,t=0,a=null){const s=[];return await this.fullyExecuteProcessOverScan(e,(async e=>{s.push(e)}),t,a),s}async fullyExecuteProcessOverScan(e,t,a=0,s=null){let i=0;try{n.debug("Executing scan : %j",e);const o=(new Date).getTime();n.debug("Pulling %j",e);let c=await this.throughputSafeScanOrQuery((e=>this.scanPromise(e)),e);for(let e=0;e<c.Items.length;e++)await t(c.Items[e]),i++;for(;c.LastEvaluatedKey&&(null===s||i<s)&&!e.Limit;){n.silly("Found more rows - requery with key %j",c.LastEvaluatedKey),e.ExclusiveStartKey=c.LastEvaluatedKey,c=await this.throughputSafeScanOrQuery((e=>this.scanPromise(e)),e);for(let e=0;e<c.Items.length;e++)await t(c.Items[e]),i++;n.silly("Rval is now %d items",i),await r.wait(a)}const h=(new Date).getTime();n.debug("Finished, processed %d results in %s for %j",i,l.formatMsDuration(h-o,!0),e)}catch(t){n.error("Failed with %s, q: %j",t,e,t)}return i}async writeAllInBatches(e,t,a){if(!a||a<2)throw new Error("Batch size needs to be at least 2, was "+a);let s=0;if(t&&t.length>0){let i=[];for(t.forEach((t=>{i.push({PutRequest:{Item:t,ReturnConsumedCapacity:"TOTAL",TableName:e}})})),n.debug("Processing %d batch items to %s",i.length,e);i.length>0;){const t=i.slice(0,Math.min(i.length,a));i=i.slice(t.length);const 
o={RequestItems:{},ReturnConsumedCapacity:"TOTAL",ReturnItemCollectionMetrics:"SIZE"};o.RequestItems[e]=t;let c=1,l=!1,h=null;for(;!l&&c<7;){try{h=await this.awsDDB.send(new V(o))}catch(e){if(!Ue.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e))throw e;n.info("Caught ProvisionedThroughputExceededException - retrying delete"),h={UnprocessedItems:o.RequestItems}}if(h&&h.UnprocessedItems&&h.UnprocessedItems[e]&&h.UnprocessedItems[e].length>0){const t=Math.pow(2,c);n.warn("Found %d unprocessed items. Backing off %d seconds and trying again",h.UnprocessedItems[e].length,t),await r.wait(1e3*t),c++,o.RequestItems[e]=h.UnprocessedItems[e]}else l=!0}h&&h.UnprocessedItems&&h.UnprocessedItems[e]&&h.UnprocessedItems[e].length>0?(n.error("After 6 tries there were still %d unprocessed items",h.UnprocessedItems[e].length),s+=t.length-h.UnprocessedItems[e].length,n.warn("FIX Unprocessed : %j",h.UnprocessedItems)):s+=t.length}}return s}async fetchFullObjectsMatchingKeysOnlyIndexQuery(e,a,s=25){t.notNullOrUndefined(e),t.notNullOrUndefined(e.TableName),t.notNullOrUndefined(a),t.true(a.length>0);const n=await this.fullyExecuteQuery(e),i=Ue.stripAllToKeysOnly(n,a);return await this.fetchAllInBatches(e.TableName,i,s)}async fetchAllInBatches(e,t,a){if(!a||a<2||a>100)throw new Error("Batch size needs to be at least 2 and no more than 100, was "+a);let s=[];const i=[];let o=Object.assign([],t);for(;o.length>0;){const t=o.slice(0,Math.min(o.length,a));o=o.slice(t.length);const s={};s[e]={Keys:t};const n={RequestItems:s,ReturnConsumedCapacity:"TOTAL"};i.push(n)}n.debug("Created %d batches",i.length);for(let t=0;t<i.length;t++){i.length>1&&n.info("Processing batch %d of %d",t+1,i.length);const a=i[t];let o=1;do{n.silly("Pulling %j",a);const t=await this.awsDDB.send(new _(a));s=s.concat(t.Responses[e]),t.UnprocessedKeys&&t.UnprocessedKeys[e]&&t.UnprocessedKeys[e].Keys.length>0&&o<15&&(n.silly("Found %d unprocessed, waiting",t.UnprocessedKeys[e].Keys),await r.wait(1e3*Math.pow(2,o)),o++),a.RequestItems=t.UnprocessedKeys}while(!a.RequestItems&&a.RequestItems[e].Keys.length>0)}return s}async deleteAllInBatches(e,t,a){if(!a||a<2)throw new Error("Batch size needs to be at least 2, was "+a);let s=0;if(t&&t.length>0){let i=[];for(t.forEach((t=>{i.push({DeleteRequest:{Key:t,ReturnConsumedCapacity:"TOTAL",TableName:e}})})),n.debug("Processing %d DeleteBatch items to %s",i.length,e);i.length>0;){const t=i.slice(0,Math.min(i.length,a));i=i.slice(t.length);const o={RequestItems:{},ReturnConsumedCapacity:"TOTAL",ReturnItemCollectionMetrics:"SIZE"};o.RequestItems[e]=t;let c=1,l=!1,h=null;for(;!l&&c<7;){try{h=await this.awsDDB.send(new V(o))}catch(e){if(!Ue.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e))throw e;n.info("Caught ProvisionedThroughputExceededException - retrying delete"),h={UnprocessedItems:o.RequestItems}}if(h&&h.UnprocessedItems&&h.UnprocessedItems[e]&&h.UnprocessedItems[e].length>0){const t=Math.pow(2,c);n.warn("Found %d unprocessed items. 
Backing off %d seconds and trying again",h.UnprocessedItems[e].length,t),await r.wait(1e3*t),c++,o.RequestItems[e]=h.UnprocessedItems[e]}else l=!0}h&&h.UnprocessedItems&&h.UnprocessedItems[e]&&h.UnprocessedItems[e].length>0?(n.error("After 6 tries there were still %d unprocessed items",h.UnprocessedItems[e].length),s+=t.length-h.UnprocessedItems[e].length,n.warn("FIX Unprocessed : %j",h.UnprocessedItems)):s+=t.length,n.debug("%d Remain, DeleteBatch Results : %j",i.length,h)}}return s}async simplePut(e,t,a=3){let s=null,i=0;const o={Item:t,ReturnConsumedCapacity:"TOTAL",TableName:e};for(;!s&&i<a;)try{s=await this.awsDDB.send(new G(o))}catch(e){if(!Ue.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e))throw e;{const e=1e3*Math.pow(2,i);n.debug("Exceeded write throughput for %j : Try %d of %d (Waiting %d ms)",o,i,a,e),await r.wait(e),i++}}return s||n.warn("Unable to write %j to DDB after %d tries, giving up",o,a),s}async simplePutOnlyIfFieldIsNullOrUndefined(e,t,a){let s=!1;const i={Item:t,ReturnConsumedCapacity:"TOTAL",ConditionExpression:"attribute_not_exists(#fieldName) OR #fieldName = :null ",ExpressionAttributeNames:{"#fieldName":a},ExpressionAttributeValues:{":null":null},TableName:e};try{const e=await this.awsDDB.send(new G(i));n.silly("Wrote : %j",e),s=!0}catch(o){if(Ue.objectIsErrorWithProvisionedThroughputExceededExceptionCode(o))n.debug("Exceeded write throughput for %j : (Waiting 2000 ms)",i),await r.wait(2e3),s=await this.simplePutOnlyIfFieldIsNullOrUndefined(e,t,a);else{if(!(o&&o instanceof J))throw o;n.debug("Failed to write %j due to null field failure"),s=!1}}return s}async simplePutWithCollisionAvoidance(e,a,s,i,o=null,c=3){t.true(s&&s.length>0&&s.length<3,"You must pass 1 or 2 key names");let l=null,h=0;const u={"#key0":s[0]},d={":key0":a[s[0]]};let m="#key0 <> :key0";s.length>1&&(m+=" AND #key1 <> :key1",u["#key1"]=s[1],d[":key1"]=a[s[1]]);const f={Item:a,ReturnConsumedCapacity:"TOTAL",ConditionExpression:m,ExpressionAttributeNames:u,ExpressionAttributeValues:d,TableName:e};let g=0;for(;!l&&h<c&&(!o||g<o);)try{l=await this.awsDDB.send(new G(f))}catch(e){if(Ue.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e)){h++;const e=1e3*Math.pow(2,h);n.debug("Exceeded write throughput for %j : Try %d of %d (Waiting %d ms)",f,h,c,e),await r.wait(e)}else{if(!(e&&e instanceof J))throw e;{let e=Object.assign({},f.Item);n.info("Failed to write %j due to collision - adjusting and retrying",e),e=i(e),f.Item=e,f.ExpressionAttributeValues[":key0"]=e[s[0]],s.length>1&&(f.ExpressionAttributeValues[":key1"]=e[s[1]]),g++}}}return l&&g>0&&n.info("After adjustment, wrote %j as %j",a,f.Item),l||n.warn("Unable to write %j to DDB after %d provision tries and %d adjusts, giving up",f,h,g),l?f.Item:null}async simpleGet(e,t,a=3){let s=null,i=0;const o={TableName:e,Key:t};for(;!s&&i<a;)try{s=await this.awsDDB.send(new W(o))}catch(e){if(!Ue.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e))throw e;{const e=1e3*Math.pow(2,i);n.debug("Exceeded read throughput for %j : Try %d of %d (Waiting %d ms)",o,i,a,e),await r.wait(e),i++}}s||n.warn("Unable to read %j from DDB after %d tries, giving up",o,a);return s&&s.Item?Object.assign({},s.Item):null}static objectIsErrorWithProvisionedThroughputExceededExceptionCode(e){return!!e&&e instanceof $}async simpleGetWithCounterDecrement(e,t,a,s,i=3){let o=null,c=0;const l={TableName:e,Key:t,UpdateExpression:"set #counter = 
#counter-:decVal",ExpressionAttributeNames:{"#counter":a},ExpressionAttributeValues:{":decVal":1,":minVal":0},ConditionExpression:"#counter > :minVal",ReturnValues:"ALL_NEW"};let h=!1;for(;!o&&c<i&&!h;)try{o=await this.awsDDB.send(new q(l))}catch(e){if(Ue.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e)){const e=1e3*Math.pow(2,c);n.debug("Exceeded update throughput for %j : Try %d of %d (Waiting %d ms)",l,c,i,e),await r.wait(e),c++}else{if(!(e&&e instanceof J))throw e;n.info("Cannot fetch requested row (%j) - the update check failed",t),h=!0}}o||h||n.warn("Unable to update %j from DDB after %d tries, giving up",l,i);const u=o&&o.Attributes?Object.assign({},o.Attributes):null;return s&&u&&0===u[a]&&(n.info("Delete on 0 specified, removing"),await this.simpleDelete(e,t)),u}async simpleDelete(e,t){const a={TableName:e,Key:t};return await this.awsDDB.send(new z(a))}async atomicCounter(e,t,a,s=1){const n={TableName:e,Key:t,UpdateExpression:"SET #counterFieldName = #counterFieldName + :inc",ExpressionAttributeNames:{"#counterFieldName":a},ExpressionAttributeValues:{":inc":s},ReturnValues:"UPDATED_NEW"},i=await this.awsDDB.send(new q(n));return h.safeNumber(i.Attributes[a])}static cleanObject(e){if(e){const t=[];Object.keys(e).forEach((a=>{const s=e[a];""===s?t.push(a):s instanceof Object&&Ue.cleanObject(s)})),n.silly("Removing keys : %j",t),t.forEach((t=>{delete e[t]}))}}static stripToKeysOnly(e,t){let a=null;return e&&t&&t.length>0&&(a={},t.forEach((t=>{e[t]||s.throwFormattedErr("Failed key extraction on %j - missing %s",e,t),a[t]=e[t]}))),a}static stripAllToKeysOnly(e,t){return e.map((e=>Ue.stripToKeysOnly(e,t)))}}class Re{awsDDB;constructor(e){if(this.awsDDB=e,!e)throw"awsDDB may not be null"}async deleteTable(e,a=!0){t.notNullOrUndefined(e);const s={TableName:e};n.debug("Deleting ddb table %s",e);const i=await this.awsDDB.send(new Q(s));return a&&(n.debug("Table marked for delete, waiting for deletion"),await this.waitForTableDelete(e)),i}async createTable(e,a=!0,i=!1){t.notNullOrUndefined(e),t.notNullOrUndefined(e.TableName),n.debug("Creating new table : %j",e);await this.tableExists(e.TableName)&&(i?(n.debug("Table %s exists and replace specified - deleting",e.TableName),await this.deleteTable(e.TableName)):s.throwFormattedErr("Cannot create table %s - exists already and replace not specified",e.TableName));const r=await this.awsDDB.send(new H(e));return a&&(n.debug("Table created, awaiting ready"),await this.waitForTableReady(e.TableName)),r}async waitForTableReady(e){let t=!0,a=await this.safeDescribeTable(e);for(;a&&a.Table&&"ACTIVE"!==a.Table.TableStatus;)n.silly("Table not ready - waiting 2 seconds"),await r.wait(2e3),a=await this.safeDescribeTable(e);return a||a.Table||(n.warn("Cannot wait for %s to be ready - table does not exist",e),t=!1),t}async waitForTableDelete(e){let t=await this.safeDescribeTable(e);for(;t;)n.silly("Table %s still exists, waiting 2 seconds (State is %s)",e,t.Table.TableStatus),await r.wait(2e3),t=await this.safeDescribeTable(e)}async tableExists(e){return!!await this.safeDescribeTable(e)}async listAllTables(){const e={};let t=[];do{const a=await this.awsDDB.send(new Y(e));t=t.concat(a.TableNames),e.ExclusiveStartTableName=a.LastEvaluatedTableName}while(e.ExclusiveStartTableName);return t}async safeDescribeTable(e){try{return await this.awsDDB.send(new Z({TableName:e}))}catch(e){if(e instanceof X)return null;throw e}}async 
copyTable(e,a,n,i){if(t.notNullUndefinedOrOnlyWhitespaceString(e,"srcTableName"),t.notNullUndefinedOrOnlyWhitespaceString(a,"dstTableName"),i)throw s.fErr("Cannot copy %s to %s - copy data not supported yet",e,a);const r=await this.safeDescribeTable(e);if(await this.tableExists(a))throw s.fErr("Cannot copy to %s - table already exists",a);if(!r)throw s.fErr("Cannot copy %s - doesnt exist",e);r.Table.AttributeDefinitions,r.Table.KeySchema,r.Table.GlobalSecondaryIndexes;const o=Object.assign({},n||{},{AttributeDefinitions:r.Table.AttributeDefinitions,TableName:a,KeySchema:r.Table.KeySchema,LocalSecondaryIndexes:r.Table.LocalSecondaryIndexes,GlobalSecondaryIndexes:r.Table.GlobalSecondaryIndexes.map((e=>{const t=e;return 0!==t.ProvisionedThroughput?.WriteCapacityUnits&&0!==t.ProvisionedThroughput?.ReadCapacityUnits||(t.ProvisionedThroughput=void 0),t})),BillingMode:r.Table.BillingModeSummary.BillingMode,ProvisionedThroughput:"PROVISIONED"===r.Table.BillingModeSummary.BillingMode?r.Table.ProvisionedThroughput:void 0,StreamSpecification:r.Table.StreamSpecification,SSESpecification:r.Table.SSEDescription,Tags:void 0,TableClass:r.Table.TableClassSummary?.TableClass,DeletionProtectionEnabled:r.Table.DeletionProtectionEnabled});return await this.awsDDB.send(new H(o))}}class je{spots;buckets;separator;alphabet;_allSlots;constructor(e=3,s=16,n="_",i="0123456789ABCDEF"){this.spots=e,this.buckets=s,this.separator=n,this.alphabet=i,t.true(e>0,"Spots must be larger than 0"),t.true(s>1,"Buckets must be larger than 1"),t.notNullOrUndefined(a.trimToNull(i),"Alphabet may not be null or empty"),t.true(a.allUnique(i),"Alphabet must be unique"),t.true(a.stringContainsOnlyAlphanumeric(i),"Alphabet must be alphanumeric");const r=Math.pow(i.length,e);t.true(s<r,"Buckets must be less than permutations ("+s+" / "+r+")"),t.notNullOrUndefined(a.trimToNull(this.separator),"Separator must be nonnull and nonempty");const o=a.allPermutationsOfLength(e,i);this._allSlots=o.slice(0,s)}get allBuckets(){return Object.assign([],this._allSlots)}get randomBucket(){return this._allSlots[Math.floor(Math.random()*this.buckets)]}allSpreadValues(e){t.notNullOrUndefined(a.trimToNull(e),"Cannot spread null/empty value");return this._allSlots.map((t=>e+this.separator+t))}allSpreadValuesForArray(e){t.true(e&&e.length>0,"Cannot spread null/empty array");let a=[];return e.forEach((e=>{a=a.concat(this.allSpreadValues(e))})),a}addSpreader(e){return t.notNullOrUndefined(a.trimToNull(e),"Cannot spread null/empty value"),e+this.separator+this.randomBucket}extractBucket(e){t.notNullOrUndefined(a.trimToNull(e),"Cannot extract from null or empty value");const n=e.length-this.spots;return(n<0||e.charAt(n)!==this.separator)&&s.throwFormattedErr("Cannot extract bucket, not created by this spreader (missing %s at location %d)",this.separator,n),e.substring(n)}removeBucket(e){t.notNullOrUndefined(a.trimToNull(e),"Cannot extract from null or empty value");const n=e.length-this.spots;return(n<0||e.charAt(n)!==this.separator)&&s.throwFormattedErr("Cannot remove bucket, not created by this spreader (missing %s at location %d)",this.separator,n),e.substring(0,n)}}class Be{region;availabilityZone;ec2;ec2InstanceConnect;constructor(e="us-east-1",t="us-east-1a"){this.region=e,this.availabilityZone=t,this.ec2=new te({region:e}),this.ec2InstanceConnect=new ie({region:e})}get eC2Client(){return this.ec2}get eC2InstanceConnectClient(){return this.ec2InstanceConnect}async stopInstance(e,t=0){let a=!0;try{const a={InstanceIds:[e],DryRun:!1};n.info("About to stop 
instances : %j",a),await this.ec2.send(new ae(a)),n.info("Stop instance command sent, waiting on shutdown");let s=await this.describeInstance(e);if(t>0){const a=(new Date).getTime();for(;s&&16!==s.State.Code&&(new Date).getTime()-a<t;)n.debug("Instance status is %j - waiting for 5 seconds (up to %s)",s.State,l.formatMsDuration(t)),await r.wait(5e3),s=await this.describeInstance(e)}}catch(t){n.error("Failed to stop instance %s : %s",e,t,t),a=!1}return a}async launchInstance(e,t=0){let a=!0;try{const a={InstanceIds:[e],DryRun:!1};n.info("About to start instance : %j",a),await this.ec2.send(new se(a)),n.info("Start instance command sent, waiting on startup");let s=await this.describeInstance(e);if(t>0){const a=(new Date).getTime();for(;s&&16!==s.State.Code&&(new Date).getTime()-a<t;)n.debug("Instance status is %j - waiting for 5 seconds (up to %s)",s.State,l.formatMsDuration(t)),await r.wait(5e3),s=await this.describeInstance(e)}s&&s.PublicIpAddress&&(n.info("Instance address is %s",s.PublicIpAddress),n.info("SSH command : ssh -i path_to_pem_file ec2-user@%s",s.PublicIpAddress))}catch(t){n.error("Failed to start instance %s : %s",e,t,t),a=!1}return a}async describeInstance(e){const t=await this.listAllInstances([e]);return 1===t.length?t[0]:null}async listAllInstances(e=[]){let t=[];const a={NextToken:null};e&&e.length>0&&(a.InstanceIds=e);do{n.debug("Pulling instances... (%j)",a);const e=await this.ec2.send(new ne(a));e.Reservations.forEach((e=>{t=t.concat(e.Instances)})),a.NextToken=e.NextToken}while(a.NextToken);return n.debug("Finished pulling instances (found %d)",t.length),t}async sendPublicKeyToEc2Instance(e,t,a){const s=a||"ec2-user",n={InstanceId:e,AvailabilityZone:this.availabilityZone,InstanceOSUser:s,SSHPublicKey:t};return await this.ec2InstanceConnect.send(new re(n))}}class Ke{ecr;static ECR_REPOSITORIES_TO_PRUNE_ENV_KEY="NEON_ECR_REPOSITORIES_TO_PRUNE";static AWS_ECR_BATCH_DELETE_IMAGE_COUNT=100;static ECR_IMAGE_MINIMUM_AGE_DAYS=60;static ECR_REPOSITORY_MINIMUM_IMAGE_COUNT=600;constructor(e){this.ecr=e,t.notNullOrUndefined(e,"ecr")}async fetchRepositoryNames(){return null}async fetchRegistryId(){return(await this.ecr.send(new oe({}))).registryId}async handlePruning(e){const t=await this.fetchRegistryId();return await this.handlePruningForRegistry(t,e)}async handlePruningForRegistry(e,a){let s=[];const i=t.isNullOrUndefined(a.minimumAgeInDays)?60:a.minimumAgeInDays,r=t.isNullOrUndefined(a.minimumImageCount)?600:a.minimumImageCount,o=a.batchDeleteSize||100;for(let t=0;t<a.repositoriesToPurge.length;t++){const c=a.repositoriesToPurge[t],l=[];let h;do{h&&n.info(`Fetching images for ${c} (from ${l.length})...`);const t=await this.ecr.send(new ce({registryId:e,repositoryName:c,nextToken:h,maxResults:1e3}));h=t.nextToken,l.push(...t.imageDetails)}while(void 0!==h);n.info(`Found ${l.length} image(s) for ${c}`),l.sort(((e,t)=>e.imagePushedAt>t.imagePushedAt?1:-1));const u=Date.now()-24*i*60*60*1e3,d=SharedDateUtil.epochMSToDateHourUtc(u);n.info(`Maximum allowed creation date for pruning: ~${d}`);const m=[];for(const e of l){if(e.imagePushedAt.getTime()>u){n.info(`Image is too recently pushed ${e.imagePushedAt}. Finished selecting for pruning.`);break}if(l.length-m.length<=r){n.info("Reached minimum image count. 
Finished selecting for pruning.");break}m.push({imageDigest:e.imageDigest}),n.info(`Adding to prune list: ${e.imageDigest} (${e.imagePushedAt})`)}const f=he.chunk(m,o);n.info(`Got ${f.length} chunks of images to delete.`);let g=0;if(s=s.concat(m),a.dryRun)n.info("DryRun specified : Would have purged %j",m);else for(;g<f.length;){const t=f[g];n.info(`Deleting chunk: ${g+1}/${f.length} (${t.length} image(s))`),await this.ecr.send(new le({registryId:e,repositoryName:c,imageIds:t})),g++}n.info(`Finished deleting all chunks for ${c}`)}return s}}class Le{providers;constructor(e){this.providers=e,t.notNullOrUndefined(e),t.true(e.length>0)}async fetchConfig(e){n.silly("CascadeEnvironmentServiceProvider fetch for %s",e);let t=null;for(let a=0;a<this.providers.length&&!t;a++)try{t=await this.providers[a].fetchConfig(e)}catch(e){n.error("Provider %d failed - trying next : %s",a,e,e),t=null}return t}}class Ve{envVarName;constructor(e){this.envVarName=e,t.notNullOrUndefined(e)}async fetchConfig(){n.silly("EnvVarEnvironmentServiceProvider fetch for %s",this.envVarName);let e=null;const t=process?process.env:global||{},i=a.trimToNull(t[this.envVarName]);if(i)try{e=JSON.parse(i)}catch(e){throw n.error("Failed to read env - null or invalid JSON : %s : %s",e,i,e),e}else s.throwFormattedErr("Could not find env var with name : %s",this.envVarName);return e}}class _e{provider;cfg;readPromiseCache=new Map;static defaultEnvironmentServiceConfig(){return{maxRetries:3,backoffMultiplierMS:500}}constructor(e,a=_e.defaultEnvironmentServiceConfig()){this.provider=e,this.cfg=a,t.notNullOrUndefined(e),t.notNullOrUndefined(a)}async getConfig(e){return n.silly("EnvService:Request to read config %s",e),this.readPromiseCache.has(e)||(n.silly("EnvService: Nothing in cache - adding"),this.readPromiseCache.set(e,this.getConfigUncached(e))),this.readPromiseCache.get(e)}async getConfigUncached(e){let t=1,a=null;for(;!a&&t<this.cfg.maxRetries;){t++,n.silly("Attempting fetch of %s",e);try{a=await this.provider.fetchConfig(e)}catch(a){const s=t*this.cfg.backoffMultiplierMS;n.info("Error attempting to fetch config %s (try %d of %d, waiting %s MS): %s",e,t,this.cfg.maxRetries,s,a,a),await r.wait(s)}}return a||s.throwFormattedErr("Was unable to fetch config %s even after %d retries",e,this.cfg.maxRetries),a}}class Ge{value;constructor(e){this.value=e,t.notNullOrUndefined(e)}static fromRecord(e){const t=new Map;return Object.keys(e).forEach((a=>{t.set(a,e[a])})),new Ge(t)}async fetchConfig(e){n.silly("FixedEnvironmentServiceProvider fetch for %s",e);return this.value.get(e)}}class We{cfg;ratchet;constructor(e){this.cfg=e,t.notNullOrUndefined(e),t.notNullOrUndefined(e.bucketName),t.notNullOrUndefined(e.region),t.true(!!e.s3Override||!!e.region,"You must set either region or S3Override");const a=e.s3Override||new U({region:e.region});this.ratchet=new Fe(a,e.bucketName)}async fetchConfig(e){const t=a.trimToEmpty(this.cfg.pathPrefix)+e+a.trimToEmpty(this.cfg.pathSuffix);n.silly("S3EnvironmentServiceProvider:Request to read config from : %s / %s",this.cfg.bucketName,t);const s=new i,r=await this.ratchet.fetchCacheFileAsObject(t);return s.log(),r}}class qe{region;ssmEncrypted;ssm;constructor(e="us-east-1",a=!0){this.region=e,this.ssmEncrypted=a,t.notNullOrUndefined(e),t.notNullOrUndefined(a),this.ssm=new ue({region:this.region})}async fetchConfig(e){n.silly("SsmEnvironmentServiceProvider fetch for %s",e);const t={Name:e,WithDecryption:this.ssmEncrypted};let i=null,o=null;try{const e=await this.ssm.send(new 
de(t));o=a.trimToNull(e?.Parameter?.Value)}catch(t){if(t instanceof me){const t=n.warn("AWS could not find parameter %s - are you using the right AWS key?",e);throw new Error(t)}if(!((s.safeStringifyErr(t)||"").toLowerCase().indexOf("throttl")>-1))throw n.error("Final environment fetch error (cannot retry) : %s",t,t),t;n.warn("Throttled while trying to read parameters - waiting 1 second before allowing retry"),await r.wait(1e3)}if(o)try{i=JSON.parse(o)}catch(e){throw n.error("Failed to read env - null or invalid JSON : %s : %s",e,o,e),e}else s.throwFormattedErr("Could not find system parameter with name : %s in this account",e);return i}}class ze{tableName;dynamoRatchet;constructor(e,t){this.tableName=e,this.dynamoRatchet=t}async checkCode(e,t,a){const s={code:e,context:t},n=await this.dynamoRatchet.simpleGet(this.tableName,s),i=n&&n.expiresEpochMS>Date.now();return i&&a&&await this.dynamoRatchet.simpleDelete(this.tableName,s),i}async storeCode(e){const t=await this.dynamoRatchet.simplePut(this.tableName,e);return t&&t.ConsumedCapacity.CapacityUnits>0}async createTableIfMissing(e){return null}}class Je{provider;constructor(e){this.provider=e}static generateCode(e){t.notNullOrUndefined(e,"params"),t.notNullOrUndefined(e.context,"params.context"),t.notNullOrUndefined(e.length,"params.length"),t.notNullOrUndefined(e.alphabet,"params.alphabet");let a="";for(let t=0;t<e.length;t++)a+=e.alphabet.charAt(Math.floor(e.alphabet.length*Math.random()));return{code:a,context:e.context,tags:e.tags,expiresEpochMS:Date.now()+1e3*e.timeToLiveSeconds}}async createNewCode(e){const t=Je.generateCode(e);return await this.provider.storeCode(t)?t:null}async checkCode(e,t,s){return await this.provider.checkCode(a.trimToEmpty(e),a.trimToEmpty(t),s)}}class $e{s3CacheRatchet;keyName;constructor(e,a){this.s3CacheRatchet=e,this.keyName=a,t.notNullOrUndefined(e,"s3CacheRatchet"),t.notNullUndefinedOrOnlyWhitespaceString(e.getDefaultBucket(),"s3CacheRatchet.defaultBucket"),t.notNullUndefinedOrOnlyWhitespaceString(a,"keyName")}async fetchFile(){return await this.s3CacheRatchet.fetchCacheFileAsObject(this.keyName)||{data:[],lastModifiedEpochMS:Date.now()}}async updateFile(e){const t={data:e||[],lastModifiedEpochMS:Date.now()};t.data=t.data.filter((e=>e.expiresEpochMS>Date.now())),n.info("Updating code file to %s codes",t.data.length);return await this.s3CacheRatchet.writeObjectToCacheFile(this.keyName,t)}async checkCode(e,t,a){const s=await this.fetchFile(),i=s.data.find((a=>a?.code?.toUpperCase()===e?.toUpperCase()&&a?.context?.toUpperCase()===t?.toUpperCase()));if(i&&(a||i.expiresEpochMS<Date.now())){n.info("Stripping used/expired code from the database");const e=s.data.filter((e=>e!=i));await this.updateFile(e)}return!!i&&i.expiresEpochMS>Date.now()}async storeCode(e){const t=await this.fetchFile();t.data.push(e);return!!await this.updateFile(t.data)}}class Qe{constructor(){}static applySetProfileEnvironmentalVariable(e){process.env?a.trimToNull(e)?process.env.AWS_PROFILE=e:s.throwFormattedErr("Cannot set profile to null/empty string"):s.throwFormattedErr("Cannot set profile - not in a node environment - process missing")}}class He{static isValidCronEvent(e){return e&&"aws.events"==e.source&&e.resources&&e.resources.length>0}static isValidSnsEvent(e){return e&&e.Records&&e.Records.length>0&&"aws:sns"==e.Records[0].EventSource}static isValidSqsEvent(e){return e&&e.Records&&e.Records.length>0&&"aws:sqs"==e.Records[0].eventSource}static isValidDynamoDBEvent(e){return 
e&&e.Records&&e.Records.length>0&&"aws:dynamodb"==e.Records[0].eventSource}static isValidS3Event(e){return e&&e.Records&&e.Records.length>0&&"aws:s3"==e.Records[0].eventSource}static isValidApiGatewayV2WithRequestContextEvent(e){return e&&e.rawPath&&e.requestContext&&e.routeKey}static isValidApiGatewayEvent(e){return e&&e.httpMethod&&e.path&&e.requestContext}static isValidApiGatewayAuthorizerEvent(e){return e&&e.authorizationToken&&e.methodArn}static isSingleCronEvent(e){return this.isValidCronEvent(e)&&He.isSingleEntryEvent(e,"resources")}static isSingleSnsEvent(e){return this.isValidSnsEvent(e)&&He.isSingleEntryEvent(e)}static isSingleDynamoDBEvent(e){return this.isValidDynamoDBEvent(e)&&He.isSingleEntryEvent(e)}static isSingleS3Event(e){return this.isValidS3Event(e)&&He.isSingleEntryEvent(e)}static isSingleEntryEvent(e,t="Records"){return e&&e[t]&&e[t]instanceof Array&&1===e[t].length}}class Ye{static isValidCronEvent(e){return He.isValidCronEvent(e)}static isValidSqsEvent(e){return He.isValidSqsEvent(e)}static isValidSnsEvent(e){return He.isValidSnsEvent(e)}static isValidDynamoDBEvent(e){return He.isValidDynamoDBEvent(e)}static isValidS3Event(e){return He.isValidS3Event(e)}static isValidApiGatewayV2WithRequestContextEvent(e){return He.isValidApiGatewayV2WithRequestContextEvent(e)}static isValidApiGatewayEvent(e){return He.isValidApiGatewayEvent(e)}static isValidApiGatewayAuthorizerEvent(e){return He.isValidApiGatewayAuthorizerEvent(e)}}class Ze{route53;hostedZoneId;constructor(e,t){if(this.route53=e,this.hostedZoneId=t,!this.route53)throw"route53 may not be null"}get route53Client(){return this.route53}async changeCnameRecordTarget(e,t,a=this.hostedZoneId,s=600){n.info("Updating %s to point to %s",e,t);try{const i={ChangeBatch:{Changes:[{Action:"UPSERT",ResourceRecordSet:{Name:e,ResourceRecords:[{Value:t}],TTL:s,Type:"CNAME"}}]},HostedZoneId:a},r=await this.route53.send(new fe(i));n.debug("Updated domain result: %j",r);const o={Id:r.ChangeInfo.Id},c=await ge({client:this.route53,maxWaitTime:300},o);if(n.debug("Wait responsed: %j",c),c.state===pe.SUCCESS)return n.info("Updated %s to point to %s",e,a),!0}catch(a){n.warn("Error update CName for %s with value %s: %j",e,t,a)}return n.info("Cannot update %s to point to %s",e,t),!1}}class Xe{dynamo;tableName;constructor(e,a){this.dynamo=e,this.tableName=a,t.notNullOrUndefined(this.dynamo),t.notNullOrUndefined(this.tableName)}async readParameter(e,t){n.silly("Reading %s / %s from underlying db",e,t);const a={groupId:e,paramKey:t};return await this.dynamo.simpleGet(this.tableName,a)}async readAllParametersForGroup(e){const t={TableName:this.tableName,KeyConditionExpression:"groupId = :groupId",ExpressionAttributeValues:{":groupId":e}};return await this.dynamo.fullyExecuteQuery(t)}async writeParameter(e){return!!await this.dynamo.simplePut(this.tableName,e)}}class et{wrapped;options={globalTTL:1,separator:".",prefix:"RuntimeEnv-",suffix:""};constructor(e,a){this.wrapped=e,t.notNullOrUndefined(this.wrapped,"wrapped"),t.notNullOrUndefined(global?.process?.env,'"process" not found - this only runs in Node, not the browser'),a&&(this.options=a),t.notNullOrUndefined(this.options.globalTTL,"this.options.globalTTL"),t.notNullOrUndefined(this.options.separator,"this.options.separator"),t.true(this.options.globalTTL>0,"this.options.globalTTL must be larger than 0")}generateName(e,t){return a.trimToEmpty(this.options.prefix)+e+a.trimToEmpty(this.options.separator)+t+a.trimToEmpty(this.options.suffix)}async readParameter(e,t){const 
n=a.trimToNull(process.env[this.generateName(e,t)]);n&&!a.canParseAsJson(n)&&s.throwFormattedErr("Cannot parse ENV override (%s / %s) as JSON - did you forget the quotes on a string?",e,t);return n?{groupId:e,paramKey:t,paramValue:n,ttlSeconds:this.options.globalTTL}:await this.wrapped.readParameter(e,t)}async readAllParametersForGroup(e){return this.wrapped.readAllParametersForGroup(e)}async writeParameter(e){return this.wrapped.writeParameter(e)}}class tt{data;constructor(e=Promise.resolve({})){this.data=e}async readParameter(e,t){n.silly("Reading %s / %s from underlying db",e,t);return(await this.data)[e+"::"+t]}async readAllParametersForGroup(e){const t=await this.data,a=[];return Object.keys(t).forEach((s=>{s.startsWith(e)&&a.push(t[s])})),a}async writeParameter(e){return(await this.data)[e.groupId+"::"+e.paramKey]=e,!0}}class at{provider;cache=new Map;constructor(e){this.provider=e,t.notNullOrUndefined(this.provider)}async fetchParameter(e,t,a=null,s=!1){n.debug("Reading parameter %s / %s / Force : %s",e,t,s);const i=this.cache.get(at.toCacheStoreKey(e,t));let r=null;const o=(new Date).getTime();if(!s&&i){const a=i.ttlSeconds?o-1e3*i.ttlSeconds:0;i.storedEpochMS>a&&(n.silly("Fetched %s / %s from cache",e,t),r=JSON.parse(i.paramValue))}if(!r){const a=await this.readUnderlyingEntry(e,t);a&&(this.addToCache(a),r=JSON.parse(a.paramValue))}return r=r||a,r}async fetchAllParametersForGroup(e){const t=await this.readUnderlyingEntries(e),a=new Map;return t.forEach((e=>{a.set(e.paramKey,JSON.parse(e.paramValue)),this.addToCache(e)})),a}async readUnderlyingEntry(e,t){return this.provider.readParameter(e,t)}async readUnderlyingEntries(e){return this.provider.readAllParametersForGroup(e)}async storeParameter(e,t,a,s){const n={groupId:e,paramKey:t,paramValue:JSON.stringify(a),ttlSeconds:s};return await this.provider.writeParameter(n),this.provider.readParameter(e,t)}static toCacheStoreKey(e,t){return e+":::"+t}addToCache(e){if(e){const t=(new Date).getTime(),a=Object.assign({storedEpochMS:t},e);this.cache.set(at.toCacheStoreKey(e.groupId,e.paramKey),a)}}clearCache(){n.debug("Clearing runtime parameter cache"),this.cache=new Map}}class st{config;constructor(e){t.notNullOrUndefined(e,"config"),this.config=e,this.config.maxNumThreads||(this.config.maxNumThreads=15),this.config.maxRetries||(this.config.maxRetries=5)}updateSrcPrefix(e){this.config.srcPrefix=e}updateDstPrefix(e){this.config.dstPrefix=e}async copyObject(e,t,a=!1){const s=e.replace(this.config.srcPrefix,this.config.dstPrefix);let i=!1,r=0;for(;!i&&r<this.config.maxRetries;){n.debug(`${r>0?`Retry ${r} `:""}${a?"Express":"Slow"} copying\n [${[this.config.srcBucket,e].join("/")} ---\x3e ${[this.config.dstBucket,s].join("/")}]`);try{if(a){const t={CopySource:encodeURIComponent([this.config.srcBucket,e].join("/")),Bucket:this.config.dstBucket,Key:s,MetadataDirective:"COPY"};await this.config.dstS3.send(new I(t))}else{const a=await this.config.srcS3.send(new O({Bucket:this.config.srcBucket,Key:e})),i={Bucket:this.config.dstBucket,Key:s,Body:a.Body,ContentLength:t},r=new j({client:this.config.dstS3,params:i,tags:[],queueSize:4,partSize:5242880,leavePartsOnError:!1});r.on("httpUploadProgress",(e=>{n.debug("Uploading : %s",e)})),await r.done()}i=!0}catch(t){n.warn(`Can't ${a?"express":"slow"} copy\n [${[this.config.srcBucket,e].join("/")} ---\x3e ${[this.config.dstBucket,s].join("/")}]: %j`,t),r++}}n.debug(`Finished ${a?"express":"slow"} copying\n [${[this.config.srcBucket,e].join("/")} ---\x3e ${[this.config.dstBucket,s].join("/")}]`)}async 
listObjects(e,t,a){n.info(`Scanning bucket [${[e,t].join("/")}]`);const s={Bucket:e,Prefix:t};let i=!0;const r={};for(;i;){const e=await a.send(new R(s));i=e.IsTruncated,e.Contents.forEach((e=>{r[e.Key]={Key:e.Key,LastModified:e.LastModified,ETag:e.ETag,Size:e.Size}})),i&&(s.ContinuationToken=e.NextContinuationToken)}return r}async startSyncing(){n.info(`Syncing [${this.config.srcBucket}/${this.config.srcPrefix}\n ---\x3e ${this.config.dstBucket}/${this.config.dstPrefix}]`);const e=async e=>{await this.copyObject(e.Key,e.Size)};let t=await this.compareSrcAndDst();return(t.needCopy.length>0||t.diff.length>0)&&(await r.runBoundedParallelSingleParam(e,t.needCopy,this,this.config.maxNumThreads),await r.runBoundedParallelSingleParam(e,t.diff,this,this.config.maxNumThreads),n.info("Verifying..."),t=await this.compareSrcAndDst(),n.debug("Compare result %j",t)),0===t.needCopy.length&&0===t.diff.length}async compareSrcAndDst(){const e=this.listObjects(this.config.srcBucket,this.config.srcPrefix,this.config.srcS3),t=this.listObjects(this.config.dstBucket,this.config.dstPrefix,this.config.dstS3),a=await e,s=await t,n={needCopy:[],existed:[],diff:[]};return await r.runBoundedParallelSingleParam((e=>{const t=a[e],i=e.replace(this.config.srcPrefix,this.config.dstPrefix),r=s.hasOwnProperty(i)?s[i]:void 0;r?t.Size===r.Size&&t.LastModified.getTime()<=r.LastModified.getTime()?n.existed.push(t):n.diff.push(t):n.needCopy.push(t)}),Object.keys(a),this,this.config.maxNumThreads),n}}class nt{static checkS3UrlForValidity(e){let t=!1;return e&&(t=e.startsWith("s3://")&&e.trim().length>5),t}static extractBucketFromURL(e){t.true(nt.checkS3UrlForValidity(e),"invalid s3 url");const a=e.indexOf("/",5);return a>0?e.substring(5,a):e.substring(5)}static extractKeyFromURL(e){t.true(nt.checkS3UrlForValidity(e),"invalid s3 url");const a=e.indexOf("/",5);return a>0?e.substring(a+1):null}}class it{ses;config;static EMAIL=new RegExp(".+@.+\\.[a-z]+");constructor(e,a={}){if(this.ses=e,this.config=a,t.notNullOrUndefined(this.ses),a.archive&&!a.archive.getDefaultBucket())throw new Error("If archive specified, must set a default bucket")}get sESClient(){return this.ses}async fillEmailBody(e,a,n,i=null,r=null,o=null){return t.notNullOrUndefined(n),this.config.templateRenderer||s.throwFormattedErr("Cannot use fill body if template renderer not set"),e.htmlMessage=await this.config.templateRenderer.renderTemplate(n,a,r,o),e.txtMessage=i?await this.config.templateRenderer.renderTemplate(i,a):null,e}async fillEmailBodyAndSend(e,t,a,s=null,n=null,i=null){const r=await this.fillEmailBody(e,t,a,s,n,i);return await this.sendEmail(r)}filterEmailsToValid(e){return(e||[]).filter((e=>{if(this.config.allowedDestinationEmails&&0!=this.config.allowedDestinationEmails.length){return!!this.config.allowedDestinationEmails.find((t=>t.test(e)))}return!0}))}async archiveEmailIfConfigured(t){let s=!1;if(t&&this.config.archive&&!t.doNotArchive){n.debug("Archiving outbound email to : %j",t.destinationAddresses);let i=a.trimToEmpty(this.config.archivePrefix);i.endsWith("/")||(i+="/");const r=e.utc();i+="year="+r.toFormat("yyyy")+"/month="+r.toFormat("MM")+"/day="+r.toFormat("dd")+"/hour="+r.toFormat("HH")+"/"+r.toFormat("mm_ss__SSS"),i+=".json";try{await this.config.archive.writeObjectToCacheFile(i,t),s=!0}catch(e){n.warn("Failed to archive email %s %j : %s",i,t,e)}}return s}applyLimitsToBodySizesIfAnyInPlace(e){if(this.config.maxMessageBodySizeInBytes){const 
t=a.trimToEmpty(e.txtMessage).length+a.trimToEmpty(e.htmlMessage).length;if(t>this.config.maxMessageBodySizeInBytes){if(n.warn("Max message size is %d but size is %d - converting",this.config.maxMessageBodySizeInBytes,t),e.attachments=e.attachments||[],a.trimToNull(e.txtMessage)){const t={filename:"original-txt-body.txt",contentType:"text/plain",base64Data:u.generateBase64VersionOfString(e.txtMessage)};e.attachments.push(t)}if(a.trimToNull(e.htmlMessage)){const t={filename:"original-html-body.html",contentType:"text/html",base64Data:u.generateBase64VersionOfString(e.htmlMessage)};e.attachments.push(t)}e.htmlMessage=null,e.txtMessage="The message was too large and was converted to attachment(s). Please see attached files for content"}}}applyLimitsToAttachmentSizesIfAnyInPlace(e){if(this.config.maxAttachmentSizeInBase64Bytes){const t=[];e.attachments&&e.attachments.forEach((e=>{e.base64Data&&e.base64Data.length<this.config.maxAttachmentSizeInBase64Bytes?t.push(e):(n.warn("Removing too-large attachment : %s : %s : %d",e.filename,e.contentType,e.base64Data.length),t.push({filename:"attachment-removed-notice-"+a.createRandomHexString(4)+".txt",contentType:"text/plain",base64Data:u.generateBase64VersionOfString("Attachment "+e.filename+" of type "+e.contentType+" was removed since it was "+e.base64Data.length+" bytes but max allowed is "+this.config.maxAttachmentSizeInBase64Bytes)}))})),e.attachments=t}}async sendEmail(e){t.notNullOrUndefined(e,"RTS must be defined"),t.notNullOrUndefined(e.destinationAddresses,"Destination addresses must be defined");let s=null,i=this.filterEmailsToValid(e.destinationAddresses);const r=e.doNotAutoBcc?[]:this.config.autoBccAddresses||[],o=(e.bccAddresses||[]).concat(r);0===i.length&&o.length>0&&(n.debug("Destination emails filtered to none but BCC defined, copying BCC"),i=o);const c=Object.assign({},e);if(c.srcDestinationAddresses=e.destinationAddresses,c.srcBccAddresses=e.bccAddresses,c.destinationAddresses=i,c.bccAddresses=o,this.applyLimitsToBodySizesIfAnyInPlace(c),this.applyLimitsToAttachmentSizesIfAnyInPlace(c),await this.archiveEmailIfConfigured(c),0===c.destinationAddresses.length)n.info("After cleaning email lists, no destination addresses left - not sending email");else{const e="To: "+c.destinationAddresses.join(", ")+"\n",t=c.bccAddresses&&c.bccAddresses.length>0?"Bcc: "+c.bccAddresses.join(", ")+"\n":"";try{const n=c.fromAddress||this.config.defaultSendingAddress,i="NextPart",r="AltPart";let o="From: "+n+"\n";o+=e,o+=t,o+="Subject: "+c.subject+"\n",o+="MIME-Version: 1.0\n",o+='Content-Type: multipart/mixed; boundary="'+i+'"\n',o+="\n\n--"+i+"\n",o+='Content-Type: multipart/alternative; boundary="'+r+'"\n',a.trimToNull(c.htmlMessage)&&(o+="\n\n--"+r+"\n",o+='Content-Type: text/html; charset="UTF-8"\n\n',o+=c.htmlMessage),a.trimToNull(c.txtMessage)&&(o+="\n\n--"+r+"\n",o+="Content-Type: text/plain\n\n",o+=c.txtMessage),o+="\n\n--"+r+"--\n",c.attachments&&c.attachments.forEach((e=>{o+="\n\n--"+i+"\n",o+="Content-Type: "+e.contentType+'; name="'+e.filename+'"\n',o+="Content-Transfer-Encoding: base64\n",o+="Content-Disposition: attachment\n\n",o+=e.base64Data.replace(/([^\0]{76})/g,"$1\n")+"\n\n"})),o+="\n\n--"+i+"--\n";const l={RawMessage:{Data:(new TextEncoder).encode(o)}};s=await this.ses.send(new ye(l))}catch(e){n.error("Error while processing email: %s",e,e)}}return s}static validEmail(e){return null!==e&&it.EMAIL.test(e)}}class rt{sns;topicArn;constructor(e=new 
we({region:"us-east-1"}),a){this.sns=e,this.topicArn=a,t.notNullOrUndefined(this.sns,"sns"),t.notNullOrUndefined(this.topicArn,"topicArn")}get snsClient(){return this.sns}async sendMessage(e,t=!1){let a=null;try{const t=e||"NO-MESSAGE-PROVIDED",s="string"==typeof t?t:JSON.stringify(t),i={TopicArn:this.topicArn,Message:s};n.debug("Sending via SNS : %j",i),a=await this.sns.send(new be(i))}catch(a){if(!t)throw a;n.error("Failed to fire SNS notification : %j : %s",e,a)}return a}async conditionallySendMessage(e,t,a=!1){let s=null;return t?s=await this.sendMessage(e,a):n.info("Not sending message, condition was false : %j",e),s}}class ot{ratchet;tableName;constructor(e,s){this.ratchet=e,this.tableName=s,t.notNullOrUndefined(e,"ratchet"),t.notNullOrUndefined(a.trimToNull(this.tableName),"tableName")}async acquireLock(e,t=30){let a=!1;if(e&&t){const s=Math.floor((new Date).getTime()/1e3),i={Item:{lockingKey:e,timestamp:s,expires:s+t},ReturnConsumedCapacity:ee.TOTAL,TableName:this.tableName,ConditionExpression:"attribute_not_exists(lockingKey)"};try{await this.ratchet.getDDB().send(new G(i));a=!0}catch(t){t instanceof J&&n.silly("Unable to acquire lock on %s",e)}}return a}async releaseLock(e){if(a.trimToNull(e))try{const t=await this.ratchet.simpleDelete(this.tableName,{lockingKey:e});n.silly("Released lock %s : %s",e,t)}catch(t){n.warn("Failed to release lock key : %s : %s",e,t,t)}}async clearExpiredSyncLocks(){const e=Math.floor((new Date).getTime()/1e3),t={TableName:this.tableName,FilterExpression:"expires < :now",ExpressionAttributeValues:{":now":e}},a=(await this.ratchet.fullyExecuteScan(t)).map((e=>({lockingKey:e.lockingKey})));return await this.ratchet.deleteAllInBatches(this.tableName,a,25)}}class ct{_locks=new Map;constructor(){}async acquireLock(e,t=30){let s=!1;if(a.trimToNull(e)){const a=Date.now(),n=this._locks.get(e);(!n||n<a)&&(this._locks.set(e,a+1e3*t),s=!0)}return s}async releaseLock(e){a.trimToNull(e)&&this._locks.delete(e)}async clearExpiredSyncLocks(){const e=[],t=Date.now();return this._locks.forEach(((a,s)=>{a<t&&e.push(s)})),e.forEach((e=>{this._locks.delete(e)})),e.length}}export{Ee as AwsBatchBackgroundProcessor,Te as AwsBatchRatchet,Qe as AwsCredentialsRatchet,Le as CascadeEnvironmentServiceProvider,ve as CloudWatchLogGroupRatchet,De as CloudWatchLogsRatchet,Oe as CloudWatchMetricsRatchet,Ae as Daemon,ke as DaemonUtil,Ne as DynamoDbStorageProvider,ot as DynamoDbSyncLock,ze as DynamoExpiringCodeProvider,Ue as DynamoRatchet,Xe as DynamoRuntimeParameterProvider,Re as DynamoTableRatchet,Be as Ec2Ratchet,Ke as EcrRatchet,Ve as EnvVarEnvironmentServiceProvider,_e as EnvironmentService,Je as ExpiringCodeRatchet,Ge as FixedEnvironmentServiceProvider,et as GlobalVariableOverrideRuntimeParameterProvider,je as HashSpreader,He as LambdaEventDetector,Ye as LambdaEventTypeGuards,it as Mailer,tt as MemoryRuntimeParameterProvider,ct as MemorySyncLock,Pe as PrototypeDao,Se as RatchetAwsInfo,Ze as Route53Ratchet,at as RuntimeParameterRatchet,Fe as S3CacheRatchet,We as S3EnvironmentServiceProvider,$e as S3ExpiringCodeProvider,st as S3LocationSyncRatchet,Ie as S3PrototypeDaoProvider,nt as S3Ratchet,Me as S3SimpleDao,Ce as S3StorageProvider,xe as SimpleCache,rt as SnsRatchet,qe as SsmEnvironmentServiceProvider};
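
The replacement bundle for 4.0.307-alpha follows below; since the whole file is a single minified line, the registry records it as a full rewrite (+1 -1), but the substantive change that is visible here is the embedded build metadata. That metadata is exposed at runtime through the RatchetAwsInfo export (buildInformation() in the new bundle reports version "307", commit 1487144465622d6277854e7159e10280b70f2c58, branch/tag alpha-2024-01-19-1, and timeBuiltISO 2024-01-19T00:52:21-0800). A minimal consumer-side sketch in TypeScript, assuming only that the package is installed:

  import { RatchetAwsInfo } from '@bitblit/ratchet-aws';

  // buildInformation() returns the metadata baked into the bundle at build
  // time; in the 4.0.307-alpha bundle below it reports version "307", the
  // commit hash above, and branch/tag "alpha-2024-01-19-1".
  const info = RatchetAwsInfo.buildInformation();
  console.log(`ratchet-aws build ${info.version} (${info.hash}) from ${info.branch}`);

The library classes themselves appear unchanged between the two versions. For instance SnsRatchet, visible near the end of the old bundle above, wraps an SNSClient plus a topic ARN, JSON-stringifies non-string payloads before publishing, and, when its second argument is true, logs publish failures instead of rethrowing them. A usage sketch (the topic ARN is a placeholder):

  import { SNSClient } from '@aws-sdk/client-sns';
  import { SnsRatchet } from '@bitblit/ratchet-aws';

  // Placeholder ARN - SnsRatchet requires a non-null topic ARN at construction.
  const topicArn = 'arn:aws:sns:us-east-1:123456789012:example-topic';
  const sns = new SnsRatchet(new SNSClient({ region: 'us-east-1' }), topicArn);
  // Second argument true = swallow and log publish errors instead of throwing.
  const publishResult = await sns.sendMessage({ event: 'example' }, true);
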
+
import{DateTime as e}from"luxon";import{RequireRatchet as t,StringRatchet as a,ErrorRatchet as s,Logger as n,StopWatch as i,PromiseRatchet as r,WebStreamRatchet as o,MapRatchet as c,DurationRatchet as l,NumberRatchet as h,Base64Ratchet as u}from"@bitblit/ratchet-common";import{SubmitJobCommand as d,ListJobsCommand as m}from"@aws-sdk/client-batch";import{CloudWatchLogsClient as f,DescribeLogStreamsCommand as g,FilterLogEventsCommand as p,OrderBy as y,DeleteLogStreamCommand as w,DescribeLogGroupsCommand as b,DeleteLogGroupCommand as E,StartQueryCommand as T,GetQueryResultsCommand as S,StopQueryCommand as N}from"@aws-sdk/client-cloudwatch-logs";import{CloudWatchClient as C,PutMetricDataCommand as x,StandardUnit as v}from"@aws-sdk/client-cloudwatch";import{PutObjectCommand as D,GetObjectCommand as O,NoSuchKey as k,DeleteObjectCommand as F,NotFound as A,HeadObjectCommand as P,CopyObjectCommand as I,ListObjectsCommand as M,S3Client as U,ListObjectsV2Command as R}from"@aws-sdk/client-s3";import{Upload as j}from"@aws-sdk/lib-storage";import{getSignedUrl as B}from"@aws-sdk/s3-request-presigner";import{ScanCommand as K,QueryCommand as L,BatchWriteCommand as V,BatchGetCommand as _,PutCommand as G,GetCommand as W,UpdateCommand as q,DeleteCommand as z}from"@aws-sdk/lib-dynamodb";import{ConditionalCheckFailedException as J,ProvisionedThroughputExceededException as $,DeleteTableCommand as Q,CreateTableCommand as H,ListTablesCommand as Y,DescribeTableCommand as Z,ResourceNotFoundException as X,ReturnConsumedCapacity as ee}from"@aws-sdk/client-dynamodb";import{EC2Client as te,StopInstancesCommand as ae,StartInstancesCommand as se,DescribeInstancesCommand as ne}from"@aws-sdk/client-ec2";import{EC2InstanceConnectClient as ie,SendSSHPublicKeyCommand as re}from"@aws-sdk/client-ec2-instance-connect";import{DescribeRegistryCommand as oe,DescribeImagesCommand as ce,BatchDeleteImageCommand as le}from"@aws-sdk/client-ecr";import he from"lodash";import{SSMClient as ue,GetParameterCommand as de,ParameterNotFound as me}from"@aws-sdk/client-ssm";import{ChangeResourceRecordSetsCommand as fe,waitUntilResourceRecordSetsChanged as ge}from"@aws-sdk/client-route-53";import{WaiterState as pe}from"@smithy/util-waiter";import{SendRawEmailCommand as ye}from"@aws-sdk/client-ses";import{SNSClient as we,PublishCommand as be}from"@aws-sdk/client-sns";class Ee{batchRatchet;validTaskNames;constructor(e,a){this.batchRatchet=e,this.validTaskNames=a,t.notNullOrUndefined(this.batchRatchet,"batchRatchet"),t.notNullOrUndefined(this.batchRatchet.batchClient,"batchRatchet.batchClient"),t.notNullOrUndefined(this.batchRatchet.defaultJobDefinition,"batchRatchet.defaultJobDefinition"),t.notNullOrUndefined(this.batchRatchet.defaultQueueName,"batchRatchet.defaultQueueName")}async scheduleBackgroundTask(t,i={},r={}){!this.validTaskNames||!this.validTaskNames.length||a.trimToNull(t)&&this.validTaskNames.includes(t)||s.throwFormattedErr("Cannot start task %s - not found in valid task list",t),n.info("Submitting background task to AWS batch: %s %j %s",t,i,this.batchRatchet.defaultQueueName);let o=null;const c=`${this.batchRatchet.defaultJobDefinition}-${t}_${e.utc().toFormat("yyyy-MM-dd-HH-mm")}`,l={jobName:c,jobDefinition:this.batchRatchet.defaultJobDefinition,jobQueue:this.batchRatchet.defaultQueueName,parameters:{taskName:t,taskData:JSON.stringify(i),taskMetadata:JSON.stringify(r)}};try{o=await this.batchRatchet.scheduleJob(l),n.info("Job %s(%s) submitted",o.jobName,o.jobId)}catch(e){n.error("Cannot submit batch job taskName: %s jobDef: %s queue: %s 
jobName: %s data: %j",t,this.batchRatchet.defaultJobDefinition,this.batchRatchet.defaultQueueName,c,i,e)}return o}}class Te{_batchClient;_defaultQueueName;_defaultJobDefinition;constructor(e,t,a){this._batchClient=e,this._defaultQueueName=t,this._defaultJobDefinition=a}get batchClient(){return this._batchClient}get defaultQueueName(){return this._defaultQueueName}get defaultJobDefinition(){return this._defaultJobDefinition}async scheduleJob(e){n.info("Submitting batch job %s",e.jobName);try{const t=await this._batchClient.send(new d(e));return n.info("Job %s(%s) submitted",t.jobName,t.jobId),t}catch(t){n.error("Cannot submit batch job %s: %s",e.jobName,t)}return null}async jobCountInState(e,t=this.defaultQueueName){return(await this.listJobs(t,e)).length}async listJobs(e=this.defaultQueueName,a=null){t.notNullOrUndefined(e,"queueName");let s=[];const i={jobQueue:e,jobStatus:a,nextToken:null};n.info("Fetching %j",i);do{n.info("Pulling page...");const e=await this._batchClient.send(new m(i));s=s.concat(e.jobSummaryList),i.nextToken=e.nextToken}while(i.nextToken);return s}}class Se{constructor(){}static buildInformation(){return{version:"307",hash:"1487144465622d6277854e7159e10280b70f2c58",branch:"alpha-2024-01-19-1",tag:"alpha-2024-01-19-1",timeBuiltISO:"2024-01-19T00:52:21-0800",notes:"No notes"}}}class Ne{dynamo;opts;constructor(e,a){this.dynamo=e,this.opts=a,t.notNullOrUndefined(this.dynamo,"dynamo"),t.notNullOrUndefined(this.opts,"opts"),t.notNullOrUndefined(this.opts.tableName,"opts.tableName"),t.notNullOrUndefined(this.opts.hashKeyName,"opts.hashKeyName"),t.true(!this.opts.useRangeKeys||!!this.opts.rangeKeyName&&!!this.opts.hashKeyValue,"invalid range configuration")}static createDefaultOptions(){return{tableName:"simple-cache",useRangeKeys:!1,hashKeyName:"cache-key",rangeKeyName:null,hashKeyValue:null}}createKeyObject(e){const t={};return this.opts.useRangeKeys?(t[this.opts.hashKeyName]=this.opts.hashKeyValue,t[this.opts.rangeKeyName]=e):t[this.opts.hashKeyName]=e,t}cleanDynamoFieldsFromObjectInPlace(e){e&&(delete e[this.opts.hashKeyName],this.opts.rangeKeyName&&delete e[this.opts.rangeKeyName],this.opts.dynamoExpiresColumnName&&delete e[this.opts.dynamoExpiresColumnName])}extractKeysFromObject(e){let t=null;return e&&(t={},this.opts.useRangeKeys?(t[this.opts.hashKeyName]=this.opts.hashKeyValue,t[this.opts.rangeKeyName]=e.cacheKey):t[this.opts.hashKeyName]=e.cacheKey),t}async readFromCache(e){const t=this.createKeyObject(e),a=await this.dynamo.simpleGet(this.opts.tableName,t);return this.cleanDynamoFieldsFromObjectInPlace(a),a}async storeInCache(e){t.notNullOrUndefined(e,"value"),t.notNullOrUndefined(e.cacheKey,"value.cacheKey");const a=Object.assign({},e,this.createKeyObject(e.cacheKey));this.opts.dynamoExpiresColumnName&&e.expiresEpochMS&&(a[this.opts.dynamoExpiresColumnName]=Math.floor(e.expiresEpochMS/1e3));return!!await this.dynamo.simplePut(this.opts.tableName,a)}async removeFromCache(e){await this.dynamo.simpleDelete(this.opts.tableName,this.createKeyObject(e))}async clearCache(){const e=(await this.readAll()).map((e=>this.extractKeysFromObject(e)));return await this.dynamo.deleteAllInBatches(this.opts.tableName,e,25)}async readAll(){let e=null;if(this.opts.useRangeKeys){const t={TableName:this.opts.tableName,KeyConditionExpression:"#cacheKey = :cacheKey",ExpressionAttributeNames:{"#cacheKey":this.opts.hashKeyName},ExpressionAttributeValues:{":cacheKey":this.opts.hashKeyValue}};e=await this.dynamo.fullyExecuteQuery(t)}else{const t={TableName:this.opts.tableName};e=await 
this.dynamo.fullyExecuteScan(t)}return e.forEach((e=>this.cleanDynamoFieldsFromObjectInPlace(e))),e}}class Ce{s3CacheRatchet;prefix;constructor(e,a){this.s3CacheRatchet=e,this.prefix=a,t.notNullOrUndefined(this.s3CacheRatchet,"s3CacheRatchet"),t.notNullOrUndefined(this.s3CacheRatchet.getDefaultBucket(),"s3CacheRatchet.defaultBucket")}keyToPath(e){let t=a.trimToEmpty(this.prefix);return t.length>0&&!t.endsWith("/")&&(t+="/"),t+=e,t}async readFromCache(e){return await this.s3CacheRatchet.fetchCacheFileAsObject(this.keyToPath(e))}async storeInCache(e){t.notNullOrUndefined(e,"value"),t.notNullOrUndefined(e.cacheKey,"value.cacheKey");return!!await this.s3CacheRatchet.writeObjectToCacheFile(this.keyToPath(e.cacheKey),e)}async removeFromCache(e){await this.s3CacheRatchet.removeCacheFile(this.keyToPath(e))}async clearCache(){const e=await this.s3CacheRatchet.directChildrenOfPrefix(this.keyToPath(""));return await Promise.all(e.map((e=>this.removeFromCache(e)))),e.length}async readAll(){const e=await this.s3CacheRatchet.directChildrenOfPrefix(this.keyToPath(""));return await Promise.all(e.map((e=>this.readFromCache(e))))}}class xe{provider;defaultTimeToLiveMS;constructor(e,t=6e4){this.provider=e,this.defaultTimeToLiveMS=t}createDefaultReadOptions(){return{maxStalenessMS:null,timeToLiveMS:this.defaultTimeToLiveMS,cacheNullValues:!1}}async fetchWrapper(e,t,a=this.createDefaultReadOptions()){n.silly("Fetching %s",e);const s=(new Date).getTime();let i=await this.provider.readFromCache(e);if(i&&i.expiresEpochMS<s&&(n.debug("Object found, but expired - removing"),i=null),i&&a&&a.maxStalenessMS&&s-i.createdEpochMS>a.maxStalenessMS&&(n.debug("Object found by too stale - removing"),i=null),!i){n.debug("%s not found in cache, generating",e);const r=await t();(r||a?.cacheNullValues)&&(n.debug("Writing %j to cache"),i={cacheKey:e,createdEpochMS:s,expiresEpochMS:a&&a.timeToLiveMS?s+a.timeToLiveMS:null,value:r,generated:!1},await this.provider.storeInCache(i),i.generated=!0)}return i}async fetch(e,t,a=null){const s=await this.fetchWrapper(e,t,a);return s?s.value:null}async removeFromCache(e,t){let a=null;return t&&(a=await this.fetchWrapper(e,(()=>null))),await this.provider.removeFromCache(e),a}async clearCache(){return this.provider.clearCache()}async readAll(){return this.provider.readAll()}}class ve{logGroup;awsCWLogs;constructor(e,t=new f({region:"us-east-1"})){this.logGroup=e,this.awsCWLogs=t}get cloudWatchLogsClient(){return this.awsCWLogs}async readLogStreams(e=null,t=null){const a={logGroupName:this.logGroup,orderBy:"LastEventTime"},s=[];do{n.debug("Pulling more log streams (%d found so far)",s.length);const i=await this.awsCWLogs.send(new g(a));i.logStreams.forEach((a=>{null!==a.lastEventTimestamp&&(!e||a.lastEventTimestamp>=e)&&(!t||a.firstEventTimestamp<=t)&&s.push(a)})),a.nextToken=i.nextToken}while(a.nextToken);return n.debug("Found %d total, returning",s.length),s}async readLogStreamNames(e=null,t=null){return(await this.readLogStreams(e,t)).map((e=>e.logStreamName))}async readEvents(e,t=null,a=null,s=!0,r=null){const o=new i,c={logGroupName:this.logGroup,endTime:a,startTime:t};e&&(c.filterPattern=e),n.debug("Reading log events matching : %j",c);let l=[];do{n.debug("Pulling more log events (%d found so far) : %s",l.length,o.dump());const e=await this.awsCWLogs.send(new p(c));l=l.concat(e.events),c.nextToken=e.nextToken}while(c.nextToken&&(!r||l.length<r));return n.debug("Found %d total in %s",l.length,o.dump()),s&&(n.debug("Sorting events by timestamp"),l=l.sort(((e,t)=>{let 
a=e.timestamp-t.timestamp;return 0===a&&(a=e.message.localeCompare(t.message)),a}))),o.log(),l}}class De{static MAX_DELETE_RETRIES=5;cwLogs;constructor(e=null){this.cwLogs=e||new f({region:"us-east-1"})}get cloudWatchLogsClient(){return this.cwLogs}async removeEmptyOrOldLogStreams(e,t=1e3,a=null){n.info("Removing empty streams from %s, oldest event epoch MS : %d",e,a);const s={logGroupName:e,orderBy:y.LastEventTime},i=a||1;let o=0;const c=[],l=[];let h=10;do{n.debug("Executing search for streams");try{const e=await this.cwLogs.send(new g(s));o+=e.logStreams.length,n.debug("Found %d streams (%d so far, %d to delete)",e.logStreams.length,o,c.length);for(let a=0;a<e.logStreams.length&&c.length<t;a++){const t=e.logStreams[a];t.firstEventTimestamp?t.lastEventTimestamp<i&&c.push(t):c.push(t)}s.nextToken=e.nextToken}catch(e){const t=h;h=Math.min(1e3,1.5*h),n.info("Caught while describing %s, increasing wait between deletes (was %d, now %d)",e,t,h)}}while(s.nextToken&&c.length<t);n.info("Found %d streams to delete",c.length);let u=10;for(let t=0;t<c.length;t++){const a={logGroupName:e,logStreamName:c[t].logStreamName},s=0===c[t].storedBytes?"empty":"old";n.info("Removing %s stream %s",s,c[t].logStreamName);let i=!1,o=0;for(;!i&&o<De.MAX_DELETE_RETRIES;)try{await this.cwLogs.send(new w(a)),i=!0,await r.wait(u)}catch(e){o++;const t=u;u=Math.min(1e3,1.5*u),n.info("Caught %s, increasing wait between deletes and retrying (wait was %d, now %d) (Retry %d of %d)",e,t,u,o,De.MAX_DELETE_RETRIES)}i||l.push(c[t])}return n.warn("Failed to remove streams : %j",l),c}async findOldestEventTimestampInGroup(e){const t=await this.findStreamWithOldestEventInGroup(e);return t?t.firstEventTimestamp:null}async findStreamWithOldestEventInGroup(e){n.info("Finding oldest event in : %s",e);let t=null;try{const a={logGroupName:e,orderBy:y.LastEventTime};let s=0;do{n.debug("Executing search for streams");const e=await this.cwLogs.send(new g(a));s+=e.logStreams.length,n.debug("Found %d streams (%d so far)",e.logStreams.length,s),e.logStreams.forEach((e=>{e.firstEventTimestamp&&(null===t||e.firstEventTimestamp<t.firstEventTimestamp)&&(t=e)})),a.nextToken=e.nextToken}while(a.nextToken)}catch(t){n.error("Error attempting to find oldest event in group : %s : %s",e,t,t)}return t}async findLogGroups(e){t.notNullOrUndefined(e);const a={logGroupNamePrefix:e};let s=[];do{n.info("%d found, pulling log groups : %j",s.length,a);const e=await this.cwLogs.send(new b(a));s=s.concat(e.logGroups),a.nextToken=e.nextToken}while(a.nextToken);return s}async removeLogGroups(e){t.notNullOrUndefined(e);const a=[];for(let t=0;t<e.length;t++)try{n.info("Deleting %j",e[t]);const s={logGroupName:e[t].logGroupName};await this.cwLogs.send(new E(s)),a.push(!0)}catch(s){n.error("Failure to delete %j : %s",e[t],s),a.push(!1)}return a}async removeLogGroupsWithPrefix(e){t.notNullOrUndefined(e),t.true(a.trimToEmpty(e).length>0),n.info("Removing log groups with prefix %s",e);const s=await this.findLogGroups(e);return await this.removeLogGroups(s)}async fullyExecuteInsightsQuery(e){t.notNullOrUndefined(e),n.debug("Starting insights query : %j",e);const a=await this.cwLogs.send(new T(e));n.debug("Got query id %j",a);let s=null,i=100;for(;!s||["Running","Scheduled"].includes(s.status);)s=await this.cwLogs.send(new S({queryId:a.queryId})),await r.wait(i),i*=2,n.info("Got : %j",s);return s}async abortInsightsQuery(e){let t=null;return e&&(t=await this.cwLogs.send(new N({queryId:e}))),t}}class Oe{cw;constructor(e=null){this.cw=e||new 
C({region:"us-east-1",apiVersion:"2010-08-01"})}get cloudWatchClient(){return this.cw}async writeSingleMetric(e,t,a,s=v.None,i,r=new Date,o=!1){const c=[];a&&a.length>0&&a.forEach((e=>{c.push({Name:e.key,Value:e.value})}));const l={Namespace:e,MetricData:[{MetricName:t,Dimensions:c,Unit:s,Value:i,Timestamp:r,StorageResolution:o?1:60}]};n.silly("Writing metric to cw : %j",l);const h=await this.cw.send(new x(l));return n.silly("Result: %j",h),h}async writeDynamoCountAsMinuteLevelMetric(t){if(n.info("Publishing %s / %s metric for %s UTC",t.namespace,t.metric,t.minuteUTC),t.scan&&t.query)throw new Error("Must send query or scan, but not both");if(!t.scan&&!t.query)throw new Error("You must specify either a scan or a query");const a=t.query?await t.dynamoRatchet.fullyExecuteQueryCount(t.query):await t.dynamoRatchet.fullyExecuteScanCount(t.scan);n.debug("%s / %s for %s are %j",t.namespace,t.metric,t.minuteUTC,a);const s=t.minuteUTC.split(" ").join("T")+":00Z",i=e.fromISO(s).toJSDate(),r=await this.writeSingleMetric(t.namespace,t.metric,t.dims,v.Count,a.count,i,!1);return n.debug("Metrics response: %j",r),a.count}}class ke{static DEFAULT_CONTENT=Buffer.from("DAEMON_PLACEHOLDER");static DAEMON_METADATA_KEY="daemon_meta";static async start(e,t,a,s){try{s.meta=s.meta||{},n.info("Starting daemon, key: %s, options: %j",a,s);const i=(new Date).getTime(),r={id:t,title:s.title,lastUpdatedEpochMS:i,lastUpdatedMessage:"Created",targetFileName:s.targetFileName,startedEpochMS:i,completedEpochMS:null,meta:s.meta,error:null,link:null,contentType:s.contentType};return await ke.writeState(e,a,r,ke.DEFAULT_CONTENT)}catch(e){throw n.error("Error while trying to start a daemon: %j %s",s,e),e}}static async writeState(e,t,a,s){try{const i={};a.lastUpdatedEpochMS=(new Date).getTime(),i[ke.DAEMON_METADATA_KEY]=JSON.stringify(a);const r={Bucket:e.getDefaultBucket(),Key:t,ContentType:a.contentType,Metadata:i,Body:s};a.targetFileName&&(r.ContentDisposition='attachment;filename="'+a.targetFileName+'"');const o=await e.getS3Client().send(new D(r));return n.silly("Daemon wrote : %s",o),ke.stat(e,t)}catch(e){throw n.error("Error while trying to write a daemon stat: %j %s",a,e),e}}static async streamDataAndFinish(e,t,s,i){n.debug("Streaming data to %s",t);const r=await ke.updateMessage(e,t,"Streaming data");r.completedEpochMS=(new Date).getTime(),r.lastUpdatedMessage="Complete";const o={};o[ke.DAEMON_METADATA_KEY]=JSON.stringify(r);const c={Bucket:e.getDefaultBucket(),Key:t,ContentType:r.contentType,Metadata:o,Body:s},l=a.trimToNull(i?.overrideTargetFileName)||a.trimToNull(r?.targetFileName);l&&(c.ContentDisposition='attachment;filename="'+l+'"');const h=new j({client:e.getS3Client(),params:c,tags:[],queueSize:4,partSize:5242880,leavePartsOnError:!1});i?.progressFn&&h.on("httpUploadProgress",i.progressFn);const u=await h.done();return n.silly("Daemon wrote : %s",u),ke.stat(e,t)}static async updateMessage(e,t,a){try{const s=await ke.stat(e,t);return s.lastUpdatedMessage=a,ke.writeState(e,t,s,ke.DEFAULT_CONTENT)}catch(e){throw n.error("Error while trying to update a daemon message: %j %s",t,e),e}}static async stat(e,t){try{n.debug("Daemon stat for path %s / %s",e.getDefaultBucket(),t);let a=null;const s=await e.fetchMetaForCacheFile(t);n.debug("Daemon: Meta is %j",s);const i=s&&s.Metadata?s.Metadata[ke.DAEMON_METADATA_KEY]:null;return i?(a=JSON.parse(i),a.completedEpochMS&&!a.error&&(a.link=await e.preSignedDownloadUrlForCacheFile(t))):n.warn("No metadata found! 
(Head was %j)",s),a}catch(e){throw n.error("Error while trying to fetch a daemon state: %j %s",t,e),e}}static async abort(e,t){return ke.error(e,t,"Aborted")}static async error(e,t,a){try{const s=await ke.stat(e,t);return s.error=a,s.completedEpochMS=(new Date).getTime(),ke.writeState(e,t,s,ke.DEFAULT_CONTENT)}catch(e){throw n.error("Error while trying to write a daemon error: %j %s",t,e),e}}static async finalize(e,t,a){try{n.info("Finalizing daemon %s with %d bytes",t,a.length);const s=await ke.stat(e,t);return s.completedEpochMS=(new Date).getTime(),s.lastUpdatedMessage="Complete",ke.writeState(e,t,s,a)}catch(e){throw n.error("Error while trying to finalize a daemon: %j %s",t,e),e}}}class Fe{s3;defaultBucket;constructor(e,a=null){this.s3=e,this.defaultBucket=a,t.notNullOrUndefined(this.s3,"s3")}get s3Client(){return this.s3}static applyCacheControlMaxAge(e,t){return e&&t&&(e.CacheControl="max-age="+t),e}static applyUserMetaData(e,t,s){return e&&a.trimToNull(t)&&a.trimToNull(s)&&(e.Metadata=e.Metadata||{},e.Metadata[t]=s),e}getDefaultBucket(){return this.defaultBucket}getS3Client(){return this.s3}async fileExists(e,t=null){try{return!!await this.fetchMetaForCacheFile(e,this.bucketVal(t))}catch(e){return n.silly("Error calling file exists (as expected) %s",e),!1}}async fetchCacheFileAsS3GetObjectCommandOutput(e,t=null){let a=null;try{const s={Bucket:this.bucketVal(t),Key:e};a=await this.s3.send(new O(s))}catch(t){if(!(t instanceof k))throw t;n.debug("Key %s not found - returning null",e),a=null}return a}async fetchCacheFileAsReadableStream(e,t=null){return(await this.fetchCacheFileAsS3GetObjectCommandOutput(e,t)).Body.transformToWebStream()}async fetchCacheFileAsBuffer(e,t=null){let a=null;const s=await this.fetchCacheFileAsS3GetObjectCommandOutput(e,t);if(s?.Body){const e=await s.Body.transformToByteArray();a=Buffer.from(e)}return a}async fetchCacheFileAsString(e,t=null){let a=null;const s=await this.fetchCacheFileAsS3GetObjectCommandOutput(e,t);return s?.Body&&(a=await s.Body.transformToString()),a}async fetchCacheFileAsObject(e,t=null){const a=await this.fetchCacheFileAsString(e,t);return a?JSON.parse(a):null}async removeCacheFile(e,t=null){let a=null;const s={Bucket:this.bucketVal(t),Key:e};try{a=await this.s3.send(new F(s))}catch(s){if(!(s&&s instanceof A))throw s;n.info("Swallowing 404 deleting missing object %s %s",t,e),a=null}return a}async writeObjectToCacheFile(e,t,a,s){const n=JSON.stringify(t);return this.writeStringToCacheFile(e,n,a,s)}async writeStringToCacheFile(e,t,a,s){const n=o.stringToWebReadableStream(t);return this.writeStreamToCacheFile(e,n,a,s)}async writeStreamToCacheFile(e,t,a,s,i=(e=>{n.debug("Uploading : %s",e)})){const r=Object.assign({},a||{},{Bucket:this.bucketVal(s),Key:e,Body:t}),o=new j({client:this.s3,params:r,tags:[],queueSize:4,partSize:5242880,leavePartsOnError:!1});i&&o.on("httpUploadProgress",i);return await o.done()}async synchronize(e,a,s=this,r=!1){t.notNullOrUndefined(e,"srcPrefix"),t.notNullOrUndefined(a,"targetPrefix"),t.true(e.endsWith("/"),"srcPrefix must end in /"),t.true(a.endsWith("/"),"targetPrefix must end in /");let o=[];const c=await this.directChildrenOfPrefix(e),l=await s.directChildrenOfPrefix(a),h=new i;for(let t=0;t<c.length;t++){const i=c[t];if(n.info("Processing %s : %s",i,h.dumpExpected(t/c.length)),i.endsWith("/"))if(r){n.info("%s is a subfolder - recursing");const t=await this.synchronize(e+i,a+i,s,r);n.info("Got %d back from %s",t.length,i),o=o.concat(t)}else n.info("%s is a subfolder and recurse not specified - 
skipping",i);else{let t=!0;const r=await this.fetchMetaForCacheFile(e+i);if(l.includes(i)){const e=await s.fetchMetaForCacheFile(a+i);r.ETag===e.ETag&&(n.debug("Skipping - identical"),t=!1)}if(t){n.debug("Copying...");const t=await this.fetchCacheFileAsReadableStream(e+i);try{const e=await s.writeStreamToCacheFile(a+i,t,r,void 0);n.silly("Write result : %j",e),o.push(i)}catch(e){n.error("Failed to sync : %s : %s",i,e)}}}}return n.info("Found %d files, copied %d",c.length,o.length),h.log(),o}async fetchMetaForCacheFile(e,t=null){let a=null;try{a=await this.s3.send(new P({Bucket:this.bucketVal(t),Key:e}))}catch(s){if(!(s&&s instanceof A))throw n.error("Unrecognized error, rethrowing : %s",s,s),s;n.info("Cache file %s %s not found returning null",this.bucketVal(t),e),a=null}return a}async cacheFileAgeInSeconds(e,t=null){try{const a=await this.fetchMetaForCacheFile(e,t);return a&&a.LastModified?Math.floor(((new Date).getTime()-a.LastModified.getTime())/1e3):(n.warn("Cache file %s %s had no last modified returning null",this.bucketVal(t),e),null)}catch(a){if(a&&a instanceof A)return n.warn("Cache file %s %s not found returning null",this.bucketVal(t),e),null;throw a}}async copyFile(e,t,a=null,s=null){const n={CopySource:"/"+this.bucketVal(a)+"/"+e,Bucket:this.bucketVal(s),Key:t,MetadataDirective:"COPY"};return await this.s3.send(new I(n))}async quietCopyFile(e,t,a=null,s=null){let i=!1;try{await this.copyFile(e,t,a,s);i=!0}catch(e){n.silly("Failed to copy file in S3 : %s",e)}return i}async preSignedDownloadUrlForCacheFile(e,t=3600,a=null){const s={Bucket:this.bucketVal(a),Key:e};return await B(this.s3,new O(s),{expiresIn:t})}async directChildrenOfPrefix(e,t=!1,a=null,s=null){const n=[],i={Bucket:this.bucketVal(a),Prefix:e,Delimiter:"/"};let r=null;do{r=await this.s3.send(new M(i));const o=e.length;r.CommonPrefixes&&r.CommonPrefixes.forEach((e=>{if(!s||n.length<s){const t=e.Prefix.substring(o);n.push(t)}})),r.Contents&&await Promise.all(r.Contents.map((async e=>{if(!s||n.length<s)if(t){const t={link:await this.preSignedDownloadUrlForCacheFile(e.Key,3600,a),name:e.Key.substring(o),size:e.Size};n.push(t)}else n.push(e.Key.substring(o))}))),i.Marker=r.NextMarker}while(i.Marker&&(!s||n.length<s));return n}async allSubFoldersOfPrefix(e,t=null){const a=[e];let s=0;for(;s<a.length;){const e=a[s++];n.debug("Pulling %s (%d remaining)",e,a.length-s);const i={Bucket:this.bucketVal(t),Prefix:e,Delimiter:"/"};let r=null;do{i.ContinuationToken=r?r.NextContinuationToken:null,r=await this.s3.send(new M(i)),r.CommonPrefixes.forEach((e=>{a.push(e.Prefix)})),n.debug("g:%j",r)}while(r.NextContinuationToken)}return a}bucketVal(e){const t=e||this.defaultBucket;if(!t)throw"You must set either the default bucket or pass it explicitly";return t}}class Ae{s3;bucket;prefix;_defaultGroup;jwtRatchet;static DEFAULT_DEFAULT_GROUP="DEFAULT";cache;constructor(e,t,a="",s=Ae.DEFAULT_DEFAULT_GROUP,n){this.s3=e,this.bucket=t,this.prefix=a,this._defaultGroup=s,this.jwtRatchet=n,this.cache=new Fe(this.s3,this.bucket)}get defaultGroup(){return this._defaultGroup}async keyToPublicToken(e,a){t.notNullOrUndefined(this.jwtRatchet,"You must set jwtRatchet if you wish to use public tokens"),t.notNullOrUndefined(e,"key"),t.true(a>0,"Expiration seconds must be larger than 0");const s={daemonKey:e};return await this.jwtRatchet.createTokenString(s,a)}keyToPath(e){return Buffer.from(e,"base64").toString()}pathToKey(e){return Buffer.from(e).toString("base64")}generatePath(e=this._defaultGroup){return 
this.generatePrefix(e)+a.createType4Guid()}generatePrefix(e=this._defaultGroup){return this.prefix+e+"/"}async start(e){e.group=e.group||this._defaultGroup;const t=this.generatePath(e.group),a=this.pathToKey(t);return ke.start(this.cache,a,t,e)}async writeState(e,t){const a=this.keyToPath(e.id);return ke.writeState(this.cache,a,e,t)}async clean(e=this._defaultGroup,t=604800){try{n.info("Daemon removing items older than %d seconds from group %s",t,e);const a=await this.list(e),s=(new Date).getTime(),i=[];for(let e=0;e<a.length;e++){const n=a[e];if((s-n.startedEpochMS)/1e3>t){await this.cache.removeCacheFile(this.keyToPath(n.id));i.push(n)}}return n.debug("Removed %d of %d items",i.length,a.length),i}catch(t){throw n.error("Error while trying to clean a daemon: %j %s",e,t),t}}async listKeys(e=this._defaultGroup){try{const t=this.generatePrefix(e);n.info("Fetching children of %s",t);const a=await this.cache.directChildrenOfPrefix(t);return n.debug("Found : %j",a),a}catch(t){throw n.error("Error while trying to list daemon keys: %j %s",e,t),t}}async list(e=this._defaultGroup){try{const t=this.generatePrefix(e);n.info("Fetching children of %s",t);const a=(await this.listKeys(e)).map((t=>this.stat(this.pathToKey(this.generatePrefix(e)+t))));return await Promise.all(a)}catch(t){throw n.error("Error while trying to list daemon states: %j %s",e,t),t}}async updateMessage(e,t){const a=this.keyToPath(e);return ke.updateMessage(this.cache,a,t)}async stat(e){const t=this.keyToPath(e);return ke.stat(this.cache,t)}async statFromPublicToken(e){t.notNullOrUndefined(this.jwtRatchet,"You must set jwtRatchet if you wish to use public tokens"),t.notNullOrUndefined(e,"publicToken");const a=await this.jwtRatchet.decodeToken(e),s=a?.daemonKey;return s?this.stat(s):null}async abort(e){return ke.abort(this.cache,this.keyToPath(e))}async error(e,t){return ke.error(this.cache,this.keyToPath(e),t)}async finalize(e,t){return ke.finalize(this.cache,this.keyToPath(e),t)}}class Pe{provider;cfg;static defaultDaoConfig(){return{guidCreateFunction:a.createType4Guid,guidFieldName:"guid",createdEpochMSFieldName:"createdEpochMS",updatedEpochMSFieldName:"updatedEpochMS",createdUtcTimestampFieldName:null,updatedUtcTimestampFieldName:null}}constructor(e,a=Pe.defaultDaoConfig()){this.provider=e,this.cfg=a,t.notNullOrUndefined(e,"provider"),t.notNullOrUndefined(a,"cfg"),t.notNullOrUndefined(a.guidCreateFunction,"cfg.guidCreateFunction"),t.notNullOrUndefined(a.guidFieldName,"cfg.guidFieldName")}async fetchAll(){return(await this.provider.loadDatabase()).items||[]}async resetDatabase(){await this.provider.storeDatabase({items:[],lastModifiedEpochMS:Date.now()})}async removeItems(e){let t=await this.fetchAll();return e&&(t=t.filter((t=>!e.includes(t[this.cfg.guidFieldName]))),await this.provider.storeDatabase({items:t,lastModifiedEpochMS:Date.now()})),t}async store(t){let a=await this.fetchAll();return t&&(t[this.cfg.guidFieldName]=t[this.cfg.guidFieldName]||this.cfg.guidCreateFunction(),this.cfg.createdEpochMSFieldName&&(t[this.cfg.createdEpochMSFieldName]=t[this.cfg.createdEpochMSFieldName]||Date.now()),this.cfg.createdUtcTimestampFieldName&&(t[this.cfg.createdUtcTimestampFieldName]=t[this.cfg.createdUtcTimestampFieldName]||e.utc().toISO()),this.cfg.updatedEpochMSFieldName&&(t[this.cfg.updatedEpochMSFieldName]=Date.now()),this.cfg.updatedUtcTimestampFieldName&&(t[this.cfg.updatedUtcTimestampFieldName]=e.utc().toISO()),a=a.filter((e=>e[this.cfg.guidFieldName]!==t[this.cfg.guidFieldName])),a.push(t),await 
this.provider.storeDatabase({items:a,lastModifiedEpochMS:Date.now()})),t}async fetchById(e){return(await this.fetchAll()).find((t=>t[this.cfg.guidFieldName]===e))}async searchByField(e,a){t.notNullOrUndefined(e,"fieldDotPath"),t.notNullOrUndefined(a,"fieldValue");const s={};return s[e]=a,this.searchByFieldMap(s)}async searchByFieldMap(e){t.notNullOrUndefined(e,"input");let a=await this.fetchAll();return a=a.filter((t=>{let a=!0;return Object.keys(e).forEach((s=>{const n=c.findValueDotPath(t,s),i=e[s];a=a&&n===i})),a})),a}}class Ie{s3CacheRatchet;keyName;constructor(e,a){this.s3CacheRatchet=e,this.keyName=a,t.notNullOrUndefined(e,"s3CacheRatchet"),t.notNullUndefinedOrOnlyWhitespaceString(e.getDefaultBucket(),"s3CacheRatchet.defaultBucket"),t.notNullUndefinedOrOnlyWhitespaceString(a,"keyName")}async storeDatabase(e){const t=e||{items:[],lastModifiedEpochMS:null};t.lastModifiedEpochMS=Date.now();return!!await this.s3CacheRatchet.writeObjectToCacheFile(this.keyName,t)}async loadDatabase(){return await this.s3CacheRatchet.fetchCacheFileAsObject(this.keyName)||{items:[],lastModifiedEpochMS:Date.now()}}}class Me{cache;prefix;constructor(e,t){if(this.cache=e,this.prefix=t,!e)throw new Error("cache object may not be null");if(!e.getDefaultBucket())throw new Error("Supplied cache must have default bucket set")}buildPathPrefix(e){let t="";return this.prefix&&(t+=this.prefix),e&&(t+=e),t}buildFullPath(e,t){let a=this.buildPathPrefix(t);return a.length>0&&(a+="/"),a+=e+".json",a}async exists(e,t){const a=this.buildFullPath(e,t);return n.debug("Check file existence : %s",a),this.cache.fileExists(a)}async fetch(e,t){const a=this.buildFullPath(e,t);n.debug("Fetching : %s",a);const s=await this.cache.fetchCacheFileAsObject(a);return s.id=e,s.path=t,s}async store(e,t){e.id=e.id||a.createType4Guid(),e.lastModifiedEpochMS=(new Date).getTime();const s=this.buildFullPath(e.id,t);n.debug("Storing : %s",s),await this.cache.writeObjectToCacheFile(s,e);return await this.fetch(e.id,t)}async listItems(e){const t=this.buildPathPrefix(e);n.debug("Listing : %s",t);return await this.cache.directChildrenOfPrefix(t)}async fetchItemsInPath(e){const t=this.buildPathPrefix(e);n.debug("Full fetch of : %s",t);const a=(await this.listItems(e)).map((t=>this.fetch(t,e)));return await Promise.all(a)}async delete(e,t){const a=this.buildFullPath(e,t);n.debug("Deleting : %s",a);return null!=await this.cache.removeCacheFile(a)}}class Ue{awsDDB;constructor(e){if(this.awsDDB=e,!e)throw"awsDDB may not be null"}get dynamoDBDocumentClient(){return this.awsDDB}getDDB(){return this.awsDDB}async tableIsEmpty(e){const t={TableName:e,Limit:1};return 0===(await this.throughputSafeScanOrQuery((e=>this.scanPromise(e)),t)).Items.length}async scanPromise(e){return this.awsDDB.send(new K(e))}async queryPromise(e){return this.awsDDB.send(new L(e))}async throughputSafeScanOrQuery(e,t,a,i){let o=null;if(t){let c=i??0;do{c++;try{o=await e(t)}catch(e){if(!Ue.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e))throw e;{const e=1e3*Math.pow(2,c);n.debug("Exceeded scan throughput for %j : Try %d of %d (Waiting %d ms)",t,c,a,e),await r.wait(e),c++}}}while(!o&&(!a||c<a));o||s.throwFormattedErr("throughputSafeScan failed - tried %d times, kept running into throughput exceeded : %j",a,t)}return o}async fullyExecuteQueryCount(e,t=0){try{e.Select="COUNT",n.debug("Executing count query : %j",e);const a={count:0,scannedCount:0,pages:0},s=(new Date).getTime();let i=null;const o=e.Limit;e.Limit=null;do{i=await 
this.throughputSafeScanOrQuery((e=>this.queryPromise(e)),e),a.count+=i.Count,a.scannedCount+=i.ScannedCount,a.pages++,e.ExclusiveStartKey=i.LastEvaluatedKey,await r.wait(t),n.silly("Rval is now %j",a),o&&a.count>=o&&e.ExclusiveStartKey&&(n.info("Aborting query since hit limit of %d",o),e.ExclusiveStartKey=null)}while(e.ExclusiveStartKey);const c=(new Date).getTime();return n.debug("Finished, returned %j in %s for %j",a,l.formatMsDuration(c-s,!0),e),a}catch(t){return n.error("Failed with %s, q: %j",t,e,t),null}}async fullyExecuteQuery(e,t=0,a=null){const s=[];return await this.fullyExecuteProcessOverQuery(e,(async e=>{s.push(e)}),t,a),s}async fullyExecuteProcessOverQuery(e,t,a=0,s=null){let i=0;try{n.debug("Executing query : %j",e);const o=(new Date).getTime();n.debug("Pulling %j",e);let c=await this.throughputSafeScanOrQuery((e=>this.queryPromise(e)),e);for(let e=0;e<c.Items.length;e++)await t(c.Items[e]),i++;let h=0,u=0;for(;c.LastEvaluatedKey&&(null===s||i<s)&&!e.Limit;){n.silly("Found more rows - requery with key %j",c.LastEvaluatedKey),e.ExclusiveStartKey=c.LastEvaluatedKey,c=await this.throughputSafeScanOrQuery((e=>this.queryPromise(e)),e);for(let e=0;e<c.Items.length;e++)await t(c.Items[e]),i++;n.silly("Have processed %d items",i),h++,u+=0===c.Count?1:0,await r.wait(a)}const d=(new Date).getTime();n.debug("Finished, processed %d rows in %s for %j (%d blank pages, %d total pages)",i,l.formatMsDuration(d-o,!0),e,u,h)}catch(t){n.error("Failed with %s, q: %j",t,e,t)}return i}async fullyExecuteScanCount(e,t=0){try{e.Select="COUNT";const a={count:0,scannedCount:0,pages:0};n.debug("Executing scan count : %j",e);const s=(new Date).getTime();let i=null;const o=e.Limit;e.Limit=null;do{i=await this.throughputSafeScanOrQuery((e=>this.scanPromise(e)),e),a.count+=i.Count,a.scannedCount+=i.ScannedCount,a.pages++,e.ExclusiveStartKey=i?.LastEvaluatedKey,await r.wait(t),n.silly("Rval is now %j",a),o&&a.count>=o&&e.ExclusiveStartKey&&(n.info("Aborting scan since hit limit of %d",o),e.ExclusiveStartKey=null)}while(e.ExclusiveStartKey);const c=(new Date).getTime();return n.debug("Finished, returned %j in %s for %j",a,l.formatMsDuration(c-s,!0),e),a}catch(t){return n.error("Failed with %s, q: %j",t,e,t),null}}async fullyExecuteScan(e,t=0,a=null){const s=[];return await this.fullyExecuteProcessOverScan(e,(async e=>{s.push(e)}),t,a),s}async fullyExecuteProcessOverScan(e,t,a=0,s=null){let i=0;try{n.debug("Executing scan : %j",e);const o=(new Date).getTime();n.debug("Pulling %j",e);let c=await this.throughputSafeScanOrQuery((e=>this.scanPromise(e)),e);for(let e=0;e<c.Items.length;e++)await t(c.Items[e]),i++;for(;c.LastEvaluatedKey&&(null===s||i<s)&&!e.Limit;){n.silly("Found more rows - requery with key %j",c.LastEvaluatedKey),e.ExclusiveStartKey=c.LastEvaluatedKey,c=await this.throughputSafeScanOrQuery((e=>this.scanPromise(e)),e);for(let e=0;e<c.Items.length;e++)await t(c.Items[e]),i++;n.silly("Rval is now %d items",i),await r.wait(a)}const h=(new Date).getTime();n.debug("Finished, processed %d results in %s for %j",i,l.formatMsDuration(h-o,!0),e)}catch(t){n.error("Failed with %s, q: %j",t,e,t)}return i}async writeAllInBatches(e,t,a){if(!a||a<2)throw new Error("Batch size needs to be at least 2, was "+a);let s=0;if(t&&t.length>0){let i=[];for(t.forEach((t=>{i.push({PutRequest:{Item:t,ReturnConsumedCapacity:"TOTAL",TableName:e}})})),n.debug("Processing %d batch items to %s",i.length,e);i.length>0;){const t=i.slice(0,Math.min(i.length,a));i=i.slice(t.length);const 
o={RequestItems:{},ReturnConsumedCapacity:"TOTAL",ReturnItemCollectionMetrics:"SIZE"};o.RequestItems[e]=t;let c=1,l=!1,h=null;for(;!l&&c<7;){try{h=await this.awsDDB.send(new V(o))}catch(e){if(!Ue.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e))throw e;n.info("Caught ProvisionedThroughputExceededException - retrying delete"),h={UnprocessedItems:o.RequestItems}}if(h&&h.UnprocessedItems&&h.UnprocessedItems[e]&&h.UnprocessedItems[e].length>0){const t=Math.pow(2,c);n.warn("Found %d unprocessed items. Backing off %d seconds and trying again",h.UnprocessedItems[e].length,t),await r.wait(1e3*t),c++,o.RequestItems[e]=h.UnprocessedItems[e]}else l=!0}h&&h.UnprocessedItems&&h.UnprocessedItems[e]&&h.UnprocessedItems[e].length>0?(n.error("After 6 tries there were still %d unprocessed items",h.UnprocessedItems[e].length),s+=t.length-h.UnprocessedItems[e].length,n.warn("FIX Unprocessed : %j",h.UnprocessedItems)):s+=t.length}}return s}async fetchFullObjectsMatchingKeysOnlyIndexQuery(e,a,s=25){t.notNullOrUndefined(e),t.notNullOrUndefined(e.TableName),t.notNullOrUndefined(a),t.true(a.length>0);const n=await this.fullyExecuteQuery(e),i=Ue.stripAllToKeysOnly(n,a);return await this.fetchAllInBatches(e.TableName,i,s)}async fetchAllInBatches(e,t,a){if(!a||a<2||a>100)throw new Error("Batch size needs to be at least 2 and no more than 100, was "+a);let s=[];const i=[];let o=Object.assign([],t);for(;o.length>0;){const t=o.slice(0,Math.min(o.length,a));o=o.slice(t.length);const s={};s[e]={Keys:t};const n={RequestItems:s,ReturnConsumedCapacity:"TOTAL"};i.push(n)}n.debug("Created %d batches",i.length);for(let t=0;t<i.length;t++){i.length>1&&n.info("Processing batch %d of %d",t+1,i.length);const a=i[t];let o=1;do{n.silly("Pulling %j",a);const t=await this.awsDDB.send(new _(a));s=s.concat(t.Responses[e]),t.UnprocessedKeys&&t.UnprocessedKeys[e]&&t.UnprocessedKeys[e].Keys.length>0&&o<15&&(n.silly("Found %d unprocessed, waiting",t.UnprocessedKeys[e].Keys),await r.wait(1e3*Math.pow(2,o)),o++),a.RequestItems=t.UnprocessedKeys}while(!a.RequestItems&&a.RequestItems[e].Keys.length>0)}return s}async deleteAllInBatches(e,t,a){if(!a||a<2)throw new Error("Batch size needs to be at least 2, was "+a);let s=0;if(t&&t.length>0){let i=[];for(t.forEach((t=>{i.push({DeleteRequest:{Key:t,ReturnConsumedCapacity:"TOTAL",TableName:e}})})),n.debug("Processing %d DeleteBatch items to %s",i.length,e);i.length>0;){const t=i.slice(0,Math.min(i.length,a));i=i.slice(t.length);const o={RequestItems:{},ReturnConsumedCapacity:"TOTAL",ReturnItemCollectionMetrics:"SIZE"};o.RequestItems[e]=t;let c=1,l=!1,h=null;for(;!l&&c<7;){try{h=await this.awsDDB.send(new V(o))}catch(e){if(!Ue.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e))throw e;n.info("Caught ProvisionedThroughputExceededException - retrying delete"),h={UnprocessedItems:o.RequestItems}}if(h&&h.UnprocessedItems&&h.UnprocessedItems[e]&&h.UnprocessedItems[e].length>0){const t=Math.pow(2,c);n.warn("Found %d unprocessed items. 
Backing off %d seconds and trying again",h.UnprocessedItems[e].length,t),await r.wait(1e3*t),c++,o.RequestItems[e]=h.UnprocessedItems[e]}else l=!0}h&&h.UnprocessedItems&&h.UnprocessedItems[e]&&h.UnprocessedItems[e].length>0?(n.error("After 6 tries there were still %d unprocessed items",h.UnprocessedItems[e].length),s+=t.length-h.UnprocessedItems[e].length,n.warn("FIX Unprocessed : %j",h.UnprocessedItems)):s+=t.length,n.debug("%d Remain, DeleteBatch Results : %j",i.length,h)}}return s}async simplePut(e,t,a=3){let s=null,i=0;const o={Item:t,ReturnConsumedCapacity:"TOTAL",TableName:e};for(;!s&&i<a;)try{s=await this.awsDDB.send(new G(o))}catch(e){if(!Ue.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e))throw e;{const e=1e3*Math.pow(2,i);n.debug("Exceeded write throughput for %j : Try %d of %d (Waiting %d ms)",o,i,a,e),await r.wait(e),i++}}return s||n.warn("Unable to write %j to DDB after %d tries, giving up",o,a),s}async simplePutOnlyIfFieldIsNullOrUndefined(e,t,a){let s=!1;const i={Item:t,ReturnConsumedCapacity:"TOTAL",ConditionExpression:"attribute_not_exists(#fieldName) OR #fieldName = :null ",ExpressionAttributeNames:{"#fieldName":a},ExpressionAttributeValues:{":null":null},TableName:e};try{const e=await this.awsDDB.send(new G(i));n.silly("Wrote : %j",e),s=!0}catch(o){if(Ue.objectIsErrorWithProvisionedThroughputExceededExceptionCode(o))n.debug("Exceeded write throughput for %j : (Waiting 2000 ms)",i),await r.wait(2e3),s=await this.simplePutOnlyIfFieldIsNullOrUndefined(e,t,a);else{if(!(o&&o instanceof J))throw o;n.debug("Failed to write %j due to null field failure"),s=!1}}return s}async simplePutWithCollisionAvoidance(e,a,s,i,o=null,c=3){t.true(s&&s.length>0&&s.length<3,"You must pass 1 or 2 key names");let l=null,h=0;const u={"#key0":s[0]},d={":key0":a[s[0]]};let m="#key0 <> :key0";s.length>1&&(m+=" AND #key1 <> :key1",u["#key1"]=s[1],d[":key1"]=a[s[1]]);const f={Item:a,ReturnConsumedCapacity:"TOTAL",ConditionExpression:m,ExpressionAttributeNames:u,ExpressionAttributeValues:d,TableName:e};let g=0;for(;!l&&h<c&&(!o||g<o);)try{l=await this.awsDDB.send(new G(f))}catch(e){if(Ue.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e)){h++;const e=1e3*Math.pow(2,h);n.debug("Exceeded write throughput for %j : Try %d of %d (Waiting %d ms)",f,h,c,e),await r.wait(e)}else{if(!(e&&e instanceof J))throw e;{let e=Object.assign({},f.Item);n.info("Failed to write %j due to collision - adjusting and retrying",e),e=i(e),f.Item=e,f.ExpressionAttributeValues[":key0"]=e[s[0]],s.length>1&&(f.ExpressionAttributeValues[":key1"]=e[s[1]]),g++}}}return l&&g>0&&n.info("After adjustment, wrote %j as %j",a,f.Item),l||n.warn("Unable to write %j to DDB after %d provision tries and %d adjusts, giving up",f,h,g),l?f.Item:null}async simpleGet(e,t,a=3){let s=null,i=0;const o={TableName:e,Key:t};for(;!s&&i<a;)try{s=await this.awsDDB.send(new W(o))}catch(e){if(!Ue.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e))throw e;{const e=1e3*Math.pow(2,i);n.debug("Exceeded read throughput for %j : Try %d of %d (Waiting %d ms)",o,i,a,e),await r.wait(e),i++}}s||n.warn("Unable to read %j from DDB after %d tries, giving up",o,a);return s&&s.Item?Object.assign({},s.Item):null}static objectIsErrorWithProvisionedThroughputExceededExceptionCode(e){return!!e&&e instanceof $}async simpleGetWithCounterDecrement(e,t,a,s,i=3){let o=null,c=0;const l={TableName:e,Key:t,UpdateExpression:"set #counter = 
#counter-:decVal",ExpressionAttributeNames:{"#counter":a},ExpressionAttributeValues:{":decVal":1,":minVal":0},ConditionExpression:"#counter > :minVal",ReturnValues:"ALL_NEW"};let h=!1;for(;!o&&c<i&&!h;)try{o=await this.awsDDB.send(new q(l))}catch(e){if(Ue.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e)){const e=1e3*Math.pow(2,c);n.debug("Exceeded update throughput for %j : Try %d of %d (Waiting %d ms)",l,c,i,e),await r.wait(e),c++}else{if(!(e&&e instanceof J))throw e;n.info("Cannot fetch requested row (%j) - the update check failed",t),h=!0}}o||h||n.warn("Unable to update %j from DDB after %d tries, giving up",l,i);const u=o&&o.Attributes?Object.assign({},o.Attributes):null;return s&&u&&0===u[a]&&(n.info("Delete on 0 specified, removing"),await this.simpleDelete(e,t)),u}async simpleDelete(e,t){const a={TableName:e,Key:t};return await this.awsDDB.send(new z(a))}async atomicCounter(e,t,a,s=1){const n={TableName:e,Key:t,UpdateExpression:"SET #counterFieldName = #counterFieldName + :inc",ExpressionAttributeNames:{"#counterFieldName":a},ExpressionAttributeValues:{":inc":s},ReturnValues:"UPDATED_NEW"},i=await this.awsDDB.send(new q(n));return h.safeNumber(i.Attributes[a])}static cleanObject(e){if(e){const t=[];Object.keys(e).forEach((a=>{const s=e[a];""===s?t.push(a):s instanceof Object&&Ue.cleanObject(s)})),n.silly("Removing keys : %j",t),t.forEach((t=>{delete e[t]}))}}static stripToKeysOnly(e,t){let a=null;return e&&t&&t.length>0&&(a={},t.forEach((t=>{e[t]||s.throwFormattedErr("Failed key extraction on %j - missing %s",e,t),a[t]=e[t]}))),a}static stripAllToKeysOnly(e,t){return e.map((e=>Ue.stripToKeysOnly(e,t)))}}class Re{awsDDB;constructor(e){if(this.awsDDB=e,!e)throw"awsDDB may not be null"}async deleteTable(e,a=!0){t.notNullOrUndefined(e);const s={TableName:e};n.debug("Deleting ddb table %s",e);const i=await this.awsDDB.send(new Q(s));return a&&(n.debug("Table marked for delete, waiting for deletion"),await this.waitForTableDelete(e)),i}async createTable(e,a=!0,i=!1){t.notNullOrUndefined(e),t.notNullOrUndefined(e.TableName),n.debug("Creating new table : %j",e);await this.tableExists(e.TableName)&&(i?(n.debug("Table %s exists and replace specified - deleting",e.TableName),await this.deleteTable(e.TableName)):s.throwFormattedErr("Cannot create table %s - exists already and replace not specified",e.TableName));const r=await this.awsDDB.send(new H(e));return a&&(n.debug("Table created, awaiting ready"),await this.waitForTableReady(e.TableName)),r}async waitForTableReady(e){let t=!0,a=await this.safeDescribeTable(e);for(;a&&a.Table&&"ACTIVE"!==a.Table.TableStatus;)n.silly("Table not ready - waiting 2 seconds"),await r.wait(2e3),a=await this.safeDescribeTable(e);return a||a.Table||(n.warn("Cannot wait for %s to be ready - table does not exist",e),t=!1),t}async waitForTableDelete(e){let t=await this.safeDescribeTable(e);for(;t;)n.silly("Table %s still exists, waiting 2 seconds (State is %s)",e,t.Table.TableStatus),await r.wait(2e3),t=await this.safeDescribeTable(e)}async tableExists(e){return!!await this.safeDescribeTable(e)}async listAllTables(){const e={};let t=[];do{const a=await this.awsDDB.send(new Y(e));t=t.concat(a.TableNames),e.ExclusiveStartTableName=a.LastEvaluatedTableName}while(e.ExclusiveStartTableName);return t}async safeDescribeTable(e){try{return await this.awsDDB.send(new Z({TableName:e}))}catch(e){if(e instanceof X)return null;throw e}}async 
copyTable(e,a,n,i){if(t.notNullUndefinedOrOnlyWhitespaceString(e,"srcTableName"),t.notNullUndefinedOrOnlyWhitespaceString(a,"dstTableName"),i)throw s.fErr("Cannot copy %s to %s - copy data not supported yet",e,a);const r=await this.safeDescribeTable(e);if(await this.tableExists(a))throw s.fErr("Cannot copy to %s - table already exists",a);if(!r)throw s.fErr("Cannot copy %s - doesnt exist",e);r.Table.AttributeDefinitions,r.Table.KeySchema,r.Table.GlobalSecondaryIndexes;const o=Object.assign({},n||{},{AttributeDefinitions:r.Table.AttributeDefinitions,TableName:a,KeySchema:r.Table.KeySchema,LocalSecondaryIndexes:r.Table.LocalSecondaryIndexes,GlobalSecondaryIndexes:r.Table.GlobalSecondaryIndexes.map((e=>{const t=e;return 0!==t.ProvisionedThroughput?.WriteCapacityUnits&&0!==t.ProvisionedThroughput?.ReadCapacityUnits||(t.ProvisionedThroughput=void 0),t})),BillingMode:r.Table.BillingModeSummary.BillingMode,ProvisionedThroughput:"PROVISIONED"===r.Table.BillingModeSummary.BillingMode?r.Table.ProvisionedThroughput:void 0,StreamSpecification:r.Table.StreamSpecification,SSESpecification:r.Table.SSEDescription,Tags:void 0,TableClass:r.Table.TableClassSummary?.TableClass,DeletionProtectionEnabled:r.Table.DeletionProtectionEnabled});return await this.awsDDB.send(new H(o))}}class je{spots;buckets;separator;alphabet;_allSlots;constructor(e=3,s=16,n="_",i="0123456789ABCDEF"){this.spots=e,this.buckets=s,this.separator=n,this.alphabet=i,t.true(e>0,"Spots must be larger than 0"),t.true(s>1,"Buckets must be larger than 1"),t.notNullOrUndefined(a.trimToNull(i),"Alphabet may not be null or empty"),t.true(a.allUnique(i),"Alphabet must be unique"),t.true(a.stringContainsOnlyAlphanumeric(i),"Alphabet must be alphanumeric");const r=Math.pow(i.length,e);t.true(s<r,"Buckets must be less than permutations ("+s+" / "+r+")"),t.notNullOrUndefined(a.trimToNull(this.separator),"Separator must be nonnull and nonempty");const o=a.allPermutationsOfLength(e,i);this._allSlots=o.slice(0,s)}get allBuckets(){return Object.assign([],this._allSlots)}get randomBucket(){return this._allSlots[Math.floor(Math.random()*this.buckets)]}allSpreadValues(e){t.notNullOrUndefined(a.trimToNull(e),"Cannot spread null/empty value");return this._allSlots.map((t=>e+this.separator+t))}allSpreadValuesForArray(e){t.true(e&&e.length>0,"Cannot spread null/empty array");let a=[];return e.forEach((e=>{a=a.concat(this.allSpreadValues(e))})),a}addSpreader(e){return t.notNullOrUndefined(a.trimToNull(e),"Cannot spread null/empty value"),e+this.separator+this.randomBucket}extractBucket(e){t.notNullOrUndefined(a.trimToNull(e),"Cannot extract from null or empty value");const n=e.length-this.spots;return(n<0||e.charAt(n)!==this.separator)&&s.throwFormattedErr("Cannot extract bucket, not created by this spreader (missing %s at location %d)",this.separator,n),e.substring(n)}removeBucket(e){t.notNullOrUndefined(a.trimToNull(e),"Cannot extract from null or empty value");const n=e.length-this.spots;return(n<0||e.charAt(n)!==this.separator)&&s.throwFormattedErr("Cannot remove bucket, not created by this spreader (missing %s at location %d)",this.separator,n),e.substring(0,n)}}class Be{region;availabilityZone;ec2;ec2InstanceConnect;constructor(e="us-east-1",t="us-east-1a"){this.region=e,this.availabilityZone=t,this.ec2=new te({region:e}),this.ec2InstanceConnect=new ie({region:e})}get eC2Client(){return this.ec2}get eC2InstanceConnectClient(){return this.ec2InstanceConnect}async stopInstance(e,t=0){let a=!0;try{const a={InstanceIds:[e],DryRun:!1};n.info("About to stop 
instances : %j",a),await this.ec2.send(new ae(a)),n.info("Stop instance command sent, waiting on shutdown");let s=await this.describeInstance(e);if(t>0){const a=(new Date).getTime();for(;s&&16!==s.State.Code&&(new Date).getTime()-a<t;)n.debug("Instance status is %j - waiting for 5 seconds (up to %s)",s.State,l.formatMsDuration(t)),await r.wait(5e3),s=await this.describeInstance(e)}}catch(t){n.error("Failed to stop instance %s : %s",e,t,t),a=!1}return a}async launchInstance(e,t=0){let a=!0;try{const a={InstanceIds:[e],DryRun:!1};n.info("About to start instance : %j",a),await this.ec2.send(new se(a)),n.info("Start instance command sent, waiting on startup");let s=await this.describeInstance(e);if(t>0){const a=(new Date).getTime();for(;s&&16!==s.State.Code&&(new Date).getTime()-a<t;)n.debug("Instance status is %j - waiting for 5 seconds (up to %s)",s.State,l.formatMsDuration(t)),await r.wait(5e3),s=await this.describeInstance(e)}s&&s.PublicIpAddress&&(n.info("Instance address is %s",s.PublicIpAddress),n.info("SSH command : ssh -i path_to_pem_file ec2-user@%s",s.PublicIpAddress))}catch(t){n.error("Failed to start instance %s : %s",e,t,t),a=!1}return a}async describeInstance(e){const t=await this.listAllInstances([e]);return 1===t.length?t[0]:null}async listAllInstances(e=[]){let t=[];const a={NextToken:null};e&&e.length>0&&(a.InstanceIds=e);do{n.debug("Pulling instances... (%j)",a);const e=await this.ec2.send(new ne(a));e.Reservations.forEach((e=>{t=t.concat(e.Instances)})),a.NextToken=e.NextToken}while(a.NextToken);return n.debug("Finished pulling instances (found %d)",t.length),t}async sendPublicKeyToEc2Instance(e,t,a){const s=a||"ec2-user",n={InstanceId:e,AvailabilityZone:this.availabilityZone,InstanceOSUser:s,SSHPublicKey:t};return await this.ec2InstanceConnect.send(new re(n))}}class Ke{ecr;static ECR_REPOSITORIES_TO_PRUNE_ENV_KEY="NEON_ECR_REPOSITORIES_TO_PRUNE";static AWS_ECR_BATCH_DELETE_IMAGE_COUNT=100;static ECR_IMAGE_MINIMUM_AGE_DAYS=60;static ECR_REPOSITORY_MINIMUM_IMAGE_COUNT=600;constructor(e){this.ecr=e,t.notNullOrUndefined(e,"ecr")}async fetchRepositoryNames(){return null}async fetchRegistryId(){return(await this.ecr.send(new oe({}))).registryId}async handlePruning(e){const t=await this.fetchRegistryId();return await this.handlePruningForRegistry(t,e)}async handlePruningForRegistry(e,a){let s=[];const i=t.isNullOrUndefined(a.minimumAgeInDays)?60:a.minimumAgeInDays,r=t.isNullOrUndefined(a.minimumImageCount)?600:a.minimumImageCount,o=a.batchDeleteSize||100;for(let t=0;t<a.repositoriesToPurge.length;t++){const c=a.repositoriesToPurge[t],l=[];let h;do{h&&n.info(`Fetching images for ${c} (from ${l.length})...`);const t=await this.ecr.send(new ce({registryId:e,repositoryName:c,nextToken:h,maxResults:1e3}));h=t.nextToken,l.push(...t.imageDetails)}while(void 0!==h);n.info(`Found ${l.length} image(s) for ${c}`),l.sort(((e,t)=>e.imagePushedAt>t.imagePushedAt?1:-1));const u=Date.now()-24*i*60*60*1e3,d=SharedDateUtil.epochMSToDateHourUtc(u);n.info(`Maximum allowed creation date for pruning: ~${d}`);const m=[];for(const e of l){if(e.imagePushedAt.getTime()>u){n.info(`Image is too recently pushed ${e.imagePushedAt}. Finished selecting for pruning.`);break}if(l.length-m.length<=r){n.info("Reached minimum image count. 
Finished selecting for pruning.");break}m.push({imageDigest:e.imageDigest}),n.info(`Adding to prune list: ${e.imageDigest} (${e.imagePushedAt})`)}const f=he.chunk(m,o);n.info(`Got ${f.length} chunks of images to delete.`);let g=0;if(s=s.concat(m),a.dryRun)n.info("DryRun specified : Would have purged %j",m);else for(;g<f.length;){const t=f[g];n.info(`Deleting chunk: ${g+1}/${f.length} (${t.length} image(s))`),await this.ecr.send(new le({registryId:e,repositoryName:c,imageIds:t})),g++}n.info(`Finished deleting all chunks for ${c}`)}return s}}class Le{providers;constructor(e){this.providers=e,t.notNullOrUndefined(e),t.true(e.length>0)}async fetchConfig(e){n.silly("CascadeEnvironmentServiceProvider fetch for %s",e);let t=null;for(let a=0;a<this.providers.length&&!t;a++)try{t=await this.providers[a].fetchConfig(e)}catch(e){n.error("Provider %d failed - trying next : %s",a,e,e),t=null}return t}}class Ve{envVarName;constructor(e){this.envVarName=e,t.notNullOrUndefined(e)}async fetchConfig(){n.silly("EnvVarEnvironmentServiceProvider fetch for %s",this.envVarName);let e=null;const t=process?process.env:global||{},i=a.trimToNull(t[this.envVarName]);if(i)try{e=JSON.parse(i)}catch(e){throw n.error("Failed to read env - null or invalid JSON : %s : %s",e,i,e),e}else s.throwFormattedErr("Could not find env var with name : %s",this.envVarName);return e}}class _e{provider;cfg;readPromiseCache=new Map;static defaultEnvironmentServiceConfig(){return{maxRetries:3,backoffMultiplierMS:500}}constructor(e,a=_e.defaultEnvironmentServiceConfig()){this.provider=e,this.cfg=a,t.notNullOrUndefined(e),t.notNullOrUndefined(a)}async getConfig(e){return n.silly("EnvService:Request to read config %s",e),this.readPromiseCache.has(e)||(n.silly("EnvService: Nothing in cache - adding"),this.readPromiseCache.set(e,this.getConfigUncached(e))),this.readPromiseCache.get(e)}async getConfigUncached(e){let t=1,a=null;for(;!a&&t<this.cfg.maxRetries;){t++,n.silly("Attempting fetch of %s",e);try{a=await this.provider.fetchConfig(e)}catch(a){const s=t*this.cfg.backoffMultiplierMS;n.info("Error attempting to fetch config %s (try %d of %d, waiting %s MS): %s",e,t,this.cfg.maxRetries,s,a,a),await r.wait(s)}}return a||s.throwFormattedErr("Was unable to fetch config %s even after %d retries",e,this.cfg.maxRetries),a}}class Ge{value;constructor(e){this.value=e,t.notNullOrUndefined(e)}static fromRecord(e){const t=new Map;return Object.keys(e).forEach((a=>{t.set(a,e[a])})),new Ge(t)}async fetchConfig(e){n.silly("FixedEnvironmentServiceProvider fetch for %s",e);return this.value.get(e)}}class We{cfg;ratchet;constructor(e){this.cfg=e,t.notNullOrUndefined(e),t.notNullOrUndefined(e.bucketName),t.notNullOrUndefined(e.region),t.true(!!e.s3Override||!!e.region,"You must set either region or S3Override");const a=e.s3Override||new U({region:e.region});this.ratchet=new Fe(a,e.bucketName)}async fetchConfig(e){const t=a.trimToEmpty(this.cfg.pathPrefix)+e+a.trimToEmpty(this.cfg.pathSuffix);n.silly("S3EnvironmentServiceProvider:Request to read config from : %s / %s",this.cfg.bucketName,t);const s=new i,r=await this.ratchet.fetchCacheFileAsObject(t);return s.log(),r}}class qe{region;ssmEncrypted;ssm;constructor(e="us-east-1",a=!0){this.region=e,this.ssmEncrypted=a,t.notNullOrUndefined(e),t.notNullOrUndefined(a),this.ssm=new ue({region:this.region})}async fetchConfig(e){n.silly("SsmEnvironmentServiceProvider fetch for %s",e);const t={Name:e,WithDecryption:this.ssmEncrypted};let i=null,o=null;try{const e=await this.ssm.send(new 
de(t));o=a.trimToNull(e?.Parameter?.Value)}catch(t){if(t instanceof me){const t=n.warn("AWS could not find parameter %s - are you using the right AWS key?",e);throw new Error(t)}if(!((s.safeStringifyErr(t)||"").toLowerCase().indexOf("throttl")>-1))throw n.error("Final environment fetch error (cannot retry) : %s",t,t),t;n.warn("Throttled while trying to read parameters - waiting 1 second before allowing retry"),await r.wait(1e3)}if(o)try{i=JSON.parse(o)}catch(e){throw n.error("Failed to read env - null or invalid JSON : %s : %s",e,o,e),e}else s.throwFormattedErr("Could not find system parameter with name : %s in this account",e);return i}}class ze{tableName;dynamoRatchet;constructor(e,t){this.tableName=e,this.dynamoRatchet=t}async checkCode(e,t,a){const s={code:e,context:t},n=await this.dynamoRatchet.simpleGet(this.tableName,s),i=n&&n.expiresEpochMS>Date.now();return i&&a&&await this.dynamoRatchet.simpleDelete(this.tableName,s),i}async storeCode(e){const t=await this.dynamoRatchet.simplePut(this.tableName,e);return t&&t.ConsumedCapacity.CapacityUnits>0}async createTableIfMissing(e){return null}}class Je{provider;constructor(e){this.provider=e}static generateCode(e){t.notNullOrUndefined(e,"params"),t.notNullOrUndefined(e.context,"params.context"),t.notNullOrUndefined(e.length,"params.length"),t.notNullOrUndefined(e.alphabet,"params.alphabet");let a="";for(let t=0;t<e.length;t++)a+=e.alphabet.charAt(Math.floor(e.alphabet.length*Math.random()));return{code:a,context:e.context,tags:e.tags,expiresEpochMS:Date.now()+1e3*e.timeToLiveSeconds}}async createNewCode(e){const t=Je.generateCode(e);return await this.provider.storeCode(t)?t:null}async checkCode(e,t,s){return await this.provider.checkCode(a.trimToEmpty(e),a.trimToEmpty(t),s)}}class $e{s3CacheRatchet;keyName;constructor(e,a){this.s3CacheRatchet=e,this.keyName=a,t.notNullOrUndefined(e,"s3CacheRatchet"),t.notNullUndefinedOrOnlyWhitespaceString(e.getDefaultBucket(),"s3CacheRatchet.defaultBucket"),t.notNullUndefinedOrOnlyWhitespaceString(a,"keyName")}async fetchFile(){return await this.s3CacheRatchet.fetchCacheFileAsObject(this.keyName)||{data:[],lastModifiedEpochMS:Date.now()}}async updateFile(e){const t={data:e||[],lastModifiedEpochMS:Date.now()};t.data=t.data.filter((e=>e.expiresEpochMS>Date.now())),n.info("Updating code file to %s codes",t.data.length);return await this.s3CacheRatchet.writeObjectToCacheFile(this.keyName,t)}async checkCode(e,t,a){const s=await this.fetchFile(),i=s.data.find((a=>a?.code?.toUpperCase()===e?.toUpperCase()&&a?.context?.toUpperCase()===t?.toUpperCase()));if(i&&(a||i.expiresEpochMS<Date.now())){n.info("Stripping used/expired code from the database");const e=s.data.filter((e=>e!=i));await this.updateFile(e)}return!!i&&i.expiresEpochMS>Date.now()}async storeCode(e){const t=await this.fetchFile();t.data.push(e);return!!await this.updateFile(t.data)}}class Qe{constructor(){}static applySetProfileEnvironmentalVariable(e){process.env?a.trimToNull(e)?process.env.AWS_PROFILE=e:s.throwFormattedErr("Cannot set profile to null/empty string"):s.throwFormattedErr("Cannot set profile - not in a node environment - process missing")}}class He{static isValidCronEvent(e){return e&&"aws.events"==e.source&&e.resources&&e.resources.length>0}static isValidSnsEvent(e){return e&&e.Records&&e.Records.length>0&&"aws:sns"==e.Records[0].EventSource}static isValidSqsEvent(e){return e&&e.Records&&e.Records.length>0&&"aws:sqs"==e.Records[0].eventSource}static isValidDynamoDBEvent(e){return 
e&&e.Records&&e.Records.length>0&&"aws:dynamodb"==e.Records[0].eventSource}static isValidS3Event(e){return e&&e.Records&&e.Records.length>0&&"aws:s3"==e.Records[0].eventSource}static isValidApiGatewayV2WithRequestContextEvent(e){return e&&e.rawPath&&e.requestContext&&e.routeKey}static isValidApiGatewayEvent(e){return e&&e.httpMethod&&e.path&&e.requestContext}static isValidApiGatewayAuthorizerEvent(e){return e&&e.authorizationToken&&e.methodArn}static isSingleCronEvent(e){return this.isValidCronEvent(e)&&He.isSingleEntryEvent(e,"resources")}static isSingleSnsEvent(e){return this.isValidSnsEvent(e)&&He.isSingleEntryEvent(e)}static isSingleDynamoDBEvent(e){return this.isValidDynamoDBEvent(e)&&He.isSingleEntryEvent(e)}static isSingleS3Event(e){return this.isValidS3Event(e)&&He.isSingleEntryEvent(e)}static isSingleEntryEvent(e,t="Records"){return e&&e[t]&&e[t]instanceof Array&&1===e[t].length}}class Ye{static isValidCronEvent(e){return He.isValidCronEvent(e)}static isValidSqsEvent(e){return He.isValidSqsEvent(e)}static isValidSnsEvent(e){return He.isValidSnsEvent(e)}static isValidDynamoDBEvent(e){return He.isValidDynamoDBEvent(e)}static isValidS3Event(e){return He.isValidS3Event(e)}static isValidApiGatewayV2WithRequestContextEvent(e){return He.isValidApiGatewayV2WithRequestContextEvent(e)}static isValidApiGatewayEvent(e){return He.isValidApiGatewayEvent(e)}static isValidApiGatewayAuthorizerEvent(e){return He.isValidApiGatewayAuthorizerEvent(e)}}class Ze{route53;hostedZoneId;constructor(e,t){if(this.route53=e,this.hostedZoneId=t,!this.route53)throw"route53 may not be null"}get route53Client(){return this.route53}async changeCnameRecordTarget(e,t,a=this.hostedZoneId,s=600){n.info("Updating %s to point to %s",e,t);try{const i={ChangeBatch:{Changes:[{Action:"UPSERT",ResourceRecordSet:{Name:e,ResourceRecords:[{Value:t}],TTL:s,Type:"CNAME"}}]},HostedZoneId:a},r=await this.route53.send(new fe(i));n.debug("Updated domain result: %j",r);const o={Id:r.ChangeInfo.Id},c=await ge({client:this.route53,maxWaitTime:300},o);if(n.debug("Wait responsed: %j",c),c.state===pe.SUCCESS)return n.info("Updated %s to point to %s",e,a),!0}catch(a){n.warn("Error update CName for %s with value %s: %j",e,t,a)}return n.info("Cannot update %s to point to %s",e,t),!1}}class Xe{dynamo;tableName;constructor(e,a){this.dynamo=e,this.tableName=a,t.notNullOrUndefined(this.dynamo),t.notNullOrUndefined(this.tableName)}async readParameter(e,t){n.silly("Reading %s / %s from underlying db",e,t);const a={groupId:e,paramKey:t};return await this.dynamo.simpleGet(this.tableName,a)}async readAllParametersForGroup(e){const t={TableName:this.tableName,KeyConditionExpression:"groupId = :groupId",ExpressionAttributeValues:{":groupId":e}};return await this.dynamo.fullyExecuteQuery(t)}async writeParameter(e){return!!await this.dynamo.simplePut(this.tableName,e)}}class et{wrapped;options={globalTTL:1,separator:".",prefix:"RuntimeEnv-",suffix:""};constructor(e,a){this.wrapped=e,t.notNullOrUndefined(this.wrapped,"wrapped"),t.notNullOrUndefined(global?.process?.env,'"process" not found - this only runs in Node, not the browser'),a&&(this.options=a),t.notNullOrUndefined(this.options.globalTTL,"this.options.globalTTL"),t.notNullOrUndefined(this.options.separator,"this.options.separator"),t.true(this.options.globalTTL>0,"this.options.globalTTL must be larger than 0")}generateName(e,t){return a.trimToEmpty(this.options.prefix)+e+a.trimToEmpty(this.options.separator)+t+a.trimToEmpty(this.options.suffix)}async readParameter(e,t){const 
n=a.trimToNull(process.env[this.generateName(e,t)]);n&&!a.canParseAsJson(n)&&s.throwFormattedErr("Cannot parse ENV override (%s / %s) as JSON - did you forget the quotes on a string?",e,t);return n?{groupId:e,paramKey:t,paramValue:n,ttlSeconds:this.options.globalTTL}:await this.wrapped.readParameter(e,t)}async readAllParametersForGroup(e){return this.wrapped.readAllParametersForGroup(e)}async writeParameter(e){return this.wrapped.writeParameter(e)}}class tt{data;constructor(e=Promise.resolve({})){this.data=e}async readParameter(e,t){n.silly("Reading %s / %s from underlying db",e,t);return(await this.data)[e+"::"+t]}async readAllParametersForGroup(e){const t=await this.data,a=[];return Object.keys(t).forEach((s=>{s.startsWith(e)&&a.push(t[s])})),a}async writeParameter(e){return(await this.data)[e.groupId+"::"+e.paramKey]=e,!0}}class at{provider;cache=new Map;constructor(e){this.provider=e,t.notNullOrUndefined(this.provider)}async fetchParameter(e,t,a=null,s=!1){n.debug("Reading parameter %s / %s / Force : %s",e,t,s);const i=this.cache.get(at.toCacheStoreKey(e,t));let r=null;const o=(new Date).getTime();if(!s&&i){const a=i.ttlSeconds?o-1e3*i.ttlSeconds:0;i.storedEpochMS>a&&(n.silly("Fetched %s / %s from cache",e,t),r=JSON.parse(i.paramValue))}if(!r){const a=await this.readUnderlyingEntry(e,t);a&&(this.addToCache(a),r=JSON.parse(a.paramValue))}return r=r||a,r}async fetchAllParametersForGroup(e){const t=await this.readUnderlyingEntries(e),a=new Map;return t.forEach((e=>{a.set(e.paramKey,JSON.parse(e.paramValue)),this.addToCache(e)})),a}async readUnderlyingEntry(e,t){return this.provider.readParameter(e,t)}async readUnderlyingEntries(e){return this.provider.readAllParametersForGroup(e)}async storeParameter(e,t,a,s){const n={groupId:e,paramKey:t,paramValue:JSON.stringify(a),ttlSeconds:s};return await this.provider.writeParameter(n),this.provider.readParameter(e,t)}static toCacheStoreKey(e,t){return e+":::"+t}addToCache(e){if(e){const t=(new Date).getTime(),a=Object.assign({storedEpochMS:t},e);this.cache.set(at.toCacheStoreKey(e.groupId,e.paramKey),a)}}clearCache(){n.debug("Clearing runtime parameter cache"),this.cache=new Map}}class st{config;constructor(e){t.notNullOrUndefined(e,"config"),this.config=e,this.config.maxNumThreads||(this.config.maxNumThreads=15),this.config.maxRetries||(this.config.maxRetries=5)}updateSrcPrefix(e){this.config.srcPrefix=e}updateDstPrefix(e){this.config.dstPrefix=e}async copyObject(e,t,a=!1){const s=e.replace(this.config.srcPrefix,this.config.dstPrefix);let i=!1,r=0;for(;!i&&r<this.config.maxRetries;){n.debug(`${r>0?`Retry ${r} `:""}${a?"Express":"Slow"} copying\n [${[this.config.srcBucket,e].join("/")} ---\x3e ${[this.config.dstBucket,s].join("/")}]`);try{if(a){const t={CopySource:encodeURIComponent([this.config.srcBucket,e].join("/")),Bucket:this.config.dstBucket,Key:s,MetadataDirective:"COPY"};await this.config.dstS3.send(new I(t))}else{const a=await this.config.srcS3.send(new O({Bucket:this.config.srcBucket,Key:e})),i={Bucket:this.config.dstBucket,Key:s,Body:a.Body,ContentLength:t},r=new j({client:this.config.dstS3,params:i,tags:[],queueSize:4,partSize:5242880,leavePartsOnError:!1});r.on("httpUploadProgress",(e=>{n.debug("Uploading : %s",e)})),await r.done()}i=!0}catch(t){n.warn(`Can't ${a?"express":"slow"} copy\n [${[this.config.srcBucket,e].join("/")} ---\x3e ${[this.config.dstBucket,s].join("/")}]: %j`,t),r++}}n.debug(`Finished ${a?"express":"slow"} copying\n [${[this.config.srcBucket,e].join("/")} ---\x3e ${[this.config.dstBucket,s].join("/")}]`)}async 
listObjects(e,t,a){n.info(`Scanning bucket [${[e,t].join("/")}]`);const s={Bucket:e,Prefix:t};let i=!0;const r={};for(;i;){const e=await a.send(new R(s));i=e.IsTruncated,e.Contents.forEach((e=>{r[e.Key]={Key:e.Key,LastModified:e.LastModified,ETag:e.ETag,Size:e.Size}})),i&&(s.ContinuationToken=e.NextContinuationToken)}return r}async startSyncing(){n.info(`Syncing [${this.config.srcBucket}/${this.config.srcPrefix}\n ---\x3e ${this.config.dstBucket}/${this.config.dstPrefix}]`);const e=async e=>{await this.copyObject(e.Key,e.Size)};let t=await this.compareSrcAndDst();return(t.needCopy.length>0||t.diff.length>0)&&(await r.runBoundedParallelSingleParam(e,t.needCopy,this,this.config.maxNumThreads),await r.runBoundedParallelSingleParam(e,t.diff,this,this.config.maxNumThreads),n.info("Verifying..."),t=await this.compareSrcAndDst(),n.debug("Compare result %j",t)),0===t.needCopy.length&&0===t.diff.length}async compareSrcAndDst(){const e=this.listObjects(this.config.srcBucket,this.config.srcPrefix,this.config.srcS3),t=this.listObjects(this.config.dstBucket,this.config.dstPrefix,this.config.dstS3),a=await e,s=await t,n={needCopy:[],existed:[],diff:[]};return await r.runBoundedParallelSingleParam((e=>{const t=a[e],i=e.replace(this.config.srcPrefix,this.config.dstPrefix),r=s.hasOwnProperty(i)?s[i]:void 0;r?t.Size===r.Size&&t.LastModified.getTime()<=r.LastModified.getTime()?n.existed.push(t):n.diff.push(t):n.needCopy.push(t)}),Object.keys(a),this,this.config.maxNumThreads),n}}class nt{static checkS3UrlForValidity(e){let t=!1;return e&&(t=e.startsWith("s3://")&&e.trim().length>5),t}static extractBucketFromURL(e){t.true(nt.checkS3UrlForValidity(e),"invalid s3 url");const a=e.indexOf("/",5);return a>0?e.substring(5,a):e.substring(5)}static extractKeyFromURL(e){t.true(nt.checkS3UrlForValidity(e),"invalid s3 url");const a=e.indexOf("/",5);return a>0?e.substring(a+1):null}}class it{ses;config;static EMAIL=new RegExp(".+@.+\\.[a-z]+");constructor(e,a={}){if(this.ses=e,this.config=a,t.notNullOrUndefined(this.ses),a.archive&&!a.archive.getDefaultBucket())throw new Error("If archive specified, must set a default bucket")}get sESClient(){return this.ses}async fillEmailBody(e,a,n,i=null,r=null,o=null){return t.notNullOrUndefined(n),this.config.templateRenderer||s.throwFormattedErr("Cannot use fill body if template renderer not set"),e.htmlMessage=await this.config.templateRenderer.renderTemplate(n,a,r,o),e.txtMessage=i?await this.config.templateRenderer.renderTemplate(i,a):null,e}async fillEmailBodyAndSend(e,t,a,s=null,n=null,i=null){const r=await this.fillEmailBody(e,t,a,s,n,i);return await this.sendEmail(r)}filterEmailsToValid(e){return(e||[]).filter((e=>{if(this.config.allowedDestinationEmails&&0!=this.config.allowedDestinationEmails.length){return!!this.config.allowedDestinationEmails.find((t=>t.test(e)))}return!0}))}async archiveEmailIfConfigured(t){let s=!1;if(t&&this.config.archive&&!t.doNotArchive){n.debug("Archiving outbound email to : %j",t.destinationAddresses);let i=a.trimToEmpty(this.config.archivePrefix);i.endsWith("/")||(i+="/");const r=e.utc();i+="year="+r.toFormat("yyyy")+"/month="+r.toFormat("MM")+"/day="+r.toFormat("dd")+"/hour="+r.toFormat("HH")+"/"+r.toFormat("mm_ss__SSS"),i+=".json";try{await this.config.archive.writeObjectToCacheFile(i,t),s=!0}catch(e){n.warn("Failed to archive email %s %j : %s",i,t,e)}}return s}applyLimitsToBodySizesIfAnyInPlace(e){if(this.config.maxMessageBodySizeInBytes){const 
t=a.trimToEmpty(e.txtMessage).length+a.trimToEmpty(e.htmlMessage).length;if(t>this.config.maxMessageBodySizeInBytes){if(n.warn("Max message size is %d but size is %d - converting",this.config.maxMessageBodySizeInBytes,t),e.attachments=e.attachments||[],a.trimToNull(e.txtMessage)){const t={filename:"original-txt-body.txt",contentType:"text/plain",base64Data:u.generateBase64VersionOfString(e.txtMessage)};e.attachments.push(t)}if(a.trimToNull(e.htmlMessage)){const t={filename:"original-html-body.html",contentType:"text/html",base64Data:u.generateBase64VersionOfString(e.htmlMessage)};e.attachments.push(t)}e.htmlMessage=null,e.txtMessage="The message was too large and was converted to attachment(s). Please see attached files for content"}}}applyLimitsToAttachmentSizesIfAnyInPlace(e){if(this.config.maxAttachmentSizeInBase64Bytes){const t=[];e.attachments&&e.attachments.forEach((e=>{e.base64Data&&e.base64Data.length<this.config.maxAttachmentSizeInBase64Bytes?t.push(e):(n.warn("Removing too-large attachment : %s : %s : %d",e.filename,e.contentType,e.base64Data.length),t.push({filename:"attachment-removed-notice-"+a.createRandomHexString(4)+".txt",contentType:"text/plain",base64Data:u.generateBase64VersionOfString("Attachment "+e.filename+" of type "+e.contentType+" was removed since it was "+e.base64Data.length+" bytes but max allowed is "+this.config.maxAttachmentSizeInBase64Bytes)}))})),e.attachments=t}}async sendEmail(e){t.notNullOrUndefined(e,"RTS must be defined"),t.notNullOrUndefined(e.destinationAddresses,"Destination addresses must be defined");let s=null,i=this.filterEmailsToValid(e.destinationAddresses);const r=e.doNotAutoBcc?[]:this.config.autoBccAddresses||[],o=(e.bccAddresses||[]).concat(r);0===i.length&&o.length>0&&(n.debug("Destination emails filtered to none but BCC defined, copying BCC"),i=o);const c=Object.assign({},e);if(c.srcDestinationAddresses=e.destinationAddresses,c.srcBccAddresses=e.bccAddresses,c.destinationAddresses=i,c.bccAddresses=o,this.applyLimitsToBodySizesIfAnyInPlace(c),this.applyLimitsToAttachmentSizesIfAnyInPlace(c),await this.archiveEmailIfConfigured(c),0===c.destinationAddresses.length)n.info("After cleaning email lists, no destination addresses left - not sending email");else{const e="To: "+c.destinationAddresses.join(", ")+"\n",t=c.bccAddresses&&c.bccAddresses.length>0?"Bcc: "+c.bccAddresses.join(", ")+"\n":"";try{const n=c.fromAddress||this.config.defaultSendingAddress,i="NextPart",r="AltPart";let o="From: "+n+"\n";o+=e,o+=t,o+="Subject: "+c.subject+"\n",o+="MIME-Version: 1.0\n",o+='Content-Type: multipart/mixed; boundary="'+i+'"\n',o+="\n\n--"+i+"\n",o+='Content-Type: multipart/alternative; boundary="'+r+'"\n',a.trimToNull(c.htmlMessage)&&(o+="\n\n--"+r+"\n",o+='Content-Type: text/html; charset="UTF-8"\n\n',o+=c.htmlMessage),a.trimToNull(c.txtMessage)&&(o+="\n\n--"+r+"\n",o+="Content-Type: text/plain\n\n",o+=c.txtMessage),o+="\n\n--"+r+"--\n",c.attachments&&c.attachments.forEach((e=>{o+="\n\n--"+i+"\n",o+="Content-Type: "+e.contentType+'; name="'+e.filename+'"\n',o+="Content-Transfer-Encoding: base64\n",o+="Content-Disposition: attachment\n\n",o+=e.base64Data.replace(/([^\0]{76})/g,"$1\n")+"\n\n"})),o+="\n\n--"+i+"--\n";const l={RawMessage:{Data:(new TextEncoder).encode(o)}};s=await this.ses.send(new ye(l))}catch(e){n.error("Error while processing email: %s",e,e)}}return s}static validEmail(e){return null!==e&&it.EMAIL.test(e)}}class rt{sns;topicArn;constructor(e=new 
we({region:"us-east-1"}),a){this.sns=e,this.topicArn=a,t.notNullOrUndefined(this.sns,"sns"),t.notNullOrUndefined(this.topicArn,"topicArn")}get snsClient(){return this.sns}async sendMessage(e,t=!1){let a=null;try{const t=e||"NO-MESSAGE-PROVIDED",s="string"==typeof t?t:JSON.stringify(t),i={TopicArn:this.topicArn,Message:s};n.debug("Sending via SNS : %j",i),a=await this.sns.send(new be(i))}catch(a){if(!t)throw a;n.error("Failed to fire SNS notification : %j : %s",e,a)}return a}async conditionallySendMessage(e,t,a=!1){let s=null;return t?s=await this.sendMessage(e,a):n.info("Not sending message, condition was false : %j",e),s}}class ot{ratchet;tableName;constructor(e,s){this.ratchet=e,this.tableName=s,t.notNullOrUndefined(e,"ratchet"),t.notNullOrUndefined(a.trimToNull(this.tableName),"tableName")}async acquireLock(e,t=30){let a=!1;if(e&&t){const s=Math.floor((new Date).getTime()/1e3),i={Item:{lockingKey:e,timestamp:s,expires:s+t},ReturnConsumedCapacity:ee.TOTAL,TableName:this.tableName,ConditionExpression:"attribute_not_exists(lockingKey)"};try{await this.ratchet.getDDB().send(new G(i));a=!0}catch(t){t instanceof J&&n.silly("Unable to acquire lock on %s",e)}}return a}async releaseLock(e){if(a.trimToNull(e))try{const t=await this.ratchet.simpleDelete(this.tableName,{lockingKey:e});n.silly("Released lock %s : %s",e,t)}catch(t){n.warn("Failed to release lock key : %s : %s",e,t,t)}}async clearExpiredSyncLocks(){const e=Math.floor((new Date).getTime()/1e3),t={TableName:this.tableName,FilterExpression:"expires < :now",ExpressionAttributeValues:{":now":e}},a=(await this.ratchet.fullyExecuteScan(t)).map((e=>({lockingKey:e.lockingKey})));return await this.ratchet.deleteAllInBatches(this.tableName,a,25)}}class ct{_locks=new Map;constructor(){}async acquireLock(e,t=30){let s=!1;if(a.trimToNull(e)){const a=Date.now(),n=this._locks.get(e);(!n||n<a)&&(this._locks.set(e,a+1e3*t),s=!0)}return s}async releaseLock(e){a.trimToNull(e)&&this._locks.delete(e)}async clearExpiredSyncLocks(){const e=[],t=Date.now();return this._locks.forEach(((a,s)=>{a<t&&e.push(s)})),e.forEach((e=>{this._locks.delete(e)})),e.length}}export{Ee as AwsBatchBackgroundProcessor,Te as AwsBatchRatchet,Qe as AwsCredentialsRatchet,Le as CascadeEnvironmentServiceProvider,ve as CloudWatchLogGroupRatchet,De as CloudWatchLogsRatchet,Oe as CloudWatchMetricsRatchet,Ae as Daemon,ke as DaemonUtil,Ne as DynamoDbStorageProvider,ot as DynamoDbSyncLock,ze as DynamoExpiringCodeProvider,Ue as DynamoRatchet,Xe as DynamoRuntimeParameterProvider,Re as DynamoTableRatchet,Be as Ec2Ratchet,Ke as EcrRatchet,Ve as EnvVarEnvironmentServiceProvider,_e as EnvironmentService,Je as ExpiringCodeRatchet,Ge as FixedEnvironmentServiceProvider,et as GlobalVariableOverrideRuntimeParameterProvider,je as HashSpreader,He as LambdaEventDetector,Ye as LambdaEventTypeGuards,it as Mailer,tt as MemoryRuntimeParameterProvider,ct as MemorySyncLock,Pe as PrototypeDao,Se as RatchetAwsInfo,Ze as Route53Ratchet,at as RuntimeParameterRatchet,Fe as S3CacheRatchet,We as S3EnvironmentServiceProvider,$e as S3ExpiringCodeProvider,st as S3LocationSyncRatchet,Ie as S3PrototypeDaoProvider,nt as S3Ratchet,Me as S3SimpleDao,Ce as S3StorageProvider,xe as SimpleCache,rt as SnsRatchet,qe as SsmEnvironmentServiceProvider};
//# sourceMappingURL=index.mjs.map
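
The index.mjs payload ships minified, which makes its retry logic hard to read in the hunk above. Below is a minimal un-minified sketch (TypeScript, using the same @aws-sdk/lib-dynamodb commands the bundle imports) of the batch-delete path visible in DynamoRatchet: it re-queues UnprocessedItems, treats a ProvisionedThroughputExceededException as "retry the whole batch", and backs off 2^attempt seconds for up to six tries. The helper names docClient and deleteBatchWithBackoff are illustrative, not part of the package's API.

// Sketch only: approximates the shape of DynamoRatchet's batch-delete loop.
import { DynamoDBClient, ProvisionedThroughputExceededException } from '@aws-sdk/client-dynamodb';
import { BatchWriteCommand, DynamoDBDocumentClient } from '@aws-sdk/lib-dynamodb';

const docClient = DynamoDBDocumentClient.from(new DynamoDBClient({}));

// Submits one batch of DeleteRequests (DynamoDB allows at most 25 per call) and
// re-queues UnprocessedItems with a 2^attempt-second backoff, up to 6 tries.
async function deleteBatchWithBackoff(tableName: string, keys: Record<string, unknown>[]): Promise<number> {
  let pending = keys.map((k) => ({ DeleteRequest: { Key: k } }));
  for (let attempt = 1; attempt < 7 && pending.length > 0; attempt++) {
    let unprocessed: typeof pending = [];
    try {
      const out = await docClient.send(new BatchWriteCommand({ RequestItems: { [tableName]: pending } }));
      unprocessed = (out.UnprocessedItems?.[tableName] as typeof pending) ?? [];
    } catch (e) {
      if (!(e instanceof ProvisionedThroughputExceededException)) throw e;
      unprocessed = pending; // throttled: re-queue the entire batch
    }
    if (unprocessed.length > 0) {
      // Exponential backoff (2, 4, 8 ... seconds) before re-submitting leftovers
      await new Promise((res) => setTimeout(res, 1000 * Math.pow(2, attempt)));
    }
    pending = unprocessed;
  }
  return keys.length - pending.length; // count of items actually deleted
}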
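The DynamoDbSyncLock class exported near the end of the bundle uses a related pattern: lock acquisition is a conditional put on "attribute_not_exists(lockingKey)", and a ConditionalCheckFailedException is interpreted as "lock already held". A sketch of that pattern, assuming a hypothetical sync-locks table:

// Sketch only: the conditional-put lock used by DynamoDbSyncLock, per the bundle.
// LOCK_TABLE is a hypothetical table name.
import { ConditionalCheckFailedException, DynamoDBClient } from '@aws-sdk/client-dynamodb';
import { DeleteCommand, DynamoDBDocumentClient, PutCommand } from '@aws-sdk/lib-dynamodb';

const ddb = DynamoDBDocumentClient.from(new DynamoDBClient({}));
const LOCK_TABLE = 'sync-locks';

async function acquireLock(lockingKey: string, expirationSeconds = 30): Promise<boolean> {
  const nowSec = Math.floor(Date.now() / 1000);
  try {
    await ddb.send(new PutCommand({
      TableName: LOCK_TABLE,
      Item: { lockingKey, timestamp: nowSec, expires: nowSec + expirationSeconds },
      ConditionExpression: 'attribute_not_exists(lockingKey)',
    }));
    return true; // we now hold the lock
  } catch (e) {
    if (e instanceof ConditionalCheckFailedException) return false; // already held
    throw e;
  }
}

async function releaseLock(lockingKey: string): Promise<void> {
  await ddb.send(new DeleteCommand({ TableName: LOCK_TABLE, Key: { lockingKey } }));
}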
package/package.json
CHANGED

@@ -1,6 +1,6 @@
   {
     "name": "@bitblit/ratchet-aws",
-    "version": "4.0.305-alpha",
+    "version": "4.0.307-alpha",
     "description": "Common tools for use with AWS browser and node",
     "sideEffects": false,
     "type": "module",
@@ -52,7 +52,7 @@
     },
     "license": "Apache-2.0",
     "dependencies": {
-      "@bitblit/ratchet-common": "4.0.305-alpha"
+      "@bitblit/ratchet-common": "4.0.307-alpha"
     },
     "optionalDependencies": {
       "@aws-sdk/client-athena": "3.468.0",
@@ -94,7 +94,7 @@
       "@aws-sdk/lib-storage": "^3.468.0",
       "@aws-sdk/s3-request-presigner": "^3.468.0",
       "@aws-sdk/types": "^3.468.0",
-      "@bitblit/ratchet-common": "4.0.305-alpha",
+      "@bitblit/ratchet-common": "4.0.307-alpha",
       "@smithy/abort-controller": "^2.0.15",
       "@smithy/smithy-client": "^2.1.15",
       "@smithy/util-waiter": "^2.0.15"
@@ -162,8 +162,8 @@
     }
   },
   "devDependencies": {
-    "@bitblit/ratchet-jest": "4.0.305-alpha",
-    "@bitblit/ratchet-node-only": "4.0.305-alpha",
+    "@bitblit/ratchet-jest": "4.0.307-alpha",
+    "@bitblit/ratchet-node-only": "4.0.307-alpha",
     "@types/aws-lambda": "8.10.131",
     "aws-sdk-client-mock": "3.0.1"
   }
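
As a usage sketch grounded in the export list in index.mjs (the topic ARN below is a placeholder): the SnsRatchet wrapper takes an SNSClient and a topic ARN, JSON.stringify's non-string payloads before publishing, and, when its second sendMessage argument is true, logs send failures instead of throwing.

// Usage sketch, not an endorsement of a specific API shape beyond what the bundle exports.
import { SNSClient } from '@aws-sdk/client-sns';
import { SnsRatchet } from '@bitblit/ratchet-aws';

const ratchet = new SnsRatchet(
  new SNSClient({ region: 'us-east-1' }),
  'arn:aws:sns:us-east-1:123456789012:example-topic', // placeholder ARN
);

// Publishes the object as a JSON string; true = swallow and log failures.
await ratchet.sendMessage({ event: 'release', version: '4.0.307-alpha' }, true);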