@bitblit/ratchet-aws 4.0.414-alpha → 4.0.416-alpha
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/index.mjs +1 -1
- package/lib/types.d.ts +1 -0
- package/package.json +4 -4
package/lib/index.mjs
CHANGED
@@ -1,2 +1,2 @@
-
import{DateTime as e}from"luxon";import{RequireRatchet as t,StringRatchet as a,ErrorRatchet as s,Logger as n,StopWatch as i,PromiseRatchet as r,WebStreamRatchet as o,MapRatchet as c,DurationRatchet as l,NumberRatchet as u,MailerUtil as h}from"@bitblit/ratchet-common";import{SubmitJobCommand as d,ListJobsCommand as m,DescribeJobDefinitionsCommand as f}from"@aws-sdk/client-batch";import{CloudWatchLogsClient as g,DescribeLogStreamsCommand as p,FilterLogEventsCommand as y,OrderBy as w,DeleteLogStreamCommand as b,DescribeLogGroupsCommand as T,DeleteLogGroupCommand as E,StartQueryCommand as N,GetQueryResultsCommand as S,StopQueryCommand as C}from"@aws-sdk/client-cloudwatch-logs";import{CloudWatchClient as v,PutMetricDataCommand as x,StandardUnit as k}from"@aws-sdk/client-cloudwatch";import{PutObjectCommand as O,GetObjectCommand as D,NoSuchKey as F,DeleteObjectCommand as I,NotFound as U,HeadObjectCommand as P,CopyObjectCommand as j,ListObjectsCommand as R,S3Client as A,ListObjectsV2Command as M}from"@aws-sdk/client-s3";import{Upload as B}from"@aws-sdk/lib-storage";import{getSignedUrl as K}from"@aws-sdk/s3-request-presigner";import{ScanCommand as L,QueryCommand as V,BatchWriteCommand as _,BatchGetCommand as G,PutCommand as W,GetCommand as q,UpdateCommand as J,DeleteCommand as Q}from"@aws-sdk/lib-dynamodb";import{ConditionalCheckFailedException as z,ProvisionedThroughputExceededException as $,DeleteTableCommand as H,CreateTableCommand as Y,ListTablesCommand as Z,DescribeTableCommand as X,ResourceNotFoundException as ee,ReturnConsumedCapacity as te}from"@aws-sdk/client-dynamodb";import{EC2Client as ae,StopInstancesCommand as se,StartInstancesCommand as ne,DescribeInstancesCommand as ie}from"@aws-sdk/client-ec2";import{EC2InstanceConnectClient as re,SendSSHPublicKeyCommand as oe}from"@aws-sdk/client-ec2-instance-connect";import{BatchDeleteImageCommand as ce,DescribeImagesCommand as le,DescribeRepositoriesCommand as ue,DescribeRegistryCommand as he}from"@aws-sdk/client-ecr";import{GetFunctionCommand as de,ListFunctionsCommand as me}from"@aws-sdk/client-lambda";import{SSMClient as fe,GetParameterCommand as ge,ParameterNotFound as pe}from"@aws-sdk/client-ssm";import{ChangeResourceRecordSetsCommand as ye,waitUntilResourceRecordSetsChanged as we}from"@aws-sdk/client-route-53";import{WaiterState as be}from"@smithy/util-waiter";import{SendRawEmailCommand as Te}from"@aws-sdk/client-ses";import{SNSClient as Ee,PublishCommand as Ne}from"@aws-sdk/client-sns";class Se{batchRatchet;validTaskNames;constructor(e,a){this.batchRatchet=e,this.validTaskNames=a,t.notNullOrUndefined(this.batchRatchet,"batchRatchet"),t.notNullOrUndefined(this.batchRatchet.batchClient,"batchRatchet.batchClient"),t.notNullOrUndefined(this.batchRatchet.defaultJobDefinition,"batchRatchet.defaultJobDefinition"),t.notNullOrUndefined(this.batchRatchet.defaultQueueName,"batchRatchet.defaultQueueName")}async scheduleBackgroundTask(t,i={},r={}){!this.validTaskNames||!this.validTaskNames.length||a.trimToNull(t)&&this.validTaskNames.includes(t)||s.throwFormattedErr("Cannot start task %s - not found in valid task list",t),n.info("Submitting background task to AWS batch: %s %j %s",t,i,this.batchRatchet.defaultQueueName);let o=null;const c=`${this.batchRatchet.defaultJobDefinition}-${t}_${e.utc().toFormat("yyyy-MM-dd-HH-mm")}`,l={jobName:c,jobDefinition:this.batchRatchet.defaultJobDefinition,jobQueue:this.batchRatchet.defaultQueueName,parameters:{taskName:t,taskData:JSON.stringify(i),taskMetadata:JSON.stringify(r)}};try{o=await 
this.batchRatchet.scheduleJob(l),n.info("Job %s(%s) submitted",o.jobName,o.jobId)}catch(e){n.error("Cannot submit batch job taskName: %s jobDef: %s queue: %s jobName: %s data: %j",t,this.batchRatchet.defaultJobDefinition,this.batchRatchet.defaultQueueName,c,i,e)}return o}}class Ce{_batchClient;_defaultQueueName;_defaultJobDefinition;constructor(e,t,a){this._batchClient=e,this._defaultQueueName=t,this._defaultJobDefinition=a}get batchClient(){return this._batchClient}get defaultQueueName(){return this._defaultQueueName}get defaultJobDefinition(){return this._defaultJobDefinition}async scheduleJob(e){n.info("Submitting batch job %s",e.jobName);try{const t=await this._batchClient.send(new d(e));return n.info("Job %s(%s) submitted",t.jobName,t.jobId),t}catch(t){n.error("Cannot submit batch job %s: %s",e.jobName,t)}return null}async jobCountInState(e,t=this.defaultQueueName){return(await this.listJobs(t,e)).length}async listJobs(e=this.defaultQueueName,a=null){t.notNullOrUndefined(e,"queueName");let s=[];const i={jobQueue:e,jobStatus:a,nextToken:null};n.info("Fetching %j",i);do{n.info("Pulling page...");const e=await this._batchClient.send(new m(i));s=s.concat(e.jobSummaryList),i.nextToken=e.nextToken}while(i.nextToken);return s}}class ve{constructor(){}static buildInformation(){return{version:"414",hash:"311d546526302b7dd26ad47a98c28cad7b7c310c",branch:"alpha-2024-07-05-1",tag:"alpha-2024-07-05-1",timeBuiltISO:"2024-07-05T16:31:46-0700",notes:"No notes"}}}class xe{dynamo;opts;constructor(e,a){this.dynamo=e,this.opts=a,t.notNullOrUndefined(this.dynamo,"dynamo"),t.notNullOrUndefined(this.opts,"opts"),t.notNullOrUndefined(this.opts.tableName,"opts.tableName"),t.notNullOrUndefined(this.opts.hashKeyName,"opts.hashKeyName"),t.true(!this.opts.useRangeKeys||!!this.opts.rangeKeyName&&!!this.opts.hashKeyValue,"invalid range configuration")}static createDefaultOptions(){return{tableName:"simple-cache",useRangeKeys:!1,hashKeyName:"cache-key",rangeKeyName:null,hashKeyValue:null}}createKeyObject(e){const t={};return this.opts.useRangeKeys?(t[this.opts.hashKeyName]=this.opts.hashKeyValue,t[this.opts.rangeKeyName]=e):t[this.opts.hashKeyName]=e,t}cleanDynamoFieldsFromObjectInPlace(e){e&&(delete e[this.opts.hashKeyName],this.opts.rangeKeyName&&delete e[this.opts.rangeKeyName],this.opts.dynamoExpiresColumnName&&delete e[this.opts.dynamoExpiresColumnName])}extractKeysFromObject(e){let t=null;return e&&(t={},this.opts.useRangeKeys?(t[this.opts.hashKeyName]=this.opts.hashKeyValue,t[this.opts.rangeKeyName]=e.cacheKey):t[this.opts.hashKeyName]=e.cacheKey),t}async readFromCache(e){const t=this.createKeyObject(e),a=await this.dynamo.simpleGet(this.opts.tableName,t);return this.cleanDynamoFieldsFromObjectInPlace(a),a}async storeInCache(e){t.notNullOrUndefined(e,"value"),t.notNullOrUndefined(e.cacheKey,"value.cacheKey");const a=Object.assign({},e,this.createKeyObject(e.cacheKey));this.opts.dynamoExpiresColumnName&&e.expiresEpochMS&&(a[this.opts.dynamoExpiresColumnName]=Math.floor(e.expiresEpochMS/1e3));return!!await this.dynamo.simplePut(this.opts.tableName,a)}async removeFromCache(e){await this.dynamo.simpleDelete(this.opts.tableName,this.createKeyObject(e))}async clearCache(){const e=(await this.readAll()).map((e=>this.extractKeysFromObject(e)));return await this.dynamo.deleteAllInBatches(this.opts.tableName,e,25)}async readAll(){let e=null;if(this.opts.useRangeKeys){const t={TableName:this.opts.tableName,KeyConditionExpression:"#cacheKey = 
:cacheKey",ExpressionAttributeNames:{"#cacheKey":this.opts.hashKeyName},ExpressionAttributeValues:{":cacheKey":this.opts.hashKeyValue}};e=await this.dynamo.fullyExecuteQuery(t)}else{const t={TableName:this.opts.tableName};e=await this.dynamo.fullyExecuteScan(t)}return e.forEach((e=>this.cleanDynamoFieldsFromObjectInPlace(e))),e}}class ke{s3CacheRatchet;prefix;constructor(e,a){this.s3CacheRatchet=e,this.prefix=a,t.notNullOrUndefined(this.s3CacheRatchet,"s3CacheRatchet"),t.notNullOrUndefined(this.s3CacheRatchet.getDefaultBucket(),"s3CacheRatchet.defaultBucket")}keyToPath(e){let t=a.trimToEmpty(this.prefix);return t.length>0&&!t.endsWith("/")&&(t+="/"),t+=e,t}async readFromCache(e){return await this.s3CacheRatchet.fetchCacheFileAsObject(this.keyToPath(e))}async storeInCache(e){t.notNullOrUndefined(e,"value"),t.notNullOrUndefined(e.cacheKey,"value.cacheKey");return!!await this.s3CacheRatchet.writeObjectToCacheFile(this.keyToPath(e.cacheKey),e)}async removeFromCache(e){await this.s3CacheRatchet.removeCacheFile(this.keyToPath(e))}async clearCache(){const e=await this.s3CacheRatchet.directChildrenOfPrefix(this.keyToPath(""));return await Promise.all(e.map((e=>this.removeFromCache(e)))),e.length}async readAll(){const e=await this.s3CacheRatchet.directChildrenOfPrefix(this.keyToPath(""));return await Promise.all(e.map((e=>this.readFromCache(e))))}}class Oe{provider;defaultTimeToLiveMS;constructor(e,t=6e4){this.provider=e,this.defaultTimeToLiveMS=t}createDefaultReadOptions(){return{maxStalenessMS:null,timeToLiveMS:this.defaultTimeToLiveMS,cacheNullValues:!1}}async fetchWrapper(e,t,a=this.createDefaultReadOptions()){n.silly("Fetching %s",e);const s=(new Date).getTime();let i=await this.provider.readFromCache(e);if(i&&i.expiresEpochMS<s&&(n.debug("Object found, but expired - removing"),i=null),i&&a&&a.maxStalenessMS&&s-i.createdEpochMS>a.maxStalenessMS&&(n.debug("Object found by too stale - removing"),i=null),!i){n.debug("%s not found in cache, generating",e);const r=await t();(r||a?.cacheNullValues)&&(n.debug("Writing %j to cache"),i={cacheKey:e,createdEpochMS:s,expiresEpochMS:a&&a.timeToLiveMS?s+a.timeToLiveMS:null,value:r,generated:!1},await this.provider.storeInCache(i),i.generated=!0)}return i}async fetch(e,t,a=null){const s=await this.fetchWrapper(e,t,a);return s?s.value:null}async removeFromCache(e,t){let a=null;return t&&(a=await this.fetchWrapper(e,(()=>null))),await this.provider.removeFromCache(e),a}async clearCache(){return this.provider.clearCache()}async readAll(){return this.provider.readAll()}}class De{logGroup;awsCWLogs;constructor(e,t=new g({region:"us-east-1"})){this.logGroup=e,this.awsCWLogs=t}get cloudWatchLogsClient(){return this.awsCWLogs}async readLogStreams(e=null,t=null){const a={logGroupName:this.logGroup,orderBy:"LastEventTime"},s=[];do{n.debug("Pulling more log streams (%d found so far)",s.length);const i=await this.awsCWLogs.send(new p(a));i.logStreams.forEach((a=>{null!==a.lastEventTimestamp&&(!e||a.lastEventTimestamp>=e)&&(!t||a.firstEventTimestamp<=t)&&s.push(a)})),a.nextToken=i.nextToken}while(a.nextToken);return n.debug("Found %d total, returning",s.length),s}async readLogStreamNames(e=null,t=null){return(await this.readLogStreams(e,t)).map((e=>e.logStreamName))}async readEvents(e,t=null,a=null,s=!0,r=null){const o=new i,c={logGroupName:this.logGroup,endTime:a,startTime:t};e&&(c.filterPattern=e),n.debug("Reading log events matching : %j",c);let l=[];do{n.debug("Pulling more log events (%d found so far) : %s",l.length,o.dump());const e=await this.awsCWLogs.send(new 
y(c));l=l.concat(e.events),c.nextToken=e.nextToken}while(c.nextToken&&(!r||l.length<r));return n.debug("Found %d total in %s",l.length,o.dump()),s&&(n.debug("Sorting events by timestamp"),l=l.sort(((e,t)=>{let a=e.timestamp-t.timestamp;return 0===a&&(a=e.message.localeCompare(t.message)),a}))),o.log(),l}}class Fe{static MAX_DELETE_RETRIES=5;cwLogs;constructor(e=null){this.cwLogs=e||new g({region:"us-east-1"})}get cloudWatchLogsClient(){return this.cwLogs}async removeEmptyOrOldLogStreams(e,t=1e3,a=null){n.info("Removing empty streams from %s, oldest event epoch MS : %d",e,a);const s={logGroupName:e,orderBy:w.LastEventTime},i=a||1;let o=0;const c=[],l=[];let u=10;do{n.debug("Executing search for streams");try{const e=await this.cwLogs.send(new p(s));o+=e.logStreams.length,n.debug("Found %d streams (%d so far, %d to delete)",e.logStreams.length,o,c.length);for(let a=0;a<e.logStreams.length&&c.length<t;a++){const t=e.logStreams[a];t.firstEventTimestamp?t.lastEventTimestamp<i&&c.push(t):c.push(t)}s.nextToken=e.nextToken}catch(e){const t=u;u=Math.min(1e3,1.5*u),n.info("Caught while describing %s, increasing wait between deletes (was %d, now %d)",e,t,u)}}while(s.nextToken&&c.length<t);n.info("Found %d streams to delete",c.length);let h=10;for(let t=0;t<c.length;t++){const a={logGroupName:e,logStreamName:c[t].logStreamName},s=0===c[t].storedBytes?"empty":"old";n.info("Removing %s stream %s",s,c[t].logStreamName);let i=!1,o=0;for(;!i&&o<Fe.MAX_DELETE_RETRIES;)try{await this.cwLogs.send(new b(a)),i=!0,await r.wait(h)}catch(e){o++;const t=h;h=Math.min(1e3,1.5*h),n.info("Caught %s, increasing wait between deletes and retrying (wait was %d, now %d) (Retry %d of %d)",e,t,h,o,Fe.MAX_DELETE_RETRIES)}i||l.push(c[t])}return n.warn("Failed to remove streams : %j",l),c}async findOldestEventTimestampInGroup(e){const t=await this.findStreamWithOldestEventInGroup(e);return t?t.firstEventTimestamp:null}async findStreamWithOldestEventInGroup(e){n.info("Finding oldest event in : %s",e);let t=null;try{const a={logGroupName:e,orderBy:w.LastEventTime};let s=0;do{n.debug("Executing search for streams");const e=await this.cwLogs.send(new p(a));s+=e.logStreams.length,n.debug("Found %d streams (%d so far)",e.logStreams.length,s),e.logStreams.forEach((e=>{e.firstEventTimestamp&&(null===t||e.firstEventTimestamp<t.firstEventTimestamp)&&(t=e)})),a.nextToken=e.nextToken}while(a.nextToken)}catch(t){n.error("Error attempting to find oldest event in group : %s : %s",e,t,t)}return t}async findLogGroups(e){t.notNullOrUndefined(e);const a={logGroupNamePrefix:e};let s=[];do{n.info("%d found, pulling log groups : %j",s.length,a);const e=await this.cwLogs.send(new T(a));s=s.concat(e.logGroups),a.nextToken=e.nextToken}while(a.nextToken);return s}async removeLogGroups(e){t.notNullOrUndefined(e);const a=[];for(let t=0;t<e.length;t++)try{n.info("Deleting %j",e[t]);const s={logGroupName:e[t].logGroupName};await this.cwLogs.send(new E(s)),a.push(!0)}catch(s){n.error("Failure to delete %j : %s",e[t],s),a.push(!1)}return a}async removeLogGroupsWithPrefix(e){t.notNullOrUndefined(e),t.true(a.trimToEmpty(e).length>0),n.info("Removing log groups with prefix %s",e);const s=await this.findLogGroups(e);return await this.removeLogGroups(s)}async fullyExecuteInsightsQuery(e){t.notNullOrUndefined(e),n.debug("Starting insights query : %j",e);const a=await this.cwLogs.send(new N(e));n.debug("Got query id %j",a);let s=null,i=100;for(;!s||["Running","Scheduled"].includes(s.status);)s=await this.cwLogs.send(new S({queryId:a.queryId})),await 
r.wait(i),i*=2,n.info("Got : %j",s);return s}async abortInsightsQuery(e){let t=null;return e&&(t=await this.cwLogs.send(new C({queryId:e}))),t}}class Ie{cw;constructor(e=null){this.cw=e||new v({region:"us-east-1",apiVersion:"2010-08-01"})}get cloudWatchClient(){return this.cw}async writeSingleMetric(e,t,a,s=k.None,i,r=new Date,o=!1){const c=[];a&&a.length>0&&a.forEach((e=>{c.push({Name:e.key,Value:e.value})}));const l={Namespace:e,MetricData:[{MetricName:t,Dimensions:c,Unit:s,Value:i,Timestamp:r,StorageResolution:o?1:60}]};n.silly("Writing metric to cw : %j",l);const u=await this.cw.send(new x(l));return n.silly("Result: %j",u),u}async writeDynamoCountAsMinuteLevelMetric(t){if(n.info("Publishing %s / %s metric for %s UTC",t.namespace,t.metric,t.minuteUTC),t.scan&&t.query)throw new Error("Must send query or scan, but not both");if(!t.scan&&!t.query)throw new Error("You must specify either a scan or a query");const a=t.query?await t.dynamoRatchet.fullyExecuteQueryCount(t.query):await t.dynamoRatchet.fullyExecuteScanCount(t.scan);n.debug("%s / %s for %s are %j",t.namespace,t.metric,t.minuteUTC,a);const s=t.minuteUTC.split(" ").join("T")+":00Z",i=e.fromISO(s).toJSDate(),r=await this.writeSingleMetric(t.namespace,t.metric,t.dims,k.Count,a.count,i,!1);return n.debug("Metrics response: %j",r),a.count}}class Ue{static DEFAULT_CONTENT=Buffer.from("DAEMON_PLACEHOLDER");static DAEMON_METADATA_KEY="daemon_meta";static async start(e,t,a,s){try{s.meta=s.meta||{},n.info("Starting daemon, key: %s, options: %j",a,s);const i=(new Date).getTime(),r={id:t,title:s.title,lastUpdatedEpochMS:i,lastUpdatedMessage:"Created",targetFileName:s.targetFileName,startedEpochMS:i,completedEpochMS:null,meta:s.meta,error:null,link:null,contentType:s.contentType};return await Ue.writeState(e,a,r,Ue.DEFAULT_CONTENT)}catch(e){throw n.error("Error while trying to start a daemon: %j %s",s,e),e}}static async writeState(e,t,a,s){try{const i={};a.lastUpdatedEpochMS=(new Date).getTime(),i[Ue.DAEMON_METADATA_KEY]=JSON.stringify(a);const r={Bucket:e.getDefaultBucket(),Key:t,ContentType:a.contentType,Metadata:i,Body:s};a.targetFileName&&(r.ContentDisposition='attachment;filename="'+a.targetFileName+'"');const o=await e.getS3Client().send(new O(r));return n.silly("Daemon wrote : %s",o),Ue.stat(e,t)}catch(e){throw n.error("Error while trying to write a daemon stat: %j %s",a,e),e}}static async streamDataAndFinish(e,t,s,i){n.debug("Streaming data to %s",t);const r=await Ue.updateMessage(e,t,"Streaming data");r.completedEpochMS=(new Date).getTime(),r.lastUpdatedMessage="Complete";const o={};o[Ue.DAEMON_METADATA_KEY]=JSON.stringify(r);const c={Bucket:e.getDefaultBucket(),Key:t,ContentType:r.contentType,Metadata:o,Body:s},l=a.trimToNull(i?.overrideTargetFileName)||a.trimToNull(r?.targetFileName);l&&(c.ContentDisposition='attachment;filename="'+l+'"');const u=new B({client:e.getS3Client(),params:c,tags:[],queueSize:4,partSize:5242880,leavePartsOnError:!1});i?.progressFn&&u.on("httpUploadProgress",i.progressFn);const h=await u.done();return n.silly("Daemon wrote : %s",h),Ue.stat(e,t)}static async updateMessage(e,t,a){try{const s=await Ue.stat(e,t);return s.lastUpdatedMessage=a,Ue.writeState(e,t,s,Ue.DEFAULT_CONTENT)}catch(e){throw n.error("Error while trying to update a daemon message: %j %s",t,e),e}}static async stat(e,t){try{n.debug("Daemon stat for path %s / %s",e.getDefaultBucket(),t);let a=null;const s=await e.fetchMetaForCacheFile(t);n.debug("Daemon: Meta is %j",s);const i=s&&s.Metadata?s.Metadata[Ue.DAEMON_METADATA_KEY]:null;return 
i?(a=JSON.parse(i),a.completedEpochMS&&!a.error&&(a.link=await e.preSignedDownloadUrlForCacheFile(t))):n.warn("No metadata found! (Head was %j)",s),a}catch(e){throw n.error("Error while trying to fetch a daemon state: %j %s",t,e),e}}static async abort(e,t){return Ue.error(e,t,"Aborted")}static async error(e,t,a){try{const s=await Ue.stat(e,t);return s.error=a,s.completedEpochMS=(new Date).getTime(),Ue.writeState(e,t,s,Ue.DEFAULT_CONTENT)}catch(e){throw n.error("Error while trying to write a daemon error: %j %s",t,e),e}}static async finalize(e,t,a){try{n.info("Finalizing daemon %s with %d bytes",t,a.length);const s=await Ue.stat(e,t);return s.completedEpochMS=(new Date).getTime(),s.lastUpdatedMessage="Complete",Ue.writeState(e,t,s,a)}catch(e){throw n.error("Error while trying to finalize a daemon: %j %s",t,e),e}}}class Pe{s3;defaultBucket;constructor(e,a=null){this.s3=e,this.defaultBucket=a,t.notNullOrUndefined(this.s3,"s3")}get s3Client(){return this.s3}static applyCacheControlMaxAge(e,t){return e&&t&&(e.CacheControl="max-age="+t),e}static applyUserMetaData(e,t,s){return e&&a.trimToNull(t)&&a.trimToNull(s)&&(e.Metadata=e.Metadata||{},e.Metadata[t]=s),e}getDefaultBucket(){return this.defaultBucket}getS3Client(){return this.s3}async fileExists(e,t=null){try{return!!await this.fetchMetaForCacheFile(e,this.bucketVal(t))}catch(e){return n.silly("Error calling file exists (as expected) %s",e),!1}}async fetchCacheFilePassThru(e){let t=null;try{t=await this.s3.send(new D(e))}catch(a){if(!(a instanceof F))throw a;n.debug("Key %s not found - returning null",e.Key),t=null}return t}async fetchCacheFileAsS3GetObjectCommandOutput(e,t=null){let a=null;try{const s={Bucket:this.bucketVal(t),Key:e};a=await this.s3.send(new D(s))}catch(t){if(!(t instanceof F))throw t;n.debug("Key %s not found - returning null",e),a=null}return a}async fetchCacheFileAsReadableStream(e,t=null){return(await this.fetchCacheFileAsS3GetObjectCommandOutput(e,t)).Body.transformToWebStream()}async fetchCacheFileAsBuffer(e,t=null){let a=null;const s=await this.fetchCacheFileAsS3GetObjectCommandOutput(e,t);if(s?.Body){const e=await s.Body.transformToByteArray();a=Buffer.from(e)}return a}async fetchCacheFileAsString(e,t=null){let a=null;const s=await this.fetchCacheFileAsS3GetObjectCommandOutput(e,t);return s?.Body&&(a=await s.Body.transformToString()),a}async fetchCacheFileAsObject(e,t=null){const a=await this.fetchCacheFileAsString(e,t);return a?JSON.parse(a):null}async removeCacheFile(e,t=null){let a=null;const s={Bucket:this.bucketVal(t),Key:e};try{a=await this.s3.send(new I(s))}catch(s){if(!(s&&s instanceof U))throw s;n.info("Swallowing 404 deleting missing object %s %s",t,e),a=null}return a}async writeObjectToCacheFile(e,t,a,s){const n=JSON.stringify(t);return this.writeStringToCacheFile(e,n,a,s)}async writeStringToCacheFile(e,t,a,s){const n=o.stringToWebReadableStream(t);return this.writeStreamToCacheFile(e,n,a,s)}async writeStreamToCacheFile(e,t,a,s,i=(e=>{n.debug("Uploading : %s",e)})){const r=Object.assign({},a||{},{Bucket:this.bucketVal(s),Key:e,Body:t}),o=new B({client:this.s3,params:r,tags:[],queueSize:4,partSize:5242880,leavePartsOnError:!1});i&&o.on("httpUploadProgress",i);return await o.done()}async synchronize(e,a,s=this,r=!1){t.notNullOrUndefined(e,"srcPrefix"),t.notNullOrUndefined(a,"targetPrefix"),t.true(e.endsWith("/"),"srcPrefix must end in /"),t.true(a.endsWith("/"),"targetPrefix must end in /");let o=[];const c=await this.directChildrenOfPrefix(e),l=await s.directChildrenOfPrefix(a),u=new i;for(let 
t=0;t<c.length;t++){const i=c[t];if(n.info("Processing %s : %s",i,u.dumpExpected(t/c.length)),i.endsWith("/"))if(r){n.info("%s is a subfolder - recursing");const t=await this.synchronize(e+i,a+i,s,r);n.info("Got %d back from %s",t.length,i),o=o.concat(t)}else n.info("%s is a subfolder and recurse not specified - skipping",i);else{let t=!0;const r=await this.fetchMetaForCacheFile(e+i);if(l.includes(i)){const e=await s.fetchMetaForCacheFile(a+i);r.ETag===e.ETag&&(n.debug("Skipping - identical"),t=!1)}if(t){n.debug("Copying...");const t=await this.fetchCacheFileAsReadableStream(e+i);try{const e=await s.writeStreamToCacheFile(a+i,t,r,void 0);n.silly("Write result : %j",e),o.push(i)}catch(e){n.error("Failed to sync : %s : %s",i,e)}}}}return n.info("Found %d files, copied %d",c.length,o.length),u.log(),o}async fetchMetaForCacheFile(e,t=null){let a=null;try{a=await this.s3.send(new P({Bucket:this.bucketVal(t),Key:e}))}catch(s){if(!(s&&s instanceof U))throw n.error("Unrecognized error, rethrowing : %s",s,s),s;n.info("Cache file %s %s not found returning null",this.bucketVal(t),e),a=null}return a}async cacheFileAgeInSeconds(e,t=null){try{const a=await this.fetchMetaForCacheFile(e,t);return a&&a.LastModified?Math.floor(((new Date).getTime()-a.LastModified.getTime())/1e3):(n.warn("Cache file %s %s had no last modified returning null",this.bucketVal(t),e),null)}catch(a){if(a&&a instanceof U)return n.warn("Cache file %s %s not found returning null",this.bucketVal(t),e),null;throw a}}async copyFile(e,t,a=null,s=null){const n={CopySource:"/"+this.bucketVal(a)+"/"+e,Bucket:this.bucketVal(s),Key:t,MetadataDirective:"COPY"};return await this.s3.send(new j(n))}async quietCopyFile(e,t,a=null,s=null){let i=!1;try{await this.copyFile(e,t,a,s);i=!0}catch(e){n.silly("Failed to copy file in S3 : %s",e)}return i}async preSignedDownloadUrlForCacheFile(e,t=3600,a=null){const s={Bucket:this.bucketVal(a),Key:e};return await K(this.s3,new D(s),{expiresIn:t})}async directChildrenOfPrefix(e,t=!1,a=null,s=null){const n=[],i={Bucket:this.bucketVal(a),Prefix:e,Delimiter:"/"};let r=null;do{r=await this.s3.send(new R(i));const o=e.length;r.CommonPrefixes&&r.CommonPrefixes.forEach((e=>{if(!s||n.length<s){const t=e.Prefix.substring(o);n.push(t)}})),r.Contents&&await Promise.all(r.Contents.map((async e=>{if(!s||n.length<s)if(t){const t={link:await this.preSignedDownloadUrlForCacheFile(e.Key,3600,a),name:e.Key.substring(o),size:e.Size};n.push(t)}else n.push(e.Key.substring(o))}))),i.Marker=r.NextMarker}while(i.Marker&&(!s||n.length<s));return n}async allSubFoldersOfPrefix(e,t=null){const a=[e];let s=0;for(;s<a.length;){const e=a[s++];n.debug("Pulling %s (%d remaining)",e,a.length-s);const i={Bucket:this.bucketVal(t),Prefix:e,Delimiter:"/"};let r=null;do{i.ContinuationToken=r?r.NextContinuationToken:null,r=await this.s3.send(new R(i)),r.CommonPrefixes.forEach((e=>{a.push(e.Prefix)})),n.debug("g:%j",r)}while(r.NextContinuationToken)}return a}bucketVal(e){const t=e||this.defaultBucket;if(!t)throw"You must set either the default bucket or pass it explicitly";return t}}class je{s3;bucket;prefix;_defaultGroup;jwtRatchet;static DEFAULT_DEFAULT_GROUP="DEFAULT";cache;constructor(e,t,a="",s=je.DEFAULT_DEFAULT_GROUP,n){this.s3=e,this.bucket=t,this.prefix=a,this._defaultGroup=s,this.jwtRatchet=n,this.cache=new Pe(this.s3,this.bucket)}get defaultGroup(){return this._defaultGroup}async keyToPublicToken(e,a){t.notNullOrUndefined(this.jwtRatchet,"You must set jwtRatchet if you wish to use public 
tokens"),t.notNullOrUndefined(e,"key"),t.true(a>0,"Expiration seconds must be larger than 0");const s={daemonKey:e};return await this.jwtRatchet.createTokenString(s,a)}keyToPath(e){return Buffer.from(e,"base64").toString()}pathToKey(e){return Buffer.from(e).toString("base64")}generatePath(e=this._defaultGroup){return this.generatePrefix(e)+a.createType4Guid()}generatePrefix(e=this._defaultGroup){return this.prefix+e+"/"}async start(e){e.group=e.group||this._defaultGroup;const t=this.generatePath(e.group),a=this.pathToKey(t);return Ue.start(this.cache,a,t,e)}async writeState(e,t){const a=this.keyToPath(e.id);return Ue.writeState(this.cache,a,e,t)}async clean(e=this._defaultGroup,t=604800){try{n.info("Daemon removing items older than %d seconds from group %s",t,e);const a=await this.list(e),s=(new Date).getTime(),i=[];for(let e=0;e<a.length;e++){const n=a[e];if((s-n.startedEpochMS)/1e3>t){await this.cache.removeCacheFile(this.keyToPath(n.id));i.push(n)}}return n.debug("Removed %d of %d items",i.length,a.length),i}catch(t){throw n.error("Error while trying to clean a daemon: %j %s",e,t),t}}async listKeys(e=this._defaultGroup){try{const t=this.generatePrefix(e);n.info("Fetching children of %s",t);const a=await this.cache.directChildrenOfPrefix(t);return n.debug("Found : %j",a),a}catch(t){throw n.error("Error while trying to list daemon keys: %j %s",e,t),t}}async list(e=this._defaultGroup){try{const t=this.generatePrefix(e);n.info("Fetching children of %s",t);const a=(await this.listKeys(e)).map((t=>this.stat(this.pathToKey(this.generatePrefix(e)+t))));return await Promise.all(a)}catch(t){throw n.error("Error while trying to list daemon states: %j %s",e,t),t}}async updateMessage(e,t){const a=this.keyToPath(e);return Ue.updateMessage(this.cache,a,t)}async stat(e){const t=this.keyToPath(e);return Ue.stat(this.cache,t)}async statFromPublicToken(e){t.notNullOrUndefined(this.jwtRatchet,"You must set jwtRatchet if you wish to use public tokens"),t.notNullOrUndefined(e,"publicToken");const a=await this.jwtRatchet.decodeToken(e),s=a?.daemonKey;return s?this.stat(s):null}async abort(e){return Ue.abort(this.cache,this.keyToPath(e))}async error(e,t){return Ue.error(this.cache,this.keyToPath(e),t)}async finalize(e,t){return Ue.finalize(this.cache,this.keyToPath(e),t)}}class Re{provider;cfg;static defaultDaoConfig(){return{guidCreateFunction:a.createType4Guid,guidFieldName:"guid",createdEpochMSFieldName:"createdEpochMS",updatedEpochMSFieldName:"updatedEpochMS",createdUtcTimestampFieldName:null,updatedUtcTimestampFieldName:null}}constructor(e,a=Re.defaultDaoConfig()){this.provider=e,this.cfg=a,t.notNullOrUndefined(e,"provider"),t.notNullOrUndefined(a,"cfg"),t.notNullOrUndefined(a.guidCreateFunction,"cfg.guidCreateFunction"),t.notNullOrUndefined(a.guidFieldName,"cfg.guidFieldName")}async fetchAll(){return(await this.provider.loadDatabase()).items||[]}async resetDatabase(){await this.provider.storeDatabase({items:[],lastModifiedEpochMS:Date.now()})}async removeItems(e){let t=await this.fetchAll();return e&&(t=t.filter((t=>!e.includes(t[this.cfg.guidFieldName]))),await this.provider.storeDatabase({items:t,lastModifiedEpochMS:Date.now()})),t}async store(t){let a=await this.fetchAll();return 
t&&(t[this.cfg.guidFieldName]=t[this.cfg.guidFieldName]||this.cfg.guidCreateFunction(),this.cfg.createdEpochMSFieldName&&(t[this.cfg.createdEpochMSFieldName]=t[this.cfg.createdEpochMSFieldName]||Date.now()),this.cfg.createdUtcTimestampFieldName&&(t[this.cfg.createdUtcTimestampFieldName]=t[this.cfg.createdUtcTimestampFieldName]||e.utc().toISO()),this.cfg.updatedEpochMSFieldName&&(t[this.cfg.updatedEpochMSFieldName]=Date.now()),this.cfg.updatedUtcTimestampFieldName&&(t[this.cfg.updatedUtcTimestampFieldName]=e.utc().toISO()),a=a.filter((e=>e[this.cfg.guidFieldName]!==t[this.cfg.guidFieldName])),a.push(t),await this.provider.storeDatabase({items:a,lastModifiedEpochMS:Date.now()})),t}async fetchById(e){return(await this.fetchAll()).find((t=>t[this.cfg.guidFieldName]===e))}async searchByField(e,a){t.notNullOrUndefined(e,"fieldDotPath"),t.notNullOrUndefined(a,"fieldValue");const s={};return s[e]=a,this.searchByFieldMap(s)}async searchByFieldMap(e){t.notNullOrUndefined(e,"input");let a=await this.fetchAll();return a=a.filter((t=>{let a=!0;return Object.keys(e).forEach((s=>{const n=c.findValueDotPath(t,s),i=e[s];a=a&&n===i})),a})),a}}class Ae{s3CacheRatchet;keyName;constructor(e,a){this.s3CacheRatchet=e,this.keyName=a,t.notNullOrUndefined(e,"s3CacheRatchet"),t.notNullUndefinedOrOnlyWhitespaceString(e.getDefaultBucket(),"s3CacheRatchet.defaultBucket"),t.notNullUndefinedOrOnlyWhitespaceString(a,"keyName")}async storeDatabase(e){const t=e||{items:[],lastModifiedEpochMS:null};t.lastModifiedEpochMS=Date.now();return!!await this.s3CacheRatchet.writeObjectToCacheFile(this.keyName,t)}async loadDatabase(){return await this.s3CacheRatchet.fetchCacheFileAsObject(this.keyName)||{items:[],lastModifiedEpochMS:Date.now()}}}class Me{cache;prefix;constructor(e,t){if(this.cache=e,this.prefix=t,!e)throw new Error("cache object may not be null");if(!e.getDefaultBucket())throw new Error("Supplied cache must have default bucket set")}buildPathPrefix(e){let t="";return this.prefix&&(t+=this.prefix),e&&(t+=e),t}buildFullPath(e,t){let a=this.buildPathPrefix(t);return a.length>0&&(a+="/"),a+=e+".json",a}async exists(e,t){const a=this.buildFullPath(e,t);return n.debug("Check file existence : %s",a),this.cache.fileExists(a)}async fetch(e,t){const a=this.buildFullPath(e,t);n.debug("Fetching : %s",a);const s=await this.cache.fetchCacheFileAsObject(a);return s.id=e,s.path=t,s}async store(e,t){e.id=e.id||a.createType4Guid(),e.lastModifiedEpochMS=(new Date).getTime();const s=this.buildFullPath(e.id,t);n.debug("Storing : %s",s),await this.cache.writeObjectToCacheFile(s,e);return await this.fetch(e.id,t)}async listItems(e){const t=this.buildPathPrefix(e);n.debug("Listing : %s",t);return await this.cache.directChildrenOfPrefix(t)}async fetchItemsInPath(e){const t=this.buildPathPrefix(e);n.debug("Full fetch of : %s",t);const a=(await this.listItems(e)).map((t=>this.fetch(t,e)));return await Promise.all(a)}async delete(e,t){const a=this.buildFullPath(e,t);n.debug("Deleting : %s",a);return null!=await this.cache.removeCacheFile(a)}}class Be{awsDDB;constructor(e){if(this.awsDDB=e,!e)throw"awsDDB may not be null"}get dynamoDBDocumentClient(){return this.awsDDB}getDDB(){return this.awsDDB}async tableIsEmpty(e){const t={TableName:e,Limit:1};return 0===(await this.throughputSafeScanOrQuery((e=>this.scanPromise(e)),t)).Items.length}async scanPromise(e){return this.awsDDB.send(new L(e))}async queryPromise(e){return this.awsDDB.send(new V(e))}async throughputSafeScanOrQuery(e,t,a,i){let o=null;if(t){let c=i??0;do{c++;try{o=await 
e(t)}catch(e){if(!Be.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e))throw e;{const e=1e3*Math.pow(2,c);n.debug("Exceeded scan throughput for %j : Try %d of %d (Waiting %d ms)",t,c,a,e),await r.wait(e),c++}}}while(!o&&(!a||c<a));o||s.throwFormattedErr("throughputSafeScan failed - tried %d times, kept running into throughput exceeded : %j",a,t)}return o}async fullyExecuteQueryCount(e,t=0){try{e.Select="COUNT",n.debug("Executing count query : %j",e);const a={count:0,scannedCount:0,pages:0},s=(new Date).getTime();let i=null;const o=e.Limit;e.Limit=null;do{i=await this.throughputSafeScanOrQuery((e=>this.queryPromise(e)),e),a.count+=i.Count,a.scannedCount+=i.ScannedCount,a.pages++,e.ExclusiveStartKey=i.LastEvaluatedKey,await r.wait(t),n.silly("Rval is now %j",a),o&&a.count>=o&&e.ExclusiveStartKey&&(n.info("Aborting query since hit limit of %d",o),e.ExclusiveStartKey=null)}while(e.ExclusiveStartKey);const c=(new Date).getTime();return n.debug("Finished, returned %j in %s for %j",a,l.formatMsDuration(c-s,!0),e),a}catch(t){return n.error("Failed with %s, q: %j",t,e,t),null}}async fullyExecuteQuery(e,t=0,a=null){const s=[];return await this.fullyExecuteProcessOverQuery(e,(async e=>{s.push(e)}),t,a),s}async fullyExecuteProcessOverQuery(e,t,a=0,s=null){let i=0;try{n.debug("Executing query : %j",e);const o=(new Date).getTime();n.debug("Pulling %j",e);let c=await this.throughputSafeScanOrQuery((e=>this.queryPromise(e)),e);for(let e=0;e<c.Items.length;e++)await t(c.Items[e]),i++;let u=0,h=0;for(;c.LastEvaluatedKey&&(null===s||i<s)&&!e.Limit;){n.silly("Found more rows - requery with key %j",c.LastEvaluatedKey),e.ExclusiveStartKey=c.LastEvaluatedKey,c=await this.throughputSafeScanOrQuery((e=>this.queryPromise(e)),e);for(let e=0;e<c.Items.length;e++)await t(c.Items[e]),i++;n.silly("Have processed %d items",i),u++,h+=0===c.Count?1:0,await r.wait(a)}const d=(new Date).getTime();n.debug("Finished, processed %d rows in %s for %j (%d blank pages, %d total pages)",i,l.formatMsDuration(d-o,!0),e,h,u)}catch(t){n.error("Failed with %s, q: %j",t,e,t)}return i}async fullyExecuteScanCount(e,t=0){try{e.Select="COUNT";const a={count:0,scannedCount:0,pages:0};n.debug("Executing scan count : %j",e);const s=(new Date).getTime();let i=null;const o=e.Limit;e.Limit=null;do{i=await this.throughputSafeScanOrQuery((e=>this.scanPromise(e)),e),a.count+=i.Count,a.scannedCount+=i.ScannedCount,a.pages++,e.ExclusiveStartKey=i?.LastEvaluatedKey,await r.wait(t),n.silly("Rval is now %j",a),o&&a.count>=o&&e.ExclusiveStartKey&&(n.info("Aborting scan since hit limit of %d",o),e.ExclusiveStartKey=null)}while(e.ExclusiveStartKey);const c=(new Date).getTime();return n.debug("Finished, returned %j in %s for %j",a,l.formatMsDuration(c-s,!0),e),a}catch(t){return n.error("Failed with %s, q: %j",t,e,t),null}}async fullyExecuteScan(e,t=0,a=null){const s=[];return await this.fullyExecuteProcessOverScan(e,(async e=>{s.push(e)}),t,a),s}async fullyExecuteProcessOverScan(e,t,a=0,s=null){let i=0;try{n.debug("Executing scan : %j",e);const o=(new Date).getTime();n.debug("Pulling %j",e);let c=await this.throughputSafeScanOrQuery((e=>this.scanPromise(e)),e);for(let e=0;e<c.Items.length;e++)await t(c.Items[e]),i++;for(;c.LastEvaluatedKey&&(null===s||i<s)&&!e.Limit;){n.silly("Found more rows - requery with key %j",c.LastEvaluatedKey),e.ExclusiveStartKey=c.LastEvaluatedKey,c=await this.throughputSafeScanOrQuery((e=>this.scanPromise(e)),e);for(let e=0;e<c.Items.length;e++)await t(c.Items[e]),i++;n.silly("Rval is now %d items",i),await 
r.wait(a)}const u=(new Date).getTime();n.debug("Finished, processed %d results in %s for %j",i,l.formatMsDuration(u-o,!0),e)}catch(t){n.error("Failed with %s, q: %j",t,e,t)}return i}async writeAllInBatches(e,t,a){if(!a||a<2)throw new Error("Batch size needs to be at least 2, was "+a);let s=0;if(t&&t.length>0){let i=[];for(t.forEach((t=>{i.push({PutRequest:{Item:t,ReturnConsumedCapacity:"TOTAL",TableName:e}})})),n.debug("Processing %d batch items to %s",i.length,e);i.length>0;){const t=i.slice(0,Math.min(i.length,a));i=i.slice(t.length);const o={RequestItems:{},ReturnConsumedCapacity:"TOTAL",ReturnItemCollectionMetrics:"SIZE"};o.RequestItems[e]=t;let c=1,l=!1,u=null;for(;!l&&c<7;){try{u=await this.awsDDB.send(new _(o))}catch(e){if(!Be.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e))throw e;n.info("Caught ProvisionedThroughputExceededException - retrying delete"),u={UnprocessedItems:o.RequestItems}}if(u&&u.UnprocessedItems&&u.UnprocessedItems[e]&&u.UnprocessedItems[e].length>0){const t=Math.pow(2,c);n.warn("Found %d unprocessed items. Backing off %d seconds and trying again",u.UnprocessedItems[e].length,t),await r.wait(1e3*t),c++,o.RequestItems[e]=u.UnprocessedItems[e]}else l=!0}u&&u.UnprocessedItems&&u.UnprocessedItems[e]&&u.UnprocessedItems[e].length>0?(n.error("After 6 tries there were still %d unprocessed items",u.UnprocessedItems[e].length),s+=t.length-u.UnprocessedItems[e].length,n.warn("FIX Unprocessed : %j",u.UnprocessedItems)):s+=t.length}}return s}async fetchFullObjectsMatchingKeysOnlyIndexQuery(e,a,s=25){t.notNullOrUndefined(e),t.notNullOrUndefined(e.TableName),t.notNullOrUndefined(a),t.true(a.length>0);const n=await this.fullyExecuteQuery(e),i=Be.stripAllToKeysOnly(n,a);return await this.fetchAllInBatches(e.TableName,i,s)}async fetchAllInBatches(e,t,a){if(!a||a<2||a>100)throw new Error("Batch size needs to be at least 2 and no more than 100, was "+a);let s=[];const i=[];let o=Object.assign([],t);for(;o.length>0;){const t=o.slice(0,Math.min(o.length,a));o=o.slice(t.length);const s={};s[e]={Keys:t};const n={RequestItems:s,ReturnConsumedCapacity:"TOTAL"};i.push(n)}n.debug("Created %d batches",i.length);for(let t=0;t<i.length;t++){i.length>1&&n.info("Processing batch %d of %d",t+1,i.length);const a=i[t];let o=1;do{n.silly("Pulling %j",a);const t=await this.awsDDB.send(new G(a));s=s.concat(t.Responses[e]),t.UnprocessedKeys&&t.UnprocessedKeys[e]&&t.UnprocessedKeys[e].Keys.length>0&&o<15&&(n.silly("Found %d unprocessed, waiting",t.UnprocessedKeys[e].Keys),await r.wait(1e3*Math.pow(2,o)),o++),a.RequestItems=t.UnprocessedKeys}while(!a.RequestItems&&a.RequestItems[e].Keys.length>0)}return s}async deleteAllInBatches(e,t,a){if(!a||a<2)throw new Error("Batch size needs to be at least 2, was "+a);let s=0;if(t&&t.length>0){let i=[];for(t.forEach((t=>{i.push({DeleteRequest:{Key:t,ReturnConsumedCapacity:"TOTAL",TableName:e}})})),n.debug("Processing %d DeleteBatch items to %s",i.length,e);i.length>0;){const t=i.slice(0,Math.min(i.length,a));i=i.slice(t.length);const o={RequestItems:{},ReturnConsumedCapacity:"TOTAL",ReturnItemCollectionMetrics:"SIZE"};o.RequestItems[e]=t;let c=1,l=!1,u=null;for(;!l&&c<7;){try{u=await this.awsDDB.send(new _(o))}catch(e){if(!Be.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e))throw e;n.info("Caught ProvisionedThroughputExceededException - retrying delete"),u={UnprocessedItems:o.RequestItems}}if(u&&u.UnprocessedItems&&u.UnprocessedItems[e]&&u.UnprocessedItems[e].length>0){const t=Math.pow(2,c);n.warn("Found %d unprocessed items. 
Backing off %d seconds and trying again",u.UnprocessedItems[e].length,t),await r.wait(1e3*t),c++,o.RequestItems[e]=u.UnprocessedItems[e]}else l=!0}u&&u.UnprocessedItems&&u.UnprocessedItems[e]&&u.UnprocessedItems[e].length>0?(n.error("After 6 tries there were still %d unprocessed items",u.UnprocessedItems[e].length),s+=t.length-u.UnprocessedItems[e].length,n.warn("FIX Unprocessed : %j",u.UnprocessedItems)):s+=t.length,n.debug("%d Remain, DeleteBatch Results : %j",i.length,u)}}return s}async simplePut(e,t,a=3){let s=null,i=0;const o={Item:t,ReturnConsumedCapacity:"TOTAL",TableName:e};for(;!s&&i<a;)try{s=await this.awsDDB.send(new W(o))}catch(e){if(!Be.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e))throw e;{const e=1e3*Math.pow(2,i);n.debug("Exceeded write throughput for %j : Try %d of %d (Waiting %d ms)",o,i,a,e),await r.wait(e),i++}}return s||n.warn("Unable to write %j to DDB after %d tries, giving up",o,a),s}async simplePutOnlyIfFieldIsNullOrUndefined(e,t,a){let s=!1;const i={Item:t,ReturnConsumedCapacity:"TOTAL",ConditionExpression:"attribute_not_exists(#fieldName) OR #fieldName = :null ",ExpressionAttributeNames:{"#fieldName":a},ExpressionAttributeValues:{":null":null},TableName:e};try{const e=await this.awsDDB.send(new W(i));n.silly("Wrote : %j",e),s=!0}catch(o){if(Be.objectIsErrorWithProvisionedThroughputExceededExceptionCode(o))n.debug("Exceeded write throughput for %j : (Waiting 2000 ms)",i),await r.wait(2e3),s=await this.simplePutOnlyIfFieldIsNullOrUndefined(e,t,a);else{if(!(o&&o instanceof z))throw o;n.debug("Failed to write %j due to null field failure"),s=!1}}return s}async simplePutWithCollisionAvoidance(e,a,s,i,o=null,c=3){t.true(s&&s.length>0&&s.length<3,"You must pass 1 or 2 key names");let l=null,u=0;const h={"#key0":s[0]},d={":key0":a[s[0]]};let m="#key0 <> :key0";s.length>1&&(m+=" AND #key1 <> :key1",h["#key1"]=s[1],d[":key1"]=a[s[1]]);const f={Item:a,ReturnConsumedCapacity:"TOTAL",ConditionExpression:m,ExpressionAttributeNames:h,ExpressionAttributeValues:d,TableName:e};let g=0;for(;!l&&u<c&&(!o||g<o);)try{l=await this.awsDDB.send(new W(f))}catch(e){if(Be.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e)){u++;const e=1e3*Math.pow(2,u);n.debug("Exceeded write throughput for %j : Try %d of %d (Waiting %d ms)",f,u,c,e),await r.wait(e)}else{if(!(e&&e instanceof z))throw e;{let e=Object.assign({},f.Item);n.info("Failed to write %j due to collision - adjusting and retrying",e),e=i(e),f.Item=e,f.ExpressionAttributeValues[":key0"]=e[s[0]],s.length>1&&(f.ExpressionAttributeValues[":key1"]=e[s[1]]),g++}}}return l&&g>0&&n.info("After adjustment, wrote %j as %j",a,f.Item),l||n.warn("Unable to write %j to DDB after %d provision tries and %d adjusts, giving up",f,u,g),l?f.Item:null}async simpleGet(e,t,a=3){let s=null,i=0;const o={TableName:e,Key:t};for(;!s&&i<a;)try{s=await this.awsDDB.send(new q(o))}catch(e){if(!Be.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e))throw e;{const e=1e3*Math.pow(2,i);n.debug("Exceeded read throughput for %j : Try %d of %d (Waiting %d ms)",o,i,a,e),await r.wait(e),i++}}s||n.warn("Unable to read %j from DDB after %d tries, giving up",o,a);return s&&s.Item?Object.assign({},s.Item):null}static objectIsErrorWithProvisionedThroughputExceededExceptionCode(e){return!!e&&e instanceof $}async simpleGetWithCounterDecrement(e,t,a,s,i=3){let o=null,c=0;const l={TableName:e,Key:t,UpdateExpression:"set #counter = 
#counter-:decVal",ExpressionAttributeNames:{"#counter":a},ExpressionAttributeValues:{":decVal":1,":minVal":0},ConditionExpression:"#counter > :minVal",ReturnValues:"ALL_NEW"};let u=!1;for(;!o&&c<i&&!u;)try{o=await this.awsDDB.send(new J(l))}catch(e){if(Be.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e)){const e=1e3*Math.pow(2,c);n.debug("Exceeded update throughput for %j : Try %d of %d (Waiting %d ms)",l,c,i,e),await r.wait(e),c++}else{if(!(e&&e instanceof z))throw e;n.info("Cannot fetch requested row (%j) - the update check failed",t),u=!0}}o||u||n.warn("Unable to update %j from DDB after %d tries, giving up",l,i);const h=o&&o.Attributes?Object.assign({},o.Attributes):null;return s&&h&&0===h[a]&&(n.info("Delete on 0 specified, removing"),await this.simpleDelete(e,t)),h}async simpleDelete(e,t){const a={TableName:e,Key:t};return await this.awsDDB.send(new Q(a))}async atomicCounter(e,t,a,s=1){const n={TableName:e,Key:t,UpdateExpression:"SET #counterFieldName = #counterFieldName + :inc",ExpressionAttributeNames:{"#counterFieldName":a},ExpressionAttributeValues:{":inc":s},ReturnValues:"UPDATED_NEW"},i=await this.awsDDB.send(new J(n));return u.safeNumber(i.Attributes[a])}static cleanObject(e){if(e){const t=[];Object.keys(e).forEach((a=>{const s=e[a];""===s?t.push(a):s instanceof Object&&Be.cleanObject(s)})),n.silly("Removing keys : %j",t),t.forEach((t=>{delete e[t]}))}}static stripToKeysOnly(e,t){let a=null;return e&&t&&t.length>0&&(a={},t.forEach((t=>{e[t]||s.throwFormattedErr("Failed key extraction on %j - missing %s",e,t),a[t]=e[t]}))),a}static stripAllToKeysOnly(e,t){return e.map((e=>Be.stripToKeysOnly(e,t)))}}class Ke{awsDDB;constructor(e){if(this.awsDDB=e,!e)throw"awsDDB may not be null"}async deleteTable(e,a=!0){t.notNullOrUndefined(e);const s={TableName:e};n.debug("Deleting ddb table %s",e);const i=await this.awsDDB.send(new H(s));return a&&(n.debug("Table marked for delete, waiting for deletion"),await this.waitForTableDelete(e)),i}async createTable(e,a=!0,i=!1){t.notNullOrUndefined(e),t.notNullOrUndefined(e.TableName),n.debug("Creating new table : %j",e);await this.tableExists(e.TableName)&&(i?(n.debug("Table %s exists and replace specified - deleting",e.TableName),await this.deleteTable(e.TableName)):s.throwFormattedErr("Cannot create table %s - exists already and replace not specified",e.TableName));const r=await this.awsDDB.send(new Y(e));return a&&(n.debug("Table created, awaiting ready"),await this.waitForTableReady(e.TableName)),r}async waitForTableReady(e){let t=!0,a=await this.safeDescribeTable(e);for(;a&&a.Table&&"ACTIVE"!==a.Table.TableStatus;)n.silly("Table not ready - waiting 2 seconds"),await r.wait(2e3),a=await this.safeDescribeTable(e);return a||a.Table||(n.warn("Cannot wait for %s to be ready - table does not exist",e),t=!1),t}async waitForTableDelete(e){let t=await this.safeDescribeTable(e);for(;t;)n.silly("Table %s still exists, waiting 2 seconds (State is %s)",e,t.Table.TableStatus),await r.wait(2e3),t=await this.safeDescribeTable(e)}async tableExists(e){return!!await this.safeDescribeTable(e)}async listAllTables(){const e={};let t=[];do{const a=await this.awsDDB.send(new Z(e));t=t.concat(a.TableNames),e.ExclusiveStartTableName=a.LastEvaluatedTableName}while(e.ExclusiveStartTableName);return t}async safeDescribeTable(e){try{return await this.awsDDB.send(new X({TableName:e}))}catch(e){if(e instanceof ee)return null;throw e}}async 
copyTable(e,a,n,i){if(t.notNullUndefinedOrOnlyWhitespaceString(e,"srcTableName"),t.notNullUndefinedOrOnlyWhitespaceString(a,"dstTableName"),i)throw s.fErr("Cannot copy %s to %s - copy data not supported yet",e,a);const r=await this.safeDescribeTable(e);if(await this.tableExists(a))throw s.fErr("Cannot copy to %s - table already exists",a);if(!r)throw s.fErr("Cannot copy %s - doesnt exist",e);r.Table.AttributeDefinitions,r.Table.KeySchema,r.Table.GlobalSecondaryIndexes;const o=Object.assign({},n||{},{AttributeDefinitions:r.Table.AttributeDefinitions,TableName:a,KeySchema:r.Table.KeySchema,LocalSecondaryIndexes:r.Table.LocalSecondaryIndexes,GlobalSecondaryIndexes:r.Table.GlobalSecondaryIndexes.map((e=>{const t=e;return 0!==t.ProvisionedThroughput?.WriteCapacityUnits&&0!==t.ProvisionedThroughput?.ReadCapacityUnits||(t.ProvisionedThroughput=void 0),t})),BillingMode:r.Table.BillingModeSummary.BillingMode,ProvisionedThroughput:"PROVISIONED"===r.Table.BillingModeSummary.BillingMode?r.Table.ProvisionedThroughput:void 0,StreamSpecification:r.Table.StreamSpecification,SSESpecification:r.Table.SSEDescription,Tags:void 0,TableClass:r.Table.TableClassSummary?.TableClass,DeletionProtectionEnabled:r.Table.DeletionProtectionEnabled});return await this.awsDDB.send(new Y(o))}}class Le{spots;buckets;separator;alphabet;_allSlots;constructor(e=3,s=16,n="_",i="0123456789ABCDEF"){this.spots=e,this.buckets=s,this.separator=n,this.alphabet=i,t.true(e>0,"Spots must be larger than 0"),t.true(s>1,"Buckets must be larger than 1"),t.notNullOrUndefined(a.trimToNull(i),"Alphabet may not be null or empty"),t.true(a.allUnique(i),"Alphabet must be unique"),t.true(a.stringContainsOnlyAlphanumeric(i),"Alphabet must be alphanumeric");const r=Math.pow(i.length,e);t.true(s<r,"Buckets must be less than permutations ("+s+" / "+r+")"),t.notNullOrUndefined(a.trimToNull(this.separator),"Separator must be nonnull and nonempty");const o=a.allPermutationsOfLength(e,i);this._allSlots=o.slice(0,s)}get allBuckets(){return Object.assign([],this._allSlots)}get randomBucket(){return this._allSlots[Math.floor(Math.random()*this.buckets)]}allSpreadValues(e){t.notNullOrUndefined(a.trimToNull(e),"Cannot spread null/empty value");return this._allSlots.map((t=>e+this.separator+t))}allSpreadValuesForArray(e){t.true(e&&e.length>0,"Cannot spread null/empty array");let a=[];return e.forEach((e=>{a=a.concat(this.allSpreadValues(e))})),a}addSpreader(e){return t.notNullOrUndefined(a.trimToNull(e),"Cannot spread null/empty value"),e+this.separator+this.randomBucket}extractBucket(e){t.notNullOrUndefined(a.trimToNull(e),"Cannot extract from null or empty value");const n=e.length-this.spots;return(n<0||e.charAt(n)!==this.separator)&&s.throwFormattedErr("Cannot extract bucket, not created by this spreader (missing %s at location %d)",this.separator,n),e.substring(n)}removeBucket(e){t.notNullOrUndefined(a.trimToNull(e),"Cannot extract from null or empty value");const n=e.length-this.spots;return(n<0||e.charAt(n)!==this.separator)&&s.throwFormattedErr("Cannot remove bucket, not created by this spreader (missing %s at location %d)",this.separator,n),e.substring(0,n)}}class Ve{region;availabilityZone;ec2;ec2InstanceConnect;constructor(e="us-east-1",t="us-east-1a"){this.region=e,this.availabilityZone=t,this.ec2=new ae({region:e}),this.ec2InstanceConnect=new re({region:e})}get eC2Client(){return this.ec2}get eC2InstanceConnectClient(){return this.ec2InstanceConnect}async stopInstance(e,t=0){let a=!0;try{const a={InstanceIds:[e],DryRun:!1};n.info("About to stop 
instances : %j",a),await this.ec2.send(new se(a)),n.info("Stop instance command sent, waiting on shutdown");let s=await this.describeInstance(e);if(t>0){const a=(new Date).getTime();for(;s&&16!==s.State.Code&&(new Date).getTime()-a<t;)n.debug("Instance status is %j - waiting for 5 seconds (up to %s)",s.State,l.formatMsDuration(t)),await r.wait(5e3),s=await this.describeInstance(e)}}catch(t){n.error("Failed to stop instance %s : %s",e,t,t),a=!1}return a}async launchInstance(e,t=0){let a=!0;try{const a={InstanceIds:[e],DryRun:!1};n.info("About to start instance : %j",a),await this.ec2.send(new ne(a)),n.info("Start instance command sent, waiting on startup");let s=await this.describeInstance(e);if(t>0){const a=(new Date).getTime();for(;s&&16!==s.State.Code&&(new Date).getTime()-a<t;)n.debug("Instance status is %j - waiting for 5 seconds (up to %s)",s.State,l.formatMsDuration(t)),await r.wait(5e3),s=await this.describeInstance(e)}s&&s.PublicIpAddress&&(n.info("Instance address is %s",s.PublicIpAddress),n.info("SSH command : ssh -i path_to_pem_file ec2-user@%s",s.PublicIpAddress))}catch(t){n.error("Failed to start instance %s : %s",e,t,t),a=!1}return a}async describeInstance(e){const t=await this.listAllInstances([e]);return 1===t.length?t[0]:null}async listAllInstances(e=[]){let t=[];const a={NextToken:null};e&&e.length>0&&(a.InstanceIds=e);do{n.debug("Pulling instances... (%j)",a);const e=await this.ec2.send(new ie(a));e.Reservations.forEach((e=>{t=t.concat(e.Instances)})),a.NextToken=e.NextToken}while(a.NextToken);return n.debug("Finished pulling instances (found %d)",t.length),t}async sendPublicKeyToEc2Instance(e,t,a){const s=a||"ec2-user",n={InstanceId:e,AvailabilityZone:this.availabilityZone,InstanceOSUser:s,SSHPublicKey:t};return await this.ec2InstanceConnect.send(new oe(n))}}var _e;!function(e){e.InUse="InUse",e.MinimumAge="MinimumAge"}(_e||(_e={}));class Ge{ecr;static ECR_IMAGE_MINIMUM_AGE_DAYS=60;static ECR_REPOSITORY_MINIMUM_IMAGE_COUNT=600;constructor(e){this.ecr=e,t.notNullOrUndefined(e,"ecr")}async findAllUsedImages(e){const t=new Set;for(let a=0;a<e.length;a++){(await e[a].findUsedImageUris()).forEach((e=>t.add(e)))}return Array.from(t)}async performCleaning(e){n.info("Starting cleaning with options : %j",e),n.info("Finding in-use images");const t=(await this.findAllUsedImages(e.usedImageFinders||[])).map((e=>e.substring(e.lastIndexOf(":")+1)));n.info("Found %d images in use: %j",t.length,t);const a=await this.fetchRegistryId();n.info("Processing registry %s",a);const s=await this.fetchAllRepositoryDescriptors(a);n.info("Found repos : %j",s);const i=[];for(let a=0;a<s.length;a++){n.info("Processing repo %d of %d",a,s.length);try{const n=await this.cleanRepository(s[a],t,e);i.push(n)}catch(e){n.error("Failed to process repo : %j : %s",s[a],e,e)}}return{registryId:a,repositories:i,options:e}}async cleanRepository(e,t,s){n.info("Cleaning repository: %j",e);const i=await this.fetchAllImageDescriptors(e);n.info("Found images: %d : %j",i.length,i);const r=[],o=[];i.forEach((e=>{t.map((t=>e.imageTags.includes(t))).find((e=>e))?o.push({image:e,reason:_e.InUse}):r.push(e)})),n.info("Found %d to purge and %d to keep",r.length,o.length);const c=r.map((e=>e.imageSizeInBytes)).reduce(((e,t)=>e+t),0);n.info("Found %s total bytes to purge : %d",a.formatBytes(c),c);const l={registryId:e.registryId,repositoryName:e.repositoryName,imageIds:r.map((e=>({imageDigest:e.imageDigest,imageTag:e.imageTags[0]})))};if(n.info("Purge command : %j",l),s.dryRun)n.info("Dry run specd, stopping");else 
if(l.imageIds.length>0){n.info("Purging unused images");const e=await this.ecr.send(new ce(l));n.info("Response was : %j",e)}else n.info("Skipping - nothing to purge in this repo");return{repository:e,purged:r,retained:o,totalBytesRecovered:c}}async fetchAllImageDescriptors(e){t.notNullOrUndefined(e,"repo");let s=[];const n={registryId:e.registryId,repositoryName:e.repositoryName};let i=null;do{i=await this.ecr.send(new le(n)),s=s.concat(i.imageDetails),n.nextToken=i.nextToken}while(a.trimToNull(n.nextToken));return s}async fetchAllRepositoryDescriptors(e){let t=[];const s={registryId:e};let n=null;do{n=await this.ecr.send(new ue(s)),t=t.concat(n.repositories),s.nextToken=n.nextToken}while(a.trimToNull(s.nextToken));return t}async fetchAllRepositoryNames(e){return(await this.fetchAllRepositoryDescriptors(e)).map((e=>e.repositoryName))}async fetchRegistryId(){return(await this.ecr.send(new he({}))).registryId}}class We{batch;constructor(e){this.batch=e,t.notNullOrUndefined(e,"batch")}async findUsedImageUris(){const e=await this.listAllJobDefinitions(!1);n.info("Found %d jobs",e.length);const t=e.map((e=>e.containerProperties.image)).filter((e=>a.trimToNull(e)));return Array.from(new Set(t))}async listAllJobDefinitions(e){let t=[];const a={nextToken:null,status:e?void 0:"ACTIVE"};do{const e=await this.batch.send(new f(a));t=t.concat(e.jobDefinitions),a.nextToken=e.nextToken}while(a.nextToken);return t}}class qe{lambda;constructor(e){this.lambda=e,t.notNullOrUndefined(e,"lambda")}async findUsedImageUris(){const e=new Set,t=await this.fetchFunctions();n.info("Found %d functions",t.length);for(let a=0;a<t.length;a++)if("Image"===t[a].PackageType){const s=await this.lambda.send(new de({FunctionName:t[a].FunctionName}));"ECR"===s.Code.RepositoryType&&s.Code.ImageUri&&e.add(s.Code.ImageUri)}else n.info("Skipping zip packaged function: %s",t[a].FunctionName);return Array.from(e)}async fetchFunctions(){let e=[];const t={};let s=null;do{s=await this.lambda.send(new me(t)),e=e.concat(s.Functions),t.Marker=s.NextMarker}while(a.trimToNull(t.Marker));return e}}class Je{providers;constructor(e){this.providers=e,t.notNullOrUndefined(e),t.true(e.length>0)}async fetchConfig(e){n.silly("CascadeEnvironmentServiceProvider fetch for %s",e);let t=null;for(let a=0;a<this.providers.length&&!t;a++)try{t=await this.providers[a].fetchConfig(e)}catch(e){n.error("Provider %d failed - trying next : %s",a,e,e),t=null}return t}}class Qe{envVarName;constructor(e){this.envVarName=e,t.notNullOrUndefined(e)}async fetchConfig(){n.silly("EnvVarEnvironmentServiceProvider fetch for %s",this.envVarName);let e=null;const t=process?process.env:global||{},i=a.trimToNull(t[this.envVarName]);if(i)try{e=JSON.parse(i)}catch(e){throw n.error("Failed to read env - null or invalid JSON : %s : %s",e,i,e),e}else s.throwFormattedErr("Could not find env var with name : %s",this.envVarName);return e}}class ze{provider;cfg;readPromiseCache=new Map;static defaultEnvironmentServiceConfig(){return{maxRetries:3,backoffMultiplierMS:500}}constructor(e,a=ze.defaultEnvironmentServiceConfig()){this.provider=e,this.cfg=a,t.notNullOrUndefined(e),t.notNullOrUndefined(a)}async getConfig(e){return n.silly("EnvService:Request to read config %s",e),this.readPromiseCache.has(e)||(n.silly("EnvService: Nothing in cache - adding"),this.readPromiseCache.set(e,this.getConfigUncached(e))),this.readPromiseCache.get(e)}async getConfigUncached(e){let t=1,a=null;for(;!a&&t<this.cfg.maxRetries;){t++,n.silly("Attempting fetch of %s",e);try{a=await 
this.provider.fetchConfig(e)}catch(a){const s=t*this.cfg.backoffMultiplierMS;n.info("Error attempting to fetch config %s (try %d of %d, waiting %s MS): %s",e,t,this.cfg.maxRetries,s,a,a),await r.wait(s)}}return a||s.throwFormattedErr("Was unable to fetch config %s even after %d retries",e,this.cfg.maxRetries),a}}class $e{value;constructor(e){this.value=e,t.notNullOrUndefined(e)}static fromRecord(e){const t=new Map;return Object.keys(e).forEach((a=>{t.set(a,e[a])})),new $e(t)}async fetchConfig(e){n.silly("FixedEnvironmentServiceProvider fetch for %s",e);return this.value.get(e)}}class He{cfg;ratchet;constructor(e){this.cfg=e,t.notNullOrUndefined(e),t.notNullOrUndefined(e.bucketName),t.notNullOrUndefined(e.region),t.true(!!e.s3Override||!!e.region,"You must set either region or S3Override");const a=e.s3Override||new A({region:e.region});this.ratchet=new Pe(a,e.bucketName)}async fetchConfig(e){const t=a.trimToEmpty(this.cfg.pathPrefix)+e+a.trimToEmpty(this.cfg.pathSuffix);n.silly("S3EnvironmentServiceProvider:Request to read config from : %s / %s",this.cfg.bucketName,t);const s=new i,r=await this.ratchet.fetchCacheFileAsObject(t);return s.log(),r}}class Ye{region;ssmEncrypted;ssm;constructor(e="us-east-1",a=!0){this.region=e,this.ssmEncrypted=a,t.notNullOrUndefined(e),t.notNullOrUndefined(a),this.ssm=new fe({region:this.region})}async fetchConfig(e){n.silly("SsmEnvironmentServiceProvider fetch for %s",e);const t={Name:e,WithDecryption:this.ssmEncrypted};let i=null,o=null;try{const e=await this.ssm.send(new ge(t));o=a.trimToNull(e?.Parameter?.Value)}catch(t){if(t instanceof pe){const t=n.warn("AWS could not find parameter %s - are you using the right AWS key?",e);throw new Error(t)}if(!((s.safeStringifyErr(t)||"").toLowerCase().indexOf("throttl")>-1))throw n.error("Final environment fetch error (cannot retry) : %s",t,t),t;n.warn("Throttled while trying to read parameters - waiting 1 second before allowing retry"),await r.wait(1e3)}if(o)try{i=JSON.parse(o)}catch(e){throw n.error("Failed to read env - null or invalid JSON : %s : %s",e,o,e),e}else s.throwFormattedErr("Could not find system parameter with name : %s in this account",e);return i}}class Ze{tableName;dynamoRatchet;constructor(e,t){this.tableName=e,this.dynamoRatchet=t}async checkCode(e,t,a){const s={code:e,context:t},n=await this.dynamoRatchet.simpleGet(this.tableName,s),i=n&&n.expiresEpochMS>Date.now();return i&&a&&await this.dynamoRatchet.simpleDelete(this.tableName,s),i}async storeCode(e){const t=await this.dynamoRatchet.simplePut(this.tableName,e);return t&&t.ConsumedCapacity.CapacityUnits>0}async createTableIfMissing(e){return null}}class Xe{provider;constructor(e){this.provider=e}static generateCode(e){t.notNullOrUndefined(e,"params"),t.notNullOrUndefined(e.context,"params.context"),t.notNullOrUndefined(e.length,"params.length"),t.notNullOrUndefined(e.alphabet,"params.alphabet");let a="";for(let t=0;t<e.length;t++)a+=e.alphabet.charAt(Math.floor(e.alphabet.length*Math.random()));return{code:a,context:e.context,tags:e.tags,expiresEpochMS:Date.now()+1e3*e.timeToLiveSeconds}}async createNewCode(e){const t=Xe.generateCode(e);return await this.provider.storeCode(t)?t:null}async checkCode(e,t,s){return await this.provider.checkCode(a.trimToEmpty(e),a.trimToEmpty(t),s)}}class 
et{s3CacheRatchet;keyName;constructor(e,a){this.s3CacheRatchet=e,this.keyName=a,t.notNullOrUndefined(e,"s3CacheRatchet"),t.notNullUndefinedOrOnlyWhitespaceString(e.getDefaultBucket(),"s3CacheRatchet.defaultBucket"),t.notNullUndefinedOrOnlyWhitespaceString(a,"keyName")}async fetchFile(){return await this.s3CacheRatchet.fetchCacheFileAsObject(this.keyName)||{data:[],lastModifiedEpochMS:Date.now()}}async updateFile(e){const t={data:e||[],lastModifiedEpochMS:Date.now()};t.data=t.data.filter((e=>e.expiresEpochMS>Date.now())),n.info("Updating code file to %s codes",t.data.length);return await this.s3CacheRatchet.writeObjectToCacheFile(this.keyName,t)}async checkCode(e,t,a){const s=await this.fetchFile(),i=s.data.find((a=>a?.code?.toUpperCase()===e?.toUpperCase()&&a?.context?.toUpperCase()===t?.toUpperCase()));if(i&&(a||i.expiresEpochMS<Date.now())){n.info("Stripping used/expired code from the database");const e=s.data.filter((e=>e!=i));await this.updateFile(e)}return!!i&&i.expiresEpochMS>Date.now()}async storeCode(e){const t=await this.fetchFile();t.data.push(e);return!!await this.updateFile(t.data)}}class tt{constructor(){}static applySetProfileEnvironmentalVariable(e){process.env?a.trimToNull(e)?process.env.AWS_PROFILE=e:s.throwFormattedErr("Cannot set profile to null/empty string"):s.throwFormattedErr("Cannot set profile - not in a node environment - process missing")}}class at{static isValidCronEvent(e){return e&&"aws.events"==e.source&&e.resources&&e.resources.length>0}static isValidSnsEvent(e){return e&&e.Records&&e.Records.length>0&&"aws:sns"==e.Records[0].EventSource}static isValidSqsEvent(e){return e&&e.Records&&e.Records.length>0&&"aws:sqs"==e.Records[0].eventSource}static isValidDynamoDBEvent(e){return e&&e.Records&&e.Records.length>0&&"aws:dynamodb"==e.Records[0].eventSource}static isValidS3Event(e){return e&&e.Records&&e.Records.length>0&&"aws:s3"==e.Records[0].eventSource}static isValidApiGatewayV2WithRequestContextEvent(e){return e&&e.rawPath&&e.requestContext&&e.routeKey}static isValidApiGatewayEvent(e){return e&&e.httpMethod&&e.path&&e.requestContext}static isValidApiGatewayAuthorizerEvent(e){return e&&e.authorizationToken&&e.methodArn}static isSingleCronEvent(e){return this.isValidCronEvent(e)&&at.isSingleEntryEvent(e,"resources")}static isSingleSnsEvent(e){return this.isValidSnsEvent(e)&&at.isSingleEntryEvent(e)}static isSingleDynamoDBEvent(e){return this.isValidDynamoDBEvent(e)&&at.isSingleEntryEvent(e)}static isSingleS3Event(e){return this.isValidS3Event(e)&&at.isSingleEntryEvent(e)}static isSingleEntryEvent(e,t="Records"){return e&&e[t]&&e[t]instanceof Array&&1===e[t].length}}class st{static isValidCronEvent(e){return at.isValidCronEvent(e)}static isValidSqsEvent(e){return at.isValidSqsEvent(e)}static isValidSnsEvent(e){return at.isValidSnsEvent(e)}static isValidDynamoDBEvent(e){return at.isValidDynamoDBEvent(e)}static isValidS3Event(e){return at.isValidS3Event(e)}static isValidApiGatewayV2WithRequestContextEvent(e){return at.isValidApiGatewayV2WithRequestContextEvent(e)}static isValidApiGatewayEvent(e){return at.isValidApiGatewayEvent(e)}static isValidApiGatewayAuthorizerEvent(e){return at.isValidApiGatewayAuthorizerEvent(e)}}class nt{route53;hostedZoneId;constructor(e,t){if(this.route53=e,this.hostedZoneId=t,!this.route53)throw"route53 may not be null"}get route53Client(){return this.route53}async changeCnameRecordTarget(e,t,a=this.hostedZoneId,s=600){n.info("Updating %s to point to %s",e,t);try{const 
i={ChangeBatch:{Changes:[{Action:"UPSERT",ResourceRecordSet:{Name:e,ResourceRecords:[{Value:t}],TTL:s,Type:"CNAME"}}]},HostedZoneId:a},r=await this.route53.send(new ye(i));n.debug("Updated domain result: %j",r);const o={Id:r.ChangeInfo.Id},c=await we({client:this.route53,maxWaitTime:300},o);if(n.debug("Wait responsed: %j",c),c.state===be.SUCCESS)return n.info("Updated %s to point to %s",e,a),!0}catch(a){n.warn("Error update CName for %s with value %s: %j",e,t,a)}return n.info("Cannot update %s to point to %s",e,t),!1}}class it{dynamo;tableName;constructor(e,a){this.dynamo=e,this.tableName=a,t.notNullOrUndefined(this.dynamo),t.notNullOrUndefined(this.tableName)}async readParameter(e,t){n.silly("Reading %s / %s from underlying db",e,t);const a={groupId:e,paramKey:t};return await this.dynamo.simpleGet(this.tableName,a)}async readAllParametersForGroup(e){const t={TableName:this.tableName,KeyConditionExpression:"groupId = :groupId",ExpressionAttributeValues:{":groupId":e}};return await this.dynamo.fullyExecuteQuery(t)}async writeParameter(e){return!!await this.dynamo.simplePut(this.tableName,e)}}class rt{wrapped;options={globalTTL:1,separator:".",prefix:"RuntimeEnv-",suffix:""};constructor(e,a){this.wrapped=e,t.notNullOrUndefined(this.wrapped,"wrapped"),t.notNullOrUndefined(global?.process?.env,'"process" not found - this only runs in Node, not the browser'),a&&(this.options=a),t.notNullOrUndefined(this.options.globalTTL,"this.options.globalTTL"),t.notNullOrUndefined(this.options.separator,"this.options.separator"),t.true(this.options.globalTTL>0,"this.options.globalTTL must be larger than 0")}generateName(e,t){return a.trimToEmpty(this.options.prefix)+e+a.trimToEmpty(this.options.separator)+t+a.trimToEmpty(this.options.suffix)}async readParameter(e,t){const n=a.trimToNull(process.env[this.generateName(e,t)]);n&&!a.canParseAsJson(n)&&s.throwFormattedErr("Cannot parse ENV override (%s / %s) as JSON - did you forget the quotes on a string?",e,t);return n?{groupId:e,paramKey:t,paramValue:n,ttlSeconds:this.options.globalTTL}:await this.wrapped.readParameter(e,t)}async readAllParametersForGroup(e){return this.wrapped.readAllParametersForGroup(e)}async writeParameter(e){return this.wrapped.writeParameter(e)}}class ot{data;constructor(e=Promise.resolve({})){this.data=e}async readParameter(e,t){n.silly("Reading %s / %s from underlying db",e,t);return(await this.data)[e+"::"+t]}async readAllParametersForGroup(e){const t=await this.data,a=[];return Object.keys(t).forEach((s=>{s.startsWith(e)&&a.push(t[s])})),a}async writeParameter(e){return(await this.data)[e.groupId+"::"+e.paramKey]=e,!0}}class ct{provider;cache=new Map;constructor(e){this.provider=e,t.notNullOrUndefined(this.provider)}async fetchParameter(e,t,a=null,s=!1){n.debug("Reading parameter %s / %s / Force : %s",e,t,s);const i=this.cache.get(ct.toCacheStoreKey(e,t));let r=null;const o=(new Date).getTime();if(!s&&i){const a=i.ttlSeconds?o-1e3*i.ttlSeconds:0;i.storedEpochMS>a&&(n.silly("Fetched %s / %s from cache",e,t),r=JSON.parse(i.paramValue))}if(!r){const a=await this.readUnderlyingEntry(e,t);a&&(this.addToCache(a),r=JSON.parse(a.paramValue))}return r=r||a,r}async fetchAllParametersForGroup(e){const t=await this.readUnderlyingEntries(e),a=new Map;return t.forEach((e=>{a.set(e.paramKey,JSON.parse(e.paramValue)),this.addToCache(e)})),a}async readUnderlyingEntry(e,t){return this.provider.readParameter(e,t)}async readUnderlyingEntries(e){return this.provider.readAllParametersForGroup(e)}async storeParameter(e,t,a,s){const 
n={groupId:e,paramKey:t,paramValue:JSON.stringify(a),ttlSeconds:s};return await this.provider.writeParameter(n),this.provider.readParameter(e,t)}static toCacheStoreKey(e,t){return e+":::"+t}addToCache(e){if(e){const t=(new Date).getTime(),a=Object.assign({storedEpochMS:t},e);this.cache.set(ct.toCacheStoreKey(e.groupId,e.paramKey),a)}}clearCache(){n.debug("Clearing runtime parameter cache"),this.cache=new Map}}class lt{config;constructor(e){t.notNullOrUndefined(e,"config"),this.config=e,this.config.maxNumThreads||(this.config.maxNumThreads=15),this.config.maxRetries||(this.config.maxRetries=5)}updateSrcPrefix(e){this.config.srcPrefix=e}updateDstPrefix(e){this.config.dstPrefix=e}async copyObject(e,t,a=!1){const s=e.replace(this.config.srcPrefix,this.config.dstPrefix);let i=!1,r=0;for(;!i&&r<this.config.maxRetries;){n.debug(`${r>0?`Retry ${r} `:""}${a?"Express":"Slow"} copying\n [${[this.config.srcBucket,e].join("/")} ---\x3e ${[this.config.dstBucket,s].join("/")}]`);try{if(a){const t={CopySource:encodeURIComponent([this.config.srcBucket,e].join("/")),Bucket:this.config.dstBucket,Key:s,MetadataDirective:"COPY"};await this.config.dstS3.send(new j(t))}else{const a=await this.config.srcS3.send(new D({Bucket:this.config.srcBucket,Key:e})),i={Bucket:this.config.dstBucket,Key:s,Body:a.Body,ContentLength:t},r=new B({client:this.config.dstS3,params:i,tags:[],queueSize:4,partSize:5242880,leavePartsOnError:!1});r.on("httpUploadProgress",(e=>{n.debug("Uploading : %s",e)})),await r.done()}i=!0}catch(t){n.warn(`Can't ${a?"express":"slow"} copy\n [${[this.config.srcBucket,e].join("/")} ---\x3e ${[this.config.dstBucket,s].join("/")}]: %j`,t),r++}}n.debug(`Finished ${a?"express":"slow"} copying\n [${[this.config.srcBucket,e].join("/")} ---\x3e ${[this.config.dstBucket,s].join("/")}]`)}async listObjects(e,t,a){n.info(`Scanning bucket [${[e,t].join("/")}]`);const s={Bucket:e,Prefix:t};let i=!0;const r={};for(;i;){const e=await a.send(new M(s));i=e.IsTruncated,e.Contents.forEach((e=>{r[e.Key]={Key:e.Key,LastModified:e.LastModified,ETag:e.ETag,Size:e.Size}})),i&&(s.ContinuationToken=e.NextContinuationToken)}return r}async startSyncing(){n.info(`Syncing [${this.config.srcBucket}/${this.config.srcPrefix}\n ---\x3e ${this.config.dstBucket}/${this.config.dstPrefix}]`);const e=async e=>{await this.copyObject(e.Key,e.Size)};let t=await this.compareSrcAndDst();return(t.needCopy.length>0||t.diff.length>0)&&(await r.runBoundedParallelSingleParam(e,t.needCopy,this,this.config.maxNumThreads),await r.runBoundedParallelSingleParam(e,t.diff,this,this.config.maxNumThreads),n.info("Verifying..."),t=await this.compareSrcAndDst(),n.debug("Compare result %j",t)),0===t.needCopy.length&&0===t.diff.length}async compareSrcAndDst(){const e=this.listObjects(this.config.srcBucket,this.config.srcPrefix,this.config.srcS3),t=this.listObjects(this.config.dstBucket,this.config.dstPrefix,this.config.dstS3),a=await e,s=await t,n={needCopy:[],existed:[],diff:[]};return await r.runBoundedParallelSingleParam((e=>{const t=a[e],i=e.replace(this.config.srcPrefix,this.config.dstPrefix),r=s.hasOwnProperty(i)?s[i]:void 0;r?t.Size===r.Size&&t.LastModified.getTime()<=r.LastModified.getTime()?n.existed.push(t):n.diff.push(t):n.needCopy.push(t)}),Object.keys(a),this,this.config.maxNumThreads),n}}class ut{static checkS3UrlForValidity(e){let t=!1;return e&&(t=e.startsWith("s3://")&&e.trim().length>5),t}static extractBucketFromURL(e){t.true(ut.checkS3UrlForValidity(e),"invalid s3 url");const a=e.indexOf("/",5);return a>0?e.substring(5,a):e.substring(5)}static 
extractKeyFromURL(e){t.true(ut.checkS3UrlForValidity(e),"invalid s3 url");const a=e.indexOf("/",5);return a>0?e.substring(a+1):null}}class ht{_ses;_archiveRatchet;archivePrefix;constructor(e,a,s){if(this._ses=e,this._archiveRatchet=a,this.archivePrefix=s,t.notNullOrUndefined(this._ses),a&&!a.getDefaultBucket())throw new Error("If archiveRatchet specified, must set a default bucket")}async archiveEmail(t,s){let i=null;if(this._archiveRatchet){n.debug("Archiving outbound email to : %j",t.destinationAddresses);let s=a.trimToEmpty(this.archivePrefix);s.endsWith("/")||(s+="/");const r=e.utc();s+="year="+r.toFormat("yyyy")+"/month="+r.toFormat("MM")+"/day="+r.toFormat("dd")+"/hour="+r.toFormat("HH")+"/"+r.toFormat("mm_ss__SSS"),s+=".json";try{i=await this._archiveRatchet.writeObjectToCacheFile(s,t)}catch(e){n.warn("Failed to archive email %s %j : %s",s,t,e)}}return i}get sesClient(){return this._ses}get archiveRatchet(){return this._archiveRatchet}async sendEmail(e){t.notNullOrUndefined(e,"RTS must be defined"),t.notNullOrUndefined(e.destinationAddresses,"Destination addresses must be defined");let a=null;const s=h.convertResolvedReadyToSendEmailToRaw(e),n={RawMessage:{Data:(new TextEncoder).encode(s)}};return a=await this._ses.send(new Te(n)),a}}class dt{sns;topicArn;constructor(e=new Ee({region:"us-east-1"}),a){this.sns=e,this.topicArn=a,t.notNullOrUndefined(this.sns,"sns"),t.notNullOrUndefined(this.topicArn,"topicArn")}get snsClient(){return this.sns}async sendMessage(e,t=!1){let a=null;try{const t=e||"NO-MESSAGE-PROVIDED",s="string"==typeof t?t:JSON.stringify(t),i={TopicArn:this.topicArn,Message:s};n.debug("Sending via SNS : %j",i),a=await this.sns.send(new Ne(i))}catch(a){if(!t)throw a;n.error("Failed to fire SNS notification : %j : %s",e,a)}return a}async conditionallySendMessage(e,t,a=!1){let s=null;return t?s=await this.sendMessage(e,a):n.info("Not sending message, condition was false : %j",e),s}}class mt{ratchet;tableName;constructor(e,s){this.ratchet=e,this.tableName=s,t.notNullOrUndefined(e,"ratchet"),t.notNullOrUndefined(a.trimToNull(this.tableName),"tableName")}async acquireLock(e,t=30){let a=!1;if(e&&t){const s=Math.floor((new Date).getTime()/1e3),i={Item:{lockingKey:e,timestamp:s,expires:s+t},ReturnConsumedCapacity:te.TOTAL,TableName:this.tableName,ConditionExpression:"attribute_not_exists(lockingKey)"};try{await this.ratchet.getDDB().send(new W(i));a=!0}catch(t){t instanceof z&&n.silly("Unable to acquire lock on %s",e)}}return a}async releaseLock(e){if(a.trimToNull(e))try{const t=await this.ratchet.simpleDelete(this.tableName,{lockingKey:e});n.silly("Released lock %s : %s",e,t)}catch(t){n.warn("Failed to release lock key : %s : %s",e,t,t)}}async clearExpiredSyncLocks(){const e=Math.floor((new Date).getTime()/1e3),t={TableName:this.tableName,FilterExpression:"expires < :now",ExpressionAttributeValues:{":now":e}},a=(await this.ratchet.fullyExecuteScan(t)).map((e=>({lockingKey:e.lockingKey})));return await this.ratchet.deleteAllInBatches(this.tableName,a,25)}}class ft{_locks=new Map;constructor(){}async acquireLock(e,t=30){let s=!1;if(a.trimToNull(e)){const a=Date.now(),n=this._locks.get(e);(!n||n<a)&&(this._locks.set(e,a+1e3*t),s=!0)}return s}async releaseLock(e){a.trimToNull(e)&&this._locks.delete(e)}async clearExpiredSyncLocks(){const e=[],t=Date.now();return this._locks.forEach(((a,s)=>{a<t&&e.push(s)})),e.forEach((e=>{this._locks.delete(e)})),e.length}}export{Se as AwsBatchBackgroundProcessor,Ce as AwsBatchRatchet,We as AwsBatchUsedImageFinder,tt as AwsCredentialsRatchet,Je as 
CascadeEnvironmentServiceProvider,De as CloudWatchLogGroupRatchet,Fe as CloudWatchLogsRatchet,Ie as CloudWatchMetricsRatchet,je as Daemon,Ue as DaemonUtil,xe as DynamoDbStorageProvider,mt as DynamoDbSyncLock,Ze as DynamoExpiringCodeProvider,Be as DynamoRatchet,it as DynamoRuntimeParameterProvider,Ke as DynamoTableRatchet,Ve as Ec2Ratchet,Ge as EcrUnusedImageCleaner,Qe as EnvVarEnvironmentServiceProvider,ze as EnvironmentService,Xe as ExpiringCodeRatchet,$e as FixedEnvironmentServiceProvider,rt as GlobalVariableOverrideRuntimeParameterProvider,Le as HashSpreader,at as LambdaEventDetector,st as LambdaEventTypeGuards,qe as LambdaUsedImageFinder,ot as MemoryRuntimeParameterProvider,ft as MemorySyncLock,Re as PrototypeDao,ve as RatchetAwsInfo,_e as RetainedImageReason,nt as Route53Ratchet,ct as RuntimeParameterRatchet,Pe as S3CacheRatchet,He as S3EnvironmentServiceProvider,et as S3ExpiringCodeProvider,lt as S3LocationSyncRatchet,Ae as S3PrototypeDaoProvider,ut as S3Ratchet,Me as S3SimpleDao,ke as S3StorageProvider,ht as SesMailSendingProvider,Oe as SimpleCache,dt as SnsRatchet,Ye as SsmEnvironmentServiceProvider};
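For orientation: the long export statement that closes the bundle above is the key to reading it, since it maps each mangled identifier back to its public name (for example, `ze` is `EnvironmentService`, `$e` is `FixedEnvironmentServiceProvider`, `Pe` is `S3CacheRatchet`, `Be` is `DynamoRatchet`). As a minimal consumption sketch, assuming only the constructor and method shapes visible in the minified source: the config key and value below are made-up placeholders, and the top-level await assumes an ESM context such as an .mjs module.

// Hypothetical usage sketch - not part of the package diff itself.
import { EnvironmentService, FixedEnvironmentServiceProvider } from '@bitblit/ratchet-aws';

// fromRecord wraps a plain object in a Map-backed provider (class $e above).
const provider = FixedEnvironmentServiceProvider.fromRecord({ 'my-config': { greeting: 'hello' } });

// EnvironmentService retries provider.fetchConfig with a linear backoff
// (maxRetries / backoffMultiplierMS mirror ze.defaultEnvironmentServiceConfig()
// above) and caches the read promise per key, so repeat reads are cheap.
const env = new EnvironmentService(provider, { maxRetries: 3, backoffMultiplierMS: 500 });
const cfg = await env.getConfig('my-config'); // -> { greeting: 'hello' }

The promise-per-key cache in `ze.getConfig` means concurrent callers share one in-flight fetch; a failed fetch is retried inside `getConfigUncached` before the service gives up and throws.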
+
import{DateTime as e}from"luxon";import{RequireRatchet as t,StringRatchet as a,ErrorRatchet as s,Logger as n,StopWatch as i,PromiseRatchet as r,WebStreamRatchet as o,MapRatchet as c,DurationRatchet as l,NumberRatchet as u,MailerUtil as h}from"@bitblit/ratchet-common";import{SubmitJobCommand as d,ListJobsCommand as m,DescribeJobDefinitionsCommand as f}from"@aws-sdk/client-batch";import{CloudWatchLogsClient as g,DescribeLogStreamsCommand as p,FilterLogEventsCommand as y,OrderBy as w,DeleteLogStreamCommand as b,DescribeLogGroupsCommand as T,DeleteLogGroupCommand as E,StartQueryCommand as N,GetQueryResultsCommand as S,StopQueryCommand as C}from"@aws-sdk/client-cloudwatch-logs";import{CloudWatchClient as v,PutMetricDataCommand as x,StandardUnit as k}from"@aws-sdk/client-cloudwatch";import{PutObjectCommand as O,GetObjectCommand as D,NoSuchKey as F,DeleteObjectCommand as I,NotFound as U,HeadObjectCommand as P,CopyObjectCommand as j,ListObjectsCommand as R,S3Client as A,ListObjectsV2Command as M}from"@aws-sdk/client-s3";import{Upload as B}from"@aws-sdk/lib-storage";import{getSignedUrl as K}from"@aws-sdk/s3-request-presigner";import{ScanCommand as L,QueryCommand as V,BatchWriteCommand as _,BatchGetCommand as G,PutCommand as W,GetCommand as q,UpdateCommand as J,DeleteCommand as Q}from"@aws-sdk/lib-dynamodb";import{ConditionalCheckFailedException as z,ProvisionedThroughputExceededException as $,DeleteTableCommand as H,CreateTableCommand as Y,ListTablesCommand as Z,DescribeTableCommand as X,ResourceNotFoundException as ee,ReturnConsumedCapacity as te}from"@aws-sdk/client-dynamodb";import{EC2Client as ae,StopInstancesCommand as se,StartInstancesCommand as ne,DescribeInstancesCommand as ie}from"@aws-sdk/client-ec2";import{EC2InstanceConnectClient as re,SendSSHPublicKeyCommand as oe}from"@aws-sdk/client-ec2-instance-connect";import{BatchDeleteImageCommand as ce,DescribeImagesCommand as le,DescribeRepositoriesCommand as ue,DescribeRegistryCommand as he}from"@aws-sdk/client-ecr";import{GetFunctionCommand as de,ListFunctionsCommand as me}from"@aws-sdk/client-lambda";import{SSMClient as fe,GetParameterCommand as ge,ParameterNotFound as pe}from"@aws-sdk/client-ssm";import{ChangeResourceRecordSetsCommand as ye,waitUntilResourceRecordSetsChanged as we}from"@aws-sdk/client-route-53";import{WaiterState as be}from"@smithy/util-waiter";import{SendRawEmailCommand as Te}from"@aws-sdk/client-ses";import{SNSClient as Ee,PublishCommand as Ne}from"@aws-sdk/client-sns";class Se{batchRatchet;validTaskNames;constructor(e,a){this.batchRatchet=e,this.validTaskNames=a,t.notNullOrUndefined(this.batchRatchet,"batchRatchet"),t.notNullOrUndefined(this.batchRatchet.batchClient,"batchRatchet.batchClient"),t.notNullOrUndefined(this.batchRatchet.defaultJobDefinition,"batchRatchet.defaultJobDefinition"),t.notNullOrUndefined(this.batchRatchet.defaultQueueName,"batchRatchet.defaultQueueName")}async scheduleBackgroundTask(t,i={},r={}){!this.validTaskNames||!this.validTaskNames.length||a.trimToNull(t)&&this.validTaskNames.includes(t)||s.throwFormattedErr("Cannot start task %s - not found in valid task list",t),n.info("Submitting background task to AWS batch: %s %j %s",t,i,this.batchRatchet.defaultQueueName);let o=null;const c=`${this.batchRatchet.defaultJobDefinition}-${t}_${e.utc().toFormat("yyyy-MM-dd-HH-mm")}`,l={jobName:c,jobDefinition:this.batchRatchet.defaultJobDefinition,jobQueue:this.batchRatchet.defaultQueueName,parameters:{taskName:t,taskData:JSON.stringify(i),taskMetadata:JSON.stringify(r)}};try{o=await 
this.batchRatchet.scheduleJob(l),n.info("Job %s(%s) submitted",o.jobName,o.jobId)}catch(e){n.error("Cannot submit batch job taskName: %s jobDef: %s queue: %s jobName: %s data: %j",t,this.batchRatchet.defaultJobDefinition,this.batchRatchet.defaultQueueName,c,i,e)}return o}}class Ce{_batchClient;_defaultQueueName;_defaultJobDefinition;constructor(e,t,a){this._batchClient=e,this._defaultQueueName=t,this._defaultJobDefinition=a}get batchClient(){return this._batchClient}get defaultQueueName(){return this._defaultQueueName}get defaultJobDefinition(){return this._defaultJobDefinition}async scheduleJob(e){n.info("Submitting batch job %s",e.jobName);try{const t=await this._batchClient.send(new d(e));return n.info("Job %s(%s) submitted",t.jobName,t.jobId),t}catch(t){n.error("Cannot submit batch job %s: %s",e.jobName,t)}return null}async jobCountInState(e,t=this.defaultQueueName){return(await this.listJobs(t,e)).length}async listJobs(e=this.defaultQueueName,a=null){t.notNullOrUndefined(e,"queueName");let s=[];const i={jobQueue:e,jobStatus:a,nextToken:null};n.info("Fetching %j",i);do{n.info("Pulling page...");const e=await this._batchClient.send(new m(i));s=s.concat(e.jobSummaryList),i.nextToken=e.nextToken}while(i.nextToken);return s}}class ve{constructor(){}static buildInformation(){return{version:"416",hash:"e7c8727b07eba28014e4ed789357dce00144456f",branch:"alpha-2024-07-12-2",tag:"alpha-2024-07-12-2",timeBuiltISO:"2024-07-12T17:24:10-0700",notes:"No notes"}}}class xe{dynamo;opts;constructor(e,a){this.dynamo=e,this.opts=a,t.notNullOrUndefined(this.dynamo,"dynamo"),t.notNullOrUndefined(this.opts,"opts"),t.notNullOrUndefined(this.opts.tableName,"opts.tableName"),t.notNullOrUndefined(this.opts.hashKeyName,"opts.hashKeyName"),t.true(!this.opts.useRangeKeys||!!this.opts.rangeKeyName&&!!this.opts.hashKeyValue,"invalid range configuration")}static createDefaultOptions(){return{tableName:"simple-cache",useRangeKeys:!1,hashKeyName:"cache-key",rangeKeyName:null,hashKeyValue:null}}createKeyObject(e){const t={};return this.opts.useRangeKeys?(t[this.opts.hashKeyName]=this.opts.hashKeyValue,t[this.opts.rangeKeyName]=e):t[this.opts.hashKeyName]=e,t}cleanDynamoFieldsFromObjectInPlace(e){e&&(delete e[this.opts.hashKeyName],this.opts.rangeKeyName&&delete e[this.opts.rangeKeyName],this.opts.dynamoExpiresColumnName&&delete e[this.opts.dynamoExpiresColumnName])}extractKeysFromObject(e){let t=null;return e&&(t={},this.opts.useRangeKeys?(t[this.opts.hashKeyName]=this.opts.hashKeyValue,t[this.opts.rangeKeyName]=e.cacheKey):t[this.opts.hashKeyName]=e.cacheKey),t}async readFromCache(e){const t=this.createKeyObject(e),a=await this.dynamo.simpleGet(this.opts.tableName,t);return this.cleanDynamoFieldsFromObjectInPlace(a),a}async storeInCache(e){t.notNullOrUndefined(e,"value"),t.notNullOrUndefined(e.cacheKey,"value.cacheKey");const a=Object.assign({},e,this.createKeyObject(e.cacheKey));this.opts.dynamoExpiresColumnName&&e.expiresEpochMS&&(a[this.opts.dynamoExpiresColumnName]=Math.floor(e.expiresEpochMS/1e3));return!!await this.dynamo.simplePut(this.opts.tableName,a)}async removeFromCache(e){await this.dynamo.simpleDelete(this.opts.tableName,this.createKeyObject(e))}async clearCache(){const e=(await this.readAll()).map((e=>this.extractKeysFromObject(e)));return await this.dynamo.deleteAllInBatches(this.opts.tableName,e,25)}async readAll(){let e=null;if(this.opts.useRangeKeys){const t={TableName:this.opts.tableName,KeyConditionExpression:"#cacheKey = 
:cacheKey",ExpressionAttributeNames:{"#cacheKey":this.opts.hashKeyName},ExpressionAttributeValues:{":cacheKey":this.opts.hashKeyValue}};e=await this.dynamo.fullyExecuteQuery(t)}else{const t={TableName:this.opts.tableName};e=await this.dynamo.fullyExecuteScan(t)}return e.forEach((e=>this.cleanDynamoFieldsFromObjectInPlace(e))),e}}class ke{s3CacheRatchet;prefix;constructor(e,a){this.s3CacheRatchet=e,this.prefix=a,t.notNullOrUndefined(this.s3CacheRatchet,"s3CacheRatchet"),t.notNullOrUndefined(this.s3CacheRatchet.getDefaultBucket(),"s3CacheRatchet.defaultBucket")}keyToPath(e){let t=a.trimToEmpty(this.prefix);return t.length>0&&!t.endsWith("/")&&(t+="/"),t+=e,t}async readFromCache(e){return await this.s3CacheRatchet.fetchCacheFileAsObject(this.keyToPath(e))}async storeInCache(e){t.notNullOrUndefined(e,"value"),t.notNullOrUndefined(e.cacheKey,"value.cacheKey");return!!await this.s3CacheRatchet.writeObjectToCacheFile(this.keyToPath(e.cacheKey),e)}async removeFromCache(e){await this.s3CacheRatchet.removeCacheFile(this.keyToPath(e))}async clearCache(){const e=await this.s3CacheRatchet.directChildrenOfPrefix(this.keyToPath(""));return await Promise.all(e.map((e=>this.removeFromCache(e)))),e.length}async readAll(){const e=await this.s3CacheRatchet.directChildrenOfPrefix(this.keyToPath(""));return await Promise.all(e.map((e=>this.readFromCache(e))))}}class Oe{provider;defaultTimeToLiveMS;constructor(e,t=6e4){this.provider=e,this.defaultTimeToLiveMS=t}createDefaultReadOptions(){return{maxStalenessMS:null,timeToLiveMS:this.defaultTimeToLiveMS,cacheNullValues:!1}}async fetchWrapper(e,t,a=this.createDefaultReadOptions()){n.silly("Fetching %s",e);const s=(new Date).getTime();let i=await this.provider.readFromCache(e);if(i&&i.expiresEpochMS<s&&(n.debug("Object found, but expired - removing"),i=null),i&&a&&a.maxStalenessMS&&s-i.createdEpochMS>a.maxStalenessMS&&(n.debug("Object found by too stale - removing"),i=null),!i){n.debug("%s not found in cache, generating",e);const r=await t();(r||a?.cacheNullValues)&&(n.debug("Writing %j to cache"),i={cacheKey:e,createdEpochMS:s,expiresEpochMS:a&&a.timeToLiveMS?s+a.timeToLiveMS:null,value:r,generated:!1},await this.provider.storeInCache(i),i.generated=!0)}return i}async fetch(e,t,a=null){const s=await this.fetchWrapper(e,t,a);return s?s.value:null}async removeFromCache(e,t){let a=null;return t&&(a=await this.fetchWrapper(e,(()=>null))),await this.provider.removeFromCache(e),a}async clearCache(){return this.provider.clearCache()}async readAll(){return this.provider.readAll()}}class De{logGroup;awsCWLogs;constructor(e,t=new g({region:"us-east-1"})){this.logGroup=e,this.awsCWLogs=t}get cloudWatchLogsClient(){return this.awsCWLogs}async readLogStreams(e=null,t=null){const a={logGroupName:this.logGroup,orderBy:"LastEventTime"},s=[];do{n.debug("Pulling more log streams (%d found so far)",s.length);const i=await this.awsCWLogs.send(new p(a));i.logStreams.forEach((a=>{null!==a.lastEventTimestamp&&(!e||a.lastEventTimestamp>=e)&&(!t||a.firstEventTimestamp<=t)&&s.push(a)})),a.nextToken=i.nextToken}while(a.nextToken);return n.debug("Found %d total, returning",s.length),s}async readLogStreamNames(e=null,t=null){return(await this.readLogStreams(e,t)).map((e=>e.logStreamName))}async readEvents(e,t=null,a=null,s=!0,r=null){const o=new i,c={logGroupName:this.logGroup,endTime:a,startTime:t};e&&(c.filterPattern=e),n.debug("Reading log events matching : %j",c);let l=[];do{n.debug("Pulling more log events (%d found so far) : %s",l.length,o.dump());const e=await this.awsCWLogs.send(new 
y(c));l=l.concat(e.events),c.nextToken=e.nextToken}while(c.nextToken&&(!r||l.length<r));return n.debug("Found %d total in %s",l.length,o.dump()),s&&(n.debug("Sorting events by timestamp"),l=l.sort(((e,t)=>{let a=e.timestamp-t.timestamp;return 0===a&&(a=e.message.localeCompare(t.message)),a}))),o.log(),l}}class Fe{static MAX_DELETE_RETRIES=5;cwLogs;constructor(e=null){this.cwLogs=e||new g({region:"us-east-1"})}get cloudWatchLogsClient(){return this.cwLogs}async removeEmptyOrOldLogStreams(e,t=1e3,a=null){n.info("Removing empty streams from %s, oldest event epoch MS : %d",e,a);const s={logGroupName:e,orderBy:w.LastEventTime},i=a||1;let o=0;const c=[],l=[];let u=10;do{n.debug("Executing search for streams");try{const e=await this.cwLogs.send(new p(s));o+=e.logStreams.length,n.debug("Found %d streams (%d so far, %d to delete)",e.logStreams.length,o,c.length);for(let a=0;a<e.logStreams.length&&c.length<t;a++){const t=e.logStreams[a];t.firstEventTimestamp?t.lastEventTimestamp<i&&c.push(t):c.push(t)}s.nextToken=e.nextToken}catch(e){const t=u;u=Math.min(1e3,1.5*u),n.info("Caught while describing %s, increasing wait between deletes (was %d, now %d)",e,t,u)}}while(s.nextToken&&c.length<t);n.info("Found %d streams to delete",c.length);let h=10;for(let t=0;t<c.length;t++){const a={logGroupName:e,logStreamName:c[t].logStreamName},s=0===c[t].storedBytes?"empty":"old";n.info("Removing %s stream %s",s,c[t].logStreamName);let i=!1,o=0;for(;!i&&o<Fe.MAX_DELETE_RETRIES;)try{await this.cwLogs.send(new b(a)),i=!0,await r.wait(h)}catch(e){o++;const t=h;h=Math.min(1e3,1.5*h),n.info("Caught %s, increasing wait between deletes and retrying (wait was %d, now %d) (Retry %d of %d)",e,t,h,o,Fe.MAX_DELETE_RETRIES)}i||l.push(c[t])}return n.warn("Failed to remove streams : %j",l),c}async findOldestEventTimestampInGroup(e){const t=await this.findStreamWithOldestEventInGroup(e);return t?t.firstEventTimestamp:null}async findStreamWithOldestEventInGroup(e){n.info("Finding oldest event in : %s",e);let t=null;try{const a={logGroupName:e,orderBy:w.LastEventTime};let s=0;do{n.debug("Executing search for streams");const e=await this.cwLogs.send(new p(a));s+=e.logStreams.length,n.debug("Found %d streams (%d so far)",e.logStreams.length,s),e.logStreams.forEach((e=>{e.firstEventTimestamp&&(null===t||e.firstEventTimestamp<t.firstEventTimestamp)&&(t=e)})),a.nextToken=e.nextToken}while(a.nextToken)}catch(t){n.error("Error attempting to find oldest event in group : %s : %s",e,t,t)}return t}async findLogGroups(e){t.notNullOrUndefined(e);const a={logGroupNamePrefix:e};let s=[];do{n.info("%d found, pulling log groups : %j",s.length,a);const e=await this.cwLogs.send(new T(a));s=s.concat(e.logGroups),a.nextToken=e.nextToken}while(a.nextToken);return s}async removeLogGroups(e){t.notNullOrUndefined(e);const a=[];for(let t=0;t<e.length;t++)try{n.info("Deleting %j",e[t]);const s={logGroupName:e[t].logGroupName};await this.cwLogs.send(new E(s)),a.push(!0)}catch(s){n.error("Failure to delete %j : %s",e[t],s),a.push(!1)}return a}async removeLogGroupsWithPrefix(e){t.notNullOrUndefined(e),t.true(a.trimToEmpty(e).length>0),n.info("Removing log groups with prefix %s",e);const s=await this.findLogGroups(e);return await this.removeLogGroups(s)}async fullyExecuteInsightsQuery(e){t.notNullOrUndefined(e),n.debug("Starting insights query : %j",e);const a=await this.cwLogs.send(new N(e));n.debug("Got query id %j",a);let s=null,i=100;for(;!s||["Running","Scheduled"].includes(s.status);)s=await this.cwLogs.send(new S({queryId:a.queryId})),await 
r.wait(i),i*=2,n.info("Got : %j",s);return s}async abortInsightsQuery(e){let t=null;return e&&(t=await this.cwLogs.send(new C({queryId:e}))),t}}class Ie{cw;constructor(e=null){this.cw=e||new v({region:"us-east-1",apiVersion:"2010-08-01"})}get cloudWatchClient(){return this.cw}async writeSingleMetric(e,t,a,s=k.None,i,r=new Date,o=!1){const c=[];a&&a.length>0&&a.forEach((e=>{c.push({Name:e.key,Value:e.value})}));const l={Namespace:e,MetricData:[{MetricName:t,Dimensions:c,Unit:s,Value:i,Timestamp:r,StorageResolution:o?1:60}]};n.silly("Writing metric to cw : %j",l);const u=await this.cw.send(new x(l));return n.silly("Result: %j",u),u}async writeDynamoCountAsMinuteLevelMetric(t){if(n.info("Publishing %s / %s metric for %s UTC",t.namespace,t.metric,t.minuteUTC),t.scan&&t.query)throw new Error("Must send query or scan, but not both");if(!t.scan&&!t.query)throw new Error("You must specify either a scan or a query");const a=t.query?await t.dynamoRatchet.fullyExecuteQueryCount(t.query):await t.dynamoRatchet.fullyExecuteScanCount(t.scan);n.debug("%s / %s for %s are %j",t.namespace,t.metric,t.minuteUTC,a);const s=t.minuteUTC.split(" ").join("T")+":00Z",i=e.fromISO(s).toJSDate(),r=await this.writeSingleMetric(t.namespace,t.metric,t.dims,k.Count,a.count,i,!1);return n.debug("Metrics response: %j",r),a.count}}class Ue{static DEFAULT_CONTENT=Buffer.from("DAEMON_PLACEHOLDER");static DAEMON_METADATA_KEY="daemon_meta";static async start(e,t,a,s){try{s.meta=s.meta||{},n.info("Starting daemon, key: %s, options: %j",a,s);const i=(new Date).getTime(),r={id:t,title:s.title,lastUpdatedEpochMS:i,lastUpdatedMessage:"Created",targetFileName:s.targetFileName,startedEpochMS:i,completedEpochMS:null,meta:s.meta,error:null,link:null,contentType:s.contentType};return await Ue.writeState(e,a,r,Ue.DEFAULT_CONTENT)}catch(e){throw n.error("Error while trying to start a daemon: %j %s",s,e),e}}static async writeState(e,t,a,s){try{const i={};a.lastUpdatedEpochMS=(new Date).getTime(),i[Ue.DAEMON_METADATA_KEY]=JSON.stringify(a);const r={Bucket:e.getDefaultBucket(),Key:t,ContentType:a.contentType,Metadata:i,Body:s};a.targetFileName&&(r.ContentDisposition='attachment;filename="'+a.targetFileName+'"');const o=await e.getS3Client().send(new O(r));return n.silly("Daemon wrote : %s",o),Ue.stat(e,t)}catch(e){throw n.error("Error while trying to write a daemon stat: %j %s",a,e),e}}static async streamDataAndFinish(e,t,s,i){n.debug("Streaming data to %s",t);const r=await Ue.updateMessage(e,t,"Streaming data");r.completedEpochMS=(new Date).getTime(),r.lastUpdatedMessage="Complete";const o={};o[Ue.DAEMON_METADATA_KEY]=JSON.stringify(r);const c={Bucket:e.getDefaultBucket(),Key:t,ContentType:r.contentType,Metadata:o,Body:s},l=a.trimToNull(i?.overrideTargetFileName)||a.trimToNull(r?.targetFileName);l&&(c.ContentDisposition='attachment;filename="'+l+'"');const u=new B({client:e.getS3Client(),params:c,tags:[],queueSize:4,partSize:5242880,leavePartsOnError:!1});i?.progressFn&&u.on("httpUploadProgress",i.progressFn);const h=await u.done();return n.silly("Daemon wrote : %s",h),Ue.stat(e,t)}static async updateMessage(e,t,a){try{const s=await Ue.stat(e,t);return s.lastUpdatedMessage=a,Ue.writeState(e,t,s,Ue.DEFAULT_CONTENT)}catch(e){throw n.error("Error while trying to update a daemon message: %j %s",t,e),e}}static async stat(e,t){try{n.debug("Daemon stat for path %s / %s",e.getDefaultBucket(),t);let a=null;const s=await e.fetchMetaForCacheFile(t);n.debug("Daemon: Meta is %j",s);const i=s&&s.Metadata?s.Metadata[Ue.DAEMON_METADATA_KEY]:null;return 
i?(a=JSON.parse(i),a.completedEpochMS&&!a.error&&(a.link=await e.preSignedDownloadUrlForCacheFile(t))):n.warn("No metadata found! (Head was %j)",s),a}catch(e){throw n.error("Error while trying to fetch a daemon state: %j %s",t,e),e}}static async abort(e,t){return Ue.error(e,t,"Aborted")}static async error(e,t,a){try{const s=await Ue.stat(e,t);return s.error=a,s.completedEpochMS=(new Date).getTime(),Ue.writeState(e,t,s,Ue.DEFAULT_CONTENT)}catch(e){throw n.error("Error while trying to write a daemon error: %j %s",t,e),e}}static async finalize(e,t,a){try{n.info("Finalizing daemon %s with %d bytes",t,a.length);const s=await Ue.stat(e,t);return s.completedEpochMS=(new Date).getTime(),s.lastUpdatedMessage="Complete",Ue.writeState(e,t,s,a)}catch(e){throw n.error("Error while trying to finalize a daemon: %j %s",t,e),e}}}class Pe{s3;defaultBucket;constructor(e,a=null){this.s3=e,this.defaultBucket=a,t.notNullOrUndefined(this.s3,"s3")}get s3Client(){return this.s3}static applyCacheControlMaxAge(e,t){return e&&t&&(e.CacheControl="max-age="+t),e}static applyUserMetaData(e,t,s){return e&&a.trimToNull(t)&&a.trimToNull(s)&&(e.Metadata=e.Metadata||{},e.Metadata[t]=s),e}getDefaultBucket(){return this.defaultBucket}getS3Client(){return this.s3}async fileExists(e,t=null){try{return!!await this.fetchMetaForCacheFile(e,this.bucketVal(t))}catch(e){return n.silly("Error calling file exists (as expected) %s",e),!1}}async fetchCacheFilePassThru(e){let t=null;try{t=await this.s3.send(new D(e))}catch(a){if(!(a instanceof F))throw a;n.debug("Key %s not found - returning null",e.Key),t=null}return t}async fetchCacheFileAsS3GetObjectCommandOutput(e,t=null){let a=null;try{const s={Bucket:this.bucketVal(t),Key:e};a=await this.s3.send(new D(s))}catch(t){if(!(t instanceof F))throw t;n.debug("Key %s not found - returning null",e),a=null}return a}async fetchCacheFileAsReadableStream(e,t=null){return(await this.fetchCacheFileAsS3GetObjectCommandOutput(e,t)).Body.transformToWebStream()}async fetchCacheFileAsBuffer(e,t=null){let a=null;const s=await this.fetchCacheFileAsS3GetObjectCommandOutput(e,t);if(s?.Body){const e=await s.Body.transformToByteArray();a=Buffer.from(e)}return a}async fetchCacheFileAsString(e,t=null){let a=null;const s=await this.fetchCacheFileAsS3GetObjectCommandOutput(e,t);return s?.Body&&(a=await s.Body.transformToString()),a}async fetchCacheFileAsObject(e,t=null){const a=await this.fetchCacheFileAsString(e,t);return a?JSON.parse(a):null}async removeCacheFile(e,t=null){let a=null;const s={Bucket:this.bucketVal(t),Key:e};try{a=await this.s3.send(new I(s))}catch(s){if(!(s&&s instanceof U))throw s;n.info("Swallowing 404 deleting missing object %s %s",t,e),a=null}return a}async writeObjectToCacheFile(e,t,a,s){const n=JSON.stringify(t);return this.writeStringToCacheFile(e,n,a,s)}async writeStringToCacheFile(e,t,a,s){const n=o.stringToWebReadableStream(t);return this.writeStreamToCacheFile(e,n,a,s)}async writeStreamToCacheFile(e,t,a,s,i=(e=>{n.debug("Uploading : %s",e)})){const r=Object.assign({},a||{},{Bucket:this.bucketVal(s),Key:e,Body:t}),o=new B({client:this.s3,params:r,tags:[],queueSize:4,partSize:5242880,leavePartsOnError:!1});i&&o.on("httpUploadProgress",i);return await o.done()}async synchronize(e,a,s=this,r=!1){t.notNullOrUndefined(e,"srcPrefix"),t.notNullOrUndefined(a,"targetPrefix"),t.true(e.endsWith("/"),"srcPrefix must end in /"),t.true(a.endsWith("/"),"targetPrefix must end in /");let o=[];const c=await this.directChildrenOfPrefix(e),l=await s.directChildrenOfPrefix(a),u=new i;for(let 
t=0;t<c.length;t++){const i=c[t];if(n.info("Processing %s : %s",i,u.dumpExpected(t/c.length)),i.endsWith("/"))if(r){n.info("%s is a subfolder - recursing");const t=await this.synchronize(e+i,a+i,s,r);n.info("Got %d back from %s",t.length,i),o=o.concat(t)}else n.info("%s is a subfolder and recurse not specified - skipping",i);else{let t=!0;const r=await this.fetchMetaForCacheFile(e+i);if(l.includes(i)){const e=await s.fetchMetaForCacheFile(a+i);r.ETag===e.ETag&&(n.debug("Skipping - identical"),t=!1)}if(t){n.debug("Copying...");const t=await this.fetchCacheFileAsReadableStream(e+i);try{const e=await s.writeStreamToCacheFile(a+i,t,r,void 0);n.silly("Write result : %j",e),o.push(i)}catch(e){n.error("Failed to sync : %s : %s",i,e)}}}}return n.info("Found %d files, copied %d",c.length,o.length),u.log(),o}async fetchMetaForCacheFile(e,t=null){let a=null;try{a=await this.s3.send(new P({Bucket:this.bucketVal(t),Key:e}))}catch(s){if(!(s&&s instanceof U))throw n.error("Unrecognized error, rethrowing : %s",s,s),s;n.info("Cache file %s %s not found returning null",this.bucketVal(t),e),a=null}return a}async cacheFileAgeInSeconds(e,t=null){try{const a=await this.fetchMetaForCacheFile(e,t);return a&&a.LastModified?Math.floor(((new Date).getTime()-a.LastModified.getTime())/1e3):(n.warn("Cache file %s %s had no last modified returning null",this.bucketVal(t),e),null)}catch(a){if(a&&a instanceof U)return n.warn("Cache file %s %s not found returning null",this.bucketVal(t),e),null;throw a}}async renameFile(e,t,a=null,s=null){n.info("Rename %s to %s (%s/%s)",e,t,a,s);const i=await this.copyFile(e,t,a,s);return i&&await this.removeCacheFile(e,a),i}async copyFile(e,t,a=null,s=null){const n={CopySource:"/"+this.bucketVal(a)+"/"+e,Bucket:this.bucketVal(s),Key:t,MetadataDirective:"COPY"};return await this.s3.send(new j(n))}async quietCopyFile(e,t,a=null,s=null){let i=!1;try{await this.copyFile(e,t,a,s);i=!0}catch(e){n.silly("Failed to copy file in S3 : %s",e)}return i}async preSignedDownloadUrlForCacheFile(e,t=3600,a=null){const s={Bucket:this.bucketVal(a),Key:e};return await K(this.s3,new D(s),{expiresIn:t})}async directChildrenOfPrefix(e,t=!1,a=null,s=null){const n=[],i={Bucket:this.bucketVal(a),Prefix:e,Delimiter:"/"};let r=null;do{r=await this.s3.send(new R(i));const o=e.length;r.CommonPrefixes&&r.CommonPrefixes.forEach((e=>{if(!s||n.length<s){const t=e.Prefix.substring(o);n.push(t)}})),r.Contents&&await Promise.all(r.Contents.map((async e=>{if(!s||n.length<s)if(t){const t={link:await this.preSignedDownloadUrlForCacheFile(e.Key,3600,a),name:e.Key.substring(o),size:e.Size};n.push(t)}else n.push(e.Key.substring(o))}))),i.Marker=r.NextMarker}while(i.Marker&&(!s||n.length<s));return n}async allSubFoldersOfPrefix(e,t=null){const a=[e];let s=0;for(;s<a.length;){const e=a[s++];n.debug("Pulling %s (%d remaining)",e,a.length-s);const i={Bucket:this.bucketVal(t),Prefix:e,Delimiter:"/"};let r=null;do{i.ContinuationToken=r?r.NextContinuationToken:null,r=await this.s3.send(new R(i)),r.CommonPrefixes.forEach((e=>{a.push(e.Prefix)})),n.debug("g:%j",r)}while(r.NextContinuationToken)}return a}bucketVal(e){const t=e||this.defaultBucket;if(!t)throw"You must set either the default bucket or pass it explicitly";return t}}class je{s3;bucket;prefix;_defaultGroup;jwtRatchet;static DEFAULT_DEFAULT_GROUP="DEFAULT";cache;constructor(e,t,a="",s=je.DEFAULT_DEFAULT_GROUP,n){this.s3=e,this.bucket=t,this.prefix=a,this._defaultGroup=s,this.jwtRatchet=n,this.cache=new Pe(this.s3,this.bucket)}get defaultGroup(){return this._defaultGroup}async 
keyToPublicToken(e,a){t.notNullOrUndefined(this.jwtRatchet,"You must set jwtRatchet if you wish to use public tokens"),t.notNullOrUndefined(e,"key"),t.true(a>0,"Expiration seconds must be larger than 0");const s={daemonKey:e};return await this.jwtRatchet.createTokenString(s,a)}keyToPath(e){return Buffer.from(e,"base64").toString()}pathToKey(e){return Buffer.from(e).toString("base64")}generatePath(e=this._defaultGroup){return this.generatePrefix(e)+a.createType4Guid()}generatePrefix(e=this._defaultGroup){return this.prefix+e+"/"}async start(e){e.group=e.group||this._defaultGroup;const t=this.generatePath(e.group),a=this.pathToKey(t);return Ue.start(this.cache,a,t,e)}async writeState(e,t){const a=this.keyToPath(e.id);return Ue.writeState(this.cache,a,e,t)}async clean(e=this._defaultGroup,t=604800){try{n.info("Daemon removing items older than %d seconds from group %s",t,e);const a=await this.list(e),s=(new Date).getTime(),i=[];for(let e=0;e<a.length;e++){const n=a[e];if((s-n.startedEpochMS)/1e3>t){await this.cache.removeCacheFile(this.keyToPath(n.id));i.push(n)}}return n.debug("Removed %d of %d items",i.length,a.length),i}catch(t){throw n.error("Error while trying to clean a daemon: %j %s",e,t),t}}async listKeys(e=this._defaultGroup){try{const t=this.generatePrefix(e);n.info("Fetching children of %s",t);const a=await this.cache.directChildrenOfPrefix(t);return n.debug("Found : %j",a),a}catch(t){throw n.error("Error while trying to list daemon keys: %j %s",e,t),t}}async list(e=this._defaultGroup){try{const t=this.generatePrefix(e);n.info("Fetching children of %s",t);const a=(await this.listKeys(e)).map((t=>this.stat(this.pathToKey(this.generatePrefix(e)+t))));return await Promise.all(a)}catch(t){throw n.error("Error while trying to list daemon states: %j %s",e,t),t}}async updateMessage(e,t){const a=this.keyToPath(e);return Ue.updateMessage(this.cache,a,t)}async stat(e){const t=this.keyToPath(e);return Ue.stat(this.cache,t)}async statFromPublicToken(e){t.notNullOrUndefined(this.jwtRatchet,"You must set jwtRatchet if you wish to use public tokens"),t.notNullOrUndefined(e,"publicToken");const a=await this.jwtRatchet.decodeToken(e),s=a?.daemonKey;return s?this.stat(s):null}async abort(e){return Ue.abort(this.cache,this.keyToPath(e))}async error(e,t){return Ue.error(this.cache,this.keyToPath(e),t)}async finalize(e,t){return Ue.finalize(this.cache,this.keyToPath(e),t)}}class Re{provider;cfg;static defaultDaoConfig(){return{guidCreateFunction:a.createType4Guid,guidFieldName:"guid",createdEpochMSFieldName:"createdEpochMS",updatedEpochMSFieldName:"updatedEpochMS",createdUtcTimestampFieldName:null,updatedUtcTimestampFieldName:null}}constructor(e,a=Re.defaultDaoConfig()){this.provider=e,this.cfg=a,t.notNullOrUndefined(e,"provider"),t.notNullOrUndefined(a,"cfg"),t.notNullOrUndefined(a.guidCreateFunction,"cfg.guidCreateFunction"),t.notNullOrUndefined(a.guidFieldName,"cfg.guidFieldName")}async fetchAll(){return(await this.provider.loadDatabase()).items||[]}async resetDatabase(){await this.provider.storeDatabase({items:[],lastModifiedEpochMS:Date.now()})}async removeItems(e){let t=await this.fetchAll();return e&&(t=t.filter((t=>!e.includes(t[this.cfg.guidFieldName]))),await this.provider.storeDatabase({items:t,lastModifiedEpochMS:Date.now()})),t}async store(t){let a=await this.fetchAll();return 
t&&(t[this.cfg.guidFieldName]=t[this.cfg.guidFieldName]||this.cfg.guidCreateFunction(),this.cfg.createdEpochMSFieldName&&(t[this.cfg.createdEpochMSFieldName]=t[this.cfg.createdEpochMSFieldName]||Date.now()),this.cfg.createdUtcTimestampFieldName&&(t[this.cfg.createdUtcTimestampFieldName]=t[this.cfg.createdUtcTimestampFieldName]||e.utc().toISO()),this.cfg.updatedEpochMSFieldName&&(t[this.cfg.updatedEpochMSFieldName]=Date.now()),this.cfg.updatedUtcTimestampFieldName&&(t[this.cfg.updatedUtcTimestampFieldName]=e.utc().toISO()),a=a.filter((e=>e[this.cfg.guidFieldName]!==t[this.cfg.guidFieldName])),a.push(t),await this.provider.storeDatabase({items:a,lastModifiedEpochMS:Date.now()})),t}async fetchById(e){return(await this.fetchAll()).find((t=>t[this.cfg.guidFieldName]===e))}async searchByField(e,a){t.notNullOrUndefined(e,"fieldDotPath"),t.notNullOrUndefined(a,"fieldValue");const s={};return s[e]=a,this.searchByFieldMap(s)}async searchByFieldMap(e){t.notNullOrUndefined(e,"input");let a=await this.fetchAll();return a=a.filter((t=>{let a=!0;return Object.keys(e).forEach((s=>{const n=c.findValueDotPath(t,s),i=e[s];a=a&&n===i})),a})),a}}class Ae{s3CacheRatchet;keyName;constructor(e,a){this.s3CacheRatchet=e,this.keyName=a,t.notNullOrUndefined(e,"s3CacheRatchet"),t.notNullUndefinedOrOnlyWhitespaceString(e.getDefaultBucket(),"s3CacheRatchet.defaultBucket"),t.notNullUndefinedOrOnlyWhitespaceString(a,"keyName")}async storeDatabase(e){const t=e||{items:[],lastModifiedEpochMS:null};t.lastModifiedEpochMS=Date.now();return!!await this.s3CacheRatchet.writeObjectToCacheFile(this.keyName,t)}async loadDatabase(){return await this.s3CacheRatchet.fetchCacheFileAsObject(this.keyName)||{items:[],lastModifiedEpochMS:Date.now()}}}class Me{cache;prefix;constructor(e,t){if(this.cache=e,this.prefix=t,!e)throw new Error("cache object may not be null");if(!e.getDefaultBucket())throw new Error("Supplied cache must have default bucket set")}buildPathPrefix(e){let t="";return this.prefix&&(t+=this.prefix),e&&(t+=e),t}buildFullPath(e,t){let a=this.buildPathPrefix(t);return a.length>0&&(a+="/"),a+=e+".json",a}async exists(e,t){const a=this.buildFullPath(e,t);return n.debug("Check file existence : %s",a),this.cache.fileExists(a)}async fetch(e,t){const a=this.buildFullPath(e,t);n.debug("Fetching : %s",a);const s=await this.cache.fetchCacheFileAsObject(a);return s.id=e,s.path=t,s}async store(e,t){e.id=e.id||a.createType4Guid(),e.lastModifiedEpochMS=(new Date).getTime();const s=this.buildFullPath(e.id,t);n.debug("Storing : %s",s),await this.cache.writeObjectToCacheFile(s,e);return await this.fetch(e.id,t)}async listItems(e){const t=this.buildPathPrefix(e);n.debug("Listing : %s",t);return await this.cache.directChildrenOfPrefix(t)}async fetchItemsInPath(e){const t=this.buildPathPrefix(e);n.debug("Full fetch of : %s",t);const a=(await this.listItems(e)).map((t=>this.fetch(t,e)));return await Promise.all(a)}async delete(e,t){const a=this.buildFullPath(e,t);n.debug("Deleting : %s",a);return null!=await this.cache.removeCacheFile(a)}}class Be{awsDDB;constructor(e){if(this.awsDDB=e,!e)throw"awsDDB may not be null"}get dynamoDBDocumentClient(){return this.awsDDB}getDDB(){return this.awsDDB}async tableIsEmpty(e){const t={TableName:e,Limit:1};return 0===(await this.throughputSafeScanOrQuery((e=>this.scanPromise(e)),t)).Items.length}async scanPromise(e){return this.awsDDB.send(new L(e))}async queryPromise(e){return this.awsDDB.send(new V(e))}async throughputSafeScanOrQuery(e,t,a,i){let o=null;if(t){let c=i??0;do{c++;try{o=await 
e(t)}catch(e){if(!Be.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e))throw e;{const e=1e3*Math.pow(2,c);n.debug("Exceeded scan throughput for %j : Try %d of %d (Waiting %d ms)",t,c,a,e),await r.wait(e),c++}}}while(!o&&(!a||c<a));o||s.throwFormattedErr("throughputSafeScan failed - tried %d times, kept running into throughput exceeded : %j",a,t)}return o}async fullyExecuteQueryCount(e,t=0){try{e.Select="COUNT",n.debug("Executing count query : %j",e);const a={count:0,scannedCount:0,pages:0},s=(new Date).getTime();let i=null;const o=e.Limit;e.Limit=null;do{i=await this.throughputSafeScanOrQuery((e=>this.queryPromise(e)),e),a.count+=i.Count,a.scannedCount+=i.ScannedCount,a.pages++,e.ExclusiveStartKey=i.LastEvaluatedKey,await r.wait(t),n.silly("Rval is now %j",a),o&&a.count>=o&&e.ExclusiveStartKey&&(n.info("Aborting query since hit limit of %d",o),e.ExclusiveStartKey=null)}while(e.ExclusiveStartKey);const c=(new Date).getTime();return n.debug("Finished, returned %j in %s for %j",a,l.formatMsDuration(c-s,!0),e),a}catch(t){return n.error("Failed with %s, q: %j",t,e,t),null}}async fullyExecuteQuery(e,t=0,a=null){const s=[];return await this.fullyExecuteProcessOverQuery(e,(async e=>{s.push(e)}),t,a),s}async fullyExecuteProcessOverQuery(e,t,a=0,s=null){let i=0;try{n.debug("Executing query : %j",e);const o=(new Date).getTime();n.debug("Pulling %j",e);let c=await this.throughputSafeScanOrQuery((e=>this.queryPromise(e)),e);for(let e=0;e<c.Items.length;e++)await t(c.Items[e]),i++;let u=0,h=0;for(;c.LastEvaluatedKey&&(null===s||i<s)&&!e.Limit;){n.silly("Found more rows - requery with key %j",c.LastEvaluatedKey),e.ExclusiveStartKey=c.LastEvaluatedKey,c=await this.throughputSafeScanOrQuery((e=>this.queryPromise(e)),e);for(let e=0;e<c.Items.length;e++)await t(c.Items[e]),i++;n.silly("Have processed %d items",i),u++,h+=0===c.Count?1:0,await r.wait(a)}const d=(new Date).getTime();n.debug("Finished, processed %d rows in %s for %j (%d blank pages, %d total pages)",i,l.formatMsDuration(d-o,!0),e,h,u)}catch(t){n.error("Failed with %s, q: %j",t,e,t)}return i}async fullyExecuteScanCount(e,t=0){try{e.Select="COUNT";const a={count:0,scannedCount:0,pages:0};n.debug("Executing scan count : %j",e);const s=(new Date).getTime();let i=null;const o=e.Limit;e.Limit=null;do{i=await this.throughputSafeScanOrQuery((e=>this.scanPromise(e)),e),a.count+=i.Count,a.scannedCount+=i.ScannedCount,a.pages++,e.ExclusiveStartKey=i?.LastEvaluatedKey,await r.wait(t),n.silly("Rval is now %j",a),o&&a.count>=o&&e.ExclusiveStartKey&&(n.info("Aborting scan since hit limit of %d",o),e.ExclusiveStartKey=null)}while(e.ExclusiveStartKey);const c=(new Date).getTime();return n.debug("Finished, returned %j in %s for %j",a,l.formatMsDuration(c-s,!0),e),a}catch(t){return n.error("Failed with %s, q: %j",t,e,t),null}}async fullyExecuteScan(e,t=0,a=null){const s=[];return await this.fullyExecuteProcessOverScan(e,(async e=>{s.push(e)}),t,a),s}async fullyExecuteProcessOverScan(e,t,a=0,s=null){let i=0;try{n.debug("Executing scan : %j",e);const o=(new Date).getTime();n.debug("Pulling %j",e);let c=await this.throughputSafeScanOrQuery((e=>this.scanPromise(e)),e);for(let e=0;e<c.Items.length;e++)await t(c.Items[e]),i++;for(;c.LastEvaluatedKey&&(null===s||i<s)&&!e.Limit;){n.silly("Found more rows - requery with key %j",c.LastEvaluatedKey),e.ExclusiveStartKey=c.LastEvaluatedKey,c=await this.throughputSafeScanOrQuery((e=>this.scanPromise(e)),e);for(let e=0;e<c.Items.length;e++)await t(c.Items[e]),i++;n.silly("Rval is now %d items",i),await 
r.wait(a)}const u=(new Date).getTime();n.debug("Finished, processed %d results in %s for %j",i,l.formatMsDuration(u-o,!0),e)}catch(t){n.error("Failed with %s, q: %j",t,e,t)}return i}async writeAllInBatches(e,t,a){if(!a||a<2)throw new Error("Batch size needs to be at least 2, was "+a);let s=0;if(t&&t.length>0){let i=[];for(t.forEach((t=>{i.push({PutRequest:{Item:t,ReturnConsumedCapacity:"TOTAL",TableName:e}})})),n.debug("Processing %d batch items to %s",i.length,e);i.length>0;){const t=i.slice(0,Math.min(i.length,a));i=i.slice(t.length);const o={RequestItems:{},ReturnConsumedCapacity:"TOTAL",ReturnItemCollectionMetrics:"SIZE"};o.RequestItems[e]=t;let c=1,l=!1,u=null;for(;!l&&c<7;){try{u=await this.awsDDB.send(new _(o))}catch(e){if(!Be.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e))throw e;n.info("Caught ProvisionedThroughputExceededException - retrying delete"),u={UnprocessedItems:o.RequestItems}}if(u&&u.UnprocessedItems&&u.UnprocessedItems[e]&&u.UnprocessedItems[e].length>0){const t=Math.pow(2,c);n.warn("Found %d unprocessed items. Backing off %d seconds and trying again",u.UnprocessedItems[e].length,t),await r.wait(1e3*t),c++,o.RequestItems[e]=u.UnprocessedItems[e]}else l=!0}u&&u.UnprocessedItems&&u.UnprocessedItems[e]&&u.UnprocessedItems[e].length>0?(n.error("After 6 tries there were still %d unprocessed items",u.UnprocessedItems[e].length),s+=t.length-u.UnprocessedItems[e].length,n.warn("FIX Unprocessed : %j",u.UnprocessedItems)):s+=t.length}}return s}async fetchFullObjectsMatchingKeysOnlyIndexQuery(e,a,s=25){t.notNullOrUndefined(e),t.notNullOrUndefined(e.TableName),t.notNullOrUndefined(a),t.true(a.length>0);const n=await this.fullyExecuteQuery(e),i=Be.stripAllToKeysOnly(n,a);return await this.fetchAllInBatches(e.TableName,i,s)}async fetchAllInBatches(e,t,a){if(!a||a<2||a>100)throw new Error("Batch size needs to be at least 2 and no more than 100, was "+a);let s=[];const i=[];let o=Object.assign([],t);for(;o.length>0;){const t=o.slice(0,Math.min(o.length,a));o=o.slice(t.length);const s={};s[e]={Keys:t};const n={RequestItems:s,ReturnConsumedCapacity:"TOTAL"};i.push(n)}n.debug("Created %d batches",i.length);for(let t=0;t<i.length;t++){i.length>1&&n.info("Processing batch %d of %d",t+1,i.length);const a=i[t];let o=1;do{n.silly("Pulling %j",a);const t=await this.awsDDB.send(new G(a));s=s.concat(t.Responses[e]),t.UnprocessedKeys&&t.UnprocessedKeys[e]&&t.UnprocessedKeys[e].Keys.length>0&&o<15&&(n.silly("Found %d unprocessed, waiting",t.UnprocessedKeys[e].Keys),await r.wait(1e3*Math.pow(2,o)),o++),a.RequestItems=t.UnprocessedKeys}while(!a.RequestItems&&a.RequestItems[e].Keys.length>0)}return s}async deleteAllInBatches(e,t,a){if(!a||a<2)throw new Error("Batch size needs to be at least 2, was "+a);let s=0;if(t&&t.length>0){let i=[];for(t.forEach((t=>{i.push({DeleteRequest:{Key:t,ReturnConsumedCapacity:"TOTAL",TableName:e}})})),n.debug("Processing %d DeleteBatch items to %s",i.length,e);i.length>0;){const t=i.slice(0,Math.min(i.length,a));i=i.slice(t.length);const o={RequestItems:{},ReturnConsumedCapacity:"TOTAL",ReturnItemCollectionMetrics:"SIZE"};o.RequestItems[e]=t;let c=1,l=!1,u=null;for(;!l&&c<7;){try{u=await this.awsDDB.send(new _(o))}catch(e){if(!Be.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e))throw e;n.info("Caught ProvisionedThroughputExceededException - retrying delete"),u={UnprocessedItems:o.RequestItems}}if(u&&u.UnprocessedItems&&u.UnprocessedItems[e]&&u.UnprocessedItems[e].length>0){const t=Math.pow(2,c);n.warn("Found %d unprocessed items. 
Backing off %d seconds and trying again",u.UnprocessedItems[e].length,t),await r.wait(1e3*t),c++,o.RequestItems[e]=u.UnprocessedItems[e]}else l=!0}u&&u.UnprocessedItems&&u.UnprocessedItems[e]&&u.UnprocessedItems[e].length>0?(n.error("After 6 tries there were still %d unprocessed items",u.UnprocessedItems[e].length),s+=t.length-u.UnprocessedItems[e].length,n.warn("FIX Unprocessed : %j",u.UnprocessedItems)):s+=t.length,n.debug("%d Remain, DeleteBatch Results : %j",i.length,u)}}return s}async simplePut(e,t,a=3){let s=null,i=0;const o={Item:t,ReturnConsumedCapacity:"TOTAL",TableName:e};for(;!s&&i<a;)try{s=await this.awsDDB.send(new W(o))}catch(e){if(!Be.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e))throw e;{const e=1e3*Math.pow(2,i);n.debug("Exceeded write throughput for %j : Try %d of %d (Waiting %d ms)",o,i,a,e),await r.wait(e),i++}}return s||n.warn("Unable to write %j to DDB after %d tries, giving up",o,a),s}async simplePutOnlyIfFieldIsNullOrUndefined(e,t,a){let s=!1;const i={Item:t,ReturnConsumedCapacity:"TOTAL",ConditionExpression:"attribute_not_exists(#fieldName) OR #fieldName = :null ",ExpressionAttributeNames:{"#fieldName":a},ExpressionAttributeValues:{":null":null},TableName:e};try{const e=await this.awsDDB.send(new W(i));n.silly("Wrote : %j",e),s=!0}catch(o){if(Be.objectIsErrorWithProvisionedThroughputExceededExceptionCode(o))n.debug("Exceeded write throughput for %j : (Waiting 2000 ms)",i),await r.wait(2e3),s=await this.simplePutOnlyIfFieldIsNullOrUndefined(e,t,a);else{if(!(o&&o instanceof z))throw o;n.debug("Failed to write %j due to null field failure"),s=!1}}return s}async simplePutWithCollisionAvoidance(e,a,s,i,o=null,c=3){t.true(s&&s.length>0&&s.length<3,"You must pass 1 or 2 key names");let l=null,u=0;const h={"#key0":s[0]},d={":key0":a[s[0]]};let m="#key0 <> :key0";s.length>1&&(m+=" AND #key1 <> :key1",h["#key1"]=s[1],d[":key1"]=a[s[1]]);const f={Item:a,ReturnConsumedCapacity:"TOTAL",ConditionExpression:m,ExpressionAttributeNames:h,ExpressionAttributeValues:d,TableName:e};let g=0;for(;!l&&u<c&&(!o||g<o);)try{l=await this.awsDDB.send(new W(f))}catch(e){if(Be.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e)){u++;const e=1e3*Math.pow(2,u);n.debug("Exceeded write throughput for %j : Try %d of %d (Waiting %d ms)",f,u,c,e),await r.wait(e)}else{if(!(e&&e instanceof z))throw e;{let e=Object.assign({},f.Item);n.info("Failed to write %j due to collision - adjusting and retrying",e),e=i(e),f.Item=e,f.ExpressionAttributeValues[":key0"]=e[s[0]],s.length>1&&(f.ExpressionAttributeValues[":key1"]=e[s[1]]),g++}}}return l&&g>0&&n.info("After adjustment, wrote %j as %j",a,f.Item),l||n.warn("Unable to write %j to DDB after %d provision tries and %d adjusts, giving up",f,u,g),l?f.Item:null}async simpleGet(e,t,a=3){let s=null,i=0;const o={TableName:e,Key:t};for(;!s&&i<a;)try{s=await this.awsDDB.send(new q(o))}catch(e){if(!Be.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e))throw e;{const e=1e3*Math.pow(2,i);n.debug("Exceeded read throughput for %j : Try %d of %d (Waiting %d ms)",o,i,a,e),await r.wait(e),i++}}s||n.warn("Unable to read %j from DDB after %d tries, giving up",o,a);return s&&s.Item?Object.assign({},s.Item):null}static objectIsErrorWithProvisionedThroughputExceededExceptionCode(e){return!!e&&e instanceof $}async simpleGetWithCounterDecrement(e,t,a,s,i=3){let o=null,c=0;const l={TableName:e,Key:t,UpdateExpression:"set #counter = 
#counter-:decVal",ExpressionAttributeNames:{"#counter":a},ExpressionAttributeValues:{":decVal":1,":minVal":0},ConditionExpression:"#counter > :minVal",ReturnValues:"ALL_NEW"};let u=!1;for(;!o&&c<i&&!u;)try{o=await this.awsDDB.send(new J(l))}catch(e){if(Be.objectIsErrorWithProvisionedThroughputExceededExceptionCode(e)){const e=1e3*Math.pow(2,c);n.debug("Exceeded update throughput for %j : Try %d of %d (Waiting %d ms)",l,c,i,e),await r.wait(e),c++}else{if(!(e&&e instanceof z))throw e;n.info("Cannot fetch requested row (%j) - the update check failed",t),u=!0}}o||u||n.warn("Unable to update %j from DDB after %d tries, giving up",l,i);const h=o&&o.Attributes?Object.assign({},o.Attributes):null;return s&&h&&0===h[a]&&(n.info("Delete on 0 specified, removing"),await this.simpleDelete(e,t)),h}async simpleDelete(e,t){const a={TableName:e,Key:t};return await this.awsDDB.send(new Q(a))}async atomicCounter(e,t,a,s=1){const n={TableName:e,Key:t,UpdateExpression:"SET #counterFieldName = #counterFieldName + :inc",ExpressionAttributeNames:{"#counterFieldName":a},ExpressionAttributeValues:{":inc":s},ReturnValues:"UPDATED_NEW"},i=await this.awsDDB.send(new J(n));return u.safeNumber(i.Attributes[a])}static cleanObject(e){if(e){const t=[];Object.keys(e).forEach((a=>{const s=e[a];""===s?t.push(a):s instanceof Object&&Be.cleanObject(s)})),n.silly("Removing keys : %j",t),t.forEach((t=>{delete e[t]}))}}static stripToKeysOnly(e,t){let a=null;return e&&t&&t.length>0&&(a={},t.forEach((t=>{e[t]||s.throwFormattedErr("Failed key extraction on %j - missing %s",e,t),a[t]=e[t]}))),a}static stripAllToKeysOnly(e,t){return e.map((e=>Be.stripToKeysOnly(e,t)))}}class Ke{awsDDB;constructor(e){if(this.awsDDB=e,!e)throw"awsDDB may not be null"}async deleteTable(e,a=!0){t.notNullOrUndefined(e);const s={TableName:e};n.debug("Deleting ddb table %s",e);const i=await this.awsDDB.send(new H(s));return a&&(n.debug("Table marked for delete, waiting for deletion"),await this.waitForTableDelete(e)),i}async createTable(e,a=!0,i=!1){t.notNullOrUndefined(e),t.notNullOrUndefined(e.TableName),n.debug("Creating new table : %j",e);await this.tableExists(e.TableName)&&(i?(n.debug("Table %s exists and replace specified - deleting",e.TableName),await this.deleteTable(e.TableName)):s.throwFormattedErr("Cannot create table %s - exists already and replace not specified",e.TableName));const r=await this.awsDDB.send(new Y(e));return a&&(n.debug("Table created, awaiting ready"),await this.waitForTableReady(e.TableName)),r}async waitForTableReady(e){let t=!0,a=await this.safeDescribeTable(e);for(;a&&a.Table&&"ACTIVE"!==a.Table.TableStatus;)n.silly("Table not ready - waiting 2 seconds"),await r.wait(2e3),a=await this.safeDescribeTable(e);return a||a.Table||(n.warn("Cannot wait for %s to be ready - table does not exist",e),t=!1),t}async waitForTableDelete(e){let t=await this.safeDescribeTable(e);for(;t;)n.silly("Table %s still exists, waiting 2 seconds (State is %s)",e,t.Table.TableStatus),await r.wait(2e3),t=await this.safeDescribeTable(e)}async tableExists(e){return!!await this.safeDescribeTable(e)}async listAllTables(){const e={};let t=[];do{const a=await this.awsDDB.send(new Z(e));t=t.concat(a.TableNames),e.ExclusiveStartTableName=a.LastEvaluatedTableName}while(e.ExclusiveStartTableName);return t}async safeDescribeTable(e){try{return await this.awsDDB.send(new X({TableName:e}))}catch(e){if(e instanceof ee)return null;throw e}}async 
copyTable(e,a,n,i){if(t.notNullUndefinedOrOnlyWhitespaceString(e,"srcTableName"),t.notNullUndefinedOrOnlyWhitespaceString(a,"dstTableName"),i)throw s.fErr("Cannot copy %s to %s - copy data not supported yet",e,a);const r=await this.safeDescribeTable(e);if(await this.tableExists(a))throw s.fErr("Cannot copy to %s - table already exists",a);if(!r)throw s.fErr("Cannot copy %s - doesnt exist",e);r.Table.AttributeDefinitions,r.Table.KeySchema,r.Table.GlobalSecondaryIndexes;const o=Object.assign({},n||{},{AttributeDefinitions:r.Table.AttributeDefinitions,TableName:a,KeySchema:r.Table.KeySchema,LocalSecondaryIndexes:r.Table.LocalSecondaryIndexes,GlobalSecondaryIndexes:r.Table.GlobalSecondaryIndexes.map((e=>{const t=e;return 0!==t.ProvisionedThroughput?.WriteCapacityUnits&&0!==t.ProvisionedThroughput?.ReadCapacityUnits||(t.ProvisionedThroughput=void 0),t})),BillingMode:r.Table.BillingModeSummary.BillingMode,ProvisionedThroughput:"PROVISIONED"===r.Table.BillingModeSummary.BillingMode?r.Table.ProvisionedThroughput:void 0,StreamSpecification:r.Table.StreamSpecification,SSESpecification:r.Table.SSEDescription,Tags:void 0,TableClass:r.Table.TableClassSummary?.TableClass,DeletionProtectionEnabled:r.Table.DeletionProtectionEnabled});return await this.awsDDB.send(new Y(o))}}class Le{spots;buckets;separator;alphabet;_allSlots;constructor(e=3,s=16,n="_",i="0123456789ABCDEF"){this.spots=e,this.buckets=s,this.separator=n,this.alphabet=i,t.true(e>0,"Spots must be larger than 0"),t.true(s>1,"Buckets must be larger than 1"),t.notNullOrUndefined(a.trimToNull(i),"Alphabet may not be null or empty"),t.true(a.allUnique(i),"Alphabet must be unique"),t.true(a.stringContainsOnlyAlphanumeric(i),"Alphabet must be alphanumeric");const r=Math.pow(i.length,e);t.true(s<r,"Buckets must be less than permutations ("+s+" / "+r+")"),t.notNullOrUndefined(a.trimToNull(this.separator),"Separator must be nonnull and nonempty");const o=a.allPermutationsOfLength(e,i);this._allSlots=o.slice(0,s)}get allBuckets(){return Object.assign([],this._allSlots)}get randomBucket(){return this._allSlots[Math.floor(Math.random()*this.buckets)]}allSpreadValues(e){t.notNullOrUndefined(a.trimToNull(e),"Cannot spread null/empty value");return this._allSlots.map((t=>e+this.separator+t))}allSpreadValuesForArray(e){t.true(e&&e.length>0,"Cannot spread null/empty array");let a=[];return e.forEach((e=>{a=a.concat(this.allSpreadValues(e))})),a}addSpreader(e){return t.notNullOrUndefined(a.trimToNull(e),"Cannot spread null/empty value"),e+this.separator+this.randomBucket}extractBucket(e){t.notNullOrUndefined(a.trimToNull(e),"Cannot extract from null or empty value");const n=e.length-this.spots;return(n<0||e.charAt(n)!==this.separator)&&s.throwFormattedErr("Cannot extract bucket, not created by this spreader (missing %s at location %d)",this.separator,n),e.substring(n)}removeBucket(e){t.notNullOrUndefined(a.trimToNull(e),"Cannot extract from null or empty value");const n=e.length-this.spots;return(n<0||e.charAt(n)!==this.separator)&&s.throwFormattedErr("Cannot remove bucket, not created by this spreader (missing %s at location %d)",this.separator,n),e.substring(0,n)}}class Ve{region;availabilityZone;ec2;ec2InstanceConnect;constructor(e="us-east-1",t="us-east-1a"){this.region=e,this.availabilityZone=t,this.ec2=new ae({region:e}),this.ec2InstanceConnect=new re({region:e})}get eC2Client(){return this.ec2}get eC2InstanceConnectClient(){return this.ec2InstanceConnect}async stopInstance(e,t=0){let a=!0;try{const a={InstanceIds:[e],DryRun:!1};n.info("About to stop 
instances : %j",a),await this.ec2.send(new se(a)),n.info("Stop instance command sent, waiting on shutdown");let s=await this.describeInstance(e);if(t>0){const a=(new Date).getTime();for(;s&&16!==s.State.Code&&(new Date).getTime()-a<t;)n.debug("Instance status is %j - waiting for 5 seconds (up to %s)",s.State,l.formatMsDuration(t)),await r.wait(5e3),s=await this.describeInstance(e)}}catch(t){n.error("Failed to stop instance %s : %s",e,t,t),a=!1}return a}async launchInstance(e,t=0){let a=!0;try{const a={InstanceIds:[e],DryRun:!1};n.info("About to start instance : %j",a),await this.ec2.send(new ne(a)),n.info("Start instance command sent, waiting on startup");let s=await this.describeInstance(e);if(t>0){const a=(new Date).getTime();for(;s&&16!==s.State.Code&&(new Date).getTime()-a<t;)n.debug("Instance status is %j - waiting for 5 seconds (up to %s)",s.State,l.formatMsDuration(t)),await r.wait(5e3),s=await this.describeInstance(e)}s&&s.PublicIpAddress&&(n.info("Instance address is %s",s.PublicIpAddress),n.info("SSH command : ssh -i path_to_pem_file ec2-user@%s",s.PublicIpAddress))}catch(t){n.error("Failed to start instance %s : %s",e,t,t),a=!1}return a}async describeInstance(e){const t=await this.listAllInstances([e]);return 1===t.length?t[0]:null}async listAllInstances(e=[]){let t=[];const a={NextToken:null};e&&e.length>0&&(a.InstanceIds=e);do{n.debug("Pulling instances... (%j)",a);const e=await this.ec2.send(new ie(a));e.Reservations.forEach((e=>{t=t.concat(e.Instances)})),a.NextToken=e.NextToken}while(a.NextToken);return n.debug("Finished pulling instances (found %d)",t.length),t}async sendPublicKeyToEc2Instance(e,t,a){const s=a||"ec2-user",n={InstanceId:e,AvailabilityZone:this.availabilityZone,InstanceOSUser:s,SSHPublicKey:t};return await this.ec2InstanceConnect.send(new oe(n))}}var _e;!function(e){e.InUse="InUse",e.MinimumAge="MinimumAge"}(_e||(_e={}));class Ge{ecr;static ECR_IMAGE_MINIMUM_AGE_DAYS=60;static ECR_REPOSITORY_MINIMUM_IMAGE_COUNT=600;constructor(e){this.ecr=e,t.notNullOrUndefined(e,"ecr")}async findAllUsedImages(e){const t=new Set;for(let a=0;a<e.length;a++){(await e[a].findUsedImageUris()).forEach((e=>t.add(e)))}return Array.from(t)}async performCleaning(e){n.info("Starting cleaning with options : %j",e),n.info("Finding in-use images");const t=(await this.findAllUsedImages(e.usedImageFinders||[])).map((e=>e.substring(e.lastIndexOf(":")+1)));n.info("Found %d images in use: %j",t.length,t);const a=await this.fetchRegistryId();n.info("Processing registry %s",a);const s=await this.fetchAllRepositoryDescriptors(a);n.info("Found repos : %j",s);const i=[];for(let a=0;a<s.length;a++){n.info("Processing repo %d of %d",a,s.length);try{const n=await this.cleanRepository(s[a],t,e);i.push(n)}catch(e){n.error("Failed to process repo : %j : %s",s[a],e,e)}}return{registryId:a,repositories:i,options:e}}async cleanRepository(e,t,s){n.info("Cleaning repository: %j",e);const i=await this.fetchAllImageDescriptors(e);n.info("Found images: %d : %j",i.length,i);const r=[],o=[];i.forEach((e=>{t.map((t=>e.imageTags.includes(t))).find((e=>e))?o.push({image:e,reason:_e.InUse}):r.push(e)})),n.info("Found %d to purge and %d to keep",r.length,o.length);const c=r.map((e=>e.imageSizeInBytes)).reduce(((e,t)=>e+t),0);n.info("Found %s total bytes to purge : %d",a.formatBytes(c),c);const l={registryId:e.registryId,repositoryName:e.repositoryName,imageIds:r.map((e=>({imageDigest:e.imageDigest,imageTag:e.imageTags[0]})))};if(n.info("Purge command : %j",l),s.dryRun)n.info("Dry run specd, stopping");else 
if(l.imageIds.length>0){n.info("Purging unused images");const e=await this.ecr.send(new ce(l));n.info("Response was : %j",e)}else n.info("Skipping - nothing to purge in this repo");return{repository:e,purged:r,retained:o,totalBytesRecovered:c}}async fetchAllImageDescriptors(e){t.notNullOrUndefined(e,"repo");let s=[];const n={registryId:e.registryId,repositoryName:e.repositoryName};let i=null;do{i=await this.ecr.send(new le(n)),s=s.concat(i.imageDetails),n.nextToken=i.nextToken}while(a.trimToNull(n.nextToken));return s}async fetchAllRepositoryDescriptors(e){let t=[];const s={registryId:e};let n=null;do{n=await this.ecr.send(new ue(s)),t=t.concat(n.repositories),s.nextToken=n.nextToken}while(a.trimToNull(s.nextToken));return t}async fetchAllRepositoryNames(e){return(await this.fetchAllRepositoryDescriptors(e)).map((e=>e.repositoryName))}async fetchRegistryId(){return(await this.ecr.send(new he({}))).registryId}}class We{batch;constructor(e){this.batch=e,t.notNullOrUndefined(e,"batch")}async findUsedImageUris(){const e=await this.listAllJobDefinitions(!1);n.info("Found %d jobs",e.length);const t=e.map((e=>e.containerProperties.image)).filter((e=>a.trimToNull(e)));return Array.from(new Set(t))}async listAllJobDefinitions(e){let t=[];const a={nextToken:null,status:e?void 0:"ACTIVE"};do{const e=await this.batch.send(new f(a));t=t.concat(e.jobDefinitions),a.nextToken=e.nextToken}while(a.nextToken);return t}}class qe{lambda;constructor(e){this.lambda=e,t.notNullOrUndefined(e,"lambda")}async findUsedImageUris(){const e=new Set,t=await this.fetchFunctions();n.info("Found %d functions",t.length);for(let a=0;a<t.length;a++)if("Image"===t[a].PackageType){const s=await this.lambda.send(new de({FunctionName:t[a].FunctionName}));"ECR"===s.Code.RepositoryType&&s.Code.ImageUri&&e.add(s.Code.ImageUri)}else n.info("Skipping zip packaged function: %s",t[a].FunctionName);return Array.from(e)}async fetchFunctions(){let e=[];const t={};let s=null;do{s=await this.lambda.send(new me(t)),e=e.concat(s.Functions),t.Marker=s.NextMarker}while(a.trimToNull(t.Marker));return e}}class Je{providers;constructor(e){this.providers=e,t.notNullOrUndefined(e),t.true(e.length>0)}async fetchConfig(e){n.silly("CascadeEnvironmentServiceProvider fetch for %s",e);let t=null;for(let a=0;a<this.providers.length&&!t;a++)try{t=await this.providers[a].fetchConfig(e)}catch(e){n.error("Provider %d failed - trying next : %s",a,e,e),t=null}return t}}class Qe{envVarName;constructor(e){this.envVarName=e,t.notNullOrUndefined(e)}async fetchConfig(){n.silly("EnvVarEnvironmentServiceProvider fetch for %s",this.envVarName);let e=null;const t=process?process.env:global||{},i=a.trimToNull(t[this.envVarName]);if(i)try{e=JSON.parse(i)}catch(e){throw n.error("Failed to read env - null or invalid JSON : %s : %s",e,i,e),e}else s.throwFormattedErr("Could not find env var with name : %s",this.envVarName);return e}}class ze{provider;cfg;readPromiseCache=new Map;static defaultEnvironmentServiceConfig(){return{maxRetries:3,backoffMultiplierMS:500}}constructor(e,a=ze.defaultEnvironmentServiceConfig()){this.provider=e,this.cfg=a,t.notNullOrUndefined(e),t.notNullOrUndefined(a)}async getConfig(e){return n.silly("EnvService:Request to read config %s",e),this.readPromiseCache.has(e)||(n.silly("EnvService: Nothing in cache - adding"),this.readPromiseCache.set(e,this.getConfigUncached(e))),this.readPromiseCache.get(e)}async getConfigUncached(e){let t=1,a=null;for(;!a&&t<this.cfg.maxRetries;){t++,n.silly("Attempting fetch of %s",e);try{a=await 
this.provider.fetchConfig(e)}catch(a){const s=t*this.cfg.backoffMultiplierMS;n.info("Error attempting to fetch config %s (try %d of %d, waiting %s MS): %s",e,t,this.cfg.maxRetries,s,a,a),await r.wait(s)}}return a||s.throwFormattedErr("Was unable to fetch config %s even after %d retries",e,this.cfg.maxRetries),a}}class $e{value;constructor(e){this.value=e,t.notNullOrUndefined(e)}static fromRecord(e){const t=new Map;return Object.keys(e).forEach((a=>{t.set(a,e[a])})),new $e(t)}async fetchConfig(e){n.silly("FixedEnvironmentServiceProvider fetch for %s",e);return this.value.get(e)}}class He{cfg;ratchet;constructor(e){this.cfg=e,t.notNullOrUndefined(e),t.notNullOrUndefined(e.bucketName),t.notNullOrUndefined(e.region),t.true(!!e.s3Override||!!e.region,"You must set either region or S3Override");const a=e.s3Override||new A({region:e.region});this.ratchet=new Pe(a,e.bucketName)}async fetchConfig(e){const t=a.trimToEmpty(this.cfg.pathPrefix)+e+a.trimToEmpty(this.cfg.pathSuffix);n.silly("S3EnvironmentServiceProvider:Request to read config from : %s / %s",this.cfg.bucketName,t);const s=new i,r=await this.ratchet.fetchCacheFileAsObject(t);return s.log(),r}}class Ye{region;ssmEncrypted;ssm;constructor(e="us-east-1",a=!0){this.region=e,this.ssmEncrypted=a,t.notNullOrUndefined(e),t.notNullOrUndefined(a),this.ssm=new fe({region:this.region})}async fetchConfig(e){n.silly("SsmEnvironmentServiceProvider fetch for %s",e);const t={Name:e,WithDecryption:this.ssmEncrypted};let i=null,o=null;try{const e=await this.ssm.send(new ge(t));o=a.trimToNull(e?.Parameter?.Value)}catch(t){if(t instanceof pe){const t=n.warn("AWS could not find parameter %s - are you using the right AWS key?",e);throw new Error(t)}if(!((s.safeStringifyErr(t)||"").toLowerCase().indexOf("throttl")>-1))throw n.error("Final environment fetch error (cannot retry) : %s",t,t),t;n.warn("Throttled while trying to read parameters - waiting 1 second before allowing retry"),await r.wait(1e3)}if(o)try{i=JSON.parse(o)}catch(e){throw n.error("Failed to read env - null or invalid JSON : %s : %s",e,o,e),e}else s.throwFormattedErr("Could not find system parameter with name : %s in this account",e);return i}}class Ze{tableName;dynamoRatchet;constructor(e,t){this.tableName=e,this.dynamoRatchet=t}async checkCode(e,t,a){const s={code:e,context:t},n=await this.dynamoRatchet.simpleGet(this.tableName,s),i=n&&n.expiresEpochMS>Date.now();return i&&a&&await this.dynamoRatchet.simpleDelete(this.tableName,s),i}async storeCode(e){const t=await this.dynamoRatchet.simplePut(this.tableName,e);return t&&t.ConsumedCapacity.CapacityUnits>0}async createTableIfMissing(e){return null}}class Xe{provider;constructor(e){this.provider=e}static generateCode(e){t.notNullOrUndefined(e,"params"),t.notNullOrUndefined(e.context,"params.context"),t.notNullOrUndefined(e.length,"params.length"),t.notNullOrUndefined(e.alphabet,"params.alphabet");let a="";for(let t=0;t<e.length;t++)a+=e.alphabet.charAt(Math.floor(e.alphabet.length*Math.random()));return{code:a,context:e.context,tags:e.tags,expiresEpochMS:Date.now()+1e3*e.timeToLiveSeconds}}async createNewCode(e){const t=Xe.generateCode(e);return await this.provider.storeCode(t)?t:null}async checkCode(e,t,s){return await this.provider.checkCode(a.trimToEmpty(e),a.trimToEmpty(t),s)}}class 
et{s3CacheRatchet;keyName;constructor(e,a){this.s3CacheRatchet=e,this.keyName=a,t.notNullOrUndefined(e,"s3CacheRatchet"),t.notNullUndefinedOrOnlyWhitespaceString(e.getDefaultBucket(),"s3CacheRatchet.defaultBucket"),t.notNullUndefinedOrOnlyWhitespaceString(a,"keyName")}async fetchFile(){return await this.s3CacheRatchet.fetchCacheFileAsObject(this.keyName)||{data:[],lastModifiedEpochMS:Date.now()}}async updateFile(e){const t={data:e||[],lastModifiedEpochMS:Date.now()};t.data=t.data.filter((e=>e.expiresEpochMS>Date.now())),n.info("Updating code file to %s codes",t.data.length);return await this.s3CacheRatchet.writeObjectToCacheFile(this.keyName,t)}async checkCode(e,t,a){const s=await this.fetchFile(),i=s.data.find((a=>a?.code?.toUpperCase()===e?.toUpperCase()&&a?.context?.toUpperCase()===t?.toUpperCase()));if(i&&(a||i.expiresEpochMS<Date.now())){n.info("Stripping used/expired code from the database");const e=s.data.filter((e=>e!=i));await this.updateFile(e)}return!!i&&i.expiresEpochMS>Date.now()}async storeCode(e){const t=await this.fetchFile();t.data.push(e);return!!await this.updateFile(t.data)}}class tt{constructor(){}static applySetProfileEnvironmentalVariable(e){process.env?a.trimToNull(e)?process.env.AWS_PROFILE=e:s.throwFormattedErr("Cannot set profile to null/empty string"):s.throwFormattedErr("Cannot set profile - not in a node environment - process missing")}}class at{static isValidCronEvent(e){return e&&"aws.events"==e.source&&e.resources&&e.resources.length>0}static isValidSnsEvent(e){return e&&e.Records&&e.Records.length>0&&"aws:sns"==e.Records[0].EventSource}static isValidSqsEvent(e){return e&&e.Records&&e.Records.length>0&&"aws:sqs"==e.Records[0].eventSource}static isValidDynamoDBEvent(e){return e&&e.Records&&e.Records.length>0&&"aws:dynamodb"==e.Records[0].eventSource}static isValidS3Event(e){return e&&e.Records&&e.Records.length>0&&"aws:s3"==e.Records[0].eventSource}static isValidApiGatewayV2WithRequestContextEvent(e){return e&&e.rawPath&&e.requestContext&&e.routeKey}static isValidApiGatewayEvent(e){return e&&e.httpMethod&&e.path&&e.requestContext}static isValidApiGatewayAuthorizerEvent(e){return e&&e.authorizationToken&&e.methodArn}static isSingleCronEvent(e){return this.isValidCronEvent(e)&&at.isSingleEntryEvent(e,"resources")}static isSingleSnsEvent(e){return this.isValidSnsEvent(e)&&at.isSingleEntryEvent(e)}static isSingleDynamoDBEvent(e){return this.isValidDynamoDBEvent(e)&&at.isSingleEntryEvent(e)}static isSingleS3Event(e){return this.isValidS3Event(e)&&at.isSingleEntryEvent(e)}static isSingleEntryEvent(e,t="Records"){return e&&e[t]&&e[t]instanceof Array&&1===e[t].length}}class st{static isValidCronEvent(e){return at.isValidCronEvent(e)}static isValidSqsEvent(e){return at.isValidSqsEvent(e)}static isValidSnsEvent(e){return at.isValidSnsEvent(e)}static isValidDynamoDBEvent(e){return at.isValidDynamoDBEvent(e)}static isValidS3Event(e){return at.isValidS3Event(e)}static isValidApiGatewayV2WithRequestContextEvent(e){return at.isValidApiGatewayV2WithRequestContextEvent(e)}static isValidApiGatewayEvent(e){return at.isValidApiGatewayEvent(e)}static isValidApiGatewayAuthorizerEvent(e){return at.isValidApiGatewayAuthorizerEvent(e)}}class nt{route53;hostedZoneId;constructor(e,t){if(this.route53=e,this.hostedZoneId=t,!this.route53)throw"route53 may not be null"}get route53Client(){return this.route53}async changeCnameRecordTarget(e,t,a=this.hostedZoneId,s=600){n.info("Updating %s to point to %s",e,t);try{const 
i={ChangeBatch:{Changes:[{Action:"UPSERT",ResourceRecordSet:{Name:e,ResourceRecords:[{Value:t}],TTL:s,Type:"CNAME"}}]},HostedZoneId:a},r=await this.route53.send(new ye(i));n.debug("Updated domain result: %j",r);const o={Id:r.ChangeInfo.Id},c=await we({client:this.route53,maxWaitTime:300},o);if(n.debug("Wait responsed: %j",c),c.state===be.SUCCESS)return n.info("Updated %s to point to %s",e,a),!0}catch(a){n.warn("Error update CName for %s with value %s: %j",e,t,a)}return n.info("Cannot update %s to point to %s",e,t),!1}}class it{dynamo;tableName;constructor(e,a){this.dynamo=e,this.tableName=a,t.notNullOrUndefined(this.dynamo),t.notNullOrUndefined(this.tableName)}async readParameter(e,t){n.silly("Reading %s / %s from underlying db",e,t);const a={groupId:e,paramKey:t};return await this.dynamo.simpleGet(this.tableName,a)}async readAllParametersForGroup(e){const t={TableName:this.tableName,KeyConditionExpression:"groupId = :groupId",ExpressionAttributeValues:{":groupId":e}};return await this.dynamo.fullyExecuteQuery(t)}async writeParameter(e){return!!await this.dynamo.simplePut(this.tableName,e)}}class rt{wrapped;options={globalTTL:1,separator:".",prefix:"RuntimeEnv-",suffix:""};constructor(e,a){this.wrapped=e,t.notNullOrUndefined(this.wrapped,"wrapped"),t.notNullOrUndefined(global?.process?.env,'"process" not found - this only runs in Node, not the browser'),a&&(this.options=a),t.notNullOrUndefined(this.options.globalTTL,"this.options.globalTTL"),t.notNullOrUndefined(this.options.separator,"this.options.separator"),t.true(this.options.globalTTL>0,"this.options.globalTTL must be larger than 0")}generateName(e,t){return a.trimToEmpty(this.options.prefix)+e+a.trimToEmpty(this.options.separator)+t+a.trimToEmpty(this.options.suffix)}async readParameter(e,t){const n=a.trimToNull(process.env[this.generateName(e,t)]);n&&!a.canParseAsJson(n)&&s.throwFormattedErr("Cannot parse ENV override (%s / %s) as JSON - did you forget the quotes on a string?",e,t);return n?{groupId:e,paramKey:t,paramValue:n,ttlSeconds:this.options.globalTTL}:await this.wrapped.readParameter(e,t)}async readAllParametersForGroup(e){return this.wrapped.readAllParametersForGroup(e)}async writeParameter(e){return this.wrapped.writeParameter(e)}}class ot{data;constructor(e=Promise.resolve({})){this.data=e}async readParameter(e,t){n.silly("Reading %s / %s from underlying db",e,t);return(await this.data)[e+"::"+t]}async readAllParametersForGroup(e){const t=await this.data,a=[];return Object.keys(t).forEach((s=>{s.startsWith(e)&&a.push(t[s])})),a}async writeParameter(e){return(await this.data)[e.groupId+"::"+e.paramKey]=e,!0}}class ct{provider;cache=new Map;constructor(e){this.provider=e,t.notNullOrUndefined(this.provider)}async fetchParameter(e,t,a=null,s=!1){n.debug("Reading parameter %s / %s / Force : %s",e,t,s);const i=this.cache.get(ct.toCacheStoreKey(e,t));let r=null;const o=(new Date).getTime();if(!s&&i){const a=i.ttlSeconds?o-1e3*i.ttlSeconds:0;i.storedEpochMS>a&&(n.silly("Fetched %s / %s from cache",e,t),r=JSON.parse(i.paramValue))}if(!r){const a=await this.readUnderlyingEntry(e,t);a&&(this.addToCache(a),r=JSON.parse(a.paramValue))}return r=r||a,r}async fetchAllParametersForGroup(e){const t=await this.readUnderlyingEntries(e),a=new Map;return t.forEach((e=>{a.set(e.paramKey,JSON.parse(e.paramValue)),this.addToCache(e)})),a}async readUnderlyingEntry(e,t){return this.provider.readParameter(e,t)}async readUnderlyingEntries(e){return this.provider.readAllParametersForGroup(e)}async storeParameter(e,t,a,s){const 
n={groupId:e,paramKey:t,paramValue:JSON.stringify(a),ttlSeconds:s};return await this.provider.writeParameter(n),this.provider.readParameter(e,t)}static toCacheStoreKey(e,t){return e+":::"+t}addToCache(e){if(e){const t=(new Date).getTime(),a=Object.assign({storedEpochMS:t},e);this.cache.set(ct.toCacheStoreKey(e.groupId,e.paramKey),a)}}clearCache(){n.debug("Clearing runtime parameter cache"),this.cache=new Map}}class lt{config;constructor(e){t.notNullOrUndefined(e,"config"),this.config=e,this.config.maxNumThreads||(this.config.maxNumThreads=15),this.config.maxRetries||(this.config.maxRetries=5)}updateSrcPrefix(e){this.config.srcPrefix=e}updateDstPrefix(e){this.config.dstPrefix=e}async copyObject(e,t,a=!1){const s=e.replace(this.config.srcPrefix,this.config.dstPrefix);let i=!1,r=0;for(;!i&&r<this.config.maxRetries;){n.debug(`${r>0?`Retry ${r} `:""}${a?"Express":"Slow"} copying\n [${[this.config.srcBucket,e].join("/")} ---\x3e ${[this.config.dstBucket,s].join("/")}]`);try{if(a){const t={CopySource:encodeURIComponent([this.config.srcBucket,e].join("/")),Bucket:this.config.dstBucket,Key:s,MetadataDirective:"COPY"};await this.config.dstS3.send(new j(t))}else{const a=await this.config.srcS3.send(new D({Bucket:this.config.srcBucket,Key:e})),i={Bucket:this.config.dstBucket,Key:s,Body:a.Body,ContentLength:t},r=new B({client:this.config.dstS3,params:i,tags:[],queueSize:4,partSize:5242880,leavePartsOnError:!1});r.on("httpUploadProgress",(e=>{n.debug("Uploading : %s",e)})),await r.done()}i=!0}catch(t){n.warn(`Can't ${a?"express":"slow"} copy\n [${[this.config.srcBucket,e].join("/")} ---\x3e ${[this.config.dstBucket,s].join("/")}]: %j`,t),r++}}n.debug(`Finished ${a?"express":"slow"} copying\n [${[this.config.srcBucket,e].join("/")} ---\x3e ${[this.config.dstBucket,s].join("/")}]`)}async listObjects(e,t,a){n.info(`Scanning bucket [${[e,t].join("/")}]`);const s={Bucket:e,Prefix:t};let i=!0;const r={};for(;i;){const e=await a.send(new M(s));i=e.IsTruncated,e.Contents.forEach((e=>{r[e.Key]={Key:e.Key,LastModified:e.LastModified,ETag:e.ETag,Size:e.Size}})),i&&(s.ContinuationToken=e.NextContinuationToken)}return r}async startSyncing(){n.info(`Syncing [${this.config.srcBucket}/${this.config.srcPrefix}\n ---\x3e ${this.config.dstBucket}/${this.config.dstPrefix}]`);const e=async e=>{await this.copyObject(e.Key,e.Size)};let t=await this.compareSrcAndDst();return(t.needCopy.length>0||t.diff.length>0)&&(await r.runBoundedParallelSingleParam(e,t.needCopy,this,this.config.maxNumThreads),await r.runBoundedParallelSingleParam(e,t.diff,this,this.config.maxNumThreads),n.info("Verifying..."),t=await this.compareSrcAndDst(),n.debug("Compare result %j",t)),0===t.needCopy.length&&0===t.diff.length}async compareSrcAndDst(){const e=this.listObjects(this.config.srcBucket,this.config.srcPrefix,this.config.srcS3),t=this.listObjects(this.config.dstBucket,this.config.dstPrefix,this.config.dstS3),a=await e,s=await t,n={needCopy:[],existed:[],diff:[]};return await r.runBoundedParallelSingleParam((e=>{const t=a[e],i=e.replace(this.config.srcPrefix,this.config.dstPrefix),r=s.hasOwnProperty(i)?s[i]:void 0;r?t.Size===r.Size&&t.LastModified.getTime()<=r.LastModified.getTime()?n.existed.push(t):n.diff.push(t):n.needCopy.push(t)}),Object.keys(a),this,this.config.maxNumThreads),n}}class ut{static checkS3UrlForValidity(e){let t=!1;return e&&(t=e.startsWith("s3://")&&e.trim().length>5),t}static extractBucketFromURL(e){t.true(ut.checkS3UrlForValidity(e),"invalid s3 url");const a=e.indexOf("/",5);return a>0?e.substring(5,a):e.substring(5)}static 
extractKeyFromURL(e){t.true(ut.checkS3UrlForValidity(e),"invalid s3 url");const a=e.indexOf("/",5);return a>0?e.substring(a+1):null}}class ht{_ses;_archiveRatchet;archivePrefix;constructor(e,a,s){if(this._ses=e,this._archiveRatchet=a,this.archivePrefix=s,t.notNullOrUndefined(this._ses),a&&!a.getDefaultBucket())throw new Error("If archiveRatchet specified, must set a default bucket")}async archiveEmail(t,s){let i=null;if(this._archiveRatchet){n.debug("Archiving outbound email to : %j",t.destinationAddresses);let s=a.trimToEmpty(this.archivePrefix);s.endsWith("/")||(s+="/");const r=e.utc();s+="year="+r.toFormat("yyyy")+"/month="+r.toFormat("MM")+"/day="+r.toFormat("dd")+"/hour="+r.toFormat("HH")+"/"+r.toFormat("mm_ss__SSS"),s+=".json";try{i=await this._archiveRatchet.writeObjectToCacheFile(s,t)}catch(e){n.warn("Failed to archive email %s %j : %s",s,t,e)}}return i}get sesClient(){return this._ses}get archiveRatchet(){return this._archiveRatchet}async sendEmail(e){t.notNullOrUndefined(e,"RTS must be defined"),t.notNullOrUndefined(e.destinationAddresses,"Destination addresses must be defined");let a=null;const s=h.convertResolvedReadyToSendEmailToRaw(e),n={RawMessage:{Data:(new TextEncoder).encode(s)}};return a=await this._ses.send(new Te(n)),a}}class dt{sns;topicArn;constructor(e=new Ee({region:"us-east-1"}),a){this.sns=e,this.topicArn=a,t.notNullOrUndefined(this.sns,"sns"),t.notNullOrUndefined(this.topicArn,"topicArn")}get snsClient(){return this.sns}async sendMessage(e,t=!1){let a=null;try{const t=e||"NO-MESSAGE-PROVIDED",s="string"==typeof t?t:JSON.stringify(t),i={TopicArn:this.topicArn,Message:s};n.debug("Sending via SNS : %j",i),a=await this.sns.send(new Ne(i))}catch(a){if(!t)throw a;n.error("Failed to fire SNS notification : %j : %s",e,a)}return a}async conditionallySendMessage(e,t,a=!1){let s=null;return t?s=await this.sendMessage(e,a):n.info("Not sending message, condition was false : %j",e),s}}class mt{ratchet;tableName;constructor(e,s){this.ratchet=e,this.tableName=s,t.notNullOrUndefined(e,"ratchet"),t.notNullOrUndefined(a.trimToNull(this.tableName),"tableName")}async acquireLock(e,t=30){let a=!1;if(e&&t){const s=Math.floor((new Date).getTime()/1e3),i={Item:{lockingKey:e,timestamp:s,expires:s+t},ReturnConsumedCapacity:te.TOTAL,TableName:this.tableName,ConditionExpression:"attribute_not_exists(lockingKey)"};try{await this.ratchet.getDDB().send(new W(i));a=!0}catch(t){t instanceof z&&n.silly("Unable to acquire lock on %s",e)}}return a}async releaseLock(e){if(a.trimToNull(e))try{const t=await this.ratchet.simpleDelete(this.tableName,{lockingKey:e});n.silly("Released lock %s : %s",e,t)}catch(t){n.warn("Failed to release lock key : %s : %s",e,t,t)}}async clearExpiredSyncLocks(){const e=Math.floor((new Date).getTime()/1e3),t={TableName:this.tableName,FilterExpression:"expires < :now",ExpressionAttributeValues:{":now":e}},a=(await this.ratchet.fullyExecuteScan(t)).map((e=>({lockingKey:e.lockingKey})));return await this.ratchet.deleteAllInBatches(this.tableName,a,25)}}class ft{_locks=new Map;constructor(){}async acquireLock(e,t=30){let s=!1;if(a.trimToNull(e)){const a=Date.now(),n=this._locks.get(e);(!n||n<a)&&(this._locks.set(e,a+1e3*t),s=!0)}return s}async releaseLock(e){a.trimToNull(e)&&this._locks.delete(e)}async clearExpiredSyncLocks(){const e=[],t=Date.now();return this._locks.forEach(((a,s)=>{a<t&&e.push(s)})),e.forEach((e=>{this._locks.delete(e)})),e.length}}export{Se as AwsBatchBackgroundProcessor,Ce as AwsBatchRatchet,We as AwsBatchUsedImageFinder,tt as AwsCredentialsRatchet,Je as 
CascadeEnvironmentServiceProvider,De as CloudWatchLogGroupRatchet,Fe as CloudWatchLogsRatchet,Ie as CloudWatchMetricsRatchet,je as Daemon,Ue as DaemonUtil,xe as DynamoDbStorageProvider,mt as DynamoDbSyncLock,Ze as DynamoExpiringCodeProvider,Be as DynamoRatchet,it as DynamoRuntimeParameterProvider,Ke as DynamoTableRatchet,Ve as Ec2Ratchet,Ge as EcrUnusedImageCleaner,Qe as EnvVarEnvironmentServiceProvider,ze as EnvironmentService,Xe as ExpiringCodeRatchet,$e as FixedEnvironmentServiceProvider,rt as GlobalVariableOverrideRuntimeParameterProvider,Le as HashSpreader,at as LambdaEventDetector,st as LambdaEventTypeGuards,qe as LambdaUsedImageFinder,ot as MemoryRuntimeParameterProvider,ft as MemorySyncLock,Re as PrototypeDao,ve as RatchetAwsInfo,_e as RetainedImageReason,nt as Route53Ratchet,ct as RuntimeParameterRatchet,Pe as S3CacheRatchet,He as S3EnvironmentServiceProvider,et as S3ExpiringCodeProvider,lt as S3LocationSyncRatchet,Ae as S3PrototypeDaoProvider,ut as S3Ratchet,Me as S3SimpleDao,ke as S3StorageProvider,ht as SesMailSendingProvider,Oe as SimpleCache,dt as SnsRatchet,Ye as SsmEnvironmentServiceProvider};
 //# sourceMappingURL=index.mjs.map
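An aside for readers squinting at the minified bundle above: the DynamoRatchet batch helpers (writeAllInBatches and deleteAllInBatches) chunk their input, send each chunk via BatchWriteCommand, and re-send any UnprocessedItems with an exponential backoff of 2^n seconds for up to six attempts, treating a ProvisionedThroughputExceededException as if the whole chunk were unprocessed. Below is a de-minified TypeScript sketch of that retry loop, reconstructed by eye from the bundle; the function name and the wait() helper are illustrative stand-ins, not the package's actual internals.

import { ProvisionedThroughputExceededException } from '@aws-sdk/client-dynamodb';
import { BatchWriteCommand, DynamoDBDocumentClient } from '@aws-sdk/lib-dynamodb';

// Stand-in for PromiseRatchet.wait from @bitblit/ratchet-common
const wait = (ms: number): Promise<void> => new Promise((res) => setTimeout(res, ms));

// One chunk of WriteRequest-shaped entries, e.g. { PutRequest: { Item } } or { DeleteRequest: { Key } }
async function writeChunkWithBackoff(ddb: DynamoDBDocumentClient, tableName: string, chunk: any[]): Promise<boolean> {
  let pending = chunk;
  for (let attempt = 1; attempt < 7; attempt++) {
    let unprocessed: any[] | undefined;
    try {
      const out = await ddb.send(new BatchWriteCommand({ RequestItems: { [tableName]: pending } }));
      unprocessed = out.UnprocessedItems?.[tableName];
    } catch (err) {
      if (!(err instanceof ProvisionedThroughputExceededException)) throw err;
      unprocessed = pending; // throttled: treat the whole chunk as unprocessed and retry
    }
    if (!unprocessed || unprocessed.length === 0) {
      return true; // everything landed
    }
    await wait(1000 * Math.pow(2, attempt)); // 2, 4, 8 ... seconds, as in the bundle
    pending = unprocessed;
  }
  return false; // items still unprocessed after six attempts; the ratchet logs these and moves on
}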
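Likewise, DynamoRatchet.atomicCounter is a one-round-trip increment: a single UpdateCommand both bumps the field and returns the new value, so concurrent writers cannot lose updates. A sketch of the same call as a standalone function follows; the wrapper signature is illustrative, and the real method also routes the result through NumberRatchet.safeNumber.

import { DynamoDBDocumentClient, UpdateCommand } from '@aws-sdk/lib-dynamodb';

// Single-round-trip increment: DynamoDB applies the SET server-side and
// returns only the updated attribute, so two concurrent callers cannot race.
async function atomicCounter(
  ddb: DynamoDBDocumentClient,
  tableName: string,
  key: Record<string, any>,
  counterFieldName: string,
  increment = 1,
): Promise<number> {
  const out = await ddb.send(
    new UpdateCommand({
      TableName: tableName,
      Key: key,
      UpdateExpression: 'SET #counterFieldName = #counterFieldName + :inc',
      ExpressionAttributeNames: { '#counterFieldName': counterFieldName },
      ExpressionAttributeValues: { ':inc': increment },
      ReturnValues: 'UPDATED_NEW',
    }),
  );
  return Number(out.Attributes?.[counterFieldName]);
}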
package/lib/types.d.ts
CHANGED
@@ -736,6 +736,7 @@ declare class S3CacheRatchet implements S3CacheRatchetLike {
     synchronize(srcPrefix: string, targetPrefix: string, targetRatchet?: S3CacheRatchetLike, recurseSubFolders?: boolean): Promise<string[]>;
     fetchMetaForCacheFile(key: string, bucket?: string): Promise<HeadObjectCommandOutput>;
     cacheFileAgeInSeconds(key: string, bucket?: string): Promise<number>;
+    renameFile(srcKey: string, dstKey: string, srcBucket?: string, dstBucket?: string): Promise<CopyObjectCommandOutput>;
     copyFile(srcKey: string, dstKey: string, srcBucket?: string, dstBucket?: string): Promise<CopyObjectCommandOutput>;
     quietCopyFile(srcKey: string, dstKey: string, srcBucket?: string, dstBucket?: string): Promise<boolean>;
     preSignedDownloadUrlForCacheFile(key: string, expirationSeconds?: number, bucket?: string): Promise<string>;
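The sole API-surface change in this release is the new renameFile declaration on S3CacheRatchet, inserted just above copyFile with an identical signature and return type (CopyObjectCommandOutput). The return type suggests a copy-then-delete under the hood, though the implementation is buried in the minified bundle, so treat that as an assumption. A hedged usage sketch; the client setup, bucket, and keys are invented for illustration:

import { S3Client } from '@aws-sdk/client-s3';
import { S3CacheRatchet } from '@bitblit/ratchet-aws';

// Bucket and keys here are invented for illustration.
const ratchet = new S3CacheRatchet(new S3Client({ region: 'us-east-1' }), 'my-cache-bucket');

async function demo(): Promise<void> {
  // Same shape as copyFile: returns the CopyObjectCommandOutput for the copy leg.
  const out = await ratchet.renameFile('incoming/report.json', 'processed/report.json');
  console.log('Renamed; copy ETag was %s', out.CopyObjectResult?.ETag);
}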
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@bitblit/ratchet-aws",
-  "version": "4.0.414-alpha",
+  "version": "4.0.416-alpha",
   "description": "Common tools for use with AWS browser and node",
   "sideEffects": false,
   "type": "module",
@@ -52,7 +52,7 @@
   },
   "license": "Apache-2.0",
   "dependencies": {
-    "@bitblit/ratchet-common": "4.0.414-alpha"
+    "@bitblit/ratchet-common": "4.0.416-alpha"
   },
   "optionalDependencies": {
     "@aws-sdk/client-athena": "3.600.0",
@@ -100,7 +100,7 @@
     "@aws-sdk/lib-storage": "^3.600.0",
     "@aws-sdk/s3-request-presigner": "^3.600.0",
     "@aws-sdk/types": "^3.598.0",
-    "@bitblit/ratchet-common": "4.0.414-alpha",
+    "@bitblit/ratchet-common": "4.0.416-alpha",
     "@smithy/abort-controller": "^3.1.0",
     "@smithy/smithy-client": "^3.1.4",
     "@smithy/util-waiter": "^3.1.0"
@@ -177,7 +177,7 @@
     }
   },
   "devDependencies": {
-    "@bitblit/ratchet-node-only": "4.0.414-alpha",
+    "@bitblit/ratchet-node-only": "4.0.416-alpha",
     "@types/aws-lambda": "8.10.140",
     "aws-sdk-client-mock": "4.0.1"
   }