@bitblit/ratchet-aws-node-only 4.0.413-alpha → 4.0.415-alpha
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/index.mjs +1 -1
- package/lib/types.d.ts +0 -1
- package/package.json +3 -3
package/lib/index.mjs
CHANGED
@@ -1,2 +1,2 @@
- line 1 (removed, 4.0.413-alpha):
import e,{readFileSync as t}from"fs";import n from"path";import{RequireRatchet as i,StringRatchet as s,Logger as a,EsmRatchet as o,PromiseRatchet as r,StopWatch as c,FileTransferResult as l,BackupResult as u}from"@bitblit/ratchet-common";import{CsvRatchet as h,AbstractRatchetCliHandler as d,MultiStream as f}from"@bitblit/ratchet-node-only";import{S3Ratchet as m,Ec2Ratchet as g}from"@bitblit/ratchet-aws";import{ListNamedQueriesCommand as p,GetNamedQueryCommand as y,StartQueryExecutionCommand as S,GetQueryExecutionCommand as T}from"@aws-sdk/client-athena";import{GetObjectCommand as O,S3Client as N}from"@aws-sdk/client-s3";import E from"tmp";import w from"readline";import F from"walk";import b from"mime-types";import{Upload as M}from"@aws-sdk/lib-storage";import{spawnSync as C}from"child_process";import L from"os";import _ from"unzipper";import{DateTime as A}from"luxon";import{simpleParser as k}from"mailparser";import I from"crypto";class U{athena;athenaTableName;constructor(e,t){this.athena=e,this.athenaTableName=t,i.notNullOrUndefined(e,"athena"),i.notNullOrUndefined(s.trimToNull(t),"athenaTableName")}async updatePartitions(e,t,n=(new Date).getTime()-864e5,s=(new Date).getTime()){i.true(m.checkS3UrlForValidity(e),"root path not valid"),i.notNullOrUndefined(t,"s3"),a.info("Updating partitions for %s from %s",this.athenaTableName,e),m.extractBucketFromURL(e),m.extractKeyFromURL(e);let o=n;const r=[];for(;o<s;){const t=new Date(o).toISOString().substring(0,10);a.info("d:%s",t);const n=t.split("-");r.push("PARTITION (date_utc_partition='"+t+"') LOCATION '"+e+"/"+n[0]+"/"+n[1]+"/"+n[2]+"'"),o+=864e5}if(r.length>0){const e="ALTER TABLE "+this.athenaTableName+" ADD IF NOT EXISTS \n"+r.join("\n");await this.athena.runQueryToObjects(e)}else a.warn("Not updating partitions - no time between time clauses");return r}async createTable(e,s=!1){i.true(m.checkS3UrlForValidity(e),"root path not valid");let r=!1;if(a.info("Creating ALB table %s",this.athenaTableName),s){a.info("Replace if present specified, removed old table");try{await this.athena.runQueryToObjects("drop table "+this.athenaTableName)}catch(e){a.info("Drop error : %j",e)}}let c=t(n.join(o.fetchDirName(import.meta.url),"../static/albAthenaTableCreate.txt")).toString();c=c.split("{{TABLE NAME}}").join(this.athenaTableName),c=c.split("{{ALB_LOG_ROOT}}").join(e),a.info("Creating table with %s",c);try{await this.athena.runQueryToObjects(c),r=!0}catch(e){a.error("Error creating table : %s",e)}return r}static async readLogObjectsFromCsvStream(e){return h.streamParse(e,(e=>e))}static async readLogObjectsFromFile(e){return h.fileParse(e,(e=>e))}async fetchAlbLogRecords(e){const t=await this.fetchAlbLogRecordsToFile(e);return U.readLogObjectsFromFile(t)}async fetchAlbLogRecordsToFile(e,t=null){a.info("Querying %s : %j",this.athenaTableName,e);let n="select * from "+this.athenaTableName+" where 1=1 ";e.startTimeEpochMS&&(e.startTimeEpochMS&&(n+=" AND time >= '"+new Date(e.startTimeEpochMS).toISOString()+"'",n+=" AND date_utc_partition >='"+new Date(e.startTimeEpochMS).toISOString().substring(0,10)+"'"),e.endTimeEpochMS&&(n+=" AND time < '"+new Date(e.endTimeEpochMS).toISOString()+"'",n+=" AND date_utc_partition <='"+new Date(e.endTimeEpochMS).toISOString().substring(0,10)+"'"),e.requestUrlFilter&&(n+=" AND request_url LIKE '"+e.requestUrlFilter+"'"),e.limit&&(n+=" LIMIT "+e.limit));return await this.athena.runQueryToFile(n,null,t)}static CREATE_TABLE_STATEMENT="CREATE EXTERNAL TABLE IF NOT EXISTS `{{TABLE NAME}}`(\n `type` string COMMENT '',\n `time` 
string COMMENT '',\n `elb` string COMMENT '',\n `client_ip` string COMMENT '',\n `client_port` int COMMENT '',\n `target_ip` string COMMENT '',\n `target_port` int COMMENT '',\n `request_processing_time` double COMMENT '',\n `target_processing_time` double COMMENT '',\n `response_processing_time` double COMMENT '',\n `elb_status_code` string COMMENT '',\n `target_status_code` string COMMENT '',\n `received_bytes` bigint COMMENT '',\n `sent_bytes` bigint COMMENT '',\n `request_verb` string COMMENT '',\n `request_url` string COMMENT '',\n `request_proto` string COMMENT '',\n `user_agent` string COMMENT '',\n `ssl_cipher` string COMMENT '',\n `ssl_protocol` string COMMENT '',\n `target_group_arn` string COMMENT '',\n `trace_id` string COMMENT '',\n `domain_name` string COMMENT '',\n `chosen_cert_arn` string COMMENT '',\n `matched_rule_priority` string COMMENT '',\n `request_creation_time` string COMMENT '',\n `actions_executed` string COMMENT '',\n `redirect_url` string COMMENT '',\n `lambda_error_reason` string COMMENT '',\n `target_port_list` string COMMENT '',\n `target_status_code_list` string COMMENT '',\n `new_field` string COMMENT '')\nPARTITIONED BY (\n `date_utc_partition` string\n)\nROW FORMAT SERDE\n 'org.apache.hadoop.hive.serde2.RegexSerDe'\nWITH SERDEPROPERTIES (\n 'input.regex'='([^ ]*) ([^ ]*) ([^ ]*) ([^ ]*):([0-9]*) ([^ ]*)[:-]([0-9]*) ([-.0-9]*) ([-.0-9]*) ([-.0-9]*) (|[-0-9]*) (-|[-0-9]*) ([-0-9]*) ([-0-9]*) \\\"([^ ]*) ([^ ]*) (- |[^ ]*)\\\" \\\"([^\\\"]*)\\\" ([A-Z0-9-]+) ([A-Za-z0-9.-]*) ([^ ]*) \\\"([^\\\"]*)\\\" \\\"([^\\\"]*)\\\" \\\"([^\\\"]*)\\\" ([-.0-9]*) ([^ ]*) \\\"([^\\\"]*)\\\" \\\"([^\\\"]*)\\\" \\\"([^ ]*)\\\" \\\"([^s]+)\\\" \\\"([^s]+)\\\"(.*)')\nSTORED AS INPUTFORMAT\n 'org.apache.hadoop.mapred.TextInputFormat'\nOUTPUTFORMAT\n 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'\nLOCATION\n '{{ALB_LOG_ROOT}}'\n"}class R{athena;s3;outputLocation;constructor(e,t,n){this.athena=e,this.s3=t,this.outputLocation=n,i.notNullOrUndefined(e),i.notNullOrUndefined(t),i.notNullOrUndefined(n),i.true(n.startsWith("s3://"))}static athenaRowsToObject(e){const t=e[0].Data.map((e=>e.VarCharValue));return e.slice(1).map((e=>{const n={};for(let i=0;i<e.Data.length;i++)n[t[i]]=e.Data[i].VarCharValue;return n}))}static applyParamsToQuery(e,t){let n=e;return n&&t&&Object.keys(t).forEach((e=>{const i=s.safeString(t[e]),a="{"+e+"}";n=n.split(a).join(i)})),n}async fetchQueryIds(){const e={NextToken:null};let t=[],n=null;do{n=await this.athena.send(new p(e)),t=t.concat(n.NamedQueryIds),e.NextToken=n.NextToken}while(e.NextToken);return t}async listQueries(){const e=[],t=await this.fetchQueryIds();a.debug("Finding %d items",t.length);for(let n=0;n<t.length;n++){const i={NamedQueryId:t[n]},s=await this.athena.send(new y(i));e.push(s.NamedQuery)}return e}async findQueryByName(e){return(await this.listQueries()).find((t=>t.Name.toLowerCase()==e.toLowerCase()))}async runQueryToObjects(e,t={},n=2e3){a.info("Running query to objects");const i=await this.runQueryToOutputLocation(e,t,n);a.info("Query succeeded, processing file from %s",i);const s={Bucket:i.substring(5,i.indexOf("/",5)),Key:i.substring(i.indexOf("/",5)+1)},o=await this.s3.send(new O(s)),r=await o.Body.transformToString();return await h.stringParse(r,(e=>e),{columns:!0,skip_empty_lines:!0})}async runQueryToFile(t,n={},i=null,s=2e3){a.info("Running query to file");const o=await this.runQueryToOutputLocation(t,n,s);a.info("Query succeeded, pulling file from %s",o);const 
c={Bucket:o.substring(5,o.indexOf("/",5)),Key:o.substring(o.indexOf("/",5)+1)},l=i||E.fileSync({postfix:".csv",keep:!1}).name,u=e.createWriteStream(l),h=(await this.s3.send(new O(c))).Body;h.pipe(u);const d=await r.resolveOnEvent(h,["finish","close"],["error"],l);return a.silly("Response: %s",d),l}async runQueryToOutputLocation(e,t={},n=2e3){let i=null;const o=new c,l=R.applyParamsToQuery(e,t);try{a.info("Starting query : %s",l);const e=s.createType4Guid(),t={QueryString:l,ResultConfiguration:{OutputLocation:this.outputLocation,EncryptionConfiguration:{EncryptionOption:"SSE_S3"}},ClientRequestToken:e,QueryExecutionContext:{Database:"default"}},c={QueryExecutionId:(await this.athena.send(new S(t))).QueryExecutionId},u=["FAILED","CANCELLED","SUCCEEDED"];let h=await this.athena.send(new T(c));for(;-1===u.indexOf(h.QueryExecution.Status.State);)await r.createTimeoutPromise("wait",n),a.debug("%s : %s : %s",h.QueryExecution.Status.State,o.dump(),l),h=await this.athena.send(new T(c));"FAILED"===h.QueryExecution.Status.State?a.warn("Query failed : %s",h.QueryExecution.Status.StateChangeReason):"SUCCEEDED"===h.QueryExecution.Status.State&&(i=h.QueryExecution.ResultConfiguration.OutputLocation)}catch(e){a.warn("Failure : %s",e,e)}return a.info("Query took %s : %s",o.dump(),l),i}}class P{constructor(){}static buildInformation(){return{version:"413",hash:"4f9f54292df51665cce2e9639e6988b8ecff084f",branch:"alpha-2024-07-01-2",tag:"alpha-2024-07-01-2",timeBuiltISO:"2024-07-01T13:19:45-0700",notes:"No notes"}}}class D{constructor(){}static async importJsonLFileToTable(t,n,o){i.notNullOrUndefined(t,"dynamo"),i.notNullOrUndefined(n,"tableName"),i.notNullOrUndefined(o,"filename");const r=e.createReadStream(o),c=w.createInterface({input:r,crlfDelay:1/0});let l=0;for await(const e of c)if(l%100==0&&a.info("Importing line %d",l),s.trimToNull(e)){const i=JSON.parse(e);await t.simplePut(n,i),l++}return l}static async exportScanToJsonLFile(t,n,s){i.notNullOrUndefined(t,"dynamo"),i.notNullOrUndefined(n,"scan"),i.notNullOrUndefined(s,"filename");const o=e.createWriteStream(s);o.on("end",(()=>{a.debug("Write complete")}));const c=await D.exportScanToJsonLWriteStream(t,n,o);return await r.resolveOnEvent(o,["finish","close"],["error"]),o.close(),c}static async exportQueryToJsonLFile(t,n,s){i.notNullOrUndefined(t,"dynamo"),i.notNullOrUndefined(n,"qry"),i.notNullOrUndefined(s,"filename");const o=e.createWriteStream(s);o.on("end",(()=>{a.debug("Write complete")}));const c=await D.exportQueryToJsonLWriteStream(t,n,o);return await r.resolveOnEvent(o,["finish","close"],["error"]),o.close(),c}static async exportScanToJsonLWriteStream(e,t,n){i.notNullOrUndefined(e,"dynamo"),i.notNullOrUndefined(t,"scan"),i.notNullOrUndefined(n,"target");return await e.fullyExecuteProcessOverScan(t,(async e=>D.writeItemToJsonLStream(e,n,!1)))}static async exportQueryToJsonLWriteStream(e,t,n){i.notNullOrUndefined(e,"dynamo"),i.notNullOrUndefined(t,"qry"),i.notNullOrUndefined(n,"target");return await e.fullyExecuteProcessOverQuery(t,(async e=>D.writeItemToJsonLStream(e,n,!1)))}static writeItemToJsonLStream(e,t,n=!1){(e||n)&&t.write(JSON.stringify(e)+"\n")}}class x{srcDir;bucketName;config;s3=new N({region:"us-east-1"});constructor(t,n,i){this.srcDir=t,this.bucketName=n,this.config=JSON.parse(e.readFileSync(i).toString("ascii"))}static createFromArgs(e){if(e&&3===e.length){const t=e[0],n=e[1],i=e[2];return new x(t,n,i)}return console.log("Usage : node ratchet-site-uploader {srcDir} {bucket} {configFile} (Found "+e+" arguments, need 
3)"),null}static async runFromCliArgs(e){return x.createFromArgs(e).runPump()}findMatch(e,t,n){let i=null;return null!=e&&null!=t&&null!=n&&null!=n.mapping&&n.mapping.forEach((n=>{null==i&&(null==n.prefixMatch||e.match(n.prefixMatch))&&(null==n.fileMatch||t.match(n.fileMatch))&&(i=n)})),i}findMime(e,t){let n=null;return null!=t&&null!=t.customMimeTypeMapping&&Object.keys(t.customMimeTypeMapping).forEach((i=>{null==n&&e.endsWith(i)&&(n=t.customMimeTypeMapping[i])})),null==n&&(n=b.lookup(e)),null==n&&(n="binary/octet-stream"),n}runPump(){return new Promise(((t,i)=>{a.info("Uploading contents of %s to %s using %j as config",this.srcDir,this.bucketName,this.config);const s=F.walk(this.srcDir,{});s.on("file",function(t,i,s){a.info("Processing %j",i.name);const o=t==this.srcDir?"":t.substring(this.srcDir.length+1)+"/",r=this.findMatch(o,i.name,this.config),c=o+i.name;a.info("Uploading file : %s/%s to key %s with %j",t,i.name,c,r);const l=r&&r.putParams?JSON.parse(JSON.stringify(r.putParams)):{};l.Bucket=this.bucketName,l.Key=c,l.Body=e.readFileSync(n.join(t,i.name)),l.ContentType||(l.ContentType=this.findMime(i.name,this.config));const u=new M({client:this.s3,params:l,tags:[],queueSize:4,partSize:5242880,leavePartsOnError:!1});u.on("httpUploadProgress",(e=>{a.debug("Uploading : %s",e)})),u.done().then((e=>{a.info("Finished upload of %s: %j",c,e),s()})).catch((e=>{a.warn("%s failed to upload : %s : Continuing",c,e),s()}))}.bind(this)),s.on("errors",(function(e,t,n){n()})),s.on("end",(function(){a.info("All done"),t(!0)}))}))}}class B{ec2Ratchet;constructor(e){this.ec2Ratchet=e}async startInstanceAndUploadPublicKeyFile(t,n,s="ec2-user"){i.notNullUndefinedOrOnlyWhitespaceString(t,"instanceId"),i.notNullUndefinedOrOnlyWhitespaceString(n,"filePath"),i.true(e.existsSync(n),"File does not exist"),a.info("Starting instance %s and uploading contents of public key file %s",t,n);const o=e.readFileSync(n).toString();return this.startInstanceAndUploadPublicKey(t,o,s)}async startInstanceAndUploadPublicKey(e,t,n="ec2-user"){a.info("Starting instance %s, public key length %d, user %s",e,t.length,n);let i=await this.ec2Ratchet.describeInstance(e);if(i){let s=!1;if(16==i.State.Code?(a.info("Instance is already running..."),s=!0):(a.info("Instance is not running... starting up : %s",e),s=await this.ec2Ratchet.launchInstance(e,3e4)),s){a.info("Uploading public key...");const s=await this.ec2Ratchet.sendPublicKeyToEc2Instance(e,t,n);a.info("Key response : %j",s),i=i&&i.PublicIpAddress?i:await this.ec2Ratchet.describeInstance(e),a.info("Instance IP address is %s",i.PublicIpAddress)}else a.info("Instance could not start - check logs")}else a.info("No such instance found - check your AWS keys? 
: %s",e);return i}}class v{instanceId;publicKeyFile;instanceOsUser;region;availabilityZone;ec2Ratchet;instanceUtil;constructor(e,t=n.join(L.homedir(),".ssh","id_rsa.pub"),i="ec2-user",s="us-east-1",a="us-east-1a"){this.instanceId=e,this.publicKeyFile=t,this.instanceOsUser=i,this.region=s,this.availabilityZone=a,this.ec2Ratchet=new g(this.region,this.availabilityZone),this.instanceUtil=new B(this.ec2Ratchet)}static createFromArgs(e){if(1===e?.length||2===e?.length){const t=e[0];return new v(t)}return a.info("Usage : ratchet-start-instance-and-ssh {instanceId} {publicKeyFile} (Found %s arguments, need 1 or 2)",e),null}static async runFromCliArgs(e){return v.createFromArgs(e).run()}async run(){let e=await this.instanceUtil.startInstanceAndUploadPublicKeyFile(this.instanceId,this.publicKeyFile,this.instanceOsUser);if(e){a.info("Instance IP address is %s",e.PublicIpAddress);const t=C("ssh",[this.instanceOsUser+"@"+e.PublicIpAddress],{stdio:"inherit"});a.info("%j",t)}else a.info("No such instance found - check your AWS keys? : %s",this.instanceId)}}class Q extends d{fetchHandlerMap(){return{"site-uploader":x.runFromCliArgs,"start-instance-and-ssh":v.runFromCliArgs}}fetchVersionInfo(){return P.buildInformation()}}class z{canProcess(e){return!0}async processEmail(e){const t=[];try{i.notNullOrUndefined(e,"msg"),a.info("Processing Broadsign reach inbound inventory email");const n=e.attachments[0].content;a.info("Unzipping attachment");const s=new f(n);let o=null;const r=s.pipe(_.Parse()).on("entry",(async e=>{e.path.toLowerCase().endsWith("csv")?o=await e.buffer():(a.info("Pass: %s",e.path),e.autodrain())})).promise();await r;const c=await h.stringParse(o.toString(),(e=>e),{columns:!1,skip_empty_lines:!0});if(c.length>1){const e="drop table if exists sample";let n="create table sample (pump_date varchar(255),";const i=c[0];let s="insert into sample (pump_date,",o="?,";for(let e=0;e<i.length;e++){e>0&&(n+=", ",s+=", ",o+=", ");const t=i[e].toLowerCase().split(" ").join("_");s+=t,o+="?",n+=t+" varchar(255)","id"===t?n+=" primary key":"device_id"===t&&(n+=" unique")}n+=")",s+=") values ",a.info("Recreating table");t.push({statement:e}),t.push({statement:n});const r=A.utc().toISO();let l=s,u=[];for(let e=1;e<c.length;e++)l>s&&(l+=","),l+="("+o+")",u=u.concat(r,c[e]),e%25!=0&&e!==c.length-1||(t.push({statement:l,params:u}),l=s,u=[],a.info("Inserted %d of %d rows",e,c.length));a.info("Finished insertion of %d rows",c.length)}}catch(e){a.error("Failure: %s : %j",e,t,e)}return t}}class j{cache;processors;constructor(e,t){this.cache=e,this.processors=t,i.notNullOrUndefined(this.cache,"cache"),i.notNullOrUndefined(this.cache.getDefaultBucket(),"cache.defaultBucket")}async processEmailFromS3(e){if(await this.cache.fileExists(e)){const t=await this.cache.fetchCacheFileAsString(e);return this.processEmailFromBuffer(new Buffer(t))}return a.warn("Cannot process inbound email - no such key : %s",e),!1}async processEmailFromBuffer(e){i.notNullOrUndefined(e,"buf"),a.info("Processing inbound email - size %d bytes",e.length);const t=await k(e);a.info('Found mail from "%s" subject "%s" with %d attachments',t?.from?.text,t?.subject,t?.attachments?.length);let n=!1;for(let e=0;e<this.processors.length&&!n;e++)if(this.processors[e].canProcess(t)){a.info("Processing message with processor %d",e);const i=await this.processors[e].processEmail(t);a.info("Result was : %j",i),n=!0}return n}}class W{canProcess(e){return!0}async processEmail(e){return e.body}}class K{s3;tmpFolder;cacheTimeoutSeconds;static 
DEFAULT_CACHE_TIMEOUT_SEC=604800;currentlyLoading=new Map;constructor(t,n,a=K.DEFAULT_CACHE_TIMEOUT_SEC){this.s3=t,this.tmpFolder=n,this.cacheTimeoutSeconds=a,i.notNullOrUndefined(t,"s3"),i.notNullOrUndefined(s.trimToNull(n)),i.true(e.existsSync(n),"folder must exist : "+n)}async getFileString(e){const t=await this.getFileBuffer(e);return t?t.toString():null}keyToLocalCachePath(e){const t=this.generateCacheHash(this.s3.getDefaultBucket()+"/"+e);return n.join(this.tmpFolder,t)}removeCacheFileForKey(t){const n=this.keyToLocalCachePath(t);a.info("Removing cache file for %s : %s",t,n),e.existsSync(n)?e.unlinkSync(n):a.debug("Skipping delete for %s - does not exist",n)}async getFileBuffer(e){const t=this.keyToLocalCachePath(e);let n=null;if(n=this.getCacheFileAsBuffer(t),n)a.info("Found cache file for s3://%s/%s. Local path %s",this.s3.getDefaultBucket(),e,t);else{a.info("No cache. Downloading File s3://%s/%s to %s",this.s3.getDefaultBucket(),e,t);try{let i=this.currentlyLoading.get(e);i?a.info("Already running - wait for that"):(a.info("Not running - start"),i=this.updateLocalCacheFile(e,t),this.currentlyLoading.set(e,i)),n=await i,this.currentlyLoading.delete(e)}catch(t){a.warn("File %s/%s does not exist. Err code: %s",this.s3.getDefaultBucket(),e,t)}}return n}async updateLocalCacheFile(t,n){const i=await this.s3.fetchCacheFileAsBuffer(t);return i&&i.length>0&&(a.info("Saving %d bytes to disk for cache",i.length),e.writeFileSync(n,i)),i}getCacheFileAsString(e){const t=this.getCacheFileAsBuffer(e);return t?t.toString():null}getCacheFileAsBuffer(t){if(!e.existsSync(t))return null;try{const n=e.statSync(t),i=(new Date).getTime();if((i-n.ctimeMs)/1e3>=this.cacheTimeoutSeconds)return null;return e.readFileSync(t)}catch(e){a.warn("Error getting s3 cache file %s",e)}return null}generateCacheHash(e){return I.createHash("md5").update(e).digest("hex")}}var q,H,J;!function(e){e.Lazy="Lazy",e.OnStartup="OnStartup"}(q||(q={})),function(e){e.TreatSameSizeAsNoChange="TreatSameSizeAsNoChange"}(H||(H={})),function(e){e.EveryUpload="EveryUpload",e.Disabled="Disabled"}(J||(J={}));class V{config;_localFileName;_remoteStatus;_lastSyncEpochMS;_loadingRemoteSignal;constructor(e){if(this.config=e,i.notNullOrUndefined(e,"config"),i.notNullUndefinedOrOnlyWhitespaceString(e.s3Path,"s3Path"),i.notNullOrUndefined(e.s3CacheRatchetLike,"s3CacheRatchetLike"),i.notNullUndefinedOrOnlyWhitespaceString(e.s3CacheRatchetLike.getDefaultBucket(),"s3CacheRatchetLike.getDefaultBucket()"),e.forceLocalFileFullPath)this._localFileName=e.forceLocalFileFullPath;else{const t=e.s3Path.includes(".")?e.s3Path.substring(e.s3Path.lastIndexOf(".")+1):void 0;this._localFileName=e.forceLocalFileFullPath??E.fileSync({postfix:t,keep:e.leaveTempFileOnDisk}).name}a.info("Using local file %s for remote path %s %s",this._localFileName,this.config.s3CacheRatchetLike.getDefaultBucket(),this.config.s3Path),this.initialize().then((()=>{a.info("Initialized")}))}get remoteStatusData(){return!this._remoteStatus||this.config.remoteStatusTtlMs&&Date.now()-this._remoteStatus.updatedEpochMs>this.config.remoteStatusTtlMs?(async()=>{const e=await this.fetchRemoteMeta();return this._remoteStatus={updatedEpochMs:Date.now(),remoteSizeInBytes:e.ContentLength,remoteLastUpdatedEpochMs:e.LastModified.getTime(),remoteHash:e.ETag},this._remoteStatus})():Promise.resolve(this._remoteStatus)}async initialize(){a.info("Using local path %s to sync %s / 
%s",this._localFileName,this.config.s3CacheRatchetLike.getDefaultBucket(),this.config.s3Path),this.config.initMode===q.OnStartup&&(a.info("Initial loading"),this.fetchRemoteToLocal().then((()=>{a.info("Finished initial load")})))}async localAndRemoteAreSameSize(){let e=!1;const t=this.localFileBytes;if(null!==t){const n=(await this.remoteStatusData).remoteSizeInBytes;e=t===n,a.info("Local size is %s, remote is %s, same is %s",t,n,e)}return e}directWriteValueToLocalFile(t){i.notNullOrUndefined(t,"value"),e.writeFileSync(this._localFileName,t)}get lastSyncEpochMS(){return this._lastSyncEpochMS}get localFileName(){return this._localFileName}get localFileStats(){let t=null;if(e.existsSync(this._localFileName)){t=e.statSync(this._localFileName)}return t}get localFileBytes(){const e=this.localFileStats;return e?e.size:null}get localFileUpdatedEpochMS(){const e=this.localFileStats;return e?e.mtime.getTime():null}async fetchRemoteMeta(){return this.config.s3CacheRatchetLike.fetchMetaForCacheFile(this.config.s3Path)}get wouldFetch(){return(async()=>!this.hasFetchOptimization(H.TreatSameSizeAsNoChange)||!await this.localAndRemoteAreSameSize())()}get wouldPush(){return(async()=>!this.hasPushOptimization(H.TreatSameSizeAsNoChange)||!await this.localAndRemoteAreSameSize())()}async sendLocalToRemote(){const t=new c;a.info("Sending local file to remote");let n=null;if(await this.wouldPush)try{if(this.config.backupMode===J.EveryUpload){a.info("EveryUpload mode set - backing up");const e=await this.backupRemote();a.info("Backup result : %s",e)}const t=await this.config.s3CacheRatchetLike.writeStreamToCacheFile(this.config.s3Path,e.readFileSync(this._localFileName));a.silly("SendLocalToRemote: %j",t),this._remoteStatus=null,n=l.Updated}catch(e){a.error("Failed to transfer %s : %s",this._localFileName,e,e),n=l.Error}else a.info("TreatSameSizeAsNoChange set and files are same size - skipping"),n=l.Skipped;return a.info("Sent %d bytes to remote in %s",this.localFileBytes,t.dump()),n}async backupRemote(){let e=null;try{const t=this.config.s3Path.lastIndexOf("/"),n="/backup/"+A.now().toFormat("yyyy/MM/dd/HH/mm/ss")+"/",i=t>-1?this.config.s3Path.substring(0,t)+n+this.config.s3Path.substring(t+1):n+this.config.s3Path;a.info("Backing up path %s to %s",this.config.s3Path,i),await this.config.s3CacheRatchetLike.copyFile(this.config.s3Path,i),e=u.Success}catch(t){a.error("Failed to backup %s : %s",this.config.s3Path,t,t),e=u.Error}return e}async fetchRemoteToLocal(){return a.info("Called fetchRemoteToLocal"),this.fetchRemoteToLocalIfNewerThan(0)}async fetchRemoteToLocalIfNewerThan(e){return this._loadingRemoteSignal||(this._loadingRemoteSignal=this.innerFetchRemoteToLocalIfNewerThan(e)),this._loadingRemoteSignal}hasFetchOptimization(e){const t=!!e&&(this?.config?.fetchOptimizations||[]).includes(e);return a.info("hasFetchOptimization %s returning %s",e,t),t}hasPushOptimization(e){const t=!!e&&(this?.config?.pushOptimizations||[]).includes(e);return a.info("hasPushOptimization %s returning %s",e,t),t}async innerFetchRemoteToLocalIfNewerThan(t){try{const n=new c;if(await this.wouldFetch){const i={Bucket:this.config.s3CacheRatchetLike.getDefaultBucket(),Key:this.config.s3Path,IfModifiedSince:new Date(t)},s=await this.config.s3CacheRatchetLike.fetchCacheFilePassThru(i),o=e.createWriteStream(this._localFileName);s.Body.pipe(o),a.info("Waiting for pipe completion"),await r.resolveOnEvent(o,["close","finish"],["error"]),a.info("Pipe 
completed"),this._lastSyncEpochMS=Date.now(),this._remoteStatus={updatedEpochMs:Date.now(),remoteSizeInBytes:s.ContentLength,remoteLastUpdatedEpochMs:s.LastModified.getTime(),remoteHash:s.ETag},a.info("Fetched remote to local, %d bytes in %s",s.ContentLength,n.dump())}else a.info("TreatSameSizeAsNoChange not enabled OR files are same size - skipping"),this._lastSyncEpochMS=Date.now();return l.Updated}catch(e){return a.error("Failed to fetch %s / %s : %s",this.config.s3CacheRatchetLike.getDefaultBucket(),this.config.s3Path,e,e),l.Error}}}export{U as AlbAthenaLogRatchet,R as AthenaRatchet,D as DynamoExporter,B as Ec2InstanceUtil,z as EmailToDbInsertProcessor,j as InboundEmailRatchet,P as RatchetAwsNodeOnlyInfo,Q as RatchetCliHandler,K as S3CacheToLocalDiskRatchet,V as S3SyncedFile,q as S3SyncedFileConfigInitMode,H as S3SyncedFileOptimization,J as S3SyncedFileRemoteBackupMode,W as SampleEmailProcessor,x as SiteUploader,v as StartInstanceAndSsh};
+ line 1 (added, 4.0.415-alpha):
import e,{readFileSync as t}from"fs";import n from"path";import{RequireRatchet as i,StringRatchet as s,Logger as a,EsmRatchet as o,PromiseRatchet as r,StopWatch as c,FileTransferResult as l,BackupResult as u}from"@bitblit/ratchet-common";import{CsvRatchet as h,AbstractRatchetCliHandler as d,MultiStream as f}from"@bitblit/ratchet-node-only";import{S3Ratchet as m,Ec2Ratchet as g}from"@bitblit/ratchet-aws";import{ListNamedQueriesCommand as p,GetNamedQueryCommand as y,StartQueryExecutionCommand as S,GetQueryExecutionCommand as T}from"@aws-sdk/client-athena";import{GetObjectCommand as O,S3Client as N}from"@aws-sdk/client-s3";import E from"tmp";import w from"readline";import F from"walk";import b from"mime-types";import{Upload as M}from"@aws-sdk/lib-storage";import{spawnSync as C}from"child_process";import L from"os";import _ from"unzipper";import{DateTime as A}from"luxon";import{simpleParser as I}from"mailparser";import k from"crypto";class U{athena;athenaTableName;constructor(e,t){this.athena=e,this.athenaTableName=t,i.notNullOrUndefined(e,"athena"),i.notNullOrUndefined(s.trimToNull(t),"athenaTableName")}async updatePartitions(e,t,n=(new Date).getTime()-864e5,s=(new Date).getTime()){i.true(m.checkS3UrlForValidity(e),"root path not valid"),i.notNullOrUndefined(t,"s3"),a.info("Updating partitions for %s from %s",this.athenaTableName,e),m.extractBucketFromURL(e),m.extractKeyFromURL(e);let o=n;const r=[];for(;o<s;){const t=new Date(o).toISOString().substring(0,10);a.info("d:%s",t);const n=t.split("-");r.push("PARTITION (date_utc_partition='"+t+"') LOCATION '"+e+"/"+n[0]+"/"+n[1]+"/"+n[2]+"'"),o+=864e5}if(r.length>0){const e="ALTER TABLE "+this.athenaTableName+" ADD IF NOT EXISTS \n"+r.join("\n");await this.athena.runQueryToObjects(e)}else a.warn("Not updating partitions - no time between time clauses");return r}async createTable(e,r=!1){i.true(m.checkS3UrlForValidity(e),"root path not valid");let c=!1;if(a.info("Creating ALB table %s",this.athenaTableName),r){a.info("Replace if present specified, removed old table");try{await this.athena.runQueryToObjects("drop table "+this.athenaTableName)}catch(e){a.info("Drop error : %j",e)}}let l=t(n.join(o.fetchDirName(import.meta.url),"../static/albAthenaTableCreate.txt")).toString();l=s.simpleTemplateFill(l,{TABLE_NAME:this.athenaTableName,ALB_LOG_ROOT:e},!0),a.info("Creating table with %s",l);try{await this.athena.runQueryToObjects(l),c=!0}catch(e){a.error("Error creating table : %s",e)}return c}static async readLogObjectsFromCsvStream(e){return h.streamParse(e,(e=>e))}static async readLogObjectsFromFile(e){return h.fileParse(e,(e=>e))}async fetchAlbLogRecords(e){const t=await this.fetchAlbLogRecordsToFile(e);return U.readLogObjectsFromFile(t)}async fetchAlbLogRecordsToFile(e,t=null){a.info("Querying %s : %j",this.athenaTableName,e);let n="select * from "+this.athenaTableName+" where 1=1 ";e.startTimeEpochMS&&(e.startTimeEpochMS&&(n+=" AND time >= '"+new Date(e.startTimeEpochMS).toISOString()+"'",n+=" AND date_utc_partition >='"+new Date(e.startTimeEpochMS).toISOString().substring(0,10)+"'"),e.endTimeEpochMS&&(n+=" AND time < '"+new Date(e.endTimeEpochMS).toISOString()+"'",n+=" AND date_utc_partition <='"+new Date(e.endTimeEpochMS).toISOString().substring(0,10)+"'"),e.requestUrlFilter&&(n+=" AND request_url LIKE '"+e.requestUrlFilter+"'"),e.limit&&(n+=" LIMIT "+e.limit));return await this.athena.runQueryToFile(n,null,t)}static CREATE_TABLE_STATEMENT="CREATE EXTERNAL TABLE IF NOT EXISTS `{{TABLE NAME}}`(\n `type` string COMMENT '',\n `time` string COMMENT 
'',\n `elb` string COMMENT '',\n `client_ip` string COMMENT '',\n `client_port` int COMMENT '',\n `target_ip` string COMMENT '',\n `target_port` int COMMENT '',\n `request_processing_time` double COMMENT '',\n `target_processing_time` double COMMENT '',\n `response_processing_time` double COMMENT '',\n `elb_status_code` string COMMENT '',\n `target_status_code` string COMMENT '',\n `received_bytes` bigint COMMENT '',\n `sent_bytes` bigint COMMENT '',\n `request_verb` string COMMENT '',\n `request_url` string COMMENT '',\n `request_proto` string COMMENT '',\n `user_agent` string COMMENT '',\n `ssl_cipher` string COMMENT '',\n `ssl_protocol` string COMMENT '',\n `target_group_arn` string COMMENT '',\n `trace_id` string COMMENT '',\n `domain_name` string COMMENT '',\n `chosen_cert_arn` string COMMENT '',\n `matched_rule_priority` string COMMENT '',\n `request_creation_time` string COMMENT '',\n `actions_executed` string COMMENT '',\n `redirect_url` string COMMENT '',\n `lambda_error_reason` string COMMENT '',\n `target_port_list` string COMMENT '',\n `target_status_code_list` string COMMENT '',\n `new_field` string COMMENT '')\nPARTITIONED BY (\n `date_utc_partition` string\n)\nROW FORMAT SERDE\n 'org.apache.hadoop.hive.serde2.RegexSerDe'\nWITH SERDEPROPERTIES (\n 'input.regex'='([^ ]*) ([^ ]*) ([^ ]*) ([^ ]*):([0-9]*) ([^ ]*)[:-]([0-9]*) ([-.0-9]*) ([-.0-9]*) ([-.0-9]*) (|[-0-9]*) (-|[-0-9]*) ([-0-9]*) ([-0-9]*) \\\"([^ ]*) ([^ ]*) (- |[^ ]*)\\\" \\\"([^\\\"]*)\\\" ([A-Z0-9-]+) ([A-Za-z0-9.-]*) ([^ ]*) \\\"([^\\\"]*)\\\" \\\"([^\\\"]*)\\\" \\\"([^\\\"]*)\\\" ([-.0-9]*) ([^ ]*) \\\"([^\\\"]*)\\\" \\\"([^\\\"]*)\\\" \\\"([^ ]*)\\\" \\\"([^s]+)\\\" \\\"([^s]+)\\\"(.*)')\nSTORED AS INPUTFORMAT\n 'org.apache.hadoop.mapred.TextInputFormat'\nOUTPUTFORMAT\n 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'\nLOCATION\n '{{ALB_LOG_ROOT}}'\n"}class R{athena;s3;outputLocation;constructor(e,t,n){this.athena=e,this.s3=t,this.outputLocation=n,i.notNullOrUndefined(e),i.notNullOrUndefined(t),i.notNullOrUndefined(n),i.true(n.startsWith("s3://"))}static athenaRowsToObject(e){const t=e[0].Data.map((e=>e.VarCharValue));return e.slice(1).map((e=>{const n={};for(let i=0;i<e.Data.length;i++)n[t[i]]=e.Data[i].VarCharValue;return n}))}async fetchQueryIds(){const e={NextToken:null};let t=[],n=null;do{n=await this.athena.send(new p(e)),t=t.concat(n.NamedQueryIds),e.NextToken=n.NextToken}while(e.NextToken);return t}async listQueries(){const e=[],t=await this.fetchQueryIds();a.debug("Finding %d items",t.length);for(let n=0;n<t.length;n++){const i={NamedQueryId:t[n]},s=await this.athena.send(new y(i));e.push(s.NamedQuery)}return e}async findQueryByName(e){return(await this.listQueries()).find((t=>t.Name.toLowerCase()==e.toLowerCase()))}async runQueryToObjects(e,t={},n=2e3){a.info("Running query to objects");const i=await this.runQueryToOutputLocation(e,t,n);a.info("Query succeeded, processing file from %s",i);const s={Bucket:i.substring(5,i.indexOf("/",5)),Key:i.substring(i.indexOf("/",5)+1)},o=await this.s3.send(new O(s)),r=await o.Body.transformToString();return await h.stringParse(r,(e=>e),{columns:!0,skip_empty_lines:!0})}async runQueryToFile(t,n={},i=null,s=2e3){a.info("Running query to file");const o=await this.runQueryToOutputLocation(t,n,s);a.info("Query succeeded, pulling file from %s",o);const c={Bucket:o.substring(5,o.indexOf("/",5)),Key:o.substring(o.indexOf("/",5)+1)},l=i||E.fileSync({postfix:".csv",keep:!1}).name,u=e.createWriteStream(l),h=(await this.s3.send(new O(c))).Body;h.pipe(u);const 
d=await r.resolveOnEvent(h,["finish","close"],["error"],l);return a.silly("Response: %s",d),l}async runQueryToOutputLocation(e,t={},n=2e3){let i=null;const o=new c,l=s.simpleTemplateFill(e,t,!0,"{","}");try{a.info("Starting query : %s",l);const e=s.createType4Guid(),t={QueryString:l,ResultConfiguration:{OutputLocation:this.outputLocation,EncryptionConfiguration:{EncryptionOption:"SSE_S3"}},ClientRequestToken:e,QueryExecutionContext:{Database:"default"}},c={QueryExecutionId:(await this.athena.send(new S(t))).QueryExecutionId},u=["FAILED","CANCELLED","SUCCEEDED"];let h=await this.athena.send(new T(c));for(;-1===u.indexOf(h.QueryExecution.Status.State);)await r.createTimeoutPromise("wait",n),a.debug("%s : %s : %s",h.QueryExecution.Status.State,o.dump(),l),h=await this.athena.send(new T(c));"FAILED"===h.QueryExecution.Status.State?a.warn("Query failed : %s",h.QueryExecution.Status.StateChangeReason):"SUCCEEDED"===h.QueryExecution.Status.State&&(i=h.QueryExecution.ResultConfiguration.OutputLocation)}catch(e){a.warn("Failure : %s",e,e)}return a.info("Query took %s : %s",o.dump(),l),i}}class P{constructor(){}static buildInformation(){return{version:"415",hash:"43b8898fca427e6952399e7dfd5c58cf8e8177c0",branch:"alpha-2024-07-12-1",tag:"alpha-2024-07-12-1",timeBuiltISO:"2024-07-12T16:42:20-0700",notes:"No notes"}}}class D{constructor(){}static async importJsonLFileToTable(t,n,o){i.notNullOrUndefined(t,"dynamo"),i.notNullOrUndefined(n,"tableName"),i.notNullOrUndefined(o,"filename");const r=e.createReadStream(o),c=w.createInterface({input:r,crlfDelay:1/0});let l=0;for await(const e of c)if(l%100==0&&a.info("Importing line %d",l),s.trimToNull(e)){const i=JSON.parse(e);await t.simplePut(n,i),l++}return l}static async exportScanToJsonLFile(t,n,s){i.notNullOrUndefined(t,"dynamo"),i.notNullOrUndefined(n,"scan"),i.notNullOrUndefined(s,"filename");const o=e.createWriteStream(s);o.on("end",(()=>{a.debug("Write complete")}));const c=await D.exportScanToJsonLWriteStream(t,n,o);return await r.resolveOnEvent(o,["finish","close"],["error"]),o.close(),c}static async exportQueryToJsonLFile(t,n,s){i.notNullOrUndefined(t,"dynamo"),i.notNullOrUndefined(n,"qry"),i.notNullOrUndefined(s,"filename");const o=e.createWriteStream(s);o.on("end",(()=>{a.debug("Write complete")}));const c=await D.exportQueryToJsonLWriteStream(t,n,o);return await r.resolveOnEvent(o,["finish","close"],["error"]),o.close(),c}static async exportScanToJsonLWriteStream(e,t,n){i.notNullOrUndefined(e,"dynamo"),i.notNullOrUndefined(t,"scan"),i.notNullOrUndefined(n,"target");return await e.fullyExecuteProcessOverScan(t,(async e=>D.writeItemToJsonLStream(e,n,!1)))}static async exportQueryToJsonLWriteStream(e,t,n){i.notNullOrUndefined(e,"dynamo"),i.notNullOrUndefined(t,"qry"),i.notNullOrUndefined(n,"target");return await e.fullyExecuteProcessOverQuery(t,(async e=>D.writeItemToJsonLStream(e,n,!1)))}static writeItemToJsonLStream(e,t,n=!1){(e||n)&&t.write(JSON.stringify(e)+"\n")}}class x{srcDir;bucketName;config;s3=new N({region:"us-east-1"});constructor(t,n,i){this.srcDir=t,this.bucketName=n,this.config=JSON.parse(e.readFileSync(i).toString("ascii"))}static createFromArgs(e){if(e&&3===e.length){const t=e[0],n=e[1],i=e[2];return new x(t,n,i)}return console.log("Usage : node ratchet-site-uploader {srcDir} {bucket} {configFile} (Found "+e+" arguments, need 3)"),null}static async runFromCliArgs(e){return x.createFromArgs(e).runPump()}findMatch(e,t,n){let i=null;return 
null!=e&&null!=t&&null!=n&&null!=n.mapping&&n.mapping.forEach((n=>{null==i&&(null==n.prefixMatch||e.match(n.prefixMatch))&&(null==n.fileMatch||t.match(n.fileMatch))&&(i=n)})),i}findMime(e,t){let n=null;return null!=t&&null!=t.customMimeTypeMapping&&Object.keys(t.customMimeTypeMapping).forEach((i=>{null==n&&e.endsWith(i)&&(n=t.customMimeTypeMapping[i])})),null==n&&(n=b.lookup(e)),null==n&&(n="binary/octet-stream"),n}runPump(){return new Promise(((t,i)=>{a.info("Uploading contents of %s to %s using %j as config",this.srcDir,this.bucketName,this.config);const s=F.walk(this.srcDir,{});s.on("file",function(t,i,s){a.info("Processing %j",i.name);const o=t==this.srcDir?"":t.substring(this.srcDir.length+1)+"/",r=this.findMatch(o,i.name,this.config),c=o+i.name;a.info("Uploading file : %s/%s to key %s with %j",t,i.name,c,r);const l=r&&r.putParams?JSON.parse(JSON.stringify(r.putParams)):{};l.Bucket=this.bucketName,l.Key=c,l.Body=e.readFileSync(n.join(t,i.name)),l.ContentType||(l.ContentType=this.findMime(i.name,this.config));const u=new M({client:this.s3,params:l,tags:[],queueSize:4,partSize:5242880,leavePartsOnError:!1});u.on("httpUploadProgress",(e=>{a.debug("Uploading : %s",e)})),u.done().then((e=>{a.info("Finished upload of %s: %j",c,e),s()})).catch((e=>{a.warn("%s failed to upload : %s : Continuing",c,e),s()}))}.bind(this)),s.on("errors",(function(e,t,n){n()})),s.on("end",(function(){a.info("All done"),t(!0)}))}))}}class B{ec2Ratchet;constructor(e){this.ec2Ratchet=e}async startInstanceAndUploadPublicKeyFile(t,n,s="ec2-user"){i.notNullUndefinedOrOnlyWhitespaceString(t,"instanceId"),i.notNullUndefinedOrOnlyWhitespaceString(n,"filePath"),i.true(e.existsSync(n),"File does not exist"),a.info("Starting instance %s and uploading contents of public key file %s",t,n);const o=e.readFileSync(n).toString();return this.startInstanceAndUploadPublicKey(t,o,s)}async startInstanceAndUploadPublicKey(e,t,n="ec2-user"){a.info("Starting instance %s, public key length %d, user %s",e,t.length,n);let i=await this.ec2Ratchet.describeInstance(e);if(i){let s=!1;if(16==i.State.Code?(a.info("Instance is already running..."),s=!0):(a.info("Instance is not running... starting up : %s",e),s=await this.ec2Ratchet.launchInstance(e,3e4)),s){a.info("Uploading public key...");const s=await this.ec2Ratchet.sendPublicKeyToEc2Instance(e,t,n);a.info("Key response : %j",s),i=i&&i.PublicIpAddress?i:await this.ec2Ratchet.describeInstance(e),a.info("Instance IP address is %s",i.PublicIpAddress)}else a.info("Instance could not start - check logs")}else a.info("No such instance found - check your AWS keys? 
: %s",e);return i}}class v{instanceId;publicKeyFile;instanceOsUser;region;availabilityZone;ec2Ratchet;instanceUtil;constructor(e,t=n.join(L.homedir(),".ssh","id_rsa.pub"),i="ec2-user",s="us-east-1",a="us-east-1a"){this.instanceId=e,this.publicKeyFile=t,this.instanceOsUser=i,this.region=s,this.availabilityZone=a,this.ec2Ratchet=new g(this.region,this.availabilityZone),this.instanceUtil=new B(this.ec2Ratchet)}static createFromArgs(e){if(1===e?.length||2===e?.length){const t=e[0];return new v(t)}return a.info("Usage : ratchet-start-instance-and-ssh {instanceId} {publicKeyFile} (Found %s arguments, need 1 or 2)",e),null}static async runFromCliArgs(e){return v.createFromArgs(e).run()}async run(){let e=await this.instanceUtil.startInstanceAndUploadPublicKeyFile(this.instanceId,this.publicKeyFile,this.instanceOsUser);if(e){a.info("Instance IP address is %s",e.PublicIpAddress);const t=C("ssh",[this.instanceOsUser+"@"+e.PublicIpAddress],{stdio:"inherit"});a.info("%j",t)}else a.info("No such instance found - check your AWS keys? : %s",this.instanceId)}}class Q extends d{fetchHandlerMap(){return{"site-uploader":x.runFromCliArgs,"start-instance-and-ssh":v.runFromCliArgs}}fetchVersionInfo(){return P.buildInformation()}}class z{canProcess(e){return!0}async processEmail(e){const t=[];try{i.notNullOrUndefined(e,"msg"),a.info("Processing Broadsign reach inbound inventory email");const n=e.attachments[0].content;a.info("Unzipping attachment");const s=new f(n);let o=null;const r=s.pipe(_.Parse()).on("entry",(async e=>{e.path.toLowerCase().endsWith("csv")?o=await e.buffer():(a.info("Pass: %s",e.path),e.autodrain())})).promise();await r;const c=await h.stringParse(o.toString(),(e=>e),{columns:!1,skip_empty_lines:!0});if(c.length>1){const e="drop table if exists sample";let n="create table sample (pump_date varchar(255),";const i=c[0];let s="insert into sample (pump_date,",o="?,";for(let e=0;e<i.length;e++){e>0&&(n+=", ",s+=", ",o+=", ");const t=i[e].toLowerCase().split(" ").join("_");s+=t,o+="?",n+=t+" varchar(255)","id"===t?n+=" primary key":"device_id"===t&&(n+=" unique")}n+=")",s+=") values ",a.info("Recreating table");t.push({statement:e}),t.push({statement:n});const r=A.utc().toISO();let l=s,u=[];for(let e=1;e<c.length;e++)l>s&&(l+=","),l+="("+o+")",u=u.concat(r,c[e]),e%25!=0&&e!==c.length-1||(t.push({statement:l,params:u}),l=s,u=[],a.info("Inserted %d of %d rows",e,c.length));a.info("Finished insertion of %d rows",c.length)}}catch(e){a.error("Failure: %s : %j",e,t,e)}return t}}class j{cache;processors;constructor(e,t){this.cache=e,this.processors=t,i.notNullOrUndefined(this.cache,"cache"),i.notNullOrUndefined(this.cache.getDefaultBucket(),"cache.defaultBucket")}async processEmailFromS3(e){if(await this.cache.fileExists(e)){const t=await this.cache.fetchCacheFileAsString(e);return this.processEmailFromBuffer(new Buffer(t))}return a.warn("Cannot process inbound email - no such key : %s",e),!1}async processEmailFromBuffer(e){i.notNullOrUndefined(e,"buf"),a.info("Processing inbound email - size %d bytes",e.length);const t=await I(e);a.info('Found mail from "%s" subject "%s" with %d attachments',t?.from?.text,t?.subject,t?.attachments?.length);let n=!1;for(let e=0;e<this.processors.length&&!n;e++)if(this.processors[e].canProcess(t)){a.info("Processing message with processor %d",e);const i=await this.processors[e].processEmail(t);a.info("Result was : %j",i),n=!0}return n}}class W{canProcess(e){return!0}async processEmail(e){return e.body}}class K{s3;tmpFolder;cacheTimeoutSeconds;static 
DEFAULT_CACHE_TIMEOUT_SEC=604800;currentlyLoading=new Map;constructor(t,n,a=K.DEFAULT_CACHE_TIMEOUT_SEC){this.s3=t,this.tmpFolder=n,this.cacheTimeoutSeconds=a,i.notNullOrUndefined(t,"s3"),i.notNullOrUndefined(s.trimToNull(n)),i.true(e.existsSync(n),"folder must exist : "+n)}async getFileString(e){const t=await this.getFileBuffer(e);return t?t.toString():null}keyToLocalCachePath(e){const t=this.generateCacheHash(this.s3.getDefaultBucket()+"/"+e);return n.join(this.tmpFolder,t)}removeCacheFileForKey(t){const n=this.keyToLocalCachePath(t);a.info("Removing cache file for %s : %s",t,n),e.existsSync(n)?e.unlinkSync(n):a.debug("Skipping delete for %s - does not exist",n)}async getFileBuffer(e){const t=this.keyToLocalCachePath(e);let n=null;if(n=this.getCacheFileAsBuffer(t),n)a.info("Found cache file for s3://%s/%s. Local path %s",this.s3.getDefaultBucket(),e,t);else{a.info("No cache. Downloading File s3://%s/%s to %s",this.s3.getDefaultBucket(),e,t);try{let i=this.currentlyLoading.get(e);i?a.info("Already running - wait for that"):(a.info("Not running - start"),i=this.updateLocalCacheFile(e,t),this.currentlyLoading.set(e,i)),n=await i,this.currentlyLoading.delete(e)}catch(t){a.warn("File %s/%s does not exist. Err code: %s",this.s3.getDefaultBucket(),e,t)}}return n}async updateLocalCacheFile(t,n){const i=await this.s3.fetchCacheFileAsBuffer(t);return i&&i.length>0&&(a.info("Saving %d bytes to disk for cache",i.length),e.writeFileSync(n,i)),i}getCacheFileAsString(e){const t=this.getCacheFileAsBuffer(e);return t?t.toString():null}getCacheFileAsBuffer(t){if(!e.existsSync(t))return null;try{const n=e.statSync(t),i=(new Date).getTime();if((i-n.ctimeMs)/1e3>=this.cacheTimeoutSeconds)return null;return e.readFileSync(t)}catch(e){a.warn("Error getting s3 cache file %s",e)}return null}generateCacheHash(e){return k.createHash("md5").update(e).digest("hex")}}var q,H,J;!function(e){e.Lazy="Lazy",e.OnStartup="OnStartup"}(q||(q={})),function(e){e.TreatSameSizeAsNoChange="TreatSameSizeAsNoChange"}(H||(H={})),function(e){e.EveryUpload="EveryUpload",e.Disabled="Disabled"}(J||(J={}));class V{config;_localFileName;_remoteStatus;_lastSyncEpochMS;_loadingRemoteSignal;constructor(e){if(this.config=e,i.notNullOrUndefined(e,"config"),i.notNullUndefinedOrOnlyWhitespaceString(e.s3Path,"s3Path"),i.notNullOrUndefined(e.s3CacheRatchetLike,"s3CacheRatchetLike"),i.notNullUndefinedOrOnlyWhitespaceString(e.s3CacheRatchetLike.getDefaultBucket(),"s3CacheRatchetLike.getDefaultBucket()"),e.forceLocalFileFullPath)this._localFileName=e.forceLocalFileFullPath;else{const t=e.s3Path.includes(".")?e.s3Path.substring(e.s3Path.lastIndexOf(".")+1):void 0;this._localFileName=e.forceLocalFileFullPath??E.fileSync({postfix:t,keep:e.leaveTempFileOnDisk}).name}a.info("Using local file %s for remote path %s %s",this._localFileName,this.config.s3CacheRatchetLike.getDefaultBucket(),this.config.s3Path),this.initialize().then((()=>{a.info("Initialized")}))}get remoteStatusData(){return!this._remoteStatus||this.config.remoteStatusTtlMs&&Date.now()-this._remoteStatus.updatedEpochMs>this.config.remoteStatusTtlMs?(async()=>{const e=await this.fetchRemoteMeta();return this._remoteStatus={updatedEpochMs:Date.now(),remoteSizeInBytes:e.ContentLength,remoteLastUpdatedEpochMs:e.LastModified.getTime(),remoteHash:e.ETag},this._remoteStatus})():Promise.resolve(this._remoteStatus)}async initialize(){a.info("Using local path %s to sync %s / 
%s",this._localFileName,this.config.s3CacheRatchetLike.getDefaultBucket(),this.config.s3Path),this.config.initMode===q.OnStartup&&(a.info("Initial loading"),this.fetchRemoteToLocal().then((()=>{a.info("Finished initial load")})))}async localAndRemoteAreSameSize(){let e=!1;const t=this.localFileBytes;if(null!==t){const n=(await this.remoteStatusData).remoteSizeInBytes;e=t===n,a.info("Local size is %s, remote is %s, same is %s",t,n,e)}return e}directWriteValueToLocalFile(t){i.notNullOrUndefined(t,"value"),e.writeFileSync(this._localFileName,t)}get lastSyncEpochMS(){return this._lastSyncEpochMS}get localFileName(){return this._localFileName}get localFileStats(){let t=null;if(e.existsSync(this._localFileName)){t=e.statSync(this._localFileName)}return t}get localFileBytes(){const e=this.localFileStats;return e?e.size:null}get localFileUpdatedEpochMS(){const e=this.localFileStats;return e?e.mtime.getTime():null}async fetchRemoteMeta(){return this.config.s3CacheRatchetLike.fetchMetaForCacheFile(this.config.s3Path)}get wouldFetch(){return(async()=>!this.hasFetchOptimization(H.TreatSameSizeAsNoChange)||!await this.localAndRemoteAreSameSize())()}get wouldPush(){return(async()=>!this.hasPushOptimization(H.TreatSameSizeAsNoChange)||!await this.localAndRemoteAreSameSize())()}async sendLocalToRemote(){const t=new c;a.info("Sending local file to remote");let n=null;if(await this.wouldPush)try{if(this.config.backupMode===J.EveryUpload){a.info("EveryUpload mode set - backing up");const e=await this.backupRemote();a.info("Backup result : %s",e)}const t=await this.config.s3CacheRatchetLike.writeStreamToCacheFile(this.config.s3Path,e.readFileSync(this._localFileName));a.silly("SendLocalToRemote: %j",t),this._remoteStatus=null,n=l.Updated}catch(e){a.error("Failed to transfer %s : %s",this._localFileName,e,e),n=l.Error}else a.info("TreatSameSizeAsNoChange set and files are same size - skipping"),n=l.Skipped;return a.info("Sent %d bytes to remote in %s",this.localFileBytes,t.dump()),n}async backupRemote(){let e=null;try{const t=this.config.s3Path.lastIndexOf("/"),n="/backup/"+A.now().toFormat("yyyy/MM/dd/HH/mm/ss")+"/",i=t>-1?this.config.s3Path.substring(0,t)+n+this.config.s3Path.substring(t+1):n+this.config.s3Path;a.info("Backing up path %s to %s",this.config.s3Path,i),await this.config.s3CacheRatchetLike.copyFile(this.config.s3Path,i),e=u.Success}catch(t){a.error("Failed to backup %s : %s",this.config.s3Path,t,t),e=u.Error}return e}async fetchRemoteToLocal(){return a.info("Called fetchRemoteToLocal"),this.fetchRemoteToLocalIfNewerThan(0)}async fetchRemoteToLocalIfNewerThan(e){return this._loadingRemoteSignal||(this._loadingRemoteSignal=this.innerFetchRemoteToLocalIfNewerThan(e)),this._loadingRemoteSignal}hasFetchOptimization(e){const t=!!e&&(this?.config?.fetchOptimizations||[]).includes(e);return a.info("hasFetchOptimization %s returning %s",e,t),t}hasPushOptimization(e){const t=!!e&&(this?.config?.pushOptimizations||[]).includes(e);return a.info("hasPushOptimization %s returning %s",e,t),t}async innerFetchRemoteToLocalIfNewerThan(t){try{const n=new c;if(await this.wouldFetch){const i={Bucket:this.config.s3CacheRatchetLike.getDefaultBucket(),Key:this.config.s3Path,IfModifiedSince:new Date(t)},s=await this.config.s3CacheRatchetLike.fetchCacheFilePassThru(i),o=e.createWriteStream(this._localFileName);s.Body.pipe(o),a.info("Waiting for pipe completion"),await r.resolveOnEvent(o,["close","finish"],["error"]),a.info("Pipe 
completed"),this._lastSyncEpochMS=Date.now(),this._remoteStatus={updatedEpochMs:Date.now(),remoteSizeInBytes:s.ContentLength,remoteLastUpdatedEpochMs:s.LastModified.getTime(),remoteHash:s.ETag},a.info("Fetched remote to local, %d bytes in %s",s.ContentLength,n.dump())}else a.info("TreatSameSizeAsNoChange not enabled OR files are same size - skipping"),this._lastSyncEpochMS=Date.now();return l.Updated}catch(e){return a.error("Failed to fetch %s / %s : %s",this.config.s3CacheRatchetLike.getDefaultBucket(),this.config.s3Path,e,e),l.Error}}}export{U as AlbAthenaLogRatchet,R as AthenaRatchet,D as DynamoExporter,B as Ec2InstanceUtil,z as EmailToDbInsertProcessor,j as InboundEmailRatchet,P as RatchetAwsNodeOnlyInfo,Q as RatchetCliHandler,K as S3CacheToLocalDiskRatchet,V as S3SyncedFile,q as S3SyncedFileConfigInitMode,H as S3SyncedFileOptimization,J as S3SyncedFileRemoteBackupMode,W as SampleEmailProcessor,x as SiteUploader,v as StartInstanceAndSsh};
  line 2 (unchanged):
//# sourceMappingURL=index.mjs.map
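Only line 1 of the bundle changed, and much of the byte churn is minifier noise: the mailparser/crypto imports simply swapped the identifiers k and I, and the embedded build metadata moved from version "413" (hash 4f9f5429..., branch alpha-2024-07-01-2) to "415" (hash 43b8898f..., branch alpha-2024-07-12-1). The substantive change is templating. createTable previously filled {{TABLE NAME}} and {{ALB_LOG_ROOT}} with chained split(...).join(...) calls and now calls StringRatchet.simpleTemplateFill(template, { TABLE_NAME, ALB_LOG_ROOT }, true); note the key changed from "TABLE NAME" to TABLE_NAME, which suggests the bundled albAthenaTableCreate.txt template changed as well (not visible in this diff). Likewise, runQueryToOutputLocation dropped the AthenaRatchet.applyParamsToQuery static in favor of StringRatchet.simpleTemplateFill(query, params, true, "{", "}"). A de-minified sketch of the removed helper, reconstructed from the old bundle above (simpleTemplateFill's signature beyond what these call sites show is an assumption):

// De-minified sketch of the AthenaRatchet.applyParamsToQuery static removed in
// 4.0.415-alpha: each "{key}" token in the query is replaced with the
// stringified parameter value, exactly as the old bundle does.
import { StringRatchet } from '@bitblit/ratchet-common';

function applyParamsToQuery<T extends object>(query: string, queryParams: T): string {
  let rval: string = query;
  if (rval && queryParams) {
    Object.keys(queryParams).forEach((key) => {
      const value: string = StringRatchet.safeString((queryParams as Record<string, unknown>)[key]);
      const token: string = '{' + key + '}';
      rval = rval.split(token).join(value); // split/join is a replace-all
    });
  }
  return rval;
}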
package/lib/types.d.ts
CHANGED
@@ -14,7 +14,6 @@ declare class AthenaRatchet {
     private outputLocation;
     constructor(athena: AthenaClient, s3: S3Client, outputLocation: string);
     static athenaRowsToObject<T>(input: Row[]): T[];
-    static applyParamsToQuery<T>(query: string, queryParams: T): string;
     fetchQueryIds(): Promise<string[]>;
     listQueries(): Promise<NamedQuery[]>;
     findQueryByName(name: string): Promise<NamedQuery>;
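Because applyParamsToQuery was part of the published typings, its removal is a small breaking change for downstream callers. A hypothetical migration sketch, assuming StringRatchet.simpleTemplateFill behaves like the call sites in the new bundle (same "{key}" tokens; the trailing true is copied from the minified source and its exact meaning is not confirmed by this diff):

import { StringRatchet } from '@bitblit/ratchet-common';

// Before (4.0.413-alpha), via the removed AthenaRatchet static:
//   const sql = AthenaRatchet.applyParamsToQuery('select * from logs where id = {id}', { id: '42' });

// After (4.0.415-alpha):
const sql: string = StringRatchet.simpleTemplateFill(
  'select * from logs where id = {id}',
  { id: '42' },
  true,
  '{',
  '}'
);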
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@bitblit/ratchet-aws-node-only",
-  "version": "4.0.413-alpha",
+  "version": "4.0.415-alpha",
   "description": "Common tools for use with AWS (Node only)",
   "note-on-side-effects": "Technically the entries in 'bin' below might be side effects, but they are called explicitly",
   "sideEffects": false,
@@ -60,8 +60,8 @@
     "@aws-sdk/client-athena": "3.600.0",
     "@aws-sdk/client-sts": "3.600.0",
     "@aws-sdk/types": "3.598.0",
-    "@bitblit/ratchet-aws": "4.0.
-    "@bitblit/ratchet-common": "4.0.
+    "@bitblit/ratchet-aws": "4.0.415-alpha",
+    "@bitblit/ratchet-common": "4.0.415-alpha",
     "@smithy/abort-controller": "3.1.0",
     "@smithy/smithy-client": "3.1.4",
     "@smithy/util-waiter": "3.1.0",