@coalescesoftware/coa 7.33.0-beta.1 → 7.33.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/coa.js CHANGED
@@ -10283,7 +10283,7 @@ CREATE TABLE {{ nodeMetadata.oldLocation }}[{{ nodeMetadata.oldName }}${this.dep
10283
10283
  `})}),t+=`${o}
10284
10284
  `}}),t&&(t=`Detected differences for the following columns:
10285
10285
 
10286
- ${t}`),t};var cc=Ye("PRESYNC"),BQe=class{constructor(t,n,i,o){this.platformKind=t;this.runTimeParameters=n;this.baseLogContext=i;this.renderer=o;this.perf=Q0("[Presync]",i)}nodesToSync={added:new Map,edited:new Map};qualifiedNameLookup=new Map;presyncDetails={};presyncedCurrentState={};summary=new Tcr;totalNodesCount=0;setupComplete=!1;_currentActualState=null;_updatedActualState=null;_currentWsData=null;_updatedWsData=null;perf;nodeLogContext(t,n,i){let o=n.operation.name;if(i){let s=i.operation.name;return{...this.baseLogContext,nodeID:t,currentNodeName:o,updatedNodeName:s}}else return{...this.baseLogContext,nodeID:t,nodeName:o}}getNode(t,n){return z1(t,n,"[Presync.getNode()]")}get currentActualState(){if(!this._currentActualState)throw new Xe("currentActualState has not been set. Call setup before accessing this property");return this._currentActualState}get updatedActualState(){if(!this._updatedActualState)throw new Xe("updatedActualState has not been set. Call setup before accessing this property");return this._updatedActualState}get updatedWsData(){if(!this._updatedWsData)throw new Xe("updatedWsData has not been set. 
Call setup before accessing this property");return this._updatedWsData}updateNodesCollection(t,n,i){let{node:o,syncDetails:s}=n;if(!o&&!s)throw new Xe("nodeData and syncDetails cannot both be null");o&&(this.presyncedCurrentState[t]=o),s&&(this.presyncDetails[t]=s,this.summary.add(i,t,s)),cc.debugContext(this.baseLogContext,`updateNodesCollection: ${t} added to: ${o?"--updatedNodesCollection ":""}${s?"--presyncDetails":""}`)}async setup(t,n,i){if(this.setupComplete)throw new Xe("Presync has already been setup");let o=this.baseLogContext,{measure:s}=this.perf;cc.appContext(o,"Initializing Presync setup");let{nodesFromCurrent:a,nodesFromUpdated:u}=await s("setup > setNodesToBeSynced",()=>this.setNodesToBeSynced(n.steps,i.steps));cc.infoContext(o,"current nodes:",qT(a),"updated nodes:",qT(u));let l=await s("setup > getImplementedLocations",()=>N_e(a,o)),c=N_e(u,o);cc.infoContext(o,"currentImplementedLocs:",l,"updatedImplementedLocs:",c);let{validLocations:d,affectedNodes:p,validNodes:f}=await s("setup > filterMissingLocations",()=>this.filterMissingLocations(t,l,a,o));p.size>0&&(cc.warnContext(o,"[setup] Missing current state locations detected on platform - nodes referencing these locations will be removed from current state"),this.summary.addNodesMissingLocations(p)),this.updateNodesToSyncForMissingLocations(p);let{currentLookup:h,updatedLookup:g}=await s("setup > getObjectAndColumnLookups",()=>t.getObjectAndColumnLookups(d,c,o)),{currentActualState:b,updatedActualState:T}=await s("setup > createState",()=>({currentActualState:t.createState(f,d,h.columnLookup,h.objectLookup),updatedActualState:t.createState(u,c,g.columnLookup,g.objectLookup)}));this._currentActualState=b,this._updatedActualState=T,this._updatedWsData=i,this.setupComplete=!0,cc.appContext(this.baseLogContext,"Presync setup complete")}async filterMissingLocations(t,n,i,o){let s=this.getImplementedDbSchemaMap(n),a=await 
this.auditLocations(t,s,o),{validNodes:u,affectedNodes:l}=this.auditNodesByLocation(i,a);return{validLocations:a,affectedNodes:l,validNodes:u}}updateNodesToSyncForMissingLocations(t){let n=[];for(let[i,{name:o,locationName:s}]of t)if(this.nodesToSync.edited.has(i)){let a=this.nodesToSync.edited.get(i);if(!a)throw new Xe(`(${i}) edited node should exist in nodesToSync.edited but is undefined`);this.nodesToSync.added.set(i,a.updated),this.nodesToSync.edited.delete(i);let{name:u,locationName:l}=a.updated.operation;n.push(`current: ${o} (${s}) -> updated: ${u} (${l}) (ID: ${i})`)}cc.appContext(this.baseLogContext,"[updateNodesToSyncForMissingLocations] The following nodes were migrated from edited to added due to their current location no longer being valid.",n.join(" | "))}setNodesToBeSynced=(t,n)=>{let i=this.baseLogContext,{deletedEntityIDs:o,addedEntityIDs:s,entitiesWithPossibleEdits:a}=p_e(n,t);cc.infoContext(i,"deletedEntityIDs:",o,"addedEntityIDs:",s,"entitiesWithPossibleEdits:",a),this.totalNodesCount=o.length+s.length+a.length;let u={},l={};for(let c of s){let d=this.getNode(c,n);lTo(d,this.nodeLogContext(c,d))&&(this.nodesToSync.added.set(c,d),l[c]=d)}for(let c of a){let d=this.getNode(c,t),p=this.getNode(c,n);lTo(p,this.nodeLogContext(c,d))&&lTo(d,this.nodeLogContext(c,p))?(this.nodesToSync.edited.set(c,{updated:p,current:d}),u[c]=d,l[c]=p):this.presyncedCurrentState[c]=d}for(let c of o)this.presyncedCurrentState[c]=this.getNode(c,t);return this.qualifiedNameLookup=new Map(Object.entries(t).filter(([c,d])=>Fp(d,this.nodeLogContext(c,d))).map(([c,d])=>{let{database:p,name:f,schema:h}=d.operation;return[[p,h,f].join("."),c]})),{nodesFromCurrent:u,nodesFromUpdated:l}};async syncNodes(t){if(!this.setupComplete)throw new Xe("Presync setup must be called before running presync");let{measure:n}=this.perf,i=performance.now(),{added:o,edited:s}=this.nodesToSync,a=[];cc.appContext(this.baseLogContext,`Presync ${o.size} Added Nodes`);for(let[u,l]of o){let 
c=this.nodeLogContext(u,l),{operation:{database:d,name:p,schema:f}}=l;cc.debugContext(c,"Presync added node:",u,"nodeName:",p),cc.debugContext(c,"Node data:",l);let h=this.updatedActualState.materializationTypeOnWarehouse(u);if(!h){cc.debugContext(c,"Added node does not exist at destination, no conflict.");continue}let g=[d,f,p].join("."),b=this.qualifiedNameLookup.get(g);if(b){cc.infoContext(c,`Node with shared object ('${g}') & different id ('${b}') already exists at destination; skipping`);continue}cc.debugContext(c,"Added node already exists at destination, syncing..."),a.push(async()=>{let T=await this.addedNodePresyncUpdate(u,l,h);this.updateNodesCollection(u,T,"added")})}cc.appContext(this.baseLogContext,`Presync ${s.size} Edited Nodes`);for(let u of s){let[l,{current:c,updated:d}]=u,p=this.nodeLogContext(l,c,d);cc.debugContext(p,"Syncing edited node:",l),cc.debugContext(p,"Current node data:",c,"Updated node data:",d),a.push(async()=>{let f=await this.editedNodePresyncUpdate(l,c,d);this.updateNodesCollection(l,f,"edited")})}return await n("syncNodes > columnSync",()=>One(a,12)),this.summary.log(this.baseLogContext),cc.infoContext(this.baseLogContext,`Presync handled ${this.totalNodesCount} nodes in ${performance.now()-i}ms`),t&&await n("syncNodes > detectPlatformBlockers",()=>this.detectPlatformBlockers(t)),this.perf.printSummary(),{nodes:this.presyncedCurrentState,details:this.presyncDetails}}addedNodePresyncUpdate=async(t,n,i)=>{let o=this.nodeLogContext(t,n),s=el(n),a=A3(s,o),u=w_e(a,this.platformKind),{materializationType:l}=s.operation;if(l!==i)return cc.infoContext(o,"data warehouse object has mismatched type. 
expected:",l,"actual:",i),vcr(s,i,u,a,this.platformKind,o);{let c=await this.syncColumnsWithWarehouse(t,s,this.updatedActualState);if(c){cc.debugContext(o,"data warehouse object has mismatched/unexpected columns and/or values.");let{syncedColumns:d,columnDiffs:p}=c;return cAc(s,d,p,this.platformKind,o)}else return cc.debugContext(o,"data warehouse object has all expected columns and values. metadata update."),cTo(s,this.platformKind,o)}};getLocationInformation(t,n,i){let o=w_e(A3(t,i),this.platformKind),s=w_e(A3(n,i),this.platformKind),a=this.getNodeComparisonName(t.operation.name),u=this.getNodeComparisonName(n.operation.name),l=!(o===s&&a===u);return cc.debugContext(i,"locationFromExpected:",o,"locationFromStaged:",s,"expectedName:",a,"stagedName:",u),{locationFromExpected:o,locationFromStaged:s,locationChanged:l}}editedNodePresyncUpdate=async(t,n,i)=>{let o=this.nodeLogContext(t,n,i),s=this.currentActualState.materializationTypeOnWarehouse(t),a=n.operation.materializationType,u=a!==s,l=el(i),{locationFromExpected:c,locationFromStaged:d,locationChanged:p}=this.getLocationInformation(n,l,o);return p?await this.getUpdateForEditedNodeWithLocationChange(t,n,l,a,c,d,s,u):s?u?(cc.infoContext(o,`Has no location difference and exists at current as unexpected type ${s}.`),vcr(n,s,c,null,this.platformKind,o)):(cc.debugContext(o,"Has no location difference and exists at current without type mismatch (expected)."),await this.getUpdateForEditedWithoutLocationChange(t,n,l)):(cc.infoContext(o,"Has no location difference and does not exist."),aAc(t,c))};async getUpdateForEditedNodeWithLocationChange(t,n,i,o,s,a,u,l){let c=this.nodeLogContext(t,n,i),d=this.updatedActualState.materializationTypeOnWarehouse(t),p=o!==d;if(d)if(p){cc.infoContext(c,`Has a location difference and unexpectedly already exists at updated location as unexpected type ${d}`);let f=A3(i,c);return vcr(i,d,a,f,this.platformKind,c)}else{let f=await 
this.syncColumnsWithWarehouse(t,i,this.updatedActualState);if(f){let{syncedColumns:h,columnDiffs:g}=f;return cc.infoContext(c,"Has location difference and unexpectedly exists at updated location with unexpected columns and/or col properties/values."),cAc(i,h,g,this.platformKind,c)}else return cc.infoContext(c,"Node unexpectedly existed at updated location but with all expected columns and values."),cTo(i,this.platformKind,c)}else if(cc.debugContext(c,"Has a location difference and does not exist at the updated location (expected)."),u){if(l)return cc.infoContext(c,`Has a location difference and exists at current as unexpected type ${u}.`),vcr(n,u,s,null,this.platformKind,c);{let f=await this.syncColumnsWithWarehouse(t,n,this.currentActualState);return f?(cc.infoContext(c,"Has a location difference and exists at current without type mismatch but with uneexpected column differences."),uAc(n,f.syncedColumns,f.columnDiffs,this.platformKind,c)):(cc.debugContext(c,"Has a location difference and exists at current without type mismatch (expected)."),sAc(n))}}else return cc.infoContext(c,"Has a location difference and does not exist at the current location."),aAc(t,s)}getUpdateForEditedWithoutLocationChange=async(t,n,i)=>{let o=this.nodeLogContext(t,n,i),s=await this.syncColumnsWithWarehouse(t,n,this.currentActualState);if(s){cc.debugContext(o,"Node has all expected columns and values, but location has changed. 
Checking.");let a=await this.syncColumnsWithWarehouse(t,i,this.updatedActualState);return a?(cc.debugContext(o,"Node exists as expected, but with column differences detected between staged and actual state."),uAc(n,s.syncedColumns,a.columnDiffs,this.platformKind,o)):(cc.infoContext(o,"Node unexpectedly exists as expected without differences between staged and actual state."),cTo(i,this.platformKind,o))}else return cc.debugContext(o,"Node has all expected columns and values (expected)."),sAc(n)};getNodeComparisonName(t){return $h({snowflake:()=>t,fabric:()=>t,databricks:()=>t.toLowerCase(),bigquery:()=>t},this.platformKind,"getNodeComparisonName")}getColumnComparisonName(t){return $h({snowflake:()=>t,fabric:()=>t,databricks:()=>t.toLowerCase(),bigquery:()=>t.toLowerCase()},this.platformKind,"getColumnComparisonName")}async syncColumnsWithWarehouse(t,n,i){let o="[Presync.syncColumnsWithWarehouse()]";if(!this.setupComplete)throw new Xe("Presync setup must be called before running presync");let s=this.nodeLogContext(t,n),a=Object.fromEntries(n.operation.metadata.columns.map((f,h)=>[this.getColumnComparisonName(f.name),{columnData:f,index:h}])),u;try{u=Object.fromEntries(i.getDataWarehouseColumns(t))}catch(f){throw cc.infoContext(s,"column lookup: ",i.columnLookup),cc.errorContext(s,"Error trying to get warehouse columns",f instanceof Xe?f.toString():f),f}let l={},c=[],d=new Set;for(let[f,h]of Object.entries(u)){if(!a[f]){l[f]=tAc(f);continue}let{columnData:g,index:b}=Fic(f,a,`${o} originalColumns`);if(f in a){let T={...g};await this.renderDynamicColumnValues(T,n,t,s);let v=i.updateColumnWithWarehouseColumn(T,h,n.operation.materializationType),R=[];Object.entries(v).forEach(([w,x])=>{x===void 0&&R.push(w)}),R.length&&cc.alertContext(s,`Undefined values detected for column at index ${b}`,R),v.nullable=v.nullable.toString()==="true",T.nullable=T.nullable.toString()==="true",UA("PRESYNC",!l[f],`Column diff already exists for column ${f} in the col diff record. 
Existing diff: `,l[f]);let O=this.getColumnDifferences((0,lAc.diff)(T,v),f,s);O&&(l[f]=O),c[b]=v}}for(let[f,{index:h}]of Object.entries(a))f in u||(d.add(h),l[f]=rAc());let p=c.filter((f,h)=>!d.has(h));return Object.keys(l).length?{syncedColumns:p,columnDiffs:l,shouldCreate:!1}:null}async renderDynamicColumnValue(t,n,i,o,s){let a=Coc(this.updatedWsData.stepTypes),u=Zyo(this.updatedWsData.stepTypes,n.operation.sqlType),l=DS(this.updatedWsData.steps),c=hC(n.operation,i,this.updatedWsData.steps,a,this.updatedActualState.locations,null,this.runTimeParameters,l,s,this.platformKind,!0),d=lie(o,u,n.operation.name,this.platformKind),p=yQe(zSo(this.platformKind,this.updatedWsData.macros)),{compiledSQL:f}=await VSo(o.name,this.renderer,{...c,column:d},t,p,this.updatedWsData.installedPackages||{},this.runTimeParameters,void 0,u,{nodeID:i},!0);return f}async renderDynamicColumnValues(t,n,i,o){let s=["description","defaultValue"];for(let a of s){let u=t[a];if(u){if(typeof u!="string")throw new Xe(`Expected column field ${a} to be a string, but got ${typeof t[a]}`);try{let l=/({{|{%|{#)/g;u.match(l)&&(t[a]=await this.renderDynamicColumnValue(u,n,i,t,o))}catch(l){throw cc.errorContext(o,"Error trying to render dynamic column values",l instanceof Xe?l.toString():l),l}}}return t}getColumnDifferences(t,n,i){let o={};return!t||!Array.isArray(t)?null:(cc.debugContext(i,`Diff detected for column ${n}:
10286
+ ${t}`),t};var cc=Ye("PRESYNC"),BQe=class{constructor(t,n,i,o){this.platformKind=t;this.runTimeParameters=n;this.baseLogContext=i;this.renderer=o;this.perf=Q0("[Presync]",i)}nodesToSync={added:new Map,edited:new Map};qualifiedNameLookup=new Map;presyncDetails={};presyncedCurrentState={};summary=new Tcr;totalNodesCount=0;setupComplete=!1;_currentActualState=null;_updatedActualState=null;_currentWsData=null;_updatedWsData=null;perf;nodeLogContext(t,n,i){let o=n.operation.name;if(i){let s=i.operation.name;return{...this.baseLogContext,nodeID:t,currentNodeName:o,updatedNodeName:s}}else return{...this.baseLogContext,nodeID:t,nodeName:o}}getNode(t,n){return z1(t,n,"[Presync.getNode()]")}get currentActualState(){if(!this._currentActualState)throw new Xe("currentActualState has not been set. Call setup before accessing this property");return this._currentActualState}get updatedActualState(){if(!this._updatedActualState)throw new Xe("updatedActualState has not been set. Call setup before accessing this property");return this._updatedActualState}get updatedWsData(){if(!this._updatedWsData)throw new Xe("updatedWsData has not been set. 
Call setup before accessing this property");return this._updatedWsData}updateNodesCollection(t,n,i){let{node:o,syncDetails:s}=n;if(!o&&!s)throw new Xe("nodeData and syncDetails cannot both be null");o&&(this.presyncedCurrentState[t]=o),s&&(this.presyncDetails[t]=s,this.summary.add(i,t,s)),cc.debugContext(this.baseLogContext,`updateNodesCollection: ${t} added to: ${o?"--updatedNodesCollection ":""}${s?"--presyncDetails":""}`)}async setup(t,n,i){if(this.setupComplete)throw new Xe("Presync has already been setup");let o=this.baseLogContext,{measure:s}=this.perf;cc.appContext(o,"Initializing Presync setup");let{nodesFromCurrent:a,nodesFromUpdated:u}=await s("setup > setNodesToBeSynced",()=>this.setNodesToBeSynced(n.steps,i.steps));cc.infoContext(o,"current nodes:",qT(a),"updated nodes:",qT(u));let l=await s("setup > getImplementedLocations",()=>N_e(a,o)),c=N_e(u,o);cc.infoContext(o,"currentImplementedLocs:",l,"updatedImplementedLocs:",c);let{validLocations:d,affectedNodes:p,validNodes:f}=await s("setup > filterMissingLocations",()=>this.filterMissingLocations(t,l,a,o));p.size>0&&(cc.warnContext(o,"[setup] Missing current state locations detected on platform - nodes referencing these locations will be removed from current state"),this.summary.addNodesMissingLocations(p)),this.updateNodesToSyncForMissingLocations(p);let{currentLookup:h,updatedLookup:g}=await s("setup > getObjectAndColumnLookups",()=>t.getObjectAndColumnLookups(d,c,o)),{currentActualState:b,updatedActualState:T}=await s("setup > createState",()=>({currentActualState:t.createState(f,d,h.columnLookup,h.objectLookup),updatedActualState:t.createState(u,c,g.columnLookup,g.objectLookup)}));this._currentActualState=b,this._updatedActualState=T,this._updatedWsData=i,this.setupComplete=!0,cc.appContext(this.baseLogContext,"Presync setup complete")}async filterMissingLocations(t,n,i,o){let s=this.getImplementedDbSchemaMap(n),a=await 
this.auditLocations(t,s,o),{validNodes:u,affectedNodes:l}=this.auditNodesByLocation(i,a);return{validLocations:a,affectedNodes:l,validNodes:u}}updateNodesToSyncForMissingLocations(t){let n=[];for(let[i,{name:o,locationName:s}]of t)if(this.nodesToSync.edited.has(i)){let a=this.nodesToSync.edited.get(i);if(!a)throw new Xe(`(${i}) edited node should exist in nodesToSync.edited but is undefined`);this.nodesToSync.added.set(i,a.updated),this.nodesToSync.edited.delete(i);let{name:u,locationName:l}=a.updated.operation;n.push(`current: ${o} (${s}) -> updated: ${u} (${l}) (ID: ${i})`)}cc.appContext(this.baseLogContext,"[updateNodesToSyncForMissingLocations] The following nodes were migrated from edited to added due to their current location no longer being valid.",n.join(" | "))}setNodesToBeSynced=(t,n)=>{let i=this.baseLogContext,{deletedEntityIDs:o,addedEntityIDs:s,entitiesWithPossibleEdits:a}=p_e(n,t);cc.infoContext(i,"deletedEntityIDs:",o,"addedEntityIDs:",s,"entitiesWithPossibleEdits:",a),this.totalNodesCount=o.length+s.length+a.length;let u={},l={};for(let c of s){let d=this.getNode(c,n);lTo(d,this.nodeLogContext(c,d))&&(this.nodesToSync.added.set(c,d),l[c]=d)}for(let c of a){let d=this.getNode(c,t),p=this.getNode(c,n);lTo(p,this.nodeLogContext(c,d))&&lTo(d,this.nodeLogContext(c,p))?(this.nodesToSync.edited.set(c,{updated:p,current:d}),u[c]=d,l[c]=p):this.presyncedCurrentState[c]=d}for(let c of o)this.presyncedCurrentState[c]=this.getNode(c,t);return this.qualifiedNameLookup=new Map(Object.entries(t).filter(([c,d])=>Fp(d,this.nodeLogContext(c,d))).map(([c,d])=>{let{database:p,name:f,schema:h}=d.operation;return[[p,h,f].join("."),c]})),{nodesFromCurrent:u,nodesFromUpdated:l}};async syncNodes(t){if(!this.setupComplete)throw new Xe("Presync setup must be called before running presync");let{measure:n}=this.perf,i=performance.now(),{added:o,edited:s}=this.nodesToSync,a=[];cc.appContext(this.baseLogContext,`Presync ${o.size} Added Nodes`);for(let[u,l]of o){let 
c=this.nodeLogContext(u,l),{operation:{database:d,name:p,schema:f}}=l;cc.debugContext(c,"Presync added node:",u,"nodeName:",p),cc.debugContext(c,"Node data:",l);let h=this.updatedActualState.materializationTypeOnWarehouse(u);if(!h){cc.debugContext(c,"Added node does not exist at destination, no conflict.");continue}let g=[d,f,p].join("."),b=this.qualifiedNameLookup.get(g);if(b){cc.infoContext(c,`Node with shared object ('${g}') & different id ('${b}') already exists at destination; skipping`);continue}cc.debugContext(c,"Added node already exists at destination, syncing..."),a.push(async()=>{let T=await this.addedNodePresyncUpdate(u,l,h);this.updateNodesCollection(u,T,"added")})}cc.appContext(this.baseLogContext,`Presync ${s.size} Edited Nodes`);for(let u of s){let[l,{current:c,updated:d}]=u,p=this.nodeLogContext(l,c,d);cc.debugContext(p,"Syncing edited node:",l),cc.debugContext(p,"Current node data:",c,"Updated node data:",d),a.push(async()=>{let f=await this.editedNodePresyncUpdate(l,c,d);this.updateNodesCollection(l,f,"edited")})}return await n("syncNodes > columnSync",()=>One(a,12)),this.summary.log(this.baseLogContext),cc.infoContext(this.baseLogContext,`Presync handled ${this.totalNodesCount} nodes in ${performance.now()-i}ms`),t&&await n("syncNodes > detectPlatformBlockers",()=>this.detectPlatformBlockers(t)),this.perf.printSummary(),{nodes:this.presyncedCurrentState,details:this.presyncDetails}}addedNodePresyncUpdate=async(t,n,i)=>{let o=this.nodeLogContext(t,n),s=el(n),a=A3(s,o),u=w_e(a,this.platformKind),{materializationType:l}=s.operation;if(l!==i)return cc.infoContext(o,"data warehouse object has mismatched type. 
expected:",l,"actual:",i),vcr(s,i,u,a,this.platformKind,o);{let c=await this.syncColumnsWithWarehouse(t,s,this.updatedActualState);if(c){cc.debugContext(o,"data warehouse object has mismatched/unexpected columns and/or values.");let{syncedColumns:d,columnDiffs:p}=c;return cAc(s,d,p,this.platformKind,o)}else return cc.debugContext(o,"data warehouse object has all expected columns and values. metadata update."),cTo(s,this.platformKind,o)}};getLocationInformation(t,n,i){let o=w_e(A3(t,i),this.platformKind),s=w_e(A3(n,i),this.platformKind),a=this.getNodeComparisonName(t.operation.name),u=this.getNodeComparisonName(n.operation.name),l=!(o===s&&a===u);return cc.debugContext(i,"locationFromExpected:",o,"locationFromStaged:",s,"expectedName:",a,"stagedName:",u),{locationFromExpected:o,locationFromStaged:s,locationChanged:l}}editedNodePresyncUpdate=async(t,n,i)=>{let o=this.nodeLogContext(t,n,i),s=this.currentActualState.materializationTypeOnWarehouse(t),a=n.operation.materializationType,u=a!==s,l=el(i),{locationFromExpected:c,locationFromStaged:d,locationChanged:p}=this.getLocationInformation(n,l,o);return p?await this.getUpdateForEditedNodeWithLocationChange(t,n,l,a,c,d,s,u):s?u?(cc.infoContext(o,`Has no location difference and exists at current as unexpected type ${s}.`),vcr(n,s,c,null,this.platformKind,o)):(cc.debugContext(o,"Has no location difference and exists at current without type mismatch (expected)."),await this.getUpdateForEditedWithoutLocationChange(t,n,l)):(cc.infoContext(o,"Has no location difference and does not exist."),aAc(t,c))};async getUpdateForEditedNodeWithLocationChange(t,n,i,o,s,a,u,l){let c=this.nodeLogContext(t,n,i),d=this.updatedActualState.materializationTypeOnWarehouse(t),p=o!==d;if(d)if(p){cc.infoContext(c,`Has a location difference and unexpectedly already exists at updated location as unexpected type ${d}`);let f=A3(i,c);return vcr(i,d,a,f,this.platformKind,c)}else{let f=await 
this.syncColumnsWithWarehouse(t,i,this.updatedActualState);if(f){let{syncedColumns:h,columnDiffs:g}=f;return cc.infoContext(c,"Has location difference and unexpectedly exists at updated location with unexpected columns and/or col properties/values."),cAc(i,h,g,this.platformKind,c)}else return cc.infoContext(c,"Node unexpectedly existed at updated location but with all expected columns and values."),cTo(i,this.platformKind,c)}else if(cc.debugContext(c,"Has a location difference and does not exist at the updated location (expected)."),u){if(l)return cc.infoContext(c,`Has a location difference and exists at current as unexpected type ${u}.`),vcr(n,u,s,null,this.platformKind,c);{let f=await this.syncColumnsWithWarehouse(t,n,this.currentActualState);return f?(cc.infoContext(c,"Has a location difference and exists at current without type mismatch but with uneexpected column differences."),uAc(n,f.syncedColumns,f.columnDiffs,this.platformKind,c)):(cc.debugContext(c,"Has a location difference and exists at current without type mismatch (expected)."),sAc(n))}}else return cc.infoContext(c,"Has a location difference and does not exist at the current location."),aAc(t,s)}getUpdateForEditedWithoutLocationChange=async(t,n,i)=>{let o=this.nodeLogContext(t,n,i),s=await this.syncColumnsWithWarehouse(t,n,this.currentActualState);if(s){cc.debugContext(o,"Node has all expected columns and values, but location has changed. 
Checking.");let a=await this.syncColumnsWithWarehouse(t,i,this.updatedActualState);if(a){cc.debugContext(o,"Node exists as expected, but with column differences detected between staged and actual state.");let u=new Set(s.syncedColumns.map(c=>c.columnReference.columnCounter)),l=[...s.syncedColumns,...a.syncedColumns.filter(c=>!u.has(c.columnReference.columnCounter))];return uAc(n,l,a.columnDiffs,this.platformKind,o)}else return cc.infoContext(o,"Node unexpectedly exists as expected without differences between staged and actual state."),cTo(i,this.platformKind,o)}else return cc.debugContext(o,"Node has all expected columns and values (expected)."),sAc(n)};getNodeComparisonName(t){return $h({snowflake:()=>t,fabric:()=>t,databricks:()=>t.toLowerCase(),bigquery:()=>t},this.platformKind,"getNodeComparisonName")}getColumnComparisonName(t){return $h({snowflake:()=>t,fabric:()=>t,databricks:()=>t.toLowerCase(),bigquery:()=>t.toLowerCase()},this.platformKind,"getColumnComparisonName")}async syncColumnsWithWarehouse(t,n,i){let o="[Presync.syncColumnsWithWarehouse()]";if(!this.setupComplete)throw new Xe("Presync setup must be called before running presync");let s=this.nodeLogContext(t,n),a=Object.fromEntries(n.operation.metadata.columns.map((f,h)=>[this.getColumnComparisonName(f.name),{columnData:f,index:h}])),u;try{u=Object.fromEntries(i.getDataWarehouseColumns(t))}catch(f){throw cc.infoContext(s,"column lookup: ",i.columnLookup),cc.errorContext(s,"Error trying to get warehouse columns",f instanceof Xe?f.toString():f),f}let l={},c=[],d=new Set;for(let[f,h]of Object.entries(u)){if(!a[f]){l[f]=tAc(f);continue}let{columnData:g,index:b}=Fic(f,a,`${o} originalColumns`);if(f in a){let T={...g};await this.renderDynamicColumnValues(T,n,t,s);let v=i.updateColumnWithWarehouseColumn(T,h,n.operation.materializationType),R=[];Object.entries(v).forEach(([w,x])=>{x===void 0&&R.push(w)}),R.length&&cc.alertContext(s,`Undefined values detected for column at index 
${b}`,R),v.nullable=v.nullable.toString()==="true",T.nullable=T.nullable.toString()==="true",UA("PRESYNC",!l[f],`Column diff already exists for column ${f} in the col diff record. Existing diff: `,l[f]);let O=this.getColumnDifferences((0,lAc.diff)(T,v),f,s);O&&(l[f]=O),c[b]=v}}for(let[f,{index:h}]of Object.entries(a))f in u||(d.add(h),l[f]=rAc());let p=c.filter((f,h)=>!d.has(h));return Object.keys(l).length?{syncedColumns:p,columnDiffs:l,shouldCreate:!1}:null}async renderDynamicColumnValue(t,n,i,o,s){let a=Coc(this.updatedWsData.stepTypes),u=Zyo(this.updatedWsData.stepTypes,n.operation.sqlType),l=DS(this.updatedWsData.steps),c=hC(n.operation,i,this.updatedWsData.steps,a,this.updatedActualState.locations,null,this.runTimeParameters,l,s,this.platformKind,!0),d=lie(o,u,n.operation.name,this.platformKind),p=yQe(zSo(this.platformKind,this.updatedWsData.macros)),{compiledSQL:f}=await VSo(o.name,this.renderer,{...c,column:d},t,p,this.updatedWsData.installedPackages||{},this.runTimeParameters,void 0,u,{nodeID:i},!0);return f}async renderDynamicColumnValues(t,n,i,o){let s=["description","defaultValue"];for(let a of s){let u=t[a];if(u){if(typeof u!="string")throw new Xe(`Expected column field ${a} to be a string, but got ${typeof t[a]}`);try{let l=/({{|{%|{#)/g;u.match(l)&&(t[a]=await this.renderDynamicColumnValue(u,n,i,t,o))}catch(l){throw cc.errorContext(o,"Error trying to render dynamic column values",l instanceof Xe?l.toString():l),l}}}return t}getColumnDifferences(t,n,i){let o={};return!t||!Array.isArray(t)?null:(cc.debugContext(i,`Diff detected for column ${n}:
10287
10287
  `,t),t.forEach(s=>{o[s.kind]||(o[s.kind]=[]);let a=o[s.kind];if(!a)throw new Xe(`Column ${n} not found in columnChanges`);a.push(oAc(s,i))}),o)}getImplementedDbSchemaMap(t){let n=new Map,i=new Map;for(let{database:o,schema:s,locationName:a}of nHi(t)){let u=n.get(o);u?u.add(s):n.set(o,new Set([s]));let l=`${o}.${s}`;i.has(l)?i.get(l).push(a):i.set(l,[a])}return{dbToSchemasMap:n,qualifiedLocationToLocationNameMap:i}}async auditLocations(t,n,i){let o="[auditLocations]",s={},{dbToSchemasMap:a,qualifiedLocationToLocationNameMap:u}=n;for(let[l,c]of a){let d;try{d=new Set(await t.getSchemaNames(l))}catch(p){cc.warnContext(i,`${o} Error getting schemas for database ${l}:`,p);continue}for(let p of c){let f=u.get(`${l}.${p}`);if(d.has(p)){if(!f)throw new Xe(`Location names not found for qualified location ${l}.${p}`);for(let h of f)s[h]={database:l,schema:p,locationName:h}}else cc.warnContext(i,`${o} Schema ${p} not found in database ${l} for locations ${f?.join(", ")}`)}}return s}auditNodesByLocation(t,n){let i=new Map,o={};for(let[s,a]of Object.entries(t)){let u=a.operation.locationName;n[u]?o[s]=a:i.set(s,{name:a.operation.name,locationName:u})}return{validNodes:o,affectedNodes:i}}},sAc=r=>({syncDetails:null,node:r}),vcr=(r,t,n,i,o,s)=>{r.operation.materializationType=t;let a=nAc(r.operation.name,n,t),u=Icr(a,null);return{node:_cr(r,i,o,s).node,syncDetails:u}},aAc=(r,t)=>({syncDetails:Icr(`Node with ID ${r} did not exist at old location: ${t}`,null),node:null}),cTo=(r,t,n)=>{let i=A3(r,n),{node:o,location:s}=_cr(r,i,t,n),{operation:a}=r,u=aTo(a.name,s,a.materializationType)+"with all expected columns.";return{node:o,syncDetails:Icr(u,null)}},uAc=(r,t,n,i,o)=>{let s=_cr(r,null,i,o).node;return dAc(s,t,n,null)},cAc=(r,t,n,i,o)=>{let s=A3(r,o),{node:a,location:u}=_cr(r,s,i,o),{name:l,materializationType:c}=a.operation,d=aTo(l,u,c)+"with unexpected column differences.";return 
dAc(a,t,n,d)},dAc=(r,t,n,i)=>(r.operation.metadata.columns=t,{node:r,syncDetails:Icr(i,n)}),Icr=(r,t)=>({object:r,columns:t}),_cr=(r,t,n,i)=>{let o="";if(t){let s=r.operation.locationName;if(!t.database||!t.schema)throw new Xe(`Database or schema not found in location ${s}`);if(s!==t.locationName)throw new Xe(`Location name mismatch. Node is using ${s}, but location is using ${t.locationName}`);let{name:a,database:u,schema:l}=r.operation;cc.debugContext(i,`Augmenting location data for ${a}. Was:`,{database:u,schema:l,locationName:s},"Now:",t),r.operation.database=t.database,r.operation.schema=t.schema,o=w_e(t,n)}return{node:{...r,presync:!0},location:o}},lTo=(r,t)=>{if(!Fp(r,t))return!1;let n=r.operation.materializationType;return n==="table"||n==="view"};var Rcr=class extends BQe{constructor(t,n,i,o){if(t==="databricks")throw new Xe("databricks is not supported for PresyncCommon, use PresyncDatabricks instead");super(t,n,i,o)}async detectPlatformBlockers(){}};var vU1=r=>{let t={},n=r.match(/Table Properties:\s*\[(.*?)\]/i);return!n||!n[1]||n[1].split(/,\s*/).forEach(s=>{let a=s.indexOf("=");if(a!==-1){let u=s.substring(0,a).trim(),l=s.substring(a+1).trim();t[u]=l}}),t},pAc=r=>{let t=" ",n=`
10288
10288
  `,i=[];return Object.entries(r).forEach(([o,s])=>{i.push(`${o}:`),Object.keys(s).length===0?i.push(`${t}TBLPROPERTIES not found or empty`):Object.entries(s).forEach(([a,u])=>{i.push(`${t}${a}: ${u}`)}),i.push("")}),i.join(n)},fAc=(r,t)=>{let n={};r.forEach(o=>{if(!o.tableName||!o.information)return;let{database:s,tableName:a}=o,u=IU1(a,o.information),l=vU1(o.information);if(!(dTo(u,s,a)in t))return;let c=o.information.toLowerCase();!c.includes("provider: delta")||c.includes("type: view")||l["delta.columnMapping.mode"]!=="name"&&(n[a]=l)});let i=new Set;return Object.values(n).forEach(o=>{Object.keys(o).forEach(s=>i.add(s))}),n},IU1=(r,t)=>{let n=t.match(/Catalog:\s*(.*?)(?:\n|$)/i);if(n&&n[1])return n[1];throw new Xe(`Catalog not found for table: ${r}`,`Table information was missing a value for "Catalog": ${t}`)},dTo=(r,t,n)=>r?`${r}.${t}.${n}`:`${t}.${n}`;var _U1=Ye("PRESYNC"),Ocr=class extends BQe{constructor(t,n,i,o){if(t!=="databricks")throw new Xe(`platformKind: ${t} is not supported for PresyncDatabricks, use PresyncCommon instead`);super(t,n,i,o)}async detectPlatformBlockers(t){let n=lHi(this.presyncedCurrentState,(a,u)=>{let{database:l,schema:c,name:d,locationName:p,type:f}=u.operation;if(f==="sourceInput"||u.operation.materializationType==="view")return a;if(!l||!c){let h=`Database or schema not set for node: ${d}`,g=`Location: ${p}
10289
10289
  Database: ${l}
@@ -10330,7 +10330,7 @@ ${R} JOIN ${v.source.source}`,v.source.alias&&(g+=` AS ${v.source.alias}`),v.con
10330
10330
  `,c),Il.appContext(this.logContext,t,`Processing "delete" phase edits (${u.length})`),await n("deletes",()=>{u.forEach(d=>{let p={...this.logContext,nodeID:d};this.result.delete(d,this.getStepWithEditsForRemoveNode(d,s,p))})}),Il.appContext(this.logContext,t,`Processing "add" phase edits (${l.length})`),await n("adds",()=>{l.forEach(d=>{let p={...this.logContext,nodeID:d};this.result.add(d,this.getStepWithWorkspaceEditsForCreateNode(d,a,p))})}),Il.appContext(this.logContext,t,`Processing "alter" phase edits (${c.length})`),await n("alters",async()=>{let d=c.map(p=>async()=>{let f={...this.logContext,nodeID:p};try{return await this.getWorkspaceEditsForPossiblyEditedNode(p,s,a,f,o.measure)}catch(h){let g=kur({type:"delta",current:this.currentState.getNode(p).operation,desired:this.desiredState.getNode(p).operation});throw Yh(h,g)}});await V6u(d,12,"Could not generate plan for the following nodes");try{let p=await this.renderCacheManager?.syncCache();p!==void 0&&Il.infoContext(this.logContext,`Synced ${p} ref usages to cache`)}catch(p){Il.errorContext(this.logContext,"syncCache","Failed to sync ref cache",{error:p})}}),i(),o.printSummary(),this.result.finalize()};getAlteredTableData(t,n){return{oldName:t,oldLocation:this.currentState.getNodePartialLocationString(n)}}getStepWithEditsForRemoveNode(t,n,i){let o=this.currentState.getNode(t),s=o.operation.name,a={...i,nodeName:s},u=Imt(o.operation,this.currentState.stepTypes,a),l={};try{l=this.getAlteredTableData(s,t)}catch{Il.errorContext(a,`Mappings for node: ${s} don't exist`)}return Fp(o,a)&&u?(Il.infoContext(a,`getting advanced edit for remove node for ${s}...`),this.getAdvancedStepWithEditForRemoveNode(t,n,a)):Fp(o,a)&&"oldLocation"in l?(Il.debugContext(a,`getting standard edit for remove node for ${s}...`),this.getStepWithEditsForRemoveStandardNode(t,l,a)):(Il.debugContext(a,`getting metadata edit for remove node for ${s}...`),this.getStepWithEditsForRemoveMetadata(t,a))}async 
getWorkspaceEditsForPossiblyEditedNode(t,n,i,o,s){let a=this.currentState.getNode(t),u=this.desiredState.getNode(t),l={...o,nodeID:t,currentNodeName:a.operation.name,updatedNodeName:u.operation.name};if(Il.debugContext(l,`getting node edits for possibly edited node with updated name "${u.operation.name}"...`),uM(u,l)&&uM(a,l))return Il.debugContext(l,`getting metadata edit for edited node "${u.operation.name}"`),this.getStepWithWorkspaceEditsForAlterSourceNode(t,i,l);if(Fp(a,l)&&Fp(u,l))return Imt(u.operation,this.desiredState.stepTypes,l)?(Il.debugContext(l,`getting advanced edit for edited node "${u.operation.name}"`),await this.getStepWithWorkspaceEditsForAlterAdvancedDeployNode(t,n,i,s,{...l,nodeID:t})):(Il.debugContext(l,`getting standard edit for edited node "${u.operation.name}"`),await this.getStepWithWorkspaceEditsForAlterNonSourceNode(t,i,s,l));{let c=`Source node cannot be converted to SQL node and vice versa. Node name: ${u.operation.name}, Node id: ${t}`;throw Il.alertContext(l,c,a,u),new Xe(c)}}generateStepDataWithMetadataUpdateEdit=(t,n,i)=>{let o=this.desiredState.getNode(t),s={operationName:o.operation.name,destinationName:o.operation.locationName,mapping:this.desiredState.physicalLocs},a=hC(o.operation,t,this.desiredState.nodes,this.desiredState.stepTypes,this.desiredState.physicalLocs,null,this.runTimeParameters,n,i,this.name,!0),u=this.workspaceEditBuilder.metadataUpdate(t,o,a);return lU([u],o.operation,s,a)};getStepWithEditsForRemoveMetadata=(t,n)=>{Il.infoContext(n,"Getting metadata drop edit for node");let i=this.currentState.getNode(t),o={operationName:i.operation.name,destinationName:i.operation.locationName,mapping:this.currentState.physicalLocs};return lU([this.workspaceEditBuilder.metadataDelete(t,i)],i.operation,o,{})};getStepWithWorkspaceEditsForCreateNode=(t,n,i)=>{Il.debugContext(i,`getting node create requirements for node "${t}"`);let 
o=this.desiredState.getNode(t),s={operationName:o.operation.name,destinationName:o.operation.locationName,mapping:this.desiredState.physicalLocs,parameters:this.runTimeParameters},a=hC(o.operation,t,this.desiredState.nodes,this.desiredState.stepTypes,this.desiredState.physicalLocs,null,this.runTimeParameters,n,i,this.name,!0),u=Fp(o,i),l=u?ror(this.desiredState.stepTypes,o.operation.sqlType):"",c;u?Imt(o.operation,this.desiredState.stepTypes,i)?(c=`Advanced Deployment: Created node ${o.operation.name}`,a={currentState:void 0,desiredState:a,currentMapping:void 0,desiredMapping:this.desiredState.physicalLocs}):c=`Created node ${o.operation.name}`:c=`Adding metadata for ${o.operation.name}`,Il.debugContext({...i,nodeName:o.operation.name},`getting create edit for node "${o.operation.name}"`);let d=this.workspaceEditBuilder.createNode(t,c,l);return lU([d],o.operation,s,a)};getAdvancedStepWithEditForRemoveNode(t,n,i){let o=this.currentState.getSqlNode(t);Il.infoContext(i,`Getting advanced deploy drop edit for ${o.operation.name}`);let s=this.currentState.getUdn(o.operation.sqlType).metadata.templates.create.templateString,a=s.length?`Advanced Deployment: Deleted node ${o.operation.name}`:`Advanced Deployment: No SQL available for ${o.operation.name}`,u={operationName:o.operation.name,destinationName:o.operation.locationName,mapping:this.currentState.physicalLocs,parameters:this.runTimeParameters},c={currentState:hC(o.operation,t,this.currentState.nodes,this.currentState.stepTypes,this.currentState.physicalLocs,null,this.runTimeParameters,n,i,this.name,!0),desiredState:void 0,currentMapping:this.currentState.physicalLocs,desiredMapping:void 0};nd(!!qT(c),"hydratedMetadata for advanced deploy was missing data when creating workspace edits","PLAN",i);let d=this.workspaceEditBuilder.dropAdvancedDeploy(t,a,s);return lU([d],o.operation,u,c)}getStepWithEditsForRemoveStandardNode(t,n,i){let 
o=this.currentState.getSqlNode(t),s={operationName:o.operation.name,destinationName:o.operation.locationName,mapping:this.currentState.physicalLocs},a=`Deleted node ${o.operation.name}`,u,l=o.operation.materializationType==="view",c=o.operation.sqlType==="View";return l||c?(Il.infoContext(i,`Node ${o.operation.name} is a view, getting delete template...`),u=[this.workspaceEditBuilder.dropMaterialization(t,a,n,"view")]):(Il.infoContext(i,`Node ${o.operation.name} is a table, getting delete template...`),u=[this.workspaceEditBuilder.dropClone(t,n),this.workspaceEditBuilder.dropMaterialization(t,a,n,"table")]),lU(u,o.operation,s,{})}async getStepWithWorkspaceEditsForAlterAdvancedDeployNode(t,n,i,o,s){let a=this.currentState.getNode(t),u=this.desiredState.getNode(t),{name:l}=u.operation;if(Il.debugContext(s,`getting alter advanced deploy workspace edits for node ${l}`),!Fp(a,s)||!Fp(u,s))throw new Xe(`Node ${l} is not a SQL node, cannot get advanced deploy workspace edits for non-SQL nodes`);let c=this.desiredState.getUdn(u.operation.sqlType).metadata.templates.create.templateString,{advancedHydratedMetadata:d,nodeMetadata:p,renderResult:{compiledSQL:f,ref_usage:h}}=await this.renderAdvancedDeployNode(t,c,n,i,s),g=[],b=[],T=[],v=Jze(f);if(v.length){Il.debugContext(s,`parsed sql stages for advanced deploy node ${l}`,v);for(let F of v){let{deployPhaseOverride:j}=F;j==="create"?b.push(this.workspaceEditBuilder.alterAdvancedDeploy(t,"addedTable",F)):j==="drop"?g.push(this.workspaceEditBuilder.alterAdvancedDeploy(t,"deletedTable",F)):T.push(this.workspaceEditBuilder.alterAdvancedDeploy(t,"alteredTable",F))}this.addToDependencyRefLookup(t,h,s)}else{if(ATo(a,u,s)){Il.debugContext(s,`Advanced deploy node ${l} will have metadata update`);let F=hC(u.operation,t,this.desiredState.nodes,this.desiredState.stepTypes,this.desiredState.physicalLocs,null,this.runTimeParameters,i,s,this.name,!0),j=this.workspaceEditBuilder.metadataUpdate(t,u,F);b.push(j)}Il.debugContext(s,`Advanced 
deploy node ${l} had no parsed stages. Force rendering create template to gather refs...`),await o(`Advanced: ${t}`,async()=>{let F=await this.forceRenderedRefsAdvancedDeployNode(t,c,i,s);this.addToDependencyRefLookup(t,F,s)})}let R=F=>lU(F,u.operation,p,d),O=VH(b);O&&this.result.add(t,R(O));let w=VH(T);w&&this.result.alter(t,R(w));let x=VH(g);return x&&this.result.delete(t,R(x)),this}getStepWithWorkspaceEditsForAlterSourceNode(t,n,i){let o=this.currentState.getNode(t),s=this.desiredState.getNode(t),a={operationName:s.operation.name,destinationName:s.operation.locationName,mapping:this.desiredState.physicalLocs},u=hC(s.operation,t,this.desiredState.nodes,this.desiredState.stepTypes,this.desiredState.physicalLocs,null,this.runTimeParameters,n,i,this.name,!0);Il.debugContext(i,`getting alter src workspace edits for node with updated name "${s.operation.name}"...`);let l=this.currentState.getNodePartialLocationString(t),c=this.desiredState.getNodePartialLocationString(t),d=l!==c;if(!hTo(o,s,i)&&!d)return Il.debugContext(i,`No metadata diff detected for source node ${s.operation.name}`),this;Il.infoContext(i,`Metadata diff detected for source node ${s.operation.name}, getting workspace edits...`);let p=this.workspaceEditBuilder.metadataUpdate(t,s,u),f=lU([p],s.operation,a,u);return this.result.alter(t,f),this}async getStepWithWorkspaceEditsForAlterNonSourceNode(t,n,i,o){let s=this.currentState.getSqlNode(t),a=this.desiredState.getSqlNode(t);this.throwOnInvalidStandardSQLNode(s,a,o);let u=this.currentState.getNodePartialLocationString(t)!==this.desiredState.getNodePartialLocationString(t),l=hTo(s,a,o)||u,c=this.nodeIs(a,"view")&&yAc(t,this.currentState,this.desiredState,o);if(!l&&!c)return ATo(s,a,o)?this.result.add(t,this.generateStepDataWithMetadataUpdateEdit(t,n,o)):this.nodeIs(a,"view")&&await i(`Standard: ${t}`,async()=>{let T=this.desiredState.getUdn(a.operation.sqlType).metadata.templates.create.templateString,v=await 
this.getRefUsageForStandardDeployNode(t,T,this.desiredState,n,o);this.addToDependencyRefLookup(t,v,o)}),this;let d=Soc(a.operation,this.desiredState.stepTypes,o),p=l&&this.nodeIs(s,"table")&&this.nodeIs(a,"table")&&!d,f=c||l||d&&l,h=hC(a.operation,t,this.desiredState.nodes,this.desiredState.stepTypes,this.desiredState.physicalLocs,null,this.runTimeParameters,n,o,this.name,!0),g=this.getAlteredTableData(s.operation.name,t);if(p)return this.getWorkspaceEditsForAlteredTable(t,s,a,this.desiredState.physicalLocs,g,h,u,o);if(f)return this.getStepWithEditBlobsForDropAndRecreate(t,g,s.operation,a.operation,h,"",o);let b=`Reached dropthrough case for step ${t} when attempting to create workspace edits - one or both materialization types are invalid. Original materialization type: ${s.operation.materializationType}; updated materialization type: ${a.operation.materializationType}.`;throw Il.emergContext(o,b+"This should never happen."),new Xe(b)}getCloneTableEdit(t,n,i,o,s){let a=this.schemas.current[n.operation.locationName],u=this.schemas.updated[i.operation.locationName];if(!a||!u){let c=!a&&!u?"Current and updated":a?"Updated":"Current";throw Il.errorContext(s,`${c} schema missing for node. currentLoc:`,n.operation.locationName,"updatedLoc:",i.operation.locationName,"schemas:",this.schemas),new Xe(`${c} schema is missing for node ${i.operation.name} (updated name)`)}return this.workspaceEditBuilder.cloneTable(t,o,a,u,s)}getRenameEdits(t,n,i,o,s,a){let u=n.operation.name!==i.operation.name;if(u||s){Il.infoContext(a,`Node ${i.operation.name} is a table and has a name or location mapping difference. getting workspace edits for rename or swap...`,`
10331
10331
  hasNodeNameDifference: ${u}, hasLocationMappingDifference: ${s}`);let l=this.workspaceEditBuilder.renameTable(t,o),c=this.workspaceEditBuilder.dropMaterialization(t,"Deleting table",o,"table");return[l,c]}return[]}getEditsToRunAgainstClone(t,n,i,o,s){return[...this.workspaceEditBuilder.alteredTable(t,n,i,o,s),...o.hasNodeDescriptionChange?[this.workspaceEditBuilder.updateTableDescription(t,i)]:[]]}getEditsToCloneAlterSwapDrop(t,n,i,o,s,a,u){let l=this.getEditsToRunAgainstClone(t,i,o,a,u),c=this.getRenameEdits(t,n,i,o,s,u);return c.length?[this.getCloneTableEdit(t,n,i,o,u),...l,...c]:l.length?[this.getCloneTableEdit(t,n,i,o,u),...l,this.workspaceEditBuilder.handleSwapOrReplace(t,o),this.workspaceEditBuilder.dropClone(t,o)]:[]}getEditsToRunAgainstFinal(t,n,i,o){return[]}getWorkspaceEditsForAlteredTable(t,n,i,o,s,a,u,l){Il.infoContext(l,`Node ${i.operation.name} is a table. getting workspace edits for alter table...`);let c={operationName:i.operation.name,destinationName:i.operation.locationName,mapping:o},d=T=>lU(T,i.operation,c,a),p=this.workspaceEditBuilder.getNodeChanges(n,i),f=this.getEditsToCloneAlterSwapDrop(t,n,i,s,u,p,l),h=this.getEditsToRunAgainstFinal(t,n.operation,i.operation,s);if(!f.length&&!h.length){Il.infoContext(l,`No actual edits generated for node ${i.operation.name}. Creating metadata only update for add phase.`);let T=this.workspaceEditBuilder.metadataUpdate(t,i,a);return this.result.add(t,d([T])),this}let b=VH([...f,...h]);if(!b)throw new Xe(`No alters generated for node ${i.operation.name}. 
This should never happen.`);return this.result.alter(t,d(b)),this}getStepWithEditBlobsForDropAndRecreate=(t,n,i,o,s,a,u)=>{let l={operationName:i.name,destinationName:i.locationName,mapping:this.currentState.physicalLocs},c={operationName:o.name,destinationName:o.locationName,mapping:this.desiredState.physicalLocs};Il.infoContext(u,"Getting workspace edits for drop and recreate");let{dropEdit:d,createEdit:p}=this.workspaceEditBuilder.dropAndRecreate(t,n,this.desiredState.stepTypes,i,o,a),f=(h,g,b)=>lU([h],g,b,s);return this.result.delete(t,f(d,i,l)),this.result.add(t,f(p,o,c)),this};addToDependencyRefLookup=(t,n,i)=>{this.dependencyRefLookup[t]&&Il.emergContext(i,`Overwriting existing view ref lookup for node ${t}. This really shouldn't have happened. Proceeding anyway...`),this.dependencyRefLookup[t]=n};getRefUsageForStandardDeployNode=async(t,n,i,o,s)=>{let a=i.getSqlNode(t),u=mM(this.name,Bie(this.name,i.macros),n,i.installedPackages||{},i.getUdn(a.operation.sqlType),s),c={nodeMetadata:{...hC(a.operation,t,i.nodes,i.stepTypes,i.physicalLocs,null,this.runTimeParameters,o,s,this.name,!0),...wmt(a.operation,i.physicalLocs)},macrosString:u.macrosString,templateString:n,runTimeParameters:this.runTimeParameters,packages:u.packages};if(this.renderCacheManager){let p=await this.renderCacheManager.getCachedRef(t,c);if(p)return Il.infoContext(s,"getRefUsageForStandardDeployNode","Cache hit for ref",{nodeID:t}),p;Il.infoContext(s,"getRefUsageForStandardDeployNode","Cache miss for ref",{nodeID:t})}let{ref_usage:d}=await this.renderAndCache(t,c,s);return d};getRefUsageForAdvancedDeployNode=async(t,n,i,o,s,a)=>{let u={nodeMetadata:o,macrosString:i,templateString:n,runTimeParameters:o.parameters,packages:s};if(this.renderCacheManager){let c=await this.renderCacheManager.getCachedRef(t,u);if(c)return Il.infoContext(a,"getRefUsageForAdvancedDeployNode","Cache hit for ref",{nodeID:t}),c;Il.infoContext(a,"getRefUsageForAdvancedDeployNode","Cache miss for 
ref",{nodeID:t})}let{ref_usage:l}=await this.renderAndCache(t,u,a);return l};renderStandardDeployNode=async(t,n,i,o,s)=>{let a=i.getSqlNode(t),u=mM(this.name,Bie(this.name,i.macros),n,i.installedPackages||{},i.getUdn(a.operation.sqlType),s),c={nodeMetadata:{...hC(a.operation,t,i.nodes,i.stepTypes,i.physicalLocs,null,this.runTimeParameters,o,s,this.name,!0),...wmt(a.operation,i.physicalLocs)},macrosString:u.macrosString,templateString:n,runTimeParameters:this.runTimeParameters,packages:u.packages};return this.renderAndCache(t,c,s)};async renderAndCache(t,n,i){let o=await abt(this.renderer,n.templateString,n.macrosString,n.nodeMetadata,n.packages,this.name,i,void 0,!0);return this.renderCacheManager&&this.renderCacheManager.updateCache(t,o.ref_usage,this.renderCacheManager.hashRenderPlan(n)),o}renderAdvancedDeployNode=async(t,n,i,o,s)=>{let a=this.currentState.getSqlNode(t),u=this.currentState;if(Mcr(a)){let v=a.operation.locationName,R={...this.currentState.locations,[v]:{mappingDefinitions:{[this.currentState.workspaceID]:{database:a.operation.database,schema:a.operation.schema}}}};u=new vRe(this.currentState.workspaceID,{locations:R,stepTypes:this.currentState.stepTypes,steps:this.currentState.nodes,installedPackages:this.currentState.installedPackages,macros:this.currentState.macros},this.currentState.platformKind,s),Il.debugContext(s,`updated locations data for location ${v} for node ${t} to include the presynced node's location data. 
was:`,this.currentState.locations[v],"is now:",R[v])}let l=this.desiredState.getSqlNode(t),c={operationName:l.operation.name,destinationName:l.operation.locationName,mapping:this.desiredState.physicalLocs,parameters:this.runTimeParameters},d=(v,R,O,w,x)=>hC(v,t,R,O,w,null,this.runTimeParameters,x,s,this.name,!0),p=d(a.operation,u.nodes,u.stepTypes,u.physicalLocs,i),f=d(l.operation,this.desiredState.nodes,this.desiredState.stepTypes,this.desiredState.physicalLocs,o),h={currentState:p,desiredState:f,currentMapping:u.physicalLocs,desiredMapping:this.desiredState.physicalLocs},g=mM(this.name,Bie(this.name,this.desiredState.macros),n,this.desiredState.installedPackages||{},this.desiredState.getUdn(l.operation.sqlType),s),b={nodeMetadata:{...c,...h},macrosString:g.macrosString,templateString:n,runTimeParameters:this.runTimeParameters,packages:g.packages},T=await this.renderAndCache(t,b,s);return{advancedHydratedMetadata:h,nodeMetadata:c,renderResult:T}};forceRenderedRefsAdvancedDeployNode=async(t,n,i,o)=>{let s=this.desiredState.getSqlNode(t),a=mM(this.name,Bie(this.name,this.desiredState.macros),n,this.desiredState.installedPackages||{},this.desiredState.getUdn(s.operation.sqlType),o),u={operationName:s.operation.name,destinationName:s.operation.locationName,mapping:this.desiredState.physicalLocs,parameters:this.runTimeParameters},c={currentState:void 0,desiredState:hC(s.operation,t,this.desiredState.nodes,this.desiredState.stepTypes,this.desiredState.physicalLocs,null,this.runTimeParameters,i,o,this.name,!0),currentMapping:void 0,desiredMapping:this.desiredState.physicalLocs},d=await this.getRefUsageForAdvancedDeployNode(t,n,a.macrosString,{...u,...c},a.packages,o),{ref:p,ref_no_link:f}=d;return[...p,...f].filter(({node_name:g,location_name:b})=>!(g===s.operation.name&&b===s.operation.locationName)).length||Il.warnContext(o,`Forced render of create case for advanced deploy node ${s.operation.name} did not return any 
refs.`),d};throwOnInvalidStandardSQLNode=(t,n,i)=>{let o=Fp(n,i),s=Bvc(t.operation.materializationType),a=Bvc(n.operation.materializationType);if(!o||!s||!a){let{name:u,locationName:l}=n.operation,c=[`Node ${u} (${l}) is not a valid standard SQL node.`];throw n?o||c.push("Desired state was a Source node instead of a SQL node."):c.push("Desired state was missing."),a||c.push(`Desired materialization type (should be Table or View) but was: ${n.operation.materializationType}.`),s||c.push(`Current materialization type (should be Table or View) but was: ${t.operation.materializationType}.`),new Xe(c.join(`
10332
10332
  - `))}};nodeIs=(t,n)=>t.operation.materializationType===n};var MMo=Ye("PLAN"),yAr=class extends aK{constructor(t,n,i,o,s,a,u){let l=new Scr;super("bigquery",l,t,n,i,o,s,u,a)}getAlteredTableData(t,n){return{oldName:t,oldLocation:this.currentState.getNodePartialLocationString(n),desiredLocation:this.getDesiredLocation(n)}}getDesiredLocation(t){try{return this.desiredState.getNodePartialLocationString(t)}catch{return}}getStepWithEditsForRemoveStandardNode(t,n,i){let o=this.currentState.getSqlNode(t),s={operationName:o.operation.name,destinationName:o.operation.locationName,mapping:this.currentState.physicalLocs},a=`Deleted node ${o.operation.name}`,u,l=o.operation.materializationType==="view",c=o.operation.sqlType==="View";return l||c?(MMo.infoContext(i,`Node ${o.operation.name} is a view, getting delete template...`),u=[this.workspaceEditBuilder.dropMaterialization(t,a,n,"view")]):(MMo.infoContext(i,`Node ${o.operation.name} is a table, getting delete template...`),u=[this.workspaceEditBuilder.dropMaterialization(t,a,n,"table")],n.desiredLocation&&u.push(this.workspaceEditBuilder.dropClone(t,n))),lU(u,o.operation,s,{})}getEditsToCloneAlterSwapDrop(t,n,i,o,s,a,u){let{hasOnlyDescriptionChanges:l,hasNodeDescriptionChange:c,alteredColumnData:d}=a,p=n.operation.name!==i.operation.name;if(l&&!s&&!p){MMo.infoContext(u,`Node ${i.operation.name} has only description/label changes. 
Using in-place alter instead of clone.`);let h=[];return d.length>0&&h.push(...this.workspaceEditBuilder.inPlaceAlterColumnEdits(o,t,d)),c&&h.push(this.workspaceEditBuilder.inPlaceUpdateTableDescription(t,o)),h}return super.getEditsToCloneAlterSwapDrop(t,n,i,o,s,a,u)}};var bAr=class extends fV{constructor(){super("databricks",new xQe)}handleSwapOrReplace=(t,n)=>Q1(t,"Replacing table with clone","replaceTable",n,this.templateProvider.replaceWithCloneTemplate);getCloneTableTemplate(){return this.templateProvider.getCloneTableTemplate()}};var EAr=class extends fV{constructor(){super("fabric",new NQe)}updateTableDescription=()=>{throw new u1};handleSwapOrReplace=(t,n)=>Q1(t,"Replacing table with clone","replaceTable",n,this.templateProvider.replaceWithCloneTemplate);getCloneTableTemplate(){return this.templateProvider.getCloneTableTemplate()}alterColumnEdits(t,n,i){return i.length?[Q1(n,"Altered Columns","alteredColumn",{...t,alteredColumns:i},this.templateProvider.alterColumnTemplate)]:[]}};var CAr=class extends fV{constructor(){super("snowflake",new PQe)}handleSwapOrReplace=(t,n)=>Q1(t,"Swapping cloned table","swapClone",n,this.templateProvider.swapClonedTableTemplate);getCloneTableTemplate(t,n,i){return this.templateProvider.getCloneTableTemplate(t,n,i)}alterColumnEdits(t,n,i){return i.length?[Q1(n,"Altered Columns","alteredColumn",{...t,alteredColumns:i},this.templateProvider.alterColumnTemplate)]:[]}};var SAr=class extends aK{constructor(t,n,i,o,s,a,u){let l=new bAr;super("databricks",l,t,n,i,o,s,u,a)}getEditsToRunAgainstClone(t,n,i,o,s){return[...this.workspaceEditBuilder.alteredTable(t,n,i,o,s)]}getEditsToRunAgainstFinal(t,n,i,o){return n.description!==i.description?[this.workspaceEditBuilder.updateTableDescription(t,o)]:[]}};var TAr=class extends aK{constructor(t,n,i,o,s,a,u){let l=new EAr;super("fabric",l,t,n,i,o,s,u,a)}getEditsToRunAgainstClone(t,n,i,o,s){return[...this.workspaceEditBuilder.alteredTable(t,n,i,o,s)]}};var vAr=class extends 
aK{constructor(t,n,i,o,s,a,u){let l=new CAr;super("snowflake",l,t,n,i,o,s,u,a)}};var PSt=async(r,t,n,i,o,s,a,u,l)=>{let c="[getNodeEdits]",{measure:d,printSummary:p}=Q0(c,u);if(!n.locations||!t.locations)throw new Xe("Locations are required to calculate node edits");let f=Ctf(r,o,new vRe(i,t,o,u),new vRe(i,n,o,u),s,a,u,l),h=await d("editBuilder.getNodeEdits",()=>f.getNodeEdits());return p(),{edits:h,dependencies:f.dependencyRefLookup}},Ctf=(r,t,n,i,o,s,a,u)=>$h({snowflake:()=>new vAr(r,n,i,o,s,a,u),databricks:()=>new SAr(r,n,i,o,s,a,u),fabric:()=>new TAr(r,n,i,o,s,a,u),bigquery:()=>new yAr(r,n,i,o,s,a,u)},t,"getNodeEditBuilder");var kI=Ye("PLAN"),Lvc=async(r,t,n,i,o,s,a,u,l,c,d)=>{let p="[createPlan]",f=ou(),h={...d,planID:f,workspaceID:o,planOptions:{...r,platform:u.desiredWorkspaceData.platformKind}},{unsubscribe:g,workspaceStatusChangelog:b}=await jbc(l.org(i).workspace(o),h);try{kI.appContext(h,p,"Making plan workspace data plan ready");let T=new Ecr;return{plan:await T.monitor(d,async()=>{if(!o)throw kI.emergContext(d,p,"no workspaceID specified"),new Xe("no environmentID specified");let{platformKind:R}=u.desiredWorkspaceData,O=["invalidStorageMappings","invalidStorageLocations","invalidNodeTypes","missingNodeTypes","emptyColumnNames"];if(R==="databricks"&&O.push("databricksUppercaseNodeNames"),T.scan(new Lie(u.desiredWorkspaceData,o),O,["invalidColumnSources"],{jobSchedules:c??void 0,workspaceState:"desired",shouldThrow:!1}),T.scan(new Lie(u.currentWorkspaceData),["invalidNodeTypes"],[],{workspaceState:"current",shouldThrow:!1}),gAc(o,u,h),T.scan(new Lie(u.currentWorkspaceData),["invalidNodeLocationData","missingNodeTypes"],[],{workspaceState:"current"}),r.usePlanAPI)return Ubc(r,l,f,s,o,a,u,h);{let{platform:w,...x}=r;return await w.testConnection(),wMo(x,t,n,f,o,s,w,a,u,l,h)}}),issues:T}}finally{b.some(({status:T})=>T==="Deploying")&&kI.warnContext(h,p,"Workspace had a 'Deploying' status during plan",b),g?.()}},wMo=async(r,t,n,i,o,s,a,u,l,c,d)=>{let 
p="[createPlanInternal]",f={...d,planOptions:r},{measure:h,printSummary:g}=Q0("[createPlanInternal] \u2014 breakdown of operations WITHIN the parent 'plan calculation (total)' measurement",f),{disablePresync:b,skipPlanContextUpload:T}=r,v=new pI(kI,f),R=new dcr({context:f,logger:kI}),O=async(j,U)=>{R.startPhase(j);let G=await h(j,U);return R.endPhase(j),G},w={workspaceContext:{before:l.currentWorkspaceData,after:l.desiredWorkspaceData,runTimeParameters:u,schemas:void 0,workspaceID:o,skippedBeforeCollection:!!l.skippedBeforeCollection},presyncContext:void 0},x,F={id:i,presyncDetails:{},targetEnvironment:o,version:2};l.skippedBeforeCollection&&(F.skippedBeforeCollection=!0);try{let j={...l.currentWorkspaceData},U;try{if(!b&&!Fhc.includes(a.name)){let re=await O("presync",()=>hAc(a,j,l.desiredWorkspaceData,u,t,f));kI.appContext(f,p,"Finished with presync"),j.steps=re.nodes,F.presyncDetails=re.details,w.presyncContext=re}U=await O("schemas",()=>Pcr(a,j.steps,l.desiredWorkspaceData.steps,f)),kI.appContext(f,p,"Finished getting schemas"),w.workspaceContext.schemas=el(U)}catch(Y){throw kI.errorContext(f,p,"Presync/schemas failed! Error:",bXt(Y)),Y}finally{kI.appContext(f,p,"Finished getting schemas"),w.workspaceContext.schemas&&(pTo(w.workspaceContext.schemas.current),pTo(w.workspaceContext.schemas.updated)),T||(x=O("store plan context",()=>Nbc(c,s,o,i,w,f).catch(Y=>kI.errorContext(f,p,"storePlanContext failed. 
This is non-critical and can be ignored.",Y))))}let{edits:G,dependencies:H}=await O("node edits",()=>PSt(t,j,l.desiredWorkspaceData,o,a.name,u,U,f,n));kI.appContext(f,p,"Rendering");let k=await h("init deploy renderer",()=>new Gur(t,a.name,f,ghe(G),u,l.desiredWorkspaceData.macros,l.desiredWorkspaceData.installedPackages||{},new uie(l.desiredWorkspaceData.steps),G,new A6)),W=await O("render nodes",()=>k.renderAllNodes(UW(l.desiredWorkspaceData.stepTypes)));kI.appContext(f,p,"Building dependencies");let{phasedDependencies:V,phasedNodeEdits:q}=await O("build dependencies & edits",async()=>({phasedDependencies:new rfe(l.desiredWorkspaceData.steps,W,H,f).build(),phasedNodeEdits:W.toEdits().toRecord()})),K={...F,phasedDependencies:V,phasedNodeEdits:q};return nd(!!Ccr(K),`${p} Malformed plan!`,"PLAN",f),kI.appContext(f,p,`finished creating plan (version ${K.version})`),K}finally{x?r.skipAwaitPlanContext?kI.infoContext(f,p,"Skipping await of plan context upload"):await x:kI.errorContext(f,p,"storePlanContextPromise was never set"),v.logMetric("plan-execution",{orgID:d.orgID,workspaceID:o,planID:i,version:2}),await h("write plan analytics",async()=>{try{let j=R.toPlanAnalytics(o.toString());await c.org(d.orgID).writePlanAnalytics(i,j),kI.infoContext(f,p,"Successfully wrote plan analytics to Firestore")}catch(j){kI.errorContext(f,p,"Failed to write plan analytics to Firestore. 
This is non-critical.",j)}}),g()}};var jh2=Ye("DEBUG");var Stf=Ye("RUNNER_BACKEND"),kvc=async(r,t,n,i,o,s,a)=>{let u=aQe(t.name),l=await Pcr(t,u.steps,n.steps,a),{edits:c}=await PSt(r,u,{...n,steps:Qb(n.steps,[...s])},+o,t.name,i,l,a),d=Object.keys(c.deletedTable),p=Object.keys(c.alteredTable);return qp("RUNNER_BACKEND",!d.length&&!p.length,a,"GetWorkspaceEditsForDevRun: found edits of unexpected type for nodes; ","deleted: ",d,"altered: ",p),Stf.infoContext(a,"GetWorkspaceEditsForDevRun: done constructing edits ",Object.keys(c.addedTable)),c};var cA2=Ye("PIPELINE");var Ttf=Ye("PIPELINE"),xMo=async(r,t,n)=>{let i="createRun:",o={...vtf(r),version:3},{runType:s}=o,a;switch(s){case"deploy":a=t.runs.createDeployRun(o);break;case"refresh":a=t.runs.createRefreshRun(o);break;case"devRun":a=t.devRuns.createDevRun(o);break;default:{let l=s;throw Ttf.emergContext(n,i,"unexpected run type",l),new Xe(`${i} unexpected run type ${l}`)}}let u=await c0(`${i} runRepository creation`,n,"PIPELINE",()=>a,"info");return{runRepository:u,logContext:{...n,runID:u.id}}},vtf=r=>{if(!r.userCredentials)return r;let t={...r.userCredentials};return $h({snowflake:()=>{if(t.platformKind==="snowflake"||t.platformKind===void 0)for(let n of eZt)t[n]&&(t[n]="<REDACTED>")},databricks:()=>{if(t.platformKind==="databricks")for(let n of _Xt)t[n]&&(t[n]="<REDACTED>")},bigquery:()=>{if(t.platformKind==="bigquery")for(let n of IXt)t[n]&&(t[n]="<REDACTED>")},fabric:()=>{}},t.platformKind,"redactSensitiveRunInfo"),{...r,userCredentials:t}};var BGe=Ye("PIPELINE"),IAr=async(r,t)=>{let{logContext:n,runInfo:{runDetails:i,runDetails:{deployCommit:o,environmentID:s,fromWorkspaceData:a,clearTargetEnvironment:u}},teamInfoAndFirebase:{rootRepository:l,teamInfo:{fbTeamID:c}},platform:d}=r;BGe.infoContext(n,"[buildDeployPipeline] Starting");let{measure:p,printSummary:f}=Q0("[buildDeployPipeline] (version=2)",n),h=l.org(c).workspace(+s),g=khc(h,a,n);BGe.infoContext(n,"[buildDeployPipeline] Getting supplemental node 
data");let b=await Vyt(async()=>{let T=await p("query nodes",()=>hur(h)),v=Object.keys(i.phasedNodeEdits.deletedTable);return Object.fromEntries(Object.entries(T).filter(([R])=>v.includes(R)))},{attempts:3,tags:["[buildDeployPipeline] get supplemental node data"],waitSeconds:1});return f(),TRe(hhc,{...r,supplementalNodeData:b},{beforeRun:async()=>{let T="[buildPipeline Deploy beforeRun]";u&&await Lhc(h,d.name,["nodes","macros","nodeTypes","subgraphs","jobs","installedPackages"],n),BGe.infoContext(n,T,"Writing workspace entities pre-deploy"),await qhc(h,a.locations,n),await g("macros"),await g("nodeTypes","update"),await g("installedPackages","update"),BGe.infoContext(n,T,"Wrote workspace entities pre-deploy")},afterRun:async()=>{let T="[buildPipeline Deploy afterRun]";BGe.infoContext(n,T,"Writing workspace entities post-deploy");let v=i0e.assert(a.locations||{});await h.merge({mappingsV1:iN(v),lastCommit:o}),await g("subgraphs"),await g("jobs"),await g("nodeTypes","delete"),await g("installedPackages","delete"),BGe.infoContext(n,T,"Wrote workspace entities post-deploy")},onCleanup:t})};var Itf=Ye("PIPELINE"),qvc=async(r,t)=>{let n="buildDevRunPipeline:",{allWorkspaceData:i,logContext:o,nodesInRun:s,platform:a,renderer:u,runID:l,runInfo:c,workspaceID:d}=r;switch(c.runDetails.operationType){case"create":{let p=await kvc(u,a,i,c.runTimeParameters||{},d,s,o),f={...c,runDetails:{...c.runDetails,dividedStepDataWithWorkspaceEdits:p}};await _tf({...r,nodesInRun:ghe(p),runInfo:f},t);break}case"run":{await Rtf(r,t);break}default:{Itf.errorContext(o,n,`received unrecognized operation type ${c.runDetails.operationType} for run ${l}`);break}}},_tf=async(r,t)=>TRe(ghc,r,{beforeRun:void 0,afterRun:void 0,onCleanup:t}),Rtf=async(r,t)=>TRe(mhc,r,{beforeRun:void 0,afterRun:void 0,onCleanup:t});var LGe=async(r,t)=>TRe(Ahc,r,{beforeRun:void 0,afterRun:void 0,onCleanup:t});var NMo=Ye("RUN_RESULTS"),Otf=(r,t,n,i)=>{let o={},s={};return r.forEach(({id:a,...u})=>{let 
l=[],c=u.history;!u.queryResultSequence&&Array.isArray(c)&&(l.push(`Patching 'queryResultSequence' from 'history' for step ${a}`),u.queryResultSequence=c[c.length-1]?.queryResultSequence),u.runResultVersion||(l.push(`Patching 'version' to zero for step ${a}`),u.runResultVersion=0),u.runExecutionSequenceID||(l.push(`Patching 'runExecutionSequenceID' to zero for step ${a}`),u.runExecutionSequenceID=0),u.name||(l.push(`Patching 'name' to 'unknown' for step ${a}`),u.name="unknown"),u.queryResultSequence?.queryResults?.map((p,f)=>{p.status||(l.push(`Patching 'status' to 'Failure' for step ${a} query result ${f}`),p.status="Failure"),p.exportedRefs||(l.push(`Patching 'exportedRefs' for step ${a} query result ${f}`),p.exportedRefs=[]),p.invalidExportedRefs||(l.push(`Patching 'invalidExportedRefs' for step ${a} query result ${f}`),p.invalidExportedRefs=[]),p.fields?.map(h=>{h.type||(l.push(`Patching 'type' to 'unknown' for step ${a} field ${h.name} of query result ${f}`),h.type="unknown")})}),l.length&&n&&i&&NMo.infoContext(n,i,l);let d=Sfc.assert(u);s[a]=d,o[a]={nodeID:a,hasTestFailures:u.hasTestFailures,queryResultSequence:d.queryResultSequence,runState:d.runState},u.hasTestFailures!==void 0&&(t.runResults.hasFailedTest=u.hasTestFailures)}),{runResults:o,fullRunResults:s}},Mtf=r=>Object.entries(r).map(([i,o])=>(UA("RUN_RESULTS",o.runExecutionSequenceID!==void 0,"runExecutionSequenceID was undefined!"),{stepID:i,runExecutionSequenceID:o.runExecutionSequenceID})).sort((i,o)=>i.runExecutionSequenceID-o.runExecutionSequenceID).map(i=>i.stepID),Fvc=async(r,t)=>{let n=[];for(;r;){let i=parseInt(r);if(isNaN(i))break;n.push(i);try{r=(await t.run(i).fetch()).reRunID}catch{throw new ru(`Run ${i} does not exist`,"404: Specify a valid runCounter")}}return 
n},wtf=r=>{let{id:t,runDetails:{environmentID:n}}=r,i={runResults:{runStartTime:r.runStartTime,runEndTime:r.runEndTime,runType:r.runType,runStatus:r.runStatus,runID:t||0,runResults:[],runTimeParameters:r.runTimeParameters,environmentID:n||"n/a"}};if(r.userCredentials&&"snowflakeAuthType"in r.userCredentials&&(i.runResults.snowflakeAccount=r.userCredentials.snowflakeAccount,i.runResults.snowflakeWarehouse=r.userCredentials.snowflakeWarehouse),r.runType==="refresh"){let{jobID:o,includeNodesSelector:s,excludeNodesSelector:a}=r.runDetails;i.runResults.jobID=o===void 0?void 0:+o,i.runResults.includeNodesSelector=s,i.runResults.excludeNodesSelector=a}return typeof i.runResults.jobID=="string"&&(NMo.error(`Patching jobID from string to number for run ${r.id}`),i.runResults.jobID=parseInt(i.runResults.jobID)),Tur.assert(i)},zvc=async(r,t)=>{let n="[formatRunOutput]";t?nd(t.id===r.id,`in formatRunOutput, run.id (${t.id}) !== runRepository.id (${r.id})`,"RUN_RESULTS"):t=await r.fetch();let i=wtf(t),o=[],s=await r.runResults.select(void 0,{ignoreDataTransformErrors:!0,onDataTransformError:(c,d)=>o.push(`
10333
- ${c}`,d)});o.length&&NMo.infoContext(r.logContext,n,"Error(s) transforming results from storage",...o);let{runResults:a,fullRunResults:u}=Otf(s,i,r.logContext,n),l=Mtf(u);return i.runResults.runResults=l.map(c=>a[c]),i};var xtf=["nodes","subgraphs","jobs"],fg2=[...xtf,"docs","problems","subfolders"];var hg2=M.object({hierarchy:M.array(M.string()),key:M.string(),type:M.string(),value:M.string(),stepCounter:M.string().optional(),tagColor:M.string().optional()}),Ag2=M.object({selectedNodeIDs:M.object({nodes:M.record(M.boolean()),subgraphs:M.record(M.boolean()),jobs:M.record(M.boolean()),docs:M.record(M.boolean()),problems:M.record(M.boolean()),subfolders:M.record(M.boolean())})});var Ig2=Ye("GRAPH"),_Ar=async(r,t,n)=>{let i=await Ntf(r,t);return kGe(t.runDetails,n,i)},Ntf=async(r,t)=>{let n=new Set,i=await Fvc(t.reRunID,r);for(let o of i){let s=await r.run(o).runResults.select();for(let{id:a,runState:u}of s)u==="complete"&&n.add(a)}return n},kGe=(r,t,n)=>Ptf(r,t,n||new Set)||new Set(Object.keys(t.steps)),PMo=(r,t,n,i)=>({steps:r,allStepTypesYAML:Sur(n,t),allSubgraphs:i}),Ptf=(r,t,n)=>{let i=null,o="",s="";if(r.jobID?{includeSelector:o,excludeSelector:s}=yfc(t,+r.jobID):(r.includeNodesSelector||r.excludeNodesSelector)&&(o=r.includeNodesSelector||"",s=r.excludeNodesSelector||""),o||s||n.size){let a=PMo(t.steps,t.stepTypes,t.installedPackages??{},t.folders);i=new Set(Dtf(o,s,a).filter(u=>!n.has(u)))}return i},Dtf=(r,t,n)=>{let i=Cmt(n.steps);n.graphInformation=i;let o=Cur(r,t,n);return Object.keys(o.stepsResult)},ghe=r=>{let t=new Set;for(let n of Object.values(r))for(let i in n)t.add(i);return t};var Btf=(r,t)=>{if(t=t===void 0?rBu:t,r<=1||t<=1)return 1;let s=r<=4?r:Math.floor((r-4)/2)+4;return Math.min(s,t)},Ooe=(r,t)=>{let n=Btf(r,t),i=nBu,o=i<n?i:n;return o>0?o:1};var Uvc=async(r,t)=>{let n=await r.fetch({ignoreDataTransformErrors:!0}),i={fbEmail:t?.emptyEmail?"":n.email,fbUserID:r.id,fbTeamID:n.currentTeam,fbFullName:`${n.firstName} 
${n.lastName}`,fbFirstName:n.firstName,fbLastName:n.lastName,fbHasSuperUserAccess:n.hasSuperUserAccess,fbActivated:n.activated??!0};return t?.populateProvisioningAccess&&(i.hasProvisioningAccess=n.hasProvisioningAccess),i},Qvc=async(r,t)=>(await r.org(t).fetch()).superDebugEnabled;var aSl=Tt(m9c());var uSl=Tt(Jho());var YCl=Tt(_Be());var VCl=Tt(_Be());var $Fc=Tt(JFc());var $Tr=Tt(v7());var LQo=require("perf_hooks");function w8f(r){let t=LQo.performance.now()*.001,n=Math.floor(t),i=Math.floor(t%1*1e9);return r!=null&&(n=n-r[0],i=i-r[1],i<0&&(n--,i+=1e9)),[n,i]}function gMe(){let[r,t]=w8f();return r*1e3+Math.floor(t/1e6)}function VTr(r){return new Promise(t=>setTimeout(t,r))}var KTr=class{constructor({bucketSize:t,tokensPerInterval:n,interval:i,parentBucket:o}){if(this.bucketSize=t,this.tokensPerInterval=n,typeof i=="string")switch(i){case"sec":case"second":this.interval=1e3;break;case"min":case"minute":this.interval=1e3*60;break;case"hr":case"hour":this.interval=1e3*60*60;break;case"day":this.interval=1e3*60*60*24;break;default:throw new Error("Invalid interval "+i)}else this.interval=i;this.parentBucket=o,this.content=0,this.lastDrip=gMe()}async removeTokens(t){if(this.bucketSize===0)return Number.POSITIVE_INFINITY;if(t>this.bucketSize)throw new Error(`Requested tokens ${t} exceeds bucket size ${this.bucketSize}`);this.drip();let n=async()=>{let i=Math.ceil((t-this.content)*(this.interval/this.tokensPerInterval));return await VTr(i),this.removeTokens(t)};if(t>this.content)return n();if(this.parentBucket!=null){let i=await this.parentBucket.removeTokens(t);return t>this.content?n():(this.content-=t,Math.min(i,this.content))}else return this.content-=t,this.content}tryRemoveTokens(t){return this.bucketSize?t>this.bucketSize||(this.drip(),t>this.content)||this.parentBucket&&!this.parentBucket.tryRemoveTokens(t)?!1:(this.content-=t,!0):!0}drip(){if(this.tokensPerInterval===0){let s=this.content;return this.content=this.bucketSize,this.content>s}let 
t=gMe(),n=Math.max(t-this.lastDrip,0);this.lastDrip=t;let i=n*(this.tokensPerInterval/this.interval),o=this.content;return this.content=Math.min(this.content+i,this.bucketSize),Math.floor(this.content)>Math.floor(o)}};var iKe=class{constructor({tokensPerInterval:t,interval:n,fireImmediately:i}){this.tokenBucket=new KTr({bucketSize:t,tokensPerInterval:t,interval:n}),this.tokenBucket.content=t,this.curIntervalStart=gMe(),this.tokensThisInterval=0,this.fireImmediately=i??!1}async removeTokens(t){if(t>this.tokenBucket.bucketSize)throw new Error(`Requested tokens ${t} exceeds maximum tokens per interval ${this.tokenBucket.bucketSize}`);let n=gMe();if((n<this.curIntervalStart||n-this.curIntervalStart>=this.tokenBucket.interval)&&(this.curIntervalStart=n,this.tokensThisInterval=0),t>this.tokenBucket.tokensPerInterval-this.tokensThisInterval){if(this.fireImmediately)return-1;{let o=Math.ceil(this.curIntervalStart+this.tokenBucket.interval-n);await VTr(o);let s=await this.tokenBucket.removeTokens(t);return this.tokensThisInterval+=t,s}}let i=await this.tokenBucket.removeTokens(t);return this.tokensThisInterval+=t,i}tryRemoveTokens(t){if(t>this.tokenBucket.bucketSize)return!1;let n=gMe();if((n<this.curIntervalStart||n-this.curIntervalStart>=this.tokenBucket.interval)&&(this.curIntervalStart=n,this.tokensThisInterval=0),t>this.tokenBucket.tokensPerInterval-this.tokensThisInterval)return!1;let i=this.tokenBucket.tryRemoveTokens(t);return i&&(this.tokensThisInterval+=t),i}getTokensRemaining(){return this.tokenBucket.drip(),this.tokenBucket.content}};var kQo=r=>r;var iY=Ye("SECRETS");function FQo(r,t){return!!r&&typeof r=="object"&&"code"in r&&r.code===t}function x8f(r){return FQo(r,$Tr.Status.INVALID_ARGUMENT)&&"details"in r&&typeof r.details=="string"&&r.details.startsWith("Secret payload data length must be at most")}function oKe(r){return FQo(r,$Tr.Status.NOT_FOUND)||N8f(r)}function N8f(r){return FQo(r,$Tr.Status.FAILED_PRECONDITION)&&typeof 
r?.message=="string"&&r.message.includes("is in DESTROYED state")}function qQo(r){if(!r)return null;let t=r.split("/").pop();if(!t)return null;let n=parseInt(t);return Number.isNaN(n)?null:n}var P8f=9e4,D8f=600,B8f=600,zQo=10,L8f=new iKe({tokensPerInterval:P8f/zQo,interval:"minute"}),k8f=new iKe({tokensPerInterval:D8f/zQo,interval:"minute"}),YTr=new iKe({tokensPerInterval:B8f/zQo,interval:"minute"}),q8f=new kD(1),JTr=class{constructor(t,n=new $Fc.SecretManagerServiceClient){this.projectId=t;this.client=n}shouldUpgradeLegacyResultsToNewName(){return!1}BuildSecretName=t=>`projects/${this.projectId}/secrets/${t}`;BuildSecretNameLatestVersion=t=>`${this.BuildSecretName(t)}/versions/latest`;getByName=async t=>{await L8f.removeTokens(1);let n=await this.client.accessSecretVersion({name:this.BuildSecretNameLatestVersion(t)}),[i]=n,o=i.payload?.data?.toString();return o?new cd(o):null};makeAddVersionData=(t,n)=>({parent:this.BuildSecretName(t),payload:{data:kQo(Buffer.from(n.sensitiveValue))}});addVersionByName=async(t,n)=>{let i=this.makeAddVersionData(t,n);await YTr.removeTokens(1),await this.client.addSecretVersion(i),this.PruneInaccessibleVersions(t).catch(o=>{iY.error("Could not prune versions for",t,o)})};setByName=async(t,n)=>{await YTr.removeTokens(2);let i=this.makeAddVersionData(t,n);await this.client.createSecret({parent:`projects/${this.projectId}`,secretId:t,secret:{replication:{automatic:{}}}}),await this.client.addSecretVersion(i)};listSecretVersions=async t=>{await k8f.removeTokens(1);let[n]=await this.client.listSecretVersions({parent:this.BuildSecretName(t)});return n};destroySecretVersion=async t=>{await YTr.removeTokens(1),await this.client.destroySecretVersion({name:t})};getMissingSecret=async t=>{let n=Zhr(t);if(n)try{let i=await this.getByName(n);return i&&this.shouldUpgradeLegacyResultsToNewName()&&(await this.setByName(t,i),await this.deleteByName(n)),i}catch(i){if(!oKe(i))throw i}return null};Get=async t=>{try{return await 
this.getByName(t)}catch(n){if(oKe(n))return this.getMissingSecret(t);throw n}};setMissingSecret=async(t,n)=>{let i=Zhr(t);if(!i)await this.setByName(t,n);else if(this.shouldUpgradeLegacyResultsToNewName())await this.setByName(t,n),await this.deleteByName(i,{ignoreMissing:!0});else try{await this.addVersionByName(i,n)}catch(o){if(oKe(o)){await this.setByName(t,n);return}throw o}};Set=async(t,n)=>{try{await this.addVersionByName(t,n);return}catch(i){if(oKe(i)){await this.setMissingSecret(t,n);return}throw x8f(i)&&iY.emerg("Payload exceeded for: ",t,"Reach out to support to determine if feature flags to adjust secret storage are appropriate next steps",i),i}};deleteByName=async(t,n)=>{try{await YTr.removeTokens(1),await this.client.deleteSecret({name:this.BuildSecretName(t)})}catch(i){if(n?.ignoreMissing&&oKe(i))return;throw i}};Delete=async t=>{let n=Zhr(t),i=[t];n&&i.push(n),await $4(i.map(o=>this.deleteByName(o,{ignoreMissing:!0})))};async PruneInaccessibleVersions(t){try{iY.info("Begin pruning inaccessible versions for secret",t),await q8f.runExclusive(async()=>{iY.info("Got lock for pruning inaccessible versions for secret",t);let i=(await this.listSecretVersions(t)).filter(o=>o.state==="ENABLED"&&!!qQo(o.name)).sort((o,s)=>qQo(s.name)-qQo(o.name)).slice(1);for(let o of i)try{iY.info("Destroying secret version",o.name),await this.destroySecretVersion(o.name),iY.info("Successfully destroyed secret version",o.name)}catch(s){oKe(s)?iY.error("Tried to prune secret",o.name,"but it was not found"):iY.alert("Error trying to prune inaccessible secret version",o.name,". Error: ",s)}},1)}catch(n){iY.alert("Fatal error trying to prune inaccessible secret versions for secret",t,". 
Error: ",n)}finally{iY.info("Completed pruning inaccessible versions for secret",t)}}};var Mol=Tt(Ool());var _Ir=class r{client;constructor(t){this.client=(0,Mol.createClient)({url:t})}static Create=async t=>{let n=new r(t);return await n.client.connect(),n};Get=async t=>{let n=await this.client.get(t);return n===null?null:new cd(n)};Set=async(t,n)=>{await this.client.set(t,n.sensitiveValue)};Delete=async t=>{await this.client.del(t)}};hr();var MRh=Tt(gwr());var bRh=M.enum(["org-settings-read","org-settings-write","org-users-read","org-users-write","org-users-create","org-users-delete","org-userRoles-read","org-userRoles-write","org-projects-create","org-tags-create","org-tags-delete","org-tags-write","org-tags-read"]),ERh=M.enum(["project-settings-read","project-summary-read","project-settings-write","project-delete","project-members-read","project-members-add","project-members-write","project-members-remove","project-userRoles-read","project-workspaces-create","project-documentation-read","project-environments-create","project-workspace-settings-read","project-workspace-settings-write","project-workspaces-delete","project-workspace-jobs-create","project-workspace-jobs-read","project-workspace-jobs-write","project-workspace-jobs-delete","project-workspace-macros-create","project-workspace-macros-read","project-workspace-macros-write","project-workspace-macros-delete","project-workspace-node-types-create","project-workspace-node-types-read","project-workspace-node-types-write","project-workspace-node-types-delete","project-workspace-nodes-create","project-workspace-nodes-read","project-workspace-nodes-write","project-workspace-nodes-delete","project-workspace-packages-create","project-workspace-packages-read","project-workspace-packages-write","project-workspace-packages-delete","project-workspace-sql-run","project-workspace-subgraphs-create","project-workspace-subgraphs-read","project-workspace-subgraphs-write","project-workspace-subgraphs-delete"]),CRh=M.enum(["en
v-run-results-read","env-run-job","env-delete","env-deploy","env-refresh","env-settings-read","env-summary-read","env-settings-write","env-documentation-read","env-members-read","env-members-add","env-members-write","env-members-remove","env-userRoles-read","env-job-schedule-write","env-job-schedule-read"]),SRh={reader:{permissions:["env-run-results-read","env-summary-read","env-documentation-read","env-job-schedule-read"],inheritedEnvironmentRoles:[]},admin:{permissions:[...CRh.options],inheritedEnvironmentRoles:["reader"]}},TRh={member:{permissions:["project-summary-read","project-members-read","project-workspace-sql-run","project-workspace-nodes-read"],inheritedProjectRoles:[],inheritedEnvironmentRoles:[]},contributor:{permissions:["project-settings-read","project-members-read","project-workspaces-create","project-workspace-settings-read","project-workspace-settings-write","project-workspaces-delete","project-workspace-jobs-read","project-workspace-jobs-create","project-workspace-jobs-write","project-workspace-jobs-delete","project-workspace-macros-read","project-workspace-node-types-read","project-workspace-nodes-create","project-workspace-nodes-write","project-workspace-nodes-delete","project-workspace-packages-read","project-workspace-subgraphs-read","project-workspace-subgraphs-create","project-workspace-subgraphs-write","project-workspace-subgraphs-delete","project-documentation-read","env-delete","env-documentation-read"],inheritedProjectRoles:["member"],inheritedEnvironmentRoles:["reader"]},architect:{permissions:["project-workspace-macros-create","project-workspace-macros-write","project-workspace-macros-delete","project-workspace-node-types-create","project-workspace-node-types-write","project-workspace-node-types-delete","project-workspace-packages-create","project-workspace-packages-write","project-workspace-packages-delete"],inheritedProjectRoles:["contributor"],inheritedEnvironmentRoles:["reader"]},admin:{permissions:[...ERh.options],inheritedProject
Roles:["architect"],inheritedEnvironmentRoles:["admin"]}},vRh={member:{permissions:["org-tags-read"],inheritedOrganizationRoles:[],inheritedProjectRoles:[]},contributor:{permissions:["org-projects-create"],inheritedOrganizationRoles:["member"],inheritedProjectRoles:[]},admin:{permissions:[...bRh.options],inheritedOrganizationRoles:["contributor"],inheritedProjectRoles:["admin"]}},ywr=r=>{let t=SRh[r],n=new Set(t.permissions);for(let i of t.inheritedEnvironmentRoles)ywr(i).forEach(o=>n.add(o));return Array.from(n)},NZe=r=>{let t=TRh[r],n=new Set(t.permissions);for(let i of t.inheritedProjectRoles)NZe(i).forEach(o=>n.add(o));for(let i of t.inheritedEnvironmentRoles)ywr(i).forEach(o=>n.add(o));return Array.from(n)},mwr=r=>{let t=vRh[r],n=new Set(t.permissions);for(let i of t.inheritedOrganizationRoles)mwr(i).forEach(o=>n.add(o));for(let i of t.inheritedProjectRoles)NZe(i).forEach(o=>n.add(o));return Array.from(n)},PCl={admin:mwr("admin"),contributor:mwr("contributor"),member:mwr("member")},DCl={admin:NZe("admin"),architect:NZe("architect"),contributor:NZe("contributor"),member:NZe("member")},BCl={admin:ywr("admin"),reader:ywr("reader")};var IRh=Ye("USER_MANAGEMENT"),tts=class{throwIfOrgPermissionDenied(t,n=!1){if(!this.hasOrgPermission(t))throw n?new ru("The requested resource is unavailable"):new gR("Permission denied",`Permission '${t}' is required to perform this operation.`)}throwIfProjectPermissionDenied(t,n,i=!1){if(!this.hasProjectPermission(t,n))throw i?new ru("The requested resource is unavailable"):new gR("Permission denied",`Permission '${t}' is required to perform this operation.`)}throwIfEnvironmentPermissionDenied(t,n,i,o=!1){if(!this.hasEnvironmentPermission(t,n,i))throw o?new ru("The requested resource is unavailable"):new gR("Permission denied",`'Permission ${t}' is required to perform this operation.`)}},bwr=class r extends 
tts{constructor(n,i){super();this.userRoles=n;this.hasSuperUserAccess=i;this.userPermissions=this.ConstructPermissionsFromRoles(n)}userPermissions;get orgRoles(){return this.userRoles?.organizationRoles??[]}hasOrgPermissionInternal(n){return this.userPermissions.organizationPermissions?.has(n)}hasOrgPermission(n){return this.hasSuperUserAccess||this.hasOrgPermissionInternal(n)}hasProjectPermissionInternal(n,i){return this.userPermissions.projects[i]?.projectPermissions?.has(n)}hasProjectPermission(n,i){return this.hasSuperUserAccess||this.hasOrgPermissionInternal(n)||this.hasProjectPermissionInternal(n,i)}hasEnvironmentPermissionInternal(n,i,o){return this.userPermissions.projects[i]?.environments[o]?.environmentPermissions?.has(n)}hasEnvironmentPermission(n,i,o){return this.hasSuperUserAccess||this.hasOrgPermissionInternal(n)||this.hasProjectPermissionInternal(n,i)||this.hasEnvironmentPermissionInternal(n,i,o)}ConstructPermissionsFromRoles(n){return n?(n.projects||(IRh.warn(`userRoles.projects is undefined for user ${n.id}, defaulting to empty object`),n.projects={}),{organizationPermissions:r.ConstructOrgPermissionsFromRoles(n.organizationRoles),projects:r.ConstructProjectsPermissionsFromRoles(n.projects)}):{organizationPermissions:new Set,projects:{}}}static ConstructEnvironmentPermissionsFromRoles(n){let i=new Set;return n.environmentRoles.forEach(o=>{BCl[o].forEach(s=>{i.add(s)})}),i}static ConstructEnvironmentPermissionObjectsFromRoles(n){let i={};return Object.entries(n).forEach(([o,s])=>{i[o]={environmentPermissions:r.ConstructEnvironmentPermissionsFromRoles(s)}}),i}static ConstructProjectPermissionsFromRoles(n){let i=new Set;return n.projectRoles.forEach(s=>{DCl[s].forEach(a=>{i.add(a)})}),{projectPermissions:i,environments:r.ConstructEnvironmentPermissionObjectsFromRoles(n.environments)}}static ConstructProjectsPermissionsFromRoles(n){let i={};return Object.keys(n).forEach(o=>{i[o]=this.ConstructProjectPermissionsFromRoles(n[o])}),i}static 
ConstructOrgPermissionsFromRoles(n){let i=new Set;return n.forEach(o=>{PCl[o].forEach(s=>{i.add(s)})}),i}};var s4t=Tt(q9()),_Rh=Tt(L$e()),Uk2=(0,s4t.Record)({version:s4t.String,hash:s4t.String}),RRh=()=>({version:"7.33.0-beta.1",hash:"f78d2f51c3cb33fbc4e30b044ebe3ae30d2fd903"}),s_=RRh();var LCl=Ye("AUTH"),Zk2=Ye("USER_MANAGEMENT"),wRh=async(r,t,n,i,o)=>{let s={};try{let a=await r(),{context:{userID:u},token:l}=await t(a);s.userID=u;let c=o||await n(a,l),d=await Uvc(c.user(u),{emptyEmail:!0,populateProvisioningAccess:!0});s.orgID=d.fbTeamID;let p=ID.getActiveSpan();p&&(p.setAttribute("userID",u),p.setAttribute("orgID",d.fbTeamID));let f=await i({org:{id:d.fbTeamID},user:{id:u},...s_&&{version:{id:s_.version}}},c,l),h=await c.org(d.fbTeamID).userRole(u).fetch(),g=new bwr(h,!!d.fbHasSuperUserAccess);return{teamInfo:d,firebase:a,rootRepository:c,token:l,featureFlags:f,userPermissions:g}}catch(a){if(LCl.errorContext(s,"AuthenticateTokenAndRetrieveTeamInfo",a),a instanceof gre)throw a;let u=a?.message||a?.error?.errorString;throw new Xe("Unable to authenticate, please check your network connection and ensure your token is valid",u)}},kCl=async(r,t,n,i,o)=>{let s="[AuthenticateTokenAndRetrieveTeamInfo]",a={};return yh(async()=>c0(s,a,"AUTH",()=>wRh(r,t,n,i,o),"info"),4,LCl,a,{alterRetryStrategy:l=>({name:"backoff",base:iqe(Da(l,!0),"QUOTA_EXCEEDED")||iqe(Da(l,!0),"Unable to authenticate")?60:1}),checkError:l=>{if(l instanceof gre)return l;let c=Da(l,!0);if(["INVALID_REFRESH_TOKEN","EXPIRED_TOKEN","Missing or invalid OAuth refresh token","OAuth access token expired","Invalid Token","JWT ID token expired","Decoding Firebase ID token failed","Token expired","User not found","User disabled"].some(p=>iqe(c,p)))return l},logOnRetry:`${s} retrying`,logOnFailure:`${s} failed, exhausted allotted retry attempts`})};var jCl=Tt(_Be()),GCl=require("node:fs/promises");hr();var qCl;var r$=(r,t={},n=1)=>{qCl&&qCl.increment(r,t,n)};var 
xRh=M.object({auth:M.string(),firestore:M.string(),storage:M.string()}),NRh=M.object({apiKey:M.string(),authDomain:M.string(),projectId:M.string(),storageBucket:M.string(),messagingSenderId:M.string(),appId:M.string(),measurementId:M.string(),emulatorInfo:xRh.optional()}),FCl=br("FirebaseConfig",NRh);var rts=r=>{r$("firestore.delete",{path:r})},Ewr=r=>{r$("firestore.set",{path:r})},nts=r=>{r$("firestore.update",{path:r})};var its=r=>{r$("firestore.get",{path:r})},ots=r=>{r$("firestore.transaction.get",{path:r})},sts=r=>{r$("firestore.transaction.set",{path:r})},ats=r=>{r$("firestore.transaction.update",{path:r})},uts=r=>{r$("firestore.transaction.delete",{path:r})},cts=()=>{r$("firestore.batch")};var dq2=ID.getTracer("firestore");var zCl=async(r,t=1e3)=>{let n=()=>{},i=new Promise(o=>{n=r.onSnapshot(s=>{s.metadata.hasPendingWrites||o(s)})});return await OI(t,`Timed out in GetDocumentNoPendingWrites after ${t} ms`,i).finally(n)};var UCl=Tt(Vkt());async function QCl(r){let t=new UCl.ProxyAgent;r.httpProxy=t,r.httpsProxy=t}var HCl=(0,jCl.default)(async()=>{if(J1e)return Ye("RUNNER_BACKEND").info("WARNING: API using Firebase emulator!"),{apiKey:"none",authDomain:"none",projectId:"coalesce",storageBucket:"coalesce.appspot.com",messagingSenderId:"none",appId:"none",measurementId:"none",emulatorInfo:J1e};{let r=await(0,GCl.readFile)(process.env.FIREBASE_CONFIG_PATH??"/firebase.json"),t=FCl.assert(JSON.parse(r.toString()));return Ye("RUNNER_BACKEND").info("API using Firebase config:",t),t}});async function WCl(){return a8e()||cja()?await HCl():A1.app().options}var lts=async(r,t)=>{await QCl(r),await E1c(r,t)};var dts=Ye("SECRETS"),KCl=(0,VCl.default)(async()=>{switch(process.env.COALESCE_SECRET_STORE_TYPE){case"redis":if(dts.app("Selected Redis secret store."),!process.env.COALESCE_SECRET_STORE_REDIS_URL)throw new Error("The redis secret store is configured, but no COALESCE_SECRET_STORE_REDIS_URL was defined.");return await 
_Ir.Create(process.env.COALESCE_SECRET_STORE_REDIS_URL);case"memory":return dts.app("Selected Memory secret store."),uMo();case"google":default:return dts.app("Selected Google Secrets Manager secret store."),new JTr((await WCl()).projectId)}});var nT=(0,YCl.default)(async()=>new mGe(await KCl()));var PRh=Ye("SECRETS"),PZe=async(r,t,n,i)=>{let o={userID:t.userId,orgID:t.orgId,workspaceID:+t.workspaceId},s=n.UpgradeTarget;if(i&&s){let a=await r.Get(s,t);return a||(a=await r.Get(n,t),a&&(PRh.infoContext(o,`v2 Snowflake secrets feature flag was set, found v1 credentials. Upgrading v1 to v2 for ${n.typeName}`),await r.Set(s,t,a),await r.Delete(n,t))),a}else return await r.Get(n,t)};var JCl=async(r,t,n)=>{let i=n.isFeatureEnabled("enableSnowflakeOAuthSecretsV2");return PZe(r,t,RGe,i)},$Cl=async(r,t)=>await r.Get(TGe,t);var XCl=async(r,t)=>await r.Get(SGe,t),ZCl=async(r,t)=>await r.Get(EGe,t),eSl=async(r,t,n)=>PZe(r,t,IGe,n),tSl=async(r,t,n)=>{let i=n.isFeatureEnabled("enableSnowflakeSecretsV2");return PZe(r,t,Roe,i)},rSl=async r=>await(await nT()).Get(CGe,r),nSl=async r=>await(await nT()).Get(oK,r);var iSl=async(r,t,n)=>await r.Set(oK,t,n),oSl=async(r,t,n,i)=>{if(!i.isFeatureEnabled("enableSnowflakeSecretsV2"))return await r.Set(Roe,t,n);await r.Get(Roe,t)&&await r.Delete(Roe,t),await r.Set(JOe,t,n)};var pts=Ye("CONNECTION"),sSl=r=>{if(!r.host)throw new no("Could not get a valid host for databricks user");if(!r.path)throw new no("Could not get a valid path for databricks user")},Cwr=async r=>{let t,n=new aSl.DBSQLClient;if(!r.host)throw new no("Could not get a valid host for databricks user");if(!r.path)throw new no("Could not get a valid path for databricks user");if(r.authenticator==="Token"){if(sSl(r),!r.token)throw new no("Could not get a valid token for databricks user");t={host:r.host,path:r.path,token:r.token,clientId:"Coalesce.IO_Coalesce"}}else if(r.authenticator==="OAuthU2M"){if(!r.accessToken)throw new no("Could not get a valid access token for databricks 
OAuth U2M authentication");t={host:r.host,path:r.path,token:r.accessToken,clientId:"Coalesce.IO_Coalesce",authType:"access-token"}}else if(r.authenticator==="OAuthM2M"){if(sSl(r),!r.clientID||!r.secret)throw new no("Missing Client ID or Secret for databricks user");t={authType:"databricks-oauth",oauthClientId:r.clientID,oauthClientSecret:r.secret,host:r.host,path:r.path}}else throw new no(`Could not authenticate using ${r.authenticator}`);return await n.connect(t)},cSl=async r=>{let{workspaceID:t,orgID:n,userID:i}=r;try{let o=await nSl({orgId:n,userId:i,workspaceId:t.toString()});if(pts.infoContext(r,"get access token from secret (GetAccessTokenFromSecret)"),!o)throw new uqe("Invalid OAuth configuration.");return o.sensitiveValue}catch(o){throw pts.errorContext(r,"Error getting access token from secret (GetAccessTokenFromSecret)",o),o}},lSl=async(r,t,n)=>{if(!r.connectionAccount)throw new X1e("Missing connection account for Databricks OAuth refresh","OAuthU2M");let o=`${`https://${r.connectionAccount}`}/oidc/v1/token`,s=uSl.default.stringify({grant_type:"refresh_token",client_id:r.oauthClientID,client_secret:r.oauthClientSecret,refresh_token:t}),a={method:"post",url:o,headers:{"Content-Type":"application/x-www-form-urlencoded"},data:s};try{return(await wc(a)).data.access_token}catch(u){throw pts.errorContext(n,"RefreshAccessToken",u),new X1e("An error occurred refreshing your OAuth refresh token. 
Please ensure your OAuth Security Integration is still valid or reauthenticate.","OAuthU2M",u)}};var dSl;var G3=(r,t={},n=1)=>{if(dSl){let o={appContext:lFt()??"unknown"};J1e&&(o.emulated="true"),dSl.increment(r,{...o,...t},n)}};var Zbe=Ye("CONNECTION"),n$=class{constructor(t,n,i,o,s,a){this.handle=t;this.logContext=n;this.connectionID=i;this.useCounter=o;this.parent=s;a?this.resetConnectionHandle=()=>a():s?this.resetConnectionHandle=()=>s.reinitialize():this.resetConnectionHandle=()=>{throw new Jg("Cannot reset the connection, no reset function provided")}}runningQueries=new Map;wasCanceled=!1;resetConnectionHandle;get memoryMonitor(){return this.parent?this.parent.managerQueryStats.memoryMonitor:(Zbe.debugContext(this.logContext,"Could not get memory monitor from parent. This may be expected.",this.connectionID),{logDeltaMetrics:()=>{},getMemoryUsage:()=>{}})}async resetConnection(){if(this.wasCanceled)throw new Jh("not resetting canceled connection");G3(`${this.name}.resetConnection`,this.logContext);let t=await this.resetConnectionHandle();return this.handle=t,t}async executeAndGetRunning(t){let n=this.parent?.managerQueryStats.executions,i=!n||n%50!==0,o=`(${this.name}) BaseConnection.executeAndGetRunning()`,s=this.memoryMonitor.getMemoryUsage(this.logContext),a=await this.makePendingQuery(t);return a.attachThen(u=>{this.parent&&this.parent.updateFromResult(u),this.memoryMonitor.logDeltaMetrics(this.logContext,s,o,`(connID: ${this.connectionID})[${this.name} executeAndGetRunning()]: completed pending query | total executions: ${n}`,i)}),this.parent&&this.parent.managerQueryStats.incrementExecutions(`[${this.name} executeAndGetRunning()]:${this.connectionID}`),a}execute=async t=>(await this.executeAndGetRunning(t)).promise;cancel=async()=>{try{if(Zbe.infoContext(this.logContext,"Begin canceling connection",this.connectionID),this.wasCanceled)return Zbe.infoContext(this.logContext,"Connection already 
canceled",this.connectionID),Promise.resolve();this.wasCanceled=!0;let t=Array.from(this.runningQueries.values()).map(async n=>{Zbe.infoContext(this.logContext,"Begin canceling query",n.coalesceQueryID,"conn",this.connectionID);try{await n.Cancel()}catch(i){throw Zbe.errorContext(this.logContext,"Error canceling query",n.coalesceQueryID,"conn",this.connectionID,i),i}finally{Zbe.infoContext(this.logContext,"Completed canceling query",n.coalesceQueryID,"conn",this.connectionID)}});await $4(t)}catch(t){throw Zbe.errorContext(this.logContext,"Error canceling connection",this.connectionID,t),t}finally{Zbe.infoContext(this.logContext,"Completed canceling connection",this.connectionID)}}};var DRh="20428800",e2e=()=>parseInt(process.env.MAX_RESPONSE_SIZE||DRh);var BRh=Ye("CONNECTION"),t2e=class{constructor(t,n,i,o,s=2){this.handle=t;this.queryParams=n;this.logContext=i;this.resetFunc=o;this.maxAttempts=s;this.queryParams.requestId=this.coalesceQueryID=this.queryParams.requestId??ou()}_promise=null;coalesceQueryID;get promise(){if(this.Start(),!this._promise)throw new Jg("Internal consistency error: _promise cannot be null after Start()!");return this._promise}Cancel=async()=>{BRh.infoContext(this.logContext,"Canceling the running query, CQID=",this.coalesceQueryID),await this.tryCancel()};Start=()=>{this._promise||(this._promise=this.queryWithRetries())};attachThen=t=>{this._promise=this.promise.then(n=>(t(n),n))}};var i$=Ye("CONNECTION"),a4t=class extends t2e{constructor(n,i,o,s,a,u=5){let l={...s,snowflakeRequestID:o.requestId};super(i,o,l,a,u);this.completion=n}get sqlText(){return this.queryParams.statement}mapRowCountColumns(n,i){let o=i.trim().toUpperCase(),s;return o.startsWith("UPDATE")?s="UPDATE":o.startsWith("DELETE")&&(s="DELETE"),n.map(a=>{if(a.columnName==="num_inserted_rows")return{...a,columnName:"number of rows inserted"};if(a.columnName==="num_updated_rows")return{...a,columnName:"number of rows 
updated"};if(a.columnName==="num_deleted_rows")return{...a,columnName:"number of rows deleted"};if(a.columnName==="num_affected_rows"){if(s==="UPDATE")return{...a,columnName:"number of rows updated"};if(s==="DELETE")return{...a,columnName:"number of rows deleted"}}return a})}shouldResetConnection=n=>!!Da(n).error.errorString.match(/(network ?error|econnreset)/i);handleRetry=async(n,i)=>{if(this.shouldResetConnection(n))try{this.handle=await this.resetFunc()}catch(o){throw i$.errorContext(this.logContext,"Resetting connect for request CQID=",this.coalesceQueryID,"Error running connection reset callback:",o),n.coalesceCause}return i};async ensureSession(){if(!this.handle.session){let o=this.getSessionOptions();return this.handle.session=await this.handle.client.openSession(o),this.handle.sessionOptions=o,this.handle.session}let n=this.getSessionOptions(),i=this.handle.sessionOptions;if(n.initialCatalog!==i.initialCatalog||n.initialSchema!==i.initialSchema){try{await this.handle.session.close()}catch(o){i$.errorContext(this.logContext,"Error closing previous session for CQID=",this.coalesceQueryID,o)}finally{this.handle.session=void 0}this.handle.session=await this.handle.client.openSession(n),this.handle.sessionOptions=n}return this.handle.session}getSessionOptions(){return{initialCatalog:this.queryParams.initialCatalog,initialSchema:this.queryParams.initialSchema}}async queryWithRetries(){let n=`[TrackedPendingDatabricksQuery.queryWithRetries() - '${this.queryParams.statement}']`;try{i$.debugContext(this.logContext,"Making request for CQID=",this.coalesceQueryID);let i=async()=>{G3("databricks.runQuery.attempt",this.logContext);let d={columns:[],rows:[],status:"Success"};try{let f=await(await this.ensureSession()).executeStatement(this.queryParams.statement,this.queryParams.queryOptions),h=(await f.getSchema())?.columns;h&&(d.columns=h);let g=0;do{let T=await f.fetchChunk({maxRows:1e3});if(g+=cqe(T,!0),d.rows.push(...T),g>e2e())throw new Hft(g,e2e())}while(await 
f.hasMoreRows());d.columns=this.mapRowCountColumns(d.columns,this.queryParams.statement);let b=await f.status();G3("databricks.runQuery.finished",{...this.logContext,statusCode:b.status.statusCode??-1})}catch(p){if(G3("databricks.runQuery.error",{...this.logContext,errorString:Da(p).error.errorString}),p instanceof Hft)return d.status=jD,i$.errorContext(this.logContext,n,p.detail,`; returning partial set of rows (${d.rows.length})`),d;throw i$.errorContext(this.logContext,"Error in databricks query",p),new cV(this.queryParams,p)}return i$.debugContext(this.logContext,"Successfully got result for CQID=",this.coalesceQueryID),d},o=(d,p)=>{if(!(typeof d=="object"&&d instanceof cV))return i$.errorContext(this.logContext,"Not retrying request CQID=",this.coalesceQueryID,"attempt=",p,"error=",d,"was unrecognized and not retryable"),d;if(DZe(d))return i$.infoContext(this.logContext,"Not retrying request, auth error detected CQID=",this.coalesceQueryID,"attempt=",p),d;if(d.isCancel())return i$.infoContext(this.logContext,"Not retrying request, it was canceled CQID=",this.coalesceQueryID,"error=",d.coalesceCause?d.coalesceCause:d),d},s=d=>["CQID=",this.coalesceQueryID,"error=",d.cause?d.cause:d],a={name:"backoff",base:5},u={name:"nowait"},l=async d=>{typeof d=="object"&&d instanceof cV&&await this.handleRetry(d,this.queryParams)},c=(d,p)=>{let f=JSON.stringify(Da(d,!0).error);return f.match(/network ?error/i)||f.match(/bad HTTP status code: 5/)?a:p||u};return await yh(i,this.maxAttempts,i$,this.logContext,{beforeRetry:l,alterRetryStrategy:c,checkError:o,retryStrategy:u,retryLogLevel:"info",logOnRetry:"Retrying databricks request",customErrorOnRetry:s,logOnFailure:"Out of retries for request",customErrorOnFailure:s})}catch(i){throw i instanceof cV?i:new cV(this.queryParams,i)}finally{this.completion(this.coalesceQueryID)}}async tryCancel(){try{await this.handle.session?.close()}catch(n){throw new Xe("Error canceling Databricks 
// Returns true when the given error represents an HTTP 403 (authorization
// failure) — either directly on the error or on its wrapped coalesceCause —
// in which case the caller should not retry the request.
const DZe = (err) => {
  const wrappedStatus = err?.coalesceCause?.statusCode;
  const directStatus = err?.statusCode;
  return wrappedStatus === 403 || directStatus === 403;
};
// Normalizes a host/URL string to carry an https:// scheme, prefixing one
// only when the string does not already start with it.
function V2s(url) {
  if (url.startsWith("https://")) {
    return url;
  }
  return `https://${url}`;
}
// Formats an HTTP error response body as "<error>: <message>".
// Missing fields interpolate as the string "undefined", matching the
// original template-literal behavior.
const J7A = (err) => {
  const data = err.response?.data;
  return `${data?.error}: ${data?.message}`;
};
10333
// Collects the union of enumerable property names found across all values
// of the given record, returned as a Set.
const ghe = (record) => {
  const keys = new Set();
  for (const value of Object.values(record)) {
    // for...in deliberately mirrors the original: it also visits
    // inherited enumerable properties of each value.
    for (const key in value) {
      keys.add(key);
    }
  }
  return keys;
};
${n.lastName}`,fbFirstName:n.firstName,fbLastName:n.lastName,fbHasSuperUserAccess:n.hasSuperUserAccess,fbActivated:n.activated??!0};return t?.populateProvisioningAccess&&(i.hasProvisioningAccess=n.hasProvisioningAccess),i},Qvc=async(r,t)=>(await r.org(t).fetch()).superDebugEnabled;var aSl=Tt(m9c());var uSl=Tt(Jho());var YCl=Tt(_Be());var VCl=Tt(_Be());var $Fc=Tt(JFc());var $Tr=Tt(v7());var LQo=require("perf_hooks");function w8f(r){let t=LQo.performance.now()*.001,n=Math.floor(t),i=Math.floor(t%1*1e9);return r!=null&&(n=n-r[0],i=i-r[1],i<0&&(n--,i+=1e9)),[n,i]}function gMe(){let[r,t]=w8f();return r*1e3+Math.floor(t/1e6)}function VTr(r){return new Promise(t=>setTimeout(t,r))}var KTr=class{constructor({bucketSize:t,tokensPerInterval:n,interval:i,parentBucket:o}){if(this.bucketSize=t,this.tokensPerInterval=n,typeof i=="string")switch(i){case"sec":case"second":this.interval=1e3;break;case"min":case"minute":this.interval=1e3*60;break;case"hr":case"hour":this.interval=1e3*60*60;break;case"day":this.interval=1e3*60*60*24;break;default:throw new Error("Invalid interval "+i)}else this.interval=i;this.parentBucket=o,this.content=0,this.lastDrip=gMe()}async removeTokens(t){if(this.bucketSize===0)return Number.POSITIVE_INFINITY;if(t>this.bucketSize)throw new Error(`Requested tokens ${t} exceeds bucket size ${this.bucketSize}`);this.drip();let n=async()=>{let i=Math.ceil((t-this.content)*(this.interval/this.tokensPerInterval));return await VTr(i),this.removeTokens(t)};if(t>this.content)return n();if(this.parentBucket!=null){let i=await this.parentBucket.removeTokens(t);return t>this.content?n():(this.content-=t,Math.min(i,this.content))}else return this.content-=t,this.content}tryRemoveTokens(t){return this.bucketSize?t>this.bucketSize||(this.drip(),t>this.content)||this.parentBucket&&!this.parentBucket.tryRemoveTokens(t)?!1:(this.content-=t,!0):!0}drip(){if(this.tokensPerInterval===0){let s=this.content;return this.content=this.bucketSize,this.content>s}let 
t=gMe(),n=Math.max(t-this.lastDrip,0);this.lastDrip=t;let i=n*(this.tokensPerInterval/this.interval),o=this.content;return this.content=Math.min(this.content+i,this.bucketSize),Math.floor(this.content)>Math.floor(o)}};var iKe=class{constructor({tokensPerInterval:t,interval:n,fireImmediately:i}){this.tokenBucket=new KTr({bucketSize:t,tokensPerInterval:t,interval:n}),this.tokenBucket.content=t,this.curIntervalStart=gMe(),this.tokensThisInterval=0,this.fireImmediately=i??!1}async removeTokens(t){if(t>this.tokenBucket.bucketSize)throw new Error(`Requested tokens ${t} exceeds maximum tokens per interval ${this.tokenBucket.bucketSize}`);let n=gMe();if((n<this.curIntervalStart||n-this.curIntervalStart>=this.tokenBucket.interval)&&(this.curIntervalStart=n,this.tokensThisInterval=0),t>this.tokenBucket.tokensPerInterval-this.tokensThisInterval){if(this.fireImmediately)return-1;{let o=Math.ceil(this.curIntervalStart+this.tokenBucket.interval-n);await VTr(o);let s=await this.tokenBucket.removeTokens(t);return this.tokensThisInterval+=t,s}}let i=await this.tokenBucket.removeTokens(t);return this.tokensThisInterval+=t,i}tryRemoveTokens(t){if(t>this.tokenBucket.bucketSize)return!1;let n=gMe();if((n<this.curIntervalStart||n-this.curIntervalStart>=this.tokenBucket.interval)&&(this.curIntervalStart=n,this.tokensThisInterval=0),t>this.tokenBucket.tokensPerInterval-this.tokensThisInterval)return!1;let i=this.tokenBucket.tryRemoveTokens(t);return i&&(this.tokensThisInterval+=t),i}getTokensRemaining(){return this.tokenBucket.drip(),this.tokenBucket.content}};var kQo=r=>r;var iY=Ye("SECRETS");function FQo(r,t){return!!r&&typeof r=="object"&&"code"in r&&r.code===t}function x8f(r){return FQo(r,$Tr.Status.INVALID_ARGUMENT)&&"details"in r&&typeof r.details=="string"&&r.details.startsWith("Secret payload data length must be at most")}function oKe(r){return FQo(r,$Tr.Status.NOT_FOUND)||N8f(r)}function N8f(r){return FQo(r,$Tr.Status.FAILED_PRECONDITION)&&typeof 
// Extracts the numeric version ID from a secret-version resource name of
// the form ".../versions/<n>" (e.g. "projects/p/secrets/s/versions/12" -> 12).
//
// Returns null when the name is falsy, the last "/"-separated segment is
// empty, or that segment does not begin with a decimal number.
function qQo(resourceName) {
  if (!resourceName) return null;
  // The version ID is the last path segment.
  const lastSegment = resourceName.split("/").pop();
  if (!lastSegment) return null;
  // Explicit radix 10: version IDs are decimal; avoids accidental
  // hex parsing of segments that happen to start with "0x".
  const version = Number.parseInt(lastSegment, 10);
  return Number.isNaN(version) ? null : version;
}
this.getByName(t)}catch(n){if(oKe(n))return this.getMissingSecret(t);throw n}};setMissingSecret=async(t,n)=>{let i=Zhr(t);if(!i)await this.setByName(t,n);else if(this.shouldUpgradeLegacyResultsToNewName())await this.setByName(t,n),await this.deleteByName(i,{ignoreMissing:!0});else try{await this.addVersionByName(i,n)}catch(o){if(oKe(o)){await this.setByName(t,n);return}throw o}};Set=async(t,n)=>{try{await this.addVersionByName(t,n);return}catch(i){if(oKe(i)){await this.setMissingSecret(t,n);return}throw x8f(i)&&iY.emerg("Payload exceeded for: ",t,"Reach out to support to determine if feature flags to adjust secret storage are appropriate next steps",i),i}};deleteByName=async(t,n)=>{try{await YTr.removeTokens(1),await this.client.deleteSecret({name:this.BuildSecretName(t)})}catch(i){if(n?.ignoreMissing&&oKe(i))return;throw i}};Delete=async t=>{let n=Zhr(t),i=[t];n&&i.push(n),await $4(i.map(o=>this.deleteByName(o,{ignoreMissing:!0})))};async PruneInaccessibleVersions(t){try{iY.info("Begin pruning inaccessible versions for secret",t),await q8f.runExclusive(async()=>{iY.info("Got lock for pruning inaccessible versions for secret",t);let i=(await this.listSecretVersions(t)).filter(o=>o.state==="ENABLED"&&!!qQo(o.name)).sort((o,s)=>qQo(s.name)-qQo(o.name)).slice(1);for(let o of i)try{iY.info("Destroying secret version",o.name),await this.destroySecretVersion(o.name),iY.info("Successfully destroyed secret version",o.name)}catch(s){oKe(s)?iY.error("Tried to prune secret",o.name,"but it was not found"):iY.alert("Error trying to prune inaccessible secret version",o.name,". Error: ",s)}},1)}catch(n){iY.alert("Fatal error trying to prune inaccessible secret versions for secret",t,". 
Error: ",n)}finally{iY.info("Completed pruning inaccessible versions for secret",t)}}};var Mol=Tt(Ool());var _Ir=class r{client;constructor(t){this.client=(0,Mol.createClient)({url:t})}static Create=async t=>{let n=new r(t);return await n.client.connect(),n};Get=async t=>{let n=await this.client.get(t);return n===null?null:new cd(n)};Set=async(t,n)=>{await this.client.set(t,n.sensitiveValue)};Delete=async t=>{await this.client.del(t)}};hr();var MRh=Tt(gwr());var bRh=M.enum(["org-settings-read","org-settings-write","org-users-read","org-users-write","org-users-create","org-users-delete","org-userRoles-read","org-userRoles-write","org-projects-create","org-tags-create","org-tags-delete","org-tags-write","org-tags-read"]),ERh=M.enum(["project-settings-read","project-summary-read","project-settings-write","project-delete","project-members-read","project-members-add","project-members-write","project-members-remove","project-userRoles-read","project-workspaces-create","project-documentation-read","project-environments-create","project-workspace-settings-read","project-workspace-settings-write","project-workspaces-delete","project-workspace-jobs-create","project-workspace-jobs-read","project-workspace-jobs-write","project-workspace-jobs-delete","project-workspace-macros-create","project-workspace-macros-read","project-workspace-macros-write","project-workspace-macros-delete","project-workspace-node-types-create","project-workspace-node-types-read","project-workspace-node-types-write","project-workspace-node-types-delete","project-workspace-nodes-create","project-workspace-nodes-read","project-workspace-nodes-write","project-workspace-nodes-delete","project-workspace-packages-create","project-workspace-packages-read","project-workspace-packages-write","project-workspace-packages-delete","project-workspace-sql-run","project-workspace-subgraphs-create","project-workspace-subgraphs-read","project-workspace-subgraphs-write","project-workspace-subgraphs-delete"]),CRh=M.enum(["en
v-run-results-read","env-run-job","env-delete","env-deploy","env-refresh","env-settings-read","env-summary-read","env-settings-write","env-documentation-read","env-members-read","env-members-add","env-members-write","env-members-remove","env-userRoles-read","env-job-schedule-write","env-job-schedule-read"]),SRh={reader:{permissions:["env-run-results-read","env-summary-read","env-documentation-read","env-job-schedule-read"],inheritedEnvironmentRoles:[]},admin:{permissions:[...CRh.options],inheritedEnvironmentRoles:["reader"]}},TRh={member:{permissions:["project-summary-read","project-members-read","project-workspace-sql-run","project-workspace-nodes-read"],inheritedProjectRoles:[],inheritedEnvironmentRoles:[]},contributor:{permissions:["project-settings-read","project-members-read","project-workspaces-create","project-workspace-settings-read","project-workspace-settings-write","project-workspaces-delete","project-workspace-jobs-read","project-workspace-jobs-create","project-workspace-jobs-write","project-workspace-jobs-delete","project-workspace-macros-read","project-workspace-node-types-read","project-workspace-nodes-create","project-workspace-nodes-write","project-workspace-nodes-delete","project-workspace-packages-read","project-workspace-subgraphs-read","project-workspace-subgraphs-create","project-workspace-subgraphs-write","project-workspace-subgraphs-delete","project-documentation-read","env-delete","env-documentation-read"],inheritedProjectRoles:["member"],inheritedEnvironmentRoles:["reader"]},architect:{permissions:["project-workspace-macros-create","project-workspace-macros-write","project-workspace-macros-delete","project-workspace-node-types-create","project-workspace-node-types-write","project-workspace-node-types-delete","project-workspace-packages-create","project-workspace-packages-write","project-workspace-packages-delete"],inheritedProjectRoles:["contributor"],inheritedEnvironmentRoles:["reader"]},admin:{permissions:[...ERh.options],inheritedProject
Roles:["architect"],inheritedEnvironmentRoles:["admin"]}},vRh={member:{permissions:["org-tags-read"],inheritedOrganizationRoles:[],inheritedProjectRoles:[]},contributor:{permissions:["org-projects-create"],inheritedOrganizationRoles:["member"],inheritedProjectRoles:[]},admin:{permissions:[...bRh.options],inheritedOrganizationRoles:["contributor"],inheritedProjectRoles:["admin"]}},ywr=r=>{let t=SRh[r],n=new Set(t.permissions);for(let i of t.inheritedEnvironmentRoles)ywr(i).forEach(o=>n.add(o));return Array.from(n)},NZe=r=>{let t=TRh[r],n=new Set(t.permissions);for(let i of t.inheritedProjectRoles)NZe(i).forEach(o=>n.add(o));for(let i of t.inheritedEnvironmentRoles)ywr(i).forEach(o=>n.add(o));return Array.from(n)},mwr=r=>{let t=vRh[r],n=new Set(t.permissions);for(let i of t.inheritedOrganizationRoles)mwr(i).forEach(o=>n.add(o));for(let i of t.inheritedProjectRoles)NZe(i).forEach(o=>n.add(o));return Array.from(n)},PCl={admin:mwr("admin"),contributor:mwr("contributor"),member:mwr("member")},DCl={admin:NZe("admin"),architect:NZe("architect"),contributor:NZe("contributor"),member:NZe("member")},BCl={admin:ywr("admin"),reader:ywr("reader")};var IRh=Ye("USER_MANAGEMENT"),tts=class{throwIfOrgPermissionDenied(t,n=!1){if(!this.hasOrgPermission(t))throw n?new ru("The requested resource is unavailable"):new gR("Permission denied",`Permission '${t}' is required to perform this operation.`)}throwIfProjectPermissionDenied(t,n,i=!1){if(!this.hasProjectPermission(t,n))throw i?new ru("The requested resource is unavailable"):new gR("Permission denied",`Permission '${t}' is required to perform this operation.`)}throwIfEnvironmentPermissionDenied(t,n,i,o=!1){if(!this.hasEnvironmentPermission(t,n,i))throw o?new ru("The requested resource is unavailable"):new gR("Permission denied",`'Permission ${t}' is required to perform this operation.`)}},bwr=class r extends 
tts{constructor(n,i){super();this.userRoles=n;this.hasSuperUserAccess=i;this.userPermissions=this.ConstructPermissionsFromRoles(n)}userPermissions;get orgRoles(){return this.userRoles?.organizationRoles??[]}hasOrgPermissionInternal(n){return this.userPermissions.organizationPermissions?.has(n)}hasOrgPermission(n){return this.hasSuperUserAccess||this.hasOrgPermissionInternal(n)}hasProjectPermissionInternal(n,i){return this.userPermissions.projects[i]?.projectPermissions?.has(n)}hasProjectPermission(n,i){return this.hasSuperUserAccess||this.hasOrgPermissionInternal(n)||this.hasProjectPermissionInternal(n,i)}hasEnvironmentPermissionInternal(n,i,o){return this.userPermissions.projects[i]?.environments[o]?.environmentPermissions?.has(n)}hasEnvironmentPermission(n,i,o){return this.hasSuperUserAccess||this.hasOrgPermissionInternal(n)||this.hasProjectPermissionInternal(n,i)||this.hasEnvironmentPermissionInternal(n,i,o)}ConstructPermissionsFromRoles(n){return n?(n.projects||(IRh.warn(`userRoles.projects is undefined for user ${n.id}, defaulting to empty object`),n.projects={}),{organizationPermissions:r.ConstructOrgPermissionsFromRoles(n.organizationRoles),projects:r.ConstructProjectsPermissionsFromRoles(n.projects)}):{organizationPermissions:new Set,projects:{}}}static ConstructEnvironmentPermissionsFromRoles(n){let i=new Set;return n.environmentRoles.forEach(o=>{BCl[o].forEach(s=>{i.add(s)})}),i}static ConstructEnvironmentPermissionObjectsFromRoles(n){let i={};return Object.entries(n).forEach(([o,s])=>{i[o]={environmentPermissions:r.ConstructEnvironmentPermissionsFromRoles(s)}}),i}static ConstructProjectPermissionsFromRoles(n){let i=new Set;return n.projectRoles.forEach(s=>{DCl[s].forEach(a=>{i.add(a)})}),{projectPermissions:i,environments:r.ConstructEnvironmentPermissionObjectsFromRoles(n.environments)}}static ConstructProjectsPermissionsFromRoles(n){let i={};return Object.keys(n).forEach(o=>{i[o]=this.ConstructProjectPermissionsFromRoles(n[o])}),i}static 
// Build-stamp accessor: returns the released package version and the git
// commit hash this bundle was built from.
const RRh = () => {
  const version = "7.33.0";
  const hash = "8bde53626ab316b2e3d8a636a4442647f2d62e46";
  return { version, hash };
};
xRh=M.object({auth:M.string(),firestore:M.string(),storage:M.string()}),NRh=M.object({apiKey:M.string(),authDomain:M.string(),projectId:M.string(),storageBucket:M.string(),messagingSenderId:M.string(),appId:M.string(),measurementId:M.string(),emulatorInfo:xRh.optional()}),FCl=br("FirebaseConfig",NRh);var rts=r=>{r$("firestore.delete",{path:r})},Ewr=r=>{r$("firestore.set",{path:r})},nts=r=>{r$("firestore.update",{path:r})};var its=r=>{r$("firestore.get",{path:r})},ots=r=>{r$("firestore.transaction.get",{path:r})},sts=r=>{r$("firestore.transaction.set",{path:r})},ats=r=>{r$("firestore.transaction.update",{path:r})},uts=r=>{r$("firestore.transaction.delete",{path:r})},cts=()=>{r$("firestore.batch")};var dq2=ID.getTracer("firestore");var zCl=async(r,t=1e3)=>{let n=()=>{},i=new Promise(o=>{n=r.onSnapshot(s=>{s.metadata.hasPendingWrites||o(s)})});return await OI(t,`Timed out in GetDocumentNoPendingWrites after ${t} ms`,i).finally(n)};var UCl=Tt(Vkt());async function QCl(r){let t=new UCl.ProxyAgent;r.httpProxy=t,r.httpsProxy=t}var HCl=(0,jCl.default)(async()=>{if(J1e)return Ye("RUNNER_BACKEND").info("WARNING: API using Firebase emulator!"),{apiKey:"none",authDomain:"none",projectId:"coalesce",storageBucket:"coalesce.appspot.com",messagingSenderId:"none",appId:"none",measurementId:"none",emulatorInfo:J1e};{let r=await(0,GCl.readFile)(process.env.FIREBASE_CONFIG_PATH??"/firebase.json"),t=FCl.assert(JSON.parse(r.toString()));return Ye("RUNNER_BACKEND").info("API using Firebase config:",t),t}});async function WCl(){return a8e()||cja()?await HCl():A1.app().options}var lts=async(r,t)=>{await QCl(r),await E1c(r,t)};var dts=Ye("SECRETS"),KCl=(0,VCl.default)(async()=>{switch(process.env.COALESCE_SECRET_STORE_TYPE){case"redis":if(dts.app("Selected Redis secret store."),!process.env.COALESCE_SECRET_STORE_REDIS_URL)throw new Error("The redis secret store is configured, but no COALESCE_SECRET_STORE_REDIS_URL was defined.");return await 
_Ir.Create(process.env.COALESCE_SECRET_STORE_REDIS_URL);case"memory":return dts.app("Selected Memory secret store."),uMo();case"google":default:return dts.app("Selected Google Secrets Manager secret store."),new JTr((await WCl()).projectId)}});var nT=(0,YCl.default)(async()=>new mGe(await KCl()));var PRh=Ye("SECRETS"),PZe=async(r,t,n,i)=>{let o={userID:t.userId,orgID:t.orgId,workspaceID:+t.workspaceId},s=n.UpgradeTarget;if(i&&s){let a=await r.Get(s,t);return a||(a=await r.Get(n,t),a&&(PRh.infoContext(o,`v2 Snowflake secrets feature flag was set, found v1 credentials. Upgrading v1 to v2 for ${n.typeName}`),await r.Set(s,t,a),await r.Delete(n,t))),a}else return await r.Get(n,t)};var JCl=async(r,t,n)=>{let i=n.isFeatureEnabled("enableSnowflakeOAuthSecretsV2");return PZe(r,t,RGe,i)},$Cl=async(r,t)=>await r.Get(TGe,t);var XCl=async(r,t)=>await r.Get(SGe,t),ZCl=async(r,t)=>await r.Get(EGe,t),eSl=async(r,t,n)=>PZe(r,t,IGe,n),tSl=async(r,t,n)=>{let i=n.isFeatureEnabled("enableSnowflakeSecretsV2");return PZe(r,t,Roe,i)},rSl=async r=>await(await nT()).Get(CGe,r),nSl=async r=>await(await nT()).Get(oK,r);var iSl=async(r,t,n)=>await r.Set(oK,t,n),oSl=async(r,t,n,i)=>{if(!i.isFeatureEnabled("enableSnowflakeSecretsV2"))return await r.Set(Roe,t,n);await r.Get(Roe,t)&&await r.Delete(Roe,t),await r.Set(JOe,t,n)};var pts=Ye("CONNECTION"),sSl=r=>{if(!r.host)throw new no("Could not get a valid host for databricks user");if(!r.path)throw new no("Could not get a valid path for databricks user")},Cwr=async r=>{let t,n=new aSl.DBSQLClient;if(!r.host)throw new no("Could not get a valid host for databricks user");if(!r.path)throw new no("Could not get a valid path for databricks user");if(r.authenticator==="Token"){if(sSl(r),!r.token)throw new no("Could not get a valid token for databricks user");t={host:r.host,path:r.path,token:r.token,clientId:"Coalesce.IO_Coalesce"}}else if(r.authenticator==="OAuthU2M"){if(!r.accessToken)throw new no("Could not get a valid access token for databricks 
OAuth U2M authentication");t={host:r.host,path:r.path,token:r.accessToken,clientId:"Coalesce.IO_Coalesce",authType:"access-token"}}else if(r.authenticator==="OAuthM2M"){if(sSl(r),!r.clientID||!r.secret)throw new no("Missing Client ID or Secret for databricks user");t={authType:"databricks-oauth",oauthClientId:r.clientID,oauthClientSecret:r.secret,host:r.host,path:r.path}}else throw new no(`Could not authenticate using ${r.authenticator}`);return await n.connect(t)},cSl=async r=>{let{workspaceID:t,orgID:n,userID:i}=r;try{let o=await nSl({orgId:n,userId:i,workspaceId:t.toString()});if(pts.infoContext(r,"get access token from secret (GetAccessTokenFromSecret)"),!o)throw new uqe("Invalid OAuth configuration.");return o.sensitiveValue}catch(o){throw pts.errorContext(r,"Error getting access token from secret (GetAccessTokenFromSecret)",o),o}},lSl=async(r,t,n)=>{if(!r.connectionAccount)throw new X1e("Missing connection account for Databricks OAuth refresh","OAuthU2M");let o=`${`https://${r.connectionAccount}`}/oidc/v1/token`,s=uSl.default.stringify({grant_type:"refresh_token",client_id:r.oauthClientID,client_secret:r.oauthClientSecret,refresh_token:t}),a={method:"post",url:o,headers:{"Content-Type":"application/x-www-form-urlencoded"},data:s};try{return(await wc(a)).data.access_token}catch(u){throw pts.errorContext(n,"RefreshAccessToken",u),new X1e("An error occurred refreshing your OAuth refresh token. 
Please ensure your OAuth Security Integration is still valid or reauthenticate.","OAuthU2M",u)}};var dSl;var G3=(r,t={},n=1)=>{if(dSl){let o={appContext:lFt()??"unknown"};J1e&&(o.emulated="true"),dSl.increment(r,{...o,...t},n)}};var Zbe=Ye("CONNECTION"),n$=class{constructor(t,n,i,o,s,a){this.handle=t;this.logContext=n;this.connectionID=i;this.useCounter=o;this.parent=s;a?this.resetConnectionHandle=()=>a():s?this.resetConnectionHandle=()=>s.reinitialize():this.resetConnectionHandle=()=>{throw new Jg("Cannot reset the connection, no reset function provided")}}runningQueries=new Map;wasCanceled=!1;resetConnectionHandle;get memoryMonitor(){return this.parent?this.parent.managerQueryStats.memoryMonitor:(Zbe.debugContext(this.logContext,"Could not get memory monitor from parent. This may be expected.",this.connectionID),{logDeltaMetrics:()=>{},getMemoryUsage:()=>{}})}async resetConnection(){if(this.wasCanceled)throw new Jh("not resetting canceled connection");G3(`${this.name}.resetConnection`,this.logContext);let t=await this.resetConnectionHandle();return this.handle=t,t}async executeAndGetRunning(t){let n=this.parent?.managerQueryStats.executions,i=!n||n%50!==0,o=`(${this.name}) BaseConnection.executeAndGetRunning()`,s=this.memoryMonitor.getMemoryUsage(this.logContext),a=await this.makePendingQuery(t);return a.attachThen(u=>{this.parent&&this.parent.updateFromResult(u),this.memoryMonitor.logDeltaMetrics(this.logContext,s,o,`(connID: ${this.connectionID})[${this.name} executeAndGetRunning()]: completed pending query | total executions: ${n}`,i)}),this.parent&&this.parent.managerQueryStats.incrementExecutions(`[${this.name} executeAndGetRunning()]:${this.connectionID}`),a}execute=async t=>(await this.executeAndGetRunning(t)).promise;cancel=async()=>{try{if(Zbe.infoContext(this.logContext,"Begin canceling connection",this.connectionID),this.wasCanceled)return Zbe.infoContext(this.logContext,"Connection already 
canceled",this.connectionID),Promise.resolve();this.wasCanceled=!0;let t=Array.from(this.runningQueries.values()).map(async n=>{Zbe.infoContext(this.logContext,"Begin canceling query",n.coalesceQueryID,"conn",this.connectionID);try{await n.Cancel()}catch(i){throw Zbe.errorContext(this.logContext,"Error canceling query",n.coalesceQueryID,"conn",this.connectionID,i),i}finally{Zbe.infoContext(this.logContext,"Completed canceling query",n.coalesceQueryID,"conn",this.connectionID)}});await $4(t)}catch(t){throw Zbe.errorContext(this.logContext,"Error canceling connection",this.connectionID,t),t}finally{Zbe.infoContext(this.logContext,"Completed canceling connection",this.connectionID)}}};var DRh="20428800",e2e=()=>parseInt(process.env.MAX_RESPONSE_SIZE||DRh);var BRh=Ye("CONNECTION"),t2e=class{constructor(t,n,i,o,s=2){this.handle=t;this.queryParams=n;this.logContext=i;this.resetFunc=o;this.maxAttempts=s;this.queryParams.requestId=this.coalesceQueryID=this.queryParams.requestId??ou()}_promise=null;coalesceQueryID;get promise(){if(this.Start(),!this._promise)throw new Jg("Internal consistency error: _promise cannot be null after Start()!");return this._promise}Cancel=async()=>{BRh.infoContext(this.logContext,"Canceling the running query, CQID=",this.coalesceQueryID),await this.tryCancel()};Start=()=>{this._promise||(this._promise=this.queryWithRetries())};attachThen=t=>{this._promise=this.promise.then(n=>(t(n),n))}};var i$=Ye("CONNECTION"),a4t=class extends t2e{constructor(n,i,o,s,a,u=5){let l={...s,snowflakeRequestID:o.requestId};super(i,o,l,a,u);this.completion=n}get sqlText(){return this.queryParams.statement}mapRowCountColumns(n,i){let o=i.trim().toUpperCase(),s;return o.startsWith("UPDATE")?s="UPDATE":o.startsWith("DELETE")&&(s="DELETE"),n.map(a=>{if(a.columnName==="num_inserted_rows")return{...a,columnName:"number of rows inserted"};if(a.columnName==="num_updated_rows")return{...a,columnName:"number of rows 
updated"};if(a.columnName==="num_deleted_rows")return{...a,columnName:"number of rows deleted"};if(a.columnName==="num_affected_rows"){if(s==="UPDATE")return{...a,columnName:"number of rows updated"};if(s==="DELETE")return{...a,columnName:"number of rows deleted"}}return a})}shouldResetConnection=n=>!!Da(n).error.errorString.match(/(network ?error|econnreset)/i);handleRetry=async(n,i)=>{if(this.shouldResetConnection(n))try{this.handle=await this.resetFunc()}catch(o){throw i$.errorContext(this.logContext,"Resetting connect for request CQID=",this.coalesceQueryID,"Error running connection reset callback:",o),n.coalesceCause}return i};async ensureSession(){if(!this.handle.session){let o=this.getSessionOptions();return this.handle.session=await this.handle.client.openSession(o),this.handle.sessionOptions=o,this.handle.session}let n=this.getSessionOptions(),i=this.handle.sessionOptions;if(n.initialCatalog!==i.initialCatalog||n.initialSchema!==i.initialSchema){try{await this.handle.session.close()}catch(o){i$.errorContext(this.logContext,"Error closing previous session for CQID=",this.coalesceQueryID,o)}finally{this.handle.session=void 0}this.handle.session=await this.handle.client.openSession(n),this.handle.sessionOptions=n}return this.handle.session}getSessionOptions(){return{initialCatalog:this.queryParams.initialCatalog,initialSchema:this.queryParams.initialSchema}}async queryWithRetries(){let n=`[TrackedPendingDatabricksQuery.queryWithRetries() - '${this.queryParams.statement}']`;try{i$.debugContext(this.logContext,"Making request for CQID=",this.coalesceQueryID);let i=async()=>{G3("databricks.runQuery.attempt",this.logContext);let d={columns:[],rows:[],status:"Success"};try{let f=await(await this.ensureSession()).executeStatement(this.queryParams.statement,this.queryParams.queryOptions),h=(await f.getSchema())?.columns;h&&(d.columns=h);let g=0;do{let T=await f.fetchChunk({maxRows:1e3});if(g+=cqe(T,!0),d.rows.push(...T),g>e2e())throw new Hft(g,e2e())}while(await 
f.hasMoreRows());d.columns=this.mapRowCountColumns(d.columns,this.queryParams.statement);let b=await f.status();G3("databricks.runQuery.finished",{...this.logContext,statusCode:b.status.statusCode??-1})}catch(p){if(G3("databricks.runQuery.error",{...this.logContext,errorString:Da(p).error.errorString}),p instanceof Hft)return d.status=jD,i$.errorContext(this.logContext,n,p.detail,`; returning partial set of rows (${d.rows.length})`),d;throw i$.errorContext(this.logContext,"Error in databricks query",p),new cV(this.queryParams,p)}return i$.debugContext(this.logContext,"Successfully got result for CQID=",this.coalesceQueryID),d},o=(d,p)=>{if(!(typeof d=="object"&&d instanceof cV))return i$.errorContext(this.logContext,"Not retrying request CQID=",this.coalesceQueryID,"attempt=",p,"error=",d,"was unrecognized and not retryable"),d;if(DZe(d))return i$.infoContext(this.logContext,"Not retrying request, auth error detected CQID=",this.coalesceQueryID,"attempt=",p),d;if(d.isCancel())return i$.infoContext(this.logContext,"Not retrying request, it was canceled CQID=",this.coalesceQueryID,"error=",d.coalesceCause?d.coalesceCause:d),d},s=d=>["CQID=",this.coalesceQueryID,"error=",d.cause?d.cause:d],a={name:"backoff",base:5},u={name:"nowait"},l=async d=>{typeof d=="object"&&d instanceof cV&&await this.handleRetry(d,this.queryParams)},c=(d,p)=>{let f=JSON.stringify(Da(d,!0).error);return f.match(/network ?error/i)||f.match(/bad HTTP status code: 5/)?a:p||u};return await yh(i,this.maxAttempts,i$,this.logContext,{beforeRetry:l,alterRetryStrategy:c,checkError:o,retryStrategy:u,retryLogLevel:"info",logOnRetry:"Retrying databricks request",customErrorOnRetry:s,logOnFailure:"Out of retries for request",customErrorOnFailure:s})}catch(i){throw i instanceof cV?i:new cV(this.queryParams,i)}finally{this.completion(this.coalesceQueryID)}}async tryCancel(){try{await this.handle.session?.close()}catch(n){throw new Xe("Error canceling Databricks 
session",JSON.stringify(Da(n).error))}finally{this.handle.session=void 0}}},DZe=r=>{let t=r,n=t?.coalesceCause?.statusCode,i=t?.statusCode;return n===403||i===403};var fts=Ye("CONNECTION"),LRh=async(r,t,n,i,o)=>{let s=u=>{},a=new a4t(s,r,t,n,i,o);return a.Start(),a.promise},hts=r=>({client:r,session:void 0,sessionOptions:{}}),BZe=class extends n${name="databricks";async teardown(){await Ats(this.handle,this.logContext)}untrackedQuery=async(t,n)=>LRh(this.handle,{statement:t,...n},this.logContext,this.resetConnection,3);setQueryTag=async t=>{};async makePendingQuery(t){if(this.wasCanceled)throw new Jh;let n=o=>this.runningQueries.delete(o),i=new a4t(n,this.handle,t,this.logContext,this.resetConnection);if(this.runningQueries.set(i.coalesceQueryID,i),fts.debugContext(this.logContext,`Tracking ${this.name} query with CQID=`,i.coalesceQueryID,"query=",i.sqlText),i.Start(),this.wasCanceled)throw fts.infoContext(this.logContext,"Canceling just-started query due to connection cancellation",i.coalesceQueryID,"conn",this.connectionID),await i.Cancel(),new Jh("canceling new query on canceled connection");return i}getConnectionLogContext(){return this.logContext}},pSl=async(r,t)=>{let n=async()=>{let o=await Cwr(r);return hts(o)},i=await n();return new BZe(i,t,ou(),0,null,n)},Ats=async(r,t)=>{if(r.session)try{await r.session.close()}catch(n){fts.errorContext(t,"Error closing session during teardown",n)}await r.client.close()};var Jpd=Tt(Jho()),qFr=Tt(Tdd());function vdd(r,t=null){return t??`https://${r}.snowflakecomputing.com`}var Kpd=Tt(Vpd());var xuE=Ye("JSON_PARSER");var G7A=Ye("XML_PARSER");var Ypd=r=>{let t=new Kpd.Parser({attrkey:"@",charkey:"#",explicitArray:!1}),n;return t.parseString(r,(i,o)=>{if(i)throw i;n=o}),G7A.debug("$Parsed JSON",JSON.stringify(n,null,2)),n};var Yot=Ye("CONNECTION"),H7A=()=>{let 
r=process.env.COALESCE_SNOWFLAKE_LOG_LEVEL?.toUpperCase(),t=r==="ERROR"||r==="WARN"||r==="INFO"||r==="DEBUG"||r==="TRACE"?r:"OFF",n=process.env.COALESCE_SNOWFLAKE_LOG_FILE;qFr.configure({xmlColumnVariantParser:Ypd,keepAlive:!1,logLevel:t,logFilePath:n})};H7A();function V2s(r){return r.startsWith("https://")?r:`https://${r}`}var $pd={sfRetryMaxLoginRetries:4},K2s=r=>!(!r.accountName||!r.user),W7A=(r,t)=>{if(!K2s(r))throw Yot.infoContext(t,"Error creating basic snowflake connection parameters (CreateSnowflakeConnectionBasic)",{user:Gpe(r.user),accountName:Gpe(r.accountName)}),new uV("Invalid Connection Information, please correct your account or username and proceed.","Basic");let{accountName:n,accessUrl:i,user:o,password:s,role:a,warehouse:u}=r;return{account:n,username:o,password:s,application:"CoalesceIO_Coalesce",...i&&{accessUrl:V2s(i)},...a&&{role:a},...u&&{warehouse:u},...$pd}},V7A=(r,t)=>{if(!K2s(r))throw Yot.infoContext(t,"Error creating basic snowflake connection (CreateSnowflakeConnectionKeyPair)",{user:Gpe(r.user),accountName:Gpe(r.accountName)}),new uV("Invalid Connection Information, please correct your account or username and proceed.","KeyPair");let{accountName:n,accessUrl:i,user:o,warehouse:s,role:a}=r;return{authenticator:"SNOWFLAKE_JWT",privateKey:r.keyPairKey,privateKeyPass:r.keyPairPass??void 0,account:n,...i&&{accessUrl:V2s(i)},username:o,warehouse:s||"",application:"CoalesceIO_Coalesce",role:a||"",...$pd}},Y2s=async(r,t)=>{let{workspaceID:n,orgID:i,userID:o}=r;try{let s=await nT(),a=await tSl(s,{orgId:i,userId:o,workspaceId:n.toString()},t);if(Yot.infoContext(r,"get access token from secret (GetAccessTokenFromSecret)"),!a)throw new uqe("Invalid OAuth configuration.");return a.sensitiveValue}catch(s){throw Yot.errorContext(r,"Error getting access token from secret (GetAccessTokenFromSecret)",s),s}},K7A=async(r,t,n)=>{if(!K2s(r))throw Yot.infoContext(t,"Error creating basic snowflake connection 
(CreateSnowflakeConnectionOAuth)",{user:Gpe(r.user),accountName:Gpe(r.accountName)}),new uV("Invalid Connection Information, please correct your account or username and proceed.","OAuth");let i=await Y2s(t,n),{accountName:o,accessUrl:s,user:a,warehouse:u}=r;return{authenticator:"OAUTH",account:o,...s&&{accessUrl:V2s(s)},username:a,token:i.access_token??void 0,warehouse:u||"",application:"CoalesceIO_Coalesce"}},Y7A=r=>{try{return qFr.createConnection(r)}catch(t){throw new dRe(t)}},FFr=async(r,t,n)=>{let i;switch(r.authenticator){case"Basic":case"Cloud":i=W7A(r,t);break;case"KeyPair":i=V7A(r,t);break;case"OAuth":i=await K7A(r,t,n);break;default:throw new Jg(`authenticator type of ${r.authenticator} not supported`)}return Y7A(i)},J7A=r=>`${r.response?.data?.error}: ${r.response?.data?.message}`,Xpd=async(r,t,n)=>{let i=Jpd.default.stringify({grant_type:"refresh_token",refresh_token:t}),s=`${vdd(r.connectionAccount,r.accessUrl)}/oauth/token-request`,a=Buffer.from(`${r.oauthClientID}:${r.oauthClientSecret}`).toString("base64"),u={method:"post",url:s,headers:{Authorization:`Basic ${a}`,"Content-Type":"application/x-www-form-urlencoded"},data:i};try{return(await wc(u)).data.access_token}catch(l){let c=l instanceof qzi?J7A(l):"unknown";throw Yot.errorContext(n,`RefreshAccessToken: ${c}`,l),new uV("An error occured refreshing your OAuth refresh_token. Please ensure your OAuth Security Integration is still valid or reauthenticate.","OAuth",l)}};var J2s=Tt(require("v8"));var AX=Ye("CONNECTION"),zFr=class extends Error{constructor(n,i,o,s){super("Heap size exceeded");this.heapStats=n;this.currentSize=i;this.stmt=o;this.rows=s}makeResult=()=>{let n=this.stmt,i=n.getSessionState();return{rows:this.rows,stmt:this.stmt,status:jD,queryID:n.getStatementId(),warehouse:i?.getCurrentWarehouse?.()||null}};makeErrorMessage=n=>{let i=WH(this.heapStats.total_heap_size),o=WH(n.heap_size_limit),s=WH(this.currentSize),a=WH(e2e());return`Chunk limit reached,
10334
10334
  current heap size: ${i} MB, total heap size: ${o} MB,
10335
10335
  response size: ${s} MB, max response size: ${a} MB`}},$7A=(r,t)=>new Promise((n,i)=>{let o=[],s=0,a=r.getStatementId(),u=r.streamRows(),l=r.getRequestId(),c=r.getSessionState()?.getCurrentWarehouse?.(),d=0;u.on("error",p=>{i(new mL(p,t,l,a,c))}).on("data",p=>{if(s+=cqe(p,!0),o.push(p),d++,s>e2e()){let f=J2s.default.getHeapStatistics(),h=new zFr(f,s,r,o);i(h),u.destroy()}}).on("end",()=>{n({rows:o,stmt:r,status:"Success",queryID:a,warehouse:c||null})})}),X7A=async(r,t,n,i)=>{try{return await $7A(r,t)}catch(o){if(typeof o=="object"&&o instanceof zFr){let s=o.makeErrorMessage(n);return AX.errorContext(i,s),o.makeResult()}else{let s=o;throw typeof o=="object"&&o instanceof mL&&(s=o.toTRunError()),AX.alertContext(i,"Error on streaming rows:",s),o}}},Z7A="Unable to perform operation using terminated connection.",ekA=407002,tkA="ECONNRESET",rkA="OAuth access token expired.",Jot=r=>!!r.message.includes(rkA),nkA=r=>r.code===ekA||r.message===Z7A,ikA=r=>r&&r.code&&r.code===tkA,okA=(r,t)=>{let{promise:n,resolve:i,reject:o}=tI();return{statement:r.execute({...t,streamResult:!0,complete:function(a,u){if(a){let l=t.requestId||u&&u.getRequestId(),c=u&&u.getStatementId(),d=u&&u.getSessionState()?.getCurrentWarehouse?.();o(new mL(a,t,l,c,d))}else i(u)}}),resultStatementPromise:n}},NPe=async r=>{try{return await Z6u(r)}catch(t){throw SSo(t)?new dRe(t):t}},oDt=class extends t2e{constructor(n,i,o,s,a,u=5){let l={...s,snowflakeRequestID:o.requestId};super(i,o,l,a,u);this.completion=n}initialStatement=null;firstSnowflakeExecutionError;get sqlText(){return this.queryParams.sqlText}makePendingQuery(n){let{statement:i,resultStatementPromise:o}=okA(this.handle,n),s=c0(`Back-end (retry=${n.requestId!==void 0}): ${n.sqlText}`,this.logContext,"PERFORMANCE",async()=>{let a=await o;return X7A(a,n,J2s.default.getHeapStatistics(),this.logContext)});return{statement:i,resultPromise:s}}shouldResetConnection=n=>{let i=n.coalesceCause;return 
Jot(i)||ikA(i)||nkA(i)};handleRetry=async(n,i)=>{if(this.shouldResetConnection(n))try{this.handle=await this.resetFunc()}catch(o){throw AX.errorContext(this.logContext,"Resetting connect for request CQID=",this.coalesceQueryID,"Error running connection reset callback:",o),n.coalesceCause}return i};async queryWithRetries(){try{let n={...this.queryParams};AX.debugContext(this.logContext,"Making request for CQID=",this.coalesceQueryID);let i=async()=>{G3("snowflake.runQuery.attempt",this.logContext);let d=this.makePendingQuery(n);this.initialStatement=d.statement;let p;try{p=await d.resultPromise;let f=p.status;G3("snowflake.runQuery.finished",{...this.logContext,status:f})}catch(f){throw G3("snowflake.runQuery.error",{...this.logContext,errorString:Da(f).error.errorString}),this.firstSnowflakeExecutionError||f}return AX.debugContext(this.logContext,"Successfully got result for CQID=",this.coalesceQueryID),p},o=(d,p)=>{if(!(typeof d=="object"&&d instanceof mL))return AX.errorContext(this.logContext,"Not retrying request CQID=",this.coalesceQueryID,"requestId=",n.requestId,"attempt=",p,"error=",d,"was unrecognized and not retryable"),d;if(d.getSnowflakeErrorCode()==="002043")return AX.infoContext(this.logContext,"Not retrying request, insufficient privileges CQID=",this.coalesceQueryID,"requestId=",n.requestId,"attempt=",p,"error=",d.coalesceCause?d.coalesceCause:d),d;if(d.isCancel())return AX.infoContext(this.logContext,"Not retrying request, it was canceled CQID=",this.coalesceQueryID,"error=",d.coalesceCause?d.coalesceCause:d),d},s=async d=>{typeof d=="object"&&d instanceof mL&&(this.firstSnowflakeExecutionError||(this.firstSnowflakeExecutionError=d),n=await this.handleRetry(d,n))},a=d=>["CQID=",this.coalesceQueryID,"requestId=",n.requestId,"retryOptions=",n,"error=",d.cause?d.cause:d],u={name:"backoff",base:5},l={name:"nowait"},c=(d,p)=>B1c(d)||JSON.stringify(Da(d,!0).error).match(/network ?error/i)?u:p||l;return await 
yh(i,this.maxAttempts,AX,this.logContext,{alterRetryStrategy:c,beforeRetry:s,checkError:o,retryStrategy:l,retryLogLevel:"info",logOnRetry:"Retrying snowflake request",customErrorOnRetry:a,logOnFailure:"Out of retries for request",customErrorOnFailure:a})}finally{this.completion(this.coalesceQueryID)}}async tryCancel(){let n=this.initialStatement;n!==null&&await yh(()=>NPe(i=>n.cancel(i)),20,AX,this.logContext,{retryStrategy:{name:"random",min:.05,max:.1},checkError:i=>{if(!(typeof i=="object"&&i instanceof Error&&i.message.includes("Identified SQL statement is not currently executing")))return i}})}};var Zpd=Ye("CONNECTION"),$2s=async(r,t,n,i,o)=>{let s=u=>{},a=new oDt(s,r,t,n,i,o);return a.Start(),a.promise},$ot=class extends n${currentQueryTag=null;name="snowflake";settingQueryTag=!1;async resetConnection(){let t=await super.resetConnection();return this.currentQueryTag&&!this.settingQueryTag&&await this.setQueryTag(this.currentQueryTag),t}untrackedQuery=async t=>$2s(this.handle,{sqlText:t},this.logContext,()=>this.resetConnection(),3);setQueryTag=async t=>{this.currentQueryTag=t;let n=this.currentQueryTag===null?"":this.currentQueryTag.replace("$","\\$");this.settingQueryTag=!0;try{G3("snowflake.queryTag",this.logContext),await this.untrackedQuery(`ALTER SESSION SET QUERY_TAG = $$${n}$$`)}finally{this.settingQueryTag=!1}};async makePendingQuery(t){if(this.wasCanceled)throw new Jh;let n=o=>this.runningQueries.delete(o),i=new oDt(n,this.handle,t,this.logContext,()=>this.resetConnection());if(this.runningQueries.set(i.coalesceQueryID,i),Zpd.debugContext(this.logContext,"Tracking snowflake query with CQID=",i.coalesceQueryID,"query=",i.sqlText),i.Start(),this.wasCanceled)throw Zpd.infoContext(this.logContext,"Canceling just-started query due to connection cancellation",i.coalesceQueryID,"conn",this.connectionID),await i.Cancel(),new Jh("canceling new query on canceled connection");return i}executeAndGetRunning(t){let n={...this.logContext};return 
t.requestId&&(n.requestID=t.requestId),G3("snowflake.execute",n),super.executeAndGetRunning(t)}async teardown(){await NPe(t=>this.handle?this.handle.destroy(t):t())}},e0d=async(r,t,n)=>{let i=()=>FFr(r,t,n),o=await i();return await NPe(s=>o.connect(s)),new $ot(o,t,ou(),0,null,i)};var PPe=require("perf_hooks");var UFr=class{logTag;gcStats;performanceObserver;isActive=!1;trackingStartTime;gcTypeMap={1:"scavenge",2:"mark_sweep_compact",4:"incremental_marking",8:"process_weak_callbacks",15:"all"};constructor(t={}){this.logTag="[GCTracker]: "+(t.logTag||""),this.gcStats=this.initializeGCStats(),t.autoStart!==!1&&this.start()}initializeGCStats(){return{scavenge:{count:0,totalDuration:0,maxPause:0},mark_sweep_compact:{count:0,totalDuration:0,maxPause:0},incremental_marking:{count:0,totalDuration:0,maxPause:0},process_weak_callbacks:{count:0,totalDuration:0,maxPause:0},all:{count:0,totalDuration:0,maxPause:0},unknown:{count:0,totalDuration:0,maxPause:0}}}start(){if(!(this.isActive||this.performanceObserver))try{if(typeof PPe.PerformanceObserver>"u"||typeof PPe.performance>"u"){Ye("PERFORMANCE").app(`${this.logTag} PerformanceObserver not available in this environment`);return}this.performanceObserver=new PPe.PerformanceObserver(t=>{for(let n of t.getEntries())if(n.entryType==="gc"){let o=n.detail.kind||0,s=this.gcTypeMap[o]||"unknown",a=n.duration,u=this.gcStats[s],l=this.gcStats.all,c=PPe.performance.timeOrigin+n.startTime;u.firstGCTime||(u.firstGCTime=c),l.firstGCTime||(l.firstGCTime=c),u.count++,l.count++,u.totalDuration+=a,l.totalDuration+=a,u.lastGCTime=c,l.lastGCTime=c,u.maxPause=Math.max(u.maxPause,a),l.maxPause=Math.max(l.maxPause,a)}}),this.performanceObserver.observe({entryTypes:["gc"]}),this.isActive=!0,this.trackingStartTime=Date.now(),Ye("PERFORMANCE").debug(`${this.logTag} GC performance monitoring enabled`)}catch(t){Ye("PERFORMANCE").error(`${this.logTag} Failed to initialize GC 
monitoring:`,t)}}stop(){this.performanceObserver&&(this.performanceObserver.disconnect(),this.performanceObserver=void 0,this.isActive=!1)}analyzePatterns(){let t=[],n=[];if(!this.performanceObserver||!this.trackingStartTime)return{statistics:n,warnings:t};let s=(PPe.performance.now()-this.trackingStartTime)/6e4,a=Object.entries(this.gcStats);for(let[l,c]of a)if(!(l!=="scavenge"&&l!=="mark_sweep_compact")&&(l==="scavenge"&&c.maxPause>100?t.push(`Long ${l} GC pause: ${c.maxPause.toFixed(1)}ms where typical is <10ms`):l==="mark_sweep_compact"&&c.maxPause>400&&t.push(`Long ${l} GC pause: ${c.maxPause.toFixed(1)}ms where typical is 10-100ms`),s>0)){let d=c.count/s;n.push(`${l} GC frequency: ${d.toFixed(1)} GCs/minute`)}let u=this.gcStats.mark_sweep_compact;if(u.count>0&&s>0){let l=u.count/s;n.push(`Full GCs (mark_sweep_compact): ${l.toFixed(1)}/minute`)}return{statistics:n,warnings:t}}};var sDt=Tt(require("fs")),t0d=Tt(require("v8"));var lj=class extends MQe{logTag="[MemoryMonitor]: ";gcTracker;threshold=75;memoryLimit;constructor(){super(),this.memoryLimit=this.getMemoryLimit(),this.suppress||(this.gcTracker=new UFr({logTag:this.logTag}))}cleanup(){this.gcTracker?.stop()}getLogContextWithMetrics(t,n,i){let o={metric_type:"memory_telemetry",operation:i,rss_bytes:n.memUsage.rss,heap_used_bytes:n.memUsage.heapUsed,heap_total_bytes:n.memUsage.heapTotal,external_bytes:n.memUsage.external,warnings_count:n.warnings.length};try{if(this.memoryLimit){let s=n.memUsage.rss/this.memoryLimit[0]*100;o.limit_bytes=this.memoryLimit[0],o.oom_risk=s>=90?1:0}try{o.v8_available_bytes=n.heapStats.total_available_size,o.v8_heap_size_limit_bytes=n.heapStats.heap_size_limit}catch{}n.deltaData&&(o.rss_delta_bytes=n.deltaData.rssDelta,o.heap_delta_bytes=n.deltaData.heapDelta,o.external_delta_bytes=n.deltaData.externalDelta)}catch(s){Ye("PERFORMANCE").errorContext(t,`${this.logTag} Error getting metrics: ${s}`)}return{...t,memoryMetrics:o}}getMemoryUsage(t){if(typeof 
process<"u"&&process.memoryUsage!==void 0)return process.memoryUsage();Ye("PERFORMANCE").infoContext(t,`${this.logTag} Cannot get memory usage in browser environment`)}getMemoryLimit(){try{let t=this.getContainerCgroupLimit();if(t)return[t,"container"];let n=this.getNodeMemoryInfo();if(n)return[n,"node"]}catch{}}getContainerCgroupLimit(){try{let t=["/sys/fs/cgroup/memory.max"],n=["/sys/fs/cgroup/memory/memory.limit_in_bytes"];for(let i of[...t,...n])if(sDt.default.existsSync(i)){let o=sDt.default.readFileSync(i,"utf8").trim();if(o!=="max"){let s=parseInt(o,10);if(s>=67108864&&s<=1099511627776)return s}}}catch{}}getNodeMemoryInfo(){try{if(sDt.default.existsSync("/proc/meminfo")){let n=sDt.default.readFileSync("/proc/meminfo","utf8").match(/^MemTotal:\s+(\d+)\s+kB$/m);if(n&&n[1]){let o=parseInt(n[1],10)*1024;if(o>=1073741824&&o<=1099511627776)return o}}}catch{}}formatBytes(t){if(t===0)return"0B";let n=1024,i=["B","KB","MB","GB","TB"],o=Math.floor(Math.log(t)/Math.log(n));return`${parseFloat((t/Math.pow(n,o)).toFixed(1))}${i[o]}`}getMemoryWarnings(t){let n=[];if(this.memoryLimit){let[a,u]=this.memoryLimit,l=t.rss/a*100,c=a-t.rss,d=u==="container"?"container limit":"node memory",p=u==="container"?"OOM kill":"node memory exhaustion";l>=90?n.push(`\u{1F6A8} K8S ${u==="container"?"OOM":"MEMORY"} RISK: RSS at ${l.toFixed(1)}% of ${d} - Only ${this.formatBytes(c)} remaining before ${p}`):l>=this.threshold&&n.push(`\u26A0\uFE0F K8S MEMORY PRESSURE: RSS at ${l.toFixed(1)}% of ${d} - ${this.formatBytes(c)} remaining`)}let i=t0d.default.getHeapStatistics(),o=i.total_available_size/(1024*1024);o<50?n.push(`\u{1F6A8} V8 CRITICAL: Only ${this.formatBytes(i.total_available_size)} heap space available`):o<100&&n.push(`\u26A0\uFE0F V8 WARNING: Only ${this.formatBytes(i.total_available_size)} heap space available`);let s=(t.heapTotal-t.heapUsed)/t.heapTotal*100;return s>60&&n.push(`\u26A0\uFE0F High heap fragmentation: ${s.toFixed(1)}% - consider 
restart`),{warnings:n,heapStats:i}}formatMemoryUsage(t){let n=t.heapUsed/t.heapTotal*100,i=`RSS: ${this.formatBytes(t.rss)}`;if(this.memoryLimit){let[o,s]=this.memoryLimit,a=t.rss/o*100;i+=` (${a.toFixed(1)}% of ${s})`}return i+=` | Heap: ${this.formatBytes(t.heapUsed)}/${this.formatBytes(t.heapTotal)} (${n.toFixed(1)}% used)`,i+=` | External: ${this.formatBytes(t.external)}`,i}analyzeDeltaConcerns(t,n,i,o){let s=[];if(t>500*1024*1024)if(this.memoryLimit){let[a]=this.memoryLimit,u=o/a*100,l=t/a*100,c=u+l;c>95?s.push(`\u{1F6A8} CRITICAL: RSS +${l.toFixed(1)}% (${this.formatBytes(t)}) \u2192 ${c.toFixed(1)}% of ${this.memoryLimit[1]} limit - OOM risk if pattern continues`):c>85?s.push(`\u26A0\uFE0F HIGH RISK: RSS +${l.toFixed(1)}% (${this.formatBytes(t)}) \u2192 ${c.toFixed(1)}% of ${this.memoryLimit[1]} limit - Monitor closely`):u>70&&s.push(`\u26A0\uFE0F LARGE INCREASE while at ${u.toFixed(1)}%: +${this.formatBytes(t)} - Watch for continued growth`)}else s.push(`\u26A0\uFE0F LARGE RSS INCREASE: +${this.formatBytes(t)} - Cannot assess OOM risk (container limit unknown)`);return n>200*1024*1024&&s.push(`\u26A0\uFE0F LARGE HEAP GROWTH: +${this.formatBytes(n)} - Check for memory leaks`),i>200*1024*1024&&s.push(`\u26A0\uFE0F LARGE EXTERNAL MEMORY: +${this.formatBytes(i)} - Check buffers`),s}getMemoryReport(t,n){let{warnings:i,heapStats:o}=this.getMemoryWarnings(t),s=this.formatMemoryUsage(t);if(this.gcTracker){let{warnings:a,statistics:u}=this.gcTracker.analyzePatterns();a.length&&a.push(`~~ GC Warnings ~~: ${a.join(" | ")}`),u.length&&(s+=` | ~~ GC Stats ~~: ${u.join(" | ")}`)}for(let a of n??[])i.push(a);return{summary:s,warnings:i,heapStats:o,memUsage:t}}logMemorySnapshot(t,n,i,o=[],s=!1){if(this.suppress||s)return;let a=this.getMemoryUsage(t);if(a)try{let u=this.getMemoryReport(a,o),l=this.getLogContextWithMetrics(t,u,i);this.logMemory(l,n,u.summary,u.warnings);return}catch(u){Ye("PERFORMANCE").errorContext(t,`${this.logTag} Error logging memory snapshot: 
${u}`);return}}logDeltaMetrics(t,n,i,o,s=!1){if(this.suppress||!n||s)return;let a=process.memoryUsage(),u={rssDelta:a.rss-n.rss,heapDelta:a.heapUsed-n.heapUsed,externalDelta:a.external-n.external},l=this.analyzeDeltaConcerns(u.rssDelta,u.heapDelta,u.externalDelta,a.rss),c=this.getMemoryReport(a,l);c.deltaData=u;let d=this.getLogContextWithMetrics(t,c,i);this.logMemory(d,o,c.summary,c.warnings)}};var yT=Ye("CONNECTION"),QFr=r=>t=>{yT.assert(`Got error in cannot-reject function "${r}":`,t)},gX=class{constructor(t,n,i,o){this.userContext=t;this.teamInfo=n;this.logContext=i;this.maximumConnections=o;this.slots=[],this.shutdown=!1,this.canceled=!1,qp("CONNECTION",o>0,this.logContext,"At least one connection is required!")}slots;shutdown;canceled;queued=[];memoryMonitor=new lj;totalExecutions=0;incrementExecutions(t){yT.infoContext(this.logContext,`Incrementing executions for ${t}. Total executions: ${this.totalExecutions}`),this.totalExecutions++}get executions(){return this.totalExecutions}setup=async()=>{let t="[ConnectionManager.setup()]";yT.infoContext(this.logContext,"Setting up",this.maximumConnections,"connections - for large values this may take some time");let n="ConnectionManager.setup()";await this.memoryMonitor.measureOperationAsync(this.logContext,`${t}: Before setup initialization`,`${t}: Setup completed`,n,async()=>{for(let i=0;i<this.maximumConnections;i++){let o=this.NewSlotMember(i,ou());i===0&&(yT.infoContext(this.logContext,`${t}: first slot awaiting initialization`),await this.memoryMonitor.measureOperationAsync(this.logContext,`${t}: Before first connection initialization`,`${t}: First connection initialization completed`,n,async()=>{await o.isInitialized()}),yT.infoContext(this.logContext,`${t}: first slot done initializing`)),yT.infoContext(this.logContext,`${t}: initializing slot ${o.slotID}`),o.isInitialized().then(()=>{yT.infoContext(this.logContext,`${t}: slot ${o.slotID} initialized, marking 
unused`,o.connectionID),this.handleNextQueued(o.markUsed()).catch(QFr("handleNextQueued"))},s=>{yT.errorContext(this.logContext,`${t}: slot ${o.slotID} failed to initialize, error: `,o.connectionID,s),o.markErrored()}),this.slots.push(o)}})};teardown=async()=>{this.memoryMonitor.cleanup(),this.shutdown=!0,await $4(this.slots.map(t=>t.teardown()))};getFirstAvailableSlot=()=>this.slots.find(t=>t.state==="unused")?.markUsed()??null;handleNextQueued=async t=>{let n=this.queued.pop();if(n===void 0){yT.debug("connection pool queue empty, freeing slot"),t.markUnused();return}await this.handleItem(t,n).catch(QFr("handleItem"))};handleItem=async(t,n)=>{let{slotID:i}=t;yT.debugContext(this.logContext,"using connection: ",i);let{callback:o,promise:s,resolve:a,reject:u}=n;try{if(this.shutdown)throw new Xe("Can't run on shutdown");if(this.canceled)return u(new Jh(`Connection was canceled, aborting on slot ${i}`)),this.handleNextQueued(t).catch(QFr("handleNextQueued")),s;await t.acquire();let l=await t.unwrap(),c=await o(l);a(c)}catch(l){u(l)}finally{await t.release()}return this.handleNextQueued(t).catch(QFr("handleNextQueued")),s};withConnection=async t=>{let n="[ConnectionManager.withConnection()]";if(this.shutdown)throw yT.errorContext(this.logContext,`${n}: can't run on shutdown`),new Xe("Can't run on shutdown");if(this.canceled)throw yT.errorContext(this.logContext,`${n}: connection canceled before item was handled or queued`),new Jh("Connection canceled before item was handled or queued");let i={...tI(),callback:t};yT.infoContext(this.logContext,`${n}: attempting to acquire a connection slot`);let o=this.getFirstAvailableSlot();return o!==null?(yT.infoContext(this.logContext,`${n}: slot ${o.slotID} found ${o.state}`),await this.handleItem(o,i)):(yT.infoContext(this.logContext,`${n}:`,this.slots.length,"connections in use, couldn't find available slot, enqueueing item"),this.queued.push(i),this.queued.length>5&&this.memoryMonitor.logMemorySnapshot(this.logContext,`${n}: 
Queued connection pool size: ${this.queued.length}`,"withConnection")),i.promise};getConnection=()=>({execute:n=>this.withConnection(i=>i.execute(n)),teardown:()=>Promise.resolve(),logContext:this.logContext});async cancel(){try{if(yT.infoContext(this.logContext,"Begin canceling all connections in the ConnectionManager"),this.canceled)return yT.infoContext(this.logContext,"ConnectionManager already canceled"),Promise.resolve();this.canceled=!0;let t=this.slots.map(n=>n.cancel());await $4(t)}catch(t){throw yT.errorContext(this.logContext,"Error canceling all connections in the ConnectionManager",t),t}finally{yT.infoContext(this.logContext,"Completed canceling all connections in the ConnectionManager")}}showQueued=()=>this.queued};var Xot=Ye("CONNECTION"),mX=class{constructor(t,n,i,o,s,a){this.slotID=t;this.connectionID=n;this.userContext=i;this.teamInfoAndFirebase=o;this.logContext=s;this.managerQueryStats=a;this.initPromise=this.initialize(),this.firstInitPromise=this.initPromise.then(()=>this.setup())}handle=null;state="initializing";connection=null;firstInitPromise;initPromise;useCount=0;canceled=!1;async initialize(){this.handle=await this.createConnection()}async postReInitialization(){}reinitialize=async()=>{let t=this.createConnection();return this.initPromise=t.then(n=>{this.handle=n}),await this.postReInitialization(),t};async acquire(t){if(this.state!=="in use")throw new Jg(`Expected slot to be in use, but it was ${this.state}`);if(!t&&this.connection)throw qp("CONNECTION",!1,this.logContext,`Cannot acquire in-use connection ${this.connectionID}`),new Jg(`Cannot acquire in-use connection ${this.connectionID}`);if(this.canceled)throw new Jh("Attempting to acquire canceled connection slot entry",this.connectionID);if(this.connection=await this.newConnection(),this.useCount++,this.canceled)throw this.connection=null,new Jh("Attempting to acquire canceled connection slot entry",this.connectionID)}async release(){this.connection=null}async 
unwrap(){if(this.state!=="in use")throw new Jg(`Expected slot to be in use, but it was ${this.state}`);return await this.assertConnected(),this.connection}async isInitialized(){return this.firstInitPromise}async assertConnected(){if(!this.connection)throw qp("CONNECTION",!1,this.logContext,"Connection was not acquired before connection check!"),new Jg("Connection was not acquired before connection check!");await this.initPromise}async execute(t){return(await c0("Waiting for connection",this.logContext,"CONNECTION",()=>this.unwrap())).execute(t)}async cancel(){try{if(Xot.infoContext(this.logContext,"Begin canceling connection slot entry",this.connectionID),this.canceled){Xot.infoContext(this.logContext,"Connection slot entry already canceled",this.connectionID);return}if(this.canceled=!0,this.connection){let t=this.connection;this.connection=null,await t.cancel()}else Xot.infoContext(this.logContext,"Canceling connection slot entry",this.connectionID,"no-op")}catch(t){throw Xot.errorContext(this.logContext,"Error canceling connection slot entry",this.connectionID,t),t}finally{Xot.infoContext(this.logContext,"Completed canceling managed connection",this.connectionID)}}async teardown(){Xot.infoContext(this.logContext,`[ConnectionManagerSlotMember]: Tearing down ${this.userContext.kind} connection`,this.connectionID),await this.initPromise.catch(()=>{}),await this.firstInitPromise.catch(()=>{}),this.handle&&await(this.connection||this.newConnection()).teardown()}markUsed(){return this.state="in use",this}markUnused(){return this.state="unused",this}markErrored(){return this.state="error",this}};var aDt=Ye("CONNECTION"),Z2s=async(r,t,n)=>{let{rootRepository:i,teamInfo:o,teamInfo:{fbTeamID:s,fbUserID:a}}=t,{environmentID:u}=r;if(!u)throw new Xe(`No environmentID provided for org ${s} by user ${a}`);if(r.authenticator!=="OAuth")throw new uV("Cannot reset oauth connection on a non-oauth error!","OAuth");let l=await 
GFr(i.org(s),o,u,QA.Values.snowflake),c=xu().featureFlags,d=await Y2s(n,c);if(!d.refresh_token)throw new uV("Missing or invalid OAuth refresh token, please reauthenticate.","OAuth");let p=d.refresh_token,h={access_token:await Xpd(l,p,n),refresh_token:p},g=await nT(),b={orgId:s,userId:a,workspaceId:u.toString()};await oSl(g,b,new cd(h),c)},X2s=class extends mX{defaultWarehouse=null;currentWarehouse=null;async createConnection(){return FFr(this.userContext,this.logContext,xu().featureFlags)}async updateFromResult(t){t.warehouse&&(this.currentWarehouse=t.warehouse)}async setup(){await this.connectAndValidate(),await this.setDefaultWarehouse()}async assertConnected(){if(await super.assertConnected(),this.handle===null)throw qp("CONNECTION",!1,this.logContext,"Connection was null after initPromise!"),new Jg("Connection was null after initPromise!")}async postReInitialization(){await this.connectAndValidate(),await this.acquire(!0)}connect(){return NPe(t=>this.handle.connect(t))}async resetOauth(){await Z2s(this.userContext,this.teamInfoAndFirebase,this.logContext)}async connectAndValidate(){await this.initPromise;try{await this.connect()}catch(t){if(this.userContext.authenticator==="OAuth"&&Jot(t)){await this.resetOauth();try{this.initPromise=this.initialize(),await this.initPromise,await this.connect()}catch(n){if(aDt.errorContext(this.logContext,"Just refreshed oauth tokens, shouldn't have gotten here unless another process did it at the same time: ",n),Jot(n))this.initPromise=this.initialize(),await this.initPromise,await this.connect();else throw n}}else throw t}this.currentWarehouse&&this.currentWarehouse!==this.defaultWarehouse&&await this.setWarehouse(this.currentWarehouse)}async setDefaultWarehouse(){let n=(await $2s(this.handle,{sqlText:"SELECT 1 as coalesce_init"},this.logContext,async()=>{throw new Jg("Cannot reset connection")},2)).stmt;if(!n)throw qp("CONNECTION",!!n,this.logContext,"Snowflake did not return a statement!"),new Jg("Snowflake did not return a 
statement!");if(this.defaultWarehouse=n.getSessionState()?.getCurrentWarehouse?.()||null,!this.defaultWarehouse)throw aDt.errorContext(this.logContext,"Could not identify the initial warehouse - possibly invalid credentials?"),new mur(this.userContext.warehouse)}async teardown(){aDt.infoContext(this.logContext,"[SnowflakeConnectionManagerSlotMember]: Tearing down snowflake connection",this.connectionID),await this.initPromise.catch(()=>{}),await this.firstInitPromise.catch(()=>{}),await NPe(t=>this.handle?this.handle.destroy(t):t())}async acquire(t){if(!t&&this.connection)throw qp("CONNECTION",!1,this.logContext,`Cannot acquire in-use connection ${this.connectionID}`),new Jg(`Cannot acquire in-use connection ${this.connectionID}`);if(this.canceled)throw new Jh("Attempting to acquire canceled connection slot entry",this.connectionID);if(this.connection=new $ot(this.handle,this.logContext,this.connectionID,this.useCount,this),this.useCount++,this.canceled)throw this.connection=null,new Jh("Attempting to acquire canceled connection slot entry",this.connectionID)}newConnection(){return new $ot(this.handle,this.logContext,this.connectionID,this.useCount,this)}setWarehouse=async t=>{await this.assertConnected(),await this.execute({sqlText:`USE WAREHOUSE "${t}"`})};async release(){this.currentWarehouse&&this.currentWarehouse!==this.defaultWarehouse&&await this.setWarehouse(this.defaultWarehouse),this.currentWarehouse=null,await super.release()}},jFr=class r extends gX{static async New(t,n,i,o){let s=new r(t,n,i,o);return await s.setup(),s}NewSlotMember(t,n){return new X2s(t,n,{...this.userContext},this.teamInfo,this.logContext,this)}async test(){let t="SnowflakeConnectionManager.test:",n=Qb(this.userContext,"authenticator","kind","role","user","warehouse");aDt.appContext(this.logContext,t,"testing connection",n),await this.withConnection(async i=>i.execute({sqlText:fN})),aDt.appContext(this.logContext,t,"connection successfully established",n)}};var 
r0d=(r,t)=>$h({snowflake:()=>Jot(r),databricks:()=>DZe(r),fabric:()=>!1,bigquery:()=>!1},t,"CheckForDeadAccessTokenFromPlatform"),n0d=async(r,t,n)=>GD(r,{snowflake:async i=>await Z2s(i,t,n),databricks:async i=>await eEs(i,t,n),fabric:async()=>Promise.reject(new Xe(`Invalid platform kind: ${r.kind} for ResetOauthConnectionForPlatform`)),bigquery:async()=>Promise.reject(new Xe(`Invalid platform kind: ${r.kind} for ResetOauthConnectionForPlatform`))}),GFr=async(r,t,n,i)=>{let o;try{o=await r.workspace(n).fetch()}catch(l){throw Ug(l)&&l instanceof ru?new Xe(`Environment ${n} doesn't exist in org ${t.fbTeamID}`):l}let s={orgId:t.fbTeamID,userId:t.fbUserID,workspaceId:n.toString()},a=await nT(),u=await $h({databricks:()=>$Cl(a,s),snowflake:()=>JCl(a,s,xu().featureFlags),fabric:()=>Promise.resolve(null),bigquery:()=>Promise.resolve(null)},i,"GetOAuthDetailsFromEnvironment");if(u===null)throw new Xe(`Unable to get OAuth credentials for environment ${n} in org ${t.fbTeamID}!`);return{oauthClientID:u.sensitiveValue.clientId,oauthClientSecret:u.sensitiveValue.clientSecret,connectionAccount:o.connectionAccount,accessUrl:o.accessUrl||null}};var uDt=Ye("CONNECTION"),eEs=async(r,t,n)=>{let{rootRepository:i,teamInfo:o,teamInfo:{fbTeamID:s,fbUserID:a}}=t,{environmentID:u}=r;if(!u)throw new Xe(`No environmentID provided for org ${s} by user ${a}`);if(r.authenticator!=="OAuthU2M")throw new X1e(`Cannot reset oauth connection on a non-oauth connection type: ${r.authenticator}`,"OAuthU2M");let l=await GFr(i.org(s),o,u,QA.Values.databricks),c=await cSl(n);if(!c.refresh_token)throw new X1e("Missing or invalid OAuth refresh token, please reauthenticate.","OAuthU2M");let d=c.refresh_token,p=await lSl(l,d,n),f={access_token:p,refresh_token:d},h=await nT(),g={orgId:s,userId:a,workspaceId:u.toString()};return await iSl(h,g,new cd(f)),await h.Set(oK,g,new cd(f)),p},tEs=class extends mX{name="databricks";async createConnection(){let t=await Cwr(this.userContext);return hts(t)}async 
updateFromResult(t){}async setup(){await this.connectAndValidate()}async assertConnected(){if(await super.assertConnected(),this.handle===null)throw qp("CONNECTION",!1,this.logContext,"Connection was null after initPromise!"),new Jg("Connection was null after initPromise!")}async postReInitialization(){await this.connectAndValidate(),await this.acquire(!0)}async connect(){if(!this.handle){uDt.alertContext(this.logContext,"No handle found in DatabricksConnectionManagerSlotMember.connect");return}return this.handle.session||(this.handle.session=await this.handle.client.openSession(),this.handle.sessionOptions={}),this.handle.session}async resetOauth(){let t=await eEs(this.userContext,this.teamInfoAndFirebase,this.logContext);this.userContext.accessToken=t}async connectAndValidate(){await this.initPromise;try{await this.connect()}catch(t){if(this.userContext.authenticator==="OAuthU2M"&&DZe(t)){await this.resetOauth();try{this.initPromise=this.initialize(),await this.initPromise,await this.connect()}catch(n){if(uDt.errorContext(this.logContext,"Just refreshed oauth tokens, shouldn't have gotten here unless another process did it at the same time: ",n),DZe(n))this.initPromise=this.initialize(),await this.initPromise,await this.connect();else throw n}}else throw t}}async teardown(){uDt.infoContext(this.logContext,`Tearing down ${this.name} connection`,this.connectionID),await this.initPromise.catch(()=>{}),await this.firstInitPromise.catch(()=>{}),this.handle&&await Ats(this.handle,this.logContext)}async acquire(t){if(!t&&this.connection)throw qp("CONNECTION",!1,this.logContext,`Cannot acquire in-use connection ${this.connectionID}`),new Jg(`Cannot acquire in-use connection ${this.connectionID}`);if(this.canceled)throw new Jh("Attempting to acquire canceled connection slot entry",this.connectionID);if(this.connection=new BZe(this.handle,this.logContext,this.connectionID,this.useCount,this),this.useCount++,this.canceled)throw this.connection=null,new Jh("Attempting to 
acquire canceled connection slot entry",this.connectionID)}newConnection(){return new BZe(this.handle,this.logContext,this.connectionID,this.useCount,this)}},HFr=class r extends gX{static async New(t,n,i,o){let s=new r(t,n,i,o);return await s.setup(),s}NewSlotMember(t,n){return new tEs(t,n,{...this.userContext},this.teamInfo,this.logContext,this)}async test(){let t="DatabricksConnectionManager.test:",n=Qb(this.userContext,"authenticator","kind","role","user","warehouse");uDt.appContext(this.logContext,t,"testing connection",n),await this.withConnection(async i=>i.execute({statement:fN})),uDt.appContext(this.logContext,t,"connection successfully established",n)}};var WFr=async(r,t,n,i,o)=>{let s={kind:"bigquery",authenticator:r,environmentID:i},a=await nT(),u={orgId:t,userId:n,workspaceId:i.toString()};if(r==="OAuth"){let l=await ZCl(a,u);if(!l?.sensitiveValue.clientID||!l?.sensitiveValue.clientSecret)throw new ru("BigQuery OAuth app credentials (clientID/clientSecret) not found");let c=await rSl(u);if(!c?.sensitiveValue.refresh_token)throw new ru("BigQuery OAuth refresh token not found. 
Please re-authenticate.");s={...s,clientID:l.sensitiveValue.clientID,secret:l.sensitiveValue.clientSecret,refreshToken:c.sensitiveValue.refresh_token}}else if(r==="ServiceAccount"){let l=o;if(l||(l=(await XCl(a,u))?.sensitiveValue?.serviceAccountKey),!l)throw new ru("No BigQuery credentials found!");s={...s,serviceAccountKey:l}}else throw new no(`connection type: ${r} is not valid for BigQuery`);return s};var rEs=require("crypto");function DPe(r,t){try{let i=(0,rEs.createPrivateKey)({key:r.sensitiveValue,passphrase:t?.sensitiveValue}).export({format:"pem",type:"pkcs8"});if(!i||typeof i!="string")throw new Xe("Unknown error decrypting private key");if(t!==void 0)try{throw(0,rEs.createPrivateKey)({key:r.sensitiveValue}),new no("Private key is not encrypted but a passphrase was provided")}catch(o){if(Ug(o))throw o;if(!o||typeof o!="object"||"code"in o&&o.code!=="ERR_OSSL_CRYPTO_INTERRUPTED_OR_CANCELLED")throw new Xe("Unknown error decrypting private key",o?.message)}return new cd(i)}catch(n){if(Ug(n))throw n;if(n&&typeof n=="object"&&"code"in n)switch(n.code){case"ERR_OSSL_UNSUPPORTED":throw new no("Unable to parse private key");case"ERR_OSSL_BAD_DECRYPT":throw new no("Unable to decrypt private key using the provided passphrase");case"ERR_OSSL_CRYPTO_INTERRUPTED_OR_CANCELLED":throw new no("Unable to decrypt private key without a passphrase")}throw new Xe("Unknown error decrypting private key",n?.message)}}var i0d=r=>{let t=r.passphrase===void 0||r.passphrase===null?void 0:new cd(r.passphrase);try{DPe(new cd(r.key),t)}catch(n){if(!Ug(n)||n.message!=="Unable to decrypt private key without a passphrase")throw new FD(n instanceof Error?n.message:"Unknown error validating private key")}};var skA=async(r,t,n)=>{let i=n.isFeatureEnabled("enableSnowflakeSecretsV2"),o=await PZe(r,t,_Ge,i);return o&&i0d(o.sensitiveValue),o},akA=async(r,t,n,i,o,s)=>{let a=n.toString();if(bqe(i)){if(!sht(o))throw new no(`Got a snowflake user credentials with a ${o.connectionKind} 
userConnection`);if(i.snowflakeAuthType==="KeyPair"&&(i.snowflakeKeyPairKey===void 0||i.snowflakeKeyPairPass===void 0)){let u=await nT(),l=await skA(u,{orgId:r,userId:t,workspaceId:a},s);if(!l){if(i.snowflakeKeyPairKey===void 0)throw new no("No Snowflake private key found!");return}i.snowflakeKeyPairKey===void 0&&(i.snowflakeKeyPairKey=l.sensitiveValue.key),!i.snowflakeUsername&&o.connectionDetails?.user&&(i.snowflakeUsername=o.connectionDetails.user),i.snowflakeKeyPairPass===void 0&&l.sensitiveValue.passphrase!==void 0&&(i.snowflakeKeyPairPass=l.sensitiveValue.passphrase)}}},ukA=async(r,t,n,i,o,s)=>{let a={orgId:r,userId:t,workspaceId:n.toString()};await u0e(i,{databricks:async u=>{if(u.databricksAuthType==="Token")return;if(!aZt(o))throw new no(`Got a databricks auth credentials with a ${o.connectionKind??"snowflake"} userConnection`);let l=await nT();if(u.databricksAuthType==="OAuthM2M"){let c=await l.Get(YOe,a);if(!c)throw new no("No Databricks OAuth M2M credentials found!");u.clientSecret=c.sensitiveValue.clientSecret,u.clientID=c.sensitiveValue.clientID}else if(u.databricksAuthType==="Cloud"){let c=await l.Get(vGe,a);if(!c)throw new no("No Databricks Cloud credentials found!");u.databricksAuthType="Token",u.token=c.sensitiveValue.token}else if(u.databricksAuthType==="OAuthU2M"){let c=await l.Get(oK,a);if(!c)throw new no("No Databricks credentials found!");!u.path&&o.connectionDetails.path&&(u.path=o.connectionDetails.path),u.accessToken=c.sensitiveValue.access_token}!u.path&&o.connectionDetails.path&&(u.path=o.connectionDetails.path)},snowflake:async u=>{if(u.snowflakeAuthType!=="Cloud")return;if(!sht(o))throw new no(`Got a snowflake user credentials with a ${o.connectionKind} userConnection`);let l=await nT(),c=await eSl(l,a,s.isFeatureEnabled("enableSnowflakeSecretsV2"));if(!c)throw new no("No Snowflake credentials 
found!");!u.snowflakeUsername&&o.connectionDetails?.user&&(u.snowflakeUsername=o.connectionDetails.user),!u.snowflakeWarehouse&&o.connectionDetails?.warehouse&&(u.snowflakeWarehouse=o.connectionDetails.warehouse),!u.snowflakeRole&&o.connectionDetails?.role&&(u.snowflakeRole=o.connectionDetails.role),u.snowflakeAuthType="Basic",u.snowflakePassword=c.sensitiveValue.password},bigquery:async u=>{if(!uZt(o))throw new no(`Got a bigquery user credentials with a ${o.connectionKind} userConnection`);if(u.bigQueryAuthType!=="ServiceAccount")return;let{serviceAccountKey:l}=await WFr(o.connectionType,r,t,n,u.serviceAccountKey);Object.assign(u,{serviceAccountKey:l})},fabric:async()=>{}},"CloudCredsToBasic")},BPe=async(r,t,n,i,o,s)=>{await ukA(r,t,n,i,o,s),await akA(r,t,n,i,o,s)};var o0d=require("crypto");var ckA=Ye("PLAN"),VFr=class r{constructor(t,n,i,o){this.client=t;this.environmentID=n;this.cachedRefs=i;this.logContext=o}async getCachedRef(t,n){let i=this.hashRenderPlan(n);return this.cachedRefs.get(`${t}:${i}`)?.refData}updateCache(t,n,i){let o=`${t}:${i}`;this.cachedRefs.has(o)||this.cachedRefs.set(o,{nodeID:t,hash:i,requiresSync:!0,refData:n})}async syncCache(){let t=Array.from(this.cachedRefs.values()).filter(({requiresSync:n})=>n);return t.length===0?0:(await this.client.postRenderedRefs({renderedRefs:t.map(({nodeID:n,hash:i,refData:o})=>({nodeID:n,hash:i,ref:JSON.stringify(o),environmentID:this.environmentID}))}),t.length)}static stringifyNodeMetadata(t){let{operationName:n,deployStrategy:i,parameters:o}=t,s=[n,i];return o&&s.push(JSON.stringify(o)),s.join("|")}static async fetchCachedRefs(t,n,i){let o=await t.getRenderedRefs({queries:{environmentID:n}}),s=new Map;for(let a of o.renderedRefs){let u=r.parseRefUsage(a.ref,i);u&&s.set(`${a.nodeID}:${a.hash}`,{nodeID:a.nodeID,hash:a.hash,refData:u,requiresSync:!1})}return s}static parseRefUsage(t,n){try{return JSON.parse(t)}catch(i){ckA.errorContext(n,"parseRefUsage",`Failed to parse ref usage: 
${t}`,{error:i});return}}};var cDt=class r extends VFr{hashRenderPlan(t){let{templateString:n,macrosString:i,nodeMetadata:o,runTimeParameters:s={},packages:a}=t,u=(0,o0d.createHash)("sha256");return u.update(n),u.update(i),u.update(r.stringifyNodeMetadata(o)),u.update(JSON.stringify(s)),u.update(JSON.stringify(a)),u.digest("hex")}};var lkA=Ye("PLAN"),dkA=async(r,t,n)=>{let i=await cDt.fetchCachedRefs(t,r,n);return new cDt(t,r,i,n)};async function KFr(r,t){try{let n=ERe();return await dkA(r,n,t)}catch(n){lkA.warnContext(t,"getRenderCache","Failed to get render cache",{error:n});return}}var AAd=Tt(DEs());var gAd=Tt(KQe());var V1d=["DATE","DATETIME","TIMESTAMP","TIME"],cFA=M.object({table_catalog:M.string().optional(),table_schema:M.string().optional(),table_name:M.string(),column_name:M.string(),ordinal_position:M.union([M.number(),M.string()]).nullable().optional(),is_nullable:M.union([M.literal("YES"),M.literal("NO")]),data_type:M.string(),column_default:M.string().nullable().optional(),comment:M.string().nullable().optional()}),yzr=class extends qV{validator=br("BigQueryInformationSchemaColumnsType",cFA);query(t){let n=rA([t.database,t.schema],"bigquery");return`SELECT
10336
10336
  c.table_catalog,