@dqcai/sqlite 3.2.3 → 3.2.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/index.mjs CHANGED
@@ -1,14 +1,14 @@
1
- const R=(...p)=>p.map(e=>typeof e=="object"?JSON.stringify(e,null,2):String(e)).join(" ");class q{constructor(e){this.transports=new Map,this.transportFactory=null,this.config=e,this.sessionId=e.sessionId||this.generateSessionId()}generateSessionId(){return`session_${Date.now()}_${Math.random().toString(36).substr(2,9)}`}getSessionId(){return this.sessionId}renewSession(){return this.sessionId=this.generateSessionId(),this.sessionId}setTransportFactory(e){this.transportFactory=e}addTransport(e){this.transports.set(e.name,e)}removeTransport(e){return this.transports.delete(e)}getTransport(e){return this.transports.get(e)}listTransports(){return Array.from(this.transports.keys())}setModuleConfig(e,t){this.config.modules[e]=t}getModuleConfig(e){return this.config.modules[e]}setGlobalEnabled(e){this.config.enabled=e}isGlobalEnabled(){return this.config.enabled}setDefaultLevel(e){this.config.defaultLevel=e}getConfig(){return JSON.parse(JSON.stringify(this.config))}setMetadata(e){this.config.metadata=Object.assign(Object.assign({},this.config.metadata),e)}shouldLog(e,t){if(!this.config.enabled)return!1;const a=this.config.modules[e];return a?a.enabled&&a.levels.includes(t):this.isLevelEnabled(t,this.config.defaultLevel)}isLevelEnabled(e,t){const a=["trace","debug","info","warn","error"],s=a.indexOf(e),r=a.indexOf(t);return s===-1||r===-1?!1:s>=r}getTransportsForModule(e){const t=this.config.modules[e];return((t==null?void 0:t.transports)||["console"]).map(a=>this.transports.get(a)).filter(a=>a!==void 0)}async sendToTransports(e,t){if(t.length===0)return;const a=t.map(async s=>{try{await s.log(e)}catch(r){console.error(`[UniversalLogger] Transport ${s.name} failed:`,r)}});await Promise.allSettled(a)}async log(e,t,a,s){if(!this.shouldLog(e,t))return;const r={timestamp:new Date().toISOString(),level:t,module:e,message:a,data:s,metadata:this.config.metadata,sessionId:this.sessionId},i=this.getTransportsForModule(e);await this.sendToTransports(r,i)}async trace(e,...t){const 
a=R(...t);await this.log(e,"trace",a)}async debug(e,...t){const a=R(...t);await this.log(e,"debug",a)}async info(e,...t){const a=R(...t);await this.log(e,"info",a)}async warn(e,...t){const a=R(...t);await this.log(e,"warn",a)}async error(e,...t){const a=R(...t);await this.log(e,"error",a)}async flush(){const e=Array.from(this.transports.values()).filter(t=>typeof t.flush=="function").map(t=>t.flush());await Promise.allSettled(e)}async cleanup(){const e=Array.from(this.transports.values()).filter(t=>typeof t.cleanup=="function").map(t=>t.cleanup());await Promise.allSettled(e)}createModuleLogger(e){return new V(e,this)}}class V{constructor(e,t){this.module=e,this.logger=t}async flush(){await this.logger.flush()}async trace(...e){const t=R(...e);await this.logger.trace(this.module,t)}async debug(...e){const t=R(...e);await this.logger.debug(this.module,t)}async info(...e){const t=R(...e);await this.logger.info(this.module,t)}async warn(...e){const t=R(...e);await this.logger.warn(this.module,t)}async error(...e){const t=R(...e);await this.logger.error(this.module,t)}getModuleName(){return this.module}}class Q{constructor(e={}){this.name="console",this.config=Object.assign({colorize:!0,timestamp:!0,prefix:""},e)}log(e){const t=[];this.config.timestamp&&t.push(`[${e.timestamp}]`),this.config.prefix&&t.push(`[${this.config.prefix}]`),t.push(`[${e.module}]`,`[${e.level.toUpperCase()}]`,e.message);const a=t.join(" "),s=e.data?[e.data]:[];this.config.colorize?this.logWithColor(e.level,a,s):this.logWithoutColor(e.level,a,s)}logWithColor(e,t,a){const s={trace:"#999999",debug:"#0066cc",info:"#00cc66",warn:"#ff9900",error:"#cc0000"}[e];console.log(`%c${t}`,`color: ${s}`,...a)}logWithoutColor(e,t,a){switch(e){case"error":console.error(t,...a);break;case"warn":console.warn(t,...a);break;case"info":console.info(t,...a);break;default:console.log(t,...a)}}}class k{constructor(){this.config={enabled:!0,defaultLevel:"info",modules:{}}}setEnabled(e){return 
this.config.enabled=e,this}setDefaultLevel(e){return this.config.defaultLevel=e,this}setSessionId(e){return this.config.sessionId=e,this}setMetadata(e){return this.config.metadata=e,this}addModule(e,t=!0,a=["info","warn","error"],s=["console"]){return this.config.modules[e]={enabled:t,levels:a,transports:s},this}build(){return JSON.parse(JSON.stringify(this.config))}}class F{static createDevelopmentConfig(){return new k().setEnabled(!0).setDefaultLevel("debug").addModule("DatabaseManager",!0,["debug","info","warn","error"],["console"]).addModule("ApiClient",!0,["info","warn","error"],["console"]).addModule("Cache",!0,["debug","info","warn","error"],["console"]).build()}static createProductionConfig(){return new k().setEnabled(!0).setDefaultLevel("warn").addModule("DatabaseManager",!0,["error"],["console"]).addModule("ApiClient",!0,["warn","error"],["console"]).addModule("Cache",!1,[],[]).build()}static createCustomConfig(e=!1){return e?F.createDevelopmentConfig():F.createProductionConfig()}}const K=p=>{const e=p||F.createDevelopmentConfig(),t=new q(e);return t.addTransport(new Q),t},G={APP:"App",CONFIG:"Config",AUTH:"Auth",API:"API",DATABASE:"Database",MIDDLEWARE:"Middleware",UTILS:"Utils",SECURITY:"Security",VALIDATION:"Validation",CACHE:"Cache",FILE_SYSTEM:"FileSystem",NETWORK:"Network",SCHEDULER:"Scheduler",ERROR_HANDLER:"ErrorHandler"};class W{constructor(e){this.moduleName=e}trace(e,...t){C.getInstance().trace(this.moduleName,e,...t)}debug(e,...t){C.getInstance().debug(this.moduleName,e,...t)}info(e,...t){C.getInstance().info(this.moduleName,e,...t)}warn(e,...t){C.getInstance().warn(this.moduleName,e,...t)}error(e,...t){C.getInstance().error(this.moduleName,e,...t)}}let C=class f{static createDefaultConfig(){return new k().setEnabled(!0).setDefaultLevel("warn").build()}static initialize(e){if(f.isInitializing||f.instance&&!e)return f.instance;f.isInitializing=!0;const t=e||f.createDefaultConfig();return 
f.currentConfig=t,f.instance=K(t),f.isInitializing=!1,f.instance}static getInstance(){return f.instance?f.instance:f.initialize()}static updateConfiguration(e){const t=f.currentConfig;t&&t.enabled===e.enabled&&t.defaultLevel===e.defaultLevel&&JSON.stringify(t.modules)===JSON.stringify(e.modules)||(f.currentConfig=e,f.instance=K(e))}static setEnabled(e){f.currentConfig&&(f.currentConfig.enabled=e,f.updateConfiguration(f.currentConfig))}static enableModule(e,t,a){f.currentConfig&&f.currentConfig.modules&&(f.currentConfig.modules[e]={enabled:!0,levels:t||["debug","info","warn","error"],appenders:a||["console"]},f.updateConfiguration(f.currentConfig))}static disableModule(e){f.currentConfig&&f.currentConfig.modules&&(f.currentConfig.modules[e]={enabled:!1},f.updateConfiguration(f.currentConfig))}static createDebugConfig(){return new k().setEnabled(!0).setDefaultLevel("trace").build()}static createProductionConfig(){return new k().setEnabled(!0).setDefaultLevel("warn").build()}static reset(){return f.initialize()}static getActiveProxyModules(){return Array.from(f.proxyInstances.keys())}static getCurrentConfig(){return f.currentConfig?Object.assign({},f.currentConfig):null}};C.instance=null,C.currentConfig=null,C.isInitializing=!1,C.proxyInstances=new Map;const D=p=>{if(C.proxyInstances.has(p))return C.proxyInstances.get(p);const e=new W(p);return C.proxyInstances.set(p,e),e},A=Object.assign(Object.assign({},G),{DATABASE_MANAGER:"DatabaseManager",DATABASE_FACTORY:"DatabaseFactory",UNIVERSAL_DAO:"UniversalDAO",BASE_SERVICE:"BaseService",SERVICE_MANAGER:"ServiceManager",QUERY_BUILDER:"QueryBuilder",BASE_ADAPTER:"BaseAdapter",NODEJS_ADAPTER:"NodeJSAdapter",REACTNATIVE_ADAPTER:"RN-Adapter",UNIVERSAL_SQLITE:"UniversalSQLite",TRANSACTION:"Transaction",CONNECTION:"Connection"});function U(p=!0,e="warn"){const t=new k().setEnabled(p).setDefaultLevel(e).build();C.updateConfiguration(t)}U(!0,"warn");const c=D(A.UNIVERSAL_DAO);class z{constructor(e,t,a){var 
s,r;this.adapter=e,this.dbPath=t,this.options=a,this.connection=null,this.isConnected=!1,this.inTransaction=!1,this.typeMappingConfig=null,this.createIfNotExists=!1,this.forceRecreate=!1,this.createIfNotExists=(s=a==null?void 0:a.createIfNotExists)!==null&&s!==void 0?s:!1,this.forceRecreate=(r=a==null?void 0:a.forceRecreate)!==null&&r!==void 0?r:!1,c.trace("UniversalDAO constructor initialized",{dbPath:this.dbPath,createIfNotExists:this.createIfNotExists,forceRecreate:this.forceRecreate})}async connect(){if(c.trace("Attempting to connect to database",{dbPath:this.dbPath}),this.isConnected){c.debug("Already connected to database, skipping connection");return}try{this.connection=await this.adapter.connect(this.dbPath),this.isConnected=!0,c.info("Successfully connected to database",{dbPath:this.dbPath})}catch(e){throw c.error("Failed to connect to database",{dbPath:this.dbPath,error:e instanceof Error?e.message:e}),e}}async disconnect(){if(c.trace("Attempting to disconnect from database"),this.connection&&this.isConnected)try{await this.connection.close(),this.connection=null,this.isConnected=!1,c.info("Successfully disconnected from database")}catch(e){throw c.error("Error during database disconnection",{error:e instanceof Error?e.message:e}),e}else c.debug("Database was not connected, nothing to disconnect")}async close(){c.trace("Closing database connection"),await this.disconnect()}setTypeMappingConfig(e){c.trace("Setting type mapping configuration",{config:e}),this.typeMappingConfig=e,c.debug("Type mapping configuration updated")}convertToSQLiteType(e){if(c.trace("Converting generic type to SQLite type",{genericType:e}),!this.typeMappingConfig||!this.typeMappingConfig.sqlite){const a=this.getDefaultSQLiteType(e);return c.debug("Using default type mapping",{genericType:e,sqliteType:a}),a}const t=this.typeMappingConfig.sqlite[e.toLowerCase()]||"TEXT";return c.debug("Using custom type 
mapping",{genericType:e,sqliteType:t}),t}getDefaultSQLiteType(e){return{string:"TEXT",varchar:"TEXT",char:"TEXT",email:"TEXT",url:"TEXT",uuid:"TEXT",integer:"INTEGER",bigint:"INTEGER",smallint:"INTEGER",tinyint:"INTEGER",decimal:"REAL",numeric:"REAL",float:"REAL",double:"REAL",boolean:"INTEGER",timestamp:"TEXT",datetime:"TEXT",date:"TEXT",time:"TEXT",json:"TEXT",array:"TEXT",blob:"BLOB",binary:"BLOB"}[e.toLowerCase()]||"TEXT"}processColumnDefinition(e){c.trace("Processing column definition",{columnName:e.name,originalType:e.type});const t=Object.assign({},e);t.type=this.convertToSQLiteType(e.type);const a=[];if(e.constraints){c.trace("Processing column constraints",{columnName:e.name,constraints:e.constraints});const s=e.constraints.toUpperCase().split(" ");s.includes("PRIMARY")&&(a.push("PRIMARY KEY"),t.primary_key=!0),(s.includes("AUTO_INCREMENT")||s.includes("AUTOINCREMENT"))&&(t.primary_key&&a.push("AUTOINCREMENT"),t.auto_increment=!0),s.includes("NOT")&&s.includes("NULL")&&(a.push("NOT NULL"),t.nullable=!1),s.includes("UNIQUE")&&(t.primary_key||a.push("UNIQUE"),t.unique=!0);const r=s.indexOf("DEFAULT");if(r!==-1&&s.length>r+1){const i=s[r+1];a.push(`DEFAULT ${i}`),t.default=i}}return t.option_key=a.join(" ").trim(),c.debug("Column definition processed",{columnName:e.name,finalType:t.type,options:t.option_key}),t}async initializeFromSchema(e){var t,a;c.info("Initializing database schema",{schemaVersion:e.version,tableCount:Object.keys(e.schemas).length}),this.ensureConnected();let s=!1;try{const r=await this.execute("SELECT version FROM _schema_info ORDER BY applied_at DESC LIMIT 1");s=r.rows.length>0,s&&c.debug("Existing schema detected",{currentVersion:(t=r.rows[0])===null||t===void 0?void 0:t.version})}catch(r){c.debug("The first time for init from Schema! 
No existing schema _schema_info detected"),s=!1}if(s&&!this.createIfNotExists&&!this.forceRecreate){c.info("Schema exists and no recreation options set, using existing schema"),e.type_mapping&&this.setTypeMappingConfig(e.type_mapping);return}s&&this.forceRecreate&&(c.warn("Force recreate option enabled, dropping all existing tables"),await this.dropAllTables()),e.type_mapping&&this.setTypeMappingConfig(e.type_mapping);try{c.debug("Enabling foreign key constraints"),await this.execute("PRAGMA foreign_keys = ON")}catch(r){c.warn("Failed to enable foreign key constraints",{error:r instanceof Error?r.message:r})}await this.beginTransaction();try{c.info("Creating tables from schema");for(const[r,i]of Object.entries(e.schemas)){c.debug("Creating table",{tableName:r,columnCount:i.cols.length});const n={name:r,cols:i.cols.map(h=>this.processColumnDefinition(h)),description:i.description,indexes:i.indexes,foreign_keys:i.foreign_keys};await this.createTableWithForeignKeys(n)}c.info("Creating indexes for tables");for(const[r,i]of Object.entries(e.schemas))!((a=i.indexes)===null||a===void 0)&&a.length&&(c.debug("Creating indexes for table",{tableName:r,indexCount:i.indexes.length}),await this.createIndexesForTable(r,i.indexes));await this.setSchemaVersion(e.version),await this.commitTransaction(),c.info("Schema initialization completed successfully",{version:e.version})}catch(r){throw c.error("Schema initialization failed, rolling back transaction",{error:r instanceof Error?r.message:r}),await this.rollbackTransaction(),r}}async dropAllTables(){c.info("Dropping all existing tables");const e=await this.execute("SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'");c.debug("Found tables to drop",{tableCount:e.rows.length}),await this.beginTransaction();try{for(const t of e.rows)c.trace("Dropping table",{tableName:t.name}),await this.execute(`DROP TABLE IF EXISTS ${t.name}`);await this.commitTransaction(),c.info("All tables dropped 
successfully")}catch(t){throw c.error("Failed to drop tables, rolling back",{error:t instanceof Error?t.message:t}),await this.rollbackTransaction(),t}}async createTableWithForeignKeys(e){var t,a,s,r,i;c.trace("Creating table with foreign keys",{tableName:e.name});const n=e.cols.map(d=>`${d.name} ${d.type} ${d.option_key||""}`.trim()),h=[],m=e.foreign_keys;if(m){c.debug("Processing foreign keys",{tableName:e.name,fkCount:m.length});for(const d of m){const E=d.columns?Array.isArray(d.columns)?d.columns:[d.columns]:[];if(E.length===0){c.warn("Foreign key without columns found",{tableName:e.name,foreignKey:d});continue}const y=(t=d.references)===null||t===void 0?void 0:t.table,O=!((a=d.references)===null||a===void 0)&&a.columns?Array.isArray((s=d.references)===null||s===void 0?void 0:s.columns)?(r=d.references)===null||r===void 0?void 0:r.columns:[(i=d.references)===null||i===void 0?void 0:i.columns]:[];if(!y||O.length===0){c.warn("Invalid foreign key reference",{tableName:e.name,foreignKey:d});continue}let $=`FOREIGN KEY (${E.join(", ")}) REFERENCES ${y}(${O.join(", ")})`;const M=d.on_delete,_=d.on_update;M&&($+=` ON DELETE ${M}`),_&&($+=` ON UPDATE ${_}`),h.push($)}}const b=[...n,...h],N=`CREATE TABLE IF NOT EXISTS ${e.name} (${b.join(", ")})`;try{await this.execute(N),c.debug("Table created successfully",{tableName:e.name})}catch(d){throw c.error("Failed to create table",{tableName:e.name,sql:N,error:d instanceof Error?d.message:d}),d}}async createIndexesForTable(e,t){c.trace("Creating indexes for table",{tableName:e,indexCount:t.length});for(const a of t){const s=a.columns.join(", "),r=a.unique||!1,i=`CREATE ${r?"UNIQUE":""} INDEX IF NOT EXISTS ${a.name} ON ${e} (${s})`;try{await this.execute(i),c.debug("Index created successfully",{indexName:a.name,tableName:e,columns:a.columns,unique:r})}catch(n){throw c.error("Failed to create index",{indexName:a.name,tableName:e,sql:i,error:n instanceof Error?n.message:n}),n}}}async beginTransaction(){if(c.trace("Beginning 
transaction"),this.inTransaction){const e=new Error("Transaction already in progress");throw c.error("Cannot begin transaction",{error:e.message}),e}try{await this.execute("BEGIN TRANSACTION"),this.inTransaction=!0,c.debug("Transaction started successfully")}catch(e){throw c.error("Failed to begin transaction",{error:e instanceof Error?e.message:e}),e}}async commitTransaction(){if(c.trace("Committing transaction"),!this.inTransaction){const e=new Error("No transaction in progress");throw c.error("Cannot commit transaction",{error:e.message}),e}try{await this.execute("COMMIT"),this.inTransaction=!1,c.debug("Transaction committed successfully")}catch(e){throw c.error("Failed to commit transaction",{error:e instanceof Error?e.message:e}),e}}async rollbackTransaction(){if(c.trace("Rolling back transaction"),!this.inTransaction){const e=new Error("No transaction in progress");throw c.error("Cannot rollback transaction",{error:e.message}),e}try{await this.execute("ROLLBACK"),this.inTransaction=!1,c.debug("Transaction rolled back successfully")}catch(e){throw c.error("Failed to rollback transaction",{error:e instanceof Error?e.message:e}),e}}async getSchemaVersion(){c.trace("Getting schema version");try{const e=(await this.getRst("SELECT version FROM _schema_info ORDER BY applied_at DESC LIMIT 1")).version||"0";return c.debug("Schema version retrieved",{version:e}),e}catch(e){return c.debug("No schema version found, returning default",{defaultVersion:"0"}),"0"}}async setSchemaVersion(e){c.trace("Setting schema version",{version:e});try{await this.execute(`CREATE TABLE IF NOT EXISTS _schema_info (
1
+ const R=(...p)=>p.map(e=>{if(typeof e=="bigint")return e.toString();if(typeof e=="object"&&e!==null)try{return JSON.stringify(e,(t,a)=>typeof a=="bigint"?a.toString():a instanceof Date?a.toISOString():typeof Buffer!="undefined"&&Buffer.isBuffer(a)?`<Buffer ${a.length} bytes>`:typeof a=="function"?`<Function ${a.name||"anonymous"}>`:a,2)}catch(t){return`<Error stringifying: ${t.message}>`}return String(e)}).join(" ");class V{constructor(e){this.transports=new Map,this.transportFactory=null,this.config=e,this.sessionId=e.sessionId||this.generateSessionId()}generateSessionId(){return`session_${Date.now()}_${Math.random().toString(36).substr(2,9)}`}getSessionId(){return this.sessionId}renewSession(){return this.sessionId=this.generateSessionId(),this.sessionId}setTransportFactory(e){this.transportFactory=e}addTransport(e){this.transports.set(e.name,e)}removeTransport(e){return this.transports.delete(e)}getTransport(e){return this.transports.get(e)}listTransports(){return Array.from(this.transports.keys())}setModuleConfig(e,t){this.config.modules[e]=t}getModuleConfig(e){return this.config.modules[e]}setGlobalEnabled(e){this.config.enabled=e}isGlobalEnabled(){return this.config.enabled}setDefaultLevel(e){this.config.defaultLevel=e}getConfig(){return JSON.parse(JSON.stringify(this.config))}setMetadata(e){this.config.metadata=Object.assign(Object.assign({},this.config.metadata),e)}shouldLog(e,t){if(!this.config.enabled)return!1;const a=this.config.modules[e];return a?a.enabled&&a.levels.includes(t):this.isLevelEnabled(t,this.config.defaultLevel)}isLevelEnabled(e,t){const a=["trace","debug","info","warn","error"],s=a.indexOf(e),r=a.indexOf(t);return s===-1||r===-1?!1:s>=r}getTransportsForModule(e){const t=this.config.modules[e];return((t==null?void 0:t.transports)||["console"]).map(a=>this.transports.get(a)).filter(a=>a!==void 0)}async sendToTransports(e,t){if(t.length===0)return;const a=t.map(async s=>{try{await s.log(e)}catch(r){console.error(`[UniversalLogger] 
Transport ${s.name} failed:`,r)}});await Promise.allSettled(a)}async log(e,t,a,s){if(!this.shouldLog(e,t))return;const r={timestamp:new Date().toISOString(),level:t,module:e,message:a,data:s,metadata:this.config.metadata,sessionId:this.sessionId},i=this.getTransportsForModule(e);await this.sendToTransports(r,i)}async trace(e,...t){const a=R(...t);await this.log(e,"trace",a)}async debug(e,...t){const a=R(...t);await this.log(e,"debug",a)}async info(e,...t){const a=R(...t);await this.log(e,"info",a)}async warn(e,...t){const a=R(...t);await this.log(e,"warn",a)}async error(e,...t){const a=R(...t);await this.log(e,"error",a)}async flush(){const e=Array.from(this.transports.values()).filter(t=>typeof t.flush=="function").map(t=>t.flush());await Promise.allSettled(e)}async cleanup(){const e=Array.from(this.transports.values()).filter(t=>typeof t.cleanup=="function").map(t=>t.cleanup());await Promise.allSettled(e)}createModuleLogger(e){return new Q(e,this)}}class Q{constructor(e,t){this.module=e,this.logger=t}async flush(){await this.logger.flush()}async trace(...e){const t=R(...e);await this.logger.trace(this.module,t)}async debug(...e){const t=R(...e);await this.logger.debug(this.module,t)}async info(...e){const t=R(...e);await this.logger.info(this.module,t)}async warn(...e){const t=R(...e);await this.logger.warn(this.module,t)}async error(...e){const t=R(...e);await this.logger.error(this.module,t)}getModuleName(){return this.module}}let G=class{constructor(e={}){this.name="console",this.config=Object.assign({colorize:!0,timestamp:!0,prefix:""},e)}log(e){const t=[];this.config.timestamp&&t.push(`[${e.timestamp}]`),this.config.prefix&&t.push(`[${this.config.prefix}]`),t.push(`[${e.module}]`,`[${e.level.toUpperCase()}]`,e.message);const a=t.join(" "),s=e.data?[e.data]:[];this.config.colorize?this.logWithColor(e.level,a,s):this.logWithoutColor(e.level,a,s)}logWithColor(e,t,a){const 
s={trace:"#999999",debug:"#0066cc",info:"#00cc66",warn:"#ff9900",error:"#cc0000"}[e];console.log(`%c${t}`,`color: ${s}`,...a)}logWithoutColor(e,t,a){switch(e){case"error":console.error(t,...a);break;case"warn":console.warn(t,...a);break;case"info":console.info(t,...a);break;default:console.log(t,...a)}}};class x{constructor(){this.config={enabled:!0,defaultLevel:"info",modules:{}}}setEnabled(e){return this.config.enabled=e,this}setDefaultLevel(e){return this.config.defaultLevel=e,this}setSessionId(e){return this.config.sessionId=e,this}setMetadata(e){return this.config.metadata=e,this}addModule(e,t=!0,a=["info","warn","error"],s=["console"]){return this.config.modules[e]={enabled:t,levels:a,transports:s},this}addModules(e){return e.forEach(t=>{var a,s,r;this.addModule(t.name,(a=t.enabled)!==null&&a!==void 0?a:!0,(s=t.levels)!==null&&s!==void 0?s:["info","warn","error"],(r=t.transports)!==null&&r!==void 0?r:["console"])}),this}setModuleTransports(e,t){return this.config.modules[e]?this.config.modules[e].transports=t:this.config.modules[e]={enabled:!0,levels:["info","warn","error"],transports:t},this}enableAllLevelsForModule(e){return this.config.modules[e]?this.config.modules[e].levels=["trace","debug","info","warn","error"]:this.config.modules[e]={enabled:!0,levels:["trace","debug","info","warn","error"],transports:["console"]},this}useDevelopmentPreset(){return this.config.enabled=!0,this.config.defaultLevel="debug",this}useProductionPreset(){return this.config.enabled=!0,this.config.defaultLevel="warn",this}useTestingPreset(){return this.config.enabled=!0,this.config.defaultLevel="trace",this}build(){return JSON.parse(JSON.stringify(this.config))}}class ${static createDevelopmentConfig(){return new x().setEnabled(!0).setDefaultLevel("debug").addModule("DatabaseManager",!0,["debug","info","warn","error"],["console"]).addModule("ApiClient",!0,["info","warn","error"],["console"]).addModule("Cache",!0,["debug","info","warn","error"],["console"]).build()}static 
createProductionConfig(){return new x().setEnabled(!0).setDefaultLevel("warn").addModule("DatabaseManager",!0,["error"],["console"]).addModule("ApiClient",!0,["warn","error"],["console"]).addModule("Cache",!1,[],[]).build()}static createCustomConfig(e=!1){return e?$.createDevelopmentConfig():$.createProductionConfig()}}const U=(p,e)=>{const t=p||$.createDevelopmentConfig(),a=new V(t);return a.addTransport(new G),e&&e.length>0&&e.forEach(s=>{a.addTransport(s)}),a},W={APP:"App",CONFIG:"Config",AUTH:"Auth",API:"API",DATABASE:"Database",MIDDLEWARE:"Middleware",UTILS:"Utils",SECURITY:"Security",VALIDATION:"Validation",CACHE:"Cache",FILE_SYSTEM:"FileSystem",NETWORK:"Network",SCHEDULER:"Scheduler",ERROR_HANDLER:"ErrorHandler"};class H{constructor(e){this.moduleName=e}trace(e,...t){S.getInstance().trace(this.moduleName,e,...t)}debug(e,...t){S.getInstance().debug(this.moduleName,e,...t)}info(e,...t){S.getInstance().info(this.moduleName,e,...t)}warn(e,...t){S.getInstance().warn(this.moduleName,e,...t)}error(e,...t){S.getInstance().error(this.moduleName,e,...t)}}let S=class u{static createDefaultConfig(){return new x().setEnabled(!0).setDefaultLevel("warn").build()}static setTransports(e){u.customTransports=e,u.instance&&e.forEach(t=>{u.instance.addTransport(t)})}static addTransport(e){u.customTransports.push(e),u.instance&&u.instance.addTransport(e)}static removeTransport(e){return u.customTransports=u.customTransports.filter(t=>t.name!==e),u.instance?u.instance.removeTransport(e):!1}static listTransports(){return u.instance?u.instance.listTransports():u.customTransports.map(e=>e.name)}static initialize(e,t){if(u.isInitializing||u.instance&&!e&&!t)return u.instance;u.isInitializing=!0;const a=e||u.createDefaultConfig(),s=[...u.customTransports,...t||[]];return u.currentConfig=a,u.instance=U(a,s),u.isInitializing=!1,u.instance}static getInstance(){return u.instance?u.instance:u.initialize()}static updateConfiguration(e){const 
t=u.currentConfig;t&&t.enabled===e.enabled&&t.defaultLevel===e.defaultLevel&&JSON.stringify(t.modules)===JSON.stringify(e.modules)||(u.currentConfig=e,u.instance=U(e,u.customTransports))}static async flush(){u.instance&&await u.instance.flush()}static async cleanup(){u.instance&&await u.instance.cleanup()}static setEnabled(e){u.currentConfig&&(u.currentConfig.enabled=e,u.updateConfiguration(u.currentConfig))}static enableModule(e,t,a){u.currentConfig&&u.currentConfig.modules&&(u.currentConfig.modules[e]={enabled:!0,levels:t||["debug","info","warn","error"],transports:a||["console"]},u.updateConfiguration(u.currentConfig))}static disableModule(e){u.currentConfig&&u.currentConfig.modules&&(u.currentConfig.modules[e]={enabled:!1,levels:[],transports:[]},u.updateConfiguration(u.currentConfig))}static createDebugConfig(){return new x().setEnabled(!0).setDefaultLevel("trace").build()}static createProductionConfig(){return new x().setEnabled(!0).setDefaultLevel("warn").build()}static reset(){return u.customTransports=[],u.initialize()}static getActiveProxyModules(){return Array.from(u.proxyInstances.keys())}static getCurrentConfig(){return u.currentConfig?Object.assign({},u.currentConfig):null}};S.instance=null,S.currentConfig=null,S.customTransports=[],S.isInitializing=!1,S.proxyInstances=new Map;const k=p=>{if(S.proxyInstances.has(p))return S.proxyInstances.get(p);const e=new H(p);return S.proxyInstances.set(p,e),e},N={id:{name:"id",type:"integer",primaryKey:!0,autoIncrement:!0,required:!0,description:"ID t\u1EF1 \u0111\u1ED9ng t\u0103ng"},timestamp:{name:"timestamp",type:"timestamp",required:!0,index:!0,description:"Th\u1EDDi gian log"},level:{name:"level",type:"varchar",length:20,required:!0,index:!0,enum:["trace","debug","info","warn","error"],description:"M\u1EE9c \u0111\u1ED9 log"},module:{name:"module",type:"varchar",length:100,required:!0,index:!0,description:"T\xEAn module"},message:{name:"message",type:"text",required:!0,description:"N\u1ED9i dung 
log"},data:{name:"data",type:"text",nullable:!0,description:"D\u1EEF li\u1EC7u b\u1ED5 sung (JSON string)"},metadata:{name:"metadata",type:"text",nullable:!0,description:"Metadata (JSON string)"},session_id:{name:"session_id",type:"varchar",length:100,nullable:!0,index:!0,description:"Session ID"},created_at:{name:"created_at",type:"timestamp",default:"CURRENT_TIMESTAMP",description:"Th\u1EDDi gian t\u1EA1o b\u1EA3n ghi"}};Object.assign({},N.id),Object.assign({},N.timestamp),Object.assign({},N.level),Object.assign({},N.module),Object.assign({},N.message),Object.assign({},N.data),Object.assign({},N.metadata),Object.assign({},N.session_id),Object.assign({},N.created_at),Object.assign({},N.id),Object.assign({},N.timestamp),Object.assign({},N.module),Object.assign({},N.message),Object.assign({},N.data),Object.assign({},N.metadata),Object.assign({},N.session_id),Object.assign({},N.created_at),Object.assign({},N.module),Object.assign({},N.level),Object.assign({},N.created_at),Object.assign({},N.session_id),Object.assign({},N.created_at);const O=Object.assign(Object.assign({},W),{DATABASE_MANAGER:"DatabaseManager",DATABASE_FACTORY:"DatabaseFactory",UNIVERSAL_DAO:"UniversalDAO",BASE_SERVICE:"BaseService",SERVICE_MANAGER:"ServiceManager",QUERY_BUILDER:"QueryBuilder",BASE_ADAPTER:"BaseAdapter",NODEJS_ADAPTER:"NodeJSAdapter",REACTNATIVE_ADAPTER:"RN-Adapter",UNIVERSAL_SQLITE:"UniversalSQLite",TRANSACTION:"Transaction",CONNECTION:"Connection"});function B(p=!0,e="warn"){const t=new x().setEnabled(p).setDefaultLevel(e).build();S.updateConfiguration(t)}B(!0,"warn");const c=k(O.UNIVERSAL_DAO);class P{constructor(e,t,a){var s,r;this.adapter=e,this.dbPath=t,this.options=a,this.connection=null,this.isConnected=!1,this.inTransaction=!1,this.typeMappingConfig=null,this.createIfNotExists=!1,this.forceRecreate=!1,this.createIfNotExists=(s=a==null?void 0:a.createIfNotExists)!==null&&s!==void 0?s:!1,this.forceRecreate=(r=a==null?void 0:a.forceRecreate)!==null&&r!==void 
0?r:!1,c.trace("UniversalDAO constructor initialized",{dbPath:this.dbPath,createIfNotExists:this.createIfNotExists,forceRecreate:this.forceRecreate})}async connect(){if(c.trace("Attempting to connect to database",{dbPath:this.dbPath}),this.isConnected){c.debug("Already connected to database, skipping connection");return}try{this.connection=await this.adapter.connect(this.dbPath),this.isConnected=!0,c.info("Successfully connected to database",{dbPath:this.dbPath})}catch(e){throw c.error("Failed to connect to database",{dbPath:this.dbPath,error:e instanceof Error?e.message:e}),e}}async disconnect(){if(c.trace("Attempting to disconnect from database"),this.connection&&this.isConnected)try{await this.connection.close(),this.connection=null,this.isConnected=!1,c.info("Successfully disconnected from database")}catch(e){throw c.error("Error during database disconnection",{error:e instanceof Error?e.message:e}),e}else c.debug("Database was not connected, nothing to disconnect")}async close(){c.trace("Closing database connection"),await this.disconnect()}setTypeMappingConfig(e){c.trace("Setting type mapping configuration",{config:e}),this.typeMappingConfig=e,c.debug("Type mapping configuration updated")}convertToSQLiteType(e){if(c.trace("Converting generic type to SQLite type",{genericType:e}),!this.typeMappingConfig||!this.typeMappingConfig.sqlite){const a=this.getDefaultSQLiteType(e);return c.debug("Using default type mapping",{genericType:e,sqliteType:a}),a}const t=this.typeMappingConfig.sqlite[e.toLowerCase()]||"TEXT";return c.debug("Using custom type 
mapping",{genericType:e,sqliteType:t}),t}getDefaultSQLiteType(e){return{string:"TEXT",varchar:"TEXT",char:"TEXT",email:"TEXT",url:"TEXT",uuid:"TEXT",integer:"INTEGER",bigint:"INTEGER",smallint:"INTEGER",tinyint:"INTEGER",decimal:"REAL",numeric:"REAL",float:"REAL",double:"REAL",boolean:"INTEGER",timestamp:"TEXT",datetime:"TEXT",date:"TEXT",time:"TEXT",json:"TEXT",array:"TEXT",blob:"BLOB",binary:"BLOB"}[e.toLowerCase()]||"TEXT"}processColumnDefinition(e){c.trace("Processing column definition",{columnName:e.name,originalType:e.type});const t=Object.assign({},e);t.type=this.convertToSQLiteType(e.type);const a=[];if(e.constraints){c.trace("Processing column constraints",{columnName:e.name,constraints:e.constraints});const s=e.constraints.toUpperCase().split(" ");s.includes("PRIMARY")&&(a.push("PRIMARY KEY"),t.primary_key=!0),(s.includes("AUTO_INCREMENT")||s.includes("AUTOINCREMENT"))&&(t.primary_key&&a.push("AUTOINCREMENT"),t.auto_increment=!0),s.includes("NOT")&&s.includes("NULL")&&(a.push("NOT NULL"),t.nullable=!1),s.includes("UNIQUE")&&(t.primary_key||a.push("UNIQUE"),t.unique=!0);const r=s.indexOf("DEFAULT");if(r!==-1&&s.length>r+1){const i=s[r+1];a.push(`DEFAULT ${i}`),t.default=i}}return t.option_key=a.join(" ").trim(),c.debug("Column definition processed",{columnName:e.name,finalType:t.type,options:t.option_key}),t}async initializeFromSchema(e){var t,a;c.info("Initializing database schema",{schemaVersion:e.version,tableCount:Object.keys(e.schemas).length}),this.ensureConnected();let s=!1;try{const r=await this.execute("SELECT version FROM _schema_info ORDER BY applied_at DESC LIMIT 1");s=r.rows.length>0,s&&c.debug("Existing schema detected",{currentVersion:(t=r.rows[0])===null||t===void 0?void 0:t.version})}catch(r){c.debug("The first time for init from Schema! 
No existing schema _schema_info detected"),s=!1}if(s&&!this.createIfNotExists&&!this.forceRecreate){c.info("Schema exists and no recreation options set, using existing schema"),e.type_mapping&&this.setTypeMappingConfig(e.type_mapping);return}s&&this.forceRecreate&&(c.warn("Force recreate option enabled, dropping all existing tables"),await this.dropAllTables()),e.type_mapping&&this.setTypeMappingConfig(e.type_mapping);try{c.debug("Enabling foreign key constraints"),await this.execute("PRAGMA foreign_keys = ON")}catch(r){c.warn("Failed to enable foreign key constraints",{error:r instanceof Error?r.message:r})}await this.beginTransaction();try{c.info("Creating tables from schema");for(const[r,i]of Object.entries(e.schemas)){c.debug("Creating table",{tableName:r,columnCount:i.cols.length});const n={name:r,cols:i.cols.map(h=>this.processColumnDefinition(h)),description:i.description,indexes:i.indexes,foreign_keys:i.foreign_keys};await this.createTableWithForeignKeys(n)}c.info("Creating indexes for tables");for(const[r,i]of Object.entries(e.schemas))!((a=i.indexes)===null||a===void 0)&&a.length&&(c.debug("Creating indexes for table",{tableName:r,indexCount:i.indexes.length}),await this.createIndexesForTable(r,i.indexes));await this.setSchemaVersion(e.version),await this.commitTransaction(),c.info("Schema initialization completed successfully",{version:e.version})}catch(r){throw c.error("Schema initialization failed, rolling back transaction",{error:r instanceof Error?r.message:r}),await this.rollbackTransaction(),r}}async dropAllTables(){c.info("Dropping all existing tables");const e=await this.execute("SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'");c.debug("Found tables to drop",{tableCount:e.rows.length}),await this.beginTransaction();try{for(const t of e.rows)c.trace("Dropping table",{tableName:t.name}),await this.execute(`DROP TABLE IF EXISTS ${t.name}`);await this.commitTransaction(),c.info("All tables dropped 
successfully")}catch(t){throw c.error("Failed to drop tables, rolling back",{error:t instanceof Error?t.message:t}),await this.rollbackTransaction(),t}}async createTableWithForeignKeys(e){var t,a,s,r,i;c.trace("Creating table with foreign keys",{tableName:e.name});const n=e.cols.map(d=>`${d.name} ${d.type} ${d.option_key||""}`.trim()),h=[],m=e.foreign_keys;if(m){c.debug("Processing foreign keys",{tableName:e.name,fkCount:m.length});for(const d of m){const C=d.columns?Array.isArray(d.columns)?d.columns:[d.columns]:[];if(C.length===0){c.warn("Foreign key without columns found",{tableName:e.name,foreignKey:d});continue}const w=(t=d.references)===null||t===void 0?void 0:t.table,D=!((a=d.references)===null||a===void 0)&&a.columns?Array.isArray((s=d.references)===null||s===void 0?void 0:s.columns)?(r=d.references)===null||r===void 0?void 0:r.columns:[(i=d.references)===null||i===void 0?void 0:i.columns]:[];if(!w||D.length===0){c.warn("Invalid foreign key reference",{tableName:e.name,foreignKey:d});continue}let j=`FOREIGN KEY (${C.join(", ")}) REFERENCES ${w}(${D.join(", ")})`;const _=d.on_delete,K=d.on_update;_&&(j+=` ON DELETE ${_}`),K&&(j+=` ON UPDATE ${K}`),h.push(j)}}const f=[...n,...h],y=`CREATE TABLE IF NOT EXISTS ${e.name} (${f.join(", ")})`;try{await this.execute(y),c.debug("Table created successfully",{tableName:e.name})}catch(d){throw c.error("Failed to create table",{tableName:e.name,sql:y,error:d instanceof Error?d.message:d}),d}}async createIndexesForTable(e,t){c.trace("Creating indexes for table",{tableName:e,indexCount:t.length});for(const a of t){const s=a.columns.join(", "),r=a.unique||!1,i=`CREATE ${r?"UNIQUE":""} INDEX IF NOT EXISTS ${a.name} ON ${e} (${s})`;try{await this.execute(i),c.debug("Index created successfully",{indexName:a.name,tableName:e,columns:a.columns,unique:r})}catch(n){throw c.error("Failed to create index",{indexName:a.name,tableName:e,sql:i,error:n instanceof Error?n.message:n}),n}}}async beginTransaction(){if(c.trace("Beginning 
transaction"),this.inTransaction){const e=new Error("Transaction already in progress");throw c.error("Cannot begin transaction",{error:e.message}),e}try{await this.execute("BEGIN TRANSACTION"),this.inTransaction=!0,c.debug("Transaction started successfully")}catch(e){throw c.error("Failed to begin transaction",{error:e instanceof Error?e.message:e}),e}}async commitTransaction(){if(c.trace("Committing transaction"),!this.inTransaction){const e=new Error("No transaction in progress");throw c.error("Cannot commit transaction",{error:e.message}),e}try{await this.execute("COMMIT"),this.inTransaction=!1,c.debug("Transaction committed successfully")}catch(e){throw c.error("Failed to commit transaction",{error:e instanceof Error?e.message:e}),e}}async rollbackTransaction(){if(c.trace("Rolling back transaction"),!this.inTransaction){const e=new Error("No transaction in progress");throw c.error("Cannot rollback transaction",{error:e.message}),e}try{await this.execute("ROLLBACK"),this.inTransaction=!1,c.debug("Transaction rolled back successfully")}catch(e){throw c.error("Failed to rollback transaction",{error:e instanceof Error?e.message:e}),e}}async getSchemaVersion(){c.trace("Getting schema version");try{const e=(await this.getRst("SELECT version FROM _schema_info ORDER BY applied_at DESC LIMIT 1")).version||"0";return c.debug("Schema version retrieved",{version:e}),e}catch(e){return c.debug("No schema version found, returning default",{defaultVersion:"0"}),"0"}}async setSchemaVersion(e){c.trace("Setting schema version",{version:e});try{await this.execute(`CREATE TABLE IF NOT EXISTS _schema_info (
2
2
  version TEXT NOT NULL,
3
3
  applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
4
- )`),await this.execute("INSERT INTO _schema_info (version) VALUES (?)",[e]),c.info("Schema version set successfully",{version:e})}catch(t){throw c.error("Failed to set schema version",{version:e,error:t instanceof Error?t.message:t}),t}}async insert(e){c.trace("Performing insert operation",{tableName:e.name});const t=e.cols.filter(n=>n.value!==void 0&&n.value!==null);if(t.length===0){const n=new Error("No valid columns to insert");throw c.error("Insert operation failed",{tableName:e.name,error:n.message}),n}const a=t.map(n=>n.name).join(", "),s=t.map(()=>"?").join(", "),r=t.map(n=>typeof n.value=="object"?JSON.stringify(n.value):n.value),i=`INSERT INTO ${e.name} (${a}) VALUES (${s})`;c.debug("Executing insert query",{tableName:e.name,columnCount:t.length,sql:i});try{const n=await this.execute(i,r);return c.info("Insert operation completed successfully",{tableName:e.name,rowsAffected:n.rowsAffected,lastInsertRowid:n.lastInsertRowId}),n}catch(n){throw c.error("Insert operation failed",{tableName:e.name,sql:i,error:n instanceof Error?n.message:n}),n}}async update(e){var t;c.trace("Performing update operation",{tableName:e.name});const a=e.cols.filter(h=>{var m;return h.value!==void 0&&!(!((m=e.wheres)===null||m===void 0)&&m.some(b=>b.name===h.name))});if(a.length===0){const h=new Error("No columns to update");throw c.error("Update operation failed",{tableName:e.name,error:h.message}),h}const s=a.map(h=>`${h.name} = ?`).join(", "),r=a.map(h=>typeof h.value=="object"?JSON.stringify(h.value):h.value);let i=`UPDATE ${e.name} SET ${s}`;const n=this.buildWhereClause(e.wheres);if(!n.sql){const h=new Error("WHERE clause is required for UPDATE operation");throw c.error("Update operation failed",{tableName:e.name,error:h.message}),h}i+=n.sql,r.push(...n.params),c.debug("Executing update query",{tableName:e.name,updateColumnCount:a.length,whereConditions:((t=e.wheres)===null||t===void 0?void 0:t.length)||0,sql:i});try{const h=await this.execute(i,r);return c.info("Update 
operation completed successfully",{tableName:e.name,rowsAffected:h.rowsAffected}),h}catch(h){throw c.error("Update operation failed",{tableName:e.name,sql:i,error:h instanceof Error?h.message:h}),h}}async delete(e){var t;c.trace("Performing delete operation",{tableName:e.name});let a=`DELETE FROM ${e.name}`;const s=this.buildWhereClause(e.wheres);if(!s.sql){const r=new Error("WHERE clause is required for DELETE operation");throw c.error("Delete operation failed",{tableName:e.name,error:r.message}),r}a+=s.sql,c.debug("Executing delete query",{tableName:e.name,whereConditions:((t=e.wheres)===null||t===void 0?void 0:t.length)||0,sql:a});try{const r=await this.execute(a,s.params);return c.info("Delete operation completed successfully",{tableName:e.name,rowsAffected:r.rowsAffected}),r}catch(r){throw c.error("Delete operation failed",{tableName:e.name,sql:a,error:r instanceof Error?r.message:r}),r}}async select(e){c.trace("Performing select single operation",{tableName:e.name});const{sql:t,params:a}=this.buildSelectQuery(e," LIMIT 1");c.debug("Executing select single query",{tableName:e.name,sql:t});try{const s=await this.execute(t,a),r=s.rows[0]||{};return c.debug("Select single operation completed",{tableName:e.name,hasResult:!!s.rows[0]}),r}catch(s){throw c.error("Select single operation failed",{tableName:e.name,sql:t,error:s instanceof Error?s.message:s}),s}}async selectAll(e){c.trace("Performing select all operation",{tableName:e.name});const{sql:t,params:a}=this.buildSelectQuery(e);c.debug("Executing select all query",{tableName:e.name,sql:t});try{const s=await this.execute(t,a);return c.debug("Select all operation completed",{tableName:e.name,rowCount:s.rows.length}),s.rows}catch(s){throw c.error("Select all operation failed",{tableName:e.name,sql:t,error:s instanceof Error?s.message:s}),s}}buildSelectQuery(e,t=""){var a;let s=`SELECT ${e.cols.length>0?e.cols.map(i=>i.name).join(", "):"*"} FROM ${e.name}`;const 
r=this.buildWhereClause(e.wheres);if(s+=r.sql,!((a=e.orderbys)===null||a===void 0)&&a.length){const i=e.orderbys.map(n=>`${n.name} ${n.direction||"ASC"}`).join(", ");s+=` ORDER BY ${i}`}return e.limitOffset&&(e.limitOffset.limit&&(s+=` LIMIT ${e.limitOffset.limit}`),e.limitOffset.offset&&(s+=` OFFSET ${e.limitOffset.offset}`)),s+=t,{sql:s,params:r.params}}buildWhereClause(e,t="WHERE"){if(!e||e.length===0)return{sql:"",params:[]};const a=[],s=[];for(const r of e){const i=r.operator||"=";a.push(`${r.name} ${i} ?`),s.push(r.value)}return{sql:` ${t} ${a.join(" AND ")}`,params:s}}convertJsonToQueryTable(e,t,a=["id"]){var s,r;c.trace("Converting JSON to QueryTable",{tableName:e,fieldCount:Object.keys(t).length,idFields:a});const i={name:e,cols:[],wheres:[]};for(const[n,h]of Object.entries(t))i.cols.push({name:n,value:h}),a.includes(n)&&h!==void 0&&((s=i.wheres)===null||s===void 0||s.push({name:n,value:h}));return c.debug("JSON converted to QueryTable",{tableName:e,columnCount:i.cols.length,whereCount:((r=i.wheres)===null||r===void 0?void 0:r.length)||0}),i}async importData(e){c.info("Starting data import operation",{tableName:e.tableName,totalRows:e.data.length,batchSize:e.batchSize||1e3,validateData:e.validateData,updateOnConflict:e.updateOnConflict,skipErrors:e.skipErrors});const t=Date.now(),a={totalRows:e.data.length,successRows:0,errorRows:0,errors:[],executionTime:0};if(!this.isConnected){const m=new Error("Database is not connected");throw c.error("Import failed - database not connected"),m}if(!e.data||e.data.length===0)return c.warn("No data provided for import, returning empty result"),a.executionTime=Date.now()-t,a;const s=await this.getTableInfo(e.tableName);if(s.length===0){const m=new Error(`Table '${e.tableName}' does not exist`);throw c.error("Import failed - table does not exist",{tableName:e.tableName}),m}c.debug("Table info retrieved for import",{tableName:e.tableName,columnCount:s.length});const r=new 
Map(s.map(m=>[m.name.toLowerCase(),m])),i=e.batchSize||1e3;let n=0;const h=!e.includeAutoIncrementPK;try{await this.beginTransaction();for(let m=0;m<e.data.length;m+=i){const b=e.data.slice(m,m+i);c.debug("Processing import batch",{batchNumber:Math.floor(m/i)+1,batchSize:b.length,totalBatches:Math.ceil(e.data.length/i)});for(let N=0;N<b.length;N++){const d=m+N,E=b[N];try{const y=e.validateData?this.validateAndTransformRow(E,r,e.tableName,h):this.transformRowData(E,r,h);e.updateOnConflict&&e.conflictColumns?await this.insertOrUpdate(e.tableName,y,e.conflictColumns):await this.insertRow(e.tableName,y),a.successRows++}catch(y){a.errorRows++;const O={rowIndex:d,error:y instanceof Error?y.message:String(y),rowData:E};if(a.errors.push(O),c.warn("Row import failed",{rowIndex:d,tableName:e.tableName,error:y instanceof Error?y.message:y}),e.onError&&e.onError(y instanceof Error?y:new Error(String(y)),d,E),!e.skipErrors)throw c.error("Import operation stopped due to error and skipErrors=false"),y}n++,e.onProgress&&n%100===0&&e.onProgress(n,e.data.length)}}await this.commitTransaction(),c.info("Data import completed successfully",{tableName:e.tableName,totalRows:a.totalRows,successRows:a.successRows,errorRows:a.errorRows,executionTime:Date.now()-t})}catch(m){throw c.error("Import operation failed, rolling back transaction",{tableName:e.tableName,processedCount:n,error:m instanceof Error?m.message:m}),await this.rollbackTransaction(),m}return e.onProgress&&e.onProgress(n,e.data.length),a.executionTime=Date.now()-t,a}async importDataWithMapping(e,t,a,s={}){c.info("Starting data import with column mapping",{tableName:e,dataRows:t.length,mappingCount:a.length});const r=t.map((i,n)=>{c.trace("Transforming row with column mappings",{rowIndex:n});const h={};return a.forEach(m=>{if(i.hasOwnProperty(m.sourceColumn)){let b=i[m.sourceColumn];if(m.transform)try{b=m.transform(b)}catch(N){c.warn("Column transformation 
failed",{rowIndex:n,sourceColumn:m.sourceColumn,targetColumn:m.targetColumn,error:N instanceof Error?N.message:N})}h[m.targetColumn]=b}}),h});return c.debug("Data transformation completed",{originalRowCount:t.length,transformedRowCount:r.length}),await this.importData(Object.assign({tableName:e,data:r},s))}async importFromCSV(e,t,a={}){c.info("Starting CSV import",{tableName:e,csvLength:t.length,delimiter:a.delimiter||",",hasHeader:a.hasHeader!==!1});const s=a.delimiter||",",r=a.hasHeader!==!1,i=t.split(`
5
- `).filter(b=>b.trim());if(i.length===0){const b=new Error("CSV data is empty");throw c.error("CSV import failed - empty data"),b}let n=[],h=0;if(r)n=i[0].split(s).map(b=>b.trim().replace(/^["']|["']$/g,"")),h=1,c.debug("CSV headers extracted",{headers:n,headerCount:n.length});else{const b=i[0].split(s).length;n=Array.from({length:b},(N,d)=>`column_${d+1}`),c.debug("Generated column headers for headerless CSV",{columnCount:b,headers:n})}const m=[];for(let b=h;b<i.length;b++){const N=i[b].split(s).map(E=>E.trim().replace(/^["']|["']$/g,"")),d={};n.forEach((E,y)=>{d[E]=N[y]||null}),m.push(d)}return c.debug("CSV data parsed",{totalLines:i.length,dataRows:m.length,skipHeader:r}),a.columnMappings?(c.debug("Using column mappings for CSV import"),await this.importDataWithMapping(e,m,a.columnMappings,a)):await this.importData(Object.assign({tableName:e,data:m},a))}validateAndTransformRow(e,t,a,s=!0){c.trace("Validating and transforming row data",{tableName:a});const r={};for(const[i,n]of t.entries()){const h=n.notnull===1&&!n.dflt_value,m=n.pk===1&&n.type.toLowerCase().includes("integer");if(s&&m)continue;const b=this.findValueForColumn(e,i);if(h&&b==null){const N=new Error(`Required column '${i}' is missing or null in table '${a}'`);throw c.error("Row validation failed",{tableName:a,columnName:i,error:N.message}),N}if(b!=null)try{r[i]=this.convertValueToColumnType(b,n.type)}catch(N){throw c.error("Value conversion failed during validation",{tableName:a,columnName:i,value:b,columnType:n.type,error:N instanceof Error?N.message:N}),N}}return r}transformRowData(e,t,a=!0){c.trace("Transforming row data without validation");const s={};for(const[r,i]of Object.entries(e)){const n=r.toLowerCase(),h=t.get(n);if(!h){c.trace("Column not found in table schema, skipping",{columnName:r});continue}const m=h.pk===1&&h.type.toLowerCase().includes("integer");if(!(a&&m)&&i!=null)try{s[r]=this.convertValueToColumnType(i,h.type)}catch(b){c.warn("Value conversion failed during 
transformation",{columnName:r,value:i,columnType:h.type,error:b instanceof Error?b.message:b})}}return s}findValueForColumn(e,t){if(e.hasOwnProperty(t))return e[t];const a=t.toLowerCase();for(const[s,r]of Object.entries(e))if(s.toLowerCase()===a)return r}convertValueToColumnType(e,t){if(e==null)return null;const a=t.toLowerCase();try{if(a.includes("integer")||a.includes("int")){if(typeof e=="boolean")return e?1:0;const s=parseInt(String(e));return isNaN(s)?null:s}if(a.includes("real")||a.includes("float")||a.includes("decimal")){const s=parseFloat(String(e));return isNaN(s)?null:s}if(a.includes("boolean")){if(typeof e=="boolean")return e?1:0;if(typeof e=="string"){const s=e.toLowerCase();return s==="true"||s==="1"||s==="yes"?1:0}return e?1:0}if(a.includes("json")){if(typeof e=="object")return JSON.stringify(e);if(typeof e=="string")try{return JSON.parse(e),e}catch(s){throw new Error(`Invalid JSON format for column type '${t}'`)}return JSON.stringify(e)}if(a.includes("timestamp")||a.includes("datetime")){if(e instanceof Date)return e.toISOString();if(typeof e=="string"||typeof e=="number"){const s=new Date(e);return isNaN(s.getTime())?e:s.toISOString()}return String(e)}return String(e)}catch(s){throw new Error(`Cannot convert value '${e}' to column type '${t}'`)}}async insertRow(e,t){const a=Object.keys(t),s=Object.values(t),r=a.map(()=>"?").join(", "),i=`INSERT INTO ${e} (${a.join(", ")}) VALUES (${r})`;try{await this.execute(i,s)}catch(n){throw c.trace("Insert row failed",{tableName:e,columns:a,error:n instanceof Error?n.message:n}),n}}async insertOrUpdate(e,t,a){c.trace("Attempting insert or update",{tableName:e,conflictColumns:a});try{await this.insertRow(e,t)}catch(s){if(this.isConflictError(s))c.debug("Insert conflict detected, attempting update",{tableName:e}),await this.updateRowByColumns(e,t,a);else throw s}}async updateRowByColumns(e,t,a){const s=Object.keys(t).filter(d=>!a.includes(d)),r=a;if(s.length===0){c.debug("No columns to update, skipping update 
operation",{tableName:e});return}const i=s.map(d=>`${d} = ?`).join(", "),n=r.map(d=>`${d} = ?`).join(" AND "),h=s.map(d=>t[d]),m=r.map(d=>t[d]),b=[...h,...m],N=`UPDATE ${e} SET ${i} WHERE ${n}`;try{await this.execute(N,b),c.trace("Update by columns completed",{tableName:e,updateColumns:s,whereColumns:r})}catch(d){throw c.error("Update by columns failed",{tableName:e,sql:N,error:d instanceof Error?d.message:d}),d}}isConflictError(e){return e.code==="SQLITE_CONSTRAINT_UNIQUE"||e.code==="SQLITE_CONSTRAINT_PRIMARYKEY"||e.message&&e.message.includes("UNIQUE constraint failed")}async getDatabaseInfo(){c.trace("Getting database information");try{const e=await this.execute("SELECT name FROM sqlite_master WHERE type='table'"),t=await this.getSchemaVersion(),a={name:this.dbPath,tables:e.rows.map(s=>s.name),isConnected:this.isConnected,version:t};return c.debug("Database information retrieved",{tableCount:a.tables.length,isConnected:a.isConnected,version:a.version}),a}catch(e){throw c.error("Failed to get database information",{error:e instanceof Error?e.message:e}),e}}async getTableInfo(e){c.trace("Getting table information",{tableName:e});try{const t=await this.execute(`PRAGMA table_info(${e})`);return c.debug("Table information retrieved",{tableName:e,columnCount:t.rows.length}),t.rows}catch(t){throw c.error("Failed to get table information",{tableName:e,error:t instanceof Error?t.message:t}),t}}async dropTable(e){c.info("Dropping table",{tableName:e});const t=`DROP TABLE IF EXISTS ${e}`;try{await this.execute(t),c.info("Table dropped successfully",{tableName:e})}catch(a){throw c.error("Failed to drop table",{tableName:e,error:a instanceof Error?a.message:a}),a}}isConnectionOpen(){const e=this.isConnected&&!!this.connection;return c.trace("Connection status checked",{isOpen:e}),e}async ensureConnected(){this.isConnectionOpen()||(c.debug("Connection not open, attempting to connect"),await this.connect())}async execute(e,t=[]){var a;c.trace("Executing SQL 
query",{sql:e.substring(0,100)+(e.length>100?"...":""),paramCount:t.length}),this.ensureConnected();try{const s=await this.connection.execute(e,t);return c.trace("SQL query executed successfully",{rowsAffected:s.rowsAffected,rowsReturned:((a=s.rows)===null||a===void 0?void 0:a.length)||0}),s}catch(s){throw e.indexOf("_schema_info")===-1&&c.error("SQL query execution failed",{sql:e.substring(0,200)+(e.length>200?"...":""),paramCount:t.length,error:s}),s}}async getRst(e,t=[]){return(await this.execute(e,t)).rows[0]||{}}async getRsts(e,t=[]){return(await this.execute(e,t)).rows}}const g=D(A.DATABASE_FACTORY);class T{static registerAdapter(e){g.info(`Registering SQLite adapter: ${e.constructor.name}`,{adapterName:e.constructor.name,totalAdapters:this.adapters.length+1}),this.adapters.push(e),g.debug(`Successfully registered adapter. Total adapters: ${this.adapters.length}`)}static getEnvironmentInfo(){g.trace("Detecting runtime environment");let e;return typeof navigator!="undefined"&&navigator.product==="ReactNative"?e="React Native":typeof globalThis.Bun!="undefined"?e="Bun":typeof globalThis.Deno!="undefined"?e="Deno":typeof window!="undefined"?e="Browser":typeof process!="undefined"?e="Node.js":e="Unknown",g.debug(`Detected runtime environment: ${e}`),e}static async detectBestAdapter(){g.trace("Detecting best available SQLite adapter",{totalAdapters:this.adapters.length,environment:this.getEnvironmentInfo()});for(const e of this.adapters){if(g.trace(`Testing adapter: ${e.constructor.name}`),await e.isSupported())return g.info(`Selected adapter: ${e.constructor.name}`,{adapterName:e.constructor.name,environment:this.getEnvironmentInfo()}),e;g.debug(`Adapter ${e.constructor.name} is not supported in current environment`)}throw g.error("No supported SQLite adapter found",{totalAdapters:this.adapters.length,environment:this.getEnvironmentInfo()}),new Error("No supported SQLite adapter found")}static async validateSchemaVersion(e,t){g.trace("Validating schema version 
compatibility",{databaseName:t.database_name,configVersion:t.version});try{const a=await e.getDatabaseInfo();if(g.debug("Retrieved database info",{databaseVersion:a.version,configVersion:t.version}),a.version!==t.version){const s=`Schema version mismatch: database (${a.version}) vs config (${t.version})`;throw g.error("Schema version mismatch",{databaseName:t.database_name,databaseVersion:a.version,configVersion:t.version}),new Error(s)}g.debug("Schema version validation successful",{databaseName:t.database_name,version:t.version})}catch(a){throw g.error("Error during schema version validation",{databaseName:t.database_name,error:a.message}),new Error(`Error validating schema version for ${t.database_name}: ${a.message}`)}}static validateSchema(e){if(g.trace("Validating database schema configuration"),!e)throw g.error("Schema validation failed: null or undefined schema"),new Error("Schema configuration is null or undefined.");if(typeof e.database_name!="string"||e.database_name.trim()==="")throw g.error("Schema validation failed: invalid database_name",{databaseName:e.database_name,type:typeof e.database_name}),new Error("Invalid or missing 'database_name' in schema. This is required to name the database file.");if(typeof e.schemas!="object"||e.schemas===null||Object.keys(e.schemas).length===0)throw g.error("Schema validation failed: invalid schemas object",{databaseName:e.database_name,schemasType:typeof e.schemas,schemasCount:e.schemas?Object.keys(e.schemas).length:0}),new Error("Invalid or missing 'schemas' object in schema. 
At least one table definition is required.");return g.debug("Schema validation successful",{databaseName:e.database_name,tablesCount:Object.keys(e.schemas).length,version:e.version}),!0}static async createDAO(e,t){var a,s,r,i;g.info("Creating new UniversalDAO instance",{dbPath:e,hasCustomAdapter:!!(t!=null&&t.adapter),createIfNotExists:(a=t==null?void 0:t.createIfNotExists)!==null&&a!==void 0?a:!1,forceRecreate:(s=t==null?void 0:t.forceRecreate)!==null&&s!==void 0?s:!1});let n;t!=null&&t.adapter?(g.debug("Using provided custom adapter",{adapterName:t.adapter.constructor.name}),n=t.adapter):(g.debug("Detecting best adapter automatically"),n=await this.detectBestAdapter());const h=new z(n,e,{createIfNotExists:(r=t==null?void 0:t.createIfNotExists)!==null&&r!==void 0?r:!1,forceRecreate:(i=t==null?void 0:t.forceRecreate)!==null&&i!==void 0?i:!1});return g.debug("UniversalDAO instance created successfully",{dbPath:e,adapterName:n.constructor.name}),h}static async openExisting(e,t={}){g.info("Opening existing database",{dbName:e,options:t});const a=e.endsWith(".db")?e:`${e}.db`;g.debug("Resolved database filename",{originalName:e,resolvedName:a});const s=await this.createDAO(a,t);try{return g.debug("Connecting to database",{dbFileName:a}),await s.connect(),g.debug("Running integrity check",{dbFileName:a}),await s.execute("PRAGMA integrity_check"),g.info("Database opened successfully",{dbFileName:a}),s}catch(r){g.error("Error opening database",{dbFileName:a,error:r.message});try{await s.close()}catch(i){g.warn("Error closing DAO after failed open",{dbFileName:a,closeError:i.message})}throw new Error(`Error opening database '${a}': ${r.message}`)}}static async createOrOpenInternal(e,t=!1,a=!1){g.info("Creating or opening database internally",{isForceInit:t,isForceDelete:a,hasConfig:!!e.config,hasConfigAsset:!!e.configAsset});let s;if(g.trace("Loading database schema"),e.config)g.debug("Using provided config object"),s=e.config;else if(e.configAsset)g.debug("Using provided 
config asset"),s=e.configAsset;else throw g.error("No database schema configuration provided"),new Error("Either 'config', 'configAsset', or 'configPath' must be provided to the factory.");g.trace("Validating schema configuration"),this.validateSchema(s);const r=s.database_name.endsWith(".db")?s.database_name:`${s.database_name}.db`;g.debug("Database filename resolved",{originalName:s.database_name,resolvedName:r}),g.debug("Creating DAO instance",{dbFileName:r,hasCustomAdapter:!!e.adapter,createIfNotExists:t,forceRecreate:a});const i=await this.createDAO(r,{adapter:e.adapter,createIfNotExists:t,forceRecreate:a});try{g.debug("Connecting to database",{dbFileName:r}),await i.connect(),g.debug("Initializing database schema",{dbFileName:r}),await i.initializeFromSchema(s),g.debug("Validating schema version compatibility");try{await this.validateSchemaVersion(i,s)}catch(n){throw g.error("Schema version validation failed",{dbFileName:r,error:n.message}),await i.close(),new Error(`Schema mismatch in existing database. Use forceRecreate=true to recreate with updated schema. 
Error: ${n.message}`)}return g.info("Database created/opened successfully",{dbFileName:r,databaseName:s.database_name,version:s.version}),i}catch(n){if(g.error("Error during database creation/opening",{dbFileName:r,error:n.message}),i.isConnectionOpen())try{await i.close()}catch(h){g.warn("Error closing DAO after failed operation",{dbFileName:r,closeError:h.message})}throw n}}static async create(e){var t,a;return g.warn("Creating database with force recreate - this will delete existing database",{databaseName:((t=e.config)===null||t===void 0?void 0:t.database_name)||((a=e.configAsset)===null||a===void 0?void 0:a.database_name)}),this.createOrOpenInternal(e,!0,!0)}static async createOrOpen(e,t=!1){var a,s;return g.info("Smart create or open database",{databaseName:((a=e.config)===null||a===void 0?void 0:a.database_name)||((s=e.configAsset)===null||s===void 0?void 0:s.database_name),isForceInit:t}),this.createOrOpenInternal(e,t)}static async createFromAsset(e,t={}){g.info("Creating database from asset",{databaseName:e.database_name,version:e.version});try{return await this.create(Object.assign(Object.assign({},t),{configAsset:e}))}catch(a){throw g.error("Error creating database from asset",{databaseName:e.database_name,error:a.message}),new Error(`Error creating database from asset: ${a.message}`)}}static async createFromConfig(e,t={}){g.info("Creating database from config",{databaseName:e.database_name,version:e.version});try{return await this.create(Object.assign(Object.assign({},t),{config:e}))}catch(a){throw g.error("Error creating database from config",{databaseName:e.database_name,error:a.message}),new Error(`Error creating database from config: ${a.message}`)}}}T.adapters=[];const o=D(A.DATABASE_MANAGER);class v{static getMaxConnections(){return o.trace("Getting max connections",{maxConnections:this.maxConnections}),this.maxConnections}static setMaxConnections(e){if(o.debug("Setting max 
connections",{newMaxConnections:e,currentMax:this.maxConnections}),e<=0)throw o.error("Invalid max connections value",{maxConnections:e}),new Error("Maximum connections must be a positive number");const t=Object.keys(this.connections).length;if(t>e)throw o.error("Cannot set max connections - would exceed current active connections",{requestedMax:e,currentActiveConnections:t,activeConnectionKeys:Object.keys(this.connections)}),new Error(`Cannot set maximum connections to ${e}. Current active connections (${t}) exceed the new limit. Please close some connections first.`);this.maxConnections=e,o.info("Max connections updated successfully",{newMaxConnections:e,currentActiveConnections:t})}static setSchemaManager(e){o.debug("Setting schema manager",{hadPreviousManager:this.schemaManager!==null}),this.schemaManager=e,o.info("Schema manager set successfully")}static registerSchema(e,t){o.debug("Registering schema",{key:e,schemaName:t.database_name}),this.schemaConfigurations[e]=t,o.info("Schema registered successfully",{key:e,schemaName:t.database_name})}static registerSchemas(e){const t=Object.keys(e);o.debug("Registering multiple schemas",{count:t.length,keys:t}),Object.entries(e).forEach(([a,s])=>{this.registerSchema(a,s)}),o.info("Multiple schemas registered successfully",{count:t.length})}static getSchema(e){if(o.trace("Getting schema",{key:e}),this.schemaConfigurations[e])return o.trace("Schema found in internal configurations",{key:e}),this.schemaConfigurations[e];if(this.schemaManager){o.trace("Checking external schema manager",{key:e});const t=this.schemaManager.getSchema(e);if(t)return o.trace("Schema found in external manager",{key:e}),t}o.warn("Schema not found",{key:e})}static getAvailableSchemas(){var e;const t=Object.keys(this.schemaConfigurations),a=((e=this.schemaManager)===null||e===void 0?void 0:e.getAllSchemaKeys())||[],s=[...new Set([...t,...a])];return o.trace("Getting available 
schemas",{internalCount:t.length,externalCount:a.length,totalUnique:s.length}),s}static registerRole(e){o.debug("Registering role",{roleName:e.roleName,requiredDatabases:e.requiredDatabases,optionalDatabases:e.optionalDatabases,priority:e.priority}),this.roleRegistry[e.roleName]=e,o.info("Role registered successfully",{roleName:e.roleName})}static registerRoles(e){o.debug("Registering multiple roles",{count:e.length}),e.forEach(t=>this.registerRole(t)),o.info("Multiple roles registered successfully",{count:e.length})}static getRegisteredRoles(){return o.trace("Getting registered roles",{count:Object.keys(this.roleRegistry).length}),Object.assign({},this.roleRegistry)}static getRoleDatabases(e){o.trace("Getting role databases",{roleName:e});const t=this.roleRegistry[e];if(!t)throw o.error("Role not found in registry",{roleName:e,availableRoles:Object.keys(this.roleRegistry)}),new Error(`Role '${e}' is not registered.`);const a=[...t.requiredDatabases,...t.optionalDatabases||[]];return o.trace("Role databases retrieved",{roleName:e,databases:a}),a}static getCurrentUserDatabases(){o.trace("Getting current user databases",{currentUserRoles:this.currentUserRoles});const e=new Set;e.add("core");for(const a of this.currentUserRoles){const s=this.roleRegistry[a];s?(s.requiredDatabases.forEach(r=>e.add(r)),s.optionalDatabases&&s.optionalDatabases.forEach(r=>e.add(r))):o.warn("Role config not found for current user role",{roleName:a})}const t=Array.from(e);return o.debug("Current user databases calculated",{userRoles:this.currentUserRoles,databases:t}),t}static async initializeCoreConnection(){if(console.log("initializeCoreConnection logger config:",C.getCurrentConfig()),o.debug("Initializing core database connection"),this.connections.core){o.debug("Core connection already exists");return}try{const e=this.getSchema("core");if(!e)throw o.error("Core database schema not found"),new Error("Core database schema not found.");o.debug("Creating core database 
connection",{schemaName:e.database_name});const t=await T.createOrOpen({config:e},!1);await t.execute("PRAGMA integrity_check"),this.connections.core=t,o.info("Core database connection initialized successfully")}catch(e){throw o.error("Error initializing core database",{error:e.message}),new Error(`Error initializing core database: ${e.message}`)}}static async setCurrentUserRoles(e,t){o.debug("Setting current user roles",{userRoles:e,primaryRole:t});for(const s of e)if(!this.roleRegistry[s])throw o.error("Role not registered",{roleName:s,availableRoles:Object.keys(this.roleRegistry)}),new Error(`Role '${s}' is not registered. Please register it first.`);const a=[...this.currentUserRoles];this.currentUserRoles=e,this.currentRole=t||e[0]||null,o.info("User roles updated",{previousRoles:a,newRoles:e,primaryRole:this.currentRole});try{await this.initializeUserRoleConnections(),await this.cleanupUnusedConnections(a),o.info("User role connections initialized successfully")}catch(s){throw o.error("Failed to initialize user role connections",{error:s.message}),s}}static getCurrentUserRoles(){return o.trace("Getting current user roles",{roles:this.currentUserRoles}),[...this.currentUserRoles]}static getCurrentRole(){return o.trace("Getting current primary role",{role:this.currentRole}),this.currentRole}static async initializeUserRoleConnections(){const e=this.getCurrentUserDatabases();o.debug("Initializing user role connections",{requiredDatabases:e});const t=[],a=e.map(async s=>{if(this.connections[s]){o.trace("Database already connected",{dbKey:s});return}try{o.debug("Initializing database connection",{dbKey:s});const r=this.getSchema(s);if(!r)throw new Error(`Database key '${s}' not found in schema configurations.`);const i=await T.createOrOpen({config:r},!1);await i.execute("PRAGMA integrity_check"),this.connections[s]=i,o.info("Database connection initialized",{dbKey:s,schemaName:r.database_name})}catch(r){const i=r instanceof Error?r:new 
Error(String(r));o.error("Failed to initialize database connection",{dbKey:s,error:i.message}),this.currentUserRoles.some(n=>{const h=this.roleRegistry[n];return h&&h.requiredDatabases.includes(s)})?t.push({key:s,error:i}):o.warn("Optional database initialization failed",{dbKey:s,error:i.message})}});if(await Promise.all(a),t.length>0){const s=t.map(r=>` - ${r.key}: ${r.error.message}`).join(`
4
+ )`),await this.execute("INSERT INTO _schema_info (version) VALUES (?)",[e]),c.info("Schema version set successfully",{version:e})}catch(t){throw c.error("Failed to set schema version",{version:e,error:t instanceof Error?t.message:t}),t}}async insert(e){c.trace("Performing insert operation",{tableName:e.name});const t=e.cols.filter(n=>n.value!==void 0&&n.value!==null);if(t.length===0){const n=new Error("No valid columns to insert");throw c.error("Insert operation failed",{tableName:e.name,error:n.message}),n}const a=t.map(n=>n.name).join(", "),s=t.map(()=>"?").join(", "),r=t.map(n=>typeof n.value=="object"?JSON.stringify(n.value):n.value),i=`INSERT INTO ${e.name} (${a}) VALUES (${s})`;c.debug("Executing insert query",{tableName:e.name,columnCount:t.length,sql:i});try{const n=await this.execute(i,r);return c.info("Insert operation completed successfully",{tableName:e.name,rowsAffected:n.rowsAffected,lastInsertRowid:n.lastInsertRowId}),n}catch(n){throw c.error("Insert operation failed",{tableName:e.name,sql:i,error:n instanceof Error?n.message:n}),n}}async update(e){var t;c.trace("Performing update operation",{tableName:e.name});const a=e.cols.filter(h=>{var m;return h.value!==void 0&&!(!((m=e.wheres)===null||m===void 0)&&m.some(f=>f.name===h.name))});if(a.length===0){const h=new Error("No columns to update");throw c.error("Update operation failed",{tableName:e.name,error:h.message}),h}const s=a.map(h=>`${h.name} = ?`).join(", "),r=a.map(h=>typeof h.value=="object"?JSON.stringify(h.value):h.value);let i=`UPDATE ${e.name} SET ${s}`;const n=this.buildWhereClause(e.wheres);if(!n.sql){const h=new Error("WHERE clause is required for UPDATE operation");throw c.error("Update operation failed",{tableName:e.name,error:h.message}),h}i+=n.sql,r.push(...n.params),c.debug("Executing update query",{tableName:e.name,updateColumnCount:a.length,whereConditions:((t=e.wheres)===null||t===void 0?void 0:t.length)||0,sql:i});try{const h=await this.execute(i,r);return c.info("Update 
operation completed successfully",{tableName:e.name,rowsAffected:h.rowsAffected}),h}catch(h){throw c.error("Update operation failed",{tableName:e.name,sql:i,error:h instanceof Error?h.message:h}),h}}async delete(e){var t;c.trace("Performing delete operation",{tableName:e.name});let a=`DELETE FROM ${e.name}`;const s=this.buildWhereClause(e.wheres);if(!s.sql){const r=new Error("WHERE clause is required for DELETE operation");throw c.error("Delete operation failed",{tableName:e.name,error:r.message}),r}a+=s.sql,c.debug("Executing delete query",{tableName:e.name,whereConditions:((t=e.wheres)===null||t===void 0?void 0:t.length)||0,sql:a});try{const r=await this.execute(a,s.params);return c.info("Delete operation completed successfully",{tableName:e.name,rowsAffected:r.rowsAffected}),r}catch(r){throw c.error("Delete operation failed",{tableName:e.name,sql:a,error:r instanceof Error?r.message:r}),r}}async select(e){c.trace("Performing select single operation",{tableName:e.name});const{sql:t,params:a}=this.buildSelectQuery(e," LIMIT 1");c.debug("Executing select single query",{tableName:e.name,sql:t});try{const s=await this.execute(t,a),r=s.rows[0]||{};return c.debug("Select single operation completed",{tableName:e.name,hasResult:!!s.rows[0]}),r}catch(s){throw c.error("Select single operation failed",{tableName:e.name,sql:t,error:s instanceof Error?s.message:s}),s}}async selectAll(e){c.trace("Performing select all operation",{tableName:e.name});const{sql:t,params:a}=this.buildSelectQuery(e);c.debug("Executing select all query",{tableName:e.name,sql:t});try{const s=await this.execute(t,a);return c.debug("Select all operation completed",{tableName:e.name,rowCount:s.rows.length}),s.rows}catch(s){throw c.error("Select all operation failed",{tableName:e.name,sql:t,error:s instanceof Error?s.message:s}),s}}buildSelectQuery(e,t=""){var a;let s=`SELECT ${e.cols.length>0?e.cols.map(i=>i.name).join(", "):"*"} FROM ${e.name}`;const 
r=this.buildWhereClause(e.wheres);if(s+=r.sql,!((a=e.orderbys)===null||a===void 0)&&a.length){const i=e.orderbys.map(n=>`${n.name} ${n.direction||"ASC"}`).join(", ");s+=` ORDER BY ${i}`}return e.limitOffset&&(e.limitOffset.limit&&(s+=` LIMIT ${e.limitOffset.limit}`),e.limitOffset.offset&&(s+=` OFFSET ${e.limitOffset.offset}`)),s+=t,{sql:s,params:r.params}}buildWhereClause(e,t="WHERE"){if(!e||e.length===0)return{sql:"",params:[]};const a=[],s=[];for(const r of e){const i=r.operator||"=";a.push(`${r.name} ${i} ?`),s.push(r.value)}return{sql:` ${t} ${a.join(" AND ")}`,params:s}}convertJsonToQueryTable(e,t,a=["id"]){var s,r;c.trace("Converting JSON to QueryTable",{tableName:e,fieldCount:Object.keys(t).length,idFields:a});const i={name:e,cols:[],wheres:[]};for(const[n,h]of Object.entries(t))i.cols.push({name:n,value:h}),a.includes(n)&&h!==void 0&&((s=i.wheres)===null||s===void 0||s.push({name:n,value:h}));return c.debug("JSON converted to QueryTable",{tableName:e,columnCount:i.cols.length,whereCount:((r=i.wheres)===null||r===void 0?void 0:r.length)||0}),i}async importData(e){c.info("Starting data import operation",{tableName:e.tableName,totalRows:e.data.length,batchSize:e.batchSize||1e3,validateData:e.validateData,updateOnConflict:e.updateOnConflict,skipErrors:e.skipErrors});const t=Date.now(),a={totalRows:e.data.length,successRows:0,errorRows:0,errors:[],executionTime:0};if(!this.isConnected){const m=new Error("Database is not connected");throw c.error("Import failed - database not connected"),m}if(!e.data||e.data.length===0)return c.warn("No data provided for import, returning empty result"),a.executionTime=Date.now()-t,a;const s=await this.getTableInfo(e.tableName);if(s.length===0){const m=new Error(`Table '${e.tableName}' does not exist`);throw c.error("Import failed - table does not exist",{tableName:e.tableName}),m}c.debug("Table info retrieved for import",{tableName:e.tableName,columnCount:s.length});const r=new 
Map(s.map(m=>[m.name.toLowerCase(),m])),i=e.batchSize||1e3;let n=0;const h=!e.includeAutoIncrementPK;try{await this.beginTransaction();for(let m=0;m<e.data.length;m+=i){const f=e.data.slice(m,m+i);c.debug("Processing import batch",{batchNumber:Math.floor(m/i)+1,batchSize:f.length,totalBatches:Math.ceil(e.data.length/i)});for(let y=0;y<f.length;y++){const d=m+y,C=f[y];try{const w=e.validateData?this.validateAndTransformRow(C,r,e.tableName,h):this.transformRowData(C,r,h);e.updateOnConflict&&e.conflictColumns?await this.insertOrUpdate(e.tableName,w,e.conflictColumns):await this.insertRow(e.tableName,w),a.successRows++}catch(w){a.errorRows++;const D={rowIndex:d,error:w instanceof Error?w.message:String(w),rowData:C};if(a.errors.push(D),c.warn("Row import failed",{rowIndex:d,tableName:e.tableName,error:w instanceof Error?w.message:w}),e.onError&&e.onError(w instanceof Error?w:new Error(String(w)),d,C),!e.skipErrors)throw c.error("Import operation stopped due to error and skipErrors=false"),w}n++,e.onProgress&&n%100===0&&e.onProgress(n,e.data.length)}}await this.commitTransaction(),c.info("Data import completed successfully",{tableName:e.tableName,totalRows:a.totalRows,successRows:a.successRows,errorRows:a.errorRows,executionTime:Date.now()-t})}catch(m){throw c.error("Import operation failed, rolling back transaction",{tableName:e.tableName,processedCount:n,error:m instanceof Error?m.message:m}),await this.rollbackTransaction(),m}return e.onProgress&&e.onProgress(n,e.data.length),a.executionTime=Date.now()-t,a}async importDataWithMapping(e,t,a,s={}){c.info("Starting data import with column mapping",{tableName:e,dataRows:t.length,mappingCount:a.length});const r=t.map((i,n)=>{c.trace("Transforming row with column mappings",{rowIndex:n});const h={};return a.forEach(m=>{if(i.hasOwnProperty(m.sourceColumn)){let f=i[m.sourceColumn];if(m.transform)try{f=m.transform(f)}catch(y){c.warn("Column transformation 
failed",{rowIndex:n,sourceColumn:m.sourceColumn,targetColumn:m.targetColumn,error:y instanceof Error?y.message:y})}h[m.targetColumn]=f}}),h});return c.debug("Data transformation completed",{originalRowCount:t.length,transformedRowCount:r.length}),await this.importData(Object.assign({tableName:e,data:r},s))}async importFromCSV(e,t,a={}){c.info("Starting CSV import",{tableName:e,csvLength:t.length,delimiter:a.delimiter||",",hasHeader:a.hasHeader!==!1});const s=a.delimiter||",",r=a.hasHeader!==!1,i=t.split(`
5
+ `).filter(f=>f.trim());if(i.length===0){const f=new Error("CSV data is empty");throw c.error("CSV import failed - empty data"),f}let n=[],h=0;if(r)n=i[0].split(s).map(f=>f.trim().replace(/^["']|["']$/g,"")),h=1,c.debug("CSV headers extracted",{headers:n,headerCount:n.length});else{const f=i[0].split(s).length;n=Array.from({length:f},(y,d)=>`column_${d+1}`),c.debug("Generated column headers for headerless CSV",{columnCount:f,headers:n})}const m=[];for(let f=h;f<i.length;f++){const y=i[f].split(s).map(C=>C.trim().replace(/^["']|["']$/g,"")),d={};n.forEach((C,w)=>{d[C]=y[w]||null}),m.push(d)}return c.debug("CSV data parsed",{totalLines:i.length,dataRows:m.length,skipHeader:r}),a.columnMappings?(c.debug("Using column mappings for CSV import"),await this.importDataWithMapping(e,m,a.columnMappings,a)):await this.importData(Object.assign({tableName:e,data:m},a))}validateAndTransformRow(e,t,a,s=!0){c.trace("Validating and transforming row data",{tableName:a});const r={};for(const[i,n]of t.entries()){const h=n.notnull===1&&!n.dflt_value,m=n.pk===1&&n.type.toLowerCase().includes("integer");if(s&&m)continue;const f=this.findValueForColumn(e,i);if(h&&f==null){const y=new Error(`Required column '${i}' is missing or null in table '${a}'`);throw c.error("Row validation failed",{tableName:a,columnName:i,error:y.message}),y}if(f!=null)try{r[i]=this.convertValueToColumnType(f,n.type)}catch(y){throw c.error("Value conversion failed during validation",{tableName:a,columnName:i,value:f,columnType:n.type,error:y instanceof Error?y.message:y}),y}}return r}transformRowData(e,t,a=!0){c.trace("Transforming row data without validation");const s={};for(const[r,i]of Object.entries(e)){const n=r.toLowerCase(),h=t.get(n);if(!h){c.trace("Column not found in table schema, skipping",{columnName:r});continue}const m=h.pk===1&&h.type.toLowerCase().includes("integer");if(!(a&&m)&&i!=null)try{s[r]=this.convertValueToColumnType(i,h.type)}catch(f){c.warn("Value conversion failed during 
transformation",{columnName:r,value:i,columnType:h.type,error:f instanceof Error?f.message:f})}}return s}findValueForColumn(e,t){if(e.hasOwnProperty(t))return e[t];const a=t.toLowerCase();for(const[s,r]of Object.entries(e))if(s.toLowerCase()===a)return r}convertValueToColumnType(e,t){if(e==null)return null;const a=t.toLowerCase();try{if(a.includes("integer")||a.includes("int")){if(typeof e=="boolean")return e?1:0;const s=parseInt(String(e));return isNaN(s)?null:s}if(a.includes("real")||a.includes("float")||a.includes("decimal")){const s=parseFloat(String(e));return isNaN(s)?null:s}if(a.includes("boolean")){if(typeof e=="boolean")return e?1:0;if(typeof e=="string"){const s=e.toLowerCase();return s==="true"||s==="1"||s==="yes"?1:0}return e?1:0}if(a.includes("json")){if(typeof e=="object")return JSON.stringify(e);if(typeof e=="string")try{return JSON.parse(e),e}catch(s){throw new Error(`Invalid JSON format for column type '${t}'`)}return JSON.stringify(e)}if(a.includes("timestamp")||a.includes("datetime")){if(e instanceof Date)return e.toISOString();if(typeof e=="string"||typeof e=="number"){const s=new Date(e);return isNaN(s.getTime())?e:s.toISOString()}return String(e)}return String(e)}catch(s){throw new Error(`Cannot convert value '${e}' to column type '${t}'`)}}async insertRow(e,t){const a=Object.keys(t),s=Object.values(t),r=a.map(()=>"?").join(", "),i=`INSERT INTO ${e} (${a.join(", ")}) VALUES (${r})`;try{await this.execute(i,s)}catch(n){throw c.trace("Insert row failed",{tableName:e,columns:a,error:n instanceof Error?n.message:n}),n}}async insertOrUpdate(e,t,a){c.trace("Attempting insert or update",{tableName:e,conflictColumns:a});try{await this.insertRow(e,t)}catch(s){if(this.isConflictError(s))c.debug("Insert conflict detected, attempting update",{tableName:e}),await this.updateRowByColumns(e,t,a);else throw s}}async updateRowByColumns(e,t,a){const s=Object.keys(t).filter(d=>!a.includes(d)),r=a;if(s.length===0){c.debug("No columns to update, skipping update 
operation",{tableName:e});return}const i=s.map(d=>`${d} = ?`).join(", "),n=r.map(d=>`${d} = ?`).join(" AND "),h=s.map(d=>t[d]),m=r.map(d=>t[d]),f=[...h,...m],y=`UPDATE ${e} SET ${i} WHERE ${n}`;try{await this.execute(y,f),c.trace("Update by columns completed",{tableName:e,updateColumns:s,whereColumns:r})}catch(d){throw c.error("Update by columns failed",{tableName:e,sql:y,error:d instanceof Error?d.message:d}),d}}isConflictError(e){return e.code==="SQLITE_CONSTRAINT_UNIQUE"||e.code==="SQLITE_CONSTRAINT_PRIMARYKEY"||e.message&&e.message.includes("UNIQUE constraint failed")}async getDatabaseInfo(){c.trace("Getting database information");try{const e=await this.execute("SELECT name FROM sqlite_master WHERE type='table'"),t=await this.getSchemaVersion(),a={name:this.dbPath,tables:e.rows.map(s=>s.name),isConnected:this.isConnected,version:t};return c.debug("Database information retrieved",{tableCount:a.tables.length,isConnected:a.isConnected,version:a.version}),a}catch(e){throw c.error("Failed to get database information",{error:e instanceof Error?e.message:e}),e}}async getTableInfo(e){c.trace("Getting table information",{tableName:e});try{const t=await this.execute(`PRAGMA table_info(${e})`);return c.debug("Table information retrieved",{tableName:e,columnCount:t.rows.length}),t.rows}catch(t){throw c.error("Failed to get table information",{tableName:e,error:t instanceof Error?t.message:t}),t}}async dropTable(e){c.info("Dropping table",{tableName:e});const t=`DROP TABLE IF EXISTS ${e}`;try{await this.execute(t),c.info("Table dropped successfully",{tableName:e})}catch(a){throw c.error("Failed to drop table",{tableName:e,error:a instanceof Error?a.message:a}),a}}isConnectionOpen(){const e=this.isConnected&&!!this.connection;return c.trace("Connection status checked",{isOpen:e}),e}async ensureConnected(){this.isConnectionOpen()||(c.debug("Connection not open, attempting to connect"),await this.connect())}async execute(e,t=[]){var a;c.trace("Executing SQL 
query",{sql:e.substring(0,100)+(e.length>100?"...":""),paramCount:t.length}),this.ensureConnected();try{const s=await this.connection.execute(e,t);return c.trace("SQL query executed successfully",{rowsAffected:s.rowsAffected,rowsReturned:((a=s.rows)===null||a===void 0?void 0:a.length)||0}),s}catch(s){throw e.indexOf("_schema_info")===-1&&c.error("SQL query execution failed",{sql:e.substring(0,200)+(e.length>200?"...":""),paramCount:t.length,error:s}),s}}async getRst(e,t=[]){return(await this.execute(e,t)).rows[0]||{}}async getRsts(e,t=[]){return(await this.execute(e,t)).rows}}const b=k(O.DATABASE_FACTORY);class I{static registerAdapter(e){b.info(`Registering SQLite adapter: ${e.constructor.name}`,{adapterName:e.constructor.name,totalAdapters:this.adapters.length+1}),this.adapters.push(e),b.debug(`Successfully registered adapter. Total adapters: ${this.adapters.length}`)}static getEnvironmentInfo(){b.trace("Detecting runtime environment");let e;return typeof navigator!="undefined"&&navigator.product==="ReactNative"?e="React Native":typeof globalThis.Bun!="undefined"?e="Bun":typeof globalThis.Deno!="undefined"?e="Deno":typeof window!="undefined"?e="Browser":typeof process!="undefined"?e="Node.js":e="Unknown",b.debug(`Detected runtime environment: ${e}`),e}static async detectBestAdapter(){b.trace("Detecting best available SQLite adapter",{totalAdapters:this.adapters.length,environment:this.getEnvironmentInfo()});for(const e of this.adapters){if(b.trace(`Testing adapter: ${e.constructor.name}`),await e.isSupported())return b.info(`Selected adapter: ${e.constructor.name}`,{adapterName:e.constructor.name,environment:this.getEnvironmentInfo()}),e;b.debug(`Adapter ${e.constructor.name} is not supported in current environment`)}throw b.error("No supported SQLite adapter found",{totalAdapters:this.adapters.length,environment:this.getEnvironmentInfo()}),new Error("No supported SQLite adapter found")}static async validateSchemaVersion(e,t){b.trace("Validating schema version 
compatibility",{databaseName:t.database_name,configVersion:t.version});try{const a=await e.getDatabaseInfo();if(b.debug("Retrieved database info",{databaseVersion:a.version,configVersion:t.version}),a.version!==t.version){const s=`Schema version mismatch: database (${a.version}) vs config (${t.version})`;throw b.error("Schema version mismatch",{databaseName:t.database_name,databaseVersion:a.version,configVersion:t.version}),new Error(s)}b.debug("Schema version validation successful",{databaseName:t.database_name,version:t.version})}catch(a){throw b.error("Error during schema version validation",{databaseName:t.database_name,error:a.message}),new Error(`Error validating schema version for ${t.database_name}: ${a.message}`)}}static validateSchema(e){if(b.trace("Validating database schema configuration"),!e)throw b.error("Schema validation failed: null or undefined schema"),new Error("Schema configuration is null or undefined.");if(typeof e.database_name!="string"||e.database_name.trim()==="")throw b.error("Schema validation failed: invalid database_name",{databaseName:e.database_name,type:typeof e.database_name}),new Error("Invalid or missing 'database_name' in schema. This is required to name the database file.");if(typeof e.schemas!="object"||e.schemas===null||Object.keys(e.schemas).length===0)throw b.error("Schema validation failed: invalid schemas object",{databaseName:e.database_name,schemasType:typeof e.schemas,schemasCount:e.schemas?Object.keys(e.schemas).length:0}),new Error("Invalid or missing 'schemas' object in schema. 
At least one table definition is required.");return b.debug("Schema validation successful",{databaseName:e.database_name,tablesCount:Object.keys(e.schemas).length,version:e.version}),!0}static async createDAO(e,t){var a,s,r,i;b.info("Creating new UniversalDAO instance",{dbPath:e,hasCustomAdapter:!!(t!=null&&t.adapter),createIfNotExists:(a=t==null?void 0:t.createIfNotExists)!==null&&a!==void 0?a:!1,forceRecreate:(s=t==null?void 0:t.forceRecreate)!==null&&s!==void 0?s:!1});let n;t!=null&&t.adapter?(b.debug("Using provided custom adapter",{adapterName:t.adapter.constructor.name}),n=t.adapter):(b.debug("Detecting best adapter automatically"),n=await this.detectBestAdapter());const h=new P(n,e,{createIfNotExists:(r=t==null?void 0:t.createIfNotExists)!==null&&r!==void 0?r:!1,forceRecreate:(i=t==null?void 0:t.forceRecreate)!==null&&i!==void 0?i:!1});return b.debug("UniversalDAO instance created successfully",{dbPath:e,adapterName:n.constructor.name}),h}static async openExisting(e,t={}){b.info("Opening existing database",{dbName:e,options:t});const a=e.endsWith(".db")?e:`${e}.db`;b.debug("Resolved database filename",{originalName:e,resolvedName:a});const s=await this.createDAO(a,t);try{return b.debug("Connecting to database",{dbFileName:a}),await s.connect(),b.debug("Running integrity check",{dbFileName:a}),await s.execute("PRAGMA integrity_check"),b.info("Database opened successfully",{dbFileName:a}),s}catch(r){b.error("Error opening database",{dbFileName:a,error:r.message});try{await s.close()}catch(i){b.warn("Error closing DAO after failed open",{dbFileName:a,closeError:i.message})}throw new Error(`Error opening database '${a}': ${r.message}`)}}static async createOrOpenInternal(e,t=!1,a=!1){b.info("Creating or opening database internally",{isForceInit:t,isForceDelete:a,hasConfig:!!e.config,hasConfigAsset:!!e.configAsset});let s;if(b.trace("Loading database schema"),e.config)b.debug("Using provided config object"),s=e.config;else if(e.configAsset)b.debug("Using provided 
config asset"),s=e.configAsset;else throw b.error("No database schema configuration provided"),new Error("Either 'config', 'configAsset', or 'configPath' must be provided to the factory.");b.trace("Validating schema configuration"),this.validateSchema(s);const r=s.database_name.endsWith(".db")?s.database_name:`${s.database_name}.db`;b.debug("Database filename resolved",{originalName:s.database_name,resolvedName:r}),b.debug("Creating DAO instance",{dbFileName:r,hasCustomAdapter:!!e.adapter,createIfNotExists:t,forceRecreate:a});const i=await this.createDAO(r,{adapter:e.adapter,createIfNotExists:t,forceRecreate:a});try{b.debug("Connecting to database",{dbFileName:r}),await i.connect(),b.debug("Initializing database schema",{dbFileName:r}),await i.initializeFromSchema(s),b.debug("Validating schema version compatibility");try{await this.validateSchemaVersion(i,s)}catch(n){throw b.error("Schema version validation failed",{dbFileName:r,error:n.message}),await i.close(),new Error(`Schema mismatch in existing database. Use forceRecreate=true to recreate with updated schema. 
Error: ${n.message}`)}return b.info("Database created/opened successfully",{dbFileName:r,databaseName:s.database_name,version:s.version}),i}catch(n){if(b.error("Error during database creation/opening",{dbFileName:r,error:n.message}),i.isConnectionOpen())try{await i.close()}catch(h){b.warn("Error closing DAO after failed operation",{dbFileName:r,closeError:h.message})}throw n}}static async create(e){var t,a;return b.warn("Creating database with force recreate - this will delete existing database",{databaseName:((t=e.config)===null||t===void 0?void 0:t.database_name)||((a=e.configAsset)===null||a===void 0?void 0:a.database_name)}),this.createOrOpenInternal(e,!0,!0)}static async createOrOpen(e,t=!1){var a,s;return b.info("Smart create or open database",{databaseName:((a=e.config)===null||a===void 0?void 0:a.database_name)||((s=e.configAsset)===null||s===void 0?void 0:s.database_name),isForceInit:t}),this.createOrOpenInternal(e,t)}static async createFromAsset(e,t={}){b.info("Creating database from asset",{databaseName:e.database_name,version:e.version});try{return await this.create(Object.assign(Object.assign({},t),{configAsset:e}))}catch(a){throw b.error("Error creating database from asset",{databaseName:e.database_name,error:a.message}),new Error(`Error creating database from asset: ${a.message}`)}}static async createFromConfig(e,t={}){b.info("Creating database from config",{databaseName:e.database_name,version:e.version});try{return await this.create(Object.assign(Object.assign({},t),{config:e}))}catch(a){throw b.error("Error creating database from config",{databaseName:e.database_name,error:a.message}),new Error(`Error creating database from config: ${a.message}`)}}}I.adapters=[];const o=k(O.DATABASE_MANAGER);class v{static getMaxConnections(){return o.trace("Getting max connections",{maxConnections:this.maxConnections}),this.maxConnections}static setMaxConnections(e){if(o.debug("Setting max 
connections",{newMaxConnections:e,currentMax:this.maxConnections}),e<=0)throw o.error("Invalid max connections value",{maxConnections:e}),new Error("Maximum connections must be a positive number");const t=Object.keys(this.connections).length;if(t>e)throw o.error("Cannot set max connections - would exceed current active connections",{requestedMax:e,currentActiveConnections:t,activeConnectionKeys:Object.keys(this.connections)}),new Error(`Cannot set maximum connections to ${e}. Current active connections (${t}) exceed the new limit. Please close some connections first.`);this.maxConnections=e,o.info("Max connections updated successfully",{newMaxConnections:e,currentActiveConnections:t})}static setSchemaManager(e){o.debug("Setting schema manager",{hadPreviousManager:this.schemaManager!==null}),this.schemaManager=e,o.info("Schema manager set successfully")}static registerSchema(e,t){o.debug("Registering schema",{key:e,schemaName:t.database_name}),this.schemaConfigurations[e]=t,o.info("Schema registered successfully",{key:e,schemaName:t.database_name})}static registerSchemas(e){const t=Object.keys(e);o.debug("Registering multiple schemas",{count:t.length,keys:t}),Object.entries(e).forEach(([a,s])=>{this.registerSchema(a,s)}),o.info("Multiple schemas registered successfully",{count:t.length})}static getSchema(e){if(o.trace("Getting schema",{key:e}),this.schemaConfigurations[e])return o.trace("Schema found in internal configurations",{key:e}),this.schemaConfigurations[e];if(this.schemaManager){o.trace("Checking external schema manager",{key:e});const t=this.schemaManager.getSchema(e);if(t)return o.trace("Schema found in external manager",{key:e}),t}o.warn("Schema not found",{key:e})}static getAvailableSchemas(){var e;const t=Object.keys(this.schemaConfigurations),a=((e=this.schemaManager)===null||e===void 0?void 0:e.getAllSchemaKeys())||[],s=[...new Set([...t,...a])];return o.trace("Getting available 
schemas",{internalCount:t.length,externalCount:a.length,totalUnique:s.length}),s}static registerRole(e){o.debug("Registering role",{roleName:e.roleName,requiredDatabases:e.requiredDatabases,optionalDatabases:e.optionalDatabases,priority:e.priority}),this.roleRegistry[e.roleName]=e,o.info("Role registered successfully",{roleName:e.roleName})}static registerRoles(e){o.debug("Registering multiple roles",{count:e.length}),e.forEach(t=>this.registerRole(t)),o.info("Multiple roles registered successfully",{count:e.length})}static getRegisteredRoles(){return o.trace("Getting registered roles",{count:Object.keys(this.roleRegistry).length}),Object.assign({},this.roleRegistry)}static getRoleDatabases(e){o.trace("Getting role databases",{roleName:e});const t=this.roleRegistry[e];if(!t)throw o.error("Role not found in registry",{roleName:e,availableRoles:Object.keys(this.roleRegistry)}),new Error(`Role '${e}' is not registered.`);const a=[...t.requiredDatabases,...t.optionalDatabases||[]];return o.trace("Role databases retrieved",{roleName:e,databases:a}),a}static getCurrentUserDatabases(){o.trace("Getting current user databases",{currentUserRoles:this.currentUserRoles});const e=new Set;e.add("core");for(const a of this.currentUserRoles){const s=this.roleRegistry[a];s?(s.requiredDatabases.forEach(r=>e.add(r)),s.optionalDatabases&&s.optionalDatabases.forEach(r=>e.add(r))):o.warn("Role config not found for current user role",{roleName:a})}const t=Array.from(e);return o.debug("Current user databases calculated",{userRoles:this.currentUserRoles,databases:t}),t}static async initializeCoreConnection(){if(console.log("initializeCoreConnection logger config:",S.getCurrentConfig()),o.debug("Initializing core database connection"),this.connections.core){o.debug("Core connection already exists");return}try{const e=this.getSchema("core");if(!e)throw o.error("Core database schema not found"),new Error("Core database schema not found.");o.debug("Creating core database 
connection",{schemaName:e.database_name});const t=await I.createOrOpen({config:e},!1);await t.execute("PRAGMA integrity_check"),this.connections.core=t,o.info("Core database connection initialized successfully")}catch(e){throw o.error("Error initializing core database",{error:e.message}),new Error(`Error initializing core database: ${e.message}`)}}static async setCurrentUserRoles(e,t){o.debug("Setting current user roles",{userRoles:e,primaryRole:t});for(const s of e)if(!this.roleRegistry[s])throw o.error("Role not registered",{roleName:s,availableRoles:Object.keys(this.roleRegistry)}),new Error(`Role '${s}' is not registered. Please register it first.`);const a=[...this.currentUserRoles];this.currentUserRoles=e,this.currentRole=t||e[0]||null,o.info("User roles updated",{previousRoles:a,newRoles:e,primaryRole:this.currentRole});try{await this.initializeUserRoleConnections(),await this.cleanupUnusedConnections(a),o.info("User role connections initialized successfully")}catch(s){throw o.error("Failed to initialize user role connections",{error:s.message}),s}}static getCurrentUserRoles(){return o.trace("Getting current user roles",{roles:this.currentUserRoles}),[...this.currentUserRoles]}static getCurrentRole(){return o.trace("Getting current primary role",{role:this.currentRole}),this.currentRole}static async initializeUserRoleConnections(){const e=this.getCurrentUserDatabases();o.debug("Initializing user role connections",{requiredDatabases:e});const t=[],a=e.map(async s=>{if(this.connections[s]){o.trace("Database already connected",{dbKey:s});return}try{o.debug("Initializing database connection",{dbKey:s});const r=this.getSchema(s);if(!r)throw new Error(`Database key '${s}' not found in schema configurations.`);const i=await I.createOrOpen({config:r},!1);await i.execute("PRAGMA integrity_check"),this.connections[s]=i,o.info("Database connection initialized",{dbKey:s,schemaName:r.database_name})}catch(r){const i=r instanceof Error?r:new 
Error(String(r));o.error("Failed to initialize database connection",{dbKey:s,error:i.message}),this.currentUserRoles.some(n=>{const h=this.roleRegistry[n];return h&&h.requiredDatabases.includes(s)})?t.push({key:s,error:i}):o.warn("Optional database initialization failed",{dbKey:s,error:i.message})}});if(await Promise.all(a),t.length>0){const s=t.map(r=>` - ${r.key}: ${r.error.message}`).join(`
6
6
  `);throw o.error("Failed to initialize required databases",{failedDatabases:t.map(r=>r.key),errorSummary:s}),new Error(`Failed to initialize required databases for user roles:
7
- ${s}`)}}static async cleanupUnusedConnections(e){o.debug("Cleaning up unused connections",{previousRoles:e});const t=new Set;t.add("core");for(const r of e){const i=this.roleRegistry[r];i&&(i.requiredDatabases.forEach(n=>t.add(n)),i.optionalDatabases&&i.optionalDatabases.forEach(n=>t.add(n)))}const a=new Set(this.getCurrentUserDatabases()),s=Array.from(t).filter(r=>!a.has(r));if(o.debug("Databases to cleanup",{databasesToClose:s,previousDatabaseCount:t.size,currentDatabaseCount:a.size}),s.length>0){for(const r of s)if(this.connections[r])try{o.debug("Closing unused database connection",{dbKey:r}),await this.connections[r].close(),delete this.connections[r],o.info("Database connection closed",{dbKey:r})}catch(i){o.error("Error closing database connection during cleanup",{dbKey:r,error:i.message})}o.info("Cleanup completed",{closedConnections:s})}else o.debug("No connections to cleanup")}static hasAccessToDatabase(e){const t=this.getSchema(e)!==void 0;return o.trace("Checking database access",{dbKey:e,hasAccess:t}),t}static get(e){if(o.trace("Getting database connection",{key:e}),!this.hasAccessToDatabase(e))throw o.error("Access denied to database",{key:e}),new Error(`Access denied: Database '${e}' is not accessible.`);const t=this.connections[e];if(!t)throw o.error("Database not connected",{key:e,availableConnections:Object.keys(this.connections)}),new Error(`Database '${e}' is not connected. 
Please ensure it's initialized.`);return o.trace("Database connection retrieved successfully",{key:e}),t}static onDatabaseReconnect(e,t){o.debug("Registering database reconnect listener",{schemaName:e}),this.eventListeners.has(e)||this.eventListeners.set(e,[]),this.eventListeners.get(e).push(t),o.trace("Database reconnect listener registered",{schemaName:e,listenerCount:this.eventListeners.get(e).length})}static offDatabaseReconnect(e,t){o.debug("Removing database reconnect listener",{schemaName:e});const a=this.eventListeners.get(e);if(a){const s=a.indexOf(t);s>-1?(a.splice(s,1),o.trace("Database reconnect listener removed",{schemaName:e,remainingListeners:a.length})):o.warn("Database reconnect listener not found for removal",{schemaName:e})}else o.warn("No listeners found for schema",{schemaName:e})}static notifyDatabaseReconnect(e,t){const a=this.eventListeners.get(e);a?(o.debug("Notifying database reconnect listeners",{schemaName:e,listenerCount:a.length}),a.forEach((s,r)=>{try{s(t),o.trace("Database reconnect listener notified",{schemaName:e,listenerIndex:r})}catch(i){o.error("Error in database reconnect listener",{schemaName:e,listenerIndex:r,error:i.message})}})):o.trace("No listeners to notify for database reconnection",{schemaName:e})}static async closeAllConnections(){if(this.isClosingConnections){o.debug("Already closing connections, skipping");return}o.info("Closing all database connections",{connectionCount:Object.keys(this.connections).length}),this.isClosingConnections=!0;try{const e=Object.keys(this.connections);e.forEach(a=>this.activeDatabases.add(a)),o.debug("Saving active database list for potential reconnection",{activeDatabases:e});const t=Object.entries(this.connections).map(async([a,s])=>{try{o.debug("Closing database connection",{dbKey:a}),await s.close(),o.trace("Database connection closed",{dbKey:a})}catch(r){o.error("Error closing database connection",{dbKey:a,error:r.message})}});await Promise.all(t),this.connections={},o.info("All 
database connections closed successfully")}finally{this.isClosingConnections=!1}}static async reopenConnections(){o.info("Reopening database connections");try{await this.initializeCoreConnection(),this.currentUserRoles.length>0&&await this.initializeUserRoleConnections();const e=Array.from(this.activeDatabases);if(o.debug("Reinitializing previously active databases",{activeDatabases:e}),e.length>0)for(const t of e)if(this.connections[t])this.connections[t]&&(o.trace("Database already connected, notifying listeners",{dbKey:t}),this.notifyDatabaseReconnect(t,this.connections[t]));else{const a=this.getSchema(t);if(a)try{o.debug("Reopening database connection",{dbKey:t});const s=await T.createOrOpen({config:a},!1);await s.connect(),this.connections[t]=s,this.notifyDatabaseReconnect(t,s),o.info("Database connection reopened",{dbKey:t})}catch(s){o.error("Failed to reopen database connection",{dbKey:t,error:s.message})}else o.warn("Schema not found for previously active database",{dbKey:t})}o.info("Database connections reopened successfully")}catch(e){throw o.error("Failed to reopen database connections",{error:e.message}),e}}static async ensureDatabaseConnection(e){if(o.debug("Ensuring database connection",{key:e}),this.activeDatabases.add(e),!this.hasAccessToDatabase(e))throw o.error("Access denied when ensuring database connection",{key:e}),new Error(`Access denied: Database '${e}' is not accessible.`);if(this.connections[e])try{if(this.connections[e].isConnectionOpen())return o.trace("Database connection already active",{key:e}),this.connections[e];o.warn("Database connection inactive, cleaning up",{key:e});try{await this.connections[e].close().catch(()=>{})}catch(t){o.debug("Error during connection cleanup",{key:e,error:t.message})}delete this.connections[e]}catch(t){o.error("Error checking connection status",{key:e,error:t.message}),delete this.connections[e]}return o.debug("Creating new database connection",{key:e}),await this.getLazyLoading(e)}static 
getConnections(){return o.trace("Getting all connections",{count:Object.keys(this.connections).length}),Object.assign({},this.connections)}static async openAllExisting(e){o.info("Opening all existing databases",{databaseKeys:e});const t=[];for(const a of e)try{o.debug("Opening database",{key:a});const s=this.getSchema(a);if(!s)throw new Error(`Invalid database key: ${a}. Schema not found.`);const r=await T.createOrOpen({config:s},!1);await r.execute("PRAGMA integrity_check"),this.connections[a]=r,o.info("Database opened successfully",{key:a,schemaName:s.database_name})}catch(s){const r=s instanceof Error?s:new Error(String(s));o.error("Failed to open database",{key:a,error:r.message}),t.push({key:a,error:r})}if(t.length>0){const a=t.map(s=>` - ${s.key}: ${s.error.message}`).join(`
7
+ ${s}`)}}static async cleanupUnusedConnections(e){o.debug("Cleaning up unused connections",{previousRoles:e});const t=new Set;t.add("core");for(const r of e){const i=this.roleRegistry[r];i&&(i.requiredDatabases.forEach(n=>t.add(n)),i.optionalDatabases&&i.optionalDatabases.forEach(n=>t.add(n)))}const a=new Set(this.getCurrentUserDatabases()),s=Array.from(t).filter(r=>!a.has(r));if(o.debug("Databases to cleanup",{databasesToClose:s,previousDatabaseCount:t.size,currentDatabaseCount:a.size}),s.length>0){for(const r of s)if(this.connections[r])try{o.debug("Closing unused database connection",{dbKey:r}),await this.connections[r].close(),delete this.connections[r],o.info("Database connection closed",{dbKey:r})}catch(i){o.error("Error closing database connection during cleanup",{dbKey:r,error:i.message})}o.info("Cleanup completed",{closedConnections:s})}else o.debug("No connections to cleanup")}static hasAccessToDatabase(e){const t=this.getSchema(e)!==void 0;return o.trace("Checking database access",{dbKey:e,hasAccess:t}),t}static get(e){if(o.trace("Getting database connection",{key:e}),!this.hasAccessToDatabase(e))throw o.error("Access denied to database",{key:e}),new Error(`Access denied: Database '${e}' is not accessible.`);const t=this.connections[e];if(!t)throw o.error("Database not connected",{key:e,availableConnections:Object.keys(this.connections)}),new Error(`Database '${e}' is not connected. 
Please ensure it's initialized.`);return o.trace("Database connection retrieved successfully",{key:e}),t}static onDatabaseReconnect(e,t){o.debug("Registering database reconnect listener",{schemaName:e}),this.eventListeners.has(e)||this.eventListeners.set(e,[]),this.eventListeners.get(e).push(t),o.trace("Database reconnect listener registered",{schemaName:e,listenerCount:this.eventListeners.get(e).length})}static offDatabaseReconnect(e,t){o.debug("Removing database reconnect listener",{schemaName:e});const a=this.eventListeners.get(e);if(a){const s=a.indexOf(t);s>-1?(a.splice(s,1),o.trace("Database reconnect listener removed",{schemaName:e,remainingListeners:a.length})):o.warn("Database reconnect listener not found for removal",{schemaName:e})}else o.warn("No listeners found for schema",{schemaName:e})}static notifyDatabaseReconnect(e,t){const a=this.eventListeners.get(e);a?(o.debug("Notifying database reconnect listeners",{schemaName:e,listenerCount:a.length}),a.forEach((s,r)=>{try{s(t),o.trace("Database reconnect listener notified",{schemaName:e,listenerIndex:r})}catch(i){o.error("Error in database reconnect listener",{schemaName:e,listenerIndex:r,error:i.message})}})):o.trace("No listeners to notify for database reconnection",{schemaName:e})}static async closeAllConnections(){if(this.isClosingConnections){o.debug("Already closing connections, skipping");return}o.info("Closing all database connections",{connectionCount:Object.keys(this.connections).length}),this.isClosingConnections=!0;try{const e=Object.keys(this.connections);e.forEach(a=>this.activeDatabases.add(a)),o.debug("Saving active database list for potential reconnection",{activeDatabases:e});const t=Object.entries(this.connections).map(async([a,s])=>{try{o.debug("Closing database connection",{dbKey:a}),await s.close(),o.trace("Database connection closed",{dbKey:a})}catch(r){o.error("Error closing database connection",{dbKey:a,error:r.message})}});await Promise.all(t),this.connections={},o.info("All 
database connections closed successfully")}finally{this.isClosingConnections=!1}}static async reopenConnections(){o.info("Reopening database connections");try{await this.initializeCoreConnection(),this.currentUserRoles.length>0&&await this.initializeUserRoleConnections();const e=Array.from(this.activeDatabases);if(o.debug("Reinitializing previously active databases",{activeDatabases:e}),e.length>0)for(const t of e)if(this.connections[t])this.connections[t]&&(o.trace("Database already connected, notifying listeners",{dbKey:t}),this.notifyDatabaseReconnect(t,this.connections[t]));else{const a=this.getSchema(t);if(a)try{o.debug("Reopening database connection",{dbKey:t});const s=await I.createOrOpen({config:a},!1);await s.connect(),this.connections[t]=s,this.notifyDatabaseReconnect(t,s),o.info("Database connection reopened",{dbKey:t})}catch(s){o.error("Failed to reopen database connection",{dbKey:t,error:s.message})}else o.warn("Schema not found for previously active database",{dbKey:t})}o.info("Database connections reopened successfully")}catch(e){throw o.error("Failed to reopen database connections",{error:e.message}),e}}static async ensureDatabaseConnection(e){if(o.debug("Ensuring database connection",{key:e}),this.activeDatabases.add(e),!this.hasAccessToDatabase(e))throw o.error("Access denied when ensuring database connection",{key:e}),new Error(`Access denied: Database '${e}' is not accessible.`);if(this.connections[e])try{if(this.connections[e].isConnectionOpen())return o.trace("Database connection already active",{key:e}),this.connections[e];o.warn("Database connection inactive, cleaning up",{key:e});try{await this.connections[e].close().catch(()=>{})}catch(t){o.debug("Error during connection cleanup",{key:e,error:t.message})}delete this.connections[e]}catch(t){o.error("Error checking connection status",{key:e,error:t.message}),delete this.connections[e]}return o.debug("Creating new database connection",{key:e}),await this.getLazyLoading(e)}static 
getConnections(){return o.trace("Getting all connections",{count:Object.keys(this.connections).length}),Object.assign({},this.connections)}static async openAllExisting(e){o.info("Opening all existing databases",{databaseKeys:e});const t=[];for(const a of e)try{o.debug("Opening database",{key:a});const s=this.getSchema(a);if(!s)throw new Error(`Invalid database key: ${a}. Schema not found.`);const r=await I.createOrOpen({config:s},!1);await r.execute("PRAGMA integrity_check"),this.connections[a]=r,o.info("Database opened successfully",{key:a,schemaName:s.database_name})}catch(s){const r=s instanceof Error?s:new Error(String(s));o.error("Failed to open database",{key:a,error:r.message}),t.push({key:a,error:r})}if(t.length>0){const a=t.map(s=>` - ${s.key}: ${s.error.message}`).join(`
8
8
  `);throw o.error("Failed to open one or more databases",{failedDatabases:t.map(s=>s.key),errorSummary:a}),new Error(`Failed to open one or more databases:
9
- ${a}`)}return this.isInitialized=!0,o.info("All databases opened successfully",{count:e.length}),!0}static async initLazySchema(e){o.debug("Initializing databases lazily",{databaseKeys:e});const t=e.filter(n=>!this.getSchema(n));if(t.length>0)throw o.error("Invalid database keys found",{invalidKeys:t}),new Error(`Invalid database keys: ${t.join(", ")}. Schemas not found.`);const a=e.filter(n=>!this.connections[n]).length,s=Object.keys(this.connections).length;if(s+a>this.maxConnections)throw o.error("Would exceed maximum connections",{currentConnections:s,newConnections:a,maxConnections:this.maxConnections}),new Error(`Cannot initialize ${a} new connections. Would exceed maximum of ${this.maxConnections} connections. Current: ${s}`);const r=[],i=e.map(async n=>{if(this.connections[n]){o.trace("Database already initialized",{key:n});return}try{o.debug("Initializing database",{key:n});const h=this.getSchema(n),m=await T.createOrOpen({config:h},!1);await m.execute("PRAGMA integrity_check"),this.connections[n]=m,o.info("Database initialized successfully",{key:n,schemaName:h.database_name})}catch(h){const m=h instanceof Error?h:new Error(String(h));o.error("Failed to initialize database",{key:n,error:m.message}),r.push({key:n,error:m})}});if(await Promise.all(i),r.length>0){const n=r.map(h=>` - ${h.key}: ${h.error.message}`).join(`
9
+ ${a}`)}return this.isInitialized=!0,o.info("All databases opened successfully",{count:e.length}),!0}static async initLazySchema(e){o.debug("Initializing databases lazily",{databaseKeys:e});const t=e.filter(n=>!this.getSchema(n));if(t.length>0)throw o.error("Invalid database keys found",{invalidKeys:t}),new Error(`Invalid database keys: ${t.join(", ")}. Schemas not found.`);const a=e.filter(n=>!this.connections[n]).length,s=Object.keys(this.connections).length;if(s+a>this.maxConnections)throw o.error("Would exceed maximum connections",{currentConnections:s,newConnections:a,maxConnections:this.maxConnections}),new Error(`Cannot initialize ${a} new connections. Would exceed maximum of ${this.maxConnections} connections. Current: ${s}`);const r=[],i=e.map(async n=>{if(this.connections[n]){o.trace("Database already initialized",{key:n});return}try{o.debug("Initializing database",{key:n});const h=this.getSchema(n),m=await I.createOrOpen({config:h},!1);await m.execute("PRAGMA integrity_check"),this.connections[n]=m,o.info("Database initialized successfully",{key:n,schemaName:h.database_name})}catch(h){const m=h instanceof Error?h:new Error(String(h));o.error("Failed to initialize database",{key:n,error:m.message}),r.push({key:n,error:m})}});if(await Promise.all(i),r.length>0){const n=r.map(h=>` - ${h.key}: ${h.error.message}`).join(`
10
10
  `);throw o.error("Failed to initialize one or more databases",{failedDatabases:r.map(h=>h.key),errorSummary:n}),new Error(`Failed to initialize one or more databases:
11
- ${n}`)}return Object.keys(this.connections).length>0&&(this.isInitialized=!0,o.info("Lazy schema initialization completed",{initializedCount:e.length-r.length})),!0}static async initializeAll(){if(this.isInitialized){o.debug("Database manager already initialized");return}const e=this.getAvailableSchemas();o.info("Initializing all available databases",{schemaCount:e.length,schemas:e});const t=[],a=e.map(async s=>{try{o.debug("Initializing schema",{key:s});const r=this.getSchema(s),i=await T.createOrOpen({config:r},!1);this.connections[s]=i,o.info("Schema initialized successfully",{key:s,schemaName:r.database_name})}catch(r){const i=r instanceof Error?r:new Error(String(r));o.error("Failed to initialize schema",{key:s,error:i.message}),t.push({key:s,error:i})}});if(await Promise.all(a),t.length>0){this.isInitialized=!1;const s=t.map(r=>` - ${r.key}: ${r.error.message}`).join(`
11
+ ${n}`)}return Object.keys(this.connections).length>0&&(this.isInitialized=!0,o.info("Lazy schema initialization completed",{initializedCount:e.length-r.length})),!0}static async initializeAll(){if(this.isInitialized){o.debug("Database manager already initialized");return}const e=this.getAvailableSchemas();o.info("Initializing all available databases",{schemaCount:e.length,schemas:e});const t=[],a=e.map(async s=>{try{o.debug("Initializing schema",{key:s});const r=this.getSchema(s),i=await I.createOrOpen({config:r},!1);this.connections[s]=i,o.info("Schema initialized successfully",{key:s,schemaName:r.database_name})}catch(r){const i=r instanceof Error?r:new Error(String(r));o.error("Failed to initialize schema",{key:s,error:i.message}),t.push({key:s,error:i})}});if(await Promise.all(a),t.length>0){this.isInitialized=!1;const s=t.map(r=>` - ${r.key}: ${r.error.message}`).join(`
12
12
  `);throw o.error("Failed to initialize one or more databases",{failedDatabases:t.map(r=>r.key),errorSummary:s}),new Error(`Failed to initialize one or more databases:
13
- ${s}`)}this.isInitialized=!0,o.info("All databases initialized successfully",{totalSchemas:e.length})}static async getLazyLoading(e){if(o.debug("Getting database with lazy loading",{key:e}),this.activeDatabases.add(e),!this.hasAccessToDatabase(e))throw o.error("Access denied for lazy loading",{key:e}),new Error(`Access denied: Database '${e}' is not accessible.`);if(!this.connections[e]){const t=this.getSchema(e);if(!t)throw o.error("Schema not found for lazy loading",{key:e}),new Error(`Invalid database key: ${e}. Schema not found.`);if(Object.keys(this.connections).length>=this.maxConnections)throw o.error("Maximum connections reached",{currentConnections:Object.keys(this.connections).length,maxConnections:this.maxConnections}),new Error("Maximum number of database connections reached");o.debug("Creating new connection for lazy loading",{key:e,schemaName:t.database_name});const a=await T.createOrOpen({config:t},!1);await a.connect(),this.connections[e]=a,o.info("Database connection created via lazy loading",{key:e})}return this.isInitialized=!0,this.connections[e]}static async executeCrossSchemaTransaction(e,t){o.debug("Executing cross-schema transaction",{schemas:e});for(const s of e)if(!this.hasAccessToDatabase(s))throw o.error("Access denied for cross-schema transaction",{key:s,schemas:e}),new Error(`Access denied: Database '${s}' is not accessible.`);const a=e.reduce((s,r)=>(s[r]=this.get(r),s),{});o.debug("Starting cross-schema transaction",{schemas:e});try{await Promise.all(Object.values(a).map(s=>s.beginTransaction())),o.trace("All transactions started successfully"),await t(a),o.trace("Transaction callback completed successfully"),await Promise.all(Object.values(a).map(s=>s.commitTransaction())),o.info("Cross-schema transaction completed successfully",{schemas:e})}catch(s){throw o.error("Cross-schema transaction failed, rolling back",{schemas:e,error:s.message}),await Promise.all(Object.values(a).map(r=>r.rollbackTransaction())),o.debug("Cross-schema 
transaction rolled back"),s}}static async importDataToTable(e,t,a,s={}){if(o.debug("Importing data to table",{databaseKey:e,tableName:t,recordCount:a.length,options:s}),!this.hasAccessToDatabase(e))throw o.error("Access denied for data import",{databaseKey:e,tableName:t}),new Error(`Access denied: Database '${e}' is not accessible.`);const r=this.get(e);try{const i=await r.importData(Object.assign({tableName:t,data:a},s));return o.info("Data import completed successfully",{databaseKey:e,tableName:t,importedRows:i.successRows,skippedRows:i.errorRows}),i}catch(i){throw o.error("Data import failed",{databaseKey:e,tableName:t,error:i.message}),i}}static async importDataWithMapping(e,t,a,s,r={}){if(o.debug("Importing data with column mapping",{databaseKey:e,tableName:t,recordCount:a.length,mappingCount:s.length,options:r}),!this.hasAccessToDatabase(e))throw o.error("Access denied for data import with mapping",{databaseKey:e,tableName:t}),new Error(`Access denied: Database '${e}' is not accessible.`);const i=this.get(e);try{const n=await i.importDataWithMapping(t,a,s,r);return o.info("Data import with mapping completed successfully",{databaseKey:e,tableName:t,importedRows:n.successRows,skippedRows:n.errorRows}),n}catch(n){throw o.error("Data import with mapping failed",{databaseKey:e,tableName:t,error:n.message}),n}}static async bulkImport(e){const t=Date.now();o.info("Starting bulk import",{configCount:e.length,configs:e.map(s=>({databaseKey:s.databaseKey,tableName:s.tableName,recordCount:s.data.length}))});const a={totalDatabases:e.length,successDatabases:0,results:{},errors:{},executionTime:0};for(const s of e){const r=`${s.databaseKey}.${s.tableName}`;o.debug("Processing bulk import config",{configKey:r});try{if(!this.hasAccessToDatabase(s.databaseKey))throw new Error(`Access denied: Database '${s.databaseKey}' is not accessible.`);const i=this.get(s.databaseKey);let n;s.columnMappings?(o.trace("Using column mappings for import",{configKey:r}),n=await 
i.importDataWithMapping(s.tableName,s.data,s.columnMappings,s.options)):(o.trace("Using direct import",{configKey:r}),n=await i.importData(Object.assign({tableName:s.tableName,data:s.data},s.options))),a.results[r]=n,a.successDatabases++,o.info("Bulk import config completed successfully",{configKey:r,importedRows:n.successRows,skippedRows:n.errorRows})}catch(i){const n=i instanceof Error?i:new Error(String(i));o.error("Bulk import config failed",{configKey:r,error:n.message}),a.errors[r]=n}}return a.executionTime=Date.now()-t,o.info("Bulk import completed",{totalConfigs:a.totalDatabases,successfulConfigs:a.successDatabases,failedConfigs:Object.keys(a.errors).length,executionTimeMs:a.executionTime}),a}static async importFromCSV(e,t,a,s={}){if(o.debug("Importing from CSV",{databaseKey:e,tableName:t,csvSize:a.length,options:s}),!this.hasAccessToDatabase(e))throw o.error("Access denied for CSV import",{databaseKey:e,tableName:t}),new Error(`Access denied: Database '${e}' is not accessible.`);const r=this.get(e);try{const i=await r.importFromCSV(t,a,s);return o.info("CSV import completed successfully",{databaseKey:e,tableName:t,importedRows:i.successRows,skippedRows:i.errorRows}),i}catch(i){throw o.error("CSV import failed",{databaseKey:e,tableName:t,error:i.message}),i}}static getConnectionCount(){const e=Object.keys(this.connections).length;return o.trace("Getting connection count",{count:e}),e}static listConnections(){const e=Object.keys(this.connections);return o.trace("Listing connections",{connections:e}),e}static async closeConnection(e){o.debug("Closing specific connection",{dbKey:e});const t=this.connections[e];if(t)try{await t.disconnect(),delete this.connections[e],o.info("Database connection closed successfully",{dbKey:e})}catch(a){throw o.error("Error closing database connection",{dbKey:e,error:a.message}),a}else o.warn("Attempted to close non-existent connection",{dbKey:e})}static async closeAll(){o.info("Closing all connections and resetting state"),await 
this.closeAllConnections(),this.currentUserRoles=[],this.currentRole=null,this.isInitialized=!1,this.activeDatabases.clear(),this.eventListeners.clear(),this.isClosingConnections=!1,o.info("All connections closed and state reset successfully")}static async logout(){o.info("Logging out user",{currentUserRoles:this.currentUserRoles});const e=Object.keys(this.connections).filter(t=>t!=="core");o.debug("Closing role-specific connections",{connectionsToClose:e});for(const t of e)try{await this.connections[t].close(),delete this.connections[t],o.debug("Role-specific connection closed",{dbKey:t})}catch(a){o.error("Error closing connection during logout",{dbKey:t,error:a.message})}this.currentUserRoles=[],this.currentRole=null,o.info("User logout completed successfully",{closedConnections:e.length})}}v.maxConnections=10,v.connections={},v.isInitialized=!1,v.roleRegistry={},v.currentRole=null,v.currentUserRoles=[],v.activeDatabases=new Set,v.isClosingConnections=!1,v.schemaConfigurations={},v.schemaManager=null,v.eventListeners=new Map;const l=D(A.BASE_SERVICE);class B{constructor(e,t){this.dao=null,this.isOpened=!1,this.isInitialized=!1,this.errorHandlers=new Map,this.eventListeners=new Map,this.primaryKeyFields=["id"],this.cache=new Map,this.schemaName=e,this.tableName=t||e,l.debug("Creating BaseService instance",{schemaName:this.schemaName,tableName:this.tableName,primaryKeyFields:this.primaryKeyFields}),this.reconnectHandler=a=>{l.info("Database reconnected for service",{schemaName:this.schemaName,tableName:this.tableName}),this.dao=a,this._emit("daoReconnected",{schemaName:this.schemaName})},v.onDatabaseReconnect(e,this.reconnectHandler),this.bindMethods(),l.trace("BaseService instance created successfully",{schemaName:this.schemaName,tableName:this.tableName})}bindMethods(){l.trace("Binding service methods",{schemaName:this.schemaName,tableName:this.tableName}),Object.getOwnPropertyNames(Object.getPrototypeOf(this)).forEach(e=>{typeof 
this[e]=="function"&&e!=="constructor"&&(this[e]=this[e].bind(this))})}setPrimaryKeyFields(e){return l.debug("Setting primary key fields",{schemaName:this.schemaName,tableName:this.tableName,previousFields:this.primaryKeyFields,newFields:e}),this.primaryKeyFields=e,this}async init(){l.info("Initializing BaseService",{schemaName:this.schemaName,tableName:this.tableName,isInitialized:this.isInitialized});try{if(this.isInitialized)return l.debug("Service already initialized, skipping",{schemaName:this.schemaName}),this;if(l.debug("Getting DAO from DatabaseManager",{schemaName:this.schemaName}),this.dao=await v.getLazyLoading(this.schemaName),!this.dao){const e=`Failed to initialize DAO for schema: ${this.schemaName}`;throw l.error(e,{schemaName:this.schemaName}),new Error(e)}return this.dao.isConnectionOpen()||(l.debug("DAO connection not open, connecting",{schemaName:this.schemaName}),await this.dao.connect()),this.isOpened=!0,this.isInitialized=!0,l.info("BaseService initialized successfully",{schemaName:this.schemaName,tableName:this.tableName,isOpened:this.isOpened,isInitialized:this.isInitialized}),this._emit("initialized",{schemaName:this.schemaName}),this}catch(e){throw l.error("Error initializing BaseService",{schemaName:this.schemaName,tableName:this.tableName,error:e.message}),this._handleError("INIT_ERROR",e),e}}async upsert(e,t){l.debug("Starting upsert operation",{schemaName:this.schemaName,tableName:this.tableName,hasData:!!e,dataKeys:e?Object.keys(e):[],searchFields:t||this.primaryKeyFields}),await this._ensureInitialized(),await this.ensureValidConnection();try{this._validateData(e);const a=t||this.primaryKeyFields;l.trace("Building conditions for existence check",{schemaName:this.schemaName,tableName:this.tableName,fieldsToCheck:a});const s={};let r=!0;for(const n of a){const h=e[n];if(h!=null)s[n]=h;else{r=!1,l.trace("Missing required field for upsert check",{schemaName:this.schemaName,tableName:this.tableName,field:n});break}}if(!r)return 
l.debug("Missing required fields, performing insert",{schemaName:this.schemaName,tableName:this.tableName,fieldsToCheck:a}),await this.create(e);l.trace("Checking if record exists",{schemaName:this.schemaName,tableName:this.tableName,conditions:s});const i=await this.findFirst(s);if(i){const n=i[this.primaryKeyFields[0]];l.debug("Record exists, performing update",{schemaName:this.schemaName,tableName:this.tableName,primaryKeyField:this.primaryKeyFields[0],primaryKeyValue:n});const h=await this.update(n,e);return l.info("Upsert completed (update)",{schemaName:this.schemaName,tableName:this.tableName,operation:"update",primaryKeyValue:n}),this._emit("dataUpserted",{operation:"upsert",action:"update",data:h}),h}else{l.debug("Record does not exist, performing insert",{schemaName:this.schemaName,tableName:this.tableName});const n=await this.create(e);return l.info("Upsert completed (insert)",{schemaName:this.schemaName,tableName:this.tableName,operation:"insert"}),this._emit("dataUpserted",{operation:"upsert",action:"insert",data:n}),n}}catch(a){throw l.error("Error during upsert operation",{schemaName:this.schemaName,tableName:this.tableName,error:a.message}),this._handleError("UPSERT_ERROR",a),a}}async create(e){l.debug("Creating new record",{schemaName:this.schemaName,tableName:this.tableName,hasData:!!e,dataKeys:e?Object.keys(e):[]}),await this._ensureInitialized(),await this.ensureValidConnection();try{this._validateData(e),l.trace("Building data table for insert",{schemaName:this.schemaName,tableName:this.tableName});const t=this.buildDataTable(e),a=await this.dao.insert(t);if(a.rowsAffected===0){const i="Insert operation failed - no rows affected";throw l.error(i,{schemaName:this.schemaName,tableName:this.tableName,result:a}),new Error(i)}l.debug("Insert operation successful",{schemaName:this.schemaName,tableName:this.tableName,rowsAffected:a.rowsAffected,lastInsertRowId:a.lastInsertRowId});let s=null;const 
r=e[this.primaryKeyFields[0]];try{r!=null?(l.trace("Retrieving created record by primary key",{schemaName:this.schemaName,tableName:this.tableName,primaryKeyField:this.primaryKeyFields[0],primaryKeyValue:r}),s=await this.findById(r)):a.lastInsertRowId&&(l.trace("Retrieving created record by last insert ID",{schemaName:this.schemaName,tableName:this.tableName,lastInsertRowId:a.lastInsertRowId}),s=await this.findById(a.lastInsertRowId))}catch(i){l.warn("Could not retrieve created record",{schemaName:this.schemaName,tableName:this.tableName,findError:i.message})}return s||(l.debug("Using original data as created record",{schemaName:this.schemaName,tableName:this.tableName}),s=e),l.info("Record created successfully",{schemaName:this.schemaName,tableName:this.tableName,recordRetrieved:!!s}),this._emit("dataCreated",{operation:"create",data:s}),s}catch(t){throw l.error("Error creating record",{schemaName:this.schemaName,tableName:this.tableName,error:t.message}),this._handleError("CREATE_ERROR",t),t}}async update(e,t){l.debug("Updating record",{schemaName:this.schemaName,tableName:this.tableName,id:e,hasData:!!t,dataKeys:t?Object.keys(t):[]}),await this._ensureInitialized();try{if(!e){const i="ID is required for update";throw l.error(i,{schemaName:this.schemaName,tableName:this.tableName}),new Error(i)}this._validateData(t);const a=Object.assign(Object.assign({},t),{[this.primaryKeyFields[0]]:e});l.trace("Building update query table",{schemaName:this.schemaName,tableName:this.tableName,id:e});const s=this.buildDataTable(a);await this.dao.update(s),l.debug("Update operation completed",{schemaName:this.schemaName,tableName:this.tableName,id:e});const r=await this.findById(e);return l.info("Record updated successfully",{schemaName:this.schemaName,tableName:this.tableName,id:e,recordFound:!!r}),this._emit("dataUpdated",{operation:"update",id:e,data:r}),r}catch(a){throw l.error("Error updating 
record",{schemaName:this.schemaName,tableName:this.tableName,id:e,error:a.message}),this._handleError("UPDATE_ERROR",a),a}}async delete(e){l.debug("Deleting record",{schemaName:this.schemaName,tableName:this.tableName,id:e}),await this._ensureInitialized();try{if(!e){const r="ID is required for delete";throw l.error(r,{schemaName:this.schemaName,tableName:this.tableName}),new Error(r)}const t={name:this.tableName,cols:[],wheres:[{name:this.primaryKeyFields[0],value:e}]};l.trace("Executing delete operation",{schemaName:this.schemaName,tableName:this.tableName,id:e,primaryKeyField:this.primaryKeyFields[0]});const a=await this.dao.delete(t),s=a.rowsAffected>0;return s?(l.info("Record deleted successfully",{schemaName:this.schemaName,tableName:this.tableName,id:e,rowsAffected:a.rowsAffected}),this._emit("dataDeleted",{operation:"delete",id:e})):l.warn("Delete operation completed but no rows affected",{schemaName:this.schemaName,tableName:this.tableName,id:e}),s}catch(t){throw l.error("Error deleting record",{schemaName:this.schemaName,tableName:this.tableName,id:e,error:t.message}),this._handleError("DELETE_ERROR",t),t}}async findById(e){l.debug("Finding record by ID",{schemaName:this.schemaName,tableName:this.tableName,id:e}),await this._ensureInitialized();try{if(!e){const i="ID is required";throw l.error(i,{schemaName:this.schemaName,tableName:this.tableName}),new Error(i)}const t={[this.primaryKeyFields[0]]:e};l.trace("Building select query",{schemaName:this.schemaName,tableName:this.tableName,conditions:t});const a=this.buildSelectTable(t),s=await this.dao.select(a),r=Object.keys(s).length>0?s:null;return l.debug("Find by ID completed",{schemaName:this.schemaName,tableName:this.tableName,id:e,recordFound:!!r}),this._emit("dataFetched",{operation:"findById",id:e}),r}catch(t){throw l.error("Error finding record by ID",{schemaName:this.schemaName,tableName:this.tableName,id:e,error:t.message}),this._handleError("FIND_BY_ID_ERROR",t),t}}async 
findFirst(e={}){l.debug("Finding first record",{schemaName:this.schemaName,tableName:this.tableName,conditionsCount:Object.keys(e).length,conditions:e}),await this._ensureInitialized();try{l.trace("Building select query for findFirst",{schemaName:this.schemaName,tableName:this.tableName});const t=this.buildSelectTable(e),a=await this.dao.select(t),s=Object.keys(a).length>0?a:null;return l.debug("Find first completed",{schemaName:this.schemaName,tableName:this.tableName,recordFound:!!s}),this._emit("dataFetched",{operation:"findFirst"}),s}catch(t){throw l.error("Error finding first record",{schemaName:this.schemaName,tableName:this.tableName,conditions:e,error:t.message}),this._handleError("FIND_FIRST_ERROR",t),t}}async findAll(e={},t={}){l.debug("Finding all records",{schemaName:this.schemaName,tableName:this.tableName,conditionsCount:Object.keys(e).length,hasLimit:!!t.limit,hasOffset:!!t.offset,hasOrderBy:!!(t.orderBy&&t.orderBy.length>0),limit:t.limit,offset:t.offset}),await this._ensureInitialized();try{const a=[...this.buildWhereFromObject(e),...t.where||[]];l.trace("Building query for findAll",{schemaName:this.schemaName,tableName:this.tableName,totalWheres:a.length,hasColumns:!!(t.columns&&t.columns.length>0)});const s={name:this.tableName,cols:t.columns?t.columns.map(i=>({name:i})):[],wheres:a,orderbys:t.orderBy,limitOffset:{limit:t.limit,offset:t.offset}},r=await this.dao.selectAll(s);return l.info("Find all completed",{schemaName:this.schemaName,tableName:this.tableName,recordsFound:r.length,conditionsCount:Object.keys(e).length}),this._emit("dataFetched",{operation:"findAll",count:r.length}),r}catch(a){throw l.error("Error finding all records",{schemaName:this.schemaName,tableName:this.tableName,conditions:e,options:t,error:a.message}),this._handleError("FIND_ALL_ERROR",a),a}}async count(e){l.debug("Counting records",{schemaName:this.schemaName,tableName:this.tableName,hasWhere:!!e,whereType:e?Array.isArray(e)?"array":"object":"none"}),await 
this._ensureInitialized();try{let t=[];Array.isArray(e)?(t=e,l.trace("Using array where conditions",{schemaName:this.schemaName,whereCount:t.length})):e&&typeof e=="object"&&(t=this.buildWhereFromObject(e),l.trace("Built where conditions from object",{schemaName:this.schemaName,whereCount:t.length}));const a={name:this.tableName,cols:[{name:"COUNT(*) as count"}],wheres:t},s=(await this.dao.select(a)).count||0;return l.debug("Count completed",{schemaName:this.schemaName,tableName:this.tableName,count:s}),s}catch(t){throw l.error("Error counting records",{schemaName:this.schemaName,tableName:this.tableName,where:e,error:t.message}),this._handleError("COUNT_ERROR",t),t}}async exists(e){l.debug("Checking if record exists",{schemaName:this.schemaName,tableName:this.tableName,id:e});const t=await this.findById(e)!==null;return l.debug("Existence check completed",{schemaName:this.schemaName,tableName:this.tableName,id:e,exists:t}),t}async truncate(){l.warn("Truncating table - this will delete all data",{schemaName:this.schemaName,tableName:this.tableName}),await this._ensureInitialized();try{l.debug("Executing truncate operations",{schemaName:this.schemaName,tableName:this.tableName}),await this.dao.execute(`DELETE FROM ${this.tableName}`),await this.dao.execute(`DELETE FROM sqlite_sequence WHERE name='${this.tableName}'`),l.info("Table truncated successfully",{schemaName:this.schemaName,tableName:this.tableName}),this._emit("tableTruncated",{tableName:this.tableName})}catch(e){throw l.error("Error truncating table",{schemaName:this.schemaName,tableName:this.tableName,error:e.message}),this._handleError("TRUNCATE_ERROR",e),e}}async bulkUpsert(e,t,a=!0){l.info("Starting bulk upsert operation",{schemaName:this.schemaName,tableName:this.tableName,itemsCount:e.length,searchFields:t||this.primaryKeyFields,useTransaction:a}),await this._ensureInitialized(),await this.ensureValidConnection();try{if(!Array.isArray(e)||e.length===0){const n="Data must be a non-empty array";throw 
l.error(n,{schemaName:this.schemaName,tableName:this.tableName,dataType:typeof e,dataLength:Array.isArray(e)?e.length:"N/A"}),new Error(n)}const s={created:[],updated:[],total:e.length,errors:[]},r=t||this.primaryKeyFields;l.debug("Preparing bulk upsert",{schemaName:this.schemaName,tableName:this.tableName,fieldsToCheck:r,itemsCount:e.length});const i=async()=>{for(let n=0;n<e.length;n++){const h=e[n];try{n%100===0&&l.trace("Bulk upsert progress",{schemaName:this.schemaName,tableName:this.tableName,processed:n,total:e.length,created:s.created.length,updated:s.updated.length,errors:s.errors.length}),this._validateData(h);const m={};let b=!0;for(const d of r){const E=h[d];if(E!=null)m[d]=E;else{b=!1;break}}if(!b){l.trace("Missing required fields for item, performing insert",{schemaName:this.schemaName,tableName:this.tableName,index:n,fieldsToCheck:r});const d=this.buildDataTable(h);await this.dao.insert(d),s.created.push(h);continue}const N=await this.findFirst(m);if(N){const d=N[this.primaryKeyFields[0]];l.trace("Record exists, performing update",{schemaName:this.schemaName,tableName:this.tableName,index:n,primaryKeyValue:d});const E=Object.assign(Object.assign({},h),{[this.primaryKeyFields[0]]:d}),y=this.buildDataTable(E);await this.dao.update(y);const O=await this.findById(d);O&&s.updated.push(O)}else{l.trace("Record does not exist, performing insert",{schemaName:this.schemaName,tableName:this.tableName,index:n});const d=this.buildDataTable(h);await this.dao.insert(d),s.created.push(h)}}catch(m){l.warn("Error processing item in bulk upsert",{schemaName:this.schemaName,tableName:this.tableName,index:n,error:m.message}),s.errors.push({index:n,data:h,error:m.message})}}};return a?(l.debug("Executing bulk upsert in transaction",{schemaName:this.schemaName,tableName:this.tableName,itemsCount:e.length}),await this.executeTransaction(i)):(l.debug("Executing bulk upsert without transaction",{schemaName:this.schemaName,tableName:this.tableName,itemsCount:e.length}),await 
i()),l.info("Bulk upsert completed",{schemaName:this.schemaName,tableName:this.tableName,total:s.total,created:s.created.length,updated:s.updated.length,errors:s.errors.length,successRate:`${((s.created.length+s.updated.length)/s.total*100).toFixed(2)}%`}),this._emit("dataBulkUpserted",{operation:"bulkUpsert",created:s.created.length,updated:s.updated.length,errors:s.errors.length,total:s.total}),s}catch(s){throw l.error("Error during bulk upsert operation",{schemaName:this.schemaName,tableName:this.tableName,itemsCount:e.length,error:s.message}),this._handleError("BULK_UPSERT_ERROR",s),s}}async bulkInsert(e){l.info("Starting bulk insert",{schemaName:this.schemaName,tableName:this.tableName,itemsCount:e.length}),await this._ensureInitialized();try{if(!Array.isArray(e)||e.length===0){const a="Items must be a non-empty array";throw l.error(a,{schemaName:this.schemaName,tableName:this.tableName,itemsType:typeof e,itemsLength:Array.isArray(e)?e.length:"N/A"}),new Error(a)}l.debug("Executing bulk insert operation",{schemaName:this.schemaName,tableName:this.tableName,itemsCount:e.length});const t=await this.dao.importData({tableName:this.tableName,data:e,batchSize:1e3,skipErrors:!1,validateData:!0});return l.info("Bulk insert completed",{schemaName:this.schemaName,tableName:this.tableName,totalRows:t.totalRows,successRows:t.successRows,errorRows:t.errorRows}),this._emit("dataBulkCreated",{operation:"bulkInsert",count:t.successRows}),t}catch(t){throw l.error("Error during bulk insert",{schemaName:this.schemaName,tableName:this.tableName,itemsCount:e.length,error:t.message}),this._handleError("BULK_INSERT_ERROR",t),t}}async bulkCreate(e){l.info("Starting bulk create with transaction",{schemaName:this.schemaName,tableName:this.tableName,itemsCount:e.length}),await this._ensureInitialized();try{if(!Array.isArray(e)||e.length===0){const a="Data must be a non-empty array";throw l.error(a,{schemaName:this.schemaName,tableName:this.tableName,dataType:typeof 
e,dataLength:Array.isArray(e)?e.length:"N/A"}),new Error(a)}const t=[];return l.debug("Executing bulk create in transaction",{schemaName:this.schemaName,tableName:this.tableName,itemsCount:e.length}),await this.executeTransaction(async()=>{for(let a=0;a<e.length;a++){const s=e[a];a%100===0&&l.trace("Bulk create progress",{schemaName:this.schemaName,tableName:this.tableName,processed:a,total:e.length}),this._validateData(s);const r=this.buildDataTable(s);await this.dao.insert(r),t.push(s)}}),l.info("Bulk create completed successfully",{schemaName:this.schemaName,tableName:this.tableName,recordsCreated:t.length}),this._emit("dataBulkCreated",{operation:"bulkCreate",count:t.length}),t}catch(t){throw l.error("Error during bulk create",{schemaName:this.schemaName,tableName:this.tableName,itemsCount:e.length,error:t.message}),this._handleError("BULK_CREATE_ERROR",t),t}}async executeTransaction(e){l.debug("Starting transaction",{schemaName:this.schemaName,tableName:this.tableName}),await this._ensureInitialized();try{l.trace("Beginning database transaction",{schemaName:this.schemaName}),await this.dao.beginTransaction();const t=await e();return l.trace("Committing transaction",{schemaName:this.schemaName}),await this.dao.commitTransaction(),l.info("Transaction completed successfully",{schemaName:this.schemaName,tableName:this.tableName}),this._emit("transactionCompleted",{operation:"transaction"}),t}catch(t){l.error("Transaction failed, rolling back",{schemaName:this.schemaName,tableName:this.tableName,error:t.message});try{await this.dao.rollbackTransaction(),l.debug("Transaction rollback successful",{schemaName:this.schemaName})}catch(a){l.error("Error during transaction rollback",{schemaName:this.schemaName,rollbackError:a.message}),this._handleError("ROLLBACK_ERROR",a)}throw this._handleError("TRANSACTION_ERROR",t),t}}async importFromCSV(e,t={}){l.info("Starting CSV 
import",{schemaName:this.schemaName,tableName:this.tableName,csvDataLength:e.length,delimiter:t.delimiter,hasHeader:t.hasHeader,hasMappings:!!(t.columnMappings&&t.columnMappings.length>0)}),await this._ensureInitialized();try{const a=await this.dao.importFromCSV(this.tableName,e,t);return l.info("CSV import completed",{schemaName:this.schemaName,tableName:this.tableName,totalRows:a.totalRows,successRows:a.successRows,errorRows:a.errorRows}),this._emit("dataImported",{operation:"importFromCSV",result:a}),a}catch(a){throw l.error("Error during CSV import",{schemaName:this.schemaName,tableName:this.tableName,csvDataLength:e.length,error:a.message}),this._handleError("IMPORT_CSV_ERROR",a),a}}async importDataWithMapping(e,t,a={}){l.info("Starting import with column mapping",{schemaName:this.schemaName,tableName:this.tableName,dataCount:e.length,mappingsCount:t.length}),await this._ensureInitialized();try{const s=await this.dao.importDataWithMapping(this.tableName,e,t,a);return l.info("Import with mapping completed",{schemaName:this.schemaName,tableName:this.tableName,totalRows:s.totalRows,successRows:s.successRows,errorRows:s.errorRows}),this._emit("dataImported",{operation:"importWithMapping",result:s}),s}catch(s){throw l.error("Error during import with mapping",{schemaName:this.schemaName,tableName:this.tableName,dataCount:e.length,mappingsCount:t.length,error:s.message}),this._handleError("IMPORT_MAPPING_ERROR",s),s}}buildSelectTable(e={},t={}){l.trace("Building select table query",{schemaName:this.schemaName,tableName:this.tableName,conditionsCount:Object.keys(e).length,hasOptions:Object.keys(t).length>0});const a={name:this.tableName,cols:[],wheres:[],orderbys:t.orderBy||[],limitOffset:{}};return t.columns&&t.columns.length>0&&(a.cols=t.columns.map(s=>({name:s}))),e&&Object.keys(e).length>0&&(a.wheres=Object.entries(e).map(([s,r])=>({name:s,value:r,operator:"="}))),t.limit!==void 0&&(a.limitOffset.limit=t.limit),t.offset!==void 
0&&(a.limitOffset.offset=t.offset),a}buildDataTable(e){const t=Object.entries(e).reduce((a,[s,r])=>(r!==void 0&&(a[s]=r),a),{});return l.trace("Building data table for query",{schemaName:this.schemaName,tableName:this.tableName,dataKeys:Object.keys(t),removedKeys:Object.keys(e).filter(a=>e[a]===void 0)}),this.dao.convertJsonToQueryTable(this.tableName,t,this.primaryKeyFields)}buildWhereFromObject(e){const t=Object.entries(e).filter(([a,s])=>s!==void 0).map(([a,s])=>({name:a,value:s}));return l.trace("Built where clauses from object",{schemaName:this.schemaName,originalKeys:Object.keys(e).length,filteredWheres:t.length}),t}on(e,t){return l.trace("Adding event listener",{schemaName:this.schemaName,tableName:this.tableName,event:e}),this.eventListeners.has(e)||this.eventListeners.set(e,[]),this.eventListeners.get(e).push(t),this}off(e,t){l.trace("Removing event listener",{schemaName:this.schemaName,tableName:this.tableName,event:e});const a=this.eventListeners.get(e);if(a){const s=a.indexOf(t);s>-1&&a.splice(s,1)}return this}_emit(e,t){l.trace("Emitting event",{schemaName:this.schemaName,tableName:this.tableName,event:e,hasData:!!t});const a=this.eventListeners.get(e);a&&a.forEach(s=>{try{s(t)}catch(r){l.error("Error in event handler",{schemaName:this.schemaName,tableName:this.tableName,event:e,error:r.message})}})}setErrorHandler(e,t){return l.debug("Setting error handler",{schemaName:this.schemaName,tableName:this.tableName,errorType:e}),this.errorHandlers.set(e,t),this}_handleError(e,t){l.error("Handling service error",{schemaName:this.schemaName,tableName:this.tableName,errorType:e,error:t.message});const a=this.errorHandlers.get(e);if(a)try{a(t)}catch(s){l.error("Error in error handler",{schemaName:this.schemaName,tableName:this.tableName,errorType:e,handlerError:s.message})}this._emit("error",{errorType:e,error:t})}_validateData(e){if(!e||typeof e!="object"){const t="Data must be a valid object";throw l.error("Data validation 
failed",{schemaName:this.schemaName,tableName:this.tableName,dataType:typeof e,isNull:e===null}),new Error(t)}Object.keys(e).forEach(t=>{e[t]===void 0&&(delete e[t],l.trace("Removed undefined field",{schemaName:this.schemaName,tableName:this.tableName,field:t}))})}async _ensureInitialized(){this.isInitialized||(l.debug("Service not initialized, initializing now",{schemaName:this.schemaName,tableName:this.tableName}),await this.init())}async ensureValidConnection(){var e;l.trace("Ensuring valid database connection",{schemaName:this.schemaName,tableName:this.tableName});try{!((e=this.dao)===null||e===void 0)&&e.isConnectionOpen()||(l.debug("Connection not valid, getting new connection",{schemaName:this.schemaName}),this.dao=await v.ensureDatabaseConnection(this.schemaName))}catch(t){l.warn("Error checking connection, getting new connection",{schemaName:this.schemaName,error:t.message}),this.dao=await v.ensureDatabaseConnection(this.schemaName)}}async getDatabaseInfo(){return l.trace("Getting database info",{schemaName:this.schemaName,tableName:this.tableName}),await this._ensureInitialized(),await this.dao.getDatabaseInfo()}async getTableInfo(){return l.trace("Getting table info",{schemaName:this.schemaName,tableName:this.tableName}),await this._ensureInitialized(),await this.dao.getTableInfo(this.tableName)}getStatus(){const e={schemaName:this.schemaName,tableName:this.tableName,isOpened:this.isOpened,isInitialized:this.isInitialized,hasDao:!!this.dao};return l.trace("Getting service status",e),e}async healthCheck(){l.debug("Performing health check",{schemaName:this.schemaName,tableName:this.tableName});try{await this._ensureInitialized();const e=await this.count(),t={healthy:!0,schemaName:this.schemaName,recordCount:e,timestamp:new Date().toISOString()};return l.info("Health check passed",{schemaName:this.schemaName,tableName:this.tableName,recordCount:e}),t}catch(e){const t={healthy:!1,schemaName:this.schemaName,error:e.message,timestamp:new 
Date().toISOString()};return l.error("Health check failed",{schemaName:this.schemaName,tableName:this.tableName,error:e.message}),t}}async close(){l.info("Closing BaseService",{schemaName:this.schemaName,tableName:this.tableName,isOpened:this.isOpened,isInitialized:this.isInitialized});try{return this.dao&&(await this.dao.close(),l.debug("DAO closed successfully",{schemaName:this.schemaName})),this.isOpened=!1,this.isInitialized=!1,this.eventListeners.clear(),this.errorHandlers.clear(),this.cache.clear(),l.info("BaseService closed successfully",{schemaName:this.schemaName,tableName:this.tableName}),this._emit("closed",{schemaName:this.schemaName}),!0}catch(e){throw l.error("Error closing BaseService",{schemaName:this.schemaName,tableName:this.tableName,error:e.message}),this._handleError("CLOSE_ERROR",e),e}}destroy(){l.debug("Destroying BaseService",{schemaName:this.schemaName,tableName:this.tableName}),v.offDatabaseReconnect(this.schemaName,this.reconnectHandler),this.eventListeners.clear(),this.errorHandlers.clear(),this.cache.clear(),l.trace("BaseService destroyed",{schemaName:this.schemaName,tableName:this.tableName})}async getAll(e={},t={}){return l.trace("Using getAll alias",{schemaName:this.schemaName,tableName:this.tableName}),this.findAll(e,t)}async getById(e){return l.trace("Using getById alias",{schemaName:this.schemaName,tableName:this.tableName,id:e}),this.findById(e)}async getFirst(e={}){return l.trace("Using getFirst alias",{schemaName:this.schemaName,tableName:this.tableName}),this.findFirst(e)}}const u=D(A.SERVICE_MANAGER);class L extends B{}class S{constructor(){this.services=new Map,this.serviceConfigs=new Map,this.serviceMetadata=new Map,this.eventHandlers=new Map,this.isShuttingDown=!1,this.cleanupInterval=null,u.info("ServiceManager instance created"),this.bindMethods(),this.startPeriodicCleanup()}static getInstance(){return S.instance?u.trace("Returning existing ServiceManager singleton instance"):(u.debug("Creating new ServiceManager 
singleton instance"),S.instance=new S),S.instance}static resetInstance(){u.warn("Resetting ServiceManager singleton instance"),S.instance?(S.instance.destroy(),S.instance=null,u.info("ServiceManager singleton instance reset successfully")):u.debug("No ServiceManager instance to reset")}bindMethods(){u.trace("Binding ServiceManager methods");const e=Object.getOwnPropertyNames(Object.getPrototypeOf(this));e.forEach(t=>{typeof this[t]=="function"&&t!=="constructor"&&(this[t]=this[t].bind(this))}),u.trace("ServiceManager methods bound successfully",{methodCount:e.length})}createServiceKey(e,t){const a=`${e}:${t}`;return u.trace("Created service key",{schemaName:e,tableName:t,key:a}),a}validateServiceConfig(e){var t,a;if(u.trace("Validating service config",{schemaName:e.schemaName,tableName:e.tableName}),!(!((t=e.schemaName)===null||t===void 0)&&t.trim()))throw u.error("Invalid service config: schema name missing",{config:e}),new Error("Schema name is required and cannot be empty");if(!(!((a=e.tableName)===null||a===void 0)&&a.trim()))throw u.error("Invalid service config: table name missing",{config:e}),new Error("Table name is required and cannot be empty");u.trace("Service config validation passed",{schemaName:e.schemaName,tableName:e.tableName})}registerService(e){u.debug("Registering service",{schemaName:e.schemaName,tableName:e.tableName,primaryKeyFields:e.primaryKeyFields,hasCustomServiceClass:!!e.serviceClass}),this.validateServiceConfig(e);const t=this.createServiceKey(e.schemaName,e.tableName),a={schemaName:e.schemaName.trim(),tableName:e.tableName.trim(),primaryKeyFields:e.primaryKeyFields||["id"],serviceClass:e.serviceClass||L},s=this.serviceConfigs.has(t);return this.serviceConfigs.set(t,a),s?u.info("Service configuration updated",{serviceKey:t}):u.info("Service registered successfully",{serviceKey:t}),this}registerServices(e){return u.debug("Registering multiple services",{count:e.length}),e.forEach((t,a)=>{try{this.registerService(t)}catch(s){throw 
u.error("Failed to register service in batch",{index:a,config:t,error:s.message}),s}}),u.info("Multiple services registered successfully",{count:e.length}),this}async createServiceInstance(e){var t;u.debug("Creating service instance",{schemaName:e.schemaName,tableName:e.tableName,serviceClassName:((t=e.serviceClass)===null||t===void 0?void 0:t.name)||"DefaultService",serviceClass:e.serviceClass,isDefaultService:e.serviceClass===L});const a=e.serviceClass||L;if(!a)throw u.error("ServiceClass is undefined",{config:e}),new Error("ServiceClass is undefined");u.debug("About to instantiate service",{ServiceClassConstructor:a,ServiceClassName:a.name});const s=new a(e.schemaName,e.tableName);return u.debug("Service instance created",{serviceConstructor:s.constructor.name,servicePrototype:Object.getPrototypeOf(s).constructor.name,hasFindByStoreId:typeof s.findByStoreId=="function"}),e.primaryKeyFields&&(u.trace("Setting primary key fields",{schemaName:e.schemaName,tableName:e.tableName,primaryKeyFields:e.primaryKeyFields}),s.setPrimaryKeyFields(e.primaryKeyFields)),u.info("Service instance created successfully",{schemaName:e.schemaName,tableName:e.tableName}),s}async getService(e,t){if(this.isShuttingDown)throw u.error("ServiceManager is shutting down, cannot get service",{schemaName:e,tableName:t}),new Error("ServiceManager is shutting down");const a=this.createServiceKey(e,t);u.debug("Getting service",{serviceKey:a});const s=this.serviceMetadata.get(a);if(s&&(s.lastAccessed=new Date().toISOString(),u.trace("Updated service access time",{serviceKey:a})),this.services.has(a))return u.trace("Returning existing service",{serviceKey:a}),this.services.get(a);let r=this.serviceConfigs.get(a);r||(u.debug("Creating default config for unregistered service",{serviceKey:a}),r={schemaName:e,tableName:t,primaryKeyFields:["id"],serviceClass:L},this.serviceConfigs.set(a,r));try{const i=await this.createServiceInstance(r);return 
this.services.set(a,i),this.serviceMetadata.set(a,{createdAt:new Date().toISOString(),lastAccessed:new Date().toISOString()}),this.emit("SERVICE_CREATED",{serviceKey:a,schemaName:e,tableName:t}),u.info("Service created and cached successfully",{serviceKey:a}),i}catch(i){throw u.error("Failed to create service",{serviceKey:a,error:i.message}),this.emit("SERVICE_ERROR",{serviceKey:a,schemaName:e,tableName:t,error:i}),i}}getExistingService(e,t){const a=this.createServiceKey(e,t);u.trace("Getting existing service",{serviceKey:a});const s=this.services.get(a)||null;return s?u.trace("Existing service found",{serviceKey:a}):u.trace("Existing service not found",{serviceKey:a}),s}async initializeService(e,t){const a=this.createServiceKey(e,t);u.debug("Initializing service",{serviceKey:a});try{const s=await this.getService(e,t);return await s.init(),u.info("Service initialized successfully",{serviceKey:a}),s}catch(s){throw u.error("Failed to initialize service",{serviceKey:a,error:s.message}),s}}async destroyService(e,t){const a=this.createServiceKey(e,t);u.debug("Destroying service",{serviceKey:a});const s=this.services.get(a);if(!s)return u.warn("Service not found for destruction",{serviceKey:a}),!1;try{return await s.close(),s.destroy(),this.services.delete(a),this.serviceMetadata.delete(a),this.emit("SERVICE_DESTROYED",{serviceKey:a,schemaName:e,tableName:t}),u.info("Service destroyed successfully",{serviceKey:a}),!0}catch(r){return u.error("Failed to destroy service",{serviceKey:a,error:r.message}),this.emit("SERVICE_ERROR",{serviceKey:a,schemaName:e,tableName:t,error:r}),!1}}getServicesBySchema(e){u.trace("Getting services by schema",{schemaName:e});const t=[];for(const[a,s]of this.services){const[r]=a.split(":");r===e&&t.push(s)}return u.debug("Found services for schema",{schemaName:e,count:t.length}),t}getServiceKeysBySchema(e){u.trace("Getting service keys by schema",{schemaName:e});const t=[];for(const a of 
this.services.keys()){const[s]=a.split(":");s===e&&t.push(a)}return u.debug("Found service keys for schema",{schemaName:e,keys:t}),t}async destroyServicesBySchema(e){const t=this.getServiceKeysBySchema(e);if(u.debug("Destroying services by schema",{schemaName:e,serviceKeys:t}),t.length===0){u.debug("No services found to destroy for schema",{schemaName:e});return}const a=t.map(async r=>{const[,i]=r.split(":");try{const n=await this.destroyService(e,i);return u.trace("Service destroy result",{serviceKey:r,result:n}),n}catch(n){return u.error("Error destroying service in schema cleanup",{serviceKey:r,error:n.message}),!1}}),s=(await Promise.all(a)).filter(Boolean).length;u.info("Schema services destruction completed",{schemaName:e,totalServices:t.length,successfulDestroys:s})}getAllServiceInfo(){u.trace("Getting all service info");const e=[];for(const[t,a]of this.serviceConfigs){const s=this.services.get(t),r=this.serviceMetadata.get(t);e.push({key:t,schemaName:a.schemaName,tableName:a.tableName,status:s?s.getStatus():{schemaName:a.schemaName,tableName:a.tableName,isOpened:!1,isInitialized:!1,hasDao:!1},isRegistered:!0,createdAt:(r==null?void 0:r.createdAt)||"N/A",lastAccessed:r==null?void 0:r.lastAccessed})}for(const[t,a]of this.services)if(!this.serviceConfigs.has(t)){const[s,r]=t.split(":"),i=this.serviceMetadata.get(t);e.push({key:t,schemaName:s,tableName:r,status:a.getStatus(),isRegistered:!1,createdAt:(i==null?void 0:i.createdAt)||"N/A",lastAccessed:i==null?void 0:i.lastAccessed})}return e}async healthCheck(){const e=Array.from(this.services.entries()).map(async([r,i])=>{try{const n=await i.healthCheck();return Object.assign(Object.assign({},n),{serviceKey:r})}catch(n){const[h,m]=r.split(":");return{healthy:!1,schemaName:h,error:n.message,timestamp:new Date().toISOString(),serviceKey:r}}}),t=await Promise.all(e),a=t.filter(r=>r.healthy).length,s={totalServices:t.length,healthyServices:a,unhealthyServices:t.length-a,services:t,timestamp:new 
Date().toISOString(),overallHealth:a===t.length};return this.emit("HEALTH_CHECK_COMPLETED",{serviceKey:"*",schemaName:"*",tableName:"*",data:s}),s}async executeSchemaTransaction(e,t){u.trace(`Executing schema transaction for schema ${e}`);const a=this.getServicesBySchema(e);if(a.length===0)throw u.error("No services found for schema",{schemaName:e}),new Error(`No services found for schema: ${e}`);for(const s of a)await s.init();return await a[0].executeTransaction(async()=>await t(a))}startPeriodicCleanup(){this.cleanupInterval=setInterval(()=>{this.cleanupUnusedServices()},300*1e3)}async cleanupUnusedServices(e=1800*1e3){if(this.isShuttingDown)return;const t=Date.now(),a=[];for(const[s,r]of this.serviceMetadata){if(!r.lastAccessed)continue;const i=new Date(r.lastAccessed).getTime();t-i>e&&a.push(s)}for(const s of a){const[r,i]=s.split(":");await this.destroyService(r,i)}}on(e,t){return this.eventHandlers.has(e)||this.eventHandlers.set(e,[]),this.eventHandlers.get(e).push(t),this}off(e,t){const a=this.eventHandlers.get(e);if(a){const s=a.indexOf(t);s>-1&&a.splice(s,1)}return this}emit(e,t){const a=Object.assign(Object.assign({},t),{type:e,timestamp:new Date().toISOString()}),s=this.eventHandlers.get(e);s&&s.forEach(i=>{try{i(a)}catch(n){console.error(`ServiceManager: Error in ${e} event handler:`,n)}});const r=this.eventHandlers.get("*");r&&r.forEach(i=>{try{i(a)}catch(n){console.error("ServiceManager: Error in global event handler:",n)}})}hasService(e,t){const a=this.createServiceKey(e,t);return this.services.has(a)}isRegistered(e,t){const a=this.createServiceKey(e,t);return this.serviceConfigs.has(a)}getServiceCount(){return this.services.size}getRegisteredCount(){return this.serviceConfigs.size}getSchemas(){const e=new Set;for(const t of this.services.keys()){const[a]=t.split(":");e.add(a)}return Array.from(e)}async destroy(){this.isShuttingDown=!0,this.cleanupInterval&&(clearInterval(this.cleanupInterval),this.cleanupInterval=null);const 
e=Array.from(this.services.entries()).map(async([t,a])=>{try{await a.close(),a.destroy()}catch(s){console.error(`Error destroying service ${t}:`,s)}});await Promise.all(e),this.services.clear(),this.serviceConfigs.clear(),this.serviceMetadata.clear(),this.eventHandlers.clear(),this.isShuttingDown=!1}}S.instance=null,S.getInstance();class x{constructor(e){this.tableName="",this.selectFields=["*"],this.joinClauses=[],this.whereConditions=[],this.groupByFields=[],this.havingConditions=[],this.orderByFields=[],this.limitValue=null,this.offsetValue=null,this.params=[],this.unionQueries=[],this.subQueries=[],this.cteQueries=new Map,this.dao=null,this.dao=e||null}static table(e,t){const a=new x(t);return a.tableName=e,a}static from(e,t){return x.table(e,t)}select(e){return this.selectFields=Array.isArray(e)?e:[e],this}selectRaw(e){return this.selectFields=[e],this}selectDistinct(e){const t=Array.isArray(e)?e.join(", "):e;return this.selectFields=[`DISTINCT ${t}`],this}join(e,t,a="INNER"){return this.joinClauses.push({type:a,table:e,condition:t}),this}innerJoin(e,t){return this.join(e,t,"INNER")}leftJoin(e,t){return this.join(e,t,"LEFT")}rightJoin(e,t){return this.join(e,t,"RIGHT")}fullOuterJoin(e,t){return this.join(e,t,"FULL OUTER")}where(e,t,a){if(typeof e=="object")return Object.entries(e).forEach(([i,n])=>{this.whereConditions.push({field:i,operator:"=",value:n})}),this;let s="=",r=t;return arguments.length===3&&(s=t,r=a),this.whereConditions.push({field:e,operator:s,value:r}),this}whereEquals(e,t){return this.where(e,"=",t)}whereNot(e,t){return this.where(e,"!=",t)}whereLike(e,t){return this.where(e,"LIKE",t)}whereNotLike(e,t){return this.where(e,"NOT LIKE",t)}whereIn(e,t){return this.whereConditions.push({field:e,operator:"IN",value:t}),this}whereNotIn(e,t){return this.whereConditions.push({field:e,operator:"NOT IN",value:t}),this}whereBetween(e,t,a){return this.whereConditions.push({field:e,operator:"BETWEEN",value:[t,a]}),this}whereNotBetween(e,t,a){return 
this.whereConditions.push({field:e,operator:"NOT BETWEEN",value:[t,a]}),this}whereNull(e){return this.whereConditions.push({field:e,operator:"IS NULL",value:null}),this}whereNotNull(e){return this.whereConditions.push({field:e,operator:"IS NOT NULL",value:null}),this}whereExists(e){return this.whereConditions.push({field:"",operator:"EXISTS",value:e}),this}whereNotExists(e){return this.whereConditions.push({field:"",operator:"NOT EXISTS",value:e}),this}orWhere(e,t,a){return this.where(e,t,a)}groupBy(e){return this.groupByFields=Array.isArray(e)?e:[e],this}having(e,t,a){let s="=",r=t;return arguments.length===3&&(s=t,r=a),this.havingConditions.push({field:e,operator:s,value:r}),this}havingCount(e,t,a){return this.having(`COUNT(${e})`,t,a)}orderBy(e,t="ASC"){return this.orderByFields.push(`${e} ${t}`),this}orderByDesc(e){return this.orderBy(e,"DESC")}orderByRaw(e){return this.orderByFields.push(e),this}latest(e="created_at"){return this.orderByDesc(e)}oldest(e="created_at"){return this.orderBy(e,"ASC")}limit(e){return this.limitValue=e,this}offset(e){return this.offsetValue=e,this}skip(e){return this.offset(e)}take(e){return this.limit(e)}firstRow(){return this.limit(1)}paginate(e,t){return this.limitValue=t,this.offsetValue=(e-1)*t,this}union(e){return this.unionQueries.push(e),this}unionAll(e){return this.union(e)}with(e,t){return this.cteQueries.set(e,t),this}whereSubQuery(e,t,a){return this.subQueries.push({query:a,alias:""}),this.whereConditions.push({field:e,operator:t,value:a}),this}count(e="*"){return this.selectFields=[`COUNT(${e}) as count`],this}sum(e){return this.selectFields=[`SUM(${e}) as sum`],this}avg(e){return this.selectFields=[`AVG(${e}) as avg`],this}max(e){return this.selectFields=[`MAX(${e}) as max`],this}min(e){return this.selectFields=[`MIN(${e}) as min`],this}toSQL(){let e="";const t=[];if(this.cteQueries.size>0){const a=[];this.cteQueries.forEach((s,r)=>{const{sql:i,params:n}=s.toSQL();a.push(`${r} AS (${i})`),t.push(...n)}),e+=`WITH 
${a.join(", ")} `}if(e+=`SELECT ${this.selectFields.join(", ")} FROM ${this.tableName}`,this.joinClauses.length>0&&this.joinClauses.forEach(a=>{e+=` ${a.type} JOIN ${a.table} ON ${a.condition}`}),this.whereConditions.length>0){const a=[];this.whereConditions.forEach(s=>{const{clause:r,conditionParams:i}=this.buildCondition(s);a.push(r),t.push(...i)}),e+=` WHERE ${a.join(" AND ")}`}if(this.groupByFields.length>0&&(e+=` GROUP BY ${this.groupByFields.join(", ")}`),this.havingConditions.length>0){const a=[];this.havingConditions.forEach(s=>{const{clause:r,conditionParams:i}=this.buildCondition(s);a.push(r),t.push(...i)}),e+=` HAVING ${a.join(" AND ")}`}return this.orderByFields.length>0&&(e+=` ORDER BY ${this.orderByFields.join(", ")}`),this.limitValue!==null&&(e+=` LIMIT ${this.limitValue}`),this.offsetValue!==null&&(e+=` OFFSET ${this.offsetValue}`),this.unionQueries.length>0&&this.unionQueries.forEach(a=>{const{sql:s,params:r}=a.toSQL();e+=` UNION ${s}`,t.push(...r)}),{sql:e,params:t}}/* buildCondition: turn one {field, operator, value} descriptor into a parameterized SQL fragment; special-cases IN/NOT IN, BETWEEN, IS (NOT) NULL, EXISTS, and sub-query (instanceof x) values. */buildCondition(e){const{field:t,operator:a,value:s}=e,r=[];switch(a.toUpperCase()){case"IN":case"NOT IN":const i=s.map(()=>"?").join(", ");return r.push(...s),{clause:`${t} ${a} (${i})`,conditionParams:r};case"BETWEEN":case"NOT BETWEEN":return r.push(s[0],s[1]),{clause:`${t} ${a} ? 
AND ?`,conditionParams:r};case"IS NULL":case"IS NOT NULL":return{clause:`${t} ${a}`,conditionParams:[]};case"EXISTS":case"NOT EXISTS":const{sql:n,params:h}=s.toSQL();return r.push(...h),{clause:`${a} (${n})`,conditionParams:r};default:if(s instanceof x){const{sql:m,params:b}=s.toSQL();return r.push(...b),{clause:`${t} ${a} (${m})`,conditionParams:r}}return r.push(s),{clause:`${t} ${a} ?`,conditionParams:r}}}/* Execution helpers: run the built SQL through the attached DAO (this.dao). */async get(){if(!this.dao)throw new Error("DAO instance required for query execution");const{sql:e,params:t}=this.toSQL();return(await this.dao.execute(e,t)).rows}async first(){this.limit(1);const e=await this.get();return e.length>0?e[0]:null}async pluck(e){return this.select(e),(await this.get()).map(t=>t[e])}async exists(){return this.select("1").limit(1),(await this.get()).length>0}async countResult(){this.count();const e=await this.first();return e?e.count:0}/* Static one-shot SQL generators; no builder state involved. */static insert(e,t){const a=Object.keys(t),s=Object.values(t),r=s.map(()=>"?").join(", ");return{sql:`INSERT INTO ${e} (${a.join(", ")}) VALUES (${r})`,params:s}}static insertMany(e,t){if(t.length===0)throw new Error("Data array cannot be empty");const a=Object.keys(t[0]),s=a.map(()=>"?").join(", "),r=t.map(()=>`(${s})`).join(", "),i=t.flatMap(n=>Object.values(n));return{sql:`INSERT INTO ${e} (${a.join(", ")}) VALUES ${r}`,params:i}}static update(e,t,a,s=[]){const r=Object.keys(t).map(n=>`${n} = ?`).join(", "),i=[...Object.values(t),...s];return{sql:`UPDATE ${e} SET ${r} WHERE ${a}`,params:i}}static delete(e,t,a=[]){return{sql:`DELETE FROM ${e} WHERE ${t}`,params:a}}static upsert(e,t,a){const s=Object.keys(t),r=Object.values(t),i=r.map(()=>"?").join(", "),n=s.filter(b=>!a.includes(b)),h=n.length>0?n.map(b=>`${b} = excluded.${b}`).join(", "):"";let m=`INSERT INTO ${e} (${s.join(", ")}) VALUES (${i})`;return n.length>0?m+=` ON CONFLICT(${a.join(", ")}) DO UPDATE SET ${h}`:m+=` ON CONFLICT(${a.join(", ")}) DO NOTHING`,{sql:m,params:r}}/* clone: copy builder state into a fresh instance; array containers are copied but the element objects inside them are shared. */clone(){if(!this.dao)throw new Error("DAO instance required for cloning 
QueryBuilder");const e=new x(this.dao);return e.tableName=this.tableName,e.selectFields=[...this.selectFields],e.joinClauses=[...this.joinClauses],e.whereConditions=[...this.whereConditions],e.groupByFields=[...this.groupByFields],e.havingConditions=[...this.havingConditions],e.orderByFields=[...this.orderByFields],e.limitValue=this.limitValue,e.offsetValue=this.offsetValue,e.unionQueries=[...this.unionQueries],e.subQueries=[...this.subQueries],e.cteQueries=new Map(this.cteQueries),e}/* toRawSQL: inline params into the SQL via sequential "?" replacement (naive; a "?" inside a string literal would also be consumed). */toRawSQL(){const{sql:e,params:t}=this.toSQL();let a=e;return t.forEach(s=>{typeof s=="string"?a=a.replace("?",`'${s.replace(/'/g,"''")}'`):s==null?a=a.replace("?","NULL"):a=a.replace("?",String(s))}),a}explain(){return this.selectFields=["EXPLAIN QUERY PLAN "+this.selectFields.join(", ")],this}}/* j (exported as BaseAdapter): shared helpers for platform adapters; bindParameters inlines values with single-quote doubling only. */class j{sanitizeSQL(e){return e.trim()}bindParameters(e,t){if(!t||t.length===0)return e;let a=0;return e.replace(/\?/g,()=>{if(a<t.length){const s=t[a++];return typeof s=="string"?`'${s.replace(/'/g,"''")}'`:s==null?"NULL":String(s)}return"?"})}}/* w: module logger for the Node.js adapter */const w=D(A.NODEJS_ADAPTER);/* H (exported as NodeJSAdapter): lazily imports better-sqlite3 and opens connections with WAL journaling. */class H extends j{constructor(){super(...arguments),this.Database=null}async isSupported(){try{return typeof process!="undefined"&&process.versions!=null&&process.versions.node!=null}catch(e){return w.debug("Node.js environment not detected",e),!1}}async loadBetterSqlite3(){if(this.Database)return this.Database;try{const e=(await import("better-sqlite3")).default;return this.Database=e,e}catch(e){throw w.error("Failed to load better-sqlite3",e),new Error("better-sqlite3 is not available in this environment")}}async connect(e){w.trace(`Connecting to database: ${e}`);try{if(!await this.isSupported())throw new Error("Node.js environment is not supported");const t=await this.loadBetterSqlite3(),a=new t(e,{});return a.pragma("journal_mode = WAL"),a.pragma("synchronous = NORMAL"),w.debug(`Connected to database: ${e}`,a),new X(a)}catch(t){throw w.error(`Failed to connect to database: ${e}`,t),new Error(`Failed to connect to database: 
${t}`)}}}/* P: normalize bind params — boolean→0/1, undefined→null, Date→ISO string; other non-null objects are rejected. */const P=p=>p.map(e=>{if(typeof e=="boolean")return e?1:0;if(e===void 0)return null;if(e instanceof Date)return e.toISOString();if(typeof e=="object"&&e!==null)throw new Error(`Cannot bind object/array: ${JSON.stringify(e)}`);return e});/* X: Node.js connection; classifies SQL as SELECT/PRAGMA/MODIFY and dispatches to prepared .all()/.run(). */let X=class{constructor(e){this.db=e}getQueryType(e){const t=e.trim().toUpperCase();return t.startsWith("SELECT")?"SELECT":t.startsWith("PRAGMA")?"PRAGMA":"MODIFY"}async execute(e,t=[]){var a;w.debug("NodeJSConnection.execute() sql:",e),w.debug("NodeJSConnection.execute() params:",t);try{const s=P(t);w.debug("Normalized params:",s);const r=this.getQueryType(e),i=this.db.prepare(e);switch(r){case"SELECT":{const n=i.all(...s);return w.debug("SQL SELECT result:",n),{rows:n||[],rowsAffected:0}}case"PRAGMA":try{const n=i.all(...s);return w.debug("PRAGMA result:",n),{rows:n||[],rowsAffected:0,lastInsertRowId:0}}catch(n){if(!((a=n.message)===null||a===void 0)&&a.includes("does not return data")){const h=i.run(...s);return w.debug("PRAGMA execution result:",h),{rows:[],rowsAffected:0,lastInsertRowId:0}}throw n}case"MODIFY":default:{const n=i.run(...s);return w.debug("SQL execution result:",{changes:n.changes,lastInsertRowid:n.lastInsertRowid}),{rows:[],rowsAffected:n.changes||0,lastInsertRowId:n.lastInsertRowid||void 0}}}}catch(s){throw(!s.message||s.message.indexOf("_schema_info")===-1)&&w.error("SQL execution failed",{code:s.code,message:s.message}),s}}async close(){try{this.db.close(),w.debug("Database connection closed")}catch(e){throw w.error("Close Error:",e),e}}/* NOTE(review): better-sqlite3 transactions are synchronous, but an async callback is passed here and t() is not awaited — awaited work inside the callback may run after the transaction commits; confirm against better-sqlite3 docs. */async transaction(e){const t=this.db.transaction(async()=>{await e(new J(this.db))});try{t(),w.debug("Transaction completed successfully")}catch(a){throw w.error("Transaction failed",a),a}}};/* J: executor handed to X.transaction callbacks; same SELECT/PRAGMA/MODIFY dispatch as X.execute. */class J{constructor(e){this.db=e}getQueryType(e){const t=e.trim().toUpperCase();return t.startsWith("SELECT")?"SELECT":t.startsWith("PRAGMA")?"PRAGMA":"MODIFY"}async executeSql(e,t=[]){var a;try{const s=P(t);w.debug("Normalized params:",s);const 
r=this.getQueryType(e),i=this.db.prepare(e);switch(r){case"SELECT":return{rows:i.all(...s)||[],rowsAffected:0};case"PRAGMA":try{return{rows:i.all(...s)||[],rowsAffected:0,lastInsertRowId:0}}catch(n){if(!((a=n.message)===null||a===void 0)&&a.includes("does not return data")){const h=i.run(...s);return{rows:[],rowsAffected:0,lastInsertRowId:0}}throw n}case"MODIFY":default:{const n=i.run(...s);return{rows:[],rowsAffected:n.changes||0,lastInsertRowId:n.lastInsertRowid||void 0}}}}catch(s){throw w.error("Transaction SQL execution failed",s),s}}}/* I: module logger for the React Native adapter */const I=D(A.REACTNATIVE_ADAPTER);/* Y (exported as ReactNativeAdapter): react-native-sqlite-storage in promise mode; iOS/Android only. */class Y extends j{constructor(){super(...arguments),this.SQLite=null,this.Platform=null}async isSupported(){try{const{Platform:e}=await import("react-native");return this.Platform=e,e.OS==="ios"||e.OS==="android"}catch(e){return I.debug("React Native environment not detected",e),!1}}async loadSQLite(){if(this.SQLite)return this.SQLite;try{const e=(await import("react-native-sqlite-storage")).default;return e.DEBUG(!1),e.enablePromise(!0),this.SQLite=e,e}catch(e){throw I.error("Failed to load react-native-sqlite-storage",e),new Error("react-native-sqlite-storage is not available in this environment")}}async connect(e){try{if(!await this.isSupported())throw new Error("React Native environment is not supported");const t=await(await this.loadSQLite()).openDatabase({name:e,location:"default"});return I.debug(`Connected to database: ${e}`,t),new Z(t)}catch(t){throw I.error(`Failed to connect to database: ${e}`,t),new Error(`Failed to connect to database: ${t}`)}}}/* Z: React Native connection; materializes result-set rows via rows.item(i) for SELECTs. */class Z{constructor(e){this.db=e}async execute(e,t=[]){var a;try{if(e.trim().toUpperCase().startsWith("SELECT")){const s=await this.db.executeSql(e,t),r=[];if(s&&s.length>0){const i=s[0];I.debug("SQL SELECT result:",i);for(let n=0;n<i.rows.length;n++)r.push(i.rows.item(n));I.debug("SQL SELECT rows:",r)}return{rows:r,rowsAffected:0}}else{const s=await this.db.executeSql(e,t);if(s&&s.length>0){const r=s[0];return I.debug("SQL execution 
result:",r),{rows:((a=r.rows)===null||a===void 0?void 0:a.raw())||[],rowsAffected:r.rowsAffected||0,lastInsertRowId:r.insertId||void 0}}return{rows:[],rowsAffected:0}}}catch(s){throw I.error("SQL execution failed",s),s}}async close(){try{await this.db.close()}catch(e){throw I.error("Close Error:",e),e}}async transaction(e){return new Promise((t,a)=>{this.db.transaction(async s=>{try{await e(new ee(s)),t()}catch(r){a(r)}},s=>{a(s)})})}}/* ee: executor bound to a react-native transaction handle (callback-style executeSql wrapped in a Promise). */class ee{constructor(e){this.tx=e}async executeSql(e,t=[]){return new Promise((a,s)=>{this.tx.executeSql(e,t,(r,i)=>{if(e.trim().toUpperCase().startsWith("SELECT")){const n=[];for(let h=0;h<i.rows.length;h++)n.push(i.rows.item(h));a({rows:n,rowsAffected:0})}else a({rows:[],rowsAffected:i.rowsAffected||0,lastInsertRowId:i.insertId||void 0})},(r,i)=>{s(i)})})}}/* te (exported as SQLITE_TYPE_MAPPING): logical column types → SQLite storage classes. */const te={sqlite:{string:"TEXT",varchar:"TEXT",char:"TEXT",text:"TEXT",email:"TEXT",url:"TEXT",uuid:"TEXT",integer:"INTEGER",int:"INTEGER",bigint:"INTEGER",smallint:"INTEGER",tinyint:"INTEGER",number:"REAL",decimal:"REAL",numeric:"REAL",float:"REAL",double:"REAL",boolean:"INTEGER",bool:"INTEGER",timestamp:"TEXT",datetime:"TEXT",date:"TEXT",time:"TEXT",json:"TEXT",jsonb:"TEXT",array:"TEXT",object:"TEXT",blob:"BLOB",binary:"BLOB",objectid:"TEXT"}};export{j as BaseAdapter,B as BaseService,T as DatabaseFactory,v as DatabaseManager,L as DefaultService,H as NodeJSAdapter,x as QueryBuilder,Y as ReactNativeAdapter,te as SQLITE_TYPE_MAPPING,A as SQLiteModules,S as ServiceManager,z as UniversalDAO,U as configureSQLiteLogger};
13
+ ${s}`)}this.isInitialized=!0,o.info("All databases initialized successfully",{totalSchemas:e.length})}/* getLazyLoading: return the cached connection for key e, creating it on demand; enforces access control and the maxConnections cap. */static async getLazyLoading(e){if(o.debug("Getting database with lazy loading",{key:e}),this.activeDatabases.add(e),!this.hasAccessToDatabase(e))throw o.error("Access denied for lazy loading",{key:e}),new Error(`Access denied: Database '${e}' is not accessible.`);if(!this.connections[e]){const t=this.getSchema(e);if(!t)throw o.error("Schema not found for lazy loading",{key:e}),new Error(`Invalid database key: ${e}. Schema not found.`);if(Object.keys(this.connections).length>=this.maxConnections)throw o.error("Maximum connections reached",{currentConnections:Object.keys(this.connections).length,maxConnections:this.maxConnections}),new Error("Maximum number of database connections reached");o.debug("Creating new connection for lazy loading",{key:e,schemaName:t.database_name});const a=await I.createOrOpen({config:t},!1);await a.connect(),this.connections[e]=a,o.info("Database connection created via lazy loading",{key:e})}return this.isInitialized=!0,this.connections[e]}/* Cross-schema "transaction": begin on every schema, run the callback, then commit all or roll all back. NOTE(review): commits across separate databases are not atomic. */static async executeCrossSchemaTransaction(e,t){o.debug("Executing cross-schema transaction",{schemas:e});for(const s of e)if(!this.hasAccessToDatabase(s))throw o.error("Access denied for cross-schema transaction",{key:s,schemas:e}),new Error(`Access denied: Database '${s}' is not accessible.`);const a=e.reduce((s,r)=>(s[r]=this.get(r),s),{});o.debug("Starting cross-schema transaction",{schemas:e});try{await Promise.all(Object.values(a).map(s=>s.beginTransaction())),o.trace("All transactions started successfully"),await t(a),o.trace("Transaction callback completed successfully"),await Promise.all(Object.values(a).map(s=>s.commitTransaction())),o.info("Cross-schema transaction completed successfully",{schemas:e})}catch(s){throw o.error("Cross-schema transaction failed, rolling back",{schemas:e,error:s.message}),await Promise.all(Object.values(a).map(r=>r.rollbackTransaction())),o.debug("Cross-schema 
transaction rolled back"),s}}/* importDataToTable / importDataWithMapping: access-checked delegation to the schema's DAO import helpers. */static async importDataToTable(e,t,a,s={}){if(o.debug("Importing data to table",{databaseKey:e,tableName:t,recordCount:a.length,options:s}),!this.hasAccessToDatabase(e))throw o.error("Access denied for data import",{databaseKey:e,tableName:t}),new Error(`Access denied: Database '${e}' is not accessible.`);const r=this.get(e);try{const i=await r.importData(Object.assign({tableName:t,data:a},s));return o.info("Data import completed successfully",{databaseKey:e,tableName:t,importedRows:i.successRows,skippedRows:i.errorRows}),i}catch(i){throw o.error("Data import failed",{databaseKey:e,tableName:t,error:i.message}),i}}static async importDataWithMapping(e,t,a,s,r={}){if(o.debug("Importing data with column mapping",{databaseKey:e,tableName:t,recordCount:a.length,mappingCount:s.length,options:r}),!this.hasAccessToDatabase(e))throw o.error("Access denied for data import with mapping",{databaseKey:e,tableName:t}),new Error(`Access denied: Database '${e}' is not accessible.`);const i=this.get(e);try{const n=await i.importDataWithMapping(t,a,s,r);return o.info("Data import with mapping completed successfully",{databaseKey:e,tableName:t,importedRows:n.successRows,skippedRows:n.errorRows}),n}catch(n){throw o.error("Data import with mapping failed",{databaseKey:e,tableName:t,error:n.message}),n}}/* bulkImport: process each config independently; failures are recorded in result.errors rather than rethrown. */static async bulkImport(e){const t=Date.now();o.info("Starting bulk import",{configCount:e.length,configs:e.map(s=>({databaseKey:s.databaseKey,tableName:s.tableName,recordCount:s.data.length}))});const a={totalDatabases:e.length,successDatabases:0,results:{},errors:{},executionTime:0};for(const s of e){const r=`${s.databaseKey}.${s.tableName}`;o.debug("Processing bulk import config",{configKey:r});try{if(!this.hasAccessToDatabase(s.databaseKey))throw new Error(`Access denied: Database '${s.databaseKey}' is not accessible.`);const i=this.get(s.databaseKey);let n;s.columnMappings?(o.trace("Using column mappings for import",{configKey:r}),n=await 
i.importDataWithMapping(s.tableName,s.data,s.columnMappings,s.options)):(o.trace("Using direct import",{configKey:r}),n=await i.importData(Object.assign({tableName:s.tableName,data:s.data},s.options))),a.results[r]=n,a.successDatabases++,o.info("Bulk import config completed successfully",{configKey:r,importedRows:n.successRows,skippedRows:n.errorRows})}catch(i){const n=i instanceof Error?i:new Error(String(i));o.error("Bulk import config failed",{configKey:r,error:n.message}),a.errors[r]=n}}return a.executionTime=Date.now()-t,o.info("Bulk import completed",{totalConfigs:a.totalDatabases,successfulConfigs:a.successDatabases,failedConfigs:Object.keys(a.errors).length,executionTimeMs:a.executionTime}),a}static async importFromCSV(e,t,a,s={}){if(o.debug("Importing from CSV",{databaseKey:e,tableName:t,csvSize:a.length,options:s}),!this.hasAccessToDatabase(e))throw o.error("Access denied for CSV import",{databaseKey:e,tableName:t}),new Error(`Access denied: Database '${e}' is not accessible.`);const r=this.get(e);try{const i=await r.importFromCSV(t,a,s);return o.info("CSV import completed successfully",{databaseKey:e,tableName:t,importedRows:i.successRows,skippedRows:i.errorRows}),i}catch(i){throw o.error("CSV import failed",{databaseKey:e,tableName:t,error:i.message}),i}}/* connection bookkeeping and teardown */static getConnectionCount(){const e=Object.keys(this.connections).length;return o.trace("Getting connection count",{count:e}),e}static listConnections(){const e=Object.keys(this.connections);return o.trace("Listing connections",{connections:e}),e}static async closeConnection(e){o.debug("Closing specific connection",{dbKey:e});const t=this.connections[e];if(t)try{await t.disconnect(),delete this.connections[e],o.info("Database connection closed successfully",{dbKey:e})}catch(a){throw o.error("Error closing database connection",{dbKey:e,error:a.message}),a}else o.warn("Attempted to close non-existent connection",{dbKey:e})}static async closeAll(){o.info("Closing all connections and resetting state"),await 
this.closeAllConnections(),this.currentUserRoles=[],this.currentRole=null,this.isInitialized=!1,this.activeDatabases.clear(),this.eventListeners.clear(),this.isClosingConnections=!1,o.info("All connections closed and state reset successfully")}/* logout: close every non-"core" connection. NOTE(review): calls connection.close() here but disconnect() in closeConnection — confirm both exist on the connection type. */static async logout(){o.info("Logging out user",{currentUserRoles:this.currentUserRoles});const e=Object.keys(this.connections).filter(t=>t!=="core");o.debug("Closing role-specific connections",{connectionsToClose:e});for(const t of e)try{await this.connections[t].close(),delete this.connections[t],o.debug("Role-specific connection closed",{dbKey:t})}catch(a){o.error("Error closing connection during logout",{dbKey:t,error:a.message})}this.currentUserRoles=[],this.currentRole=null,o.info("User logout completed successfully",{closedConnections:e.length})}}/* static state for the DatabaseManager class (v) */v.maxConnections=10,v.connections={},v.isInitialized=!1,v.roleRegistry={},v.currentRole=null,v.currentUserRoles=[],v.activeDatabases=new Set,v.isClosingConnections=!1,v.schemaConfigurations={},v.schemaManager=null,v.eventListeners=new Map;/* l: module logger for BaseService */const l=k(O.BASE_SERVICE);/* z: generic per-table service; resolves its DAO lazily through v (DatabaseManager) and re-binds it on reconnect events. */class z{constructor(e,t){this.dao=null,this.isOpened=!1,this.isInitialized=!1,this.errorHandlers=new Map,this.eventListeners=new Map,this.primaryKeyFields=["id"],this.cache=new Map,this.schemaName=e,this.tableName=t||e,l.debug("Creating BaseService instance",{schemaName:this.schemaName,tableName:this.tableName,primaryKeyFields:this.primaryKeyFields}),this.reconnectHandler=a=>{l.info("Database reconnected for service",{schemaName:this.schemaName,tableName:this.tableName}),this.dao=a,this._emit("daoReconnected",{schemaName:this.schemaName})},v.onDatabaseReconnect(e,this.reconnectHandler),this.bindMethods(),l.trace("BaseService instance created successfully",{schemaName:this.schemaName,tableName:this.tableName})}bindMethods(){l.trace("Binding service methods",{schemaName:this.schemaName,tableName:this.tableName}),Object.getOwnPropertyNames(Object.getPrototypeOf(this)).forEach(e=>{typeof 
this[e]=="function"&&e!=="constructor"&&(this[e]=this[e].bind(this))})}setPrimaryKeyFields(e){return l.debug("Setting primary key fields",{schemaName:this.schemaName,tableName:this.tableName,previousFields:this.primaryKeyFields,newFields:e}),this.primaryKeyFields=e,this}/* init: resolve the DAO via v.getLazyLoading and connect if needed; idempotent. */async init(){l.info("Initializing BaseService",{schemaName:this.schemaName,tableName:this.tableName,isInitialized:this.isInitialized});try{if(this.isInitialized)return l.debug("Service already initialized, skipping",{schemaName:this.schemaName}),this;if(l.debug("Getting DAO from DatabaseManager",{schemaName:this.schemaName}),this.dao=await v.getLazyLoading(this.schemaName),!this.dao){const e=`Failed to initialize DAO for schema: ${this.schemaName}`;throw l.error(e,{schemaName:this.schemaName}),new Error(e)}return this.dao.isConnectionOpen()||(l.debug("DAO connection not open, connecting",{schemaName:this.schemaName}),await this.dao.connect()),this.isOpened=!0,this.isInitialized=!0,l.info("BaseService initialized successfully",{schemaName:this.schemaName,tableName:this.tableName,isOpened:this.isOpened,isInitialized:this.isInitialized}),this._emit("initialized",{schemaName:this.schemaName}),this}catch(e){throw l.error("Error initializing BaseService",{schemaName:this.schemaName,tableName:this.tableName,error:e.message}),this._handleError("INIT_ERROR",e),e}}/* upsert: look up by searchFields (default: primary keys) then update, else create. Not atomic — separate read and write. */async upsert(e,t){l.debug("Starting upsert operation",{schemaName:this.schemaName,tableName:this.tableName,hasData:!!e,dataKeys:e?Object.keys(e):[],searchFields:t||this.primaryKeyFields}),await this._ensureInitialized(),await this.ensureValidConnection();try{this._validateData(e);const a=t||this.primaryKeyFields;l.trace("Building conditions for existence check",{schemaName:this.schemaName,tableName:this.tableName,fieldsToCheck:a});const s={};let r=!0;for(const n of a){const h=e[n];if(h!=null)s[n]=h;else{r=!1,l.trace("Missing required field for upsert check",{schemaName:this.schemaName,tableName:this.tableName,field:n});break}}if(!r)return 
l.debug("Missing required fields, performing insert",{schemaName:this.schemaName,tableName:this.tableName,fieldsToCheck:a}),await this.create(e);l.trace("Checking if record exists",{schemaName:this.schemaName,tableName:this.tableName,conditions:s});const i=await this.findFirst(s);if(i){const n=i[this.primaryKeyFields[0]];l.debug("Record exists, performing update",{schemaName:this.schemaName,tableName:this.tableName,primaryKeyField:this.primaryKeyFields[0],primaryKeyValue:n});const h=await this.update(n,e);return l.info("Upsert completed (update)",{schemaName:this.schemaName,tableName:this.tableName,operation:"update",primaryKeyValue:n}),this._emit("dataUpserted",{operation:"upsert",action:"update",data:h}),h}else{l.debug("Record does not exist, performing insert",{schemaName:this.schemaName,tableName:this.tableName});const n=await this.create(e);return l.info("Upsert completed (insert)",{schemaName:this.schemaName,tableName:this.tableName,operation:"insert"}),this._emit("dataUpserted",{operation:"upsert",action:"insert",data:n}),n}}catch(a){throw l.error("Error during upsert operation",{schemaName:this.schemaName,tableName:this.tableName,error:a.message}),this._handleError("UPSERT_ERROR",a),a}}/* create: insert, then best-effort re-read by provided PK or lastInsertRowId; falls back to echoing the input data. */async create(e){l.debug("Creating new record",{schemaName:this.schemaName,tableName:this.tableName,hasData:!!e,dataKeys:e?Object.keys(e):[]}),await this._ensureInitialized(),await this.ensureValidConnection();try{this._validateData(e),l.trace("Building data table for insert",{schemaName:this.schemaName,tableName:this.tableName});const t=this.buildDataTable(e),a=await this.dao.insert(t);if(a.rowsAffected===0){const i="Insert operation failed - no rows affected";throw l.error(i,{schemaName:this.schemaName,tableName:this.tableName,result:a}),new Error(i)}l.debug("Insert operation successful",{schemaName:this.schemaName,tableName:this.tableName,rowsAffected:a.rowsAffected,lastInsertRowId:a.lastInsertRowId});let s=null;const 
r=e[this.primaryKeyFields[0]];try{r!=null?(l.trace("Retrieving created record by primary key",{schemaName:this.schemaName,tableName:this.tableName,primaryKeyField:this.primaryKeyFields[0],primaryKeyValue:r}),s=await this.findById(r)):a.lastInsertRowId&&(l.trace("Retrieving created record by last insert ID",{schemaName:this.schemaName,tableName:this.tableName,lastInsertRowId:a.lastInsertRowId}),s=await this.findById(a.lastInsertRowId))}catch(i){l.warn("Could not retrieve created record",{schemaName:this.schemaName,tableName:this.tableName,findError:i.message})}return s||(l.debug("Using original data as created record",{schemaName:this.schemaName,tableName:this.tableName}),s=e),l.info("Record created successfully",{schemaName:this.schemaName,tableName:this.tableName,recordRetrieved:!!s}),this._emit("dataCreated",{operation:"create",data:s}),s}catch(t){throw l.error("Error creating record",{schemaName:this.schemaName,tableName:this.tableName,error:t.message}),this._handleError("CREATE_ERROR",t),t}}/* update: merge the PK into the payload, run DAO.update, then re-read. NOTE(review): unlike create/upsert, no ensureValidConnection call here — confirm intentional. */async update(e,t){l.debug("Updating record",{schemaName:this.schemaName,tableName:this.tableName,id:e,hasData:!!t,dataKeys:t?Object.keys(t):[]}),await this._ensureInitialized();try{if(!e){const i="ID is required for update";throw l.error(i,{schemaName:this.schemaName,tableName:this.tableName}),new Error(i)}this._validateData(t);const a=Object.assign(Object.assign({},t),{[this.primaryKeyFields[0]]:e});l.trace("Building update query table",{schemaName:this.schemaName,tableName:this.tableName,id:e});const s=this.buildDataTable(a);await this.dao.update(s),l.debug("Update operation completed",{schemaName:this.schemaName,tableName:this.tableName,id:e});const r=await this.findById(e);return l.info("Record updated successfully",{schemaName:this.schemaName,tableName:this.tableName,id:e,recordFound:!!r}),this._emit("dataUpdated",{operation:"update",id:e,data:r}),r}catch(a){throw l.error("Error updating 
record",{schemaName:this.schemaName,tableName:this.tableName,id:e,error:a.message}),this._handleError("UPDATE_ERROR",a),a}}/* delete by primary key; resolves true iff a row was affected. */async delete(e){l.debug("Deleting record",{schemaName:this.schemaName,tableName:this.tableName,id:e}),await this._ensureInitialized();try{if(!e){const r="ID is required for delete";throw l.error(r,{schemaName:this.schemaName,tableName:this.tableName}),new Error(r)}const t={name:this.tableName,cols:[],wheres:[{name:this.primaryKeyFields[0],value:e}]};l.trace("Executing delete operation",{schemaName:this.schemaName,tableName:this.tableName,id:e,primaryKeyField:this.primaryKeyFields[0]});const a=await this.dao.delete(t),s=a.rowsAffected>0;return s?(l.info("Record deleted successfully",{schemaName:this.schemaName,tableName:this.tableName,id:e,rowsAffected:a.rowsAffected}),this._emit("dataDeleted",{operation:"delete",id:e})):l.warn("Delete operation completed but no rows affected",{schemaName:this.schemaName,tableName:this.tableName,id:e}),s}catch(t){throw l.error("Error deleting record",{schemaName:this.schemaName,tableName:this.tableName,id:e,error:t.message}),this._handleError("DELETE_ERROR",t),t}}/* read helpers */async findById(e){l.debug("Finding record by ID",{schemaName:this.schemaName,tableName:this.tableName,id:e}),await this._ensureInitialized();try{if(!e){const i="ID is required";throw l.error(i,{schemaName:this.schemaName,tableName:this.tableName}),new Error(i)}const t={[this.primaryKeyFields[0]]:e};l.trace("Building select query",{schemaName:this.schemaName,tableName:this.tableName,conditions:t});const a=this.buildSelectTable(t),s=await this.dao.select(a),r=Object.keys(s).length>0?s:null;return l.debug("Find by ID completed",{schemaName:this.schemaName,tableName:this.tableName,id:e,recordFound:!!r}),this._emit("dataFetched",{operation:"findById",id:e}),r}catch(t){throw l.error("Error finding record by ID",{schemaName:this.schemaName,tableName:this.tableName,id:e,error:t.message}),this._handleError("FIND_BY_ID_ERROR",t),t}}async 
findFirst(e={}){l.debug("Finding first record",{schemaName:this.schemaName,tableName:this.tableName,conditionsCount:Object.keys(e).length,conditions:e}),await this._ensureInitialized();try{l.trace("Building select query for findFirst",{schemaName:this.schemaName,tableName:this.tableName});const t=this.buildSelectTable(e),a=await this.dao.select(t),s=Object.keys(a).length>0?a:null;return l.debug("Find first completed",{schemaName:this.schemaName,tableName:this.tableName,recordFound:!!s}),this._emit("dataFetched",{operation:"findFirst"}),s}catch(t){throw l.error("Error finding first record",{schemaName:this.schemaName,tableName:this.tableName,conditions:e,error:t.message}),this._handleError("FIND_FIRST_ERROR",t),t}}/* findAll: object conditions plus options (columns, orderBy, limit/offset, extra where clauses). */async findAll(e={},t={}){l.debug("Finding all records",{schemaName:this.schemaName,tableName:this.tableName,conditionsCount:Object.keys(e).length,hasLimit:!!t.limit,hasOffset:!!t.offset,hasOrderBy:!!(t.orderBy&&t.orderBy.length>0),limit:t.limit,offset:t.offset}),await this._ensureInitialized();try{const a=[...this.buildWhereFromObject(e),...t.where||[]];l.trace("Building query for findAll",{schemaName:this.schemaName,tableName:this.tableName,totalWheres:a.length,hasColumns:!!(t.columns&&t.columns.length>0)});const s={name:this.tableName,cols:t.columns?t.columns.map(i=>({name:i})):[],wheres:a,orderbys:t.orderBy,limitOffset:{limit:t.limit,offset:t.offset}},r=await this.dao.selectAll(s);return l.info("Find all completed",{schemaName:this.schemaName,tableName:this.tableName,recordsFound:r.length,conditionsCount:Object.keys(e).length}),this._emit("dataFetched",{operation:"findAll",count:r.length}),r}catch(a){throw l.error("Error finding all records",{schemaName:this.schemaName,tableName:this.tableName,conditions:e,options:t,error:a.message}),this._handleError("FIND_ALL_ERROR",a),a}}async count(e){l.debug("Counting records",{schemaName:this.schemaName,tableName:this.tableName,hasWhere:!!e,whereType:e?Array.isArray(e)?"array":"object":"none"}),await 
this._ensureInitialized();try{let t=[];Array.isArray(e)?(t=e,l.trace("Using array where conditions",{schemaName:this.schemaName,whereCount:t.length})):e&&typeof e=="object"&&(t=this.buildWhereFromObject(e),l.trace("Built where conditions from object",{schemaName:this.schemaName,whereCount:t.length}));const a={name:this.tableName,cols:[{name:"COUNT(*) as count"}],wheres:t},s=(await this.dao.select(a)).count||0;return l.debug("Count completed",{schemaName:this.schemaName,tableName:this.tableName,count:s}),s}catch(t){throw l.error("Error counting records",{schemaName:this.schemaName,tableName:this.tableName,where:e,error:t.message}),this._handleError("COUNT_ERROR",t),t}}async exists(e){l.debug("Checking if record exists",{schemaName:this.schemaName,tableName:this.tableName,id:e});const t=await this.findById(e)!==null;return l.debug("Existence check completed",{schemaName:this.schemaName,tableName:this.tableName,id:e,exists:t}),t}/* truncate: DELETE all rows and reset sqlite_sequence; table name is interpolated (trusted internal value). */async truncate(){l.warn("Truncating table - this will delete all data",{schemaName:this.schemaName,tableName:this.tableName}),await this._ensureInitialized();try{l.debug("Executing truncate operations",{schemaName:this.schemaName,tableName:this.tableName}),await this.dao.execute(`DELETE FROM ${this.tableName}`),await this.dao.execute(`DELETE FROM sqlite_sequence WHERE name='${this.tableName}'`),l.info("Table truncated successfully",{schemaName:this.schemaName,tableName:this.tableName}),this._emit("tableTruncated",{tableName:this.tableName})}catch(e){throw l.error("Error truncating table",{schemaName:this.schemaName,tableName:this.tableName,error:e.message}),this._handleError("TRUNCATE_ERROR",e),e}}/* bulkUpsert: per-item upsert loop, optionally inside one transaction; per-index failures are collected in result.errors. */async bulkUpsert(e,t,a=!0){l.info("Starting bulk upsert operation",{schemaName:this.schemaName,tableName:this.tableName,itemsCount:e.length,searchFields:t||this.primaryKeyFields,useTransaction:a}),await this._ensureInitialized(),await this.ensureValidConnection();try{if(!Array.isArray(e)||e.length===0){const n="Data must be a non-empty array";throw 
l.error(n,{schemaName:this.schemaName,tableName:this.tableName,dataType:typeof e,dataLength:Array.isArray(e)?e.length:"N/A"}),new Error(n)}const s={created:[],updated:[],total:e.length,errors:[]},r=t||this.primaryKeyFields;l.debug("Preparing bulk upsert",{schemaName:this.schemaName,tableName:this.tableName,fieldsToCheck:r,itemsCount:e.length});/* i: per-item worker shared by the transactional and non-transactional paths */const i=async()=>{for(let n=0;n<e.length;n++){const h=e[n];try{n%100===0&&l.trace("Bulk upsert progress",{schemaName:this.schemaName,tableName:this.tableName,processed:n,total:e.length,created:s.created.length,updated:s.updated.length,errors:s.errors.length}),this._validateData(h);const m={};let f=!0;for(const d of r){const C=h[d];if(C!=null)m[d]=C;else{f=!1;break}}if(!f){l.trace("Missing required fields for item, performing insert",{schemaName:this.schemaName,tableName:this.tableName,index:n,fieldsToCheck:r});const d=this.buildDataTable(h);await this.dao.insert(d),s.created.push(h);continue}const y=await this.findFirst(m);if(y){const d=y[this.primaryKeyFields[0]];l.trace("Record exists, performing update",{schemaName:this.schemaName,tableName:this.tableName,index:n,primaryKeyValue:d});const C=Object.assign(Object.assign({},h),{[this.primaryKeyFields[0]]:d}),w=this.buildDataTable(C);await this.dao.update(w);const D=await this.findById(d);D&&s.updated.push(D)}else{l.trace("Record does not exist, performing insert",{schemaName:this.schemaName,tableName:this.tableName,index:n});const d=this.buildDataTable(h);await this.dao.insert(d),s.created.push(h)}}catch(m){l.warn("Error processing item in bulk upsert",{schemaName:this.schemaName,tableName:this.tableName,index:n,error:m.message}),s.errors.push({index:n,data:h,error:m.message})}}};return a?(l.debug("Executing bulk upsert in transaction",{schemaName:this.schemaName,tableName:this.tableName,itemsCount:e.length}),await this.executeTransaction(i)):(l.debug("Executing bulk upsert without transaction",{schemaName:this.schemaName,tableName:this.tableName,itemsCount:e.length}),await 
i()),l.info("Bulk upsert completed",{schemaName:this.schemaName,tableName:this.tableName,total:s.total,created:s.created.length,updated:s.updated.length,errors:s.errors.length,successRate:`${((s.created.length+s.updated.length)/s.total*100).toFixed(2)}%`}),this._emit("dataBulkUpserted",{operation:"bulkUpsert",created:s.created.length,updated:s.updated.length,errors:s.errors.length,total:s.total}),s}catch(s){throw l.error("Error during bulk upsert operation",{schemaName:this.schemaName,tableName:this.tableName,itemsCount:e.length,error:s.message}),this._handleError("BULK_UPSERT_ERROR",s),s}}/* bulkInsert: delegate to DAO.importData (batches of 1000, validation on, errors not skipped). */async bulkInsert(e){l.info("Starting bulk insert",{schemaName:this.schemaName,tableName:this.tableName,itemsCount:e.length}),await this._ensureInitialized();try{if(!Array.isArray(e)||e.length===0){const a="Items must be a non-empty array";throw l.error(a,{schemaName:this.schemaName,tableName:this.tableName,itemsType:typeof e,itemsLength:Array.isArray(e)?e.length:"N/A"}),new Error(a)}l.debug("Executing bulk insert operation",{schemaName:this.schemaName,tableName:this.tableName,itemsCount:e.length});const t=await this.dao.importData({tableName:this.tableName,data:e,batchSize:1e3,skipErrors:!1,validateData:!0});return l.info("Bulk insert completed",{schemaName:this.schemaName,tableName:this.tableName,totalRows:t.totalRows,successRows:t.successRows,errorRows:t.errorRows}),this._emit("dataBulkCreated",{operation:"bulkInsert",count:t.successRows}),t}catch(t){throw l.error("Error during bulk insert",{schemaName:this.schemaName,tableName:this.tableName,itemsCount:e.length,error:t.message}),this._handleError("BULK_INSERT_ERROR",t),t}}/* bulkCreate: validated row-by-row inserts inside a single transaction. */async bulkCreate(e){l.info("Starting bulk create with transaction",{schemaName:this.schemaName,tableName:this.tableName,itemsCount:e.length}),await this._ensureInitialized();try{if(!Array.isArray(e)||e.length===0){const a="Data must be a non-empty array";throw l.error(a,{schemaName:this.schemaName,tableName:this.tableName,dataType:typeof 
e,dataLength:Array.isArray(e)?e.length:"N/A"}),new Error(a)}const t=[];return l.debug("Executing bulk create in transaction",{schemaName:this.schemaName,tableName:this.tableName,itemsCount:e.length}),await this.executeTransaction(async()=>{for(let a=0;a<e.length;a++){const s=e[a];a%100===0&&l.trace("Bulk create progress",{schemaName:this.schemaName,tableName:this.tableName,processed:a,total:e.length}),this._validateData(s);const r=this.buildDataTable(s);await this.dao.insert(r),t.push(s)}}),l.info("Bulk create completed successfully",{schemaName:this.schemaName,tableName:this.tableName,recordsCreated:t.length}),this._emit("dataBulkCreated",{operation:"bulkCreate",count:t.length}),t}catch(t){throw l.error("Error during bulk create",{schemaName:this.schemaName,tableName:this.tableName,itemsCount:e.length,error:t.message}),this._handleError("BULK_CREATE_ERROR",t),t}}/* executeTransaction: begin → callback → commit; best-effort rollback on failure. */async executeTransaction(e){l.debug("Starting transaction",{schemaName:this.schemaName,tableName:this.tableName}),await this._ensureInitialized();try{l.trace("Beginning database transaction",{schemaName:this.schemaName}),await this.dao.beginTransaction();const t=await e();return l.trace("Committing transaction",{schemaName:this.schemaName}),await this.dao.commitTransaction(),l.info("Transaction completed successfully",{schemaName:this.schemaName,tableName:this.tableName}),this._emit("transactionCompleted",{operation:"transaction"}),t}catch(t){l.error("Transaction failed, rolling back",{schemaName:this.schemaName,tableName:this.tableName,error:t.message});try{await this.dao.rollbackTransaction(),l.debug("Transaction rollback successful",{schemaName:this.schemaName})}catch(a){l.error("Error during transaction rollback",{schemaName:this.schemaName,rollbackError:a.message}),this._handleError("ROLLBACK_ERROR",a)}throw this._handleError("TRANSACTION_ERROR",t),t}}async importFromCSV(e,t={}){l.info("Starting CSV 
import",{schemaName:this.schemaName,tableName:this.tableName,csvDataLength:e.length,delimiter:t.delimiter,hasHeader:t.hasHeader,hasMappings:!!(t.columnMappings&&t.columnMappings.length>0)}),await this._ensureInitialized();try{const a=await this.dao.importFromCSV(this.tableName,e,t);return l.info("CSV import completed",{schemaName:this.schemaName,tableName:this.tableName,totalRows:a.totalRows,successRows:a.successRows,errorRows:a.errorRows}),this._emit("dataImported",{operation:"importFromCSV",result:a}),a}catch(a){throw l.error("Error during CSV import",{schemaName:this.schemaName,tableName:this.tableName,csvDataLength:e.length,error:a.message}),this._handleError("IMPORT_CSV_ERROR",a),a}}async importDataWithMapping(e,t,a={}){l.info("Starting import with column mapping",{schemaName:this.schemaName,tableName:this.tableName,dataCount:e.length,mappingsCount:t.length}),await this._ensureInitialized();try{const s=await this.dao.importDataWithMapping(this.tableName,e,t,a);return l.info("Import with mapping completed",{schemaName:this.schemaName,tableName:this.tableName,totalRows:s.totalRows,successRows:s.successRows,errorRows:s.errorRows}),this._emit("dataImported",{operation:"importWithMapping",result:s}),s}catch(s){throw l.error("Error during import with mapping",{schemaName:this.schemaName,tableName:this.tableName,dataCount:e.length,mappingsCount:t.length,error:s.message}),this._handleError("IMPORT_MAPPING_ERROR",s),s}}/* query-table builders: shape {name, cols, wheres, orderbys, limitOffset} descriptors for the DAO */buildSelectTable(e={},t={}){l.trace("Building select table query",{schemaName:this.schemaName,tableName:this.tableName,conditionsCount:Object.keys(e).length,hasOptions:Object.keys(t).length>0});const a={name:this.tableName,cols:[],wheres:[],orderbys:t.orderBy||[],limitOffset:{}};return t.columns&&t.columns.length>0&&(a.cols=t.columns.map(s=>({name:s}))),e&&Object.keys(e).length>0&&(a.wheres=Object.entries(e).map(([s,r])=>({name:s,value:r,operator:"="}))),t.limit!==void 0&&(a.limitOffset.limit=t.limit),t.offset!==void 
0&&(a.limitOffset.offset=t.offset),a}buildDataTable(e){const t=Object.entries(e).reduce((a,[s,r])=>(r!==void 0&&(a[s]=r),a),{});return l.trace("Building data table for query",{schemaName:this.schemaName,tableName:this.tableName,dataKeys:Object.keys(t),removedKeys:Object.keys(e).filter(a=>e[a]===void 0)}),this.dao.convertJsonToQueryTable(this.tableName,t,this.primaryKeyFields)}buildWhereFromObject(e){const t=Object.entries(e).filter(([a,s])=>s!==void 0).map(([a,s])=>({name:a,value:s}));return l.trace("Built where clauses from object",{schemaName:this.schemaName,originalKeys:Object.keys(e).length,filteredWheres:t.length}),t}/* minimal event emitter + pluggable per-type error handlers */on(e,t){return l.trace("Adding event listener",{schemaName:this.schemaName,tableName:this.tableName,event:e}),this.eventListeners.has(e)||this.eventListeners.set(e,[]),this.eventListeners.get(e).push(t),this}off(e,t){l.trace("Removing event listener",{schemaName:this.schemaName,tableName:this.tableName,event:e});const a=this.eventListeners.get(e);if(a){const s=a.indexOf(t);s>-1&&a.splice(s,1)}return this}_emit(e,t){l.trace("Emitting event",{schemaName:this.schemaName,tableName:this.tableName,event:e,hasData:!!t});const a=this.eventListeners.get(e);a&&a.forEach(s=>{try{s(t)}catch(r){l.error("Error in event handler",{schemaName:this.schemaName,tableName:this.tableName,event:e,error:r.message})}})}setErrorHandler(e,t){return l.debug("Setting error handler",{schemaName:this.schemaName,tableName:this.tableName,errorType:e}),this.errorHandlers.set(e,t),this}_handleError(e,t){l.error("Handling service error",{schemaName:this.schemaName,tableName:this.tableName,errorType:e,error:t.message});const a=this.errorHandlers.get(e);if(a)try{a(t)}catch(s){l.error("Error in error handler",{schemaName:this.schemaName,tableName:this.tableName,errorType:e,handlerError:s.message})}this._emit("error",{errorType:e,error:t})}_validateData(e){if(!e||typeof e!="object"){const t="Data must be a valid object";throw l.error("Data validation 
failed",{schemaName:this.schemaName,tableName:this.tableName,dataType:typeof e,isNull:e===null}),new Error(t)}Object.keys(e).forEach(t=>{e[t]===void 0&&(delete e[t],l.trace("Removed undefined field",{schemaName:this.schemaName,tableName:this.tableName,field:t}))})}async _ensureInitialized(){this.isInitialized||(l.debug("Service not initialized, initializing now",{schemaName:this.schemaName,tableName:this.tableName}),await this.init())}async ensureValidConnection(){var e;l.trace("Ensuring valid database connection",{schemaName:this.schemaName,tableName:this.tableName});try{!((e=this.dao)===null||e===void 0)&&e.isConnectionOpen()||(l.debug("Connection not valid, getting new connection",{schemaName:this.schemaName}),this.dao=await v.ensureDatabaseConnection(this.schemaName))}catch(t){l.warn("Error checking connection, getting new connection",{schemaName:this.schemaName,error:t.message}),this.dao=await v.ensureDatabaseConnection(this.schemaName)}}async getDatabaseInfo(){return l.trace("Getting database info",{schemaName:this.schemaName,tableName:this.tableName}),await this._ensureInitialized(),await this.dao.getDatabaseInfo()}async getTableInfo(){return l.trace("Getting table info",{schemaName:this.schemaName,tableName:this.tableName}),await this._ensureInitialized(),await this.dao.getTableInfo(this.tableName)}getStatus(){const e={schemaName:this.schemaName,tableName:this.tableName,isOpened:this.isOpened,isInitialized:this.isInitialized,hasDao:!!this.dao};return l.trace("Getting service status",e),e}async healthCheck(){l.debug("Performing health check",{schemaName:this.schemaName,tableName:this.tableName});try{await this._ensureInitialized();const e=await this.count(),t={healthy:!0,schemaName:this.schemaName,recordCount:e,timestamp:new Date().toISOString()};return l.info("Health check passed",{schemaName:this.schemaName,tableName:this.tableName,recordCount:e}),t}catch(e){const t={healthy:!1,schemaName:this.schemaName,error:e.message,timestamp:new 
Date().toISOString()};return l.error("Health check failed",{schemaName:this.schemaName,tableName:this.tableName,error:e.message}),t}}async close(){l.info("Closing BaseService",{schemaName:this.schemaName,tableName:this.tableName,isOpened:this.isOpened,isInitialized:this.isInitialized});try{return this.dao&&(await this.dao.close(),l.debug("DAO closed successfully",{schemaName:this.schemaName})),this.isOpened=!1,this.isInitialized=!1,this.eventListeners.clear(),this.errorHandlers.clear(),this.cache.clear(),l.info("BaseService closed successfully",{schemaName:this.schemaName,tableName:this.tableName}),this._emit("closed",{schemaName:this.schemaName}),!0}catch(e){throw l.error("Error closing BaseService",{schemaName:this.schemaName,tableName:this.tableName,error:e.message}),this._handleError("CLOSE_ERROR",e),e}}destroy(){l.debug("Destroying BaseService",{schemaName:this.schemaName,tableName:this.tableName}),v.offDatabaseReconnect(this.schemaName,this.reconnectHandler),this.eventListeners.clear(),this.errorHandlers.clear(),this.cache.clear(),l.trace("BaseService destroyed",{schemaName:this.schemaName,tableName:this.tableName})}async getAll(e={},t={}){return l.trace("Using getAll alias",{schemaName:this.schemaName,tableName:this.tableName}),this.findAll(e,t)}async getById(e){return l.trace("Using getById alias",{schemaName:this.schemaName,tableName:this.tableName,id:e}),this.findById(e)}async getFirst(e={}){return l.trace("Using getFirst alias",{schemaName:this.schemaName,tableName:this.tableName}),this.findFirst(e)}}const g=k(O.SERVICE_MANAGER);class L extends z{}class T{constructor(){this.services=new Map,this.serviceConfigs=new Map,this.serviceMetadata=new Map,this.eventHandlers=new Map,this.isShuttingDown=!1,this.cleanupInterval=null,g.info("ServiceManager instance created"),this.bindMethods(),this.startPeriodicCleanup()}static getInstance(){return T.instance?g.trace("Returning existing ServiceManager singleton instance"):(g.debug("Creating new ServiceManager 
singleton instance"),T.instance=new T),T.instance}static resetInstance(){g.warn("Resetting ServiceManager singleton instance"),T.instance?(T.instance.destroy(),T.instance=null,g.info("ServiceManager singleton instance reset successfully")):g.debug("No ServiceManager instance to reset")}bindMethods(){g.trace("Binding ServiceManager methods");const e=Object.getOwnPropertyNames(Object.getPrototypeOf(this));e.forEach(t=>{typeof this[t]=="function"&&t!=="constructor"&&(this[t]=this[t].bind(this))}),g.trace("ServiceManager methods bound successfully",{methodCount:e.length})}createServiceKey(e,t){const a=`${e}:${t}`;return g.trace("Created service key",{schemaName:e,tableName:t,key:a}),a}validateServiceConfig(e){var t,a;if(g.trace("Validating service config",{schemaName:e.schemaName,tableName:e.tableName}),!(!((t=e.schemaName)===null||t===void 0)&&t.trim()))throw g.error("Invalid service config: schema name missing",{config:e}),new Error("Schema name is required and cannot be empty");if(!(!((a=e.tableName)===null||a===void 0)&&a.trim()))throw g.error("Invalid service config: table name missing",{config:e}),new Error("Table name is required and cannot be empty");g.trace("Service config validation passed",{schemaName:e.schemaName,tableName:e.tableName})}registerService(e){g.debug("Registering service",{schemaName:e.schemaName,tableName:e.tableName,primaryKeyFields:e.primaryKeyFields,hasCustomServiceClass:!!e.serviceClass}),this.validateServiceConfig(e);const t=this.createServiceKey(e.schemaName,e.tableName),a={schemaName:e.schemaName.trim(),tableName:e.tableName.trim(),primaryKeyFields:e.primaryKeyFields||["id"],serviceClass:e.serviceClass||L},s=this.serviceConfigs.has(t);return this.serviceConfigs.set(t,a),s?g.info("Service configuration updated",{serviceKey:t}):g.info("Service registered successfully",{serviceKey:t}),this}registerServices(e){return g.debug("Registering multiple services",{count:e.length}),e.forEach((t,a)=>{try{this.registerService(t)}catch(s){throw 
g.error("Failed to register service in batch",{index:a,config:t,error:s.message}),s}}),g.info("Multiple services registered successfully",{count:e.length}),this}async createServiceInstance(e){var t;g.debug("Creating service instance",{schemaName:e.schemaName,tableName:e.tableName,serviceClassName:((t=e.serviceClass)===null||t===void 0?void 0:t.name)||"DefaultService",serviceClass:e.serviceClass,isDefaultService:e.serviceClass===L});const a=e.serviceClass||L;if(!a)throw g.error("ServiceClass is undefined",{config:e}),new Error("ServiceClass is undefined");g.debug("About to instantiate service",{ServiceClassConstructor:a,ServiceClassName:a.name});const s=new a(e.schemaName,e.tableName);return g.debug("Service instance created",{serviceConstructor:s.constructor.name,servicePrototype:Object.getPrototypeOf(s).constructor.name,hasFindByStoreId:typeof s.findByStoreId=="function"}),e.primaryKeyFields&&(g.trace("Setting primary key fields",{schemaName:e.schemaName,tableName:e.tableName,primaryKeyFields:e.primaryKeyFields}),s.setPrimaryKeyFields(e.primaryKeyFields)),g.info("Service instance created successfully",{schemaName:e.schemaName,tableName:e.tableName}),s}async getService(e,t){if(this.isShuttingDown)throw g.error("ServiceManager is shutting down, cannot get service",{schemaName:e,tableName:t}),new Error("ServiceManager is shutting down");const a=this.createServiceKey(e,t);g.debug("Getting service",{serviceKey:a});const s=this.serviceMetadata.get(a);if(s&&(s.lastAccessed=new Date().toISOString(),g.trace("Updated service access time",{serviceKey:a})),this.services.has(a))return g.trace("Returning existing service",{serviceKey:a}),this.services.get(a);let r=this.serviceConfigs.get(a);r||(g.debug("Creating default config for unregistered service",{serviceKey:a}),r={schemaName:e,tableName:t,primaryKeyFields:["id"],serviceClass:L},this.serviceConfigs.set(a,r));try{const i=await this.createServiceInstance(r);return 
this.services.set(a,i),this.serviceMetadata.set(a,{createdAt:new Date().toISOString(),lastAccessed:new Date().toISOString()}),this.emit("SERVICE_CREATED",{serviceKey:a,schemaName:e,tableName:t}),g.info("Service created and cached successfully",{serviceKey:a}),i}catch(i){throw g.error("Failed to create service",{serviceKey:a,error:i.message}),this.emit("SERVICE_ERROR",{serviceKey:a,schemaName:e,tableName:t,error:i}),i}}getExistingService(e,t){const a=this.createServiceKey(e,t);g.trace("Getting existing service",{serviceKey:a});const s=this.services.get(a)||null;return s?g.trace("Existing service found",{serviceKey:a}):g.trace("Existing service not found",{serviceKey:a}),s}async initializeService(e,t){const a=this.createServiceKey(e,t);g.debug("Initializing service",{serviceKey:a});try{const s=await this.getService(e,t);return await s.init(),g.info("Service initialized successfully",{serviceKey:a}),s}catch(s){throw g.error("Failed to initialize service",{serviceKey:a,error:s.message}),s}}async destroyService(e,t){const a=this.createServiceKey(e,t);g.debug("Destroying service",{serviceKey:a});const s=this.services.get(a);if(!s)return g.warn("Service not found for destruction",{serviceKey:a}),!1;try{return await s.close(),s.destroy(),this.services.delete(a),this.serviceMetadata.delete(a),this.emit("SERVICE_DESTROYED",{serviceKey:a,schemaName:e,tableName:t}),g.info("Service destroyed successfully",{serviceKey:a}),!0}catch(r){return g.error("Failed to destroy service",{serviceKey:a,error:r.message}),this.emit("SERVICE_ERROR",{serviceKey:a,schemaName:e,tableName:t,error:r}),!1}}getServicesBySchema(e){g.trace("Getting services by schema",{schemaName:e});const t=[];for(const[a,s]of this.services){const[r]=a.split(":");r===e&&t.push(s)}return g.debug("Found services for schema",{schemaName:e,count:t.length}),t}getServiceKeysBySchema(e){g.trace("Getting service keys by schema",{schemaName:e});const t=[];for(const a of 
this.services.keys()){const[s]=a.split(":");s===e&&t.push(a)}return g.debug("Found service keys for schema",{schemaName:e,keys:t}),t}async destroyServicesBySchema(e){const t=this.getServiceKeysBySchema(e);if(g.debug("Destroying services by schema",{schemaName:e,serviceKeys:t}),t.length===0){g.debug("No services found to destroy for schema",{schemaName:e});return}const a=t.map(async r=>{const[,i]=r.split(":");try{const n=await this.destroyService(e,i);return g.trace("Service destroy result",{serviceKey:r,result:n}),n}catch(n){return g.error("Error destroying service in schema cleanup",{serviceKey:r,error:n.message}),!1}}),s=(await Promise.all(a)).filter(Boolean).length;g.info("Schema services destruction completed",{schemaName:e,totalServices:t.length,successfulDestroys:s})}getAllServiceInfo(){g.trace("Getting all service info");const e=[];for(const[t,a]of this.serviceConfigs){const s=this.services.get(t),r=this.serviceMetadata.get(t);e.push({key:t,schemaName:a.schemaName,tableName:a.tableName,status:s?s.getStatus():{schemaName:a.schemaName,tableName:a.tableName,isOpened:!1,isInitialized:!1,hasDao:!1},isRegistered:!0,createdAt:(r==null?void 0:r.createdAt)||"N/A",lastAccessed:r==null?void 0:r.lastAccessed})}for(const[t,a]of this.services)if(!this.serviceConfigs.has(t)){const[s,r]=t.split(":"),i=this.serviceMetadata.get(t);e.push({key:t,schemaName:s,tableName:r,status:a.getStatus(),isRegistered:!1,createdAt:(i==null?void 0:i.createdAt)||"N/A",lastAccessed:i==null?void 0:i.lastAccessed})}return e}async healthCheck(){const e=Array.from(this.services.entries()).map(async([r,i])=>{try{const n=await i.healthCheck();return Object.assign(Object.assign({},n),{serviceKey:r})}catch(n){const[h,m]=r.split(":");return{healthy:!1,schemaName:h,error:n.message,timestamp:new Date().toISOString(),serviceKey:r}}}),t=await Promise.all(e),a=t.filter(r=>r.healthy).length,s={totalServices:t.length,healthyServices:a,unhealthyServices:t.length-a,services:t,timestamp:new 
Date().toISOString(),overallHealth:a===t.length};return this.emit("HEALTH_CHECK_COMPLETED",{serviceKey:"*",schemaName:"*",tableName:"*",data:s}),s}async executeSchemaTransaction(e,t){g.trace(`Executing schema transaction for schema ${e}`);const a=this.getServicesBySchema(e);if(a.length===0)throw g.error("No services found for schema",{schemaName:e}),new Error(`No services found for schema: ${e}`);for(const s of a)await s.init();return await a[0].executeTransaction(async()=>await t(a))}startPeriodicCleanup(){this.cleanupInterval=setInterval(()=>{this.cleanupUnusedServices()},300*1e3)}async cleanupUnusedServices(e=1800*1e3){if(this.isShuttingDown)return;const t=Date.now(),a=[];for(const[s,r]of this.serviceMetadata){if(!r.lastAccessed)continue;const i=new Date(r.lastAccessed).getTime();t-i>e&&a.push(s)}for(const s of a){const[r,i]=s.split(":");await this.destroyService(r,i)}}on(e,t){return this.eventHandlers.has(e)||this.eventHandlers.set(e,[]),this.eventHandlers.get(e).push(t),this}off(e,t){const a=this.eventHandlers.get(e);if(a){const s=a.indexOf(t);s>-1&&a.splice(s,1)}return this}emit(e,t){const a=Object.assign(Object.assign({},t),{type:e,timestamp:new Date().toISOString()}),s=this.eventHandlers.get(e);s&&s.forEach(i=>{try{i(a)}catch(n){console.error(`ServiceManager: Error in ${e} event handler:`,n)}});const r=this.eventHandlers.get("*");r&&r.forEach(i=>{try{i(a)}catch(n){console.error("ServiceManager: Error in global event handler:",n)}})}hasService(e,t){const a=this.createServiceKey(e,t);return this.services.has(a)}isRegistered(e,t){const a=this.createServiceKey(e,t);return this.serviceConfigs.has(a)}getServiceCount(){return this.services.size}getRegisteredCount(){return this.serviceConfigs.size}getSchemas(){const e=new Set;for(const t of this.services.keys()){const[a]=t.split(":");e.add(a)}return Array.from(e)}async destroy(){this.isShuttingDown=!0,this.cleanupInterval&&(clearInterval(this.cleanupInterval),this.cleanupInterval=null);const 
e=Array.from(this.services.entries()).map(async([t,a])=>{try{await a.close(),a.destroy()}catch(s){console.error(`Error destroying service ${t}:`,s)}});await Promise.all(e),this.services.clear(),this.serviceConfigs.clear(),this.serviceMetadata.clear(),this.eventHandlers.clear(),this.isShuttingDown=!1}}T.instance=null,T.getInstance();class F{constructor(e){this.tableName="",this.selectFields=["*"],this.joinClauses=[],this.whereConditions=[],this.groupByFields=[],this.havingConditions=[],this.orderByFields=[],this.limitValue=null,this.offsetValue=null,this.params=[],this.unionQueries=[],this.subQueries=[],this.cteQueries=new Map,this.dao=null,this.dao=e||null}static table(e,t){const a=new F(t);return a.tableName=e,a}static from(e,t){return F.table(e,t)}select(e){return this.selectFields=Array.isArray(e)?e:[e],this}selectRaw(e){return this.selectFields=[e],this}selectDistinct(e){const t=Array.isArray(e)?e.join(", "):e;return this.selectFields=[`DISTINCT ${t}`],this}join(e,t,a="INNER"){return this.joinClauses.push({type:a,table:e,condition:t}),this}innerJoin(e,t){return this.join(e,t,"INNER")}leftJoin(e,t){return this.join(e,t,"LEFT")}rightJoin(e,t){return this.join(e,t,"RIGHT")}fullOuterJoin(e,t){return this.join(e,t,"FULL OUTER")}where(e,t,a){if(typeof e=="object")return Object.entries(e).forEach(([i,n])=>{this.whereConditions.push({field:i,operator:"=",value:n})}),this;let s="=",r=t;return arguments.length===3&&(s=t,r=a),this.whereConditions.push({field:e,operator:s,value:r}),this}whereEquals(e,t){return this.where(e,"=",t)}whereNot(e,t){return this.where(e,"!=",t)}whereLike(e,t){return this.where(e,"LIKE",t)}whereNotLike(e,t){return this.where(e,"NOT LIKE",t)}whereIn(e,t){return this.whereConditions.push({field:e,operator:"IN",value:t}),this}whereNotIn(e,t){return this.whereConditions.push({field:e,operator:"NOT IN",value:t}),this}whereBetween(e,t,a){return this.whereConditions.push({field:e,operator:"BETWEEN",value:[t,a]}),this}whereNotBetween(e,t,a){return 
this.whereConditions.push({field:e,operator:"NOT BETWEEN",value:[t,a]}),this}whereNull(e){return this.whereConditions.push({field:e,operator:"IS NULL",value:null}),this}whereNotNull(e){return this.whereConditions.push({field:e,operator:"IS NOT NULL",value:null}),this}whereExists(e){return this.whereConditions.push({field:"",operator:"EXISTS",value:e}),this}whereNotExists(e){return this.whereConditions.push({field:"",operator:"NOT EXISTS",value:e}),this}orWhere(e,t,a){return this.where(e,t,a)}groupBy(e){return this.groupByFields=Array.isArray(e)?e:[e],this}having(e,t,a){let s="=",r=t;return arguments.length===3&&(s=t,r=a),this.havingConditions.push({field:e,operator:s,value:r}),this}havingCount(e,t,a){return this.having(`COUNT(${e})`,t,a)}orderBy(e,t="ASC"){return this.orderByFields.push(`${e} ${t}`),this}orderByDesc(e){return this.orderBy(e,"DESC")}orderByRaw(e){return this.orderByFields.push(e),this}latest(e="created_at"){return this.orderByDesc(e)}oldest(e="created_at"){return this.orderBy(e,"ASC")}limit(e){return this.limitValue=e,this}offset(e){return this.offsetValue=e,this}skip(e){return this.offset(e)}take(e){return this.limit(e)}firstRow(){return this.limit(1)}paginate(e,t){return this.limitValue=t,this.offsetValue=(e-1)*t,this}union(e){return this.unionQueries.push(e),this}unionAll(e){return this.union(e)}with(e,t){return this.cteQueries.set(e,t),this}whereSubQuery(e,t,a){return this.subQueries.push({query:a,alias:""}),this.whereConditions.push({field:e,operator:t,value:a}),this}count(e="*"){return this.selectFields=[`COUNT(${e}) as count`],this}sum(e){return this.selectFields=[`SUM(${e}) as sum`],this}avg(e){return this.selectFields=[`AVG(${e}) as avg`],this}max(e){return this.selectFields=[`MAX(${e}) as max`],this}min(e){return this.selectFields=[`MIN(${e}) as min`],this}toSQL(){let e="";const t=[];if(this.cteQueries.size>0){const a=[];this.cteQueries.forEach((s,r)=>{const{sql:i,params:n}=s.toSQL();a.push(`${r} AS (${i})`),t.push(...n)}),e+=`WITH 
${a.join(", ")} `}if(e+=`SELECT ${this.selectFields.join(", ")} FROM ${this.tableName}`,this.joinClauses.length>0&&this.joinClauses.forEach(a=>{e+=` ${a.type} JOIN ${a.table} ON ${a.condition}`}),this.whereConditions.length>0){const a=[];this.whereConditions.forEach(s=>{const{clause:r,conditionParams:i}=this.buildCondition(s);a.push(r),t.push(...i)}),e+=` WHERE ${a.join(" AND ")}`}if(this.groupByFields.length>0&&(e+=` GROUP BY ${this.groupByFields.join(", ")}`),this.havingConditions.length>0){const a=[];this.havingConditions.forEach(s=>{const{clause:r,conditionParams:i}=this.buildCondition(s);a.push(r),t.push(...i)}),e+=` HAVING ${a.join(" AND ")}`}return this.orderByFields.length>0&&(e+=` ORDER BY ${this.orderByFields.join(", ")}`),this.limitValue!==null&&(e+=` LIMIT ${this.limitValue}`),this.offsetValue!==null&&(e+=` OFFSET ${this.offsetValue}`),this.unionQueries.length>0&&this.unionQueries.forEach(a=>{const{sql:s,params:r}=a.toSQL();e+=` UNION ${s}`,t.push(...r)}),{sql:e,params:t}}buildCondition(e){const{field:t,operator:a,value:s}=e,r=[];switch(a.toUpperCase()){case"IN":case"NOT IN":const i=s.map(()=>"?").join(", ");return r.push(...s),{clause:`${t} ${a} (${i})`,conditionParams:r};case"BETWEEN":case"NOT BETWEEN":return r.push(s[0],s[1]),{clause:`${t} ${a} ? 
AND ?`,conditionParams:r};case"IS NULL":case"IS NOT NULL":return{clause:`${t} ${a}`,conditionParams:[]};case"EXISTS":case"NOT EXISTS":const{sql:n,params:h}=s.toSQL();return r.push(...h),{clause:`${a} (${n})`,conditionParams:r};default:if(s instanceof F){const{sql:m,params:f}=s.toSQL();return r.push(...f),{clause:`${t} ${a} (${m})`,conditionParams:r}}return r.push(s),{clause:`${t} ${a} ?`,conditionParams:r}}}async get(){if(!this.dao)throw new Error("DAO instance required for query execution");const{sql:e,params:t}=this.toSQL();return(await this.dao.execute(e,t)).rows}async first(){this.limit(1);const e=await this.get();return e.length>0?e[0]:null}async pluck(e){return this.select(e),(await this.get()).map(t=>t[e])}async exists(){return this.select("1").limit(1),(await this.get()).length>0}async countResult(){this.count();const e=await this.first();return e?e.count:0}static insert(e,t){const a=Object.keys(t),s=Object.values(t),r=s.map(()=>"?").join(", ");return{sql:`INSERT INTO ${e} (${a.join(", ")}) VALUES (${r})`,params:s}}static insertMany(e,t){if(t.length===0)throw new Error("Data array cannot be empty");const a=Object.keys(t[0]),s=a.map(()=>"?").join(", "),r=t.map(()=>`(${s})`).join(", "),i=t.flatMap(n=>Object.values(n));return{sql:`INSERT INTO ${e} (${a.join(", ")}) VALUES ${r}`,params:i}}static update(e,t,a,s=[]){const r=Object.keys(t).map(n=>`${n} = ?`).join(", "),i=[...Object.values(t),...s];return{sql:`UPDATE ${e} SET ${r} WHERE ${a}`,params:i}}static delete(e,t,a=[]){return{sql:`DELETE FROM ${e} WHERE ${t}`,params:a}}static upsert(e,t,a){const s=Object.keys(t),r=Object.values(t),i=r.map(()=>"?").join(", "),n=s.filter(f=>!a.includes(f)),h=n.length>0?n.map(f=>`${f} = excluded.${f}`).join(", "):"";let m=`INSERT INTO ${e} (${s.join(", ")}) VALUES (${i})`;return n.length>0?m+=` ON CONFLICT(${a.join(", ")}) DO UPDATE SET ${h}`:m+=` ON CONFLICT(${a.join(", ")}) DO NOTHING`,{sql:m,params:r}}clone(){if(!this.dao)throw new Error("DAO instance required for cloning 
QueryBuilder");const e=new F(this.dao);return e.tableName=this.tableName,e.selectFields=[...this.selectFields],e.joinClauses=[...this.joinClauses],e.whereConditions=[...this.whereConditions],e.groupByFields=[...this.groupByFields],e.havingConditions=[...this.havingConditions],e.orderByFields=[...this.orderByFields],e.limitValue=this.limitValue,e.offsetValue=this.offsetValue,e.unionQueries=[...this.unionQueries],e.subQueries=[...this.subQueries],e.cteQueries=new Map(this.cteQueries),e}toRawSQL(){const{sql:e,params:t}=this.toSQL();let a=e;return t.forEach(s=>{typeof s=="string"?a=a.replace("?",`'${s.replace(/'/g,"''")}'`):s==null?a=a.replace("?","NULL"):a=a.replace("?",String(s))}),a}explain(){return this.selectFields=["EXPLAIN QUERY PLAN "+this.selectFields.join(", ")],this}}class M{sanitizeSQL(e){return e.trim()}bindParameters(e,t){if(!t||t.length===0)return e;let a=0;return e.replace(/\?/g,()=>{if(a<t.length){const s=t[a++];return typeof s=="string"?`'${s.replace(/'/g,"''")}'`:s==null?"NULL":String(s)}return"?"})}}const E=k(O.NODEJS_ADAPTER);class X extends M{constructor(){super(...arguments),this.Database=null}async isSupported(){try{return typeof process!="undefined"&&process.versions!=null&&process.versions.node!=null}catch(e){return E.debug("Node.js environment not detected",e),!1}}async loadBetterSqlite3(){if(this.Database)return this.Database;try{const e=(await import("better-sqlite3")).default;return this.Database=e,e}catch(e){throw E.error("Failed to load better-sqlite3",e),new Error("better-sqlite3 is not available in this environment")}}async connect(e){E.trace(`Connecting to database: ${e}`);try{if(!await this.isSupported())throw new Error("Node.js environment is not supported");const t=await this.loadBetterSqlite3(),a=new t(e,{});return a.pragma("journal_mode = WAL"),a.pragma("synchronous = NORMAL"),E.debug(`Connected to database: ${e}`,a),new J(a)}catch(t){throw E.error(`Failed to connect to database: ${e}`,t),new Error(`Failed to connect to database: 
${t}`)}}}const q=p=>p.map(e=>{if(typeof e=="boolean")return e?1:0;if(e===void 0)return null;if(e instanceof Date)return e.toISOString();if(typeof e=="object"&&e!==null)throw new Error(`Cannot bind object/array: ${JSON.stringify(e)}`);return e});let J=class{constructor(e){this.db=e}getQueryType(e){const t=e.trim().toUpperCase();return t.startsWith("SELECT")?"SELECT":t.startsWith("PRAGMA")?"PRAGMA":"MODIFY"}async execute(e,t=[]){var a;E.debug("NodeJSConnection.execute() sql:",e),E.debug("NodeJSConnection.execute() params:",t);try{const s=q(t);E.debug("Normalized params:",s);const r=this.getQueryType(e),i=this.db.prepare(e);switch(r){case"SELECT":{const n=i.all(...s);return E.debug("SQL SELECT result:",n),{rows:n||[],rowsAffected:0}}case"PRAGMA":try{const n=i.all(...s);return E.debug("PRAGMA result:",n),{rows:n||[],rowsAffected:0,lastInsertRowId:0}}catch(n){if(!((a=n.message)===null||a===void 0)&&a.includes("does not return data")){const h=i.run(...s);return E.debug("PRAGMA execution result:",h),{rows:[],rowsAffected:0,lastInsertRowId:0}}throw n}case"MODIFY":default:{const n=i.run(...s);return E.debug("SQL execution result:",{changes:n.changes,lastInsertRowid:n.lastInsertRowid}),{rows:[],rowsAffected:n.changes||0,lastInsertRowId:n.lastInsertRowid||void 0}}}}catch(s){throw(!s.message||s.message.indexOf("_schema_info")===-1)&&E.error("SQL execution failed",{code:s.code,message:s.message}),s}}async close(){try{this.db.close(),E.debug("Database connection closed")}catch(e){throw E.error("Close Error:",e),e}}async transaction(e){const t=this.db.transaction(async()=>{await e(new Y(this.db))});try{t(),E.debug("Transaction completed successfully")}catch(a){throw E.error("Transaction failed",a),a}}};class Y{constructor(e){this.db=e}getQueryType(e){const t=e.trim().toUpperCase();return t.startsWith("SELECT")?"SELECT":t.startsWith("PRAGMA")?"PRAGMA":"MODIFY"}async executeSql(e,t=[]){var a;try{const s=q(t);E.debug("Normalized params:",s);const 
r=this.getQueryType(e),i=this.db.prepare(e);switch(r){case"SELECT":return{rows:i.all(...s)||[],rowsAffected:0};case"PRAGMA":try{return{rows:i.all(...s)||[],rowsAffected:0,lastInsertRowId:0}}catch(n){if(!((a=n.message)===null||a===void 0)&&a.includes("does not return data")){const h=i.run(...s);return{rows:[],rowsAffected:0,lastInsertRowId:0}}throw n}case"MODIFY":default:{const n=i.run(...s);return{rows:[],rowsAffected:n.changes||0,lastInsertRowId:n.lastInsertRowid||void 0}}}}catch(s){throw E.error("Transaction SQL execution failed",s),s}}}const A=k(O.REACTNATIVE_ADAPTER);class Z extends M{constructor(){super(...arguments),this.SQLite=null,this.Platform=null}async isSupported(){try{const{Platform:e}=await import("react-native");return this.Platform=e,e.OS==="ios"||e.OS==="android"}catch(e){return A.debug("React Native environment not detected",e),!1}}async loadSQLite(){if(this.SQLite)return this.SQLite;try{const e=(await import("react-native-sqlite-storage")).default;return e.DEBUG(!1),e.enablePromise(!0),this.SQLite=e,e}catch(e){throw A.error("Failed to load react-native-sqlite-storage",e),new Error("react-native-sqlite-storage is not available in this environment")}}async connect(e){try{if(!await this.isSupported())throw new Error("React Native environment is not supported");const t=await(await this.loadSQLite()).openDatabase({name:e,location:"default"});return A.debug(`Connected to database: ${e}`,t),new ee(t)}catch(t){throw A.error(`Failed to connect to database: ${e}`,t),new Error(`Failed to connect to database: ${t}`)}}}class ee{constructor(e){this.db=e}async execute(e,t=[]){var a;try{if(e.trim().toUpperCase().startsWith("SELECT")){const s=await this.db.executeSql(e,t),r=[];if(s&&s.length>0){const i=s[0];A.debug("SQL SELECT result:",i);for(let n=0;n<i.rows.length;n++)r.push(i.rows.item(n));A.debug("SQL SELECT rows:",r)}return{rows:r,rowsAffected:0}}else{const s=await this.db.executeSql(e,t);if(s&&s.length>0){const r=s[0];return A.debug("SQL execution 
result:",r),{rows:((a=r.rows)===null||a===void 0?void 0:a.raw())||[],rowsAffected:r.rowsAffected||0,lastInsertRowId:r.insertId||void 0}}return{rows:[],rowsAffected:0}}}catch(s){throw A.error("SQL execution failed",s),s}}async close(){try{await this.db.close()}catch(e){throw A.error("Close Error:",e),e}}async transaction(e){return new Promise((t,a)=>{this.db.transaction(async s=>{try{await e(new te(s)),t()}catch(r){a(r)}},s=>{a(s)})})}}class te{constructor(e){this.tx=e}async executeSql(e,t=[]){return new Promise((a,s)=>{this.tx.executeSql(e,t,(r,i)=>{if(e.trim().toUpperCase().startsWith("SELECT")){const n=[];for(let h=0;h<i.rows.length;h++)n.push(i.rows.item(h));a({rows:n,rowsAffected:0})}else a({rows:[],rowsAffected:i.rowsAffected||0,lastInsertRowId:i.insertId||void 0})},(r,i)=>{s(i)})})}}const ae={sqlite:{string:"TEXT",varchar:"TEXT",char:"TEXT",text:"TEXT",email:"TEXT",url:"TEXT",uuid:"TEXT",integer:"INTEGER",int:"INTEGER",bigint:"INTEGER",smallint:"INTEGER",tinyint:"INTEGER",number:"REAL",decimal:"REAL",numeric:"REAL",float:"REAL",double:"REAL",boolean:"INTEGER",bool:"INTEGER",timestamp:"TEXT",datetime:"TEXT",date:"TEXT",time:"TEXT",json:"TEXT",jsonb:"TEXT",array:"TEXT",object:"TEXT",blob:"BLOB",binary:"BLOB",objectid:"TEXT"}};export{M as BaseAdapter,z as BaseService,I as DatabaseFactory,v as DatabaseManager,L as DefaultService,X as NodeJSAdapter,F as QueryBuilder,Z as ReactNativeAdapter,ae as SQLITE_TYPE_MAPPING,O as SQLiteModules,T as ServiceManager,P as UniversalDAO,B as configureSQLiteLogger};
14
14
  //# sourceMappingURL=index.mjs.map