@budibase/backend-core 2.15.2 → 2.15.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
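The file compared below is the package's generated source map, dist/index.js.map. For orientation, here is a minimal sketch of the Source Map v3 shape that the hunk's fields ("version", "sources", "sourcesContent") belong to; the field names follow the spec, while the interface name RawSourceMap is a hypothetical label for illustration and not an export of this package.

// Sketch of the Source Map v3 document layout (field names per the spec).
// The hunk below touches the "sourcesContent" entry, which embeds the
// original TypeScript sources that dist/index.js was built from.
interface RawSourceMap {
  version: 3                          // always 3 for this format
  file?: string                       // name of the generated file, e.g. "index.js"
  sourceRoot?: string                 // optional prefix applied to every "sources" entry
  sources: string[]                   // paths of the original source files
  sourcesContent?: (string | null)[]  // full text of each source, in "sources" order
  names: string[]                     // identifiers referenced by the mappings
  mappings: string                    // Base64 VLQ-encoded position mappings
}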
package/dist/index.js.map CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../types/src/sdk/automations/index.ts", "../../types/src/sdk/hosting.ts", "../../types/src/sdk/context.ts", "../../types/src/sdk/events/app.ts", "../../types/src/sdk/events/auth.ts", "../../types/src/sdk/events/automation.ts", "../../types/src/sdk/events/email.ts", "../../types/src/sdk/events/datasource.ts", "../../types/src/sdk/events/event.ts", "../../types/src/sdk/events/layout.ts", "../../types/src/sdk/events/license.ts", "../../types/src/sdk/events/version.ts", "../../types/src/sdk/events/query.ts", "../../types/src/sdk/events/role.ts", "../../types/src/sdk/events/rows.ts", "../../types/src/sdk/events/screen.ts", "../../types/src/sdk/events/serve.ts", "../../types/src/sdk/events/table.ts", "../../types/src/sdk/events/user.ts", "../../types/src/sdk/events/view.ts", "../../types/src/sdk/events/account.ts", "../../types/src/sdk/events/backfill.ts", "../../types/src/sdk/events/identification.ts", "../../types/src/sdk/events/userGroup.ts", "../../types/src/sdk/events/plugin.ts", "../../types/src/sdk/events/backup.ts", "../../types/src/sdk/events/environmentVariable.ts", "../../types/src/sdk/events/auditLog.ts", "../../types/src/sdk/events/index.ts", "../../types/src/sdk/licensing/license.ts", "../../types/src/sdk/licensing/plan.ts", "../../types/src/sdk/licensing/quota.ts", "../../types/src/sdk/licensing/feature.ts", "../../types/src/sdk/licensing/billing.ts", "../../types/src/sdk/licensing/index.ts", "../../types/src/sdk/migrations.ts", "../../types/src/sdk/datasources.ts", "../../types/src/sdk/search.ts", "../../types/src/sdk/koa.ts", "../../types/src/sdk/auth.ts", "../../types/src/sdk/locks.ts", "../../types/src/sdk/db.ts", "../../types/src/sdk/middleware/matchers.ts", "../../types/src/sdk/middleware/tenancy.ts", "../../types/src/sdk/middleware/index.ts", "../../types/src/sdk/featureFlag.ts", "../../types/src/sdk/environmentVariables.ts", "../../types/src/sdk/auditLogs.ts", "../../types/src/sdk/sso.ts", "../../types/src/sdk/user.ts", "../../types/src/sdk/cli/constants.ts", "../../types/src/sdk/cli/index.ts", "../../types/src/sdk/websocket.ts", "../../types/src/sdk/permissions.ts", "../../types/src/sdk/row.ts", "../../types/src/sdk/index.ts", "../../types/src/documents/account/account.ts", "../../types/src/documents/account/user.ts", "../../types/src/documents/account/flag.ts", "../../types/src/documents/account/index.ts", "../../types/src/documents/app/app.ts", "../../types/src/documents/app/automation.ts", "../../types/src/documents/app/datasource.ts", "../../types/src/documents/app/layout.ts", "../../types/src/documents/app/query.ts", "../../types/src/documents/app/role.ts", "../../types/src/documents/app/table/table.ts", "../../types/src/documents/app/row.ts", "../../types/src/documents/app/table/constants.ts", "../../types/src/documents/app/table/schema.ts", "../../types/src/documents/app/table/index.ts", "../../types/src/documents/app/screen.ts", "../../types/src/documents/app/view.ts", "../../types/src/documents/document.ts", "../../types/src/documents/app/user.ts", "../../types/src/documents/app/backup.ts", "../../types/src/documents/app/webhook.ts", "../../types/src/documents/app/links.ts", "../../types/src/documents/app/component.ts", "../../types/src/documents/app/index.ts", "../../types/src/documents/global/config.ts", "../../types/src/documents/global/user.ts", "../../types/src/documents/global/userGroup.ts", "../../types/src/documents/global/plugin.ts", "../../types/src/documents/global/quotas.ts", "../../types/src/documents/global/schedule.ts", 
"../../types/src/documents/global/templates.ts", "../../types/src/documents/global/environmentVariables.ts", "../../types/src/documents/global/auditLogs.ts", "../../types/src/documents/global/index.ts", "../../types/src/documents/platform/info.ts", "../../types/src/documents/platform/users.ts", "../../types/src/documents/platform/accounts.ts", "../../types/src/documents/platform/tenants.ts", "../../types/src/documents/platform/index.ts", "../../types/src/documents/pouch.ts", "../../types/src/documents/index.ts", "../../types/src/api/account/accounts.ts", "../../types/src/api/account/user.ts", "../../types/src/api/account/license.ts", "../../types/src/api/account/status.ts", "../../types/src/api/account/index.ts", "../../types/src/api/web/analytics.ts", "../../types/src/api/web/auth.ts", "../../types/src/api/web/user.ts", "../../types/src/api/web/errors.ts", "../../types/src/api/web/debug.ts", "../../types/src/api/web/schedule.ts", "../../types/src/api/web/system/environment.ts", "../../types/src/api/web/system/index.ts", "../../types/src/api/web/app/backup.ts", "../../types/src/api/web/app/datasource.ts", "../../types/src/api/web/app/row.ts", "../../types/src/api/web/app/view.ts", "../../types/src/api/web/app/rows.ts", "../../types/src/api/web/app/table.ts", "../../types/src/api/web/app/permission.ts", "../../types/src/api/web/app/attachment.ts", "../../types/src/api/web/app/user.ts", "../../types/src/api/web/app/index.ts", "../../types/src/api/web/global/environmentVariables.ts", "../../types/src/api/web/global/auditLogs.ts", "../../types/src/api/web/global/events.ts", "../../types/src/api/web/global/configs.ts", "../../types/src/api/web/global/scim/users.ts", "../../types/src/api/web/global/scim/groups.ts", "../../types/src/api/web/global/scim/shared.ts", "../../types/src/api/web/global/scim/index.ts", "../../types/src/api/web/global/license.ts", "../../types/src/api/web/global/index.ts", "../../types/src/api/web/pagination.ts", "../../types/src/api/web/searchFilter.ts", "../../types/src/api/web/cookies.ts", "../../types/src/api/web/index.ts", "../../types/src/api/index.ts", "../../types/src/core/installation.ts", "../../types/src/core/index.ts", "../../types/src/shared/typeUtils.ts", "../../types/src/shared/index.ts", "../../types/src/index.ts", "../src/constants/db.ts", "../../shared-core/src/constants/api.ts", "../../shared-core/src/constants/index.ts", "../../../node_modules/dayjs/dayjs.min.js", "../../shared-core/src/helpers/helpers.ts", "../../shared-core/src/helpers/integrations.ts", "../../shared-core/src/helpers/index.ts", "../../shared-core/src/filters.ts", "../../shared-core/src/utils.ts", "../../shared-core/src/sdk/documents/applications.ts", "../../shared-core/src/sdk/documents/users.ts", "../../shared-core/src/sdk/documents/index.ts", "../../shared-core/src/sdk/index.ts", "../../shared-core/src/table.ts", "../../shared-core/src/index.ts", "../src/constants/misc.ts", "../src/constants/index.ts", "../src/context/identity.ts", "../src/environment.ts", "../src/context/Context.ts", "../src/docIds/conversions.ts", "../src/db/couch/connections.ts", "../src/helpers.ts", "../src/db/couch/utils.ts", "../src/db/couch/pouchDB.ts", "../src/docIds/newid.ts", "../src/db/instrumentation.ts", "../src/db/couch/DatabaseImpl.ts", "../src/db/constants.ts", "../src/db/couch/index.ts", "../src/db/db.ts", "../src/context/mainContext.ts", "../src/context/index.ts", "../src/redis/utils.ts", "../src/logging/correlation/correlation.ts", "../src/logging/correlation/index.ts", 
"../src/objectStore/utils.ts", "../src/cache/appMetadata.ts", "../src/docIds/ids.ts", "../src/docIds/params.ts", "../src/docIds/index.ts", "../src/db/utils.ts", "../src/db/views.ts", "../src/db/Replication.ts", "../src/db/lucene.ts", "../src/db/searchIndexes/searchIndexes.ts", "../src/db/searchIndexes/index.ts", "../src/db/errors.ts", "../src/db/index.ts", "../src/objectStore/objectStore.ts", "../src/objectStore/cloudfront.ts", "../src/objectStore/buckets/app.ts", "../src/objectStore/buckets/global.ts", "../src/objectStore/buckets/plugins.ts", "../src/objectStore/buckets/index.ts", "../src/objectStore/index.ts", "../src/logging/system.ts", "../src/logging/pino/logger.ts", "../src/logging/alerts.ts", "../src/logging/index.ts", "../src/timers/timers.ts", "../src/timers/index.ts", "../src/redis/redis.ts", "../src/redis/init.ts", "../src/index.ts", "../src/configs/index.ts", "../src/configs/configs.ts", "../src/cache/index.ts", "../src/cache/generic.ts", "../src/cache/base/index.ts", "../src/cache/user.ts", "../src/tenancy/index.ts", "../src/tenancy/db.ts", "../src/tenancy/tenancy.ts", "../src/platform/index.ts", "../src/platform/users.ts", "../src/platform/platformDb.ts", "../src/platform/tenants.ts", "../src/redis/redlockImpl.ts", "../src/utils/index.ts", "../src/utils/hashing.ts", "../src/utils/utils.ts", "../src/utils/stringUtils.ts", "../src/utils/Duration.ts", "../src/accounts/index.ts", "../src/accounts/api.ts", "../src/accounts/accounts.ts", "../src/users/index.ts", "../src/users/users.ts", "../src/users/utils.ts", "../src/users/lookup.ts", "../src/cache/invite.ts", "../src/errors/index.ts", "../src/errors/errors.ts", "../src/users/db.ts", "../src/users/events.ts", "../src/events/index.ts", "../src/events/processors/index.ts", "../src/events/processors/AnalyticsProcessor.ts", "../src/events/analytics.ts", "../src/events/processors/posthog/PosthogProcessor.ts", "../src/events/processors/posthog/rateLimiting.ts", "../src/events/processors/posthog/index.ts", "../src/events/processors/LoggingProcessor.ts", "../src/events/processors/AuditLogsProcessor.ts", "../src/queue/index.ts", "../src/queue/queue.ts", "../src/queue/inMemoryQueue.ts", "../src/queue/constants.ts", "../src/queue/listeners.ts", "../src/events/processors/Processors.ts", "../src/events/identification.ts", "../src/installation.ts", "../src/events/backfill.ts", "../src/events/asyncEvents/queue.ts", "../src/events/asyncEvents/publisher.ts", "../src/events/events.ts", "../src/events/publishers/account.ts", "../src/events/publishers/app.ts", "../src/events/publishers/auth.ts", "../src/events/publishers/automation.ts", "../src/events/publishers/datasource.ts", "../src/events/publishers/email.ts", "../src/events/publishers/license.ts", "../src/events/publishers/layout.ts", "../src/events/publishers/org.ts", "../src/events/publishers/query.ts", "../src/events/publishers/role.ts", "../src/events/publishers/screen.ts", "../src/events/publishers/rows.ts", "../src/events/publishers/table.ts", "../src/events/publishers/serve.ts", "../src/events/publishers/user.ts", "../src/events/publishers/view.ts", "../src/events/publishers/installation.ts", "../src/events/publishers/backfill.ts", "../src/events/publishers/group.ts", "../src/events/publishers/plugin.ts", "../src/events/publishers/backup.ts", "../src/events/publishers/environmentVariable.ts", "../src/events/publishers/auditLog.ts", "../src/security/sessions.ts", "../src/security/index.ts", "../src/security/auth.ts", "../src/cache/writethrough.ts", "../src/cache/passwordReset.ts", 
"../src/migrations/index.ts", "../src/migrations/migrations.ts", "../src/migrations/definitions.ts", "../src/security/roles.ts", "../src/security/permissions.ts", "../src/features/index.ts", "../src/features/installation.ts", "../src/auth/index.ts", "../src/auth/auth.ts", "../src/middleware/index.ts", "../src/middleware/passport/local.ts", "../src/middleware/passport/utils.ts", "../src/middleware/passport/sso/google.ts", "../src/middleware/passport/sso/sso.ts", "../src/middleware/passport/sso/oidc.ts", "../src/middleware/passport/datasource/google.ts", "../src/middleware/authenticated.ts", "../src/middleware/matchers.ts", "../src/security/encryption.ts", "../src/middleware/auditLog.ts", "../src/middleware/tenancy.ts", "../src/middleware/internalApi.ts", "../src/middleware/csrf.ts", "../src/middleware/adminOnly.ts", "../src/middleware/builderOrAdmin.ts", "../src/middleware/builderOnly.ts", "../src/logging/pino/middleware.ts", "../src/logging/correlation/middleware.ts", "../src/middleware/errorHandling.ts", "../src/middleware/querystringToBody.ts", "../src/middleware/joi-validator.ts", "../src/plugin/index.ts", "../src/plugin/utils.ts", "../src/redis/index.ts", "../src/blacklist/index.ts", "../src/blacklist/blacklist.ts", "../src/docUpdates/index.ts", "../src/events/processors/async/DocumentUpdateProcessor.ts", "../src/events/documentId.ts"],
- "sourcesContent": ["import { Automation, AutomationMetadata } from \"../../documents\"\nimport { Job } from \"bull\"\n\nexport interface AutomationDataEvent {\n appId?: string\n metadata?: AutomationMetadata\n automation?: Automation\n timeout?: number\n}\n\nexport interface AutomationData {\n event: AutomationDataEvent\n automation: Automation\n}\n\nexport type AutomationJob = Job<AutomationData>\n", "export enum Hosting {\n CLOUD = \"cloud\",\n SELF = \"self\",\n}\n", "import { User, Account } from \"../documents\"\nimport { IdentityType, HostInfo } from \"./events\"\n\nexport interface BaseContext {\n _id: string\n type: IdentityType\n tenantId?: string\n}\n\nexport interface AccountUserContext extends BaseContext {\n tenantId: string\n account: Account\n}\n\nexport interface UserContext extends BaseContext, User {\n _id: string\n tenantId: string\n account?: Account\n hostInfo: HostInfo\n}\n\nexport type IdentityContext = BaseContext | AccountUserContext | UserContext\n", "import { BaseEvent } from \"./event\"\n\nexport interface AppCreatedEvent extends BaseEvent {\n appId: string\n version: string\n audited: {\n name: string\n }\n}\n\nexport interface AppUpdatedEvent extends BaseEvent {\n appId: string\n version: string\n audited: {\n name: string\n }\n}\n\nexport interface AppDeletedEvent extends BaseEvent {\n appId: string\n audited: {\n name: string\n }\n}\n\nexport interface AppPublishedEvent extends BaseEvent {\n appId: string\n audited: {\n name: string\n }\n}\n\nexport interface AppUnpublishedEvent extends BaseEvent {\n appId: string\n audited: {\n name: string\n }\n}\n\nexport interface AppFileImportedEvent extends BaseEvent {\n appId: string\n audited: {\n name: string\n }\n}\n\nexport interface AppTemplateImportedEvent extends BaseEvent {\n appId: string\n templateKey: string\n audited: {\n name: string\n }\n}\n\nexport interface AppVersionUpdatedEvent extends BaseEvent {\n appId: string\n currentVersion: string\n updatedToVersion: string\n audited: {\n name: string\n }\n}\n\nexport interface AppVersionRevertedEvent extends BaseEvent {\n appId: string\n currentVersion: string\n revertedToVersion: string\n audited: {\n name: string\n }\n}\n\nexport interface AppRevertedEvent extends BaseEvent {\n appId: string\n audited: {\n name: string\n }\n}\n\nexport interface AppExportedEvent extends BaseEvent {\n appId: string\n audited: {\n name: string\n }\n}\n", "import { BaseEvent } from \"./event\"\nimport { ConfigType } from \"../../documents\"\n\nexport type LoginSource = \"local\" | \"google\" | \"oidc\" | \"google-internal\"\nexport type SSOType = ConfigType.OIDC | ConfigType.GOOGLE\n\nexport interface LoginEvent extends BaseEvent {\n userId: string\n source: LoginSource\n audited: {\n email: string\n }\n}\n\nexport interface LogoutEvent extends BaseEvent {\n userId: string\n audited: {\n email?: string\n }\n}\n\nexport interface SSOCreatedEvent extends BaseEvent {\n type: SSOType\n}\n\nexport interface SSOUpdatedEvent extends BaseEvent {\n type: SSOType\n}\n\nexport interface SSOActivatedEvent extends BaseEvent {\n type: SSOType\n}\n\nexport interface SSODeactivatedEvent extends BaseEvent {\n type: SSOType\n}\n", "import { BaseEvent } from \"./event\"\n\nexport interface AutomationCreatedEvent extends BaseEvent {\n appId: string\n automationId: string\n triggerId: string\n triggerType: string\n audited: {\n name: string\n }\n}\n\nexport interface AutomationTriggerUpdatedEvent extends BaseEvent {\n appId: string\n automationId: string\n triggerId: string\n triggerType: 
string\n}\n\nexport interface AutomationDeletedEvent extends BaseEvent {\n appId: string\n automationId: string\n triggerId: string\n triggerType: string\n audited: {\n name: string\n }\n}\n\nexport interface AutomationTestedEvent extends BaseEvent {\n appId: string\n automationId: string\n triggerId: string\n triggerType: string\n}\n\nexport interface AutomationStepCreatedEvent extends BaseEvent {\n appId: string\n automationId: string\n triggerId: string\n triggerType: string\n stepId: string\n stepType: string\n audited: {\n name: string\n }\n}\n\nexport interface AutomationStepDeletedEvent extends BaseEvent {\n appId: string\n automationId: string\n triggerId: string\n triggerType: string\n stepId: string\n stepType: string\n audited: {\n name: string\n }\n}\n\nexport interface AutomationsRunEvent extends BaseEvent {\n count: number\n}\n", "import { BaseEvent } from \"./event\"\n\nexport interface SMTPCreatedEvent extends BaseEvent {}\n\nexport interface SMTPUpdatedEvent extends BaseEvent {}\n", "import { BaseEvent } from \"./event\"\n\nexport interface DatasourceCreatedEvent extends BaseEvent {\n datasourceId: string\n source: string\n custom: boolean\n}\n\nexport interface DatasourceUpdatedEvent extends BaseEvent {\n datasourceId: string\n source: string\n custom: boolean\n}\n\nexport interface DatasourceDeletedEvent extends BaseEvent {\n datasourceId: string\n source: string\n custom: boolean\n}\n", "import { Hosting } from \"../hosting\"\nimport { Group, Identity } from \"./identification\"\n\nexport enum Event {\n // USER\n USER_CREATED = \"user:created\",\n USER_UPDATED = \"user:updated\",\n USER_DELETED = \"user:deleted\",\n\n // USER / ONBOARDING\n USER_ONBOARDING_COMPLETE = \"user:onboarding:complete\",\n\n // USER / PERMISSIONS\n USER_PERMISSION_ADMIN_ASSIGNED = \"user:admin:assigned\",\n USER_PERMISSION_ADMIN_REMOVED = \"user:admin:removed\",\n USER_PERMISSION_BUILDER_ASSIGNED = \"user:builder:assigned\",\n USER_PERMISSION_BUILDER_REMOVED = \"user:builder:removed\",\n\n // USER / INVITE\n USER_INVITED = \"user:invited\",\n USER_INVITED_ACCEPTED = \"user:invite:accepted\",\n\n // USER / PASSWORD\n USER_PASSWORD_FORCE_RESET = \"user:password:force:reset\",\n USER_PASSWORD_UPDATED = \"user:password:updated\",\n USER_PASSWORD_RESET_REQUESTED = \"user:password:reset:requested\",\n USER_PASSWORD_RESET = \"user:password:reset\",\n\n // USER / COLLABORATION\n USER_DATA_COLLABORATION = \"user:data:collaboration\",\n\n // EMAIL\n EMAIL_SMTP_CREATED = \"email:smtp:created\",\n EMAIL_SMTP_UPDATED = \"email:smtp:updated\",\n\n // AUTH\n AUTH_SSO_CREATED = \"auth:sso:created\",\n AUTH_SSO_UPDATED = \"auth:sso:updated\",\n AUTH_SSO_ACTIVATED = \"auth:sso:activated\",\n AUTH_SSO_DEACTIVATED = \"auth:sso:deactivated\",\n AUTH_LOGIN = \"auth:login\",\n AUTH_LOGOUT = \"auth:logout\",\n\n // ORG\n ORG_NAME_UPDATED = \"org:info:name:updated\",\n ORG_LOGO_UPDATED = \"org:info:logo:updated\",\n ORG_PLATFORM_URL_UPDATED = \"org:platformurl:updated\",\n\n // INSTALLATION\n INSTALLATION_VERSION_CHECKED = \"installation:version:checked\",\n INSTALLATION_VERSION_UPGRADED = \"installation:version:upgraded\",\n INSTALLATION_VERSION_DOWNGRADED = \"installation:version:downgraded\",\n INSTALLATION_FIRST_STARTUP = \"installation:firstStartup\",\n\n // ORG / ANALYTICS\n ANALYTICS_OPT_OUT = \"analytics:opt:out\",\n ANALYTICS_OPT_IN = \"analytics:opt:in\",\n\n // APP\n APP_CREATED = \"app:created\",\n APP_UPDATED = \"app:updated\",\n APP_DELETED = \"app:deleted\",\n APP_PUBLISHED = \"app:published\",\n 
APP_UNPUBLISHED = \"app:unpublished\",\n APP_TEMPLATE_IMPORTED = \"app:template:imported\",\n APP_FILE_IMPORTED = \"app:file:imported\",\n APP_VERSION_UPDATED = \"app:version:updated\",\n APP_VERSION_REVERTED = \"app:version:reverted\",\n APP_REVERTED = \"app:reverted\",\n APP_EXPORTED = \"app:exported\",\n\n // ROLE\n ROLE_CREATED = \"role:created\",\n ROLE_UPDATED = \"role:updated\",\n ROLE_DELETED = \"role:deleted\",\n ROLE_ASSIGNED = \"role:assigned\",\n ROLE_UNASSIGNED = \"role:unassigned\",\n\n // SERVE\n SERVED_BUILDER = \"served:builder\",\n SERVED_APP = \"served:app\",\n SERVED_APP_PREVIEW = \"served:app:preview\",\n\n // DATASOURCE\n DATASOURCE_CREATED = \"datasource:created\",\n DATASOURCE_UPDATED = \"datasource:updated\",\n DATASOURCE_DELETED = \"datasource:deleted\",\n\n // QUERY\n QUERY_CREATED = \"query:created\",\n QUERY_UPDATED = \"query:updated\",\n QUERY_DELETED = \"query:deleted\",\n QUERY_IMPORT = \"query:import\",\n QUERIES_RUN = \"queries:run\",\n QUERY_PREVIEWED = \"query:previewed\",\n\n // TABLE\n TABLE_CREATED = \"table:created\",\n TABLE_UPDATED = \"table:updated\",\n TABLE_DELETED = \"table:deleted\",\n TABLE_EXPORTED = \"table:exported\",\n TABLE_IMPORTED = \"table:imported\",\n TABLE_DATA_IMPORTED = \"table:data:imported\",\n\n // VIEW\n VIEW_CREATED = \"view:created\",\n VIEW_UPDATED = \"view:updated\",\n VIEW_DELETED = \"view:deleted\",\n VIEW_EXPORTED = \"view:exported\",\n VIEW_FILTER_CREATED = \"view:filter:created\",\n VIEW_FILTER_UPDATED = \"view:filter:updated\",\n VIEW_FILTER_DELETED = \"view:filter:deleted\",\n VIEW_CALCULATION_CREATED = \"view:calculation:created\",\n VIEW_CALCULATION_UPDATED = \"view:calculation:updated\",\n VIEW_CALCULATION_DELETED = \"view:calculation:deleted\",\n\n // ROWS\n ROWS_CREATED = \"rows:created\",\n ROWS_IMPORTED = \"rows:imported\",\n\n // COMPONENT\n COMPONENT_CREATED = \"component:created\",\n COMPONENT_DELETED = \"component:deleted\",\n\n // SCREEN\n SCREEN_CREATED = \"screen:created\",\n SCREEN_DELETED = \"screen:deleted\",\n\n // LAYOUT\n LAYOUT_CREATED = \"layout:created\",\n LAYOUT_DELETED = \"layout:deleted\",\n\n // AUTOMATION\n AUTOMATION_CREATED = \"automation:created\",\n AUTOMATION_DELETED = \"automation:deleted\",\n AUTOMATION_TESTED = \"automation:tested\",\n AUTOMATIONS_RUN = \"automations:run\",\n AUTOMATION_STEP_CREATED = \"automation:step:created\",\n AUTOMATION_STEP_DELETED = \"automation:step:deleted\",\n AUTOMATION_TRIGGER_UPDATED = \"automation:trigger:updated\",\n\n // LICENSE\n LICENSE_PLAN_CHANGED = \"license:plan:changed\",\n LICENSE_ACTIVATED = \"license:activated\",\n LICENSE_PAYMENT_FAILED = \"license:payment:failed\",\n LICENSE_PAYMENT_RECOVERED = \"license:payment:recovered\",\n LICENSE_CHECKOUT_OPENED = \"license:checkout:opened\",\n LICENSE_CHECKOUT_SUCCESS = \"license:checkout:success\",\n LICENSE_PORTAL_OPENED = \"license:portal:opened\",\n\n // ACCOUNT\n ACCOUNT_CREATED = \"account:created\",\n ACCOUNT_DELETED = \"account:deleted\",\n ACCOUNT_VERIFIED = \"account:verified\",\n\n // BACKFILL\n APP_BACKFILL_SUCCEEDED = \"app:backfill:succeeded\",\n APP_BACKFILL_FAILED = \"app:backfill:failed\",\n TENANT_BACKFILL_SUCCEEDED = \"tenant:backfill:succeeded\",\n TENANT_BACKFILL_FAILED = \"tenant:backfill:failed\",\n INSTALLATION_BACKFILL_SUCCEEDED = \"installation:backfill:succeeded\",\n INSTALLATION_BACKFILL_FAILED = \"installation:backfill:failed\",\n\n // USER\n USER_GROUP_CREATED = \"user_group:created\",\n USER_GROUP_UPDATED = \"user_group:updated\",\n USER_GROUP_DELETED = 
\"user_group:deleted\",\n USER_GROUP_USERS_ADDED = \"user_group:user_added\",\n USER_GROUP_USERS_REMOVED = \"user_group:users_deleted\",\n USER_GROUP_PERMISSIONS_EDITED = \"user_group:permissions_edited\",\n USER_GROUP_ONBOARDING = \"user_group:onboarding_added\",\n\n // PLUGIN\n PLUGIN_INIT = \"plugin:init\",\n PLUGIN_IMPORTED = \"plugin:imported\",\n PLUGIN_DELETED = \"plugin:deleted\",\n\n // BACKUP\n APP_BACKUP_RESTORED = \"app:backup:restored\",\n APP_BACKUP_TRIGGERED = \"app:backup:triggered\",\n\n // ENVIRONMENT VARIABLE\n ENVIRONMENT_VARIABLE_CREATED = \"environment_variable:created\",\n ENVIRONMENT_VARIABLE_DELETED = \"environment_variable:deleted\",\n ENVIRONMENT_VARIABLE_UPGRADE_PANEL_OPENED = \"environment_variable:upgrade_panel_opened\",\n\n // AUDIT LOG\n AUDIT_LOGS_FILTERED = \"audit_log:filtered\",\n AUDIT_LOGS_DOWNLOADED = \"audit_log:downloaded\",\n}\n\nexport const UserGroupSyncEvents: Event[] = [\n Event.USER_CREATED,\n Event.USER_UPDATED,\n Event.USER_DELETED,\n Event.USER_PERMISSION_ADMIN_ASSIGNED,\n Event.USER_PERMISSION_ADMIN_REMOVED,\n Event.USER_PERMISSION_BUILDER_ASSIGNED,\n Event.USER_PERMISSION_BUILDER_REMOVED,\n Event.USER_GROUP_CREATED,\n Event.USER_GROUP_UPDATED,\n Event.USER_GROUP_DELETED,\n Event.USER_GROUP_USERS_ADDED,\n Event.USER_GROUP_USERS_REMOVED,\n Event.USER_GROUP_PERMISSIONS_EDITED,\n]\n\nexport const AsyncEvents: Event[] = [...UserGroupSyncEvents]\n\n// all events that are not audited have been added to this record as undefined, this means\n// that Typescript can protect us against new events being added and auditing of those\n// events not being considered. This might be a little ugly, but provides a level of\n// Typescript build protection for the audit log feature, any new event also needs to be\n// added to this map, during which the developer will need to consider if it should be\n// a user facing event or not.\nexport const AuditedEventFriendlyName: Record<Event, string | undefined> = {\n // USER\n [Event.USER_CREATED]: `User \"{{ email }}\" created{{#if viaScim}} via SCIM{{/if}}`,\n [Event.USER_UPDATED]: `User \"{{ email }}\" updated{{#if viaScim}} via SCIM{{/if}}`,\n [Event.USER_DELETED]: `User \"{{ email }}\" deleted{{#if viaScim}} via SCIM{{/if}}`,\n [Event.USER_PERMISSION_ADMIN_ASSIGNED]: `User \"{{ email }}\" admin role assigned`,\n [Event.USER_PERMISSION_ADMIN_REMOVED]: `User \"{{ email }}\" admin role removed`,\n [Event.USER_PERMISSION_BUILDER_ASSIGNED]: `User \"{{ email }}\" builder role assigned`,\n [Event.USER_PERMISSION_BUILDER_REMOVED]: `User \"{{ email }}\" builder role removed`,\n [Event.USER_INVITED]: `User \"{{ email }}\" invited`,\n [Event.USER_INVITED_ACCEPTED]: `User \"{{ email }}\" accepted invite`,\n [Event.USER_PASSWORD_UPDATED]: `User \"{{ email }}\" password updated`,\n [Event.USER_PASSWORD_RESET_REQUESTED]: `User \"{{ email }}\" password reset requested`,\n [Event.USER_PASSWORD_RESET]: `User \"{{ email }}\" password reset`,\n [Event.USER_GROUP_CREATED]: `User group \"{{ name }}\" created{{#if viaScim}} via SCIM{{/if}}`,\n [Event.USER_GROUP_UPDATED]: `User group \"{{ name }}\" updated{{#if viaScim}} via SCIM{{/if}}`,\n [Event.USER_GROUP_DELETED]: `User group \"{{ name }}\" deleted{{#if viaScim}} via SCIM{{/if}}`,\n [Event.USER_GROUP_USERS_ADDED]: `User group \"{{ name }}\" {{ count }} users added{{#if viaScim}} via SCIM{{/if}}`,\n [Event.USER_GROUP_USERS_REMOVED]: `User group \"{{ name }}\" {{ count }} users removed{{#if viaScim}} via SCIM{{/if}}`,\n [Event.USER_GROUP_PERMISSIONS_EDITED]: `User group \"{{ name }}\" 
permissions edited`,\n [Event.USER_PASSWORD_FORCE_RESET]: undefined,\n [Event.USER_GROUP_ONBOARDING]: undefined,\n [Event.USER_ONBOARDING_COMPLETE]: undefined,\n [Event.USER_DATA_COLLABORATION]: undefined,\n\n // EMAIL\n [Event.EMAIL_SMTP_CREATED]: `Email configuration created`,\n [Event.EMAIL_SMTP_UPDATED]: `Email configuration updated`,\n\n // AUTH\n [Event.AUTH_SSO_CREATED]: `SSO configuration created`,\n [Event.AUTH_SSO_UPDATED]: `SSO configuration updated`,\n [Event.AUTH_SSO_ACTIVATED]: `SSO configuration activated`,\n [Event.AUTH_SSO_DEACTIVATED]: `SSO configuration deactivated`,\n [Event.AUTH_LOGIN]: `User \"{{ email }}\" logged in`,\n [Event.AUTH_LOGOUT]: `User \"{{ email }}\" logged out`,\n\n // ORG\n [Event.ORG_NAME_UPDATED]: `Organisation name updated`,\n [Event.ORG_LOGO_UPDATED]: `Organisation logo updated`,\n [Event.ORG_PLATFORM_URL_UPDATED]: `Organisation platform URL updated`,\n\n // APP\n [Event.APP_CREATED]: `App \"{{ name }}\" created`,\n [Event.APP_UPDATED]: `App \"{{ name }}\" updated`,\n [Event.APP_DELETED]: `App \"{{ name }}\" deleted`,\n [Event.APP_PUBLISHED]: `App \"{{ name }}\" published`,\n [Event.APP_UNPUBLISHED]: `App \"{{ name }}\" unpublished`,\n [Event.APP_TEMPLATE_IMPORTED]: `App \"{{ name }}\" template imported`,\n [Event.APP_FILE_IMPORTED]: `App \"{{ name }}\" file imported`,\n [Event.APP_VERSION_UPDATED]: `App \"{{ name }}\" version updated`,\n [Event.APP_VERSION_REVERTED]: `App \"{{ name }}\" version reverted`,\n [Event.APP_REVERTED]: `App \"{{ name }}\" reverted`,\n [Event.APP_EXPORTED]: `App \"{{ name }}\" exported`,\n [Event.APP_BACKUP_RESTORED]: `App backup \"{{ name }}\" restored`,\n [Event.APP_BACKUP_TRIGGERED]: `App backup \"{{ name }}\" triggered`,\n\n // DATASOURCE\n [Event.DATASOURCE_CREATED]: `Datasource created`,\n [Event.DATASOURCE_UPDATED]: `Datasource updated`,\n [Event.DATASOURCE_DELETED]: `Datasource deleted`,\n\n // QUERY\n [Event.QUERY_CREATED]: `Query created`,\n [Event.QUERY_UPDATED]: `Query updated`,\n [Event.QUERY_DELETED]: `Query deleted`,\n [Event.QUERY_IMPORT]: `Query import`,\n [Event.QUERIES_RUN]: undefined,\n [Event.QUERY_PREVIEWED]: undefined,\n\n // TABLE\n [Event.TABLE_CREATED]: `Table \"{{ name }}\" created`,\n [Event.TABLE_UPDATED]: `Table \"{{ name }}\" updated`,\n [Event.TABLE_DELETED]: `Table \"{{ name }}\" deleted`,\n [Event.TABLE_EXPORTED]: `Table \"{{ name }}\" exported`,\n [Event.TABLE_IMPORTED]: `Table \"{{ name }}\" imported`,\n [Event.TABLE_DATA_IMPORTED]: `Data imported to table`,\n\n // ROWS\n [Event.ROWS_CREATED]: `Rows created`,\n [Event.ROWS_IMPORTED]: `Rows imported`,\n\n // AUTOMATION\n [Event.AUTOMATION_CREATED]: `Automation \"{{ name }}\" created`,\n [Event.AUTOMATION_DELETED]: `Automation \"{{ name }}\" deleted`,\n [Event.AUTOMATION_STEP_CREATED]: `Automation \"{{ name }}\" step added`,\n [Event.AUTOMATION_STEP_DELETED]: `Automation \"{{ name }}\" step removed`,\n [Event.AUTOMATION_TESTED]: undefined,\n [Event.AUTOMATIONS_RUN]: undefined,\n [Event.AUTOMATION_TRIGGER_UPDATED]: undefined,\n\n // SCREEN\n [Event.SCREEN_CREATED]: `Screen \"{{ name }}\" created`,\n [Event.SCREEN_DELETED]: `Screen \"{{ name }}\" deleted`,\n\n // COMPONENT\n [Event.COMPONENT_CREATED]: `Component created`,\n [Event.COMPONENT_DELETED]: `Component deleted`,\n\n // ENVIRONMENT VARIABLE\n [Event.ENVIRONMENT_VARIABLE_CREATED]: `Environment variable created`,\n [Event.ENVIRONMENT_VARIABLE_DELETED]: `Environment variable deleted`,\n [Event.ENVIRONMENT_VARIABLE_UPGRADE_PANEL_OPENED]: undefined,\n\n // PLUGIN\n 
[Event.PLUGIN_IMPORTED]: `Plugin imported`,\n [Event.PLUGIN_DELETED]: `Plugin deleted`,\n [Event.PLUGIN_INIT]: undefined,\n\n // ROLE - NOT AUDITED\n [Event.ROLE_CREATED]: undefined,\n [Event.ROLE_UPDATED]: undefined,\n [Event.ROLE_DELETED]: undefined,\n [Event.ROLE_ASSIGNED]: undefined,\n [Event.ROLE_UNASSIGNED]: undefined,\n\n // LICENSE - NOT AUDITED\n [Event.LICENSE_PLAN_CHANGED]: undefined,\n [Event.LICENSE_ACTIVATED]: undefined,\n [Event.LICENSE_PAYMENT_FAILED]: undefined,\n [Event.LICENSE_PAYMENT_RECOVERED]: undefined,\n [Event.LICENSE_CHECKOUT_OPENED]: undefined,\n [Event.LICENSE_CHECKOUT_SUCCESS]: undefined,\n [Event.LICENSE_PORTAL_OPENED]: undefined,\n\n // ACCOUNT - NOT AUDITED\n [Event.ACCOUNT_CREATED]: undefined,\n [Event.ACCOUNT_DELETED]: undefined,\n [Event.ACCOUNT_VERIFIED]: undefined,\n\n // BACKFILL - NOT AUDITED\n [Event.APP_BACKFILL_SUCCEEDED]: undefined,\n [Event.APP_BACKFILL_FAILED]: undefined,\n [Event.TENANT_BACKFILL_SUCCEEDED]: undefined,\n [Event.TENANT_BACKFILL_FAILED]: undefined,\n [Event.INSTALLATION_BACKFILL_SUCCEEDED]: undefined,\n [Event.INSTALLATION_BACKFILL_FAILED]: undefined,\n\n // LAYOUT - NOT AUDITED\n [Event.LAYOUT_CREATED]: undefined,\n [Event.LAYOUT_DELETED]: undefined,\n\n // VIEW - NOT AUDITED\n [Event.VIEW_CREATED]: undefined,\n [Event.VIEW_UPDATED]: undefined,\n [Event.VIEW_DELETED]: undefined,\n [Event.VIEW_EXPORTED]: undefined,\n [Event.VIEW_FILTER_CREATED]: undefined,\n [Event.VIEW_FILTER_UPDATED]: undefined,\n [Event.VIEW_FILTER_DELETED]: undefined,\n [Event.VIEW_CALCULATION_CREATED]: undefined,\n [Event.VIEW_CALCULATION_UPDATED]: undefined,\n [Event.VIEW_CALCULATION_DELETED]: undefined,\n\n // SERVED - NOT AUDITED\n [Event.SERVED_BUILDER]: undefined,\n [Event.SERVED_APP]: undefined,\n [Event.SERVED_APP_PREVIEW]: undefined,\n\n // ANALYTICS - NOT AUDITED\n [Event.ANALYTICS_OPT_OUT]: undefined,\n [Event.ANALYTICS_OPT_IN]: undefined,\n\n // INSTALLATION - NOT AUDITED\n [Event.INSTALLATION_VERSION_CHECKED]: undefined,\n [Event.INSTALLATION_VERSION_UPGRADED]: undefined,\n [Event.INSTALLATION_VERSION_DOWNGRADED]: undefined,\n [Event.INSTALLATION_FIRST_STARTUP]: undefined,\n\n // AUDIT LOG - NOT AUDITED\n [Event.AUDIT_LOGS_FILTERED]: undefined,\n [Event.AUDIT_LOGS_DOWNLOADED]: undefined,\n}\n\n// properties added at the final stage of the event pipeline\nexport interface BaseEvent {\n version?: string\n service?: string\n environment?: string\n appId?: string\n installationId?: string\n tenantId?: string\n hosting?: Hosting\n // any props in the audited section will be removed before passing events\n // up out of system (purely for use with auditing)\n audited?: {\n [key: string]: any\n }\n}\n\nexport type TableExportFormat = \"json\" | \"csv\"\n\nexport type DocUpdateEvent = {\n id: string\n tenantId: string\n appId?: string\n}\n\nexport interface EventProcessor {\n processEvent(\n event: Event,\n identity: Identity,\n properties: any,\n timestamp?: string | number\n ): Promise<void>\n identify?(identity: Identity, timestamp?: string | number): Promise<void>\n identifyGroup?(group: Group, timestamp?: string | number): Promise<void>\n shutdown?(): void\n}\n", "import { BaseEvent } from \"./event\"\n\nexport interface LayoutCreatedEvent extends BaseEvent {\n layoutId: string\n}\n\nexport interface LayoutDeletedEvent extends BaseEvent {\n layoutId: string\n}\n", "import { PlanType, PriceDuration } from \"../licensing\"\n\nexport interface LicensePlanChangedEvent {\n accountId: string\n from: PlanType\n to: PlanType\n // may not be on historical 
events\n fromDuration: PriceDuration | undefined\n toDuration: PriceDuration | undefined\n fromQuantity: number | undefined\n toQuantity: number | undefined\n}\n\nexport interface LicenseActivatedEvent {\n accountId: string\n}\n\nexport interface LicenseCheckoutOpenedEvent {\n accountId: string\n}\n\nexport interface LicenseCheckoutSuccessEvent {\n accountId: string\n}\n\nexport interface LicensePortalOpenedEvent {\n accountId: string\n}\n\nexport interface LicensePaymentFailedEvent {\n accountId: string\n}\n\nexport interface LicensePaymentRecoveredEvent {\n accountId: string\n}\n", "import { BaseEvent } from \"./event\"\n\nexport interface VersionCheckedEvent extends BaseEvent {\n currentVersion: string\n}\n\nexport interface VersionChangeEvent extends BaseEvent {\n from: string\n to: string\n}\n", "import { BaseEvent } from \"./event\"\n\nexport interface QueryCreatedEvent extends BaseEvent {\n queryId: string\n datasourceId: string\n source: string\n queryVerb: string\n}\n\nexport interface QueryUpdatedEvent extends BaseEvent {\n queryId: string\n datasourceId: string\n source: string\n queryVerb: string\n}\n\nexport interface QueryDeletedEvent extends BaseEvent {\n queryId: string\n datasourceId: string\n source: string\n queryVerb: string\n}\n\nexport interface QueryImportedEvent extends BaseEvent {\n datasourceId: string\n source: string\n count: number\n importSource: string\n}\n\nexport interface QueryPreviewedEvent extends BaseEvent {\n queryId?: string\n datasourceId: string\n source: string\n queryVerb: string\n}\n\nexport interface QueriesRunEvent extends BaseEvent {\n count: number\n}\n", "import { BaseEvent } from \"./event\"\n\nexport interface RoleCreatedEvent extends BaseEvent {\n roleId: string\n permissionId: string\n inherits?: string\n}\n\nexport interface RoleUpdatedEvent extends BaseEvent {\n roleId: string\n permissionId: string\n inherits?: string\n}\n\nexport interface RoleDeletedEvent extends BaseEvent {\n roleId: string\n permissionId: string\n inherits?: string\n}\n\nexport interface RoleAssignedEvent extends BaseEvent {\n userId: string\n roleId: string\n}\n\nexport interface RoleUnassignedEvent extends BaseEvent {\n userId: string\n roleId: string\n}\n", "import { BaseEvent } from \"./event\"\n\nexport interface RowsImportedEvent extends BaseEvent {\n tableId: string\n count: number\n}\n\nexport interface RowsCreatedEvent extends BaseEvent {\n count: number\n}\n", "import { BaseEvent } from \"./event\"\n\nexport interface ScreenCreatedEvent extends BaseEvent {\n screenId: string\n layoutId?: string\n roleId: string\n audited: {\n name: string\n }\n}\n\nexport interface ScreenDeletedEvent extends BaseEvent {\n screenId: string\n layoutId?: string\n roleId: string\n audited: {\n name: string\n }\n}\n", "import { BaseEvent } from \"./event\"\n\nexport interface BuilderServedEvent extends BaseEvent {\n timezone: string\n}\n\nexport interface AppServedEvent extends BaseEvent {\n appVersion: string\n timezone: string\n embed?: boolean\n}\n\nexport interface AppPreviewServedEvent extends BaseEvent {\n appVersion: string\n timezone: string\n}\n", "import { BaseEvent, TableExportFormat } from \"./event\"\n\nexport interface TableCreatedEvent extends BaseEvent {\n tableId: string\n audited: {\n name: string\n }\n}\n\nexport interface TableUpdatedEvent extends BaseEvent {\n tableId: string\n audited: {\n name: string\n }\n}\n\nexport interface TableDeletedEvent extends BaseEvent {\n tableId: string\n audited: {\n name: string\n }\n}\n\nexport interface 
TableExportedEvent extends BaseEvent {\n tableId: string\n format: TableExportFormat\n audited: {\n name: string\n }\n}\n\nexport interface TableImportedEvent extends BaseEvent {\n tableId: string\n audited: {\n name: string\n }\n}\n", "import { BaseEvent } from \"./event\"\n\nexport interface UserCreatedEvent extends BaseEvent {\n userId: string\n viaScim?: boolean\n audited: {\n email: string\n }\n}\n\nexport interface UserUpdatedEvent extends BaseEvent {\n userId: string\n viaScim?: boolean\n audited: {\n email: string\n }\n}\n\nexport interface UserDeletedEvent extends BaseEvent {\n userId: string\n viaScim?: boolean\n audited: {\n email: string\n }\n}\n\nexport interface UserOnboardingEvent extends BaseEvent {\n userId: string\n step?: string\n audited: {\n email: string\n }\n}\n\nexport interface UserPermissionAssignedEvent extends BaseEvent {\n userId: string\n audited: {\n email: string\n }\n}\n\nexport interface UserPermissionRemovedEvent extends BaseEvent {\n userId: string\n audited: {\n email: string\n }\n}\n\nexport interface UserInvitedEvent extends BaseEvent {\n audited: {\n email: string\n }\n}\n\nexport interface UserInviteAcceptedEvent extends BaseEvent {\n userId: string\n audited: {\n email: string\n }\n}\n\nexport interface UserPasswordForceResetEvent extends BaseEvent {\n userId: string\n audited: {\n email: string\n }\n}\n\nexport interface UserPasswordUpdatedEvent extends BaseEvent {\n userId: string\n audited: {\n email: string\n }\n}\n\nexport interface UserPasswordResetRequestedEvent extends BaseEvent {\n userId: string\n audited: {\n email: string\n }\n}\n\nexport interface UserPasswordResetEvent extends BaseEvent {\n userId: string\n audited: {\n email: string\n }\n}\n\nexport interface UserDataCollaborationEvent extends BaseEvent {\n users: number\n}\n", "import { ViewCalculation } from \"../../documents\"\nimport { BaseEvent, TableExportFormat } from \"./event\"\n\nexport interface ViewCreatedEvent extends BaseEvent {\n tableId: string\n}\n\nexport interface ViewUpdatedEvent extends BaseEvent {\n tableId: string\n}\n\nexport interface ViewDeletedEvent extends BaseEvent {\n tableId: string\n}\n\nexport interface ViewExportedEvent extends BaseEvent {\n tableId: string\n format: TableExportFormat\n}\n\nexport interface ViewFilterCreatedEvent extends BaseEvent {\n tableId: string\n}\n\nexport interface ViewFilterUpdatedEvent extends BaseEvent {\n tableId: string\n}\n\nexport interface ViewFilterDeletedEvent extends BaseEvent {\n tableId: string\n}\n\nexport interface ViewCalculationCreatedEvent extends BaseEvent {\n tableId: string\n calculation: ViewCalculation\n}\n\nexport interface ViewCalculationUpdatedEvent extends BaseEvent {\n tableId: string\n calculation: ViewCalculation\n}\n\nexport interface ViewCalculationDeletedEvent extends BaseEvent {\n tableId: string\n calculation: ViewCalculation\n}\n", "import { BaseEvent } from \"./event\"\n\nexport interface AccountCreatedEvent extends BaseEvent {\n tenantId: string\n registrationStep?: string\n}\n\nexport interface AccountDeletedEvent extends BaseEvent {\n tenantId: string\n registrationStep?: string\n}\n\nexport interface AccountVerifiedEvent extends BaseEvent {\n tenantId: string\n}\n", "import { BaseEvent, Event } from \"./event\"\n\nexport interface AppBackfillSucceededEvent extends BaseEvent {\n appId: string\n automations: number\n datasources: number\n layouts: number\n queries: number\n roles: number\n tables: number\n screens: number\n errors?: string[]\n errorCount?: number\n}\n\nexport interface 
AppBackfillFailedEvent extends BaseEvent {\n error: string\n}\n\nexport interface TenantBackfillSucceededEvent extends BaseEvent {\n apps: number\n users: number\n\n usage: any\n errors?: [string]\n errorCount?: number\n}\n\nexport interface TenantBackfillFailedEvent extends BaseEvent {\n error: string\n}\n\nexport interface InstallationBackfillSucceededEvent extends BaseEvent {}\n\nexport interface InstallationBackfillFailedEvent extends BaseEvent {\n error: string\n}\n\nexport interface BackfillMetadata extends BaseEvent {\n eventWhitelist: Event[]\n}\n\nexport interface CachedEvent extends BaseEvent {\n event: Event\n properties: any\n}\n", "import { Hosting } from \"..\"\n\n// GROUPS\n\nexport enum GroupType {\n TENANT = \"tenant\",\n INSTALLATION = \"installation\",\n}\n\nexport interface Group {\n id: string\n type: IdentityType\n environment: string\n hosting: Hosting\n}\n\nexport interface TenantGroup extends Group {\n // account level information is associated with the tenant group\n // as we don't have this at the user level\n profession?: string // only available in cloud\n companySize?: string // only available in cloud\n installationId: string\n}\n\nexport interface InstallationGroup extends Group {\n version: string\n}\n\n// IDENTITIES\n\nexport enum IdentityType {\n USER = \"user\",\n TENANT = \"tenant\",\n INSTALLATION = \"installation\",\n}\n\nexport interface HostInfo {\n ipAddress?: string\n userAgent?: string\n}\n\nexport interface Identity {\n id: string\n type: IdentityType\n hosting: Hosting\n environment: string\n installationId?: string\n tenantId?: string\n // usable - no unique format\n realTenantId?: string\n hostInfo?: HostInfo\n}\n\nexport interface UserIdentity extends Identity {\n verified: boolean\n accountHolder: boolean\n providerType?: string\n builder?: boolean\n admin?: boolean\n}\n", "import { BaseEvent } from \"./event\"\n\nexport interface GroupCreatedEvent extends BaseEvent {\n groupId: string\n viaScim?: boolean\n audited: {\n name: string\n }\n}\n\nexport interface GroupUpdatedEvent extends BaseEvent {\n groupId: string\n viaScim?: boolean\n audited: {\n name: string\n }\n}\n\nexport interface GroupDeletedEvent extends BaseEvent {\n groupId: string\n viaScim?: boolean\n audited: {\n name: string\n }\n}\n\nexport interface GroupUsersAddedEvent extends BaseEvent {\n count: number\n groupId: string\n viaScim?: boolean\n audited: {\n name: string\n }\n}\n\nexport interface GroupUsersDeletedEvent extends BaseEvent {\n count: number\n groupId: string\n viaScim?: boolean\n audited: {\n name: string\n }\n}\n\nexport interface GroupAddedOnboardingEvent extends BaseEvent {\n groupId: string\n onboarding: boolean\n}\n\nexport interface GroupPermissionsEditedEvent extends BaseEvent {\n permissions: Record<string, string>\n groupId: string\n audited: {\n name: string\n }\n}\n", "import { BaseEvent } from \"./event\"\nimport { PluginSource, PluginType } from \"../../\"\n\nexport interface PluginInitEvent extends BaseEvent {\n type: PluginType\n name: string\n version: string\n description: string\n}\n\nexport interface PluginImportedEvent extends BaseEvent {\n pluginId: string\n type: PluginType\n source: PluginSource\n name: string\n version: string\n description: string\n}\n\nexport interface PluginDeletedEvent extends BaseEvent {\n pluginId: string\n type: PluginType\n name: string\n version: string\n description: string\n}\n", "import { BaseEvent } from \"./event\"\nimport { AppBackupTrigger, AppBackupType } from \"../../documents\"\n\nexport interface 
AppBackupRestoreEvent extends BaseEvent {\n appId: string\n restoreId: string\n backupCreatedAt: string\n name: string\n}\n\nexport interface AppBackupTriggeredEvent extends BaseEvent {\n backupId: string\n appId: string\n trigger: AppBackupTrigger\n type: AppBackupType\n name: string\n}\n", "import { BaseEvent } from \"./event\"\n\nexport interface EnvironmentVariableCreatedEvent extends BaseEvent {\n name: string\n environments: string[]\n}\n\nexport interface EnvironmentVariableDeletedEvent extends BaseEvent {\n name: string\n}\n\nexport interface EnvironmentVariableUpgradePanelOpenedEvent extends BaseEvent {\n userId: string\n}\n", "import { BaseEvent } from \"./event\"\nimport { AuditLogSearchParams } from \"../../api\"\n\nexport interface AuditLogFilteredEvent extends BaseEvent {\n filters: AuditLogSearchParams\n}\n\nexport interface AuditLogDownloadedEvent extends BaseEvent {\n filters: AuditLogSearchParams\n}\n", "export * from \"./app\"\nexport * from \"./auth\"\nexport * from \"./automation\"\nexport * from \"./email\"\nexport * from \"./datasource\"\nexport * from \"./event\"\nexport * from \"./layout\"\nexport * from \"./license\"\nexport * from \"./version\"\nexport * from \"./query\"\nexport * from \"./role\"\nexport * from \"./rows\"\nexport * from \"./screen\"\nexport * from \"./serve\"\nexport * from \"./table\"\nexport * from \"./user\"\nexport * from \"./view\"\nexport * from \"./account\"\nexport * from \"./backfill\"\nexport * from \"./identification\"\nexport * from \"./userGroup\"\nexport * from \"./plugin\"\nexport * from \"./backup\"\nexport * from \"./environmentVariable\"\nexport * from \"./auditLog\"\n", "import { PurchasedPlan, Quotas, Feature, Billing } from \".\"\nimport { ISO8601 } from \"../../shared\"\n\nexport interface OfflineIdentifier {\n installId: string\n tenantId: string\n}\n\nexport interface OfflineLicense extends License {\n identifier: OfflineIdentifier\n expireAt: ISO8601\n}\n\nexport interface License {\n features: Feature[]\n quotas: Quotas\n plan: PurchasedPlan\n billing?: Billing\n testClockId?: string\n}\n", "export enum PlanType {\n FREE = \"free\",\n /** @deprecated */\n PRO = \"pro\",\n /** @deprecated */\n TEAM = \"team\",\n /** @deprecated */\n PREMIUM = \"premium\",\n PREMIUM_PLUS = \"premium_plus\",\n /** @deprecated */\n BUSINESS = \"business\",\n ENTERPRISE_BASIC = \"enterprise_basic\",\n ENTERPRISE = \"enterprise\",\n}\n\nexport enum PriceDuration {\n MONTHLY = \"monthly\",\n YEARLY = \"yearly\",\n}\n\nexport interface AvailablePlan {\n type: PlanType\n maxUsers: number\n prices: AvailablePrice[]\n}\n\nexport interface AvailablePrice {\n amount: number\n amountMonthly: number\n currency: string\n duration: PriceDuration\n priceId: string\n type?: string\n}\n\nexport enum PlanModel {\n PER_USER = \"perUser\",\n PER_CREATOR_PER_USER = \"per_creator_per_user\",\n DAY_PASS = \"dayPass\",\n}\n\nexport interface PurchasedPlan {\n type: PlanType\n model: PlanModel\n usesInvoicing: boolean\n price?: PurchasedPrice\n}\n\nexport interface PurchasedPrice extends AvailablePrice {\n dayPasses: number | undefined\n /** @deprecated - now at the plan level via model */\n isPerUser: boolean\n}\n", "import { PlanType } from \".\"\n\nexport enum QuotaUsageType {\n STATIC = \"static\",\n MONTHLY = \"monthly\",\n}\n\nexport enum QuotaType {\n USAGE = \"usage\",\n CONSTANT = \"constant\",\n}\n\nexport enum StaticQuotaName {\n ROWS = \"rows\",\n APPS = \"apps\",\n USERS = \"users\",\n CREATORS = \"creators\",\n USER_GROUPS = \"userGroups\",\n PLUGINS 
= \"plugins\",\n}\n\nexport enum MonthlyQuotaName {\n QUERIES = \"queries\",\n AUTOMATIONS = \"automations\",\n DAY_PASSES = \"dayPasses\",\n}\n\nexport enum ConstantQuotaName {\n AUTOMATION_LOG_RETENTION_DAYS = \"automationLogRetentionDays\",\n APP_BACKUPS_RETENTION_DAYS = \"appBackupRetentionDays\",\n}\n\nexport type MeteredQuotaName = StaticQuotaName | MonthlyQuotaName\nexport type QuotaName = StaticQuotaName | MonthlyQuotaName | ConstantQuotaName\n\nexport const isStaticQuota = (\n quotaType: QuotaType,\n usageType: QuotaUsageType,\n name: QuotaName\n): name is StaticQuotaName => {\n return quotaType === QuotaType.USAGE && usageType === QuotaUsageType.STATIC\n}\n\nexport const isMonthlyQuota = (\n quotaType: QuotaType,\n usageType: QuotaUsageType,\n name: QuotaName\n): name is MonthlyQuotaName => {\n return quotaType === QuotaType.USAGE && usageType === QuotaUsageType.MONTHLY\n}\n\nexport const isConstantQuota = (\n quotaType: QuotaType,\n name: QuotaName\n): name is ConstantQuotaName => {\n return quotaType === QuotaType.CONSTANT\n}\n\nexport type PlanQuotas = { [key in PlanType]: Quotas | undefined }\n\nexport type MonthlyQuotas = {\n [MonthlyQuotaName.QUERIES]: Quota\n [MonthlyQuotaName.AUTOMATIONS]: Quota\n [MonthlyQuotaName.DAY_PASSES]: Quota\n}\n\nexport type StaticQuotas = {\n [StaticQuotaName.ROWS]: Quota\n [StaticQuotaName.APPS]: Quota\n [StaticQuotaName.USERS]: Quota\n [StaticQuotaName.CREATORS]: Quota\n [StaticQuotaName.USER_GROUPS]: Quota\n [StaticQuotaName.PLUGINS]: Quota\n}\n\nexport type ConstantQuotas = {\n [ConstantQuotaName.AUTOMATION_LOG_RETENTION_DAYS]: Quota\n [ConstantQuotaName.APP_BACKUPS_RETENTION_DAYS]: Quota\n}\n\nexport type Quotas = {\n [QuotaType.USAGE]: {\n [QuotaUsageType.MONTHLY]: MonthlyQuotas\n [QuotaUsageType.STATIC]: StaticQuotas\n }\n [QuotaType.CONSTANT]: ConstantQuotas\n}\n\nexport interface Quota {\n name: string\n value: number\n /**\n * Array of whole numbers (1-100) that dictate the percentage that this quota should trigger\n * at in relation to the corresponding usage inside budibase.\n *\n * Triggering results in a budibase installation sending a request to account-portal,\n * which can have subsequent effects such as sending emails to users.\n */\n triggers: number[]\n startDate?: number\n}\n", "import { PlanType } from \"./plan\"\n\nexport enum Feature {\n USER_GROUPS = \"userGroups\",\n APP_BACKUPS = \"appBackups\",\n ENVIRONMENT_VARIABLES = \"environmentVariables\",\n AUDIT_LOGS = \"auditLogs\",\n ENFORCEABLE_SSO = \"enforceableSSO\",\n BRANDING = \"branding\",\n SCIM = \"scim\",\n SYNC_AUTOMATIONS = \"syncAutomations\",\n TRIGGER_AUTOMATION_RUN = \"triggerAutomationRun\",\n APP_BUILDERS = \"appBuilders\",\n OFFLINE = \"offline\",\n EXPANDED_PUBLIC_API = \"expandedPublicApi\",\n VIEW_PERMISSIONS = \"viewPermissions\",\n}\n\nexport type PlanFeatures = { [key in PlanType]: Feature[] | undefined }\n", "import { PriceDuration } from \"./plan\"\n\nexport interface Customer {\n balance: number | null | undefined\n currency: string | null | undefined\n}\n\nexport interface SubscriptionItems {\n user: number | undefined\n creator: number | undefined\n}\n\nexport interface Subscription {\n amount: number\n amounts: SubscriptionItems | undefined\n currency: string\n quantity: number\n quantities: SubscriptionItems | undefined\n duration: PriceDuration\n cancelAt: number | null | undefined\n currentPeriodStart: number\n currentPeriodEnd: number\n status: string\n pastDueAt?: number | null\n downgradeAt?: number\n}\n\nexport interface Billing {\n 
customer: Customer\n subscription?: Subscription\n}\n", "export * from \"./license\"\nexport * from \"./plan\"\nexport * from \"./quota\"\nexport * from \"./feature\"\nexport * from \"./billing\"\n", "export interface Migration extends MigrationDefinition {\n appOpts?: object\n fn: Function\n silent?: boolean\n preventRetry?: boolean\n}\n\nexport enum MigrationType {\n // run once per tenant, recorded in global db, global db is provided as an argument\n GLOBAL = \"global\",\n // run per app, recorded in each app db, app db is provided as an argument\n APP = \"app\",\n // run once, recorded in global info db, global info db is provided as an argument\n INSTALLATION = \"installation\",\n}\n\nexport interface MigrationNoOpOptions {\n type: MigrationType\n tenantId: string\n appId?: string\n}\n\n/**\n * e.g.\n * {\n * tenantIds: ['bb'],\n * force: {\n * global: ['quota_1']\n * }\n * }\n */\nexport interface MigrationOptions {\n tenantIds?: string[]\n force?: {\n [type: string]: string[]\n }\n noOp?: MigrationNoOpOptions\n}\n\nexport enum MigrationName {\n USER_EMAIL_VIEW_CASING = \"user_email_view_casing\",\n APP_URLS = \"app_urls\",\n EVENT_APP_BACKFILL = \"event_app_backfill\",\n EVENT_GLOBAL_BACKFILL = \"event_global_backfill\",\n EVENT_INSTALLATION_BACKFILL = \"event_installation_backfill\",\n GLOBAL_INFO_SYNC_USERS = \"global_info_sync_users\",\n TABLE_SETTINGS_LINKS_TO_ACTIONS = \"table_settings_links_to_actions\",\n // increment this number to re-activate this migration\n SYNC_QUOTAS = \"sync_quotas_2\",\n}\n\nexport interface MigrationDefinition {\n type: MigrationType\n name: MigrationName\n}\n", "import { Table } from \"../documents\"\n\nexport const PASSWORD_REPLACEMENT = \"--secret-value--\"\n\nexport enum Operation {\n CREATE = \"CREATE\",\n READ = \"READ\",\n UPDATE = \"UPDATE\",\n DELETE = \"DELETE\",\n BULK_CREATE = \"BULK_CREATE\",\n CREATE_TABLE = \"CREATE_TABLE\",\n UPDATE_TABLE = \"UPDATE_TABLE\",\n DELETE_TABLE = \"DELETE_TABLE\",\n}\n\nexport enum SortDirection {\n ASCENDING = \"ASCENDING\",\n DESCENDING = \"DESCENDING\",\n}\n\nexport enum QueryType {\n SQL = \"sql\",\n JSON = \"json\",\n FIELDS = \"fields\",\n}\n\nexport enum DatasourceFieldType {\n STRING = \"string\",\n CODE = \"code\",\n LONGFORM = \"longForm\",\n BOOLEAN = \"boolean\",\n NUMBER = \"number\",\n PASSWORD = \"password\",\n LIST = \"list\",\n OBJECT = \"object\",\n JSON = \"json\",\n FILE = \"file\",\n FIELD_GROUP = \"fieldGroup\",\n SELECT = \"select\",\n}\n\nexport enum SourceName {\n POSTGRES = \"POSTGRES\",\n DYNAMODB = \"DYNAMODB\",\n MONGODB = \"MONGODB\",\n ELASTICSEARCH = \"ELASTICSEARCH\",\n COUCHDB = \"COUCHDB\",\n SQL_SERVER = \"SQL_SERVER\",\n S3 = \"S3\",\n AIRTABLE = \"AIRTABLE\",\n MYSQL = \"MYSQL\",\n ARANGODB = \"ARANGODB\",\n REST = \"REST\",\n ORACLE = \"ORACLE\",\n GOOGLE_SHEETS = \"GOOGLE_SHEETS\",\n FIRESTORE = \"FIRESTORE\",\n REDIS = \"REDIS\",\n SNOWFLAKE = \"SNOWFLAKE\",\n}\n\nexport enum IncludeRelationship {\n INCLUDE = 1,\n EXCLUDE = 0,\n}\n\nexport enum FilterType {\n STRING = \"string\",\n FUZZY = \"fuzzy\",\n RANGE = \"range\",\n EQUAL = \"equal\",\n NOT_EQUAL = \"notEqual\",\n EMPTY = \"empty\",\n NOT_EMPTY = \"notEmpty\",\n ONE_OF = \"oneOf\",\n}\n\nexport enum DatasourceFeature {\n CONNECTION_CHECKING = \"connection\",\n FETCH_TABLE_NAMES = \"fetch_table_names\",\n EXPORT_SCHEMA = \"export_schema\",\n}\n\nexport interface StepDefinition {\n key: string\n template: string\n}\n\nexport interface QueryDefinition {\n type: QueryType\n displayName?: string\n readable?: 
boolean\n customisable?: boolean\n fields?: object\n urlDisplay?: boolean\n steps?: Array<StepDefinition>\n}\n\nexport interface ExtraQueryConfig {\n [key: string]: {\n displayName: string\n type: string\n required: boolean\n data?: object\n }\n}\n\ninterface DatasourceBasicFieldConfig {\n type: DatasourceFieldType\n display?: string\n required?: boolean\n default?: any\n deprecated?: boolean\n hidden?: string\n}\n\ninterface DatasourceSelectFieldConfig extends DatasourceBasicFieldConfig {\n type: DatasourceFieldType.SELECT\n config: { options: string[] }\n}\n\ninterface DatasourceFieldGroupConfig extends DatasourceBasicFieldConfig {\n type: DatasourceFieldType.FIELD_GROUP\n config: {\n openByDefault?: boolean\n nestedFields?: boolean\n }\n}\n\ntype DatasourceFieldConfig =\n | DatasourceSelectFieldConfig\n | DatasourceFieldGroupConfig\n | DatasourceBasicFieldConfig\n\nexport interface DatasourceConfig {\n [key: string]: DatasourceFieldConfig & {\n fields?: DatasourceConfig\n }\n}\n\nexport interface Integration {\n docs: string\n plus?: boolean\n isSQL?: boolean\n auth?: { type: string }\n features?: Partial<Record<DatasourceFeature, boolean>>\n relationships?: boolean\n description: string\n friendlyName: string\n type?: string\n iconUrl?: string\n datasource: DatasourceConfig\n query: {\n [key: string]: QueryDefinition\n }\n extra?: ExtraQueryConfig\n}\n\nexport type ConnectionInfo = {\n connected: boolean\n error?: string\n}\n\nexport interface IntegrationBase {\n create?(query: any): Promise<any[] | any>\n read?(query: any): Promise<any[] | any>\n update?(query: any): Promise<any[] | any>\n delete?(query: any): Promise<any[] | any>\n testConnection?(): Promise<ConnectionInfo>\n getExternalSchema?(): Promise<string>\n defineTypeCastingFromSchema?(schema: {\n [key: string]: {\n name: string\n type: string\n }\n }): void\n}\n\nexport interface Schema {\n tables: Record<string, Table>\n errors: Record<string, string>\n}\n\nexport interface DatasourcePlus extends IntegrationBase {\n // if the datasource supports the use of bindings directly (to protect against SQL injection)\n // this returns the format of the identifier\n getBindingIdentifier(): string\n getStringConcat(parts: string[]): string\n buildSchema(\n datasourceId: string,\n entities: Record<string, Table>\n ): Promise<Schema>\n getTableNames(): Promise<string[]>\n}\n", "import { Operation, SortDirection } from \"./datasources\"\nimport { Row, Table } from \"../documents\"\nimport { SortType } from \"../api\"\n\nexport interface SearchFilters {\n allOr?: boolean\n onEmptyFilter?: EmptyFilterOption\n string?: {\n [key: string]: string\n }\n fuzzy?: {\n [key: string]: string\n }\n range?: {\n [key: string]: {\n high: number | string\n low: number | string\n }\n }\n equal?: {\n [key: string]: any\n }\n notEqual?: {\n [key: string]: any\n }\n empty?: {\n [key: string]: any\n }\n notEmpty?: {\n [key: string]: any\n }\n oneOf?: {\n [key: string]: any[]\n }\n contains?: {\n [key: string]: any[] | any\n }\n notContains?: {\n [key: string]: any[]\n }\n containsAny?: {\n [key: string]: any[]\n }\n}\n\nexport interface SortJson {\n [key: string]: {\n direction: SortDirection\n type?: SortType\n }\n}\n\nexport interface PaginationJson {\n limit: number\n page?: string | number\n}\n\nexport interface RenameColumn {\n old: string\n updated: string\n}\n\nexport interface RelationshipsJson {\n through?: string\n from?: string\n to?: string\n fromPrimary?: string\n toPrimary?: string\n tableName: string\n column: string\n}\n\nexport interface 
QueryJson {\n endpoint: {\n datasourceId: string\n entityId: string\n operation: Operation\n schema?: string\n }\n resource?: {\n fields: string[]\n }\n filters?: SearchFilters\n sort?: SortJson\n paginate?: PaginationJson\n body?: Row | Row[]\n table?: Table\n meta?: {\n table?: Table\n tables?: Record<string, Table>\n renamed?: RenameColumn\n }\n extra?: {\n idFilter?: SearchFilters\n }\n relationships?: RelationshipsJson[]\n}\n\nexport interface SqlQuery {\n sql: string\n bindings?: string[]\n}\n\nexport enum EmptyFilterOption {\n RETURN_ALL = \"all\",\n RETURN_NONE = \"none\",\n}\n", "import { Context, Request } from \"koa\"\nimport { User, Role, UserRoles, Account, ConfigType } from \"../documents\"\nimport { FeatureFlag, License } from \"../sdk\"\nimport { Files } from \"formidable\"\n\nexport interface ContextUser extends Omit<User, \"roles\"> {\n globalId?: string\n license?: License\n userId?: string\n roleId?: string | null\n role?: Role\n roles?: UserRoles\n csrfToken?: string\n featureFlags?: FeatureFlag[]\n accountPortalAccess?: boolean\n providerType?: ConfigType\n account?: Account\n}\n\n/**\n * Add support for koa-body in context.\n */\nexport interface BBRequest<RequestBody> extends Request {\n body: RequestBody\n files?: Files\n}\n\n/**\n * Basic context with no user.\n */\nexport interface Ctx<RequestBody = any, ResponseBody = any> extends Context {\n request: BBRequest<RequestBody>\n body: ResponseBody\n}\n\n/**\n * Authenticated context.\n */\nexport interface UserCtx<RequestBody = any, ResponseBody = any>\n extends Ctx<RequestBody, ResponseBody> {\n user: ContextUser\n roleId?: string\n}\n\n/**\n * @deprecated: Use UserCtx / Ctx appropriately\n * Authenticated context.\n */\nexport interface BBContext extends Ctx {\n user?: ContextUser\n}\n", "import { BBContext } from \"./koa\"\nimport { Hosting } from \"./hosting\"\n\nexport interface AuthToken {\n userId: string\n tenantId: string\n sessionId: string\n}\n\nexport interface CreateSession {\n sessionId: string\n tenantId: string\n csrfToken?: string\n hosting?: Hosting\n}\n\nexport interface Session extends CreateSession {\n userId: string\n lastAccessedAt: string\n createdAt: string\n // make optional attributes required\n csrfToken: string\n}\n\nexport interface SessionKey {\n key: string\n}\n\nexport interface ScannedSession {\n value: Session\n}\n\nexport interface PlatformLogoutOpts {\n ctx: BBContext\n userId: string\n keepActiveSession?: boolean\n}\n", "import Redlock from \"redlock\"\n\nexport enum LockType {\n /**\n * If this lock is already held the attempted operation will not be performed.\n * No retries will take place and no error will be thrown.\n */\n TRY_ONCE = \"try_once\",\n TRY_TWICE = \"try_twice\",\n DEFAULT = \"default\",\n DELAY_500 = \"delay_500\",\n CUSTOM = \"custom\",\n AUTO_EXTEND = \"auto_extend\",\n}\n\nexport enum LockName {\n MIGRATIONS = \"migrations\",\n TRIGGER_QUOTA = \"trigger_quota\",\n SYNC_ACCOUNT_LICENSE = \"sync_account_license\",\n UPDATE_TENANTS_DOC = \"update_tenants_doc\",\n PERSIST_WRITETHROUGH = \"persist_writethrough\",\n QUOTA_USAGE_EVENT = \"quota_usage_event\",\n APP_MIGRATION = \"app_migrations\",\n PROCESS_AUTO_COLUMNS = \"process_auto_columns\",\n PROCESS_USER_INVITE = \"process_user_invite\",\n}\n\nexport type LockOptions = {\n /**\n * The lock type determines which client to use\n */\n type: LockType\n /**\n * The custom options to use when creating the redlock instance\n * type must be set to custom for the options to be applied\n */\n customOptions?: 
Redlock.Options\n /**\n * The name for the lock\n */\n name: LockName\n /**\n * The individual resource to lock. This is useful for locking around very specific identifiers, e.g. a document that is prone to conflicts\n */\n resource?: string\n /**\n * This is a system-wide lock - don't use tenancy in lock key\n */\n systemLock?: boolean\n} & (\n | {\n /**\n * The ttl to auto-expire the lock if not unlocked manually\n */\n ttl: number\n type: Exclude<LockType, LockType.AUTO_EXTEND>\n }\n | {\n type: LockType.AUTO_EXTEND\n onExtend?: () => void\n }\n)\n", "import type Nano from \"@budibase/nano\"\nimport { AllDocsResponse, AnyDocument, Document, ViewTemplateOpts } from \"../\"\nimport { Writable } from \"stream\"\n\nexport enum SearchIndex {\n ROWS = \"rows\",\n AUDIT = \"audit\",\n USER = \"user\",\n}\n\nexport type PouchOptions = {\n inMemory?: boolean\n replication?: boolean\n onDisk?: boolean\n find?: boolean\n}\n\nexport enum SortOption {\n ASCENDING = \"asc\",\n DESCENDING = \"desc\",\n}\n\nexport type IndexAnalyzer = {\n name: string\n default?: string\n fields?: Record<string, string>\n}\n\nexport type DBView = {\n name?: string\n map: string\n reduce?: string\n meta?: ViewTemplateOpts\n groupBy?: string\n}\n\nexport interface DesignDocument extends Document {\n // we use this static reference for all design documents\n _id: \"_design/database\"\n language?: string\n // CouchDB views\n views?: {\n [viewName: string]: DBView\n }\n // Lucene indexes\n indexes?: {\n [indexName: string]: {\n index: string\n analyzer?: string | IndexAnalyzer\n }\n }\n}\n\nexport type CouchFindOptions = {\n selector: PouchDB.Find.Selector\n fields?: string[]\n sort?: {\n [key: string]: SortOption\n }[]\n limit?: number\n skip?: number\n bookmark?: string\n}\n\nexport type DatabaseOpts = {\n skip_setup?: boolean\n}\n\nexport type DatabasePutOpts = {\n force?: boolean\n}\n\nexport type DatabaseCreateIndexOpts = {\n index: {\n fields: string[]\n name?: string | undefined\n ddoc?: string | undefined\n type?: string | undefined\n }\n}\n\nexport type DatabaseDeleteIndexOpts = {\n name: string\n ddoc: string\n type?: string | undefined\n}\n\ntype DBPrimitiveKey = string | number | {}\nexport type DatabaseKey = DBPrimitiveKey | DBPrimitiveKey[]\n\nexport type DatabaseQueryOpts = {\n include_docs?: boolean\n startkey?: DatabaseKey\n endkey?: DatabaseKey\n limit?: number\n skip?: number\n descending?: boolean\n key?: DatabaseKey\n keys?: DatabaseKey[]\n group?: boolean\n startkey_docid?: string\n}\n\nexport const isDocument = (doc: any): doc is Document => {\n return typeof doc === \"object\" && doc._id && doc._rev\n}\n\nexport interface DatabaseDumpOpts {\n filter?: (doc: AnyDocument) => boolean\n batch_size?: number\n batch_limit?: number\n style?: \"main_only\" | \"all_docs\"\n timeout?: number\n doc_ids?: string[]\n query_params?: any\n view?: string\n selector?: any\n}\n\nexport interface Database {\n name: string\n\n exists(): Promise<boolean>\n get<T extends Document>(id?: string): Promise<T>\n getMultiple<T extends Document>(\n ids: string[],\n opts?: { allowMissing?: boolean }\n ): Promise<T[]>\n remove(\n id: string | Document,\n rev?: string\n ): Promise<Nano.DocumentDestroyResponse>\n put(\n document: AnyDocument,\n opts?: DatabasePutOpts\n ): Promise<Nano.DocumentInsertResponse>\n bulkDocs(documents: AnyDocument[]): Promise<Nano.DocumentBulkResponse[]>\n allDocs<T extends Document>(\n params: DatabaseQueryOpts\n ): Promise<AllDocsResponse<T>>\n query<T extends Document>(\n viewName: string,\n params: 
DatabaseQueryOpts\n ): Promise<AllDocsResponse<T>>\n destroy(): Promise<Nano.OkResponse | void>\n compact(): Promise<Nano.OkResponse | void>\n // these are all PouchDB related functions that are rarely used - in future\n // should be replaced by better typed/non-pouch implemented methods\n dump(stream: Writable, opts?: DatabaseDumpOpts): Promise<any>\n load(...args: any[]): Promise<any>\n createIndex(...args: any[]): Promise<any>\n deleteIndex(...args: any[]): Promise<any>\n getIndexes(...args: any[]): Promise<any>\n}\n", "export interface EndpointMatcher {\n /**\n * The HTTP Path. e.g. /api/things/:thingId\n */\n route: string\n /**\n * The HTTP Verb. e.g. GET, POST, etc.\n * ALL is also accepted to cover all verbs.\n */\n method: string\n /**\n * The route must match exactly - not just begins with\n */\n strict?: boolean\n}\n\nexport interface RegexMatcher {\n regex: RegExp\n method: string\n strict: boolean\n route: string\n}\n", "export interface GetTenantIdOptions {\n allowNoTenant?: boolean\n excludeStrategies?: TenantResolutionStrategy[]\n includeStrategies?: TenantResolutionStrategy[]\n}\n\nexport enum TenantResolutionStrategy {\n USER = \"user\",\n HEADER = \"header\",\n QUERY = \"query\",\n SUBDOMAIN = \"subdomain\",\n PATH = \"path\",\n}\n", "export * from \"./matchers\"\nexport * from \"./tenancy\"\n", "export enum FeatureFlag {\n LICENSING = \"LICENSING\",\n PER_CREATOR_PER_USER_PRICE = \"PER_CREATOR_PER_USER_PRICE\",\n PER_CREATOR_PER_USER_PRICE_ALERT = \"PER_CREATOR_PER_USER_PRICE_ALERT\",\n}\n\nexport interface TenantFeatureFlags {\n [key: string]: FeatureFlag[]\n}\n", "export enum AppEnvironment {\n PRODUCTION = \"production\",\n DEVELOPMENT = \"development\",\n}\n", "import { Event, HostInfo } from \"./events\"\nimport { AuditLogDoc } from \"../documents\"\n\nexport type AuditWriteOpts = {\n appId?: string\n timestamp?: string | number\n userId?: string\n hostInfo?: HostInfo\n}\n\nexport type AuditLogFn = (\n event: Event,\n metadata: any,\n opts: AuditWriteOpts\n) => Promise<AuditLogDoc | undefined>\n\nexport type AuditLogQueueEvent = {\n event: Event\n properties: any\n opts: AuditWriteOpts\n tenantId: string\n}\n", "import {\n OAuth2,\n SSOProfileJson,\n SSOProviderType,\n SSOUser,\n User,\n} from \"../documents\"\nimport { SaveUserOpts } from \"./user\"\n\nexport interface JwtClaims {\n preferred_username?: string\n email?: string\n}\n\nexport interface SSOAuthDetails {\n oauth2: OAuth2\n provider: string\n providerType: SSOProviderType\n userId: string\n email?: string\n profile?: SSOProfile\n}\n\nexport interface SSOProfile {\n id: string\n name?: {\n givenName?: string\n familyName?: string\n }\n _json: SSOProfileJson\n provider?: string\n}\n\nexport type SaveSSOUserFunction = (\n user: SSOUser,\n opts: SaveUserOpts\n) => Promise<User>\n", "export interface SaveUserOpts {\n hashPassword?: boolean\n requirePassword?: boolean\n currentUserId?: string\n skipPasswordValidation?: boolean\n}\n", "import { Event } from \"../events\"\n\nexport enum CommandWord {\n BACKUPS = \"backups\",\n HOSTING = \"hosting\",\n ANALYTICS = \"analytics\",\n HELP = \"help\",\n PLUGIN = \"plugins\",\n}\n\nexport enum InitType {\n QUICK = \"quick\",\n DIGITAL_OCEAN = \"do\",\n}\n\nexport const AnalyticsEvent = {\n OptOut: \"analytics:opt:out\",\n OptIn: \"analytics:opt:in\",\n SelfHostInit: \"hosting:init\",\n PluginInit: Event.PLUGIN_INIT,\n}\n", "export * from \"./constants\"\n", "export interface SocketSession {\n _id: string\n email: string\n firstName?: string\n lastName?: string\n 
sessionId: string\n room?: string\n connectedAt: number\n}\n", "export enum PermissionLevel {\n READ = \"read\",\n WRITE = \"write\",\n EXECUTE = \"execute\",\n ADMIN = \"admin\",\n}\n\n// these are the global types, that govern the underlying default behaviour\nexport enum PermissionType {\n APP = \"app\",\n TABLE = \"table\",\n USER = \"user\",\n AUTOMATION = \"automation\",\n WEBHOOK = \"webhook\",\n BUILDER = \"builder\",\n CREATOR = \"creator\",\n GLOBAL_BUILDER = \"globalBuilder\",\n QUERY = \"query\",\n VIEW = \"view\",\n LEGACY_VIEW = \"legacy_view\",\n}\n\nexport enum PermissionSource {\n EXPLICIT = \"EXPLICIT\",\n INHERITED = \"INHERITED\",\n BASE = \"BASE\",\n}\n", "import { SortOrder, SortType } from \"../api\"\nimport { SearchFilters } from \"./search\"\n\nexport interface SearchParams {\n tableId: string\n paginate?: boolean\n query: SearchFilters\n bookmark?: string\n limit?: number\n sort?: string\n sortOrder?: SortOrder\n sortType?: SortType\n version?: string\n disableEscaping?: boolean\n fields?: string[]\n}\n", "export * from \"./automations\"\nexport * from \"./hosting\"\nexport * from \"./context\"\nexport * from \"./events\"\nexport * from \"./licensing\"\nexport * from \"./migrations\"\nexport * from \"./datasources\"\nexport * from \"./search\"\nexport * from \"./koa\"\nexport * from \"./auth\"\nexport * from \"./locks\"\nexport * from \"./db\"\nexport * from \"./middleware\"\nexport * from \"./featureFlag\"\nexport * from \"./environmentVariables\"\nexport * from \"./auditLogs\"\nexport * from \"./sso\"\nexport * from \"./user\"\nexport * from \"./cli\"\nexport * from \"./websocket\"\nexport * from \"./permissions\"\nexport * from \"./row\"\n", "import { Feature, Hosting, License, PlanType, Quotas } from \"../../sdk\"\nimport { DeepPartial } from \"../../shared\"\nimport { QuotaUsage } from \"../global\"\n\nexport interface CreateAccount {\n email: string\n tenantId: string\n hosting: Hosting\n authType: AuthType\n // optional fields - for sso based sign ups\n registrationStep?: string\n // profile\n tenantName?: string\n name?: string\n size?: string\n profession?: string\n}\n\nexport interface CreatePassswordAccount extends CreateAccount {\n password: string\n}\n\nexport interface CreateVerifiableSSOAccount extends CreateAccount {\n provider?: AccountSSOProvider\n thirdPartyProfile?: any\n}\n\nexport const isCreatePasswordAccount = (\n account: CreateAccount\n): account is CreatePassswordAccount => account.authType === AuthType.PASSWORD\n\nexport interface LicenseOverrides {\n features?: Feature[]\n quotas?: DeepPartial<Quotas>\n}\n\nexport interface Account extends CreateAccount {\n // generated\n accountId: string\n createdAt: number\n // registration\n verified: boolean\n verificationSent: boolean\n // licensing\n tier: string // deprecated\n planType?: PlanType\n /** @deprecated */\n planTier?: number\n license?: License\n installId?: string\n installTenantId?: string\n installVersion?: string\n stripeCustomerId?: string\n licenseKey?: string\n licenseKeyActivatedAt?: number\n licenseRequestedAt?: number\n licenseOverrides?: LicenseOverrides\n provider?: AccountSSOProvider\n providerType?: AccountSSOProviderType\n quotaUsage?: QuotaUsage\n offlineLicenseToken?: string\n}\n\nexport interface PasswordAccount extends Account {\n password: string\n}\n\nexport const isPasswordAccount = (\n account: Account\n): account is PasswordAccount =>\n account.authType === AuthType.PASSWORD && account.hosting === Hosting.SELF\n\nexport interface CloudAccount extends Account 
{\n password?: string\n budibaseUserId: string\n}\n\nexport const isCloudAccount = (account: Account): account is CloudAccount =>\n account.hosting === Hosting.CLOUD\n\nexport const isSelfHostAccount = (account: Account) =>\n account.hosting === Hosting.SELF\n\nexport const isSSOAccount = (account: Account): account is SSOAccount =>\n account.authType === AuthType.SSO\n\nexport enum AccountSSOProviderType {\n GOOGLE = \"google\",\n MICROSOFT = \"microsoft\",\n}\n\nexport enum AccountSSOProvider {\n GOOGLE = \"google\",\n MICROSOFT = \"microsoft\",\n}\n\nconst verifiableSSOProviders: AccountSSOProvider[] = [\n AccountSSOProvider.MICROSOFT,\n]\nexport function isVerifiableSSOProvider(provider: AccountSSOProvider): boolean {\n return verifiableSSOProviders.includes(provider)\n}\n\nexport interface AccountSSO {\n provider: AccountSSOProvider\n providerType: AccountSSOProviderType\n oauth2?: OAuthTokens\n pictureUrl?: string\n thirdPartyProfile: any // TODO: define what the google profile looks like\n}\n\nexport type SSOAccount = (Account | CloudAccount) & AccountSSO\n\nexport enum AuthType {\n SSO = \"sso\",\n PASSWORD = \"password\",\n}\n\nexport interface OAuthTokens {\n accessToken: string\n refreshToken: string\n}\n", "export interface CreateAccountUserActivity {\n accountId: string\n userId: string\n timestamp: number\n}\n\nexport interface AccountUserActivity extends CreateAccountUserActivity {\n PK: string\n SK: string\n}\n", "import { Document } from \"../../\"\n\nexport interface Flags extends Document {\n [key: string]: any\n}\n", "export * from \"./account\"\nexport * from \"./user\"\nexport * from \"./flag\"\n", "import { User, Document } from \"../\"\nimport { SocketSession } from \"../../sdk\"\n\nexport type AppMetadataErrors = { [key: string]: string[] }\n\nexport interface App extends Document {\n appId: string\n type: string\n version: string\n componentLibraries: string[]\n name: string\n url: string | undefined\n template: string | undefined\n instance: AppInstance\n tenantId: string\n status: string\n theme?: string\n customTheme?: AppCustomTheme\n revertableVersion?: string\n lockedBy?: User\n sessions?: SocketSession[]\n navigation?: AppNavigation\n automationErrors?: AppMetadataErrors\n icon?: AppIcon\n features?: AppFeatures\n}\n\nexport interface AppInstance {\n _id: string\n}\n\nexport interface AppNavigation {\n navigation: string\n title: string\n navWidth: string\n sticky?: boolean\n hideLogo?: boolean\n logoUrl?: string\n hideTitle?: boolean\n navBackground?: string\n navTextColor?: string\n links?: AppNavigationLink[]\n}\n\nexport interface AppNavigationLink {\n text: string\n url: string\n id?: string\n roleId?: string\n}\n\nexport interface AppCustomTheme {\n buttonBorderRadius?: string\n primaryColor?: string\n primaryColorHover?: string\n\n // Used to exist before new design UI\n navTextColor?: string\n navBackground?: string\n}\n\nexport interface AppIcon {\n name: string\n color: string\n}\n\nexport interface AppFeatures {\n componentValidation?: boolean\n disableUserMetadata?: boolean\n}\n", "import { Document } from \"../document\"\nimport { EventEmitter } from \"events\"\nimport { User } from \"../global\"\n\nexport enum AutomationIOType {\n OBJECT = \"object\",\n STRING = \"string\",\n BOOLEAN = \"boolean\",\n NUMBER = \"number\",\n ARRAY = \"array\",\n JSON = \"json\",\n DATE = \"date\",\n}\n\nexport enum AutomationCustomIOType {\n TABLE = \"table\",\n ROW = \"row\",\n ROWS = \"rows\",\n WIDE = \"wide\",\n QUERY = \"query\",\n QUERY_PARAMS = 
\"queryParams\",\n QUERY_LIMIT = \"queryLimit\",\n LOOP_OPTION = \"loopOption\",\n ITEM = \"item\",\n CODE = \"code\",\n FILTERS = \"filters\",\n COLUMN = \"column\",\n TRIGGER_SCHEMA = \"triggerSchema\",\n CRON = \"cron\",\n WEBHOOK_URL = \"webhookUrl\",\n AUTOMATION = \"automation\",\n AUTOMATION_FIELDS = \"automationFields\",\n}\n\nexport enum AutomationTriggerStepId {\n ROW_SAVED = \"ROW_SAVED\",\n ROW_UPDATED = \"ROW_UPDATED\",\n ROW_DELETED = \"ROW_DELETED\",\n WEBHOOK = \"WEBHOOK\",\n APP = \"APP\",\n CRON = \"CRON\",\n}\n\nexport enum AutomationStepType {\n LOGIC = \"LOGIC\",\n ACTION = \"ACTION\",\n TRIGGER = \"TRIGGER\",\n}\n\nexport enum AutomationActionStepId {\n SEND_EMAIL_SMTP = \"SEND_EMAIL_SMTP\",\n CREATE_ROW = \"CREATE_ROW\",\n UPDATE_ROW = \"UPDATE_ROW\",\n DELETE_ROW = \"DELETE_ROW\",\n EXECUTE_BASH = \"EXECUTE_BASH\",\n OUTGOING_WEBHOOK = \"OUTGOING_WEBHOOK\",\n EXECUTE_SCRIPT = \"EXECUTE_SCRIPT\",\n EXECUTE_QUERY = \"EXECUTE_QUERY\",\n SERVER_LOG = \"SERVER_LOG\",\n DELAY = \"DELAY\",\n FILTER = \"FILTER\",\n QUERY_ROWS = \"QUERY_ROWS\",\n LOOP = \"LOOP\",\n COLLECT = \"COLLECT\",\n OPENAI = \"OPENAI\",\n TRIGGER_AUTOMATION_RUN = \"TRIGGER_AUTOMATION_RUN\",\n // these used to be lowercase step IDs, maintain for backwards compat\n discord = \"discord\",\n slack = \"slack\",\n zapier = \"zapier\",\n integromat = \"integromat\",\n}\n\nexport interface EmailInvite {\n startTime: Date\n endTime: Date\n summary: string\n location?: string\n url?: string\n}\n\nexport interface SendEmailOpts {\n // workspaceId If finer grain controls being used then this will lookup config for workspace.\n workspaceId?: string\n // user If sending to an existing user the object can be provided, this is used in the context.\n user: User\n // from If sending from an address that is not what is configured in the SMTP config.\n from?: string\n // contents If sending a custom email then can supply contents which will be added to it.\n contents?: string\n // subject A custom subject can be specified if the config one is not desired.\n subject?: string\n // info Pass in a structure of information to be stored alongside the invitation.\n info?: any\n cc?: boolean\n bcc?: boolean\n automation?: boolean\n invite?: EmailInvite\n}\n\nexport const AutomationStepIdArray = [\n ...Object.values(AutomationActionStepId),\n ...Object.values(AutomationTriggerStepId),\n]\n\nexport interface Automation extends Document {\n definition: {\n steps: AutomationStep[]\n trigger: AutomationTrigger\n }\n screenId?: string\n uiTree?: any\n appId: string\n live?: boolean\n name: string\n internal?: boolean\n type?: string\n}\n\ninterface BaseIOStructure {\n type?: AutomationIOType\n customType?: AutomationCustomIOType\n title?: string\n description?: string\n dependsOn?: string\n enum?: string[]\n pretty?: string[]\n properties?: {\n [key: string]: BaseIOStructure\n }\n required?: string[]\n}\n\ninterface InputOutputBlock {\n properties: {\n [key: string]: BaseIOStructure\n }\n required?: string[]\n}\n\nexport interface AutomationStepSchema {\n name: string\n stepTitle?: string\n tagline: string\n icon: string\n description: string\n type: AutomationStepType\n internal?: boolean\n deprecated?: boolean\n stepId: AutomationTriggerStepId | AutomationActionStepId\n blockToLoop?: string\n inputs: {\n [key: string]: any\n }\n schema: {\n inputs: InputOutputBlock\n outputs: InputOutputBlock\n }\n custom?: boolean\n features?: Partial<Record<AutomationFeature, boolean>>\n}\n\nexport enum AutomationFeature {\n LOOPING = 
\"LOOPING\",\n}\n\nexport interface AutomationStep extends AutomationStepSchema {\n id: string\n}\n\nexport interface AutomationTriggerSchema extends AutomationStepSchema {\n event?: string\n cronJobId?: string\n}\n\nexport interface AutomationTrigger extends AutomationTriggerSchema {\n id: string\n}\n\nexport enum AutomationStepStatus {\n NO_ITERATIONS = \"no_iterations\",\n}\n\nexport enum AutomationStatus {\n SUCCESS = \"success\",\n ERROR = \"error\",\n STOPPED = \"stopped\",\n STOPPED_ERROR = \"stopped_error\",\n}\n\nexport interface AutomationResults {\n automationId?: string\n status?: AutomationStatus\n trigger?: any\n steps: {\n stepId: AutomationTriggerStepId | AutomationActionStepId\n inputs: {\n [key: string]: any\n }\n outputs: {\n [key: string]: any\n }\n }[]\n}\n\nexport interface AutomationLog extends AutomationResults, Document {\n automationName: string\n _rev?: string\n}\n\nexport interface AutomationLogPage {\n data: AutomationLog[]\n hasNextPage: boolean\n nextPage?: string\n}\n\nexport type AutomationStepInput = {\n inputs: Record<string, any>\n context: Record<string, any>\n emitter: EventEmitter\n appId: string\n apiKey?: string\n}\n\nexport interface AutomationMetadata extends Document {\n errorCount?: number\n automationChainCount?: number\n}\n", "import { Document } from \"../document\"\nimport { SourceName } from \"../../sdk\"\nimport { Table } from \"./table\"\n\nexport interface Datasource extends Document {\n type: string\n name?: string\n source: SourceName\n // the config is defined by the schema\n config?: Record<string, any>\n plus?: boolean\n isSQL?: boolean\n entities?: {\n [key: string]: Table\n }\n}\n\nexport enum RestAuthType {\n BASIC = \"basic\",\n BEARER = \"bearer\",\n}\n\nexport interface RestBasicAuthConfig {\n username: string\n password: string\n}\n\nexport interface RestBearerAuthConfig {\n token: string\n}\n\nexport interface RestAuthConfig {\n _id: string\n name: string\n type: RestAuthType\n config: RestBasicAuthConfig | RestBearerAuthConfig\n}\n\nexport interface RestConfig {\n url: string\n rejectUnauthorized: boolean\n defaultHeaders: {\n [key: string]: any\n }\n legacyHttpParser: boolean\n authConfigs: RestAuthConfig[]\n staticVariables: {\n [key: string]: string\n }\n dynamicVariables: [\n {\n name: string\n queryId: string\n value: string\n }\n ]\n}\n", "import { Document } from \"../document\"\n\nexport interface Layout extends Document {\n props: any\n layoutId?: string\n}\n", "import { Document } from \"../document\"\n\nexport interface Query extends Document {\n datasourceId: string\n name: string\n parameters: QueryParameter[]\n fields: RestQueryFields | any\n transformer: string | null\n schema: Record<string, { name?: string; type: string }>\n readable: boolean\n queryVerb: string\n}\n\nexport interface QueryParameter {\n name: string\n default: string\n}\n\nexport interface RestQueryFields {\n path: string\n queryString?: string\n headers: { [key: string]: any }\n disabledHeaders: { [key: string]: any }\n requestBody: any\n bodyType: string\n json: object\n method: string\n authConfigId: string\n pagination: PaginationConfig | null\n paginationValues: PaginationValues | null\n}\n\nexport interface PaginationConfig {\n type: string\n location: string\n pageParam: string\n sizeParam: string | null\n responseParam: string | null\n}\n\nexport interface PaginationValues {\n page: string | number | null\n limit: number | null\n}\n\nexport interface PreviewQueryRequest extends Omit<Query, \"parameters\"> {\n parameters: {}\n 
flags?: {\n urlName?: boolean\n }\n}\n", "import { Document } from \"../document\"\n\nexport interface Role extends Document {\n permissionId: string\n inherits?: string\n permissions: { [key: string]: string[] }\n version?: string\n}\n", "import { Document } from \"../../document\"\nimport { View, ViewV2 } from \"../view\"\nimport { RenameColumn } from \"../../../sdk\"\nimport { TableSchema } from \"./schema\"\n\nexport const INTERNAL_TABLE_SOURCE_ID = \"bb_internal\"\n\nexport enum TableSourceType {\n EXTERNAL = \"external\",\n INTERNAL = \"internal\",\n}\n\nexport interface Table extends Document {\n type: \"table\"\n sourceType: TableSourceType\n views?: { [key: string]: View | ViewV2 }\n name: string\n sourceId: string\n primary?: string[]\n schema: TableSchema\n primaryDisplay?: string\n relatedFormula?: string[]\n constrained?: string[]\n sql?: boolean\n indexes?: { [key: string]: any }\n created?: boolean\n rowHeight?: number\n}\n\nexport interface TableRequest extends Table {\n _rename?: RenameColumn\n created?: boolean\n}\n", "import { Document } from \"../document\"\n\nexport enum FieldType {\n STRING = \"string\",\n LONGFORM = \"longform\",\n OPTIONS = \"options\",\n NUMBER = \"number\",\n BOOLEAN = \"boolean\",\n ARRAY = \"array\",\n DATETIME = \"datetime\",\n ATTACHMENT = \"attachment\",\n LINK = \"link\",\n FORMULA = \"formula\",\n AUTO = \"auto\",\n JSON = \"json\",\n INTERNAL = \"internal\",\n BARCODEQR = \"barcodeqr\",\n BIGINT = \"bigint\",\n BB_REFERENCE = \"bb_reference\",\n}\n\nexport interface RowAttachment {\n size: number\n name: string\n extension: string\n key: string\n // Populated on read\n url?: string\n}\n\nexport interface Row extends Document {\n type?: string\n tableId?: string\n _viewId?: string\n [key: string]: any\n}\n\nexport enum FieldSubtype {\n USER = \"user\",\n USERS = \"users\",\n}\n\nexport const FieldTypeSubtypes = {\n BB_REFERENCE: {\n USER: FieldSubtype.USER,\n USERS: FieldSubtype.USERS,\n },\n}\n", "export enum RelationshipType {\n ONE_TO_MANY = \"one-to-many\",\n MANY_TO_ONE = \"many-to-one\",\n MANY_TO_MANY = \"many-to-many\",\n}\n\nexport enum AutoReason {\n FOREIGN_KEY = \"foreign_key\",\n}\n\nexport enum AutoFieldSubTypes {\n CREATED_BY = \"createdBy\",\n CREATED_AT = \"createdAt\",\n UPDATED_BY = \"updatedBy\",\n UPDATED_AT = \"updatedAt\",\n AUTO_ID = \"autoID\",\n}\n\nexport enum FormulaTypes {\n STATIC = \"static\",\n DYNAMIC = \"dynamic\",\n}\n", "// all added by grid/table when defining the\n// column size, position and whether it can be viewed\nimport { FieldSubtype, FieldType } from \"../row\"\nimport {\n AutoFieldSubTypes,\n AutoReason,\n FormulaTypes,\n RelationshipType,\n} from \"./constants\"\n\nexport interface UIFieldMetadata {\n order?: number\n width?: number\n visible?: boolean\n icon?: string\n}\n\ninterface BaseRelationshipFieldMetadata\n extends Omit<BaseFieldSchema, \"subtype\"> {\n type: FieldType.LINK\n main?: boolean\n fieldName: string\n tableId: string\n subtype?: AutoFieldSubTypes.CREATED_BY | AutoFieldSubTypes.UPDATED_BY\n}\n\n// External tables use junction tables, internal tables don't require them\ntype ManyToManyJunctionTableMetadata =\n | {\n through: string\n throughFrom: string\n throughTo: string\n }\n | {\n through?: never\n throughFrom?: never\n throughTo?: never\n }\n\nexport type ManyToManyRelationshipFieldMetadata =\n BaseRelationshipFieldMetadata & {\n relationshipType: RelationshipType.MANY_TO_MANY\n } & ManyToManyJunctionTableMetadata\n\nexport interface OneToManyRelationshipFieldMetadata\n 
extends BaseRelationshipFieldMetadata {\n relationshipType: RelationshipType.ONE_TO_MANY\n foreignKey?: string\n}\nexport interface ManyToOneRelationshipFieldMetadata\n extends BaseRelationshipFieldMetadata {\n relationshipType: RelationshipType.MANY_TO_ONE\n foreignKey?: string\n}\nexport type RelationshipFieldMetadata =\n | ManyToManyRelationshipFieldMetadata\n | OneToManyRelationshipFieldMetadata\n | ManyToOneRelationshipFieldMetadata\n\nexport interface AutoColumnFieldMetadata\n extends Omit<BaseFieldSchema, \"subtype\"> {\n type: FieldType.AUTO\n autocolumn: true\n subtype?: AutoFieldSubTypes\n lastID?: number\n // if the column was turned to an auto-column for SQL, explains why (primary, foreign etc)\n autoReason?: AutoReason\n}\n\nexport interface NumberFieldMetadata extends Omit<BaseFieldSchema, \"subtype\"> {\n type: FieldType.NUMBER\n subtype?: AutoFieldSubTypes.AUTO_ID\n lastID?: number\n autoReason?: AutoReason.FOREIGN_KEY\n // used specifically when Budibase generates external tables, this denotes if a number field\n // is a foreign key used for a many-to-many relationship\n meta?: {\n toTable: string\n toKey: string\n }\n}\n\nexport interface DateFieldMetadata extends Omit<BaseFieldSchema, \"subtype\"> {\n type: FieldType.DATETIME\n ignoreTimezones?: boolean\n timeOnly?: boolean\n subtype?: AutoFieldSubTypes.CREATED_AT | AutoFieldSubTypes.UPDATED_AT\n}\n\nexport interface LongFormFieldMetadata extends BaseFieldSchema {\n type: FieldType.LONGFORM\n useRichText?: boolean | null\n}\n\nexport interface FormulaFieldMetadata extends BaseFieldSchema {\n type: FieldType.FORMULA\n formula: string\n formulaType?: FormulaTypes\n}\n\nexport interface BBReferenceFieldMetadata\n extends Omit<BaseFieldSchema, \"subtype\"> {\n type: FieldType.BB_REFERENCE\n subtype: FieldSubtype.USER | FieldSubtype.USERS\n relationshipType?: RelationshipType\n}\n\nexport interface FieldConstraints {\n type?: string\n email?: boolean\n inclusion?: string[]\n length?: {\n minimum?: string | number | null\n maximum?: string | number | null\n }\n numericality?: {\n greaterThanOrEqualTo: string | null\n lessThanOrEqualTo: string | null\n }\n presence?:\n | boolean\n | {\n allowEmpty?: boolean\n }\n datetime?: {\n latest: string\n earliest: string\n }\n}\n\ninterface BaseFieldSchema extends UIFieldMetadata {\n type: FieldType\n name: string\n sortable?: boolean\n // only used by external databases, to denote the real type\n externalType?: string\n constraints?: FieldConstraints\n autocolumn?: boolean\n autoReason?: AutoReason.FOREIGN_KEY\n subtype?: never\n}\n\ninterface OtherFieldMetadata extends BaseFieldSchema {\n type: Exclude<\n FieldType,\n | FieldType.DATETIME\n | FieldType.LINK\n | FieldType.AUTO\n | FieldType.FORMULA\n | FieldType.NUMBER\n | FieldType.LONGFORM\n >\n}\n\nexport type FieldSchema =\n | OtherFieldMetadata\n | DateFieldMetadata\n | RelationshipFieldMetadata\n | AutoColumnFieldMetadata\n | FormulaFieldMetadata\n | NumberFieldMetadata\n | LongFormFieldMetadata\n | BBReferenceFieldMetadata\n\nexport interface TableSchema {\n [key: string]: FieldSchema\n}\n\nexport function isRelationshipField(\n field: FieldSchema\n): field is RelationshipFieldMetadata {\n return field.type === FieldType.LINK\n}\n\nexport function isManyToMany(\n field: RelationshipFieldMetadata\n): field is ManyToManyRelationshipFieldMetadata {\n return field.relationshipType === RelationshipType.MANY_TO_MANY\n}\n\nexport function isOneToMany(\n field: RelationshipFieldMetadata\n): field is OneToManyRelationshipFieldMetadata {\n 
return field.relationshipType === RelationshipType.ONE_TO_MANY\n}\n\nexport function isManyToOne(\n field: RelationshipFieldMetadata\n): field is ManyToOneRelationshipFieldMetadata {\n return field.relationshipType === RelationshipType.MANY_TO_ONE\n}\n\nexport function isBBReferenceField(\n field: FieldSchema\n): field is BBReferenceFieldMetadata {\n return field.type === FieldType.BB_REFERENCE\n}\n", "export * from \"./table\"\nexport * from \"./schema\"\nexport * from \"./constants\"\n", "import { Document } from \"../document\"\nimport { Component } from \"./component\"\n\nexport interface ScreenProps extends Component {\n size?: string\n gap?: string\n direction?: string\n vAlign?: string\n hAlign?: string\n}\n\nexport interface ScreenRouting {\n route: string\n roleId: string\n homeScreen?: boolean\n}\n\nexport interface Screen extends Document {\n layoutId?: string\n showNavigation?: boolean\n width?: string\n routing: ScreenRouting\n props: ScreenProps\n name?: string\n}\n", "import { SearchFilter, SortOrder, SortType } from \"../../api\"\nimport { UIFieldMetadata } from \"./table\"\nimport { Document } from \"../document\"\nimport { DBView } from \"../../sdk\"\n\nexport type ViewTemplateOpts = {\n field: string\n tableId: string\n groupBy: string\n filters: ViewFilter[]\n schema: any\n calculation: string\n groupByMulti?: boolean\n}\n\nexport interface InMemoryView extends Document {\n view: DBView\n name: string\n tableId: string\n groupBy?: string\n}\n\nexport interface View {\n name?: string\n tableId: string\n field?: string\n filters: ViewFilter[]\n schema: ViewSchema\n calculation?: ViewCalculation\n map?: string\n reduce?: any\n meta?: ViewTemplateOpts\n}\n\nexport interface ViewV2 {\n version: 2\n id: string\n name: string\n primaryDisplay?: string\n tableId: string\n query?: SearchFilter[]\n sort?: {\n field: string\n order?: SortOrder\n type?: SortType\n }\n schema?: Record<string, UIFieldMetadata>\n}\n\nexport type ViewSchema = ViewCountOrSumSchema | ViewStatisticsSchema\n\nexport interface ViewCountOrSumSchema {\n field: string\n value: string\n}\n\n/**\n e.g:\n \"min\": {\n \"type\": \"number\"\n },\n \"max\": {\n \"type\": \"number\"\n }\n */\nexport interface ViewStatisticsSchema {\n [key: string]: {\n type: string\n }\n}\n\nexport interface ViewFilter {\n value?: any\n condition: string\n key: string\n conjunction?: string\n}\n\nexport enum ViewCalculation {\n SUM = \"sum\",\n COUNT = \"count\",\n STATISTICS = \"stats\",\n}\n", "export const SEPARATOR = \"_\"\nexport const UNICODE_MAX = \"\\ufff0\"\n\nexport const prefixed = (type: DocumentType) => `${type}${SEPARATOR}`\n\nexport enum DocumentType {\n USER = \"us\",\n GROUP = \"gr\",\n WORKSPACE = \"workspace\",\n CONFIG = \"config\",\n TEMPLATE = \"template\",\n APP = \"app\",\n DEV = \"dev\",\n APP_DEV = \"app_dev\",\n APP_METADATA = \"app_metadata\",\n ROLE = \"role\",\n MIGRATIONS = \"migrations\",\n DEV_INFO = \"devinfo\",\n AUTOMATION_LOG = \"log_au\",\n ACCOUNT_METADATA = \"acc_metadata\",\n PLUGIN = \"plg\",\n DATASOURCE = \"datasource\",\n DATASOURCE_PLUS = \"datasource_plus\",\n APP_BACKUP = \"backup\",\n TABLE = \"ta\",\n ROW = \"ro\",\n AUTOMATION = \"au\",\n LINK = \"li\",\n WEBHOOK = \"wh\",\n INSTANCE = \"inst\",\n LAYOUT = \"layout\",\n SCREEN = \"screen\",\n QUERY = \"query\",\n DEPLOYMENTS = \"deployments\",\n METADATA = \"metadata\",\n MEM_VIEW = \"view\",\n USER_FLAG = \"flag\",\n AUTOMATION_METADATA = \"meta_au\",\n AUDIT_LOG = \"al\",\n APP_MIGRATION_METADATA = \"_design/migrations\",\n}\n\n// 
these are the core documents that make up the data, design\n// and automation sections of an app. This excludes any internal\n// rows as we shouldn't import data.\nexport const DocumentTypesToImport: DocumentType[] = [\n DocumentType.ROLE,\n DocumentType.DATASOURCE,\n DocumentType.DATASOURCE_PLUS,\n DocumentType.TABLE,\n DocumentType.AUTOMATION,\n DocumentType.WEBHOOK,\n DocumentType.SCREEN,\n DocumentType.QUERY,\n DocumentType.METADATA,\n DocumentType.MEM_VIEW,\n // Deprecated but still copied\n DocumentType.INSTANCE,\n DocumentType.LAYOUT,\n]\n\nexport enum InternalTable {\n USER_METADATA = \"ta_users\",\n}\n\n// these documents don't really exist, they are part of other\n// documents or enriched into existence as part of get requests\nexport enum VirtualDocumentType {\n VIEW = \"view\",\n}\n\nexport interface Document {\n _id?: string\n _rev?: string\n createdAt?: string | number\n updatedAt?: string\n}\n\nexport interface AnyDocument extends Document {\n [key: string]: any\n}\n", "import { User } from \"../global\"\nimport { Row } from \"./row\"\nimport { ContextUser } from \"../../sdk\"\n\nexport type UserMetadata = User & Row\nexport type ContextUserMetadata = ContextUser & Row\n", "import { Document } from \"../document\"\nimport { User } from \"../../\"\n\nexport enum AppBackupType {\n BACKUP = \"backup\",\n RESTORE = \"restore\",\n}\n\nexport enum AppBackupStatus {\n STARTED = \"started\",\n PENDING = \"pending\",\n COMPLETE = \"complete\",\n FAILED = \"failed\",\n}\n\nexport enum AppBackupTrigger {\n PUBLISH = \"publish\",\n MANUAL = \"manual\",\n SCHEDULED = \"scheduled\",\n RESTORING = \"restoring\",\n}\n\nexport interface AppBackupContents {\n datasources: string[]\n screens: string[]\n automations: string[]\n}\n\nexport interface AppBackupMetadata {\n appId: string\n trigger?: AppBackupTrigger\n type: AppBackupType\n status: AppBackupStatus\n name?: string\n createdBy?: string | User\n timestamp: string\n finishedAt?: string\n startedAt?: string\n contents?: AppBackupContents\n}\n\nexport interface AppBackup extends Document, AppBackupMetadata {\n _id: string\n filename?: string\n}\n\nexport type AppBackupFetchOpts = {\n trigger?: AppBackupTrigger\n type?: AppBackupType\n limit?: number\n page?: string\n paginate?: boolean\n startDate?: string\n endDate?: string\n}\n\nexport interface AppBackupQueueData {\n appId: string\n docId: string\n docRev: string\n export?: {\n trigger: AppBackupTrigger\n name?: string\n createdBy?: string\n }\n import?: {\n backupId: string\n nameForBackup: string\n createdBy?: string\n }\n}\n", "import { Document } from \"../document\"\n\nexport enum WebhookActionType {\n AUTOMATION = \"automation\",\n}\n\nexport interface Webhook extends Document {\n live: boolean\n name: string\n action: {\n type: WebhookActionType\n target: string\n }\n bodySchema?: any\n}\n", "import { Document } from \"../document\"\n\nexport interface LinkInfo {\n rowId: string\n fieldName: string\n tableId: string\n}\n\nexport interface LinkDocument extends Document {\n type: string\n doc1: LinkInfo\n doc2: LinkInfo\n}\n\nexport interface LinkDocumentValue {\n id: string\n thisId: string\n fieldName: string\n}\n", "import { Document } from \"../document\"\n\nexport interface Component extends Document {\n _instanceName: string\n _styles: { [key: string]: any }\n _component: string\n _children?: Component[]\n [key: string]: any\n}\n", "export * from \"./app\"\nexport * from \"./automation\"\nexport * from \"./datasource\"\nexport * from \"./layout\"\nexport * from 
\"./query\"\nexport * from \"./role\"\nexport * from \"./table\"\nexport * from \"./screen\"\nexport * from \"./view\"\nexport * from \"../document\"\nexport * from \"./row\"\nexport * from \"./user\"\nexport * from \"./backup\"\nexport * from \"./webhook\"\nexport * from \"./links\"\nexport * from \"./component\"\n", "import { Document } from \"../document\"\n\nexport interface Config<T = any> extends Document {\n type: ConfigType\n config: T\n}\n\nexport interface SMTPInnerConfig {\n port: number\n host: string\n from: string\n subject?: string\n secure: boolean\n auth?: {\n user: string\n pass: string\n }\n connectionTimeout?: any\n}\n\nexport interface SMTPConfig extends Config<SMTPInnerConfig> {}\n\n/**\n * Accessible only via pro.\n */\nexport interface SettingsBrandingConfig {\n faviconUrl?: string\n faviconUrlEtag?: string\n\n emailBrandingEnabled?: boolean\n testimonialsEnabled?: boolean\n platformTitle?: string\n loginHeading?: string\n loginButton?: string\n\n metaDescription?: string\n metaImageUrl?: string\n metaTitle?: string\n}\n\nexport interface SettingsInnerConfig {\n platformUrl?: string\n company?: string\n logoUrl?: string // Populated on read\n logoUrlEtag?: string\n uniqueTenantId?: string\n analyticsEnabled?: boolean\n isSSOEnforced?: boolean\n}\n\nexport interface SettingsConfig extends Config<SettingsInnerConfig> {}\n\nexport type SSOConfigType = ConfigType.GOOGLE | ConfigType.OIDC\nexport type SSOConfig = GoogleInnerConfig | OIDCInnerConfig\n\nexport interface GoogleInnerConfig {\n clientID: string\n clientSecret: string\n activated: boolean\n /**\n * @deprecated read only\n */\n callbackURL?: string\n}\n\nexport interface GoogleConfig extends Config<GoogleInnerConfig> {}\n\nexport interface OIDCStrategyConfiguration {\n issuer: string\n authorizationURL: string\n tokenURL: string\n userInfoURL: string\n clientID: string\n clientSecret: string\n callbackURL: string\n}\n\nexport interface OIDCConfigs {\n configs: OIDCInnerConfig[]\n}\n\nexport interface OIDCLogosInnerConfig {\n [key: string]: string\n}\n\nexport interface OIDCLogosConfig extends Config<OIDCLogosInnerConfig> {}\n\nexport interface OIDCInnerConfig {\n configUrl: string\n clientID: string\n clientSecret: string\n logo: string\n name: string\n uuid: string\n activated: boolean\n scopes: string[]\n}\n\nexport interface OIDCConfig extends Config<OIDCConfigs> {}\n\nexport interface OIDCWellKnownConfig {\n issuer: string\n authorization_endpoint: string\n token_endpoint: string\n userinfo_endpoint: string\n}\n\nexport interface SCIMInnerConfig {\n enabled: boolean\n}\n\nexport interface SCIMConfig extends Config<SCIMInnerConfig> {}\n\nexport const isSettingsConfig = (config: Config): config is SettingsConfig =>\n config.type === ConfigType.SETTINGS\n\nexport const isSMTPConfig = (config: Config): config is SMTPConfig =>\n config.type === ConfigType.SMTP\n\nexport const isGoogleConfig = (config: Config): config is GoogleConfig =>\n config.type === ConfigType.GOOGLE\n\nexport const isOIDCConfig = (config: Config): config is OIDCConfig =>\n config.type === ConfigType.OIDC\n\nexport const isSCIMConfig = (config: Config): config is SCIMConfig =>\n config.type === ConfigType.SCIM\n\nexport enum ConfigType {\n SETTINGS = \"settings\",\n ACCOUNT = \"account\",\n SMTP = \"smtp\",\n GOOGLE = \"google\",\n OIDC = \"oidc\",\n OIDC_LOGOS = \"logos_oidc\",\n SCIM = \"scim\",\n}\n", "import { Document } from \"../document\"\n\n// SSO\n\nexport interface SSOProfileJson {\n email?: string\n picture?: string\n}\n\nexport 
interface OAuth2 {\n accessToken: string\n refreshToken?: string\n}\n\nexport enum SSOProviderType {\n OIDC = \"oidc\",\n GOOGLE = \"google\",\n}\n\nexport interface UserSSO {\n provider: string // the individual provider e.g. Okta, Auth0, Google\n providerType: SSOProviderType\n oauth2?: OAuth2\n thirdPartyProfile?: SSOProfileJson\n}\n\nexport type SSOUser = User & UserSSO\n\nexport function isSSOUser(user: User): user is SSOUser {\n return !!(user as SSOUser).providerType\n}\n\n// USER\n\nexport interface User extends Document {\n tenantId: string\n email: string\n userId?: string\n firstName?: string\n lastName?: string\n pictureUrl?: string\n forceResetPassword?: boolean\n roles: UserRoles\n builder?: {\n global?: boolean\n apps?: string[]\n creator?: boolean\n }\n admin?: {\n global: boolean\n }\n password?: string\n status?: UserStatus\n createdAt?: number // override the default createdAt behaviour - users sdk historically set this to Date.now()\n dayPassRecordedAt?: string\n userGroups?: string[]\n onboardedAt?: string\n scimInfo?: { isSync: true } & Record<string, any>\n ssoId?: string\n}\n\nexport enum UserStatus {\n ACTIVE = \"active\",\n INACTIVE = \"inactive\",\n}\n\nexport interface UserRoles {\n [key: string]: string\n}\n\n// UTILITY TYPES\n\nexport interface BuilderUser extends User {\n builder: {\n global?: boolean\n apps?: string[]\n }\n}\n\nexport interface AdminUser extends User {\n admin: {\n global: boolean\n }\n builder: {\n global: boolean\n }\n}\n\nexport interface AdminOnlyUser extends User {\n admin: {\n global: boolean\n }\n}\n\nexport function isUser(user: object): user is User {\n return !!(user as User).roles\n}\n", "import { PaginationResponse } from \"../../api\"\nimport { Document } from \"../document\"\n\nexport interface UserGroup extends Document {\n name: string\n icon: string\n color: string\n users?: GroupUser[]\n roles?: UserGroupRoles\n // same structure as users\n builder?: {\n apps: string[]\n }\n createdAt?: number\n scimInfo?: {\n externalId: string\n isSync: boolean\n }\n}\n\nexport interface GroupUser {\n _id: string\n email: string\n}\n\nexport interface UserGroupRoles {\n [key: string]: string\n}\n\nexport interface SearchGroupRequest {}\nexport interface SearchGroupResponse {\n data: UserGroup[]\n}\n\nexport interface SearchUserGroupResponse extends PaginationResponse {\n users: {\n _id: any\n email: any\n }[]\n}\n", "import { Document } from \"../document\"\n\nexport enum PluginType {\n DATASOURCE = \"datasource\",\n COMPONENT = \"component\",\n AUTOMATION = \"automation\",\n}\n\nexport enum PluginSource {\n NPM = \"NPM\",\n GITHUB = \"Github\",\n URL = \"URL\",\n FILE = \"File Upload\",\n}\nexport interface FileType {\n path: string\n name: string\n}\n\nexport interface Plugin extends Document {\n description: string\n name: string\n version: string\n source: PluginSource\n package: { [key: string]: any }\n hash: string\n schema: {\n type: PluginType\n [key: string]: any\n }\n iconFileName?: string\n // Populated on read\n jsUrl?: string\n // Populated on read\n iconUrl?: string\n}\n\nexport const PLUGIN_TYPE_ARR = Object.values(PluginType)\n", "import { MonthlyQuotaName, StaticQuotaName } from \"../../sdk\"\n\nexport enum BreakdownQuotaName {\n ROW_QUERIES = \"rowQueries\",\n DATASOURCE_QUERIES = \"datasourceQueries\",\n AUTOMATIONS = \"automations\",\n}\n\nexport const APP_QUOTA_NAMES = [\n StaticQuotaName.ROWS,\n MonthlyQuotaName.QUERIES,\n MonthlyQuotaName.AUTOMATIONS,\n]\n\nexport const BREAKDOWN_QUOTA_NAMES = [\n 
MonthlyQuotaName.QUERIES,\n MonthlyQuotaName.AUTOMATIONS,\n]\n\nexport interface UsageBreakdown {\n parent: MonthlyQuotaName\n values: {\n [key: string]: number\n }\n}\n\nexport type QuotaTriggers = {\n [key: string]: string | undefined\n}\n\nexport interface StaticUsage {\n [StaticQuotaName.APPS]: number\n [StaticQuotaName.PLUGINS]: number\n [StaticQuotaName.USERS]: number\n [StaticQuotaName.CREATORS]: number\n [StaticQuotaName.USER_GROUPS]: number\n [StaticQuotaName.ROWS]: number\n triggers: {\n [key in StaticQuotaName]?: QuotaTriggers\n }\n}\n\nexport interface MonthlyUsage {\n [MonthlyQuotaName.QUERIES]: number\n [MonthlyQuotaName.AUTOMATIONS]: number\n [MonthlyQuotaName.DAY_PASSES]: number\n triggers: {\n [key in MonthlyQuotaName]?: QuotaTriggers\n }\n breakdown?: {\n [key in BreakdownQuotaName]?: UsageBreakdown\n }\n}\n\nexport interface BaseQuotaUsage {\n usageQuota: StaticUsage\n monthly: {\n [key: string]: MonthlyUsage\n }\n}\n\nexport interface QuotaUsage extends BaseQuotaUsage {\n _id: string\n _rev?: string\n quotaReset: string\n apps?: {\n [key: string]: BaseQuotaUsage\n }\n}\n\nexport type SetUsageValues = {\n total: number\n app?: number\n breakdown?: number\n triggers?: QuotaTriggers\n}\n\nexport type UsageValues = {\n total: number\n app?: number\n breakdown?: number\n}\n", "import { Document } from \"../document\"\n\nexport enum ScheduleType {\n APP_BACKUP = \"app_backup\",\n}\n\nexport enum ScheduleRepeatPeriod {\n DAILY = \"daily\",\n WEEKLY = \"weekly\",\n MONTHLY = \"monthly\",\n}\n\nexport interface Schedule extends Document {\n type: ScheduleType\n name: string\n startDate: string\n repeat: ScheduleRepeatPeriod\n metadata: ScheduleMetadata\n}\n\nexport type ScheduleMetadata = AppBackupScheduleMetadata\n\nexport const isAppBackupMetadata = (\n type: ScheduleType,\n metadata: ScheduleMetadata\n): metadata is AppBackupScheduleMetadata => {\n return type === ScheduleType.APP_BACKUP\n}\n\nexport interface AppBackupScheduleMetadata {\n apps: string[]\n}\n", "import { Document } from \"../document\"\n\nexport interface Template extends Document {\n ownerId?: string\n name?: string\n contents: string\n purpose: string\n type?: string\n}\n", "import { Document } from \"../document\"\n\nexport interface EnvironmentVariablesDoc extends Document {\n variables: string\n}\n\nexport type EnvironmentVariableValue = {\n production: string\n development: string\n}\n\n// what comes out of the \"variables\" when it is decrypted\nexport type EnvironmentVariablesDecrypted = Record<\n string,\n EnvironmentVariableValue\n>\n\nexport interface EnvironmentVariablesDocDecrypted extends Document {\n variables: EnvironmentVariablesDecrypted\n}\n", "import { Document } from \"../document\"\nimport { Event } from \"../../sdk\"\n\nexport const AuditLogSystemUser = \"SYSTEM\"\n\nexport type FallbackInfo = {\n appName?: string\n email?: string\n}\n\nexport interface AuditLogDoc extends Document {\n appId?: string\n event: Event\n userId: string\n timestamp: string\n metadata: any\n name: string\n fallback?: FallbackInfo\n}\n", "export * from \"./config\"\nexport * from \"./user\"\nexport * from \"./userGroup\"\nexport * from \"./plugin\"\nexport * from \"./quotas\"\nexport * from \"./schedule\"\nexport * from \"./templates\"\nexport * from \"./environmentVariables\"\nexport * from \"./auditLogs\"\n", "import { Document } from \"../document\"\n\nexport interface GlobalInfo {}\n\nexport interface Installation extends Document {\n _id: string\n installId: string\n version: string\n}\n", "import { 
Document } from \"../document\"\n\n/**\n * doc id is user email\n */\nexport interface PlatformUserByEmail extends Document {\n tenantId: string\n userId: string\n}\n\n/**\n * doc id is userId\n */\nexport interface PlatformUserById extends Document {\n tenantId: string\n}\n\n/**\n * doc id is a unique SSO provider ID for the user\n */\nexport interface PlatformUserBySsoId extends Document {\n tenantId: string\n userId: string\n email: string\n}\n\nexport type PlatformUser =\n | PlatformUserByEmail\n | PlatformUserById\n | PlatformUserBySsoId\n", "import { Document } from \"../document\"\n\nexport interface AccountMetadata extends Document {\n email: string\n}\n", "import { Document } from \"../document\"\n\nexport interface Tenants extends Document {\n tenantIds: string[]\n}\n", "export * from \"./info\"\nexport * from \"./users\"\nexport * from \"./accounts\"\nexport * from \"./tenants\"\n", "import { Document } from \"../\"\n\nexport interface RowValue {\n rev: string\n deleted: boolean\n}\n\nexport interface RowResponse<T extends Document> {\n id: string\n key: string\n error: string\n value: T | RowValue\n doc?: T\n}\n\nexport interface AllDocsResponse<T extends Document> {\n offset: number\n total_rows: number\n rows: RowResponse<T>[]\n}\n\nexport type BulkDocsResponse = BulkDocResponse[]\n\ninterface BulkDocResponse {\n ok: boolean\n id: string\n rev: string\n}\n\nexport interface PutResponse {\n ok: boolean\n id: string\n rev: string\n}\n", "export * from \"./account\"\nexport * from \"./app\"\nexport * from \"./global\"\nexport * from \"./platform\"\nexport * from \"./document\"\nexport * from \"./pouch\"\n", "import { Account, AccountSSOProvider } from \"../../documents\"\nimport { Hosting } from \"../../sdk\"\n\nexport interface CreateAccountRequest {\n email: string\n tenantId: string\n hosting: Hosting\n size: string\n profession: string\n // optional fields\n tenantName?: string\n name?: string\n password: string\n provider?: AccountSSOProvider\n thirdPartyProfile: object\n}\n\nexport interface SearchAccountsRequest {\n // one or the other - not both\n email?: string\n tenantId?: string\n}\n\nexport type SearchAccountsResponse = Account[]\n", "export interface PostAccountUserActivity {\n timestamp: number\n}\n\nexport interface PostAccountUserActivityResponse {\n userId: string\n timestamp: number\n}\n", "import { LicenseOverrides, QuotaUsage } from \"../../documents\"\nimport { OfflineLicense, PlanType } from \"../../sdk\"\nimport { ISO8601 } from \"../../shared\"\n\nexport interface GetLicenseRequest {\n // All fields should be optional to cater for\n // historical versions of budibase\n quotaUsage?: QuotaUsage\n install: {\n id: string\n tenantId: string\n version: string\n }\n}\n\nexport interface QuotaTriggeredRequest {\n percentage: number\n name: string\n resetDate?: string\n}\n\nexport interface LicenseActivateRequest {\n installVersion?: string\n}\n\nexport interface UpdateLicenseRequest {\n planType?: PlanType\n overrides?: LicenseOverrides\n}\n\nexport interface CreateOfflineLicenseRequest {\n installationIdentifierBase64: string\n expireAt: ISO8601\n}\n\nexport interface GetOfflineLicenseResponse {\n offlineLicenseToken: string\n license: OfflineLicense\n}\n", "export interface HealthStatusResponse {\n passing: boolean\n checks: {\n login: boolean\n search: boolean\n }\n}\n", "export * from \"./accounts\"\nexport * from \"./user\"\nexport * from \"./license\"\nexport * from \"./status\"\n", "export enum PingSource {\n BUILDER = \"builder\",\n APP = 
\"app\",\n}\n\nexport interface AnalyticsPingRequest {\n source: PingSource\n timezone: string\n embedded?: boolean\n}\n", "export interface LoginRequest {\n username: string\n password: string\n}\n\nexport interface PasswordResetRequest {\n email: string\n}\n\nexport interface PasswordResetUpdateRequest {\n resetCode: string\n password: string\n}\n\nexport interface UpdateSelfRequest {\n firstName?: string\n lastName?: string\n password?: string\n forceResetPassword?: boolean\n onboardedAt?: string\n}\n\nexport interface UpdateSelfResponse {\n _id: string\n _rev: string\n}\n", "import { User } from \"../../documents\"\nimport { SearchQuery } from \"./searchFilter\"\n\nexport interface SaveUserResponse {\n _id: string\n _rev: string\n email: string\n}\n\nexport interface UserDetails {\n _id: string\n email: string\n password?: string\n}\n\nexport interface BulkUserRequest {\n delete?: {\n userIds: string[]\n }\n create?: {\n roles?: any[]\n users: User[]\n groups: any[]\n }\n}\n\nexport interface BulkUserCreated {\n successful: UserDetails[]\n unsuccessful: { email: string; reason: string }[]\n}\n\nexport interface BulkUserDeleted {\n successful: UserDetails[]\n unsuccessful: { _id: string; email: string; reason: string }[]\n}\n\nexport interface BulkUserResponse {\n created?: BulkUserCreated\n deleted?: BulkUserDeleted\n message?: string\n}\n\nexport interface InviteUserRequest {\n email: string\n userInfo: any\n}\n\nexport type InviteUsersRequest = InviteUserRequest[]\n\nexport interface InviteUsersResponse {\n successful: { email: string }[]\n unsuccessful: { email: string; reason: string }[]\n created?: boolean\n}\n\nexport interface SearchUsersRequest {\n bookmark?: string\n query?: SearchQuery\n appId?: string\n limit?: number\n paginate?: boolean\n}\n\nexport interface CreateAdminUserRequest {\n email: string\n password: string\n tenantId: string\n ssoId?: string\n}\n\nexport interface CreateAdminUserResponse {\n _id: string\n _rev: string\n email: string\n}\n\nexport interface AcceptUserInviteRequest {\n inviteCode: string\n password: string\n firstName: string\n lastName: string\n}\n\nexport interface AcceptUserInviteResponse {\n _id: string\n _rev: string\n email: string\n}\n\nexport interface SyncUserRequest {\n previousUser?: User\n}\n", "export interface APIError {\n message: string\n status: number\n error?: any\n validationErrors?: any\n}\n", "export interface GetDiagnosticsResponse {\n budibaseVersion: string\n hosting: string\n nodeVersion: string\n platform: string\n cpuArch: string\n cpuCores: number\n cpuInfo: string\n totalMemory: string\n uptime: string\n}\n", "import {\n ScheduleMetadata,\n ScheduleRepeatPeriod,\n ScheduleType,\n} from \"../../documents\"\n\nexport interface CreateScheduleRequest {\n type: ScheduleType\n name: string\n startDate: string\n repeat: ScheduleRepeatPeriod\n metadata: ScheduleMetadata\n}\n\nexport interface UpdateScheduleRequest extends CreateScheduleRequest {}\n", "export interface GetEnvironmentResponse {\n multiTenancy: boolean\n cloud: boolean\n accountPortalUrl: string\n baseUrl: string\n disableAccountPortal: boolean\n isDev: boolean\n}\n", "export * from \"./environment\"\n", "import { AppBackupTrigger, AppBackupType } from \"../../../documents\"\n\nexport interface SearchAppBackupsRequest {\n trigger: AppBackupTrigger\n type: AppBackupType\n startDate: string\n endDate: string\n page?: string\n}\n\nexport interface CreateAppBackupRequest {\n name: string\n}\n\nexport interface CreateAppBackupResponse {\n backupId: string\n message: 
string\n}\n\nexport interface UpdateAppBackupRequest {\n name: string\n}\n\nexport interface ImportAppBackupResponse {\n restoreId: string\n message: string\n}\n", "import { Datasource } from \"../../../documents\"\n\nexport interface CreateDatasourceResponse {\n datasource: Datasource\n errors: Record<string, string>\n}\n\nexport interface UpdateDatasourceResponse {\n datasource: Datasource\n}\n\nexport interface CreateDatasourceRequest {\n datasource: Datasource\n fetchSchema?: boolean\n tablesFilter: string[]\n}\n\nexport interface VerifyDatasourceRequest {\n datasource: Datasource\n}\n\nexport interface VerifyDatasourceResponse {\n connected: boolean\n error?: string\n}\n\nexport interface FetchDatasourceInfoRequest {\n datasource: Datasource\n}\n\nexport interface FetchDatasourceInfoResponse {\n tableNames: string[]\n}\n\nexport interface UpdateDatasourceRequest extends Datasource {\n datasource: Datasource\n}\n\nexport interface BuildSchemaFromSourceRequest {\n tablesFilter?: string[]\n}\n\nexport interface BuildSchemaFromSourceResponse {\n datasource: Datasource\n errors: Record<string, string>\n}\n", "import { Row } from \"../../../documents/app/row\"\n\nexport interface GetRowResponse extends Row {}\n\nexport interface DeleteRows {\n rows: (Row | string)[]\n}\n\nexport interface DeleteRow {\n _id: string\n}\n\nexport type DeleteRowRequest = DeleteRows | DeleteRow\n\nexport interface ValidateResponse {\n valid: boolean\n errors: Record<string, any>\n}\n", "import { ViewV2, UIFieldMetadata } from \"../../../documents\"\n\nexport interface ViewResponse {\n data: ViewV2\n}\n\nexport interface CreateViewRequest\n extends Omit<ViewV2, \"version\" | \"id\" | \"schema\"> {\n schema?: Record<string, UIFieldMetadata>\n}\n\nexport interface UpdateViewRequest extends Omit<ViewV2, \"schema\"> {\n schema?: Record<string, UIFieldMetadata>\n}\n", "import { SearchFilters, SearchParams } from \"../../../sdk\"\nimport { Row } from \"../../../documents\"\nimport { SortOrder } from \"../../../api\"\nimport { ReadStream } from \"fs\"\n\nexport interface SaveRowRequest extends Row {}\n\nexport interface PatchRowRequest extends Row {\n _id: string\n _rev: string\n tableId: string\n}\n\nexport interface PatchRowResponse extends Row {}\n\nexport interface SearchRowRequest extends Omit<SearchParams, \"tableId\"> {}\n\nexport interface SearchViewRowRequest\n extends Pick<\n SearchRowRequest,\n | \"sort\"\n | \"sortOrder\"\n | \"sortType\"\n | \"limit\"\n | \"bookmark\"\n | \"paginate\"\n | \"query\"\n > {}\n\nexport interface SearchRowResponse {\n rows: any[]\n}\n\nexport interface ExportRowsRequest {\n rows: string[]\n columns?: string[]\n query?: SearchFilters\n sort?: string\n sortOrder?: SortOrder\n}\n\nexport type ExportRowsResponse = ReadStream\n", "import {\n FieldSchema,\n Row,\n Table,\n TableRequest,\n TableSchema,\n View,\n ViewV2,\n} from \"../../../documents\"\n\ninterface ViewV2Response extends ViewV2 {\n schema: TableSchema\n}\n\nexport type TableViewsResponse = { [key: string]: View | ViewV2Response }\n\nexport interface TableResponse extends Table {\n views?: TableViewsResponse\n}\n\nexport type FetchTablesResponse = TableResponse[]\n\nexport interface SaveTableRequest extends TableRequest {\n rows?: Row[]\n}\n\nexport type SaveTableResponse = Table\n\nexport interface BulkImportRequest {\n rows: Row[]\n identifierFields?: Array<string>\n}\n\nexport interface BulkImportResponse {\n message: string\n}\n\nexport interface MigrateRequest {\n oldColumn: FieldSchema\n newColumn: 
FieldSchema\n}\n\nexport interface MigrateResponse {\n message: string\n}\n", "import { PlanType } from \"../../../sdk\"\n\nexport interface ResourcePermissionInfo {\n role: string\n permissionType: string\n inheritablePermission?: string\n}\n\nexport interface GetResourcePermsResponse {\n permissions: Record<string, ResourcePermissionInfo>\n requiresPlanToModify?: PlanType\n}\n\nexport interface GetDependantResourcesResponse {\n resourceByType?: Record<string, number>\n}\n", "export interface Upload {\n size: number\n name: string\n url: string\n extension: string\n key: string\n}\n\nexport type ProcessAttachmentResponse = Upload[]\n", "import { ContextUserMetadata } from \"../../../\"\n\nexport type FetchUserMetadataResponse = ContextUserMetadata[]\nexport type FindUserMetadataResponse = ContextUserMetadata\n\nexport interface SetFlagRequest {\n flag: string\n value: any\n}\n", "export * from \"./backup\"\nexport * from \"./datasource\"\nexport * from \"./row\"\nexport * from \"./view\"\nexport * from \"./rows\"\nexport * from \"./table\"\nexport * from \"./permission\"\nexport * from \"./attachment\"\nexport * from \"./user\"\n", "export interface StatusEnvironmentVariableResponse {\n encryptionKeyAvailable: boolean\n}\n\nexport interface CreateEnvironmentVariableRequest {\n name: string\n production: string\n development: string\n}\n\nexport interface UpdateEnvironmentVariableRequest {\n production: string\n development: string\n}\n\nexport interface GetEnvironmentVariablesResponse {\n variables: string[]\n}\n", "import { Event, AuditedEventFriendlyName } from \"../../../sdk\"\nimport {\n PaginationResponse,\n PaginationRequest,\n BasicPaginationRequest,\n} from \"../\"\nimport { User, App } from \"../../../\"\n\nexport interface AuditLogSearchParams {\n userIds?: string[]\n appIds?: string[]\n events?: Event[]\n startDate?: string\n endDate?: string\n fullSearch?: string\n bookmark?: string\n}\n\nexport interface DownloadAuditLogsRequest extends AuditLogSearchParams {}\n\nexport interface SearchAuditLogsRequest\n extends BasicPaginationRequest,\n AuditLogSearchParams {}\n\nexport enum AuditLogResourceStatus {\n DELETED = \"deleted\",\n}\n\nexport type DeletedResourceInfo = {\n _id: string\n status: AuditLogResourceStatus\n email?: string\n name?: string\n}\n\nexport interface AuditLogEnriched {\n app?: App | DeletedResourceInfo\n user: User | DeletedResourceInfo\n event: Event\n timestamp: string\n name: string\n metadata: any\n}\n\nexport interface SearchAuditLogsResponse extends PaginationResponse {\n data: AuditLogEnriched[]\n}\n\nexport interface DefinitionsAuditLogsResponse {\n events: Record<string, string>\n}\n", "export enum EventPublishType {\n ENVIRONMENT_VARIABLE_UPGRADE_PANEL_OPENED = \"environment_variable_upgrade_panel_opened\",\n}\n\nexport interface PostEventPublishRequest {\n type: EventPublishType\n}\n", "import { SettingsConfig, SettingsInnerConfig } from \"../../../documents\"\n\n/**\n * Settings that aren't stored in the database - enriched at runtime.\n */\nexport interface PublicSettingsInnerConfig extends SettingsInnerConfig {\n google: boolean\n googleDatasourceConfigured: boolean\n oidc: boolean\n oidcCallbackUrl: string\n googleCallbackUrl: string\n}\n\nexport interface GetPublicSettingsResponse extends SettingsConfig {\n config: PublicSettingsInnerConfig\n}\n\nexport interface PublicOIDCConfig {\n logo?: string\n name?: string\n uuid?: string\n}\n\nexport type GetPublicOIDCConfigResponse = PublicOIDCConfig[]\n", "import { ScimResource, ScimMeta } from 
\"scim-patch\"\nimport { ScimListResponse } from \"./shared\"\n\ntype BooleanString = boolean | \"True\" | \"true\" | \"False\" | \"false\"\n\ntype Emails =\n | {\n value: string\n type: \"work\"\n primary: boolean\n }[]\n\nexport interface ScimUserResponse extends ScimResource {\n schemas: [\"urn:ietf:params:scim:schemas:core:2.0:User\"]\n id: string\n externalId: string\n meta: ScimMeta & {\n resourceType: \"User\"\n }\n userName: string\n displayName?: string\n name?: {\n formatted?: string\n familyName?: string\n givenName?: string\n }\n active: BooleanString\n emails?: Emails\n}\n\nexport interface ScimCreateUserRequest {\n schemas: [\n \"urn:ietf:params:scim:schemas:core:2.0:User\",\n \"urn:ietf:params:scim:schemas:extension:enterprise:2.0:User\"\n ]\n externalId: string\n userName: string\n active: BooleanString\n emails?: Emails\n meta: {\n resourceType: \"User\"\n }\n displayName?: string\n name?: {\n formatted: string\n familyName: string\n givenName: string\n }\n roles: []\n}\n\nexport interface ScimUserListResponse\n extends ScimListResponse<ScimUserResponse> {}\n", "import { ScimResource, ScimMeta } from \"scim-patch\"\nimport { ScimListResponse } from \"./shared\"\n\nexport interface ScimGroupResponse extends ScimResource {\n schemas: [\"urn:ietf:params:scim:schemas:core:2.0:Group\"]\n id: string\n externalId: string\n displayName: string\n meta: ScimMeta & {\n resourceType: \"Group\"\n }\n members?: {\n value: string\n }[]\n}\n\nexport interface ScimCreateGroupRequest {\n schemas: [\n \"urn:ietf:params:scim:schemas:core:2.0:Group\",\n \"http://schemas.microsoft.com/2006/11/ResourceManagement/ADSCIM/2.0/Group\"\n ]\n externalId: string\n displayName: string\n meta: ScimMeta & {\n resourceType: \"Group\"\n }\n}\n\nexport interface ScimGroupListResponse\n extends ScimListResponse<ScimGroupResponse> {}\n", "import { ScimPatchOperation } from \"scim-patch\"\n\nexport interface ScimListResponse<T> {\n schemas: [\"urn:ietf:params:scim:api:messages:2.0:ListResponse\"]\n totalResults: number\n Resources: T[]\n startIndex: number\n itemsPerPage: number\n}\n\nexport interface ScimUpdateRequest {\n schemas: [\"urn:ietf:params:scim:api:messages:2.0:PatchOp\"]\n Operations: ScimPatchOperation[]\n}\n", "export * from \"./users\"\nexport * from \"./groups\"\nexport * from \"./shared\"\n", "// LICENSE KEY\n\nexport interface ActivateLicenseKeyRequest {\n licenseKey: string\n}\n\nexport interface GetLicenseKeyResponse {\n licenseKey: string\n}\n\n// OFFLINE LICENSE\n\nexport interface ActivateOfflineLicenseTokenRequest {\n offlineLicenseToken: string\n}\n\nexport interface GetOfflineLicenseTokenResponse {\n offlineLicenseToken: string\n}\n\n// IDENTIFIER\n\nexport interface GetOfflineIdentifierResponse {\n identifierBase64: string\n}\n", "export * from \"./environmentVariables\"\nexport * from \"./auditLogs\"\nexport * from \"./events\"\nexport * from \"./configs\"\nexport * from \"./scim\"\nexport * from \"./license\"\n", "export enum SortOrder {\n ASCENDING = \"ascending\",\n DESCENDING = \"descending\",\n}\n\nexport enum SortType {\n STRING = \"string\",\n number = \"number\",\n}\n\nexport interface BasicPaginationRequest {\n bookmark?: string\n}\n\nexport interface PaginationRequest extends BasicPaginationRequest {\n limit?: number\n sort?: {\n order: SortOrder\n column: string\n type: SortType\n }\n}\n\nexport interface PaginationResponse {\n bookmark: string | undefined\n hasNextPage: boolean\n}\n", "import { FieldType } from \"../../documents\"\nimport { EmptyFilterOption } from 
\"../../sdk\"\n\nexport type SearchFilter = {\n operator: keyof SearchQuery\n onEmptyFilter?: EmptyFilterOption\n field: string\n type?: FieldType\n value: any\n externalType?: string\n}\n\nexport enum SearchQueryOperators {\n STRING = \"string\",\n FUZZY = \"fuzzy\",\n RANGE = \"range\",\n EQUAL = \"equal\",\n NOT_EQUAL = \"notEqual\",\n EMPTY = \"empty\",\n NOT_EMPTY = \"notEmpty\",\n ONE_OF = \"oneOf\",\n CONTAINS = \"contains\",\n NOT_CONTAINS = \"notContains\",\n CONTAINS_ANY = \"containsAny\",\n}\n\nexport type SearchQuery = {\n allOr?: boolean\n onEmptyFilter?: EmptyFilterOption\n [SearchQueryOperators.STRING]?: {\n [key: string]: string\n }\n [SearchQueryOperators.FUZZY]?: {\n [key: string]: string\n }\n [SearchQueryOperators.RANGE]?: {\n [key: string]: {\n high: number | string\n low: number | string\n }\n }\n [SearchQueryOperators.EQUAL]?: {\n [key: string]: any\n }\n [SearchQueryOperators.NOT_EQUAL]?: {\n [key: string]: any\n }\n [SearchQueryOperators.EMPTY]?: {\n [key: string]: any\n }\n [SearchQueryOperators.NOT_EMPTY]?: {\n [key: string]: any\n }\n [SearchQueryOperators.ONE_OF]?: {\n [key: string]: any[]\n }\n [SearchQueryOperators.CONTAINS]?: {\n [key: string]: any[]\n }\n [SearchQueryOperators.NOT_CONTAINS]?: {\n [key: string]: any[]\n }\n [SearchQueryOperators.CONTAINS_ANY]?: {\n [key: string]: any[]\n }\n}\n\nexport type SearchQueryFields = Omit<SearchQuery, \"allOr\" | \"onEmptyFilter\">\n", "export interface DatasourceAuthCookie {\n appId: string\n provider: string\n}\n\nexport interface SessionCookie {\n sessionId: string\n userId: string\n}\n", "export * from \"./analytics\"\nexport * from \"./auth\"\nexport * from \"./user\"\nexport * from \"./errors\"\nexport * from \"./debug\"\nexport * from \"./schedule\"\nexport * from \"./system\"\nexport * from \"./app\"\nexport * from \"./global\"\nexport * from \"./pagination\"\nexport * from \"./searchFilter\"\nexport * from \"./cookies\"\n", "export * from \"./account\"\nexport * from \"./web\"\n", "export enum ServiceType {\n WORKER = \"worker\",\n APPS = \"apps\",\n}\n", "export * from \"./installation\"\n", "export type DeepPartial<T> = {\n [P in keyof T]?: T[P] extends object ? 
DeepPartial<T[P]> : T[P]\n}\n\nexport type ISO8601 = string\n\nexport type RequiredKeys<T> = {\n [K in keyof Required<T>]: T[K]\n}\n", "export * from \"./typeUtils\"\n", "export * from \"./documents\"\nexport * from \"./sdk\"\nexport * from \"./api\"\nexport * from \"./core\"\nexport * from \"./shared\"\n", "import { prefixed, DocumentType } from \"@budibase/types\"\n\nexport {\n SEPARATOR,\n UNICODE_MAX,\n DocumentType,\n InternalTable,\n} from \"@budibase/types\"\n\n/**\n * Can be used to create a few different forms of querying a view.\n */\nexport enum AutomationViewMode {\n ALL = \"all\",\n AUTOMATION = \"automation\",\n STATUS = \"status\",\n}\n\nexport enum ViewName {\n USER_BY_APP = \"by_app\",\n USER_BY_EMAIL = \"by_email2\",\n BY_API_KEY = \"by_api_key\",\n LINK = \"by_link\",\n ROUTING = \"screen_routes\",\n AUTOMATION_LOGS = \"automation_logs\",\n ACCOUNT_BY_EMAIL = \"account_by_email\",\n PLATFORM_USERS_LOWERCASE = \"platform_users_lowercase_2\",\n USER_BY_GROUP = \"user_by_group\",\n APP_BACKUP_BY_TRIGGER = \"by_trigger\",\n}\n\nexport const DeprecatedViews: Record<string, string[]> = {\n [ViewName.USER_BY_EMAIL]: [\n // removed due to inaccuracy in view doc filter logic\n \"by_email\",\n ],\n}\n\nexport const StaticDatabases = {\n GLOBAL: {\n name: \"global-db\",\n docs: {\n apiKeys: \"apikeys\",\n usageQuota: \"usage_quota\",\n licenseInfo: \"license_info\",\n environmentVariables: \"environmentvariables\",\n },\n },\n // contains information about tenancy and so on\n PLATFORM_INFO: {\n name: \"global-info\",\n docs: {\n tenants: \"tenants\",\n install: \"install\",\n },\n },\n AUDIT_LOGS: {\n name: \"audit-logs\",\n },\n}\n\nexport const APP_PREFIX = prefixed(DocumentType.APP)\nexport const APP_DEV = prefixed(DocumentType.APP_DEV)\nexport const APP_DEV_PREFIX = APP_DEV\nexport const BUDIBASE_DATASOURCE_TYPE = \"budibase\"\n", "export enum Header {\n API_KEY = \"x-budibase-api-key\",\n LICENSE_KEY = \"x-budibase-license-key\",\n API_VER = \"x-budibase-api-version\",\n APP_ID = \"x-budibase-app-id\",\n SESSION_ID = \"x-budibase-session-id\",\n TYPE = \"x-budibase-type\",\n PREVIEW_ROLE = \"x-budibase-role\",\n TENANT_ID = \"x-budibase-tenant-id\",\n VERIFICATION_CODE = \"x-budibase-verification-code\",\n RETURN_VERIFICATION_CODE = \"x-budibase-return-verification-code\",\n RESET_PASSWORD_CODE = \"x-budibase-reset-password-code\",\n RETURN_RESET_PASSWORD_CODE = \"x-budibase-return-reset-password-code\",\n TOKEN = \"x-budibase-token\",\n CSRF_TOKEN = \"x-csrf-token\",\n CORRELATION_ID = \"x-budibase-correlation-id\",\n AUTHORIZATION = \"authorization\",\n MIGRATING_APP = \"x-budibase-migrating-app\",\n}\n", "export * from \"./api\"\n\nexport const OperatorOptions = {\n Equals: {\n value: \"equal\",\n label: \"Equals\",\n },\n NotEquals: {\n value: \"notEqual\",\n label: \"Not equals\",\n },\n Empty: {\n value: \"empty\",\n label: \"Is empty\",\n },\n NotEmpty: {\n value: \"notEmpty\",\n label: \"Is not empty\",\n },\n StartsWith: {\n value: \"string\",\n label: \"Starts with\",\n },\n Like: {\n value: \"fuzzy\",\n label: \"Like\",\n },\n MoreThan: {\n value: \"rangeLow\",\n label: \"More than or equal to\",\n },\n LessThan: {\n value: \"rangeHigh\",\n label: \"Less than or equal to\",\n },\n Contains: {\n value: \"contains\",\n label: \"Contains\",\n },\n NotContains: {\n value: \"notContains\",\n label: \"Does not contain\",\n },\n In: {\n value: \"oneOf\",\n label: \"Is in\",\n },\n ContainsAny: {\n value: \"containsAny\",\n label: \"Has any\",\n },\n}\n\nexport const 
SqlNumberTypeRangeMap = {\n integer: {\n max: 2147483647,\n min: -2147483648,\n },\n int: {\n max: 2147483647,\n min: -2147483648,\n },\n smallint: {\n max: 32767,\n min: -32768,\n },\n mediumint: {\n max: 8388607,\n min: -8388608,\n },\n}\n\nexport enum SocketEvent {\n UserUpdate = \"UserUpdate\",\n UserDisconnect = \"UserDisconnect\",\n Heartbeat = \"Heartbeat\",\n}\n\nexport enum GridSocketEvent {\n RowChange = \"RowChange\",\n DatasourceChange = \"DatasourceChange\",\n SelectDatasource = \"SelectDatasource\",\n SelectCell = \"SelectCell\",\n}\n\nexport enum BuilderSocketEvent {\n SelectApp = \"SelectApp\",\n TableChange = \"TableChange\",\n DatasourceChange = \"DatasourceChange\",\n LockTransfer = \"LockTransfer\",\n ScreenChange = \"ScreenChange\",\n AppMetadataChange = \"AppMetadataChange\",\n SelectResource = \"SelectResource\",\n AppPublishChange = \"AppPublishChange\",\n AutomationChange = \"AutomationChange\",\n}\n\nexport const SocketSessionTTL = 60\nexport const ValidQueryNameRegex = /^[^()]*$/\nexport const ValidColumnNameRegex = /^[_a-zA-Z0-9\\s]*$/g\n\nexport const InvalidFileExtensions = [\n \"7z\",\n \"action\",\n \"apk\",\n \"app\",\n \"bat\",\n \"bin\",\n \"cab\",\n \"cmd\",\n \"com\",\n \"command\",\n \"cpl\",\n \"csh\",\n \"ex_\",\n \"exe\",\n \"gadget\",\n \"inf1\",\n \"ins\",\n \"inx\",\n \"ipa\",\n \"isu\",\n \"job\",\n \"js\",\n \"jse\",\n \"ksh\",\n \"lnk\",\n \"msc\",\n \"msi\",\n \"msp\",\n \"mst\",\n \"osx\",\n \"out\",\n \"paf\",\n \"php\",\n \"pif\",\n \"prg\",\n \"ps1\",\n \"reg\",\n \"rgs\",\n \"run\",\n \"scr\",\n \"sct\",\n \"shb\",\n \"shs\",\n \"tar\",\n \"u3p\",\n \"vb\",\n \"vbe\",\n \"vbs\",\n \"vbscript\",\n \"wasm\",\n \"workflow\",\n \"ws\",\n \"wsf\",\n \"wsh\",\n \"zip\",\n]\n", "!function(t,e){\"object\"==typeof exports&&\"undefined\"!=typeof module?module.exports=e():\"function\"==typeof define&&define.amd?define(e):(t=\"undefined\"!=typeof globalThis?globalThis:t||self).dayjs=e()}(this,(function(){\"use strict\";var t=1e3,e=6e4,n=36e5,r=\"millisecond\",i=\"second\",s=\"minute\",u=\"hour\",a=\"day\",o=\"week\",c=\"month\",f=\"quarter\",h=\"year\",d=\"date\",l=\"Invalid Date\",$=/^(\\d{4})[-/]?(\\d{1,2})?[-/]?(\\d{0,2})[Tt\\s]*(\\d{1,2})?:?(\\d{1,2})?:?(\\d{1,2})?[.:]?(\\d+)?$/,y=/\\[([^\\]]+)]|Y{1,4}|M{1,4}|D{1,2}|d{1,4}|H{1,2}|h{1,2}|a|A|m{1,2}|s{1,2}|Z{1,2}|SSS/g,M={name:\"en\",weekdays:\"Sunday_Monday_Tuesday_Wednesday_Thursday_Friday_Saturday\".split(\"_\"),months:\"January_February_March_April_May_June_July_August_September_October_November_December\".split(\"_\"),ordinal:function(t){var e=[\"th\",\"st\",\"nd\",\"rd\"],n=t%100;return\"[\"+t+(e[(n-20)%10]||e[n]||e[0])+\"]\"}},m=function(t,e,n){var r=String(t);return!r||r.length>=e?t:\"\"+Array(e+1-r.length).join(n)+t},v={s:m,z:function(t){var e=-t.utcOffset(),n=Math.abs(e),r=Math.floor(n/60),i=n%60;return(e<=0?\"+\":\"-\")+m(r,2,\"0\")+\":\"+m(i,2,\"0\")},m:function t(e,n){if(e.date()<n.date())return-t(n,e);var r=12*(n.year()-e.year())+(n.month()-e.month()),i=e.clone().add(r,c),s=n-i<0,u=e.clone().add(r+(s?-1:1),c);return+(-(r+(n-i)/(s?i-u:u-i))||0)},a:function(t){return t<0?Math.ceil(t)||0:Math.floor(t)},p:function(t){return{M:c,y:h,w:o,d:a,D:d,h:u,m:s,s:i,ms:r,Q:f}[t]||String(t||\"\").toLowerCase().replace(/s$/,\"\")},u:function(t){return void 0===t}},g=\"en\",D={};D[g]=M;var p=\"$isDayjsObject\",S=function(t){return t instanceof _||!(!t||!t[p])},w=function t(e,n,r){var i;if(!e)return g;if(\"string\"==typeof e){var s=e.toLowerCase();D[s]&&(i=s),n&&(D[s]=n,i=s);var 
u=e.split(\"-\");if(!i&&u.length>1)return t(u[0])}else{var a=e.name;D[a]=e,i=a}return!r&&i&&(g=i),i||!r&&g},O=function(t,e){if(S(t))return t.clone();var n=\"object\"==typeof e?e:{};return n.date=t,n.args=arguments,new _(n)},b=v;b.l=w,b.i=S,b.w=function(t,e){return O(t,{locale:e.$L,utc:e.$u,x:e.$x,$offset:e.$offset})};var _=function(){function M(t){this.$L=w(t.locale,null,!0),this.parse(t),this.$x=this.$x||t.x||{},this[p]=!0}var m=M.prototype;return m.parse=function(t){this.$d=function(t){var e=t.date,n=t.utc;if(null===e)return new Date(NaN);if(b.u(e))return new Date;if(e instanceof Date)return new Date(e);if(\"string\"==typeof e&&!/Z$/i.test(e)){var r=e.match($);if(r){var i=r[2]-1||0,s=(r[7]||\"0\").substring(0,3);return n?new Date(Date.UTC(r[1],i,r[3]||1,r[4]||0,r[5]||0,r[6]||0,s)):new Date(r[1],i,r[3]||1,r[4]||0,r[5]||0,r[6]||0,s)}}return new Date(e)}(t),this.init()},m.init=function(){var t=this.$d;this.$y=t.getFullYear(),this.$M=t.getMonth(),this.$D=t.getDate(),this.$W=t.getDay(),this.$H=t.getHours(),this.$m=t.getMinutes(),this.$s=t.getSeconds(),this.$ms=t.getMilliseconds()},m.$utils=function(){return b},m.isValid=function(){return!(this.$d.toString()===l)},m.isSame=function(t,e){var n=O(t);return this.startOf(e)<=n&&n<=this.endOf(e)},m.isAfter=function(t,e){return O(t)<this.startOf(e)},m.isBefore=function(t,e){return this.endOf(e)<O(t)},m.$g=function(t,e,n){return b.u(t)?this[e]:this.set(n,t)},m.unix=function(){return Math.floor(this.valueOf()/1e3)},m.valueOf=function(){return this.$d.getTime()},m.startOf=function(t,e){var n=this,r=!!b.u(e)||e,f=b.p(t),l=function(t,e){var i=b.w(n.$u?Date.UTC(n.$y,e,t):new Date(n.$y,e,t),n);return r?i:i.endOf(a)},$=function(t,e){return b.w(n.toDate()[t].apply(n.toDate(\"s\"),(r?[0,0,0,0]:[23,59,59,999]).slice(e)),n)},y=this.$W,M=this.$M,m=this.$D,v=\"set\"+(this.$u?\"UTC\":\"\");switch(f){case h:return r?l(1,0):l(31,11);case c:return r?l(1,M):l(0,M+1);case o:var g=this.$locale().weekStart||0,D=(y<g?y+7:y)-g;return l(r?m-D:m+(6-D),M);case a:case d:return $(v+\"Hours\",0);case u:return $(v+\"Minutes\",1);case s:return $(v+\"Seconds\",2);case i:return $(v+\"Milliseconds\",3);default:return this.clone()}},m.endOf=function(t){return this.startOf(t,!1)},m.$set=function(t,e){var n,o=b.p(t),f=\"set\"+(this.$u?\"UTC\":\"\"),l=(n={},n[a]=f+\"Date\",n[d]=f+\"Date\",n[c]=f+\"Month\",n[h]=f+\"FullYear\",n[u]=f+\"Hours\",n[s]=f+\"Minutes\",n[i]=f+\"Seconds\",n[r]=f+\"Milliseconds\",n)[o],$=o===a?this.$D+(e-this.$W):e;if(o===c||o===h){var y=this.clone().set(d,1);y.$d[l]($),y.init(),this.$d=y.set(d,Math.min(this.$D,y.daysInMonth())).$d}else l&&this.$d[l]($);return this.init(),this},m.set=function(t,e){return this.clone().$set(t,e)},m.get=function(t){return this[b.p(t)]()},m.add=function(r,f){var d,l=this;r=Number(r);var $=b.p(f),y=function(t){var e=O(l);return b.w(e.date(e.date()+Math.round(t*r)),l)};if($===c)return this.set(c,this.$M+r);if($===h)return this.set(h,this.$y+r);if($===a)return y(1);if($===o)return y(7);var M=(d={},d[s]=e,d[u]=n,d[i]=t,d)[$]||1,m=this.$d.getTime()+r*M;return b.w(m,this)},m.subtract=function(t,e){return this.add(-1*t,e)},m.format=function(t){var e=this,n=this.$locale();if(!this.isValid())return n.invalidDate||l;var r=t||\"YYYY-MM-DDTHH:mm:ssZ\",i=b.z(this),s=this.$H,u=this.$m,a=this.$M,o=n.weekdays,c=n.months,f=n.meridiem,h=function(t,n,i,s){return t&&(t[n]||t(e,r))||i[n].slice(0,s)},d=function(t){return b.s(s%12||12,t,\"0\")},$=f||function(t,e,n){var r=t<12?\"AM\":\"PM\";return n?r.toLowerCase():r};return r.replace(y,(function(t,r){return 
r||function(t){switch(t){case\"YY\":return String(e.$y).slice(-2);case\"YYYY\":return b.s(e.$y,4,\"0\");case\"M\":return a+1;case\"MM\":return b.s(a+1,2,\"0\");case\"MMM\":return h(n.monthsShort,a,c,3);case\"MMMM\":return h(c,a);case\"D\":return e.$D;case\"DD\":return b.s(e.$D,2,\"0\");case\"d\":return String(e.$W);case\"dd\":return h(n.weekdaysMin,e.$W,o,2);case\"ddd\":return h(n.weekdaysShort,e.$W,o,3);case\"dddd\":return o[e.$W];case\"H\":return String(s);case\"HH\":return b.s(s,2,\"0\");case\"h\":return d(1);case\"hh\":return d(2);case\"a\":return $(s,u,!0);case\"A\":return $(s,u,!1);case\"m\":return String(u);case\"mm\":return b.s(u,2,\"0\");case\"s\":return String(e.$s);case\"ss\":return b.s(e.$s,2,\"0\");case\"SSS\":return b.s(e.$ms,3,\"0\");case\"Z\":return i}return null}(t)||i.replace(\":\",\"\")}))},m.utcOffset=function(){return 15*-Math.round(this.$d.getTimezoneOffset()/15)},m.diff=function(r,d,l){var $,y=this,M=b.p(d),m=O(r),v=(m.utcOffset()-this.utcOffset())*e,g=this-m,D=function(){return b.m(y,m)};switch(M){case h:$=D()/12;break;case c:$=D();break;case f:$=D()/3;break;case o:$=(g-v)/6048e5;break;case a:$=(g-v)/864e5;break;case u:$=g/n;break;case s:$=g/e;break;case i:$=g/t;break;default:$=g}return l?$:b.a($)},m.daysInMonth=function(){return this.endOf(c).$D},m.$locale=function(){return D[this.$L]},m.locale=function(t,e){if(!t)return this.$L;var n=this.clone(),r=w(t,e,!0);return r&&(n.$L=r),n},m.clone=function(){return b.w(this.$d,this)},m.toDate=function(){return new Date(this.valueOf())},m.toJSON=function(){return this.isValid()?this.toISOString():null},m.toISOString=function(){return this.$d.toISOString()},m.toString=function(){return this.$d.toUTCString()},M}(),k=_.prototype;return O.prototype=k,[[\"$ms\",r],[\"$s\",i],[\"$m\",s],[\"$H\",u],[\"$W\",a],[\"$M\",c],[\"$y\",h],[\"$D\",d]].forEach((function(t){k[t[1]]=function(e){return this.$g(e,t[0],t[1])}})),O.extend=function(t,e){return t.$i||(t(e,_,O),t.$i=!0),O},O.locale=w,O.isDayjs=S,O.unix=function(t){return O(1e3*t)},O.en=D[g],O.Ls=D,O.p={},O}));", "import { User } from \"@budibase/types\"\n\n/**\n * Gets a key within an object. The key supports dot syntax for retrieving deep\n * fields - e.g. \"a.b.c\".\n * Exact matches of keys with dots in them take precedence over nested keys of\n * the same path - e.g. getting \"a.b\" from { \"a.b\": \"foo\", a: { b: \"bar\" } }\n * will return \"foo\" over \"bar\".\n * @param obj the object\n * @param key the key\n * @return the value or null if a value was not found for this key\n */\nexport const deepGet = (obj: { [x: string]: any }, key: string) => {\n if (!obj || !key) {\n return null\n }\n if (Object.prototype.hasOwnProperty.call(obj, key)) {\n return obj[key]\n }\n const split = key.split(\".\")\n for (let i = 0; i < split.length; i++) {\n obj = obj?.[split[i]]\n }\n return obj\n}\n\n/**\n * Gets the initials to show in a user avatar.\n * @param user the user\n */\nexport const getUserInitials = (user: User) => {\n if (!user) {\n return \"?\"\n }\n let initials = \"\"\n initials += user.firstName ? user.firstName[0] : \"\"\n initials += user.lastName ? 
user.lastName[0] : \"\"\n if (initials !== \"\") {\n return initials\n }\n return user.email?.[0] || \"U\"\n}\n\n/**\n * Gets a deterministic colour for a particular user\n * @param user the user\n */\nexport const getUserColor = (user: User) => {\n let id = user?._id\n if (!id) {\n return \"var(--spectrum-global-color-blue-400)\"\n }\n\n // In order to generate the same color for global users as app users, we need\n // to remove the app-specific table prefix\n id = id.replace(\"ro_ta_users_\", \"\")\n\n // Generate a hue based on the ID\n let hue = 1\n for (let i = 0; i < id.length; i++) {\n hue += id.charCodeAt(i)\n hue = hue % 36\n }\n return `hsl(${hue * 10}, 50%, 40%)`\n}\n\n/**\n * Gets a friendly label to describe who a user is.\n * @param user the user\n */\nexport const getUserLabel = (user: User) => {\n if (!user) {\n return \"\"\n }\n const { firstName, lastName, email } = user\n if (firstName && lastName) {\n return `${firstName} ${lastName}`\n } else if (firstName) {\n return firstName\n } else if (lastName) {\n return lastName\n } else {\n return email\n }\n}\n", "import { Datasource, SourceName } from \"@budibase/types\"\n\nexport function isGoogleSheets(type: SourceName) {\n return type === SourceName.GOOGLE_SHEETS\n}\n\nexport function isSQL(datasource: Datasource): boolean {\n if (!datasource || !datasource.source) {\n return false\n }\n const SQL = [\n SourceName.POSTGRES,\n SourceName.SQL_SERVER,\n SourceName.MYSQL,\n SourceName.ORACLE,\n ]\n return SQL.indexOf(datasource.source) !== -1 || datasource.isSQL === true\n}\n", "export * from \"./helpers\"\nexport * from \"./integrations\"\n", "import {\n Datasource,\n FieldSubtype,\n FieldType,\n SearchFilter,\n SearchQuery,\n SearchQueryFields,\n SearchQueryOperators,\n SortDirection,\n SortType,\n} from \"@budibase/types\"\nimport dayjs from \"dayjs\"\nimport { OperatorOptions, SqlNumberTypeRangeMap } from \"./constants\"\nimport { deepGet } from \"./helpers\"\n\nconst HBS_REGEX = /{{([^{].*?)}}/g\n\n/**\n * Returns the valid operator options for a certain data type\n */\nexport const getValidOperatorsForType = (\n fieldType: { type: FieldType; subtype?: FieldSubtype },\n field: string,\n datasource: Datasource & { tableId: any } // TODO: is this table id ever populated?\n) => {\n const Op = OperatorOptions\n const stringOps = [\n Op.Equals,\n Op.NotEquals,\n Op.StartsWith,\n Op.Like,\n Op.Empty,\n Op.NotEmpty,\n Op.In,\n ]\n const numOps = [\n Op.Equals,\n Op.NotEquals,\n Op.MoreThan,\n Op.LessThan,\n Op.Empty,\n Op.NotEmpty,\n Op.In,\n ]\n let ops: {\n value: string\n label: string\n }[] = []\n const { type, subtype } = fieldType\n if (type === FieldType.STRING) {\n ops = stringOps\n } else if (type === FieldType.NUMBER || type === FieldType.BIGINT) {\n ops = numOps\n } else if (type === FieldType.OPTIONS) {\n ops = [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty, Op.In]\n } else if (type === FieldType.ARRAY) {\n ops = [Op.Contains, Op.NotContains, Op.Empty, Op.NotEmpty, Op.ContainsAny]\n } else if (type === FieldType.BOOLEAN) {\n ops = [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty]\n } else if (type === FieldType.LONGFORM) {\n ops = stringOps\n } else if (type === FieldType.DATETIME) {\n ops = numOps\n } else if (type === FieldType.FORMULA) {\n ops = stringOps.concat([Op.MoreThan, Op.LessThan])\n } else if (type === FieldType.BB_REFERENCE && subtype == FieldSubtype.USER) {\n ops = [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty, Op.In]\n } else if (type === FieldType.BB_REFERENCE && subtype == FieldSubtype.USERS) {\n 
ops = [Op.Contains, Op.NotContains, Op.ContainsAny, Op.Empty, Op.NotEmpty]\n }\n\n // Only allow equal/not equal for _id in SQL tables\n const externalTable = datasource?.tableId?.includes(\"datasource_plus\")\n if (field === \"_id\" && externalTable) {\n ops = [Op.Equals, Op.NotEquals, Op.In]\n }\n\n return ops\n}\n\n/**\n * Operators which do not support empty strings as values\n */\nexport const NoEmptyFilterStrings = [\n OperatorOptions.StartsWith.value,\n OperatorOptions.Like.value,\n OperatorOptions.Equals.value,\n OperatorOptions.NotEquals.value,\n OperatorOptions.Contains.value,\n OperatorOptions.NotContains.value,\n] as (keyof SearchQueryFields)[]\n\n/**\n * Removes any fields that contain empty strings that would cause inconsistent\n * behaviour with how backend tables are filtered (no value means no filter).\n */\nconst cleanupQuery = (query: SearchQuery) => {\n if (!query) {\n return query\n }\n for (let filterField of NoEmptyFilterStrings) {\n if (!query[filterField]) {\n continue\n }\n\n for (let [key, value] of Object.entries(query[filterField]!)) {\n if (value == null || value === \"\") {\n delete query[filterField]![key]\n }\n }\n }\n return query\n}\n\n/**\n * Removes a numeric prefix on field names designed to give fields uniqueness\n */\nconst removeKeyNumbering = (key: string) => {\n if (typeof key === \"string\" && key.match(/\\d[0-9]*:/g) != null) {\n const parts = key.split(\":\")\n parts.shift()\n return parts.join(\":\")\n } else {\n return key\n }\n}\n\n/**\n * Builds a lucene JSON query from the filter structure generated in the builder\n * @param filter the builder filter structure\n */\nexport const buildLuceneQuery = (filter: SearchFilter[]) => {\n let query: SearchQuery = {\n string: {},\n fuzzy: {},\n range: {},\n equal: {},\n notEqual: {},\n empty: {},\n notEmpty: {},\n contains: {},\n notContains: {},\n oneOf: {},\n containsAny: {},\n }\n\n if (!Array.isArray(filter)) {\n return query\n }\n\n filter.forEach(expression => {\n let { operator, field, type, value, externalType, onEmptyFilter } =\n expression\n const isHbs =\n typeof value === \"string\" && (value.match(HBS_REGEX) || []).length > 0\n // Parse all values into correct types\n if (operator === \"allOr\") {\n query.allOr = true\n return\n }\n if (onEmptyFilter) {\n query.onEmptyFilter = onEmptyFilter\n return\n }\n if (\n type === \"datetime\" &&\n !isHbs &&\n operator !== \"empty\" &&\n operator !== \"notEmpty\"\n ) {\n // Ensure date value is a valid date and parse into correct format\n if (!value) {\n return\n }\n try {\n value = new Date(value).toISOString()\n } catch (error) {\n return\n }\n }\n if (type === \"number\" && typeof value === \"string\" && !isHbs) {\n if (operator === \"oneOf\") {\n value = value.split(\",\").map(item => parseFloat(item))\n } else {\n value = parseFloat(value)\n }\n }\n if (type === \"boolean\") {\n value = `${value}`?.toLowerCase() === \"true\"\n }\n if (\n [\"contains\", \"notContains\", \"containsAny\"].includes(operator) &&\n type === \"array\" &&\n typeof value === \"string\"\n ) {\n value = value.split(\",\")\n }\n if (operator.startsWith(\"range\") && query.range) {\n const minint =\n SqlNumberTypeRangeMap[\n externalType as keyof typeof SqlNumberTypeRangeMap\n ]?.min || Number.MIN_SAFE_INTEGER\n const maxint =\n SqlNumberTypeRangeMap[\n externalType as keyof typeof SqlNumberTypeRangeMap\n ]?.max || Number.MAX_SAFE_INTEGER\n if (!query.range[field]) {\n query.range[field] = {\n low: type === \"number\" ? 
minint : \"0000-00-00T00:00:00.000Z\",\n high: type === \"number\" ? maxint : \"9999-00-00T00:00:00.000Z\",\n }\n }\n if ((operator as any) === \"rangeLow\" && value != null && value !== \"\") {\n query.range[field].low = value\n } else if (\n (operator as any) === \"rangeHigh\" &&\n value != null &&\n value !== \"\"\n ) {\n query.range[field].high = value\n }\n } else if (query[operator] && operator !== \"onEmptyFilter\") {\n if (type === \"boolean\") {\n // Transform boolean filters to cope with null.\n // \"equals false\" needs to be \"not equals true\"\n // \"not equals false\" needs to be \"equals true\"\n if (operator === \"equal\" && value === false) {\n query.notEqual = query.notEqual || {}\n query.notEqual[field] = true\n } else if (operator === \"notEqual\" && value === false) {\n query.equal = query.equal || {}\n query.equal[field] = true\n } else {\n query[operator] = query[operator] || {}\n query[operator]![field] = value\n }\n } else {\n query[operator] = query[operator] || {}\n query[operator]![field] = value\n }\n }\n })\n\n return query\n}\n\n/**\n * Performs a client-side lucene search on an array of data\n * @param docs the data\n * @param query the JSON lucene query\n */\nexport const runLuceneQuery = (docs: any[], query?: SearchQuery) => {\n if (!docs || !Array.isArray(docs)) {\n return []\n }\n if (!query) {\n return docs\n }\n\n // Make query consistent first\n query = cleanupQuery(query)\n\n // Iterates over a set of filters and evaluates a fail function against a doc\n const match =\n (\n type: keyof SearchQueryFields,\n failFn: (docValue: any, testValue: any) => boolean\n ) =>\n (doc: any) => {\n const filters = Object.entries(query![type] || {})\n for (let i = 0; i < filters.length; i++) {\n const [key, testValue] = filters[i]\n const docValue = deepGet(doc, removeKeyNumbering(key))\n if (failFn(docValue, testValue)) {\n return false\n }\n }\n return true\n }\n\n // Process a string match (fails if the value does not start with the string)\n const stringMatch = match(\n SearchQueryOperators.STRING,\n (docValue: string, testValue: string) => {\n return (\n !docValue ||\n !docValue?.toLowerCase().startsWith(testValue?.toLowerCase())\n )\n }\n )\n\n // Process a fuzzy match (treat the same as starts with when running locally)\n const fuzzyMatch = match(\n SearchQueryOperators.FUZZY,\n (docValue: string, testValue: string) => {\n return (\n !docValue ||\n !docValue?.toLowerCase().startsWith(testValue?.toLowerCase())\n )\n }\n )\n\n // Process a range match\n const rangeMatch = match(\n SearchQueryOperators.RANGE,\n (\n docValue: string | number | null,\n testValue: { low: number; high: number }\n ) => {\n if (docValue == null || docValue === \"\") {\n return true\n }\n if (!isNaN(+docValue)) {\n return +docValue < testValue.low || +docValue > testValue.high\n }\n if (dayjs(docValue).isValid()) {\n return (\n new Date(docValue).getTime() < new Date(testValue.low).getTime() ||\n new Date(docValue).getTime() > new Date(testValue.high).getTime()\n )\n }\n return false\n }\n )\n\n // Process an equal match (fails if the value is different)\n const equalMatch = match(\n SearchQueryOperators.EQUAL,\n (docValue: any, testValue: string | null) => {\n return testValue != null && testValue !== \"\" && docValue !== testValue\n }\n )\n\n // Process a not-equal match (fails if the value is the same)\n const notEqualMatch = match(\n SearchQueryOperators.NOT_EQUAL,\n (docValue: any, testValue: string | null) => {\n return testValue != null && testValue !== \"\" && docValue === 
testValue\n }\n )\n\n // Process an empty match (fails if the value is not empty)\n const emptyMatch = match(\n SearchQueryOperators.EMPTY,\n (docValue: string | null) => {\n return docValue != null && docValue !== \"\"\n }\n )\n\n // Process a not-empty match (fails is the value is empty)\n const notEmptyMatch = match(\n SearchQueryOperators.NOT_EMPTY,\n (docValue: string | null) => {\n return docValue == null || docValue === \"\"\n }\n )\n\n // Process an includes match (fails if the value is not included)\n const oneOf = match(\n SearchQueryOperators.ONE_OF,\n (docValue: any, testValue: any) => {\n if (typeof testValue === \"string\") {\n testValue = testValue.split(\",\")\n if (typeof docValue === \"number\") {\n testValue = testValue.map((item: string) => parseFloat(item))\n }\n }\n return !testValue?.includes(docValue)\n }\n )\n\n const containsAny = match(\n SearchQueryOperators.CONTAINS_ANY,\n (docValue: any, testValue: any) => {\n return !docValue?.includes(...testValue)\n }\n )\n\n const contains = match(\n SearchQueryOperators.CONTAINS,\n (docValue: string | any[], testValue: any[]) => {\n return !testValue?.every((item: any) => docValue?.includes(item))\n }\n )\n\n const notContains = match(\n SearchQueryOperators.NOT_CONTAINS,\n (docValue: string | any[], testValue: any[]) => {\n return testValue?.every((item: any) => docValue?.includes(item))\n }\n )\n\n // Match a document against all criteria\n const docMatch = (doc: any) => {\n return (\n stringMatch(doc) &&\n fuzzyMatch(doc) &&\n rangeMatch(doc) &&\n equalMatch(doc) &&\n notEqualMatch(doc) &&\n emptyMatch(doc) &&\n notEmptyMatch(doc) &&\n oneOf(doc) &&\n contains(doc) &&\n containsAny(doc) &&\n notContains(doc)\n )\n }\n\n // Process all docs\n return docs.filter(docMatch)\n}\n\n/**\n * Performs a client-side sort from the equivalent server-side lucene sort\n * parameters.\n * @param docs the data\n * @param sort the sort column\n * @param sortOrder the sort order (\"ascending\" or \"descending\")\n * @param sortType the type of sort (\"string\" or \"number\")\n */\nexport const luceneSort = (\n docs: any[],\n sort: string,\n sortOrder: SortDirection,\n sortType = SortType.STRING\n) => {\n if (!sort || !sortOrder || !sortType) {\n return docs\n }\n const parse =\n sortType === \"string\" ? (x: any) => `${x}` : (x: string) => parseFloat(x)\n return docs\n .slice()\n .sort((a: { [x: string]: any }, b: { [x: string]: any }) => {\n const colA = parse(a[sort])\n const colB = parse(b[sort])\n if (sortOrder.toLowerCase() === \"descending\") {\n return colA > colB ? -1 : 1\n } else {\n return colA > colB ? 
1 : -1\n }\n })\n}\n\n/**\n * Limits the specified docs to the specified number of rows from the equivalent\n * server-side lucene limit parameters.\n * @param docs the data\n * @param limit the number of docs to limit to\n */\nexport const luceneLimit = (docs: any[], limit: string) => {\n const numLimit = parseFloat(limit)\n if (isNaN(numLimit)) {\n return docs\n }\n return docs.slice(0, numLimit)\n}\n\nexport const hasFilters = (query?: SearchQuery) => {\n if (!query) {\n return false\n }\n const skipped = [\"allOr\", \"onEmptyFilter\"]\n for (let [key, value] of Object.entries(query)) {\n if (skipped.includes(key) || typeof value !== \"object\") {\n continue\n }\n if (Object.keys(value || {}).length !== 0) {\n return true\n }\n }\n return false\n}\n", "import * as Constants from \"./constants\"\n\nexport function unreachable(\n value: never,\n message = `No such case in exhaustive switch: ${value}`\n) {\n throw new Error(message)\n}\n\nexport async function parallelForeach<T>(\n items: T[],\n task: (item: T) => Promise<void>,\n maxConcurrency: number\n): Promise<void> {\n const promises: Promise<void>[] = []\n let index = 0\n\n const processItem = async (item: T) => {\n try {\n await task(item)\n } finally {\n processNext()\n }\n }\n\n const processNext = () => {\n if (index >= items.length) {\n // No more items to process\n return\n }\n\n const item = items[index]\n index++\n\n const promise = processItem(item)\n promises.push(promise)\n\n if (promises.length >= maxConcurrency) {\n Promise.race(promises).then(processNext)\n } else {\n processNext()\n }\n }\n processNext()\n\n await Promise.all(promises)\n}\n\nexport function filterValueToLabel() {\n return Object.keys(Constants.OperatorOptions).reduce(\n (acc: { [key: string]: string }, key: string) => {\n const ops: { [key: string]: any } = Constants.OperatorOptions\n const op: { [key: string]: string } = ops[key]\n acc[op[\"value\"]] = op.label\n return acc\n },\n {}\n )\n}\n", "import { DocumentType, prefixed } from \"@budibase/types\"\n\nconst APP_PREFIX = prefixed(DocumentType.APP)\nconst APP_DEV_PREFIX = prefixed(DocumentType.APP_DEV)\n\nexport function getDevAppID(appId: string) {\n if (!appId) {\n throw new Error(\"No app ID provided\")\n }\n if (appId.startsWith(APP_DEV_PREFIX)) {\n return appId\n }\n // split to take off the app_ element, then join it together incase any other app_ exist\n const split = appId.split(APP_PREFIX)\n split.shift()\n const rest = split.join(APP_PREFIX)\n return `${APP_DEV_PREFIX}${rest}`\n}\n\n/**\n * Convert a development app ID to a deployed app ID.\n */\nexport function getProdAppID(appId: string) {\n if (!appId) {\n throw new Error(\"No app ID provided\")\n }\n if (!appId.startsWith(APP_DEV_PREFIX)) {\n return appId\n }\n // split to take off the app_dev element, then join it together incase any other app_ exist\n const split = appId.split(APP_DEV_PREFIX)\n split.shift()\n const rest = split.join(APP_DEV_PREFIX)\n return `${APP_PREFIX}${rest}`\n}\n", "import {\n ContextUser,\n DocumentType,\n SEPARATOR,\n User,\n InternalTable,\n} from \"@budibase/types\"\nimport { getProdAppID } from \"./applications\"\nimport * as _ from \"lodash/fp\"\n\n// checks if a user is specifically a builder, given an app ID\nexport function isBuilder(user: User | ContextUser, appId?: string): boolean {\n if (!user) {\n return false\n }\n if (user.builder?.global) {\n return true\n } else if (appId && user.builder?.apps?.includes(getProdAppID(appId))) {\n return true\n }\n return false\n}\n\nexport function 
isGlobalBuilder(user: User | ContextUser): boolean {\n return (isBuilder(user) && !hasAppBuilderPermissions(user)) || isAdmin(user)\n}\n\nexport function canCreateApps(user: User | ContextUser): boolean {\n return isGlobalBuilder(user) || hasCreatorPermissions(user)\n}\n\n// alias for hasAdminPermission, currently do the same thing\n// in future whether someone has admin permissions and whether they are\n// an admin for a specific resource could be separated\nexport function isAdmin(user: User | ContextUser): boolean {\n if (!user) {\n return false\n }\n return hasAdminPermissions(user)\n}\n\nexport function isAdminOrBuilder(\n user: User | ContextUser,\n appId?: string\n): boolean {\n return isBuilder(user, appId) || isAdmin(user)\n}\n\nexport function isAdminOrGlobalBuilder(\n user: User | ContextUser,\n appId?: string\n): boolean {\n return isGlobalBuilder(user) || isAdmin(user)\n}\n\n// check if they are a builder within an app (not necessarily a global builder)\nexport function hasAppBuilderPermissions(user?: User | ContextUser): boolean {\n if (!user) {\n return false\n }\n const appLength = user.builder?.apps?.length\n const isGlobalBuilder = !!user.builder?.global\n return !isGlobalBuilder && appLength != null && appLength > 0\n}\n\nexport function hasAppCreatorPermissions(user?: User | ContextUser): boolean {\n if (!user) {\n return false\n }\n return _.flow(\n _.get(\"roles\"),\n _.values,\n _.find(x => x === \"CREATOR\"),\n x => !!x\n )(user)\n}\n\n// checks if a user is capable of building any app\nexport function hasBuilderPermissions(user?: User | ContextUser): boolean {\n if (!user) {\n return false\n }\n return (\n user.builder?.global ||\n hasAppBuilderPermissions(user) ||\n hasCreatorPermissions(user)\n )\n}\n\n// checks if a user is capable of being an admin\nexport function hasAdminPermissions(user?: User | ContextUser): boolean {\n if (!user) {\n return false\n }\n return !!user.admin?.global\n}\n\nexport function hasCreatorPermissions(user?: User | ContextUser): boolean {\n if (!user) {\n return false\n }\n return !!user.builder?.creator\n}\n\nexport function isCreator(user?: User | ContextUser): boolean {\n if (!user) {\n return false\n }\n return (\n isGlobalBuilder(user!) 
||\n hasAdminPermissions(user) ||\n hasCreatorPermissions(user) ||\n hasAppBuilderPermissions(user) ||\n hasAppCreatorPermissions(user)\n )\n}\n\nexport function getGlobalUserID(userId?: string): string | undefined {\n if (typeof userId !== \"string\") {\n return userId\n }\n const prefix = `${DocumentType.ROW}${SEPARATOR}${InternalTable.USER_METADATA}${SEPARATOR}`\n if (!userId.startsWith(prefix)) {\n return userId\n }\n return userId.split(prefix)[1]\n}\n\nexport function containsUserID(value: string | undefined): boolean {\n if (typeof value !== \"string\") {\n return false\n }\n return value.includes(`${DocumentType.USER}${SEPARATOR}`)\n}\n", "export * as applications from \"./applications\"\nexport * as users from \"./users\"\n", "export * from \"./documents\"\n", "import { FieldType } from \"@budibase/types\"\n\nconst allowDisplayColumnByType: Record<FieldType, boolean> = {\n [FieldType.STRING]: true,\n [FieldType.LONGFORM]: true,\n [FieldType.OPTIONS]: true,\n [FieldType.NUMBER]: true,\n [FieldType.DATETIME]: true,\n [FieldType.FORMULA]: true,\n [FieldType.AUTO]: true,\n [FieldType.INTERNAL]: true,\n [FieldType.BARCODEQR]: true,\n [FieldType.BIGINT]: true,\n\n [FieldType.BOOLEAN]: false,\n [FieldType.ARRAY]: false,\n [FieldType.ATTACHMENT]: false,\n [FieldType.LINK]: false,\n [FieldType.JSON]: false,\n [FieldType.BB_REFERENCE]: false,\n}\n\nconst allowSortColumnByType: Record<FieldType, boolean> = {\n [FieldType.STRING]: true,\n [FieldType.LONGFORM]: true,\n [FieldType.OPTIONS]: true,\n [FieldType.NUMBER]: true,\n [FieldType.DATETIME]: true,\n [FieldType.AUTO]: true,\n [FieldType.INTERNAL]: true,\n [FieldType.BARCODEQR]: true,\n [FieldType.BIGINT]: true,\n [FieldType.BOOLEAN]: true,\n [FieldType.JSON]: true,\n\n [FieldType.FORMULA]: false,\n [FieldType.ATTACHMENT]: false,\n [FieldType.ARRAY]: false,\n [FieldType.LINK]: false,\n [FieldType.BB_REFERENCE]: false,\n}\n\nexport function canBeDisplayColumn(type: FieldType): boolean {\n return !!allowDisplayColumnByType[type]\n}\n\nexport function canBeSortColumn(type: FieldType): boolean {\n return !!allowSortColumnByType[type]\n}\n", "export * from \"./constants\"\nexport * as dataFilters from \"./filters\"\nexport * as helpers from \"./helpers\"\nexport * as utils from \"./utils\"\nexport * as sdk from \"./sdk\"\nexport * from \"./table\"\n", "export enum UserStatus {\n ACTIVE = \"active\",\n INACTIVE = \"inactive\",\n}\n\nexport enum Cookie {\n Auth = \"budibase:auth\",\n Init = \"budibase:init\",\n ACCOUNT_RETURN_URL = \"budibase:account:returnurl\",\n DatasourceAuth = \"budibase:datasourceauth\",\n OIDC_CONFIG = \"budibase:oidc:config\",\n}\n\nexport { Header } from \"@budibase/shared-core\"\n\nexport enum GlobalRole {\n OWNER = \"owner\",\n ADMIN = \"admin\",\n BUILDER = \"builder\",\n WORKSPACE_MANAGER = \"workspace_manager\",\n}\n\nexport enum Config {\n SETTINGS = \"settings\",\n ACCOUNT = \"account\",\n SMTP = \"smtp\",\n GOOGLE = \"google\",\n OIDC = \"oidc\",\n OIDC_LOGOS = \"logos_oidc\",\n SCIM = \"scim\",\n}\n\nexport const MIN_VALID_DATE = new Date(-2147483647000)\nexport const MAX_VALID_DATE = new Date(2147483647000)\nexport const DEFAULT_TENANT_ID = \"default\"\n", "export * from \"./db\"\nexport * from \"./misc\"\n", "import {\n IdentityContext,\n IdentityType,\n User,\n isCloudAccount,\n Account,\n AccountUserContext,\n UserContext,\n Ctx,\n} from \"@budibase/types\"\nimport * as context from \".\"\n\nexport function getIdentity(): IdentityContext | undefined {\n return context.getIdentity()\n}\n\nexport function 
doInIdentityContext(identity: IdentityContext, task: any) {\n return context.doInIdentityContext(identity, task)\n}\n\n// used in server/worker\nexport function doInUserContext(user: User, ctx: Ctx, task: any) {\n const userContext: UserContext = {\n ...user,\n _id: user._id as string,\n type: IdentityType.USER,\n hostInfo: {\n ipAddress: ctx.request.ip,\n // filled in by koa-useragent package\n userAgent: ctx.userAgent._agent.source,\n },\n }\n return doInIdentityContext(userContext, task)\n}\n\n// used in account portal\nexport function doInAccountContext(account: Account, task: any) {\n const _id = getAccountUserId(account)\n const tenantId = account.tenantId\n const accountContext: AccountUserContext = {\n _id,\n type: IdentityType.USER,\n tenantId,\n account,\n }\n return doInIdentityContext(accountContext, task)\n}\n\nexport function getAccountUserId(account: Account) {\n let userId: string\n if (isCloudAccount(account)) {\n userId = account.budibaseUserId\n } else {\n // use account id as user id for self-hosting\n userId = account.accountId\n }\n return userId\n}\n", "import { existsSync, readFileSync } from \"fs\"\nimport { ServiceType } from \"@budibase/types\"\n\nfunction isTest() {\n return isJest()\n}\n\nfunction isJest() {\n return (\n process.env.NODE_ENV === \"jest\" ||\n (process.env.JEST_WORKER_ID != null &&\n process.env.JEST_WORKER_ID !== \"null\")\n )\n}\n\nfunction isDev() {\n return process.env.NODE_ENV !== \"production\"\n}\n\nlet LOADED = false\nif (!LOADED && isDev() && !isTest()) {\n require(\"dotenv\").config()\n LOADED = true\n}\n\nconst DefaultBucketName = {\n BACKUPS: \"backups\",\n APPS: \"prod-budi-app-assets\",\n TEMPLATES: \"templates\",\n GLOBAL: \"global\",\n PLUGINS: \"plugins\",\n}\n\nconst selfHosted = !!parseInt(process.env.SELF_HOSTED || \"\")\n\nfunction getAPIEncryptionKey() {\n return process.env.API_ENCRYPTION_KEY\n ? 
process.env.API_ENCRYPTION_KEY\n : process.env.JWT_SECRET // fallback to the JWT_SECRET used historically\n}\n\nfunction httpLogging() {\n if (process.env.HTTP_LOGGING === undefined) {\n // on by default unless otherwise specified\n return true\n }\n\n return process.env.HTTP_LOGGING\n}\n\nfunction getPackageJsonFields(): {\n VERSION: string\n SERVICE_NAME: string\n} {\n function findFileInAncestors(\n fileName: string,\n currentDir: string\n ): string | null {\n const filePath = `${currentDir}/${fileName}`\n if (existsSync(filePath)) {\n return filePath\n }\n\n const parentDir = `${currentDir}/..`\n if (parentDir === currentDir) {\n // reached root directory\n return null\n }\n\n return findFileInAncestors(fileName, parentDir)\n }\n\n try {\n const packageJsonFile = findFileInAncestors(\"package.json\", process.cwd())\n const content = readFileSync(packageJsonFile!, \"utf-8\")\n const parsedContent = JSON.parse(content)\n return {\n VERSION: process.env.BUDIBASE_VERSION || parsedContent.version,\n SERVICE_NAME: parsedContent.name,\n }\n } catch {\n // throwing an error here is confusing/causes backend-core to be hard to import\n return { VERSION: process.env.BUDIBASE_VERSION || \"\", SERVICE_NAME: \"\" }\n }\n}\n\nfunction isWorker() {\n return environment.SERVICE_TYPE === ServiceType.WORKER\n}\n\nfunction isApps() {\n return environment.SERVICE_TYPE === ServiceType.APPS\n}\n\nconst environment = {\n isTest,\n isJest,\n isDev,\n isWorker,\n isApps,\n isProd: () => {\n return !isDev()\n },\n JS_BCRYPT: process.env.JS_BCRYPT,\n JWT_SECRET: process.env.JWT_SECRET,\n JWT_SECRET_FALLBACK: process.env.JWT_SECRET_FALLBACK,\n ENCRYPTION_KEY: process.env.ENCRYPTION_KEY,\n API_ENCRYPTION_KEY: getAPIEncryptionKey(),\n COUCH_DB_URL: process.env.COUCH_DB_URL || \"http://localhost:4005\",\n COUCH_DB_SQL_URL: process.env.COUCH_DB_SQL_URL || \"http://localhost:4984\",\n COUCH_DB_USERNAME: process.env.COUCH_DB_USER,\n COUCH_DB_PASSWORD: process.env.COUCH_DB_PASSWORD,\n GOOGLE_CLIENT_ID: process.env.GOOGLE_CLIENT_ID,\n GOOGLE_CLIENT_SECRET: process.env.GOOGLE_CLIENT_SECRET,\n SALT_ROUNDS: process.env.SALT_ROUNDS,\n REDIS_URL: process.env.REDIS_URL || \"localhost:6379\",\n REDIS_PASSWORD: process.env.REDIS_PASSWORD,\n REDIS_CLUSTERED: process.env.REDIS_CLUSTERED,\n MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,\n MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,\n AWS_REGION: process.env.AWS_REGION,\n MINIO_URL: process.env.MINIO_URL,\n MINIO_ENABLED: process.env.MINIO_ENABLED || 1,\n INTERNAL_API_KEY: process.env.INTERNAL_API_KEY,\n INTERNAL_API_KEY_FALLBACK: process.env.INTERNAL_API_KEY_FALLBACK,\n MULTI_TENANCY: process.env.MULTI_TENANCY,\n ACCOUNT_PORTAL_URL:\n process.env.ACCOUNT_PORTAL_URL || \"https://account.budibase.app\",\n ACCOUNT_PORTAL_API_KEY: process.env.ACCOUNT_PORTAL_API_KEY || \"\",\n DISABLE_ACCOUNT_PORTAL: process.env.DISABLE_ACCOUNT_PORTAL,\n SELF_HOSTED: selfHosted,\n COOKIE_DOMAIN: process.env.COOKIE_DOMAIN,\n PLATFORM_URL: process.env.PLATFORM_URL || \"\",\n POSTHOG_TOKEN: process.env.POSTHOG_TOKEN,\n ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,\n TENANT_FEATURE_FLAGS: process.env.TENANT_FEATURE_FLAGS,\n CLOUDFRONT_CDN: process.env.CLOUDFRONT_CDN,\n CLOUDFRONT_PRIVATE_KEY_64: process.env.CLOUDFRONT_PRIVATE_KEY_64,\n CLOUDFRONT_PUBLIC_KEY_ID: process.env.CLOUDFRONT_PUBLIC_KEY_ID,\n BACKUPS_BUCKET_NAME:\n process.env.BACKUPS_BUCKET_NAME || DefaultBucketName.BACKUPS,\n APPS_BUCKET_NAME: process.env.APPS_BUCKET_NAME || DefaultBucketName.APPS,\n TEMPLATES_BUCKET_NAME:\n 
process.env.TEMPLATES_BUCKET_NAME || DefaultBucketName.TEMPLATES,\n GLOBAL_BUCKET_NAME:\n process.env.GLOBAL_BUCKET_NAME || DefaultBucketName.GLOBAL,\n PLUGIN_BUCKET_NAME:\n process.env.PLUGIN_BUCKET_NAME || DefaultBucketName.PLUGINS,\n USE_COUCH: process.env.USE_COUCH || true,\n MOCK_REDIS: process.env.MOCK_REDIS,\n DEFAULT_LICENSE: process.env.DEFAULT_LICENSE,\n SERVICE: process.env.SERVICE || \"budibase\",\n LOG_LEVEL: process.env.LOG_LEVEL || \"info\",\n SESSION_UPDATE_PERIOD: process.env.SESSION_UPDATE_PERIOD,\n DEPLOYMENT_ENVIRONMENT:\n process.env.DEPLOYMENT_ENVIRONMENT || \"docker-compose\",\n HTTP_LOGGING: httpLogging(),\n ENABLE_AUDIT_LOG_IP_ADDR: process.env.ENABLE_AUDIT_LOG_IP_ADDR,\n // smtp\n SMTP_FALLBACK_ENABLED: process.env.SMTP_FALLBACK_ENABLED,\n SMTP_USER: process.env.SMTP_USER,\n SMTP_PASSWORD: process.env.SMTP_PASSWORD,\n SMTP_HOST: process.env.SMTP_HOST,\n SMTP_PORT: parseInt(process.env.SMTP_PORT || \"\"),\n SMTP_FROM_ADDRESS: process.env.SMTP_FROM_ADDRESS,\n DISABLE_JWT_WARNING: process.env.DISABLE_JWT_WARNING,\n BLACKLIST_IPS: process.env.BLACKLIST_IPS,\n SERVICE_TYPE: \"unknown\",\n PASSWORD_MIN_LENGTH: process.env.PASSWORD_MIN_LENGTH,\n PASSWORD_MAX_LENGTH: process.env.PASSWORD_MAX_LENGTH,\n /**\n * Enable to allow an admin user to login using a password.\n * This can be useful to prevent lockout when configuring SSO.\n * However, this should be turned OFF by default for security purposes.\n */\n ENABLE_SSO_MAINTENANCE_MODE: selfHosted\n ? process.env.ENABLE_SSO_MAINTENANCE_MODE\n : false,\n ...getPackageJsonFields(),\n DISABLE_PINO_LOGGER: process.env.DISABLE_PINO_LOGGER,\n OFFLINE_MODE: process.env.OFFLINE_MODE,\n _set(key: any, value: any) {\n process.env[key] = value\n // @ts-ignore\n environment[key] = value\n },\n ROLLING_LOG_MAX_SIZE: process.env.ROLLING_LOG_MAX_SIZE || \"10M\",\n}\n\n// clean up any environment variable edge cases\nfor (let [key, value] of Object.entries(environment)) {\n // handle the edge case of \"0\" to disable an environment variable\n if (value === \"0\") {\n // @ts-ignore\n environment[key] = 0\n }\n // handle the edge case of \"false\" to disable an environment variable\n if (value === \"false\") {\n // @ts-ignore\n environment[key] = 0\n }\n}\n\nexport default environment\n", "import { AsyncLocalStorage } from \"async_hooks\"\nimport { ContextMap } from \"./types\"\n\nexport default class Context {\n static storage = new AsyncLocalStorage<ContextMap>()\n\n static run<T>(context: ContextMap, func: () => T) {\n return Context.storage.run(context, () => func())\n }\n\n static get(): ContextMap {\n return Context.storage.getStore() as ContextMap\n }\n}\n", "import { APP_DEV_PREFIX, APP_PREFIX } from \"../constants\"\nimport { App } from \"@budibase/types\"\n\nconst NO_APP_ERROR = \"No app provided\"\n\nexport function isDevAppID(appId?: string) {\n if (!appId) {\n throw NO_APP_ERROR\n }\n return appId.startsWith(APP_DEV_PREFIX)\n}\n\nexport function isProdAppID(appId?: string) {\n if (!appId) {\n throw NO_APP_ERROR\n }\n return appId.startsWith(APP_PREFIX) && !isDevAppID(appId)\n}\n\nexport function isDevApp(app: App) {\n if (!app) {\n throw NO_APP_ERROR\n }\n return isDevAppID(app.appId)\n}\n\n/**\n * Generates a development app ID from a real app ID.\n * @returns the dev app ID which can be used for dev database.\n */\nexport function getDevelopmentAppID(appId: string) {\n if (!appId || appId.startsWith(APP_DEV_PREFIX)) {\n return appId\n }\n // split to take off the app_ element, then join it together incase any other app_ 
exist\n const split = appId.split(APP_PREFIX)\n split.shift()\n const rest = split.join(APP_PREFIX)\n return `${APP_DEV_PREFIX}${rest}`\n}\nexport const getDevAppID = getDevelopmentAppID\n\n/**\n * Convert a development app ID to a deployed app ID.\n */\nexport function getProdAppID(appId: string) {\n if (!appId || !appId.startsWith(APP_DEV_PREFIX)) {\n return appId\n }\n // split to take off the app_dev element, then join it together incase any other app_ exist\n const split = appId.split(APP_DEV_PREFIX)\n split.shift()\n const rest = split.join(APP_DEV_PREFIX)\n return `${APP_PREFIX}${rest}`\n}\n\nexport function extractAppUUID(id: string) {\n const split = id?.split(\"_\") || []\n return split.length ? split[split.length - 1] : null\n}\n", "import env from \"../../environment\"\n\nexport const getCouchInfo = (connection?: string) => {\n const urlInfo = getUrlInfo(connection)\n let username\n let password\n if (urlInfo.auth?.username) {\n // set from url\n username = urlInfo.auth.username\n } else if (env.COUCH_DB_USERNAME) {\n // set from env\n username = env.COUCH_DB_USERNAME\n } else if (!env.isTest()) {\n throw new Error(\"CouchDB username not set\")\n }\n if (urlInfo.auth?.password) {\n // set from url\n password = urlInfo.auth.password\n } else if (env.COUCH_DB_PASSWORD) {\n // set from env\n password = env.COUCH_DB_PASSWORD\n } else if (!env.isTest()) {\n throw new Error(\"CouchDB password not set\")\n }\n const authCookie = Buffer.from(`${username}:${password}`).toString(\"base64\")\n return {\n url: urlInfo.url!,\n auth: {\n username: username,\n password: password,\n },\n cookie: `Basic ${authCookie}`,\n }\n}\n\nexport const getUrlInfo = (url = env.COUCH_DB_URL) => {\n let cleanUrl, username, password, host\n if (url) {\n // Ensure the URL starts with a protocol\n const protoRegex = /^https?:\\/\\//i\n if (!protoRegex.test(url)) {\n url = `http://${url}`\n }\n\n // Split into protocol and remainder\n const split = url.split(\"://\")\n const protocol = split[0]\n const rest = split.slice(1).join(\"://\")\n\n // Extract auth if specified\n if (url.includes(\"@\")) {\n // Split into host and remainder\n let parts = rest.split(\"@\")\n host = parts[parts.length - 1]\n let auth = parts.slice(0, -1).join(\"@\")\n\n // Split auth into username and password\n if (auth.includes(\":\")) {\n const authParts = auth.split(\":\")\n username = authParts[0]\n password = authParts.slice(1).join(\":\")\n } else {\n username = auth\n }\n } else {\n host = rest\n }\n cleanUrl = `${protocol}://${host}`\n }\n return {\n url: cleanUrl,\n auth: {\n username,\n password,\n },\n }\n}\n", "/**\n * Makes sure that a URL has the correct number of slashes, while maintaining the\n * http(s):// double slashes.\n * @param url The URL to test and remove any extra double slashes.\n * @return The updated url.\n */\nexport function checkSlashesInUrl(url: string) {\n return url.replace(/(https?:\\/\\/)|(\\/)+/g, \"$1$2\")\n}\n", "import { getCouchInfo } from \"./connections\"\nimport fetch from \"node-fetch\"\nimport { checkSlashesInUrl } from \"../../helpers\"\n\nexport async function directCouchCall(\n path: string,\n method: string = \"GET\",\n body?: any\n) {\n let { url, cookie } = getCouchInfo()\n const couchUrl = `${url}/${path}`\n return await directCouchUrlCall({ url: couchUrl, cookie, method, body })\n}\n\nexport async function directCouchUrlCall({\n url,\n cookie,\n method,\n body,\n}: {\n url: string\n cookie: string\n method: string\n body?: any\n}) {\n const params: any = {\n method: method,\n 
headers: {\n Authorization: cookie,\n },\n }\n if (body && method !== \"GET\") {\n params.body = JSON.stringify(body)\n params.headers[\"Content-Type\"] = \"application/json\"\n }\n return await fetch(checkSlashesInUrl(encodeURI(url)), params)\n}\n\nexport async function directCouchQuery(\n path: string,\n method: string = \"GET\",\n body?: any\n) {\n const response = await directCouchCall(path, method, body)\n if (response.status < 300) {\n return await response.json()\n } else {\n throw \"Cannot connect to CouchDB instance\"\n }\n}\n", "import PouchDB from \"pouchdb\"\nimport env from \"../../environment\"\nimport { PouchOptions } from \"@budibase/types\"\nimport { getCouchInfo } from \"./connections\"\n\nlet Pouch: any\nlet initialised = false\n\n/**\n * Return a constructor for PouchDB.\n * This should be rarely used outside of the main application config.\n * Exposed for exceptional cases such as in-memory views.\n */\nexport const getPouch = (opts: PouchOptions = {}) => {\n let { url, cookie } = getCouchInfo()\n let POUCH_DB_DEFAULTS = {\n prefix: url,\n fetch: (url: string, opts: any) => {\n // use a specific authorization cookie - be very explicit about how we authenticate\n opts.headers.set(\"Authorization\", cookie)\n return PouchDB.fetch(url, opts)\n },\n }\n\n if (opts.inMemory) {\n const inMemory = require(\"pouchdb-adapter-memory\")\n PouchDB.plugin(inMemory)\n POUCH_DB_DEFAULTS = {\n // @ts-ignore\n adapter: \"memory\",\n }\n }\n\n if (opts.onDisk) {\n POUCH_DB_DEFAULTS = {\n // @ts-ignore\n adapter: \"leveldb\",\n }\n }\n\n if (opts.replication) {\n const replicationStream = require(\"@budibase/pouchdb-replication-stream\")\n PouchDB.plugin(replicationStream.plugin)\n // @ts-ignore\n PouchDB.adapter(\"writableStream\", replicationStream.adapters.writableStream)\n }\n\n if (opts.find) {\n const find = require(\"pouchdb-find\")\n PouchDB.plugin(find)\n }\n\n return PouchDB.defaults(POUCH_DB_DEFAULTS)\n}\n\nexport function init(opts?: PouchOptions) {\n Pouch = getPouch(opts)\n initialised = true\n}\n\nconst checkInitialised = () => {\n if (!initialised) {\n throw new Error(\"init has not been called\")\n }\n}\n\nexport function getPouchDB(dbName: string, opts?: any): PouchDB.Database {\n checkInitialised()\n const db = new Pouch(dbName, opts)\n const dbPut = db.put\n db.put = async (doc: any, options = {}) => {\n if (!doc.createdAt) {\n doc.createdAt = new Date().toISOString()\n }\n doc.updatedAt = new Date().toISOString()\n return dbPut(doc, options)\n }\n db.exists = async () => {\n const info = await db.info()\n return !info.error\n }\n return db\n}\n\n// use this function if you have called getPouchDB - close\n// the databases you've opened once finished\nexport async function closePouchDB(db: PouchDB.Database) {\n if (!db || env.isTest()) {\n return\n }\n try {\n // specifically await so that if there is an error, it can be ignored\n return await db.close()\n } catch (err) {\n // ignore error, already closed\n }\n}\n", "import { v4 } from \"uuid\"\n\nexport function newid() {\n return v4().replace(/-/g, \"\")\n}\n", "import {\n DocumentScope,\n DocumentDestroyResponse,\n DocumentInsertResponse,\n DocumentBulkResponse,\n OkResponse,\n} from \"@budibase/nano\"\nimport {\n AllDocsResponse,\n AnyDocument,\n Database,\n DatabaseDumpOpts,\n DatabasePutOpts,\n DatabaseQueryOpts,\n Document,\n} from \"@budibase/types\"\nimport tracer from \"dd-trace\"\nimport { Writable } from \"stream\"\n\nexport class DDInstrumentedDatabase implements Database {\n constructor(private readonly 
db: Database) {}\n\n get name(): string {\n return this.db.name\n }\n\n exists(): Promise<boolean> {\n return tracer.trace(\"db.exists\", span => {\n span?.addTags({ db_name: this.name })\n return this.db.exists()\n })\n }\n\n get<T extends Document>(id?: string | undefined): Promise<T> {\n return tracer.trace(\"db.get\", span => {\n span?.addTags({ db_name: this.name, doc_id: id })\n return this.db.get(id)\n })\n }\n\n getMultiple<T extends Document>(\n ids: string[],\n opts?: { allowMissing?: boolean | undefined } | undefined\n ): Promise<T[]> {\n return tracer.trace(\"db.getMultiple\", span => {\n span?.addTags({\n db_name: this.name,\n num_docs: ids.length,\n allow_missing: opts?.allowMissing,\n })\n return this.db.getMultiple(ids, opts)\n })\n }\n\n remove(\n id: string | Document,\n rev?: string | undefined\n ): Promise<DocumentDestroyResponse> {\n return tracer.trace(\"db.remove\", span => {\n span?.addTags({ db_name: this.name, doc_id: id })\n return this.db.remove(id, rev)\n })\n }\n\n put(\n document: AnyDocument,\n opts?: DatabasePutOpts | undefined\n ): Promise<DocumentInsertResponse> {\n return tracer.trace(\"db.put\", span => {\n span?.addTags({ db_name: this.name, doc_id: document._id })\n return this.db.put(document, opts)\n })\n }\n\n bulkDocs(documents: AnyDocument[]): Promise<DocumentBulkResponse[]> {\n return tracer.trace(\"db.bulkDocs\", span => {\n span?.addTags({ db_name: this.name, num_docs: documents.length })\n return this.db.bulkDocs(documents)\n })\n }\n\n allDocs<T extends Document>(\n params: DatabaseQueryOpts\n ): Promise<AllDocsResponse<T>> {\n return tracer.trace(\"db.allDocs\", span => {\n span?.addTags({ db_name: this.name })\n return this.db.allDocs(params)\n })\n }\n\n query<T extends Document>(\n viewName: string,\n params: DatabaseQueryOpts\n ): Promise<AllDocsResponse<T>> {\n return tracer.trace(\"db.query\", span => {\n span?.addTags({ db_name: this.name, view_name: viewName })\n return this.db.query(viewName, params)\n })\n }\n\n destroy(): Promise<void | OkResponse> {\n return tracer.trace(\"db.destroy\", span => {\n span?.addTags({ db_name: this.name })\n return this.db.destroy()\n })\n }\n\n compact(): Promise<void | OkResponse> {\n return tracer.trace(\"db.compact\", span => {\n span?.addTags({ db_name: this.name })\n return this.db.compact()\n })\n }\n\n dump(stream: Writable, opts?: DatabaseDumpOpts | undefined): Promise<any> {\n return tracer.trace(\"db.dump\", span => {\n span?.addTags({ db_name: this.name })\n return this.db.dump(stream, opts)\n })\n }\n\n load(...args: any[]): Promise<any> {\n return tracer.trace(\"db.load\", span => {\n span?.addTags({ db_name: this.name })\n return this.db.load(...args)\n })\n }\n\n createIndex(...args: any[]): Promise<any> {\n return tracer.trace(\"db.createIndex\", span => {\n span?.addTags({ db_name: this.name })\n return this.db.createIndex(...args)\n })\n }\n\n deleteIndex(...args: any[]): Promise<any> {\n return tracer.trace(\"db.deleteIndex\", span => {\n span?.addTags({ db_name: this.name })\n return this.db.deleteIndex(...args)\n })\n }\n\n getIndexes(...args: any[]): Promise<any> {\n return tracer.trace(\"db.getIndexes\", span => {\n span?.addTags({ db_name: this.name })\n return this.db.getIndexes(...args)\n })\n }\n}\n", "import Nano from \"@budibase/nano\"\nimport {\n AllDocsResponse,\n AnyDocument,\n Database,\n DatabaseOpts,\n DatabaseQueryOpts,\n DatabasePutOpts,\n DatabaseCreateIndexOpts,\n DatabaseDeleteIndexOpts,\n Document,\n isDocument,\n RowResponse,\n} from 
\"@budibase/types\"\nimport { getCouchInfo } from \"./connections\"\nimport { directCouchUrlCall } from \"./utils\"\nimport { getPouchDB } from \"./pouchDB\"\nimport { WriteStream, ReadStream } from \"fs\"\nimport { newid } from \"../../docIds/newid\"\nimport { DDInstrumentedDatabase } from \"../instrumentation\"\n\nconst DATABASE_NOT_FOUND = \"Database does not exist.\"\n\nfunction buildNano(couchInfo: { url: string; cookie: string }) {\n return Nano({\n url: couchInfo.url,\n requestDefaults: {\n headers: {\n Authorization: couchInfo.cookie,\n },\n },\n parseUrl: false,\n })\n}\n\ntype DBCall<T> = () => Promise<T>\n\nexport function DatabaseWithConnection(\n dbName: string,\n connection: string,\n opts?: DatabaseOpts\n) {\n const db = new DatabaseImpl(dbName, opts, connection)\n return new DDInstrumentedDatabase(db)\n}\n\nexport class DatabaseImpl implements Database {\n public readonly name: string\n private static nano: Nano.ServerScope\n private readonly instanceNano?: Nano.ServerScope\n private readonly pouchOpts: DatabaseOpts\n\n private readonly couchInfo = getCouchInfo()\n\n constructor(dbName: string, opts?: DatabaseOpts, connection?: string) {\n this.name = dbName\n this.pouchOpts = opts || {}\n if (connection) {\n this.couchInfo = getCouchInfo(connection)\n this.instanceNano = buildNano(this.couchInfo)\n }\n if (!DatabaseImpl.nano) {\n DatabaseImpl.init()\n }\n }\n\n static init() {\n const couchInfo = getCouchInfo()\n DatabaseImpl.nano = buildNano(couchInfo)\n }\n\n async exists() {\n const response = await directCouchUrlCall({\n url: `${this.couchInfo.url}/${this.name}`,\n method: \"HEAD\",\n cookie: this.couchInfo.cookie,\n })\n return response.status === 200\n }\n\n private nano() {\n return this.instanceNano || DatabaseImpl.nano\n }\n\n private getDb() {\n return this.nano().db.use(this.name)\n }\n\n private async checkAndCreateDb() {\n let shouldCreate = !this.pouchOpts?.skip_setup\n // check exists in a lightweight fashion\n let exists = await this.exists()\n if (!shouldCreate && !exists) {\n throw new Error(\"DB does not exist\")\n }\n if (!exists) {\n try {\n await this.nano().db.create(this.name)\n } catch (err: any) {\n // Handling race conditions\n if (err.statusCode !== 412) {\n throw err\n }\n }\n }\n return this.getDb()\n }\n\n // this function fetches the DB and handles if DB creation is needed\n private async performCall<T>(\n call: (db: Nano.DocumentScope<any>) => Promise<DBCall<T>> | DBCall<T>\n ): Promise<any> {\n const db = this.getDb()\n const fnc = await call(db)\n try {\n return await fnc()\n } catch (err: any) {\n if (err.statusCode === 404 && err.reason === DATABASE_NOT_FOUND) {\n await this.checkAndCreateDb()\n return await this.performCall(call)\n } else if (err.statusCode) {\n err.status = err.statusCode\n }\n throw err\n }\n }\n\n async get<T extends Document>(id?: string): Promise<T> {\n return this.performCall(db => {\n if (!id) {\n throw new Error(\"Unable to get doc without a valid _id.\")\n }\n return () => db.get(id)\n })\n }\n\n async getMultiple<T extends Document>(\n ids: string[],\n opts?: { allowMissing?: boolean }\n ): Promise<T[]> {\n // get unique\n ids = [...new Set(ids)]\n const response = await this.allDocs<T>({\n keys: ids,\n include_docs: true,\n })\n const rowUnavailable = (row: RowResponse<T>) => {\n // row is deleted - key lookup can return this\n if (row.doc == null || (\"deleted\" in row.value && row.value.deleted)) {\n return true\n }\n return row.error === \"not_found\"\n }\n\n const rows = response.rows.filter(row => 
!rowUnavailable(row))\n const someMissing = rows.length !== response.rows.length\n // some were filtered out - means some missing\n if (!opts?.allowMissing && someMissing) {\n const missing = response.rows.filter(row => rowUnavailable(row))\n const missingIds = missing.map(row => row.key).join(\", \")\n throw new Error(`Unable to get documents: ${missingIds}`)\n }\n return rows.map(row => row.doc!)\n }\n\n async remove(idOrDoc: string | Document, rev?: string) {\n return this.performCall(db => {\n let _id: string\n let _rev: string\n\n if (isDocument(idOrDoc)) {\n _id = idOrDoc._id!\n _rev = idOrDoc._rev!\n } else {\n _id = idOrDoc\n _rev = rev!\n }\n\n if (!_id || !_rev) {\n throw new Error(\"Unable to remove doc without a valid _id and _rev.\")\n }\n return () => db.destroy(_id, _rev)\n })\n }\n\n async post(document: AnyDocument, opts?: DatabasePutOpts) {\n if (!document._id) {\n document._id = newid()\n }\n return this.put(document, opts)\n }\n\n async put(document: AnyDocument, opts?: DatabasePutOpts) {\n if (!document._id) {\n throw new Error(\"Cannot store document without _id field.\")\n }\n return this.performCall(async db => {\n if (!document.createdAt) {\n document.createdAt = new Date().toISOString()\n }\n document.updatedAt = new Date().toISOString()\n if (opts?.force && document._id) {\n try {\n const existing = await this.get(document._id)\n if (existing) {\n document._rev = existing._rev\n }\n } catch (err: any) {\n if (err.status !== 404) {\n throw err\n }\n }\n }\n return () => db.insert(document)\n })\n }\n\n async bulkDocs(documents: AnyDocument[]) {\n return this.performCall(db => {\n return () => db.bulk({ docs: documents })\n })\n }\n\n async allDocs<T extends Document>(\n params: DatabaseQueryOpts\n ): Promise<AllDocsResponse<T>> {\n return this.performCall(db => {\n return () => db.list(params)\n })\n }\n\n async query<T extends Document>(\n viewName: string,\n params: DatabaseQueryOpts\n ): Promise<AllDocsResponse<T>> {\n return this.performCall(db => {\n const [database, view] = viewName.split(\"/\")\n return () => db.view(database, view, params)\n })\n }\n\n async destroy() {\n try {\n return await this.nano().db.destroy(this.name)\n } catch (err: any) {\n // didn't exist, don't worry\n if (err.statusCode === 404) {\n return\n } else {\n throw { ...err, status: err.statusCode }\n }\n }\n }\n\n async compact() {\n return this.performCall(db => {\n return () => db.compact()\n })\n }\n\n // All below functions are in-frequently called, just utilise PouchDB\n // for them as it implements them better than we can\n async dump(stream: WriteStream, opts?: { filter?: any }) {\n const pouch = getPouchDB(this.name)\n // @ts-ignore\n return pouch.dump(stream, opts)\n }\n\n async load(stream: ReadStream) {\n const pouch = getPouchDB(this.name)\n // @ts-ignore\n return pouch.load(stream)\n }\n\n async createIndex(opts: DatabaseCreateIndexOpts) {\n const pouch = getPouchDB(this.name)\n return pouch.createIndex(opts)\n }\n\n async deleteIndex(opts: DatabaseDeleteIndexOpts) {\n const pouch = getPouchDB(this.name)\n return pouch.deleteIndex(opts)\n }\n\n async getIndexes() {\n const pouch = getPouchDB(this.name)\n return pouch.getIndexes()\n }\n}\n", "export const CONSTANT_INTERNAL_ROW_COLS = [\n \"_id\",\n \"_rev\",\n \"type\",\n \"createdAt\",\n \"updatedAt\",\n \"tableId\",\n] as const\n\nexport const CONSTANT_EXTERNAL_ROW_COLS = [\"_id\", \"_rev\", \"tableId\"] as const\n\nexport function isInternalColumnName(name: string): boolean {\n return (CONSTANT_INTERNAL_ROW_COLS as 
readonly string[]).includes(name)\n}\n", "export * from \"./connections\"\nexport * from \"./DatabaseImpl\"\nexport * from \"./utils\"\nexport { init, getPouch, getPouchDB, closePouchDB } from \"./pouchDB\"\nexport * from \"../constants\"\n", "import { directCouchQuery, DatabaseImpl } from \"./couch\"\nimport { CouchFindOptions, Database, DatabaseOpts } from \"@budibase/types\"\nimport { DDInstrumentedDatabase } from \"./instrumentation\"\n\nexport function getDB(dbName: string, opts?: DatabaseOpts): Database {\n return new DDInstrumentedDatabase(new DatabaseImpl(dbName, opts))\n}\n\n// we have to use a callback for this so that we can close\n// the DB when we're done, without this manual requests would\n// need to close the database when done with it to avoid memory leaks\nexport async function doWithDB<T>(\n dbName: string,\n cb: (db: Database) => Promise<T>,\n opts?: DatabaseOpts\n) {\n const db = getDB(dbName, opts)\n // need this to be async so that we can correctly close DB after all\n // async operations have been completed\n return await cb(db)\n}\n\nexport async function directCouchAllDbs(queryString?: string) {\n let couchPath = \"/_all_dbs\"\n if (queryString) {\n couchPath += `?${queryString}`\n }\n return await directCouchQuery(couchPath)\n}\n\nexport async function directCouchFind(dbName: string, opts: CouchFindOptions) {\n const json = await directCouchQuery(`${dbName}/_find`, \"POST\", opts)\n return { rows: json.docs, bookmark: json.bookmark }\n}\n", "// some test cases call functions directly, need to\n// store an app ID to pretend there is a context\nimport env from \"../environment\"\nimport Context from \"./Context\"\nimport * as conversions from \"../docIds/conversions\"\nimport { getDB } from \"../db/db\"\nimport {\n DocumentType,\n SEPARATOR,\n StaticDatabases,\n DEFAULT_TENANT_ID,\n} from \"../constants\"\nimport { Database, IdentityContext } from \"@budibase/types\"\nimport { ContextMap } from \"./types\"\n\nlet TEST_APP_ID: string | null = null\n\nexport function getGlobalDBName(tenantId?: string) {\n // tenant ID can be set externally, for example user API where\n // new tenants are being created, this may be the case\n if (!tenantId) {\n tenantId = getTenantId()\n }\n return baseGlobalDBName(tenantId)\n}\n\nexport function getAuditLogDBName(tenantId?: string) {\n if (!tenantId) {\n tenantId = getTenantId()\n }\n if (tenantId === DEFAULT_TENANT_ID) {\n return StaticDatabases.AUDIT_LOGS.name\n } else {\n return `${tenantId}${SEPARATOR}${StaticDatabases.AUDIT_LOGS.name}`\n }\n}\n\nexport function baseGlobalDBName(tenantId: string | undefined | null) {\n if (!tenantId || tenantId === DEFAULT_TENANT_ID) {\n return StaticDatabases.GLOBAL.name\n } else {\n return `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`\n }\n}\n\nexport function getPlatformURL() {\n return env.PLATFORM_URL\n}\n\nexport function isMultiTenant() {\n return !!env.MULTI_TENANCY\n}\n\nexport function isTenantIdSet() {\n const context = Context.get()\n return !!context?.tenantId\n}\n\nexport function isTenancyEnabled() {\n return env.MULTI_TENANCY\n}\n\n/**\n * Given an app ID this will attempt to retrieve the tenant ID from it.\n * @return The tenant ID found within the app ID.\n */\nexport function getTenantIDFromAppID(appId: string) {\n if (!appId) {\n return undefined\n }\n if (!isMultiTenant()) {\n return DEFAULT_TENANT_ID\n }\n const split = appId.split(SEPARATOR)\n const hasDev = split[1] === DocumentType.DEV\n if ((hasDev && split.length === 3) || (!hasDev && split.length === 2)) {\n 
return undefined\n }\n if (hasDev) {\n return split[2]\n } else {\n return split[1]\n }\n}\n\nfunction updateContext(updates: ContextMap): ContextMap {\n let context: ContextMap\n try {\n context = Context.get()\n } catch (err) {\n // no context, start empty\n context = {}\n }\n context = {\n ...context,\n ...updates,\n }\n return context\n}\n\nasync function newContext<T>(updates: ContextMap, task: () => T) {\n guardMigration()\n\n // see if there already is a context setup\n let context: ContextMap = updateContext(updates)\n return Context.run(context, task)\n}\n\nexport async function doInAutomationContext<T>(params: {\n appId: string\n automationId: string\n task: () => T\n}): Promise<T> {\n const tenantId = getTenantIDFromAppID(params.appId)\n return newContext(\n {\n tenantId,\n appId: params.appId,\n automationId: params.automationId,\n },\n params.task\n )\n}\n\nexport async function doInContext(appId: string, task: any): Promise<any> {\n const tenantId = getTenantIDFromAppID(appId)\n return newContext(\n {\n tenantId,\n appId,\n },\n task\n )\n}\n\nexport async function doInTenant<T>(\n tenantId: string | undefined,\n task: () => T\n): Promise<T> {\n // make sure default always selected in single tenancy\n if (!env.MULTI_TENANCY) {\n tenantId = tenantId || DEFAULT_TENANT_ID\n }\n\n const updates = tenantId ? { tenantId } : {}\n return newContext(updates, task)\n}\n\nexport async function doInAppContext<T>(\n appId: string,\n task: () => T\n): Promise<T> {\n return _doInAppContext(appId, task)\n}\n\nasync function _doInAppContext<T>(\n appId: string,\n task: () => T,\n extraContextSettings?: ContextMap\n): Promise<T> {\n if (!appId) {\n throw new Error(\"appId is required\")\n }\n\n const tenantId = getTenantIDFromAppID(appId)\n const updates: ContextMap = { appId, ...extraContextSettings }\n if (tenantId) {\n updates.tenantId = tenantId\n }\n\n return newContext(updates, task)\n}\n\nexport async function doInIdentityContext<T>(\n identity: IdentityContext,\n task: () => T\n): Promise<T> {\n if (!identity) {\n throw new Error(\"identity is required\")\n }\n\n const context: ContextMap = {\n identity,\n }\n if (identity.tenantId) {\n context.tenantId = identity.tenantId\n }\n return newContext(context, task)\n}\n\nfunction guardMigration() {\n const context = Context.get()\n if (context?.isMigrating) {\n throw new Error(\n \"The context cannot be changed, a migration is currently running\"\n )\n }\n}\n\nexport async function doInAppMigrationContext<T>(\n appId: string,\n task: () => T\n): Promise<T> {\n return _doInAppContext(appId, task, {\n isMigrating: true,\n })\n}\n\nexport function getIdentity(): IdentityContext | undefined {\n try {\n const context = Context.get()\n return context?.identity\n } catch (e) {\n // do nothing - identity is not in context\n }\n}\n\nexport function getTenantId(): string {\n if (!isMultiTenant()) {\n return DEFAULT_TENANT_ID\n }\n const context = Context.get()\n const tenantId = context?.tenantId\n if (!tenantId) {\n throw new Error(\"Tenant id not found\")\n }\n return tenantId\n}\n\nexport function getAutomationId(): string | undefined {\n const context = Context.get()\n return context?.automationId\n}\n\nexport function getAppId(): string | undefined {\n const context = Context.get()\n const foundId = context?.appId\n if (!foundId && env.isTest() && TEST_APP_ID) {\n return TEST_APP_ID\n } else {\n return foundId\n }\n}\n\nexport const getProdAppId = () => {\n const appId = getAppId()\n if (!appId) {\n throw new Error(\"Could not get appId\")\n 
}\n return conversions.getProdAppID(appId)\n}\n\nexport function doInEnvironmentContext(\n values: Record<string, string>,\n task: any\n) {\n if (!values) {\n throw new Error(\"Must supply environment variables.\")\n }\n const updates = {\n environmentVariables: values,\n }\n return newContext(updates, task)\n}\n\nexport function doInScimContext(task: any) {\n const updates: ContextMap = {\n isScim: true,\n }\n return newContext(updates, task)\n}\n\nexport function getEnvironmentVariables() {\n const context = Context.get()\n if (!context.environmentVariables) {\n return null\n } else {\n return context.environmentVariables\n }\n}\n\nexport function getGlobalDB(): Database {\n const context = Context.get()\n if (!context || (env.MULTI_TENANCY && !context.tenantId)) {\n throw new Error(\"Global DB not found\")\n }\n return getDB(baseGlobalDBName(context?.tenantId))\n}\n\nexport function getAuditLogsDB(): Database {\n if (!getTenantId()) {\n throw new Error(\"No tenant ID found - cannot open audit log DB\")\n }\n return getDB(getAuditLogDBName())\n}\n\n/**\n * Gets the app database based on whatever the request\n * contained, dev or prod.\n */\nexport function getAppDB(opts?: any): Database {\n const appId = getAppId()\n if (!appId) {\n throw new Error(\"Unable to retrieve app DB - no app ID.\")\n }\n return getDB(appId, opts)\n}\n\n/**\n * This specifically gets the prod app ID, if the request\n * contained a development app ID, this will get the prod one.\n */\nexport function getProdAppDB(opts?: any): Database {\n const appId = getAppId()\n if (!appId) {\n throw new Error(\"Unable to retrieve prod DB - no app ID.\")\n }\n return getDB(conversions.getProdAppID(appId), opts)\n}\n\n/**\n * This specifically gets the dev app ID, if the request\n * contained a prod app ID, this will get the dev one.\n */\nexport function getDevAppDB(opts?: any): Database {\n const appId = getAppId()\n if (!appId) {\n throw new Error(\"Unable to retrieve dev DB - no app ID.\")\n }\n return getDB(conversions.getDevelopmentAppID(appId), opts)\n}\n\nexport function isScim(): boolean {\n const context = Context.get()\n const scimCall = context?.isScim\n return !!scimCall\n}\n\nexport function getCurrentContext(): ContextMap | undefined {\n try {\n return Context.get()\n } catch (e) {\n return undefined\n }\n}\n", "export { DEFAULT_TENANT_ID } from \"../constants\"\nexport * as identity from \"./identity\"\nexport * from \"./mainContext\"\n", "import env from \"../environment\"\nimport * as Redis from \"ioredis\"\n\nconst SLOT_REFRESH_MS = 2000\nconst CONNECT_TIMEOUT_MS = 10000\nexport const SEPARATOR = \"-\"\n\n/**\n * These Redis databases help us to segment up a Redis keyspace by prepending the\n * specified database name onto the cache key. 
This means that a single real Redis database\n * can be split up a bit; allowing us to use scans on small databases to find some particular\n * keys within.\n * If writing a very large volume of keys is expected (say 10K+) then it is better to keep these out\n * of the default keyspace and use a separate one - the SelectableDatabase can be used for this.\n */\nexport enum Databases {\n PW_RESETS = \"pwReset\",\n VERIFICATIONS = \"verification\",\n INVITATIONS = \"invitation\",\n DEV_LOCKS = \"devLocks\",\n DEBOUNCE = \"debounce\",\n SESSIONS = \"session\",\n USER_CACHE = \"users\",\n FLAGS = \"flags\",\n APP_METADATA = \"appMetadata\",\n QUERY_VARS = \"queryVars\",\n LICENSES = \"license\",\n GENERIC_CACHE = \"data_cache\",\n WRITE_THROUGH = \"writeThrough\",\n LOCKS = \"locks\",\n SOCKET_IO = \"socket_io\",\n}\n\n/**\n * These define the numeric Redis databases that can be access with the SELECT command -\n * (https://redis.io/commands/select/). By default a Redis server/cluster will have 16 selectable\n * databases, increasing this count increases the amount of CPU/memory required to run the server.\n * Ideally new Redis keyspaces should be used sparingly, only when absolutely necessary for performance\n * to be maintained. Generally a keyspace can grow to be very large is scans are not needed or desired,\n * but if you need to walk through all values in a database periodically then a separate selectable\n * keyspace should be used.\n */\nexport enum SelectableDatabase {\n DEFAULT = 0,\n SOCKET_IO = 1,\n RATE_LIMITING = 2,\n UNUSED_2 = 3,\n UNUSED_3 = 4,\n UNUSED_4 = 5,\n UNUSED_5 = 6,\n UNUSED_6 = 7,\n UNUSED_7 = 8,\n UNUSED_8 = 9,\n UNUSED_9 = 10,\n UNUSED_10 = 11,\n UNUSED_11 = 12,\n UNUSED_12 = 13,\n UNUSED_13 = 14,\n UNUSED_14 = 15,\n}\n\nexport function getRedisConnectionDetails() {\n let password = env.REDIS_PASSWORD\n let url: string[] | string = env.REDIS_URL.split(\"//\")\n // get rid of the protocol\n url = url.length > 1 ? url[1] : url[0]\n // check for a password etc\n url = url.split(\"@\")\n if (url.length > 1) {\n // get the password\n password = url[0].split(\":\")[1]\n url = url[1]\n } else {\n url = url[0]\n }\n const [host, port] = url.split(\":\")\n\n const portNumber = parseInt(port)\n return {\n host,\n password,\n // assume default port for redis if invalid found\n port: isNaN(portNumber) ? 
6379 : portNumber,\n }\n}\n\nexport function getRedisOptions() {\n const { host, password, port } = getRedisConnectionDetails()\n let redisOpts: Redis.RedisOptions = {\n connectTimeout: CONNECT_TIMEOUT_MS,\n port: port,\n host,\n password,\n }\n let opts: Redis.ClusterOptions | Redis.RedisOptions = redisOpts\n if (env.REDIS_CLUSTERED) {\n opts = {\n connectTimeout: CONNECT_TIMEOUT_MS,\n redisOptions: {\n ...redisOpts,\n tls: {},\n },\n slotsRefreshTimeout: SLOT_REFRESH_MS,\n dnsLookup: (address: string, callback: any) => callback(null, address),\n } as Redis.ClusterOptions\n }\n return opts\n}\n\nexport function addDbPrefix(db: string, key: string) {\n if (key.includes(db)) {\n return key\n }\n return `${db}${SEPARATOR}${key}`\n}\n\nexport function removeDbPrefix(key: string) {\n let parts = key.split(SEPARATOR)\n if (parts.length >= 2) {\n parts.shift()\n return parts.join(SEPARATOR)\n } else {\n // return the only part\n return parts[0]\n }\n}\n", "import { Header } from \"../../constants\"\n\nconst correlator = require(\"correlation-id\")\n\nexport const setHeader = (headers: any) => {\n const correlationId = correlator.getId()\n if (correlationId) {\n headers[Header.CORRELATION_ID] = correlationId\n }\n}\n\nexport function getId() {\n return correlator.getId()\n}\n", "export * from \"./correlation\"\n", "import { join } from \"path\"\nimport { tmpdir } from \"os\"\nimport fs from \"fs\"\nimport env from \"../environment\"\n\n/****************************************************\n * NOTE: When adding a new bucket - name *\n * sure that S3 usages (like budibase-infra) *\n * have been updated to have a unique bucket name. *\n ****************************************************/\n// can't be an enum - only numbers can be used for computed types\nexport const ObjectStoreBuckets = {\n BACKUPS: env.BACKUPS_BUCKET_NAME,\n APPS: env.APPS_BUCKET_NAME,\n TEMPLATES: env.TEMPLATES_BUCKET_NAME,\n GLOBAL: env.GLOBAL_BUCKET_NAME,\n PLUGINS: env.PLUGIN_BUCKET_NAME,\n}\n\nconst bbTmp = join(tmpdir(), \".budibase\")\ntry {\n fs.mkdirSync(bbTmp)\n} catch (e: any) {\n if (e.code !== \"EEXIST\") {\n throw e\n }\n}\n\nexport function budibaseTempDir() {\n return bbTmp\n}\n", "import { getAppClient } from \"../redis/init\"\nimport { doWithDB, DocumentType } from \"../db\"\nimport { Database, App } from \"@budibase/types\"\n\nexport enum AppState {\n INVALID = \"invalid\",\n}\n\nexport interface DeletedApp {\n state: AppState\n}\n\nconst EXPIRY_SECONDS = 3600\n\n/**\n * The default populate app metadata function\n */\nasync function populateFromDB(appId: string) {\n return doWithDB(\n appId,\n (db: Database) => {\n return db.get<App>(DocumentType.APP_METADATA)\n },\n { skip_setup: true }\n )\n}\n\nfunction isInvalid(metadata?: { state: string }) {\n return !metadata || metadata.state === AppState.INVALID\n}\n\n/**\n * Get the requested app metadata by id.\n * Use redis cache to first read the app metadata.\n * If not present fallback to loading the app metadata directly and re-caching.\n * @param appId the id of the app to get metadata from.\n * @returns the app metadata.\n */\nexport async function getAppMetadata(appId: string): Promise<App | DeletedApp> {\n const client = await getAppClient()\n // try cache\n let metadata = await client.get(appId)\n if (!metadata) {\n let expiry: number | undefined = EXPIRY_SECONDS\n try {\n metadata = await populateFromDB(appId)\n } catch (err: any) {\n // app DB left around, but no metadata, it is invalid\n if (err && err.status === 404) {\n metadata = { state: 
AppState.INVALID }\n // don't expire the reference to an invalid app, it'll only be\n // updated if a metadata doc actually gets stored (app is remade/reverted)\n expiry = undefined\n } else {\n throw err\n }\n }\n // needed for some scenarios where the caching happens\n // so quickly the requests can get slightly out of sync\n // might store its invalid just before it stores its valid\n if (isInvalid(metadata)) {\n const temp = await client.get(appId)\n if (temp) {\n metadata = temp\n }\n }\n await client.store(appId, metadata, expiry)\n }\n\n return metadata\n}\n\n/**\n * Invalidate/reset the cached metadata when a change occurs in the db.\n * @param appId the cache key to bust/update.\n * @param newMetadata optional - can simply provide the new metadata to update with.\n * @return will respond with success when cache is updated.\n */\nexport async function invalidateAppMetadata(appId: string, newMetadata?: any) {\n if (!appId) {\n throw \"Cannot invalidate if no app ID provided.\"\n }\n const client = await getAppClient()\n await client.delete(appId)\n if (newMetadata) {\n await client.store(appId, newMetadata, EXPIRY_SECONDS)\n }\n}\n", "import {\n APP_PREFIX,\n DocumentType,\n InternalTable,\n SEPARATOR,\n} from \"../constants\"\nimport { newid } from \"./newid\"\n\n/**\n * Generates a new app ID.\n * @returns The new app ID which the app doc can be stored under.\n */\nexport const generateAppID = (tenantId?: string | null) => {\n let id = APP_PREFIX\n if (tenantId) {\n id += `${tenantId}${SEPARATOR}`\n }\n return `${id}${newid()}`\n}\n\n/**\n * Gets a new row ID for the specified table.\n * @param tableId The table which the row is being created for.\n * @param id If an ID is to be used then the UUID can be substituted for this.\n * @returns The new ID which a row doc can be stored under.\n */\nexport function generateRowID(tableId: string, id?: string) {\n id = id || newid()\n return `${DocumentType.ROW}${SEPARATOR}${tableId}${SEPARATOR}${id}`\n}\n\n/**\n * Generates a new workspace ID.\n * @returns The new workspace ID which the workspace doc can be stored under.\n */\nexport function generateWorkspaceID() {\n return `${DocumentType.WORKSPACE}${SEPARATOR}${newid()}`\n}\n\n/**\n * Generates a new global user ID.\n * @returns The new user ID which the user doc can be stored under.\n */\nexport function generateGlobalUserID(id?: any) {\n return `${DocumentType.USER}${SEPARATOR}${id || newid()}`\n}\n\nconst isGlobalUserIDRegex = new RegExp(`^${DocumentType.USER}${SEPARATOR}.+`)\nexport function isGlobalUserID(id: string) {\n return isGlobalUserIDRegex.test(id)\n}\n\n/**\n * Generates a new user ID based on the passed in global ID.\n * @param globalId The ID of the global user.\n * @returns The new user ID which the user doc can be stored under.\n */\nexport function generateUserMetadataID(globalId: string) {\n return generateRowID(InternalTable.USER_METADATA, globalId)\n}\n\n/**\n * Breaks up the ID to get the global ID.\n */\nexport function getGlobalIDFromUserMetadataID(id: string) {\n const prefix = `${DocumentType.ROW}${SEPARATOR}${InternalTable.USER_METADATA}${SEPARATOR}`\n if (!id || !id.includes(prefix)) {\n return id\n }\n return id.split(prefix)[1]\n}\n\n/**\n * Generates a template ID.\n * @param ownerId The owner/user of the template, this could be global or a workspace level.\n */\nexport function generateTemplateID(ownerId: any) {\n return `${DocumentType.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}${newid()}`\n}\n\nexport function generateAppUserID(prodAppId: string, 
userId: string) {\n return `${prodAppId}${SEPARATOR}${userId}`\n}\n\n/**\n * Generates a new role ID.\n * @returns The new role ID which the role doc can be stored under.\n */\nexport function generateRoleID(name: string) {\n const prefix = `${DocumentType.ROLE}${SEPARATOR}`\n if (name.startsWith(prefix)) {\n return name\n }\n return `${prefix}${name}`\n}\n\n/**\n * Utility function to be more verbose.\n */\nexport function prefixRoleID(name: string) {\n return generateRoleID(name)\n}\n\n/**\n * Generates a new dev info document ID - this is scoped to a user.\n * @returns The new dev info ID which info for dev (like api key) can be stored under.\n */\nexport const generateDevInfoID = (userId: any) => {\n return `${DocumentType.DEV_INFO}${SEPARATOR}${userId}`\n}\n\n/**\n * Generates a new plugin ID - to be used in the global DB.\n * @returns The new plugin ID which a plugin metadata document can be stored under.\n */\nexport const generatePluginID = (name: string) => {\n return `${DocumentType.PLUGIN}${SEPARATOR}${name}`\n}\n", "import {\n DocumentType,\n InternalTable,\n SEPARATOR,\n UNICODE_MAX,\n ViewName,\n} from \"../constants\"\nimport { getProdAppID } from \"./conversions\"\nimport { DatabaseQueryOpts } from \"@budibase/types\"\n\n/**\n * If creating DB allDocs/query params with only a single top level ID this can be used, this\n * is usually the case as most of our docs are top level e.g. tables, automations, users and so on.\n * More complex cases such as link docs and rows which have multiple levels of IDs that their\n * ID consists of need their own functions to build the allDocs parameters.\n * @param docType The type of document which input params are being built for, e.g. user,\n * link, app, table and so on.\n * @param docId The ID of the document minus its type - this is only needed if looking\n * for a singular document.\n * @param otherProps Add any other properties onto the request, e.g. include_docs.\n * @returns Parameters which can then be used with an allDocs request.\n */\nexport function getDocParams(\n docType: string,\n docId?: string | null,\n otherProps: Partial<DatabaseQueryOpts> = {}\n): DatabaseQueryOpts {\n if (docId == null) {\n docId = \"\"\n }\n return {\n ...otherProps,\n startkey: `${docType}${SEPARATOR}${docId}`,\n endkey: `${docType}${SEPARATOR}${docId}${UNICODE_MAX}`,\n }\n}\n\n/**\n * Gets the DB allDocs/query params for retrieving a row.\n * @param tableId The table in which the rows have been stored.\n * @param rowId The ID of the row which is being specifically queried for. This can be\n * left null to get all the rows in the table.\n * @param otherProps Any other properties to add to the request.\n * @returns Parameters which can then be used with an allDocs request.\n */\nexport function getRowParams(\n tableId?: string | null,\n rowId?: string | null,\n otherProps: Partial<DatabaseQueryOpts> = {}\n): DatabaseQueryOpts {\n if (tableId == null) {\n return getDocParams(DocumentType.ROW, null, otherProps)\n }\n\n const endOfKey = rowId == null ? 
`${tableId}${SEPARATOR}` : rowId\n\n return getDocParams(DocumentType.ROW, endOfKey, otherProps)\n}\n\n/**\n * Retrieve the correct index for a view based on default design DB.\n */\nexport function getQueryIndex(viewName: ViewName) {\n return `database/${viewName}`\n}\n\n/**\n * Check if a given ID is that of a table.\n * @returns {boolean}\n */\nexport const isTableId = (id: string) => {\n // this includes datasource plus tables\n return (\n id &&\n (id.startsWith(`${DocumentType.TABLE}${SEPARATOR}`) ||\n id.startsWith(`${DocumentType.DATASOURCE_PLUS}${SEPARATOR}`))\n )\n}\n\n/**\n * Check if a given ID is that of a datasource or datasource plus.\n * @returns {boolean}\n */\nexport const isDatasourceId = (id: string) => {\n // this covers both datasources and datasource plus\n return id && id.startsWith(`${DocumentType.DATASOURCE}${SEPARATOR}`)\n}\n\n/**\n * Gets parameters for retrieving workspaces.\n */\nexport function getWorkspaceParams(\n id = \"\",\n otherProps: Partial<DatabaseQueryOpts> = {}\n): DatabaseQueryOpts {\n return {\n ...otherProps,\n startkey: `${DocumentType.WORKSPACE}${SEPARATOR}${id}`,\n endkey: `${DocumentType.WORKSPACE}${SEPARATOR}${id}${UNICODE_MAX}`,\n }\n}\n\n/**\n * Gets parameters for retrieving users.\n */\nexport function getGlobalUserParams(\n globalId: any,\n otherProps: Partial<DatabaseQueryOpts> = {}\n): DatabaseQueryOpts {\n if (!globalId) {\n globalId = \"\"\n }\n const startkey = otherProps?.startkey\n return {\n ...otherProps,\n // need to include this incase pagination\n startkey: startkey\n ? startkey\n : `${DocumentType.USER}${SEPARATOR}${globalId}`,\n endkey: `${DocumentType.USER}${SEPARATOR}${globalId}${UNICODE_MAX}`,\n }\n}\n\n/**\n * Gets parameters for retrieving users, this is a utility function for the getDocParams function.\n */\nexport function getUserMetadataParams(\n userId?: string | null,\n otherProps: Partial<DatabaseQueryOpts> = {}\n): DatabaseQueryOpts {\n return getRowParams(InternalTable.USER_METADATA, userId, otherProps)\n}\n\nexport function getUsersByAppParams(\n appId: any,\n otherProps: Partial<DatabaseQueryOpts> = {}\n): DatabaseQueryOpts {\n const prodAppId = getProdAppID(appId)\n return {\n ...otherProps,\n startkey: prodAppId,\n endkey: `${prodAppId}${UNICODE_MAX}`,\n }\n}\n\n/**\n * Gets parameters for retrieving templates. Owner ID must be specified, either global or a workspace level.\n */\nexport function getTemplateParams(\n ownerId: any,\n templateId: any,\n otherProps = {}\n) {\n if (!templateId) {\n templateId = \"\"\n }\n let final\n if (templateId) {\n final = templateId\n } else {\n final = `${DocumentType.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}`\n }\n return {\n ...otherProps,\n startkey: final,\n endkey: `${final}${UNICODE_MAX}`,\n }\n}\n\n/**\n * Gets parameters for retrieving a role, this is a utility function for the getDocParams function.\n */\nexport function getRoleParams(roleId?: string | null, otherProps = {}) {\n return getDocParams(DocumentType.ROLE, roleId, otherProps)\n}\n\nexport function getStartEndKeyURL(baseKey: any, tenantId?: string) {\n const tenancy = tenantId ? 
`${SEPARATOR}${tenantId}` : \"\"\n return `startkey=\"${baseKey}${tenancy}\"&endkey=\"${baseKey}${tenancy}${UNICODE_MAX}\"`\n}\n\n/**\n * Gets parameters for retrieving automations, this is a utility function for the getDocParams function.\n */\nexport const getPluginParams = (pluginId?: string | null, otherProps = {}) => {\n return getDocParams(DocumentType.PLUGIN, pluginId, otherProps)\n}\n", "export * from \"./ids\"\nexport * from \"./params\"\n", "import env from \"../environment\"\nimport { DEFAULT_TENANT_ID, SEPARATOR, DocumentType } from \"../constants\"\nimport { getTenantId, getGlobalDBName } from \"../context\"\nimport { doWithDB, directCouchAllDbs } from \"./db\"\nimport { AppState, DeletedApp, getAppMetadata } from \"../cache/appMetadata\"\nimport { isDevApp, isDevAppID, getProdAppID } from \"../docIds/conversions\"\nimport { App, Database } from \"@budibase/types\"\nimport { getStartEndKeyURL } from \"../docIds\"\n\nexport * from \"../docIds\"\n\n/**\n * if in production this will use the CouchDB _all_dbs call to retrieve a list of databases. If testing\n * when using Pouch it will use the pouchdb-all-dbs package.\n * opts.efficient can be provided to make sure this call is always quick in a multi-tenant environment,\n * but it may not be 100% accurate in full efficiency mode (some tenantless apps may be missed).\n */\nexport async function getAllDbs(opts = { efficient: false }) {\n const efficient = opts && opts.efficient\n\n let dbs: any[] = []\n async function addDbs(queryString?: string) {\n const json = await directCouchAllDbs(queryString)\n dbs = dbs.concat(json)\n }\n let tenantId = getTenantId()\n if (!env.MULTI_TENANCY || (!efficient && tenantId === DEFAULT_TENANT_ID)) {\n // just get all DBs when:\n // - single tenancy\n // - default tenant\n // - apps dbs don't contain tenant id\n // - non-default tenant dbs are filtered out application side in getAllApps\n await addDbs()\n } else {\n // get prod apps\n await addDbs(getStartEndKeyURL(DocumentType.APP, tenantId))\n // get dev apps\n await addDbs(getStartEndKeyURL(DocumentType.APP_DEV, tenantId))\n // add global db name\n dbs.push(getGlobalDBName(tenantId))\n }\n return dbs\n}\n\n/**\n * Lots of different points in the system need to find the full list of apps, this will\n * enumerate the entire CouchDB cluster and get the list of databases (every app).\n *\n * @return returns the app information document stored in each app database.\n */\nexport async function getAllApps({\n dev,\n all,\n idsOnly,\n efficient,\n}: any = {}): Promise<App[] | string[]> {\n let tenantId = getTenantId()\n if (!env.MULTI_TENANCY && !tenantId) {\n tenantId = DEFAULT_TENANT_ID\n }\n let dbs = await getAllDbs({ efficient })\n const appDbNames = dbs.filter((dbName: any) => {\n if (env.isTest() && !dbName) {\n return false\n }\n\n const split = dbName.split(SEPARATOR)\n // it is an app, check the tenantId\n if (split[0] === DocumentType.APP) {\n // tenantId is always right before the UUID\n const possibleTenantId = split[split.length - 2]\n\n const noTenantId =\n split.length === 2 || possibleTenantId === DocumentType.DEV\n\n return (\n (tenantId === DEFAULT_TENANT_ID && noTenantId) ||\n possibleTenantId === tenantId\n )\n }\n return false\n })\n if (idsOnly) {\n const devAppIds = appDbNames.filter(appId => isDevAppID(appId))\n const prodAppIds = appDbNames.filter(appId => !isDevAppID(appId))\n switch (dev) {\n case true:\n return devAppIds\n case false:\n return prodAppIds\n default:\n return appDbNames\n }\n }\n const appPromises = 
appDbNames.map((app: any) =>\n // skip setup otherwise databases could be re-created\n getAppMetadata(app)\n )\n if (appPromises.length === 0) {\n return []\n } else {\n const response = await Promise.allSettled(appPromises)\n const apps = response\n .filter(\n (result: any) =>\n result.status === \"fulfilled\" &&\n result.value?.state !== AppState.INVALID\n )\n .map(({ value }: any) => value)\n if (!all) {\n return apps.filter((app: any) => {\n if (dev) {\n return isDevApp(app)\n }\n return !isDevApp(app)\n })\n } else {\n return apps.map((app: any) => ({\n ...app,\n status: isDevApp(app) ? \"development\" : \"published\",\n }))\n }\n }\n}\n\nexport async function getAppsByIDs(appIds: string[]) {\n const settled = await Promise.allSettled(\n appIds.map(appId => getAppMetadata(appId))\n )\n // have to list the apps which exist, some may have been deleted\n return settled\n .filter(\n promise =>\n promise.status === \"fulfilled\" &&\n (promise.value as DeletedApp).state !== AppState.INVALID\n )\n .map(promise => (promise as PromiseFulfilledResult<App>).value)\n}\n\n/**\n * Utility function for getAllApps but filters to production apps only.\n */\nexport async function getProdAppIDs() {\n const apps = (await getAllApps({ idsOnly: true })) as string[]\n return apps.filter((id: any) => !isDevAppID(id))\n}\n\n/**\n * Utility function for the inverse of above.\n */\nexport async function getDevAppIDs() {\n const apps = (await getAllApps({ idsOnly: true })) as string[]\n return apps.filter((id: any) => isDevAppID(id))\n}\n\nexport function isSameAppID(\n appId1: string | undefined,\n appId2: string | undefined\n) {\n if (appId1 == undefined || appId2 == undefined) {\n return false\n }\n return getProdAppID(appId1) === getProdAppID(appId2)\n}\n\nexport async function dbExists(dbName: any) {\n return doWithDB(\n dbName,\n async (db: Database) => {\n return await db.exists()\n },\n { skip_setup: true }\n )\n}\n\nexport function pagination<T>(\n data: T[],\n pageSize: number,\n {\n paginate,\n property,\n getKey,\n }: {\n paginate: boolean\n property: string\n getKey?: (doc: T) => string | undefined\n } = {\n paginate: true,\n property: \"_id\",\n }\n) {\n if (!paginate) {\n return { data, hasNextPage: false }\n }\n const hasNextPage = data.length > pageSize\n let nextPage = undefined\n if (!getKey) {\n getKey = (doc: any) => (property ? 
doc?.[property] : doc?._id)\n }\n if (hasNextPage) {\n nextPage = getKey(data[pageSize])\n }\n return {\n data: data.slice(0, pageSize),\n hasNextPage,\n nextPage,\n }\n}\n", "import {\n DeprecatedViews,\n DocumentType,\n SEPARATOR,\n StaticDatabases,\n ViewName,\n} from \"../constants\"\nimport { getGlobalDB } from \"../context\"\nimport { doWithDB } from \"./\"\nimport {\n AllDocsResponse,\n Database,\n DatabaseQueryOpts,\n Document,\n DesignDocument,\n DBView,\n} from \"@budibase/types\"\nimport env from \"../environment\"\n\nconst DESIGN_DB = \"_design/database\"\n\nfunction DesignDoc(): DesignDocument {\n return {\n _id: DESIGN_DB,\n // view collation information, read before writing any complex views:\n // https://docs.couchdb.org/en/master/ddocs/views/collation.html#collation-specification\n views: {},\n }\n}\n\nasync function removeDeprecated(db: Database, viewName: ViewName) {\n if (!DeprecatedViews[viewName]) {\n return\n }\n try {\n const designDoc = await db.get<DesignDocument>(DESIGN_DB)\n for (let deprecatedNames of DeprecatedViews[viewName]) {\n delete designDoc.views?.[deprecatedNames]\n }\n await db.put(designDoc)\n } catch (err) {\n // doesn't exist, ignore\n }\n}\n\nexport async function createView(\n db: Database,\n viewJs: string,\n viewName: string\n): Promise<void> {\n let designDoc\n try {\n designDoc = await db.get<DesignDocument>(DESIGN_DB)\n } catch (err) {\n // no design doc, make one\n designDoc = DesignDoc()\n }\n const view: DBView = {\n map: viewJs,\n }\n designDoc.views = {\n ...designDoc.views,\n [viewName]: view,\n }\n try {\n await db.put(designDoc)\n } catch (err: any) {\n if (err.status === 409) {\n return await createView(db, viewJs, viewName)\n } else {\n throw err\n }\n }\n}\n\nexport const createNewUserEmailView = async () => {\n const db = getGlobalDB()\n const viewJs = `function(doc) {\n if (doc._id.startsWith(\"${DocumentType.USER}${SEPARATOR}\")) {\n emit(doc.email.toLowerCase(), doc._id)\n }\n }`\n await createView(db, viewJs, ViewName.USER_BY_EMAIL)\n}\n\nexport const createUserAppView = async () => {\n const db = getGlobalDB()\n const viewJs = `function(doc) {\n if (doc._id.startsWith(\"${DocumentType.USER}${SEPARATOR}\") && doc.roles) {\n for (let prodAppId of Object.keys(doc.roles)) {\n let emitted = prodAppId + \"${SEPARATOR}\" + doc._id\n emit(emitted, null)\n }\n }\n }`\n await createView(db, viewJs, ViewName.USER_BY_APP)\n}\n\nexport const createApiKeyView = async () => {\n const db = getGlobalDB()\n const viewJs = `function(doc) {\n if (doc._id.startsWith(\"${DocumentType.DEV_INFO}\") && doc.apiKey) {\n emit(doc.apiKey, doc.userId)\n }\n }`\n await createView(db, viewJs, ViewName.BY_API_KEY)\n}\n\nexport interface QueryViewOptions {\n arrayResponse?: boolean\n}\n\nexport async function queryViewRaw<T extends Document>(\n viewName: ViewName,\n params: DatabaseQueryOpts,\n db: Database,\n createFunc: any,\n opts?: QueryViewOptions\n): Promise<AllDocsResponse<T>> {\n try {\n const response = await db.query<T>(`database/${viewName}`, params)\n // await to catch error\n return response\n } catch (err: any) {\n const pouchNotFound = err && err.name === \"not_found\"\n const couchNotFound = err && err.status === 404\n if (pouchNotFound || couchNotFound) {\n await removeDeprecated(db, viewName)\n await createFunc()\n return queryViewRaw(viewName, params, db, createFunc, opts)\n } else if (err.status === 409) {\n // can happen when multiple queries occur at once, view couldn't be created\n // other design docs being updated, re-run\n return 
queryViewRaw(viewName, params, db, createFunc, opts)\n } else {\n throw err\n }\n }\n}\n\nexport const queryView = async <T extends Document>(\n viewName: ViewName,\n params: DatabaseQueryOpts,\n db: Database,\n createFunc: any,\n opts?: QueryViewOptions\n): Promise<T[] | T> => {\n const response = await queryViewRaw<T>(viewName, params, db, createFunc, opts)\n const rows = response.rows\n const docs = rows.map(row => (params.include_docs ? row.doc! : row.value))\n\n // if arrayResponse has been requested, always return array regardless of length\n if (opts?.arrayResponse) {\n return docs as T[]\n } else {\n // return the single document if there is only one\n return docs.length <= 1 ? (docs[0] as T) : (docs as T[])\n }\n}\n\n// PLATFORM\n\nasync function createPlatformView(viewJs: string, viewName: ViewName) {\n try {\n await doWithDB(StaticDatabases.PLATFORM_INFO.name, async (db: Database) => {\n await createView(db, viewJs, viewName)\n })\n } catch (e: any) {\n if (e.status === 409 && env.isTest()) {\n // multiple tests can try to initialise platforms views\n // at once - safe to exit on conflict\n return\n }\n throw e\n }\n}\n\nexport const createPlatformAccountEmailView = async () => {\n const viewJs = `function(doc) {\n if (doc._id.startsWith(\"${DocumentType.ACCOUNT_METADATA}${SEPARATOR}\")) {\n emit(doc.email.toLowerCase(), doc._id)\n }\n }`\n await createPlatformView(viewJs, ViewName.ACCOUNT_BY_EMAIL)\n}\n\nexport const createPlatformUserView = async () => {\n const viewJs = `function(doc) {\n if (doc.tenantId) {\n emit(doc._id.toLowerCase(), doc._id)\n }\n\n if (doc.ssoId) {\n emit(doc.ssoId, doc._id)\n }\n }`\n await createPlatformView(viewJs, ViewName.PLATFORM_USERS_LOWERCASE)\n}\n\nexport const queryPlatformView = async <T extends Document>(\n viewName: ViewName,\n params: DatabaseQueryOpts,\n opts?: QueryViewOptions\n): Promise<T[] | T> => {\n const CreateFuncByName: any = {\n [ViewName.ACCOUNT_BY_EMAIL]: createPlatformAccountEmailView,\n [ViewName.PLATFORM_USERS_LOWERCASE]: createPlatformUserView,\n }\n\n return doWithDB(StaticDatabases.PLATFORM_INFO.name, async (db: Database) => {\n const createFn = CreateFuncByName[viewName]\n return queryView(viewName, params, db, createFn, opts)\n })\n}\n\nconst CreateFuncByName: any = {\n [ViewName.USER_BY_EMAIL]: createNewUserEmailView,\n [ViewName.BY_API_KEY]: createApiKeyView,\n [ViewName.USER_BY_APP]: createUserAppView,\n}\n\nexport const queryGlobalView = async <T extends Document>(\n viewName: ViewName,\n params: DatabaseQueryOpts,\n db?: Database,\n opts?: QueryViewOptions\n): Promise<T[] | T | undefined> => {\n // can pass DB in if working with something specific\n if (!db) {\n db = getGlobalDB()\n }\n const createFn = CreateFuncByName[viewName]\n return queryView<T>(viewName, params, db!, createFn, opts)\n}\n\nexport async function queryGlobalViewRaw<T extends Document>(\n viewName: ViewName,\n params: DatabaseQueryOpts,\n opts?: QueryViewOptions\n) {\n const db = getGlobalDB()\n const createFn = CreateFuncByName[viewName]\n return queryViewRaw<T>(viewName, params, db, createFn, opts)\n}\n", "import { getPouchDB, closePouchDB } from \"./couch\"\nimport { DocumentType } from \"../constants\"\n\nclass Replication {\n source: any\n target: any\n replication: any\n\n /**\n *\n * @param source - the DB you want to replicate or rollback to\n * @param target - the DB you want to replicate to, or rollback from\n */\n constructor({ source, target }: any) {\n this.source = getPouchDB(source)\n this.target = getPouchDB(target)\n }\n\n 
close() {\n return Promise.all([closePouchDB(this.source), closePouchDB(this.target)])\n }\n\n promisify(operation: any, opts = {}) {\n return new Promise(resolve => {\n operation(this.target, opts)\n .on(\"denied\", function (err: any) {\n // a document failed to replicate (e.g. due to permissions)\n throw new Error(`Denied: Document failed to replicate ${err}`)\n })\n .on(\"complete\", function (info: any) {\n return resolve(info)\n })\n .on(\"error\", function (err: any) {\n throw new Error(`Replication Error: ${err}`)\n })\n })\n }\n\n /**\n * Two way replication operation, intended to be promise based.\n * @param opts - PouchDB replication options\n */\n sync(opts = {}) {\n this.replication = this.promisify(this.source.sync, opts)\n return this.replication\n }\n\n /**\n * One way replication operation, intended to be promise based.\n * @param opts - PouchDB replication options\n */\n replicate(opts = {}) {\n this.replication = this.promisify(this.source.replicate.to, opts)\n return this.replication\n }\n\n appReplicateOpts() {\n return {\n filter: (doc: any) => {\n if (doc._id && doc._id.startsWith(DocumentType.AUTOMATION_LOG)) {\n return false\n }\n return doc._id !== DocumentType.APP_METADATA\n },\n }\n }\n\n /**\n * Rollback the target DB back to the state of the source DB\n */\n async rollback() {\n await this.target.destroy()\n // Recreate the DB again\n this.target = getPouchDB(this.target.name)\n // take the opportunity to remove deleted tombstones\n await this.replicate()\n }\n\n cancel() {\n this.replication.cancel()\n }\n}\n\nexport default Replication\n", "import fetch from \"node-fetch\"\nimport { getCouchInfo } from \"./couch\"\nimport { SearchFilters, Row, EmptyFilterOption } from \"@budibase/types\"\n\nconst QUERY_START_REGEX = /\\d[0-9]*:/g\n\ninterface SearchResponse<T> {\n rows: T[] | any[]\n bookmark?: string\n totalRows: number\n}\n\ninterface PaginatedSearchResponse<T> extends SearchResponse<T> {\n hasNextPage: boolean\n}\n\nexport type SearchParams<T> = {\n tableId?: string\n sort?: string\n sortOrder?: string\n sortType?: string\n limit?: number\n bookmark?: string\n version?: string\n indexer?: () => Promise<any>\n disableEscaping?: boolean\n rows?: T | Row[]\n}\n\nexport function removeKeyNumbering(key: any): string {\n if (typeof key === \"string\" && key.match(QUERY_START_REGEX) != null) {\n const parts = key.split(\":\")\n // remove the number\n parts.shift()\n return parts.join(\":\")\n } else {\n return key\n }\n}\n\n/**\n * Class to build lucene query URLs.\n * Optionally takes a base lucene query object.\n */\nexport class QueryBuilder<T> {\n #dbName: string\n #index: string\n #query: SearchFilters\n #limit: number\n #sort?: string\n #bookmark?: string\n #sortOrder: string\n #sortType: string\n #includeDocs: boolean\n #version?: string\n #indexBuilder?: () => Promise<any>\n #noEscaping = false\n #skip?: number\n\n static readonly maxLimit = 200\n\n constructor(dbName: string, index: string, base?: SearchFilters) {\n this.#dbName = dbName\n this.#index = index\n this.#query = {\n allOr: false,\n onEmptyFilter: EmptyFilterOption.RETURN_ALL,\n string: {},\n fuzzy: {},\n range: {},\n equal: {},\n notEqual: {},\n empty: {},\n notEmpty: {},\n oneOf: {},\n contains: {},\n notContains: {},\n containsAny: {},\n ...base,\n }\n this.#limit = 50\n this.#sortOrder = \"ascending\"\n this.#sortType = \"string\"\n this.#includeDocs = true\n }\n\n disableEscaping() {\n this.#noEscaping = true\n return this\n }\n\n setIndexBuilder(builderFn: () => Promise<any>) {\n 
this.#indexBuilder = builderFn\n return this\n }\n\n setVersion(version?: string) {\n if (version != null) {\n this.#version = version\n }\n return this\n }\n\n setTable(tableId: string) {\n this.#query.equal!.tableId = tableId\n return this\n }\n\n setLimit(limit?: number) {\n if (limit != null) {\n this.#limit = limit\n }\n return this\n }\n\n setSort(sort?: string) {\n if (sort != null) {\n this.#sort = sort\n }\n return this\n }\n\n setSortOrder(sortOrder?: string) {\n if (sortOrder != null) {\n this.#sortOrder = sortOrder\n }\n return this\n }\n\n setSortType(sortType?: string) {\n if (sortType != null) {\n this.#sortType = sortType\n }\n return this\n }\n\n setBookmark(bookmark?: string) {\n if (bookmark != null) {\n this.#bookmark = bookmark\n }\n return this\n }\n\n setSkip(skip: number | undefined) {\n this.#skip = skip\n return this\n }\n\n excludeDocs() {\n this.#includeDocs = false\n return this\n }\n\n includeDocs() {\n this.#includeDocs = true\n return this\n }\n\n addString(key: string, partial: string) {\n this.#query.string![key] = partial\n return this\n }\n\n addFuzzy(key: string, fuzzy: string) {\n this.#query.fuzzy![key] = fuzzy\n return this\n }\n\n addRange(key: string, low: string | number, high: string | number) {\n this.#query.range![key] = {\n low,\n high,\n }\n return this\n }\n\n addEqual(key: string, value: any) {\n this.#query.equal![key] = value\n return this\n }\n\n addNotEqual(key: string, value: any) {\n this.#query.notEqual![key] = value\n return this\n }\n\n addEmpty(key: string, value: any) {\n this.#query.empty![key] = value\n return this\n }\n\n addNotEmpty(key: string, value: any) {\n this.#query.notEmpty![key] = value\n return this\n }\n\n addOneOf(key: string, value: any) {\n this.#query.oneOf![key] = value\n return this\n }\n\n addContains(key: string, value: any) {\n this.#query.contains![key] = value\n return this\n }\n\n addNotContains(key: string, value: any) {\n this.#query.notContains![key] = value\n return this\n }\n\n addContainsAny(key: string, value: any) {\n this.#query.containsAny![key] = value\n return this\n }\n\n setAllOr() {\n this.#query.allOr = true\n }\n\n setOnEmptyFilter(value: EmptyFilterOption) {\n this.#query.onEmptyFilter = value\n }\n\n handleSpaces(input: string) {\n if (this.#noEscaping) {\n return input\n } else {\n return input.replace(/ /g, \"_\")\n }\n }\n\n /**\n * Preprocesses a value before going into a lucene search.\n * Transforms strings to lowercase and wraps strings and bools in quotes.\n * @param value The value to process\n * @param options The preprocess options\n * @returns {string|*}\n */\n preprocess(value: any, { escape, lowercase, wrap, type }: any = {}) {\n const hasVersion = !!this.#version\n // Determine if type needs wrapped\n const originalType = typeof value\n // Convert to lowercase\n if (value && lowercase) {\n value = value.toLowerCase ? value.toLowerCase() : value\n }\n // Escape characters\n if (!this.#noEscaping && escape && originalType === \"string\") {\n value = `${value}`.replace(/[ \\/#+\\-&|!(){}\\]^\"~*?:\\\\]/g, \"\\\\$&\")\n }\n\n // Wrap in quotes\n if (originalType === \"string\" && !isNaN(value) && !type) {\n value = `\"${value}\"`\n } else if (hasVersion && wrap) {\n value = originalType === \"number\" ? 
value : `\"${value}\"`\n }\n return value\n }\n\n isMultiCondition() {\n let count = 0\n for (let filters of Object.values(this.#query)) {\n // not contains is one massive filter in allOr mode\n if (typeof filters === \"object\") {\n count += Object.keys(filters).length\n }\n }\n return count > 1\n }\n\n compressFilters(filters: Record<string, string[]>) {\n const compressed: typeof filters = {}\n for (let key of Object.keys(filters)) {\n const finalKey = removeKeyNumbering(key)\n if (compressed[finalKey]) {\n compressed[finalKey] = compressed[finalKey].concat(filters[key])\n } else {\n compressed[finalKey] = filters[key]\n }\n }\n // add prefixes back\n const final: typeof filters = {}\n let count = 1\n for (let [key, value] of Object.entries(compressed)) {\n final[`${count++}:${key}`] = value\n }\n return final\n }\n\n buildSearchQuery() {\n const builder = this\n let allOr = this.#query && this.#query.allOr\n let query = allOr ? \"\" : \"*:*\"\n let allFiltersEmpty = true\n const allPreProcessingOpts = { escape: true, lowercase: true, wrap: true }\n let tableId: string = \"\"\n if (this.#query.equal!.tableId) {\n tableId = this.#query.equal!.tableId\n delete this.#query.equal!.tableId\n }\n\n const equal = (key: string, value: any) => {\n // 0 evaluates to false, which means we would return all rows if we don't check it\n if (!value && value !== 0) {\n return null\n }\n return `${key}:${builder.preprocess(value, allPreProcessingOpts)}`\n }\n\n const contains = (key: string, value: any, mode = \"AND\") => {\n if (!value || (Array.isArray(value) && value.length === 0)) {\n return null\n }\n if (!Array.isArray(value)) {\n return `${key}:${value}`\n }\n let statement = `${builder.preprocess(value[0], { escape: true })}`\n for (let i = 1; i < value.length; i++) {\n statement += ` ${mode} ${builder.preprocess(value[i], {\n escape: true,\n })}`\n }\n return `${key}:(${statement})`\n }\n\n const fuzzy = (key: string, value: any) => {\n if (!value) {\n return null\n }\n value = builder.preprocess(value, {\n escape: true,\n lowercase: true,\n type: \"fuzzy\",\n })\n return `${key}:/.*${value}.*/`\n }\n\n const notContains = (key: string, value: any) => {\n const allPrefix = allOr ? \"*:* AND \" : \"\"\n const mode = allOr ? \"AND\" : undefined\n return allPrefix + \"NOT \" + contains(key, value, mode)\n }\n\n const containsAny = (key: string, value: any) => {\n return contains(key, value, \"OR\")\n }\n\n const oneOf = (key: string, value: any) => {\n if (!value) {\n return `*:*`\n }\n if (!Array.isArray(value)) {\n if (typeof value === \"string\") {\n value = value.split(\",\")\n } else {\n return \"\"\n }\n }\n let orStatement = `${builder.preprocess(value[0], allPreProcessingOpts)}`\n for (let i = 1; i < value.length; i++) {\n orStatement += ` OR ${builder.preprocess(\n value[i],\n allPreProcessingOpts\n )}`\n }\n return `${key}:(${orStatement})`\n }\n\n function build(\n structure: any,\n queryFn: (key: string, value: any) => string | null,\n opts?: { returnBuilt?: boolean; mode?: string }\n ) {\n let built = \"\"\n for (let [key, value] of Object.entries(structure)) {\n // check for new format - remove numbering if needed\n key = removeKeyNumbering(key)\n key = builder.preprocess(builder.handleSpaces(key), {\n escape: true,\n })\n let expression = queryFn(key, value)\n if (expression == null) {\n continue\n }\n if (built.length > 0 || query.length > 0) {\n const mode = opts?.mode ? opts.mode : allOr ? 
\"OR\" : \"AND\"\n built += ` ${mode} `\n }\n built += expression\n if (\n (typeof value !== \"string\" && value != null) ||\n (typeof value === \"string\" && value !== tableId && value !== \"\")\n ) {\n allFiltersEmpty = false\n }\n }\n if (opts?.returnBuilt) {\n return built\n } else {\n query += built\n }\n }\n\n // Construct the actual lucene search query string from JSON structure\n if (this.#query.string) {\n build(this.#query.string, (key: string, value: any) => {\n if (!value) {\n return null\n }\n value = builder.preprocess(value, {\n escape: true,\n lowercase: true,\n type: \"string\",\n })\n return `${key}:${value}*`\n })\n }\n if (this.#query.range) {\n build(this.#query.range, (key: string, value: any) => {\n if (!value) {\n return null\n }\n if (value.low == null || value.low === \"\") {\n return null\n }\n if (value.high == null || value.high === \"\") {\n return null\n }\n const low = builder.preprocess(value.low, allPreProcessingOpts)\n const high = builder.preprocess(value.high, allPreProcessingOpts)\n return `${key}:[${low} TO ${high}]`\n })\n }\n if (this.#query.fuzzy) {\n build(this.#query.fuzzy, fuzzy)\n }\n if (this.#query.equal) {\n build(this.#query.equal, equal)\n }\n if (this.#query.notEqual) {\n build(this.#query.notEqual, (key: string, value: any) => {\n if (!value) {\n return null\n }\n if (typeof value === \"boolean\") {\n return `(*:* AND !${key}:${value})`\n }\n return `!${key}:${builder.preprocess(value, allPreProcessingOpts)}`\n })\n }\n if (this.#query.empty) {\n build(this.#query.empty, (key: string) => `(*:* -${key}:[\"\" TO *])`)\n }\n if (this.#query.notEmpty) {\n build(this.#query.notEmpty, (key: string) => `${key}:[\"\" TO *]`)\n }\n if (this.#query.oneOf) {\n build(this.#query.oneOf, oneOf)\n }\n if (this.#query.contains) {\n build(this.#query.contains, contains)\n }\n if (this.#query.notContains) {\n build(this.compressFilters(this.#query.notContains), notContains)\n }\n if (this.#query.containsAny) {\n build(this.#query.containsAny, containsAny)\n }\n // make sure table ID is always added as an AND\n if (tableId) {\n query = this.isMultiCondition() ? `(${query})` : query\n allOr = false\n build({ tableId }, equal)\n }\n if (allFiltersEmpty) {\n if (this.#query.onEmptyFilter === EmptyFilterOption.RETURN_NONE) {\n return \"\"\n } else if (this.#query?.allOr) {\n return query.replace(\"()\", \"(*:*)\")\n }\n }\n return query\n }\n\n buildSearchBody() {\n let body: any = {\n q: this.buildSearchQuery(),\n limit: Math.min(this.#limit, QueryBuilder.maxLimit),\n include_docs: this.#includeDocs,\n }\n if (this.#bookmark) {\n body.bookmark = this.#bookmark\n }\n if (this.#sort) {\n const order = this.#sortOrder === \"descending\" ? 
\"-\" : \"\"\n const type = `<${this.#sortType}>`\n body.sort = `${order}${this.handleSpaces(this.#sort)}${type}`\n }\n return body\n }\n\n async run() {\n if (this.#skip) {\n await this.#skipItems(this.#skip)\n }\n return await this.#execute()\n }\n\n /**\n * Lucene queries do not support pagination and use bookmarks instead.\n * For the given builder, walk through pages using bookmarks until the desired\n * page has been met.\n */\n async #skipItems(skip: number) {\n // Lucene does not support pagination.\n // Handle pagination by finding the right bookmark\n const prevIncludeDocs = this.#includeDocs\n const prevLimit = this.#limit\n\n this.excludeDocs()\n let skipRemaining = skip\n let iterationFetched = 0\n do {\n const toSkip = Math.min(QueryBuilder.maxLimit, skipRemaining)\n this.setLimit(toSkip)\n const { bookmark, rows } = await this.#execute()\n this.setBookmark(bookmark)\n iterationFetched = rows.length\n skipRemaining -= rows.length\n } while (skipRemaining > 0 && iterationFetched > 0)\n\n this.#includeDocs = prevIncludeDocs\n this.#limit = prevLimit\n }\n\n async #execute() {\n const { url, cookie } = getCouchInfo()\n const fullPath = `${url}/${this.#dbName}/_design/database/_search/${\n this.#index\n }`\n const body = this.buildSearchBody()\n try {\n return await runQuery<T>(fullPath, body, cookie)\n } catch (err: any) {\n if (err.status === 404 && this.#indexBuilder) {\n await this.#indexBuilder()\n return await runQuery<T>(fullPath, body, cookie)\n } else {\n throw err\n }\n }\n }\n}\n\n/**\n * Executes a lucene search query.\n * @param url The query URL\n * @param body The request body defining search criteria\n * @param cookie The auth cookie for CouchDB\n * @returns {Promise<{rows: []}>}\n */\nasync function runQuery<T>(\n url: string,\n body: any,\n cookie: string\n): Promise<SearchResponse<T>> {\n const response = await fetch(url, {\n body: JSON.stringify(body),\n method: \"POST\",\n headers: {\n Authorization: cookie,\n },\n })\n\n if (response.status === 404) {\n throw response\n }\n const json = await response.json()\n\n let output: SearchResponse<T> = {\n rows: [],\n totalRows: 0,\n }\n if (json.rows != null && json.rows.length > 0) {\n output.rows = json.rows.map((row: any) => row.doc)\n }\n if (json.bookmark) {\n output.bookmark = json.bookmark\n }\n if (json.total_rows) {\n output.totalRows = json.total_rows\n }\n return output\n}\n\n/**\n * Gets round the fixed limit of 200 results from a query by fetching as many\n * pages as required and concatenating the results. This recursively operates\n * until enough results have been found.\n * @param dbName Which database to run a lucene query on\n * @param index Which search index to utilise\n * @param query The JSON query structure\n * @param params The search params including:\n * tableId {string} The table ID to search\n * sort {string} The sort column\n * sortOrder {string} The sort order (\"ascending\" or \"descending\")\n * sortType {string} Whether to treat sortable values as strings or\n * numbers. 
(\"string\" or \"number\")\n * limit {number} The number of results to fetch\n * bookmark {string|null} Current bookmark in the recursive search\n * rows {array|null} Current results in the recursive search\n * @returns {Promise<*[]|*>}\n */\nasync function recursiveSearch<T>(\n dbName: string,\n index: string,\n query: any,\n params: any\n): Promise<any> {\n const bookmark = params.bookmark\n const rows = params.rows || []\n if (rows.length >= params.limit) {\n return rows\n }\n let pageSize = QueryBuilder.maxLimit\n if (rows.length > params.limit - QueryBuilder.maxLimit) {\n pageSize = params.limit - rows.length\n }\n const page = await new QueryBuilder<T>(dbName, index, query)\n .setVersion(params.version)\n .setTable(params.tableId)\n .setBookmark(bookmark)\n .setLimit(pageSize)\n .setSort(params.sort)\n .setSortOrder(params.sortOrder)\n .setSortType(params.sortType)\n .run()\n if (!page.rows.length) {\n return rows\n }\n if (page.rows.length < QueryBuilder.maxLimit) {\n return [...rows, ...page.rows]\n }\n const newParams = {\n ...params,\n bookmark: page.bookmark,\n rows: [...rows, ...page.rows],\n }\n return await recursiveSearch(dbName, index, query, newParams)\n}\n\n/**\n * Performs a paginated search. A bookmark will be returned to allow the next\n * page to be fetched. There is a max limit off 200 results per page in a\n * paginated search.\n * @param dbName Which database to run a lucene query on\n * @param index Which search index to utilise\n * @param query The JSON query structure\n * @param params The search params including:\n * tableId {string} The table ID to search\n * sort {string} The sort column\n * sortOrder {string} The sort order (\"ascending\" or \"descending\")\n * sortType {string} Whether to treat sortable values as strings or\n * numbers. (\"string\" or \"number\")\n * limit {number} The desired page size\n * bookmark {string} The bookmark to resume from\n * @returns {Promise<{hasNextPage: boolean, rows: *[]}>}\n */\nexport async function paginatedSearch<T>(\n dbName: string,\n index: string,\n query: SearchFilters,\n params: SearchParams<T>\n) {\n let limit = params.limit\n if (limit == null || isNaN(limit) || limit < 0) {\n limit = 50\n }\n limit = Math.min(limit, QueryBuilder.maxLimit)\n const search = new QueryBuilder<T>(dbName, index, query)\n if (params.version) {\n search.setVersion(params.version)\n }\n if (params.tableId) {\n search.setTable(params.tableId)\n }\n if (params.sort) {\n search\n .setSort(params.sort)\n .setSortOrder(params.sortOrder)\n .setSortType(params.sortType)\n }\n if (params.indexer) {\n search.setIndexBuilder(params.indexer)\n }\n if (params.disableEscaping) {\n search.disableEscaping()\n }\n const searchResults = await search\n .setBookmark(params.bookmark)\n .setLimit(limit)\n .run()\n\n // Try fetching 1 row in the next page to see if another page of results\n // exists or not\n search.setBookmark(searchResults.bookmark).setLimit(1)\n if (params.tableId) {\n search.setTable(params.tableId)\n }\n const nextResults = await search.run()\n\n return {\n ...searchResults,\n hasNextPage: nextResults.rows && nextResults.rows.length > 0,\n }\n}\n\n/**\n * Performs a full search, fetching multiple pages if required to return the\n * desired amount of results. 
There is a limit of 1000 results to avoid\n * heavy performance hits, and to avoid client components breaking from\n * handling too much data.\n * @param dbName Which database to run a lucene query on\n * @param index Which search index to utilise\n * @param query The JSON query structure\n * @param params The search params including:\n * tableId {string} The table ID to search\n * sort {string} The sort column\n * sortOrder {string} The sort order (\"ascending\" or \"descending\")\n * sortType {string} Whether to treat sortable values as strings or\n * numbers. (\"string\" or \"number\")\n * limit {number} The desired number of results\n * @returns {Promise<{rows: *}>}\n */\nexport async function fullSearch<T>(\n dbName: string,\n index: string,\n query: SearchFilters,\n params: SearchParams<T>\n) {\n let limit = params.limit\n if (limit == null || isNaN(limit) || limit < 0) {\n limit = 1000\n }\n params.limit = Math.min(limit, 1000)\n const rows = await recursiveSearch<T>(dbName, index, query, params)\n return { rows }\n}\n", "import { User, SearchIndex } from \"@budibase/types\"\nimport { getGlobalDB } from \"../../context\"\n\nexport async function createUserIndex() {\n const db = getGlobalDB()\n let designDoc\n try {\n designDoc = await db.get<any>(\"_design/database\")\n } catch (err: any) {\n if (err.status === 404) {\n designDoc = { _id: \"_design/database\" }\n }\n }\n\n const fn = function (user: User) {\n if (user._id && !user._id.startsWith(\"us_\")) {\n return\n }\n const ignoredFields = [\n \"_id\",\n \"_rev\",\n \"password\",\n \"account\",\n \"license\",\n \"budibaseAccess\",\n \"accountPortalAccess\",\n \"csrfToken\",\n ]\n\n function idx(input: Record<string, any>, prev?: string) {\n for (let key of Object.keys(input)) {\n if (ignoredFields.includes(key)) {\n continue\n }\n let idxKey = prev != null ? 
`${prev}.${key}` : key\n if (typeof input[key] === \"string\") {\n // eslint-disable-next-line no-undef\n // @ts-ignore\n index(idxKey, input[key].toLowerCase(), { facet: true })\n } else if (typeof input[key] !== \"object\") {\n // eslint-disable-next-line no-undef\n // @ts-ignore\n index(idxKey, input[key], { facet: true })\n } else {\n idx(input[key], idxKey)\n }\n }\n }\n idx(user)\n }\n\n designDoc.indexes = {\n [SearchIndex.USER]: {\n index: fn.toString(),\n analyzer: {\n default: \"keyword\",\n name: \"perfield\",\n },\n },\n }\n await db.put(designDoc)\n}\n", "export * from \"./searchIndexes\"\n", "export function checkErrorCode(error: any, code: number) {\n const stringCode = code.toString()\n if (typeof error === \"object\") {\n return error.status === code || error.message?.includes(stringCode)\n } else if (typeof error === \"number\") {\n return error === code\n } else if (typeof error === \"string\") {\n return error.includes(stringCode)\n }\n}\n\nexport function isDocumentConflictError(error: any) {\n return checkErrorCode(error, 409)\n}\n", "export * from \"./couch\"\nexport * from \"./db\"\nexport * from \"./utils\"\nexport * from \"./views\"\nexport * from \"../docIds/conversions\"\nexport { default as Replication } from \"./Replication\"\n// exports to support old export structure\nexport * from \"../constants/db\"\nexport { getGlobalDBName, baseGlobalDBName } from \"../context\"\nexport * from \"./lucene\"\nexport * as searchIndexes from \"./searchIndexes\"\nexport * from \"./errors\"\n", "const sanitize = require(\"sanitize-s3-objectkey\")\n\nimport AWS from \"aws-sdk\"\nimport stream, { Readable } from \"stream\"\nimport fetch from \"node-fetch\"\nimport tar from \"tar-fs\"\nimport zlib from \"zlib\"\nimport { promisify } from \"util\"\nimport { join } from \"path\"\nimport fs from \"fs\"\nimport env from \"../environment\"\nimport { budibaseTempDir } from \"./utils\"\nimport { v4 } from \"uuid\"\nimport { APP_PREFIX, APP_DEV_PREFIX } from \"../db\"\n\nconst streamPipeline = promisify(stream.pipeline)\n// use this as a temporary store of buckets that are being created\nconst STATE = {\n bucketCreationPromises: {},\n}\n\ntype ListParams = {\n ContinuationToken?: string\n}\n\ntype UploadParams = {\n bucket: string\n filename: string\n path: string\n type?: string | null\n // can be undefined, we will remove it\n metadata?: {\n [key: string]: string | undefined\n }\n}\n\nconst CONTENT_TYPE_MAP: any = {\n txt: \"text/plain\",\n html: \"text/html\",\n css: \"text/css\",\n js: \"application/javascript\",\n json: \"application/json\",\n gz: \"application/gzip\",\n}\n\nconst STRING_CONTENT_TYPES = [\n CONTENT_TYPE_MAP.html,\n CONTENT_TYPE_MAP.css,\n CONTENT_TYPE_MAP.js,\n CONTENT_TYPE_MAP.json,\n]\n\n// does normal sanitization and then swaps dev apps to apps\nexport function sanitizeKey(input: string) {\n return sanitize(sanitizeBucket(input)).replace(/\\\\/g, \"/\")\n}\n\n// simply handles the dev app to app conversion\nexport function sanitizeBucket(input: string) {\n return input.replace(new RegExp(APP_DEV_PREFIX, \"g\"), APP_PREFIX)\n}\n\n/**\n * Gets a connection to the object store using the S3 SDK.\n * @param bucket the name of the bucket which blobs will be uploaded/retrieved from.\n * @param opts configuration for the object store.\n * @return an S3 object store object, check S3 Nodejs SDK for usage.\n * @constructor\n */\nexport function ObjectStore(\n bucket: string,\n opts: { presigning: boolean } = { presigning: false }\n) {\n const config: any = {\n 
s3ForcePathStyle: true,\n signatureVersion: \"v4\",\n apiVersion: \"2006-03-01\",\n accessKeyId: env.MINIO_ACCESS_KEY,\n secretAccessKey: env.MINIO_SECRET_KEY,\n region: env.AWS_REGION,\n }\n if (bucket) {\n config.params = {\n Bucket: sanitizeBucket(bucket),\n }\n }\n\n // custom S3 is in use i.e. minio\n if (env.MINIO_URL) {\n if (opts.presigning && env.MINIO_ENABLED) {\n // IMPORTANT: Signed urls will inspect the host header of the request.\n // Normally a signed url will need to be generated with a specified host in mind.\n // To support dynamic hosts, e.g. some unknown self-hosted installation url,\n // use a predefined host. The host 'minio-service' is also forwarded to minio requests via nginx\n config.endpoint = \"minio-service\"\n } else {\n config.endpoint = env.MINIO_URL\n }\n }\n\n return new AWS.S3(config)\n}\n\n/**\n * Given an object store and a bucket name this will make sure the bucket exists,\n * if it does not exist then it will create it.\n */\nexport async function makeSureBucketExists(client: any, bucketName: string) {\n bucketName = sanitizeBucket(bucketName)\n try {\n await client\n .headBucket({\n Bucket: bucketName,\n })\n .promise()\n } catch (err: any) {\n const promises: any = STATE.bucketCreationPromises\n const doesntExist = err.statusCode === 404,\n noAccess = err.statusCode === 403\n if (promises[bucketName]) {\n await promises[bucketName]\n } else if (doesntExist || noAccess) {\n if (doesntExist) {\n // bucket doesn't exist create it\n promises[bucketName] = client\n .createBucket({\n Bucket: bucketName,\n })\n .promise()\n await promises[bucketName]\n delete promises[bucketName]\n }\n } else {\n throw new Error(\"Unable to write to object store bucket.\")\n }\n }\n}\n\n/**\n * Uploads the contents of a file given the required parameters, useful when\n * temp files in use (for example file uploaded as an attachment).\n */\nexport async function upload({\n bucket: bucketName,\n filename,\n path,\n type,\n metadata,\n}: UploadParams) {\n const extension = filename.split(\".\").pop()\n const fileBytes = fs.readFileSync(path)\n\n const objectStore = ObjectStore(bucketName)\n await makeSureBucketExists(objectStore, bucketName)\n\n let contentType = type\n if (!contentType) {\n contentType = extension\n ? 
CONTENT_TYPE_MAP[extension.toLowerCase()]\n : CONTENT_TYPE_MAP.txt\n }\n const config: any = {\n // windows file paths need to be converted to forward slashes for s3\n Key: sanitizeKey(filename),\n Body: fileBytes,\n ContentType: contentType,\n }\n if (metadata && typeof metadata === \"object\") {\n // remove any nullish keys from the metadata object, as these may be considered invalid\n for (let key of Object.keys(metadata)) {\n if (!metadata[key] || typeof metadata[key] !== \"string\") {\n delete metadata[key]\n }\n }\n config.Metadata = metadata\n }\n return objectStore.upload(config).promise()\n}\n\n/**\n * Similar to the upload function but can be used to send a file stream\n * through to the object store.\n */\nexport async function streamUpload(\n bucketName: string,\n filename: string,\n stream: any,\n extra = {}\n) {\n const objectStore = ObjectStore(bucketName)\n await makeSureBucketExists(objectStore, bucketName)\n\n // Set content type for certain known extensions\n if (filename?.endsWith(\".js\")) {\n extra = {\n ...extra,\n ContentType: \"application/javascript\",\n }\n } else if (filename?.endsWith(\".svg\")) {\n extra = {\n ...extra,\n ContentType: \"image\",\n }\n }\n\n const params = {\n Bucket: sanitizeBucket(bucketName),\n Key: sanitizeKey(filename),\n Body: stream,\n ...extra,\n }\n return objectStore.upload(params).promise()\n}\n\n/**\n * retrieves the contents of a file from the object store, if it is a known content type it\n * will be converted, otherwise it will be returned as a buffer stream.\n */\nexport async function retrieve(bucketName: string, filepath: string) {\n const objectStore = ObjectStore(bucketName)\n const params = {\n Bucket: sanitizeBucket(bucketName),\n Key: sanitizeKey(filepath),\n }\n const response: any = await objectStore.getObject(params).promise()\n // currently these are all strings\n if (STRING_CONTENT_TYPES.includes(response.ContentType)) {\n return response.Body.toString(\"utf8\")\n } else {\n return response.Body\n }\n}\n\nexport async function listAllObjects(bucketName: string, path: string) {\n const objectStore = ObjectStore(bucketName)\n const list = (params: ListParams = {}) => {\n return objectStore\n .listObjectsV2({\n ...params,\n Bucket: sanitizeBucket(bucketName),\n Prefix: sanitizeKey(path),\n })\n .promise()\n }\n let isTruncated = false,\n token,\n objects: AWS.S3.Types.Object[] = []\n do {\n let params: ListParams = {}\n if (token) {\n params.ContinuationToken = token\n }\n const response = await list(params)\n if (response.Contents) {\n objects = objects.concat(response.Contents)\n }\n isTruncated = !!response.IsTruncated\n } while (isTruncated)\n return objects\n}\n\n/**\n * Generate a presigned url with a default TTL of 1 hour\n */\nexport function getPresignedUrl(\n bucketName: string,\n key: string,\n durationSeconds: number = 3600\n) {\n const objectStore = ObjectStore(bucketName, { presigning: true })\n const params = {\n Bucket: sanitizeBucket(bucketName),\n Key: sanitizeKey(key),\n Expires: durationSeconds,\n }\n const url = objectStore.getSignedUrl(\"getObject\", params)\n\n if (!env.MINIO_ENABLED) {\n // return the full URL to the client\n return url\n } else {\n // return the path only to the client\n // use the presigned url route to ensure the static\n // hostname will be used in the request\n const signedUrl = new URL(url)\n const path = signedUrl.pathname\n const query = signedUrl.search\n return `/files/signed${path}${query}`\n }\n}\n\n/**\n * Same as retrieval function but puts to a temporary file.\n 
*/\nexport async function retrieveToTmp(bucketName: string, filepath: string) {\n bucketName = sanitizeBucket(bucketName)\n filepath = sanitizeKey(filepath)\n const data = await retrieve(bucketName, filepath)\n const outputPath = join(budibaseTempDir(), v4())\n fs.writeFileSync(outputPath, data)\n return outputPath\n}\n\nexport async function retrieveDirectory(bucketName: string, path: string) {\n let writePath = join(budibaseTempDir(), v4())\n fs.mkdirSync(writePath)\n const objects = await listAllObjects(bucketName, path)\n let streams = await Promise.all(\n objects.map(obj => getReadStream(bucketName, obj.Key!))\n )\n let count = 0\n const writePromises: Promise<Error>[] = []\n for (let obj of objects) {\n const filename = obj.Key!\n const stream = streams[count++]\n const possiblePath = filename.split(\"/\")\n const dirs = possiblePath.slice(0, possiblePath.length - 1)\n const possibleDir = join(writePath, ...dirs)\n if (possiblePath.length > 1 && !fs.existsSync(possibleDir)) {\n fs.mkdirSync(possibleDir, { recursive: true })\n }\n const writeStream = fs.createWriteStream(join(writePath, ...possiblePath), {\n mode: 0o644,\n })\n stream.pipe(writeStream)\n writePromises.push(\n new Promise((resolve, reject) => {\n stream.on(\"finish\", resolve)\n stream.on(\"error\", reject)\n writeStream.on(\"error\", reject)\n })\n )\n }\n await Promise.all(writePromises)\n return writePath\n}\n\n/**\n * Delete a single file.\n */\nexport async function deleteFile(bucketName: string, filepath: string) {\n const objectStore = ObjectStore(bucketName)\n await makeSureBucketExists(objectStore, bucketName)\n const params = {\n Bucket: bucketName,\n Key: sanitizeKey(filepath),\n }\n return objectStore.deleteObject(params).promise()\n}\n\nexport async function deleteFiles(bucketName: string, filepaths: string[]) {\n const objectStore = ObjectStore(bucketName)\n await makeSureBucketExists(objectStore, bucketName)\n const params = {\n Bucket: bucketName,\n Delete: {\n Objects: filepaths.map((path: any) => ({ Key: sanitizeKey(path) })),\n },\n }\n return objectStore.deleteObjects(params).promise()\n}\n\n/**\n * Delete a path, including everything within.\n */\nexport async function deleteFolder(\n bucketName: string,\n folder: string\n): Promise<any> {\n bucketName = sanitizeBucket(bucketName)\n folder = sanitizeKey(folder)\n const client = ObjectStore(bucketName)\n const listParams = {\n Bucket: bucketName,\n Prefix: folder,\n }\n\n const existingObjectsResponse = await client.listObjects(listParams).promise()\n if (existingObjectsResponse.Contents?.length === 0) {\n return\n }\n const deleteParams: any = {\n Bucket: bucketName,\n Delete: {\n Objects: [],\n },\n }\n\n existingObjectsResponse.Contents?.forEach((content: any) => {\n deleteParams.Delete.Objects.push({ Key: content.Key })\n })\n\n const deleteResponse = await client.deleteObjects(deleteParams).promise()\n // can only empty 1000 items at once\n if (deleteResponse.Deleted?.length === 1000) {\n return deleteFolder(bucketName, folder)\n }\n}\n\nexport async function uploadDirectory(\n bucketName: string,\n localPath: string,\n bucketPath: string\n) {\n bucketName = sanitizeBucket(bucketName)\n let uploads = []\n const files = fs.readdirSync(localPath, { withFileTypes: true })\n for (let file of files) {\n const path = sanitizeKey(join(bucketPath, file.name))\n const local = join(localPath, file.name)\n if (file.isDirectory()) {\n uploads.push(uploadDirectory(bucketName, local, path))\n } else {\n uploads.push(streamUpload(bucketName, path, 
fs.createReadStream(local)))\n }\n }\n await Promise.all(uploads)\n return files\n}\n\nexport async function downloadTarballDirect(\n url: string,\n path: string,\n headers = {}\n) {\n path = sanitizeKey(path)\n const response = await fetch(url, { headers })\n if (!response.ok) {\n throw new Error(`unexpected response ${response.statusText}`)\n }\n\n await streamPipeline(response.body, zlib.createUnzip(), tar.extract(path))\n}\n\nexport async function downloadTarball(\n url: string,\n bucketName: string,\n path: string\n) {\n bucketName = sanitizeBucket(bucketName)\n path = sanitizeKey(path)\n const response = await fetch(url)\n if (!response.ok) {\n throw new Error(`unexpected response ${response.statusText}`)\n }\n\n const tmpPath = join(budibaseTempDir(), path)\n await streamPipeline(response.body, zlib.createUnzip(), tar.extract(tmpPath))\n if (!env.isTest() && env.SELF_HOSTED) {\n await uploadDirectory(bucketName, tmpPath, path)\n }\n // return the temporary path incase there is a use for it\n return tmpPath\n}\n\nexport async function getReadStream(\n bucketName: string,\n path: string\n): Promise<Readable> {\n bucketName = sanitizeBucket(bucketName)\n path = sanitizeKey(path)\n const client = ObjectStore(bucketName)\n const params = {\n Bucket: bucketName,\n Key: path,\n }\n return client.getObject(params).createReadStream()\n}\n", "import env from \"../environment\"\nimport * as cfsign from \"aws-cloudfront-sign\"\n\nlet PRIVATE_KEY: string | undefined\n\nfunction getPrivateKey() {\n if (!env.CLOUDFRONT_PRIVATE_KEY_64) {\n throw new Error(\"CLOUDFRONT_PRIVATE_KEY_64 is not set\")\n }\n\n if (PRIVATE_KEY) {\n return PRIVATE_KEY\n }\n\n PRIVATE_KEY = Buffer.from(env.CLOUDFRONT_PRIVATE_KEY_64, \"base64\").toString(\n \"utf-8\"\n )\n\n return PRIVATE_KEY\n}\n\nconst getCloudfrontSignParams = () => {\n return {\n keypairId: env.CLOUDFRONT_PUBLIC_KEY_ID!,\n privateKeyString: getPrivateKey(),\n expireTime: new Date().getTime() + 1000 * 60 * 60, // 1 hour\n }\n}\n\nexport const getPresignedUrl = (s3Key: string) => {\n const url = getUrl(s3Key)\n return cfsign.getSignedUrl(url, getCloudfrontSignParams())\n}\n\nexport const getUrl = (s3Key: string) => {\n let prefix = \"/\"\n if (s3Key.startsWith(\"/\")) {\n prefix = \"\"\n }\n return `${env.CLOUDFRONT_CDN}${prefix}${s3Key}`\n}\n", "import env from \"../../environment\"\nimport * as objectStore from \"../objectStore\"\nimport * as cloudfront from \"../cloudfront\"\nimport qs from \"querystring\"\nimport { DEFAULT_TENANT_ID, getTenantId } from \"../../context\"\n\nexport function clientLibraryPath(appId: string) {\n return `${objectStore.sanitizeKey(appId)}/budibase-client.js`\n}\n\n/**\n * Previously we used to serve the client library directly from Cloudfront, however\n * due to issues with the domain we were unable to continue doing this - keeping\n * incase we are able to switch back to CDN path again in future.\n */\nexport function clientLibraryCDNUrl(appId: string, version: string) {\n let file = clientLibraryPath(appId)\n if (env.CLOUDFRONT_CDN) {\n // append app version to bust the cache\n if (version) {\n file += `?v=${version}`\n }\n // don't need to use presigned for client with cloudfront\n // file is public\n return cloudfront.getUrl(file)\n } else {\n return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file)\n }\n}\n\nexport function clientLibraryUrl(appId: string, version: string) {\n let tenantId, qsParams: { appId: string; version: string; tenantId?: string }\n try {\n tenantId = getTenantId()\n } finally {\n qsParams = 
{\n appId,\n version,\n }\n }\n if (tenantId && tenantId !== DEFAULT_TENANT_ID) {\n qsParams.tenantId = tenantId\n }\n return `/api/assets/client?${qs.encode(qsParams)}`\n}\n\nexport function getAppFileUrl(s3Key: string) {\n if (env.CLOUDFRONT_CDN) {\n return cloudfront.getPresignedUrl(s3Key)\n } else {\n return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, s3Key)\n }\n}\n", "import env from \"../../environment\"\nimport * as context from \"../../context\"\nimport * as objectStore from \"../objectStore\"\nimport * as cloudfront from \"../cloudfront\"\n\n// URLs\n\nexport const getGlobalFileUrl = (type: string, name: string, etag?: string) => {\n let file = getGlobalFileS3Key(type, name)\n if (env.CLOUDFRONT_CDN) {\n if (etag) {\n file = `${file}?etag=${etag}`\n }\n return cloudfront.getPresignedUrl(file)\n } else {\n return objectStore.getPresignedUrl(env.GLOBAL_BUCKET_NAME, file)\n }\n}\n\n// KEYS\n\nexport const getGlobalFileS3Key = (type: string, name: string) => {\n let file = `${type}/${name}`\n if (env.MULTI_TENANCY) {\n const tenantId = context.getTenantId()\n file = `${tenantId}/${file}`\n }\n return file\n}\n", "import env from \"../../environment\"\nimport * as objectStore from \"../objectStore\"\nimport * as context from \"../../context\"\nimport * as cloudfront from \"../cloudfront\"\nimport { Plugin } from \"@budibase/types\"\n\n// URLS\n\nexport function enrichPluginURLs(plugins: Plugin[]) {\n if (!plugins || !plugins.length) {\n return []\n }\n return plugins.map(plugin => {\n const jsUrl = getPluginJSUrl(plugin)\n const iconUrl = getPluginIconUrl(plugin)\n return { ...plugin, jsUrl, iconUrl }\n })\n}\n\nfunction getPluginJSUrl(plugin: Plugin) {\n const s3Key = getPluginJSKey(plugin)\n return getPluginUrl(s3Key)\n}\n\nfunction getPluginIconUrl(plugin: Plugin): string | undefined {\n const s3Key = getPluginIconKey(plugin)\n if (!s3Key) {\n return\n }\n return getPluginUrl(s3Key)\n}\n\nfunction getPluginUrl(s3Key: string) {\n if (env.CLOUDFRONT_CDN) {\n return cloudfront.getPresignedUrl(s3Key)\n } else {\n return objectStore.getPresignedUrl(env.PLUGIN_BUCKET_NAME, s3Key)\n }\n}\n\n// S3 KEYS\n\nexport function getPluginJSKey(plugin: Plugin) {\n return getPluginS3Key(plugin, \"plugin.min.js\")\n}\n\nexport function getPluginIconKey(plugin: Plugin) {\n // stored iconUrl is deprecated - hardcode to icon.svg in this case\n const iconFileName = plugin.iconUrl ? 
\"icon.svg\" : plugin.iconFileName\n if (!iconFileName) {\n return\n }\n return getPluginS3Key(plugin, iconFileName)\n}\n\nfunction getPluginS3Key(plugin: Plugin, fileName: string) {\n const s3Key = getPluginS3Dir(plugin.name)\n return `${s3Key}/${fileName}`\n}\n\nexport function getPluginS3Dir(pluginName: string) {\n let s3Key = `${pluginName}`\n if (env.MULTI_TENANCY) {\n const tenantId = context.getTenantId()\n s3Key = `${tenantId}/${s3Key}`\n }\n if (env.CLOUDFRONT_CDN) {\n s3Key = `plugins/${s3Key}`\n }\n return s3Key\n}\n", "export * from \"./app\"\nexport * from \"./global\"\nexport * from \"./plugins\"\n", "export * from \"./objectStore\"\nexport * from \"./utils\"\nexport * from \"./buckets\"\n", "import fs from \"fs\"\nimport path from \"path\"\nimport * as rfs from \"rotating-file-stream\"\n\nimport env from \"../environment\"\nimport { budibaseTempDir } from \"../objectStore\"\n\nconst logsFileName = `budibase.log`\nconst budibaseLogsHistoryFileName = \"budibase-logs-history.txt\"\n\nconst logsPath = path.join(budibaseTempDir(), \"systemlogs\")\n\nfunction getFullPath(fileName: string) {\n return path.join(logsPath, fileName)\n}\n\nexport function getSingleFileMaxSizeInfo(totalMaxSize: string) {\n const regex = /(\\d+)([A-Za-z])/\n const match = totalMaxSize?.match(regex)\n if (!match) {\n console.warn(`totalMaxSize does not have a valid value`, {\n totalMaxSize,\n })\n return undefined\n }\n\n const size = +match[1]\n const unit = match[2]\n if (size === 1) {\n switch (unit) {\n case \"B\":\n return { size: `${size}B`, totalHistoryFiles: 1 }\n case \"K\":\n return { size: `${(size * 1000) / 2}B`, totalHistoryFiles: 1 }\n case \"M\":\n return { size: `${(size * 1000) / 2}K`, totalHistoryFiles: 1 }\n case \"G\":\n return { size: `${(size * 1000) / 2}M`, totalHistoryFiles: 1 }\n default:\n return undefined\n }\n }\n\n if (size % 2 === 0) {\n return { size: `${size / 2}${unit}`, totalHistoryFiles: 1 }\n }\n\n return { size: `1${unit}`, totalHistoryFiles: size - 1 }\n}\n\nexport function localFileDestination() {\n const fileInfo = getSingleFileMaxSizeInfo(env.ROLLING_LOG_MAX_SIZE)\n const outFile = rfs.createStream(logsFileName, {\n // As we have a rolling size, we want to half the max size\n size: fileInfo?.size,\n path: logsPath,\n maxFiles: fileInfo?.totalHistoryFiles || 1,\n immutable: true,\n history: budibaseLogsHistoryFileName,\n initialRotation: false,\n })\n\n return outFile\n}\n\nexport function getLogReadStream() {\n const streams = []\n const historyFile = getFullPath(budibaseLogsHistoryFileName)\n if (fs.existsSync(historyFile)) {\n const fileContent = fs.readFileSync(historyFile, \"utf-8\")\n const historyFiles = fileContent.split(\"\\n\")\n for (const historyFile of historyFiles.filter(x => x)) {\n streams.push(fs.readFileSync(historyFile))\n }\n }\n\n streams.push(fs.readFileSync(getFullPath(logsFileName)))\n\n const combinedContent = Buffer.concat(streams)\n return combinedContent\n}\n", "import pino, { LoggerOptions } from \"pino\"\nimport pinoPretty from \"pino-pretty\"\n\nimport { IdentityType } from \"@budibase/types\"\nimport env from \"../../environment\"\nimport * as context from \"../../context\"\nimport * as correlation from \"../correlation\"\nimport tracer from \"dd-trace\"\nimport { formats } from \"dd-trace/ext\"\n\nimport { localFileDestination } from \"../system\"\n\n// LOGGER\n\nlet pinoInstance: pino.Logger | undefined\nif (!env.DISABLE_PINO_LOGGER) {\n const level = env.LOG_LEVEL\n const pinoOptions: LoggerOptions = {\n level,\n formatters: {\n 
level: level => {\n return { level: level.toUpperCase() }\n },\n bindings: () => {\n if (env.SELF_HOSTED) {\n // \"service\" is being injected in datadog using the pod names,\n // so we should leave it blank to allow the default behaviour if it's not running self-hosted\n return {\n service: env.SERVICE_NAME,\n }\n } else {\n return {}\n }\n },\n },\n timestamp: () => `,\"timestamp\":\"${new Date(Date.now()).toISOString()}\"`,\n }\n\n const destinations: pino.StreamEntry[] = []\n\n destinations.push(\n env.isDev()\n ? {\n stream: pinoPretty({ singleLine: true }),\n level: level as pino.Level,\n }\n : { stream: process.stdout, level: level as pino.Level }\n )\n\n if (env.SELF_HOSTED) {\n destinations.push({\n stream: localFileDestination(),\n level: level as pino.Level,\n })\n }\n\n pinoInstance = destinations.length\n ? pino(pinoOptions, pino.multistream(destinations))\n : pino(pinoOptions)\n\n // CONSOLE OVERRIDES\n\n interface MergingObject {\n objects?: any[]\n tenantId?: string\n appId?: string\n automationId?: string\n identityId?: string\n identityType?: IdentityType\n correlationId?: string\n err?: Error\n }\n\n function isPlainObject(obj: any) {\n return typeof obj === \"object\" && obj !== null && !(obj instanceof Error)\n }\n\n function isError(obj: any) {\n return obj instanceof Error\n }\n\n function isMessage(obj: any) {\n return typeof obj === \"string\"\n }\n\n /**\n * Backwards compatibility between console logging statements\n * and pino logging requirements.\n */\n function getLogParams(args: any[]): [MergingObject, string] {\n let error = undefined\n let objects: any[] = []\n let message = \"\"\n\n args.forEach(arg => {\n if (isMessage(arg)) {\n message = `${message} ${arg}`.trimStart()\n }\n if (isPlainObject(arg)) {\n objects.push(arg)\n }\n if (isError(arg)) {\n error = arg\n }\n })\n\n const identity = getIdentity()\n\n let contextObject = {}\n\n contextObject = {\n tenantId: getTenantId(),\n appId: getAppId(),\n automationId: getAutomationId(),\n identityId: identity?._id,\n identityType: identity?.type,\n correlationId: correlation.getId(),\n }\n\n const span = tracer.scope().active()\n if (span) {\n tracer.inject(span.context(), formats.LOG, contextObject)\n }\n\n const mergingObject: any = {\n err: error,\n pid: process.pid,\n ...contextObject,\n }\n\n if (objects.length) {\n // init generic data object for params supplied that don't have a\n // '_logKey' field. This prints an object using argument index as the key\n // e.g. 
{ 0: {}, 1: {} }\n const data: any = {}\n let dataIndex = 0\n\n for (let i = 0; i < objects.length; i++) {\n const object = objects[i]\n // the object has specified a log key\n // use this instead of generic key\n const logKey = object._logKey\n if (logKey) {\n delete object._logKey\n mergingObject[logKey] = object\n } else {\n data[dataIndex] = object\n dataIndex++\n }\n }\n\n if (Object.keys(data).length) {\n mergingObject.data = data\n }\n }\n\n return [mergingObject, message]\n }\n\n console.log = (...arg: any[]) => {\n const [obj, msg] = getLogParams(arg)\n pinoInstance?.info(obj, msg)\n }\n console.info = (...arg: any[]) => {\n const [obj, msg] = getLogParams(arg)\n pinoInstance?.info(obj, msg)\n }\n console.warn = (...arg: any[]) => {\n const [obj, msg] = getLogParams(arg)\n pinoInstance?.warn(obj, msg)\n }\n console.error = (...arg: any[]) => {\n const [obj, msg] = getLogParams(arg)\n pinoInstance?.error(obj, msg)\n }\n\n /**\n * custom trace impl - this resembles the node trace behaviour rather\n * than traditional trace logging\n * @param arg\n */\n console.trace = (...arg: any[]) => {\n const [obj, msg] = getLogParams(arg)\n if (!obj.err) {\n // to get stack trace\n obj.err = new Error()\n }\n pinoInstance?.trace(obj, msg)\n }\n\n console.debug = (...arg: any) => {\n const [obj, msg] = getLogParams(arg)\n pinoInstance?.debug(obj, msg)\n }\n\n // CONTEXT\n\n const getTenantId = () => {\n let tenantId\n try {\n tenantId = context.getTenantId()\n } catch (e: any) {\n // do nothing\n }\n return tenantId\n }\n\n const getAppId = () => {\n let appId\n try {\n appId = context.getAppId()\n } catch (e) {\n // do nothing\n }\n return appId\n }\n\n const getAutomationId = () => {\n let appId\n try {\n appId = context.getAutomationId()\n } catch (e) {\n // do nothing\n }\n return appId\n }\n\n const getIdentity = () => {\n let identity\n try {\n identity = context.getIdentity()\n } catch (e) {\n // do nothing\n }\n return identity\n }\n}\n\nexport const logger = pinoInstance\n", "const NonErrors = [\"AccountError\"]\n\nfunction isSuppressed(e?: any) {\n return e && e[\"suppressAlert\"]\n}\n\nexport function logAlert(message: string, e?: any) {\n if (e && NonErrors.includes(e.name) && isSuppressed(e)) {\n return\n }\n console.error(`bb-alert: ${message}`, e)\n}\n\nexport function logAlertWithInfo(\n message: string,\n db: string,\n id: string,\n error: any\n) {\n message = `${message} - db: ${db} - doc: ${id} - error: `\n logAlert(message, error)\n}\n\nexport function logWarn(message: string, e?: any) {\n console.warn(`bb-warn: ${message}`, e)\n}\n", "export * as correlation from \"./correlation/correlation\"\nexport { logger } from \"./pino/logger\"\nexport * from \"./alerts\"\nexport * as system from \"./system\"\n", "let intervals: NodeJS.Timeout[] = []\n\nexport function set(callback: () => any, period: number) {\n const interval = setInterval(callback, period)\n intervals.push(interval)\n return interval\n}\n\nexport function clear(interval: NodeJS.Timeout) {\n const idx = intervals.indexOf(interval)\n if (idx !== -1) {\n intervals.splice(idx, 1)\n }\n clearInterval(interval)\n}\n\nexport function cleanup() {\n for (let interval of intervals) {\n clearInterval(interval)\n }\n intervals = []\n}\n\nexport class ExecutionTimeoutError extends Error {\n public readonly name = \"ExecutionTimeoutError\"\n}\n\nexport class ExecutionTimeTracker {\n static withLimit(limitMs: number) {\n return new ExecutionTimeTracker(limitMs)\n }\n\n constructor(readonly limitMs: number) {}\n\n private 
totalTimeMs = 0\n\n track<T>(f: () => T): T {\n this.checkLimit()\n const start = process.hrtime.bigint()\n try {\n return f()\n } finally {\n const end = process.hrtime.bigint()\n this.totalTimeMs += Number(end - start) / 1e6\n this.checkLimit()\n }\n }\n\n get elapsedMS() {\n return this.totalTimeMs\n }\n\n checkLimit() {\n if (this.totalTimeMs > this.limitMs) {\n throw new ExecutionTimeoutError(\n `Execution time limit of ${this.limitMs}ms exceeded: ${this.totalTimeMs}ms`\n )\n }\n }\n}\n", "export * from \"./timers\"\n", "import env from \"../environment\"\nimport Redis from \"ioredis\"\n// mock-redis doesn't have any typing\nlet MockRedis: any | undefined\nif (env.MOCK_REDIS) {\n try {\n // ioredis mock is all in memory\n MockRedis = require(\"ioredis-mock\")\n } catch (err) {\n console.log(\"Mock redis unavailable\")\n }\n}\nimport {\n addDbPrefix,\n removeDbPrefix,\n getRedisOptions,\n SEPARATOR,\n SelectableDatabase,\n getRedisConnectionDetails,\n} from \"./utils\"\nimport { logAlert } from \"../logging\"\nimport * as timers from \"../timers\"\n\nconst RETRY_PERIOD_MS = 2000\nconst STARTUP_TIMEOUT_MS = 5000\nconst CLUSTERED = env.REDIS_CLUSTERED\nconst DEFAULT_SELECT_DB = SelectableDatabase.DEFAULT\n\n// for testing just generate the client once\nlet CLOSED = false\nlet CLIENTS: { [key: number]: any } = {}\nlet CONNECTED = false\n\n// mock redis always connected\nif (env.MOCK_REDIS) {\n CONNECTED = true\n}\n\nfunction pickClient(selectDb: number): any {\n return CLIENTS[selectDb]\n}\n\nfunction connectionError(timeout: NodeJS.Timeout, err: Error | string) {\n // manually shut down, ignore errors\n if (CLOSED) {\n return\n }\n CLOSED = true\n // always clear this on error\n clearTimeout(timeout)\n CONNECTED = false\n logAlert(\"Redis connection failed\", err)\n setTimeout(() => {\n init()\n }, RETRY_PERIOD_MS)\n}\n\n/**\n * Inits the system, will error if unable to connect to redis cluster (may take up to 10 seconds) otherwise\n * will return the ioredis client which will be ready to use.\n */\nfunction init(selectDb = DEFAULT_SELECT_DB) {\n const RedisCore = env.MOCK_REDIS && MockRedis ? 
MockRedis : Redis\n let timeout: NodeJS.Timeout\n CLOSED = false\n let client = pickClient(selectDb)\n // already connected, ignore\n if (client && CONNECTED) {\n return\n }\n // testing uses a single in memory client\n if (env.MOCK_REDIS) {\n CLIENTS[selectDb] = new RedisCore(getRedisOptions())\n }\n // start the timer - only allowed 5 seconds to connect\n timeout = setTimeout(() => {\n if (!CONNECTED) {\n connectionError(timeout, \"Did not successfully connect in timeout\")\n }\n }, STARTUP_TIMEOUT_MS)\n\n // disconnect any lingering client\n if (client) {\n client.disconnect()\n }\n const { host, port } = getRedisConnectionDetails()\n const opts = getRedisOptions()\n\n if (CLUSTERED) {\n client = new RedisCore.Cluster([{ host, port }], opts)\n } else {\n client = new RedisCore(opts)\n }\n // attach handlers\n client.on(\"end\", (err: Error) => {\n if (env.isTest()) {\n // don't try to re-connect in test env\n // allow the process to exit\n return\n }\n connectionError(timeout, err)\n })\n client.on(\"error\", (err: Error) => {\n connectionError(timeout, err)\n })\n client.on(\"connect\", () => {\n console.log(`Connected to Redis DB: ${selectDb}`)\n clearTimeout(timeout)\n CONNECTED = true\n })\n CLIENTS[selectDb] = client\n}\n\nfunction waitForConnection(selectDb: number = DEFAULT_SELECT_DB) {\n return new Promise(resolve => {\n if (pickClient(selectDb) == null) {\n init()\n } else if (CONNECTED) {\n resolve(\"\")\n return\n }\n // check if the connection is ready\n const interval = timers.set(() => {\n if (CONNECTED) {\n timers.clear(interval)\n resolve(\"\")\n }\n }, 500)\n })\n}\n\n/**\n * Utility function, takes a redis stream and converts it to a promisified response -\n * this can only be done with redis streams because they will have an end.\n * @param stream A redis stream, specifically as this type of stream will have an end.\n * @param client The client to use for further lookups.\n * @return The final output of the stream\n */\nfunction promisifyStream(stream: any, client: RedisWrapper) {\n return new Promise((resolve, reject) => {\n const outputKeys = new Set()\n stream.on(\"data\", (keys: string[]) => {\n keys.forEach(key => {\n outputKeys.add(key)\n })\n })\n stream.on(\"error\", (err: Error) => {\n reject(err)\n })\n stream.on(\"end\", async () => {\n const keysArray: string[] = Array.from(outputKeys) as string[]\n try {\n let getPromises = []\n for (let key of keysArray) {\n getPromises.push(client.get(key))\n }\n const jsonArray = await Promise.all(getPromises)\n resolve(\n keysArray.map(key => ({\n key: removeDbPrefix(key),\n value: JSON.parse(jsonArray.shift()),\n }))\n )\n } catch (err) {\n reject(err)\n }\n })\n })\n}\n\nclass RedisWrapper {\n _db: string\n _select: number\n\n constructor(db: string, selectDb: number | null = null) {\n this._db = db\n this._select = selectDb || DEFAULT_SELECT_DB\n }\n\n getClient() {\n return pickClient(this._select)\n }\n\n async init() {\n CLOSED = false\n init(this._select)\n await waitForConnection(this._select)\n if (this._select && !env.isTest()) {\n this.getClient().select(this._select)\n }\n return this\n }\n\n async finish() {\n CLOSED = true\n this.getClient().disconnect()\n }\n\n async scan(key = \"\"): Promise<any> {\n const db = this._db\n key = `${db}${SEPARATOR}${key}`\n let stream\n if (CLUSTERED) {\n let node = this.getClient().nodes(\"master\")\n stream = node[0].scanStream({ match: key + \"*\", count: 100 })\n } else {\n stream = this.getClient().scanStream({ match: key + \"*\", count: 100 })\n }\n return 
promisifyStream(stream, this.getClient())\n }\n\n async keys(pattern: string) {\n const db = this._db\n return this.getClient().keys(addDbPrefix(db, pattern))\n }\n\n async exists(key: string) {\n const db = this._db\n return await this.getClient().exists(addDbPrefix(db, key))\n }\n\n async get(key: string) {\n const db = this._db\n let response = await this.getClient().get(addDbPrefix(db, key))\n // overwrite the prefixed key\n if (response != null && response.key) {\n response.key = key\n }\n // if its not an object just return the response\n try {\n return JSON.parse(response)\n } catch (err) {\n return response\n }\n }\n\n async bulkGet<T>(keys: string[]) {\n const db = this._db\n if (keys.length === 0) {\n return {}\n }\n const prefixedKeys = keys.map(key => addDbPrefix(db, key))\n let response = await this.getClient().mget(prefixedKeys)\n if (Array.isArray(response)) {\n let final: Record<string, T> = {}\n let count = 0\n for (let result of response) {\n if (result) {\n let parsed\n try {\n parsed = JSON.parse(result)\n } catch (err) {\n parsed = result\n }\n final[keys[count]] = parsed\n }\n count++\n }\n return final\n } else {\n throw new Error(`Invalid response: ${response}`)\n }\n }\n\n async store(key: string, value: any, expirySeconds: number | null = null) {\n const db = this._db\n if (typeof value === \"object\") {\n value = JSON.stringify(value)\n }\n const prefixedKey = addDbPrefix(db, key)\n await this.getClient().set(prefixedKey, value)\n if (expirySeconds) {\n await this.getClient().expire(prefixedKey, expirySeconds)\n }\n }\n\n async getTTL(key: string) {\n const db = this._db\n const prefixedKey = addDbPrefix(db, key)\n return this.getClient().ttl(prefixedKey)\n }\n\n async setExpiry(key: string, expirySeconds: number | null) {\n const db = this._db\n const prefixedKey = addDbPrefix(db, key)\n await this.getClient().expire(prefixedKey, expirySeconds)\n }\n\n async delete(key: string) {\n const db = this._db\n await this.getClient().del(addDbPrefix(db, key))\n }\n\n async clear() {\n let items = await this.scan()\n await Promise.all(items.map((obj: any) => this.delete(obj.key)))\n }\n}\n\nexport default RedisWrapper\n", "import Client from \"./redis\"\nimport * as utils from \"./utils\"\n\nlet userClient: Client,\n sessionClient: Client,\n appClient: Client,\n cacheClient: Client,\n writethroughClient: Client,\n lockClient: Client,\n socketClient: Client,\n inviteClient: Client,\n passwordResetClient: Client\n\nexport async function init() {\n userClient = await new Client(utils.Databases.USER_CACHE).init()\n sessionClient = await new Client(utils.Databases.SESSIONS).init()\n appClient = await new Client(utils.Databases.APP_METADATA).init()\n cacheClient = await new Client(utils.Databases.GENERIC_CACHE).init()\n lockClient = await new Client(utils.Databases.LOCKS).init()\n writethroughClient = await new Client(utils.Databases.WRITE_THROUGH).init()\n inviteClient = await new Client(utils.Databases.INVITATIONS).init()\n passwordResetClient = await new Client(utils.Databases.PW_RESETS).init()\n socketClient = await new Client(\n utils.Databases.SOCKET_IO,\n utils.SelectableDatabase.SOCKET_IO\n ).init()\n}\n\nexport async function shutdown() {\n if (userClient) await userClient.finish()\n if (sessionClient) await sessionClient.finish()\n if (appClient) await appClient.finish()\n if (cacheClient) await cacheClient.finish()\n if (writethroughClient) await writethroughClient.finish()\n if (lockClient) await lockClient.finish()\n if (inviteClient) await inviteClient.finish()\n 
if (passwordResetClient) await passwordResetClient.finish()\n if (socketClient) await socketClient.finish()\n}\n\nprocess.on(\"exit\", async () => {\n await shutdown()\n})\n\nexport async function getUserClient() {\n if (!userClient) {\n await init()\n }\n return userClient\n}\n\nexport async function getSessionClient() {\n if (!sessionClient) {\n await init()\n }\n return sessionClient\n}\n\nexport async function getAppClient() {\n if (!appClient) {\n await init()\n }\n return appClient\n}\n\nexport async function getCacheClient() {\n if (!cacheClient) {\n await init()\n }\n return cacheClient\n}\n\nexport async function getWritethroughClient() {\n if (!writethroughClient) {\n await init()\n }\n return writethroughClient\n}\n\nexport async function getLockClient() {\n if (!lockClient) {\n await init()\n }\n return lockClient\n}\n\nexport async function getSocketClient() {\n if (!socketClient) {\n await init()\n }\n return socketClient\n}\n\nexport async function getInviteClient() {\n if (!inviteClient) {\n await init()\n }\n return inviteClient\n}\n\nexport async function getPasswordResetClient() {\n if (!passwordResetClient) {\n await init()\n }\n return passwordResetClient\n}\n", "export * as configs from \"./configs\"\nexport * as events from \"./events\"\nexport * as migrations from \"./migrations\"\nexport * as users from \"./users\"\nexport * as roles from \"./security/roles\"\nexport * as permissions from \"./security/permissions\"\nexport * as accounts from \"./accounts\"\nexport * as installation from \"./installation\"\nexport * as featureFlags from \"./features\"\nexport * as features from \"./features/installation\"\nexport * as sessions from \"./security/sessions\"\nexport * as platform from \"./platform\"\nexport * as auth from \"./auth\"\nexport * as constants from \"./constants\"\nexport * as logging from \"./logging\"\nexport * as middleware from \"./middleware\"\nexport * as plugins from \"./plugin\"\nexport * as encryption from \"./security/encryption\"\nexport * as queue from \"./queue\"\nexport * as db from \"./db\"\nexport * as context from \"./context\"\nexport * as cache from \"./cache\"\nexport * as objectStore from \"./objectStore\"\nexport * as redis from \"./redis\"\nexport { Client as RedisClient } from \"./redis\"\nexport * as locks from \"./redis/redlockImpl\"\nexport * as utils from \"./utils\"\nexport * as errors from \"./errors\"\nexport * as timers from \"./timers\"\nexport { default as env } from \"./environment\"\nexport * as blacklist from \"./blacklist\"\nexport * as docUpdates from \"./docUpdates\"\nexport * from \"./utils/Duration\"\nexport { SearchParams } from \"./db\"\nexport * as docIds from \"./docIds\"\nexport * as security from \"./security\"\n// Add context to tenancy for backwards compatibility\n// only do this for external usages to prevent internal\n// circular dependencies\nimport * as context from \"./context\"\nimport * as _tenancy from \"./tenancy\"\n\nexport const tenancy = {\n ..._tenancy,\n ...context,\n}\n\n// expose error classes directly\nexport * from \"./errors\"\n\n// expose constants directly\nexport * from \"./constants\"\n\n// expose package init function\nimport * as db from \"./db\"\n\nexport const init = (opts: any = {}) => {\n db.init(opts.db)\n}\n", "export * from \"./configs\"\n", "import {\n Config,\n ConfigType,\n GoogleConfig,\n GoogleInnerConfig,\n OIDCConfig,\n OIDCInnerConfig,\n OIDCLogosConfig,\n SCIMConfig,\n SCIMInnerConfig,\n SettingsConfig,\n SettingsInnerConfig,\n SMTPConfig,\n SMTPInnerConfig,\n} from 
\"@budibase/types\"\nimport { DocumentType, SEPARATOR } from \"../constants\"\nimport { CacheKey, TTL, withCache } from \"../cache\"\nimport * as context from \"../context\"\nimport env from \"../environment\"\n\n// UTILS\n\n/**\n * Generates a new configuration ID.\n * @returns The new configuration ID which the config doc can be stored under.\n */\nexport function generateConfigID(type: ConfigType) {\n return `${DocumentType.CONFIG}${SEPARATOR}${type}`\n}\n\nexport async function getConfig<T extends Config>(\n type: ConfigType\n): Promise<T | undefined> {\n const db = context.getGlobalDB()\n try {\n // await to catch error\n return (await db.get(generateConfigID(type))) as T\n } catch (e: any) {\n if (e.status === 404) {\n return\n }\n throw e\n }\n}\n\nexport async function save(\n config: Config\n): Promise<{ id: string; rev: string }> {\n const db = context.getGlobalDB()\n return db.put(config)\n}\n\n// SETTINGS\n\nexport async function getSettingsConfigDoc(): Promise<SettingsConfig> {\n let config = await getConfig<SettingsConfig>(ConfigType.SETTINGS)\n\n if (!config) {\n config = {\n _id: generateConfigID(ConfigType.SETTINGS),\n type: ConfigType.SETTINGS,\n config: {},\n }\n }\n\n // overridden fields\n config.config.platformUrl = await getPlatformUrl({\n tenantAware: true,\n config: config.config,\n })\n config.config.analyticsEnabled = await analyticsEnabled({\n config: config.config,\n })\n\n return config\n}\n\nexport async function getSettingsConfig(): Promise<SettingsInnerConfig> {\n return (await getSettingsConfigDoc()).config\n}\n\nexport async function getPlatformUrl(\n opts: { tenantAware: boolean; config?: SettingsInnerConfig } = {\n tenantAware: true,\n }\n) {\n let platformUrl = env.PLATFORM_URL || \"http://localhost:10000\"\n\n if (!env.SELF_HOSTED && env.MULTI_TENANCY && opts.tenantAware) {\n // cloud and multi tenant - add the tenant to the default platform url\n const tenantId = context.getTenantId()\n if (!platformUrl.includes(\"localhost:\")) {\n platformUrl = platformUrl.replace(\"://\", `://${tenantId}.`)\n }\n } else if (env.SELF_HOSTED) {\n const config = opts?.config\n ? opts.config\n : // direct to db to prevent infinite loop\n (await getConfig<SettingsConfig>(ConfigType.SETTINGS))?.config\n if (config?.platformUrl) {\n platformUrl = config.platformUrl\n }\n }\n\n return platformUrl\n}\n\nexport const analyticsEnabled = async (opts?: {\n config?: SettingsInnerConfig\n}) => {\n // cloud - always use the environment variable\n if (!env.SELF_HOSTED) {\n return !!env.ENABLE_ANALYTICS\n }\n\n // self host - prefer the settings doc\n // use cache as events have high throughput\n const enabledInDB = await withCache(\n CacheKey.ANALYTICS_ENABLED,\n TTL.ONE_DAY,\n async () => {\n const config = opts?.config\n ? 
opts.config\n : // direct to db to prevent infinite loop\n (await getConfig<SettingsConfig>(ConfigType.SETTINGS))?.config\n\n // need to do explicit checks in case the field is not set\n if (config?.analyticsEnabled === false) {\n return false\n } else if (config?.analyticsEnabled === true) {\n return true\n }\n }\n )\n\n if (enabledInDB !== undefined) {\n return enabledInDB\n }\n\n // fallback to the environment variable\n // explicitly check for 0 or false here, undefined or otherwise is treated as true\n const envEnabled: any = env.ENABLE_ANALYTICS\n if (envEnabled === 0 || envEnabled === false) {\n return false\n } else {\n return true\n }\n}\n\n// GOOGLE\n\nasync function getGoogleConfigDoc(): Promise<GoogleConfig | undefined> {\n return await getConfig<GoogleConfig>(ConfigType.GOOGLE)\n}\n\nexport async function getGoogleConfig(): Promise<\n GoogleInnerConfig | undefined\n> {\n const config = await getGoogleConfigDoc()\n return config?.config\n}\n\nexport async function getGoogleDatasourceConfig(): Promise<\n GoogleInnerConfig | undefined\n> {\n if (!env.SELF_HOSTED) {\n // always use the env vars in cloud\n return getDefaultGoogleConfig()\n }\n\n // prefer the config in self-host\n let config = await getGoogleConfig()\n\n // fallback to env vars\n if (!config || !config.activated) {\n config = getDefaultGoogleConfig()\n }\n\n return config\n}\n\nexport function getDefaultGoogleConfig(): GoogleInnerConfig | undefined {\n if (env.GOOGLE_CLIENT_ID && env.GOOGLE_CLIENT_SECRET) {\n return {\n clientID: env.GOOGLE_CLIENT_ID!,\n clientSecret: env.GOOGLE_CLIENT_SECRET!,\n activated: true,\n }\n }\n}\n\n// OIDC\n\nexport async function getOIDCLogosDoc(): Promise<OIDCLogosConfig | undefined> {\n return getConfig<OIDCLogosConfig>(ConfigType.OIDC_LOGOS)\n}\n\nasync function getOIDCConfigDoc(): Promise<OIDCConfig | undefined> {\n return getConfig<OIDCConfig>(ConfigType.OIDC)\n}\n\nexport async function getOIDCConfig(): Promise<OIDCInnerConfig | undefined> {\n const config = (await getOIDCConfigDoc())?.config\n // default to the 0th config\n return config?.configs && config.configs[0]\n}\n\n/**\n * @param configId The config id of the inner config to retrieve\n */\nexport async function getOIDCConfigById(\n configId: string\n): Promise<OIDCInnerConfig | undefined> {\n const config = (await getConfig<OIDCConfig>(ConfigType.OIDC))?.config\n return config && config.configs.filter((c: any) => c.uuid === configId)[0]\n}\n\n// SMTP\n\nexport async function getSMTPConfigDoc(): Promise<SMTPConfig | undefined> {\n return getConfig<SMTPConfig>(ConfigType.SMTP)\n}\n\nexport async function getSMTPConfig(\n isAutomation?: boolean\n): Promise<SMTPInnerConfig | undefined> {\n const config = await getSMTPConfigDoc()\n if (config) {\n return config.config\n }\n\n // always allow fallback in self host\n // in cloud don't allow for automations\n const allowFallback = env.SELF_HOSTED || !isAutomation\n\n // Use an SMTP fallback configuration from env variables\n if (env.SMTP_FALLBACK_ENABLED && allowFallback) {\n return {\n port: env.SMTP_PORT,\n host: env.SMTP_HOST!,\n secure: false,\n from: env.SMTP_FROM_ADDRESS!,\n auth: {\n user: env.SMTP_USER!,\n pass: env.SMTP_PASSWORD!,\n },\n }\n }\n}\n\n// SCIM\n\nexport async function getSCIMConfig(): Promise<SCIMInnerConfig | undefined> {\n const config = await getConfig<SCIMConfig>(ConfigType.SCIM)\n return config?.config\n}\n", "export * as generic from \"./generic\"\nexport * as user from \"./user\"\nexport * as app from \"./appMetadata\"\nexport * as writethrough from 
\"./writethrough\"\nexport * as invite from \"./invite\"\nexport * as passwordReset from \"./passwordReset\"\nexport * from \"./generic\"\n", "import BaseCache from \"./base\"\n\nconst GENERIC = new BaseCache()\n\nexport enum CacheKey {\n CHECKLIST = \"checklist\",\n INSTALLATION = \"installation\",\n ANALYTICS_ENABLED = \"analyticsEnabled\",\n UNIQUE_TENANT_ID = \"uniqueTenantId\",\n EVENTS = \"events\",\n BACKFILL_METADATA = \"backfillMetadata\",\n EVENTS_RATE_LIMIT = \"eventsRateLimit\",\n}\n\nexport enum TTL {\n ONE_MINUTE = 600,\n ONE_HOUR = 3600,\n ONE_DAY = 86400,\n}\n\nexport const keys = (...args: Parameters<typeof GENERIC.keys>) =>\n GENERIC.keys(...args)\nexport const get = (...args: Parameters<typeof GENERIC.get>) =>\n GENERIC.get(...args)\nexport const store = (...args: Parameters<typeof GENERIC.store>) =>\n GENERIC.store(...args)\nexport const destroy = (...args: Parameters<typeof GENERIC.delete>) =>\n GENERIC.delete(...args)\nexport const withCache = (...args: Parameters<typeof GENERIC.withCache>) =>\n GENERIC.withCache(...args)\nexport const bustCache = (...args: Parameters<typeof GENERIC.bustCache>) =>\n GENERIC.bustCache(...args)\n", "import { getTenantId } from \"../../context\"\nimport * as redis from \"../../redis/init\"\nimport { Client } from \"../../redis\"\n\nfunction generateTenantKey(key: string) {\n const tenantId = getTenantId()\n return `${key}:${tenantId}`\n}\n\nexport default class BaseCache {\n client: Client | undefined\n\n constructor(client: Client | undefined = undefined) {\n this.client = client\n }\n\n async getClient() {\n return !this.client ? await redis.getCacheClient() : this.client\n }\n\n async keys(pattern: string) {\n const client = await this.getClient()\n return client.keys(pattern)\n }\n\n /**\n * Read only from the cache.\n */\n async get(key: string, opts = { useTenancy: true }) {\n key = opts.useTenancy ? generateTenantKey(key) : key\n const client = await this.getClient()\n return client.get(key)\n }\n\n /**\n * Write to the cache.\n */\n async store(\n key: string,\n value: any,\n ttl: number | null = null,\n opts = { useTenancy: true }\n ) {\n key = opts.useTenancy ? generateTenantKey(key) : key\n const client = await this.getClient()\n await client.store(key, value, ttl)\n }\n\n /**\n * Remove from cache.\n */\n async delete(key: string, opts = { useTenancy: true }) {\n key = opts.useTenancy ? generateTenantKey(key) : key\n const client = await this.getClient()\n return client.delete(key)\n }\n\n /**\n * Read from the cache. 
Write to the cache if not exists.\n */\n async withCache(\n key: string,\n ttl: number,\n fetchFn: any,\n opts = { useTenancy: true }\n ) {\n const cachedValue = await this.get(key, opts)\n if (cachedValue) {\n return cachedValue\n }\n\n try {\n const fetchedValue = await fetchFn()\n\n await this.store(key, fetchedValue, ttl, opts)\n return fetchedValue\n } catch (err) {\n console.error(\"Error fetching before cache - \", err)\n throw err\n }\n }\n\n async bustCache(key: string, opts = { client: null }) {\n const client = await this.getClient()\n try {\n await client.delete(generateTenantKey(key))\n } catch (err) {\n console.error(\"Error busting cache - \", err)\n throw err\n }\n }\n}\n", "import * as redis from \"../redis/init\"\nimport * as tenancy from \"../tenancy\"\nimport * as context from \"../context\"\nimport * as platform from \"../platform\"\nimport env from \"../environment\"\nimport * as accounts from \"../accounts\"\nimport { UserDB } from \"../users\"\nimport { sdk } from \"@budibase/shared-core\"\nimport { User } from \"@budibase/types\"\n\nconst EXPIRY_SECONDS = 3600\n\n/**\n * The default populate user function\n */\nasync function populateFromDB(userId: string, tenantId: string) {\n const db = tenancy.getTenantDB(tenantId)\n const user = await db.get<any>(userId)\n user.budibaseAccess = true\n if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {\n const account = await accounts.getAccount(user.email)\n if (account) {\n user.account = account\n user.accountPortalAccess = true\n }\n }\n\n return user\n}\n\nasync function populateUsersFromDB(\n userIds: string[]\n): Promise<{ users: User[]; notFoundIds?: string[] }> {\n const getUsersResponse = await UserDB.bulkGet(userIds)\n\n // Handle missed user ids\n const notFoundIds = userIds.filter((uid, i) => !getUsersResponse[i])\n\n const users = getUsersResponse.filter(x => x)\n\n await Promise.all(\n users.map(async (user: any) => {\n user.budibaseAccess = true\n if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {\n const account = await accounts.getAccount(user.email)\n if (account) {\n user.account = account\n user.accountPortalAccess = true\n }\n }\n })\n )\n\n if (notFoundIds.length) {\n return { users, notFoundIds }\n }\n return { users }\n}\n\n/**\n * Get the requested user by id.\n * Use redis cache to first read the user.\n * If not present fallback to loading the user directly and re-caching.\n * @param userId the id of the user to get\n * @param tenantId the tenant of the user to get\n * @param populateUser function to provide the user for re-caching. 
default to couch db\n * @returns\n */\nexport async function getUser(\n userId: string,\n tenantId?: string,\n populateUser?: any\n) {\n if (!populateUser) {\n populateUser = populateFromDB\n }\n if (!tenantId) {\n try {\n tenantId = context.getTenantId()\n } catch (err) {\n tenantId = await platform.users.lookupTenantId(userId)\n }\n }\n const client = await redis.getUserClient()\n // try cache\n let user = await client.get(userId)\n if (!user) {\n user = await populateUser(userId, tenantId)\n await client.store(userId, user, EXPIRY_SECONDS)\n }\n if (user && !user.tenantId && tenantId) {\n // make sure the tenant ID is always correct/set\n user.tenantId = tenantId\n }\n // if has groups, could have builder permissions granted by a group\n if (user.userGroups && !sdk.users.isGlobalBuilder(user)) {\n await context.doInTenant(tenantId, async () => {\n const appIds = await UserDB.getGroupBuilderAppIds(user)\n if (appIds.length) {\n const existing = user.builder?.apps || []\n user.builder = {\n apps: [...new Set(existing.concat(appIds))],\n }\n }\n })\n }\n return user\n}\n\n/**\n * Get the requested users by id.\n * Use redis cache to first read the users.\n * If not present fallback to loading the users directly and re-caching.\n * @param userIds the ids of the user to get\n * @param tenantId the tenant of the users to get\n * @returns\n */\nexport async function getUsers(\n userIds: string[]\n): Promise<{ users: User[]; notFoundIds?: string[] }> {\n const client = await redis.getUserClient()\n // try cache\n let usersFromCache = await client.bulkGet<User>(userIds)\n const missingUsersFromCache = userIds.filter(uid => !usersFromCache[uid])\n const users = Object.values(usersFromCache)\n let notFoundIds\n\n if (missingUsersFromCache.length) {\n const usersFromDb = await populateUsersFromDB(missingUsersFromCache)\n\n notFoundIds = usersFromDb.notFoundIds\n for (const userToCache of usersFromDb.users) {\n await client.store(userToCache._id!, userToCache, EXPIRY_SECONDS)\n }\n users.push(...usersFromDb.users)\n }\n return { users, notFoundIds: notFoundIds }\n}\n\nexport async function invalidateUser(userId: string) {\n const client = await redis.getUserClient()\n await client.delete(userId)\n}\n", "export * from \"./db\"\nexport * from \"./tenancy\"\n", "import { getDB } from \"../db/db\"\nimport { getGlobalDBName } from \"../context\"\n\nexport function getTenantDB(tenantId: string) {\n return getDB(getGlobalDBName(tenantId))\n}\n", "import {\n DEFAULT_TENANT_ID,\n getTenantId,\n getTenantIDFromAppID,\n isMultiTenant,\n getPlatformURL,\n} from \"../context\"\nimport {\n BBContext,\n TenantResolutionStrategy,\n GetTenantIdOptions,\n} from \"@budibase/types\"\nimport { Header } from \"../constants\"\n\nexport function addTenantToUrl(url: string) {\n const tenantId = getTenantId()\n\n if (isMultiTenant()) {\n const char = url.indexOf(\"?\") === -1 ? 
\"?\" : \"&\"\n url += `${char}tenantId=${tenantId}`\n }\n\n return url\n}\n\nexport const isUserInAppTenant = (appId: string, user?: any) => {\n let userTenantId\n if (user) {\n userTenantId = user.tenantId || DEFAULT_TENANT_ID\n } else {\n userTenantId = getTenantId()\n }\n const tenantId = getTenantIDFromAppID(appId) || DEFAULT_TENANT_ID\n return tenantId === userTenantId\n}\n\nconst ALL_STRATEGIES = Object.values(TenantResolutionStrategy)\n\nexport const getTenantIDFromCtx = (\n ctx: BBContext,\n opts: GetTenantIdOptions\n): string | undefined => {\n // exit early if not multi-tenant\n if (!isMultiTenant()) {\n return DEFAULT_TENANT_ID\n }\n\n // opt defaults\n if (opts.allowNoTenant === undefined) {\n opts.allowNoTenant = false\n }\n if (!opts.includeStrategies) {\n opts.includeStrategies = ALL_STRATEGIES\n }\n if (!opts.excludeStrategies) {\n opts.excludeStrategies = []\n }\n\n const isAllowed = (strategy: TenantResolutionStrategy) => {\n // excluded takes precedence\n if (opts.excludeStrategies?.includes(strategy)) {\n return false\n }\n if (opts.includeStrategies?.includes(strategy)) {\n return true\n }\n }\n\n // always use user first\n if (isAllowed(TenantResolutionStrategy.USER)) {\n const userTenantId = ctx.user?.tenantId\n if (userTenantId) {\n return userTenantId\n }\n }\n\n // header\n if (isAllowed(TenantResolutionStrategy.HEADER)) {\n const headerTenantId = ctx.request.headers[Header.TENANT_ID]\n if (headerTenantId) {\n return headerTenantId as string\n }\n }\n\n // query param\n if (isAllowed(TenantResolutionStrategy.QUERY)) {\n const queryTenantId = ctx.request.query.tenantId\n if (queryTenantId) {\n return queryTenantId as string\n }\n }\n\n // subdomain\n if (isAllowed(TenantResolutionStrategy.SUBDOMAIN)) {\n // e.g. budibase.app or local.com:10000\n let platformHost\n try {\n platformHost = new URL(getPlatformURL()).host.split(\":\")[0]\n } catch (err: any) {\n // if invalid URL, just don't try to process subdomain\n if (err.code !== \"ERR_INVALID_URL\") {\n throw err\n }\n }\n // e.g. 
tenant.budibase.app or tenant.local.com\n const requestHost = ctx.host\n // parse the tenant id from the difference\n if (platformHost && requestHost.includes(platformHost)) {\n const tenantId = requestHost.substring(\n 0,\n requestHost.indexOf(`.${platformHost}`)\n )\n if (tenantId) {\n return tenantId\n }\n }\n }\n\n // path\n if (isAllowed(TenantResolutionStrategy.PATH)) {\n // params - have to parse manually due to koa-router not run yet\n const match = ctx.matched.find(\n (m: any) => !!m.paramNames.find((p: any) => p.name === \"tenantId\")\n )\n\n // get the raw path url - without any query params\n const ctxUrl = ctx.originalUrl\n let url\n if (ctxUrl.includes(\"?\")) {\n url = ctxUrl.split(\"?\")[0]\n } else {\n url = ctxUrl\n }\n\n if (match) {\n const params = match.params(url, match.captures(url), {})\n if (params.tenantId) {\n return params.tenantId\n }\n }\n }\n\n if (!opts.allowNoTenant) {\n ctx.throw(403, \"Tenant id not set\")\n }\n\n return undefined\n}\n", "export * as users from \"./users\"\nexport * as tenants from \"./tenants\"\nexport * from \"./platformDb\"\n", "import { getPlatformDB } from \"./platformDb\"\nimport { DEFAULT_TENANT_ID } from \"../constants\"\nimport env from \"../environment\"\nimport {\n PlatformUser,\n PlatformUserByEmail,\n PlatformUserById,\n PlatformUserBySsoId,\n User,\n} from \"@budibase/types\"\n\n// READ\n\nexport async function lookupTenantId(userId: string) {\n if (!env.MULTI_TENANCY) {\n return DEFAULT_TENANT_ID\n }\n\n const user = await getUserDoc(userId)\n return user.tenantId\n}\n\nasync function getUserDoc(emailOrId: string): Promise<PlatformUser> {\n const db = getPlatformDB()\n return db.get(emailOrId)\n}\n\n// CREATE\n\nfunction newUserIdDoc(id: string, tenantId: string): PlatformUserById {\n return {\n _id: id,\n tenantId,\n }\n}\n\nfunction newUserEmailDoc(\n userId: string,\n email: string,\n tenantId: string\n): PlatformUserByEmail {\n return {\n _id: email,\n userId,\n tenantId,\n }\n}\n\nfunction newUserSsoIdDoc(\n ssoId: string,\n email: string,\n userId: string,\n tenantId: string\n): PlatformUserBySsoId {\n return {\n _id: ssoId,\n userId,\n email,\n tenantId,\n }\n}\n\n/**\n * Add a new user id or email doc if it doesn't exist.\n */\nasync function addUserDoc(emailOrId: string, newDocFn: () => PlatformUser) {\n const db = getPlatformDB()\n let user: PlatformUser\n\n try {\n await db.get(emailOrId)\n } catch (e: any) {\n if (e.status === 404) {\n user = newDocFn()\n await db.put(user)\n } else {\n throw e\n }\n }\n}\n\nexport async function addUser(\n tenantId: string,\n userId: string,\n email: string,\n ssoId?: string\n) {\n const promises = [\n addUserDoc(userId, () => newUserIdDoc(userId, tenantId)),\n addUserDoc(email, () => newUserEmailDoc(userId, email, tenantId)),\n ]\n\n if (ssoId) {\n promises.push(\n addUserDoc(ssoId, () => newUserSsoIdDoc(ssoId, email, userId, tenantId))\n )\n }\n\n await Promise.all(promises)\n}\n\n// DELETE\n\nexport async function removeUser(user: User) {\n const db = getPlatformDB()\n const keys = [user._id!, user.email]\n const userDocs = await db.allDocs({\n keys,\n include_docs: true,\n })\n const toDelete = userDocs.rows.map((row: any) => {\n return {\n ...row.doc,\n _deleted: true,\n }\n })\n await db.bulkDocs(toDelete)\n}\n", "import { StaticDatabases } from \"../constants\"\nimport { getDB } from \"../db/db\"\n\nexport function getPlatformDB() {\n return getDB(StaticDatabases.PLATFORM_INFO.name)\n}\n", "import { StaticDatabases } from \"../constants\"\nimport { getPlatformDB } from 
\"./platformDb\"\nimport { LockName, LockOptions, LockType, Tenants } from \"@budibase/types\"\nimport * as locks from \"../redis/redlockImpl\"\n\nconst TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants\n\nexport const tenacyLockOptions: LockOptions = {\n type: LockType.DEFAULT,\n name: LockName.UPDATE_TENANTS_DOC,\n ttl: 10 * 1000, // auto expire after 10 seconds\n systemLock: true,\n}\n\n// READ\n\nexport async function getTenantIds(): Promise<string[]> {\n const tenants = await getTenants()\n return tenants.tenantIds\n}\n\nasync function getTenants(): Promise<Tenants> {\n const db = getPlatformDB()\n let tenants: Tenants\n\n try {\n tenants = await db.get(TENANT_DOC)\n } catch (e: any) {\n // doesn't exist yet - create\n if (e.status === 404) {\n tenants = await createTenantsDoc()\n } else {\n throw e\n }\n }\n\n return tenants\n}\n\nexport async function exists(tenantId: string) {\n const tenants = await getTenants()\n return tenants.tenantIds.indexOf(tenantId) !== -1\n}\n\n// CREATE / UPDATE\n\nfunction newTenantsDoc(): Tenants {\n return {\n _id: TENANT_DOC,\n tenantIds: [],\n }\n}\n\nasync function createTenantsDoc(): Promise<Tenants> {\n const db = getPlatformDB()\n let tenants = newTenantsDoc()\n\n try {\n const response = await db.put(tenants)\n tenants._rev = response.rev\n } catch (e: any) {\n // don't throw 409 is doc has already been created\n if (e.status === 409) {\n return db.get(TENANT_DOC)\n }\n throw e\n }\n\n return tenants\n}\n\nexport async function addTenant(tenantId: string) {\n const db = getPlatformDB()\n\n // use a lock as tenant creation is conflict prone\n await locks.doWithLock(tenacyLockOptions, async () => {\n const tenants = await getTenants()\n\n // write the new tenant if it doesn't already exist\n if (tenants.tenantIds.indexOf(tenantId) === -1) {\n tenants.tenantIds.push(tenantId)\n await db.put(tenants)\n }\n })\n}\n\n// DELETE\n\nexport async function removeTenant(tenantId: string) {\n try {\n await locks.doWithLock(tenacyLockOptions, async () => {\n const db = getPlatformDB()\n const tenants = await getTenants()\n tenants.tenantIds = tenants.tenantIds.filter(id => id !== tenantId)\n await db.put(tenants)\n })\n } catch (err) {\n console.error(`Error removing tenant ${tenantId} from info db`, err)\n throw err\n }\n}\n", "import Redlock from \"redlock\"\nimport { getLockClient } from \"./init\"\nimport { LockOptions, LockType } from \"@budibase/types\"\nimport * as context from \"../context\"\nimport { utils } from \"@budibase/shared-core\"\nimport { Duration } from \"../utils\"\n\nasync function getClient(\n type: LockType,\n opts?: Redlock.Options\n): Promise<Redlock> {\n if (type === LockType.CUSTOM) {\n return newRedlock(opts)\n }\n\n switch (type) {\n case LockType.TRY_ONCE: {\n return newRedlock(OPTIONS.TRY_ONCE)\n }\n case LockType.TRY_TWICE: {\n return newRedlock(OPTIONS.TRY_TWICE)\n }\n case LockType.DEFAULT: {\n return newRedlock(OPTIONS.DEFAULT)\n }\n case LockType.DELAY_500: {\n return newRedlock(OPTIONS.DELAY_500)\n }\n case LockType.AUTO_EXTEND: {\n return newRedlock(OPTIONS.AUTO_EXTEND)\n }\n default: {\n throw utils.unreachable(type)\n }\n }\n}\n\nconst OPTIONS: Record<keyof typeof LockType, Redlock.Options> = {\n TRY_ONCE: {\n // immediately throws an error if the lock is already held\n retryCount: 0,\n },\n TRY_TWICE: {\n retryCount: 1,\n },\n DEFAULT: {\n // the expected clock drift; for more details\n // see http://redis.io/topics/distlock\n driftFactor: 0.01, // multiplied by lock ttl to determine drift time\n\n // the max 
number of times Redlock will attempt\n // to lock a resource before erroring\n retryCount: 10,\n\n // the time in ms between attempts\n retryDelay: 200, // time in ms\n\n // the max time in ms randomly added to retries\n // to improve performance under high contention\n // see https://www.awsarchitectureblog.com/2015/03/backoff.html\n retryJitter: 100, // time in ms\n },\n DELAY_500: {\n retryDelay: 500,\n },\n CUSTOM: {},\n AUTO_EXTEND: {\n retryCount: -1,\n },\n}\n\nexport async function newRedlock(opts: Redlock.Options = {}) {\n const options = { ...OPTIONS.DEFAULT, ...opts }\n const redisWrapper = await getLockClient()\n const client = redisWrapper.getClient()\n return new Redlock([client], options)\n}\n\ntype SuccessfulRedlockExecution<T> = {\n executed: true\n result: T\n}\ntype UnsuccessfulRedlockExecution = {\n executed: false\n}\n\ntype RedlockExecution<T> =\n | SuccessfulRedlockExecution<T>\n | UnsuccessfulRedlockExecution\n\nfunction getLockName(opts: LockOptions) {\n // determine lock name\n // by default use the tenantId for uniqueness, unless using a system lock\n const prefix = opts.systemLock ? \"system\" : context.getTenantId()\n let name: string = `lock:${prefix}_${opts.name}`\n // add additional unique name if required\n if (opts.resource) {\n name = name + `_${opts.resource}`\n }\n return name\n}\n\nexport const AUTO_EXTEND_POLLING_MS = Duration.fromSeconds(10).toMs()\n\nexport async function doWithLock<T>(\n opts: LockOptions,\n task: () => Promise<T>\n): Promise<RedlockExecution<T>> {\n const redlock = await getClient(opts.type, opts.customOptions)\n let lock: Redlock.Lock | undefined\n let timeout\n try {\n const name = getLockName(opts)\n\n const ttl =\n opts.type === LockType.AUTO_EXTEND ? AUTO_EXTEND_POLLING_MS : opts.ttl\n\n // create the lock\n lock = await redlock.lock(name, ttl)\n\n if (opts.type === LockType.AUTO_EXTEND) {\n // We keep extending the lock while the task is running\n const extendInIntervals = (): void => {\n timeout = setTimeout(async () => {\n lock = await lock!.extend(ttl, () => opts.onExtend && opts.onExtend())\n\n extendInIntervals()\n }, ttl / 2)\n }\n\n extendInIntervals()\n }\n\n // perform locked task\n // need to await to ensure completion before unlocking\n const result = await task()\n return { executed: true, result }\n } catch (e: any) {\n // lock limit exceeded\n if (e.name === \"LockError\") {\n if (opts.type === LockType.TRY_ONCE) {\n // don't throw for try-once locks, they will always error\n // due to retry count (0) exceeded\n return { executed: false }\n } else {\n throw e\n }\n } else {\n throw e\n }\n } finally {\n clearTimeout(timeout)\n await lock?.unlock()\n }\n}\n", "export * from \"./hashing\"\nexport * from \"./utils\"\nexport * from \"./stringUtils\"\nexport * from \"./Duration\"\n", "import env from \"../environment\"\n\nexport * from \"../docIds/newid\"\nconst bcrypt = env.JS_BCRYPT ? 
require(\"bcryptjs\") : require(\"bcrypt\")\n\nconst SALT_ROUNDS = env.SALT_ROUNDS || 10\n\nexport async function hash(data: string) {\n const salt = await bcrypt.genSalt(SALT_ROUNDS)\n return bcrypt.hash(data, salt)\n}\n\nexport async function compare(data: string, encrypted: string) {\n return bcrypt.compare(data, encrypted)\n}\n", "import { getAllApps } from \"../db\"\nimport { Header, MAX_VALID_DATE, DocumentType, SEPARATOR } from \"../constants\"\nimport env from \"../environment\"\nimport * as tenancy from \"../tenancy\"\nimport * as context from \"../context\"\nimport {\n App,\n AuditedEventFriendlyName,\n Ctx,\n Event,\n TenantResolutionStrategy,\n} from \"@budibase/types\"\nimport type { SetOption } from \"cookies\"\nimport jwt, { Secret } from \"jsonwebtoken\"\n\nconst APP_PREFIX = DocumentType.APP + SEPARATOR\nconst PROD_APP_PREFIX = \"/app/\"\n\nconst BUILDER_PREVIEW_PATH = \"/app/preview\"\nconst BUILDER_PREFIX = \"/builder\"\nconst BUILDER_APP_PREFIX = `${BUILDER_PREFIX}/app/`\nconst PUBLIC_API_PREFIX = \"/api/public/v\"\n\nfunction confirmAppId(possibleAppId: string | undefined) {\n return possibleAppId && possibleAppId.startsWith(APP_PREFIX)\n ? possibleAppId\n : undefined\n}\n\nexport async function resolveAppUrl(ctx: Ctx) {\n const appUrl = ctx.path.split(\"/\")[2]\n let possibleAppUrl = `/${appUrl.toLowerCase()}`\n\n let tenantId: string | undefined = context.getTenantId()\n if (!env.isDev() && env.MULTI_TENANCY) {\n // always use the tenant id from the subdomain in multi tenancy\n // this ensures the logged-in user tenant id doesn't overwrite\n // e.g. in the case of viewing a public app while already logged-in to another tenant\n tenantId = tenancy.getTenantIDFromCtx(ctx, {\n includeStrategies: [TenantResolutionStrategy.SUBDOMAIN],\n })\n }\n\n // search prod apps for an url that matches\n const apps: App[] = await context.doInTenant(\n tenantId,\n () => getAllApps({ dev: false }) as Promise<App[]>\n )\n const app = apps.filter(\n a => a.url && a.url.toLowerCase() === possibleAppUrl\n )[0]\n\n return app && app.appId ? 
app.appId : undefined\n}\n\nexport function isServingApp(ctx: Ctx) {\n // dev app\n if (ctx.path.startsWith(`/${APP_PREFIX}`)) {\n return true\n }\n // prod app\n return ctx.path.startsWith(PROD_APP_PREFIX)\n}\n\nexport function isServingBuilder(ctx: Ctx): boolean {\n return ctx.path.startsWith(BUILDER_APP_PREFIX)\n}\n\nexport function isServingBuilderPreview(ctx: Ctx): boolean {\n return ctx.path.startsWith(BUILDER_PREVIEW_PATH)\n}\n\nexport function isPublicApiRequest(ctx: Ctx): boolean {\n return ctx.path.startsWith(PUBLIC_API_PREFIX)\n}\n\n/**\n * Given a request tries to find the appId, which can be located in various places\n * @param ctx The main request body to look through.\n * @returns If an appId was found it will be returned.\n */\nexport async function getAppIdFromCtx(ctx: Ctx) {\n // look in headers\n const options = [ctx.request.headers[Header.APP_ID]]\n let appId\n for (let option of options) {\n appId = confirmAppId(option as string)\n if (appId) {\n break\n }\n }\n\n // look in body\n if (!appId && ctx.request.body && ctx.request.body.appId) {\n appId = confirmAppId(ctx.request.body.appId)\n }\n\n // look in the path\n const pathId = parseAppIdFromUrlPath(ctx.path)\n if (!appId && pathId) {\n appId = confirmAppId(pathId)\n }\n\n // lookup using custom url - prod apps only\n // filter out the builder preview path which collides with the prod app path\n // to ensure we don't load all apps excessively\n const isBuilderPreview = ctx.path.startsWith(BUILDER_PREVIEW_PATH)\n const isViewingProdApp =\n ctx.path.startsWith(PROD_APP_PREFIX) && !isBuilderPreview\n if (!appId && isViewingProdApp) {\n appId = confirmAppId(await resolveAppUrl(ctx))\n }\n\n // look in the referer - builder only\n // make sure this is performed after prod app url resolution, in case the\n // referer header is present from a builder redirect\n const referer = ctx.request.headers.referer\n if (!appId && referer?.includes(BUILDER_APP_PREFIX)) {\n const refererId = parseAppIdFromUrlPath(ctx.request.headers.referer)\n appId = confirmAppId(refererId)\n }\n\n return appId\n}\n\nfunction parseAppIdFromUrlPath(url?: string) {\n if (!url) {\n return\n }\n return url\n .split(\"?\")[0] // Remove any possible query string\n .split(\"/\")\n .find(subPath => subPath.startsWith(APP_PREFIX))\n}\n\n/**\n * opens the contents of the specified encrypted JWT.\n * @return the contents of the token.\n */\nexport function openJwt<T>(token?: string): T | undefined {\n if (!token) {\n return undefined\n }\n try {\n return jwt.verify(token, env.JWT_SECRET as Secret) as T\n } catch (e) {\n if (env.JWT_SECRET_FALLBACK) {\n // fallback to enable rotation\n return jwt.verify(token, env.JWT_SECRET_FALLBACK) as T\n } else {\n throw e\n }\n }\n}\n\nexport function isValidInternalAPIKey(apiKey: string) {\n if (env.INTERNAL_API_KEY && env.INTERNAL_API_KEY === apiKey) {\n return true\n }\n // fallback to enable rotation\n return !!(\n env.INTERNAL_API_KEY_FALLBACK && env.INTERNAL_API_KEY_FALLBACK === apiKey\n )\n}\n\n/**\n * Get a cookie from context, and decrypt if necessary.\n * @param ctx The request which is to be manipulated.\n * @param name The name of the cookie to get.\n */\nexport function getCookie<T>(ctx: Ctx, name: string) {\n const cookie = ctx.cookies.get(name)\n\n if (!cookie) {\n return undefined\n }\n\n return openJwt<T>(cookie)\n}\n\n/**\n * Store a cookie for the request - it will not expire.\n * @param ctx The request which is to be manipulated.\n * @param name The name of the cookie to set.\n * @param value The value 
of cookie which will be set.\n * @param opts options like whether to sign.\n */\nexport function setCookie(\n ctx: Ctx,\n value: any,\n name = \"builder\",\n opts = { sign: true }\n) {\n if (value && opts && opts.sign) {\n value = jwt.sign(value, env.JWT_SECRET as Secret)\n }\n\n const config: SetOption = {\n expires: MAX_VALID_DATE,\n path: \"/\",\n httpOnly: false,\n overwrite: true,\n }\n\n if (env.COOKIE_DOMAIN) {\n config.domain = env.COOKIE_DOMAIN\n }\n\n ctx.cookies.set(name, value, config)\n}\n\n/**\n * Utility function, simply calls setCookie with an empty string for value\n */\nexport function clearCookie(ctx: Ctx, name: string) {\n setCookie(ctx, null, name)\n}\n\n/**\n * Checks if the API call being made (based on the provided ctx object) is from the client. If\n * the call is not from a client app then it is from the builder.\n * @param ctx The koa context object to be tested.\n * @return returns true if the call is from the client lib (a built app rather than the builder).\n */\nexport function isClient(ctx: Ctx) {\n return ctx.headers[Header.TYPE] === \"client\"\n}\n\nexport function timeout(timeMs: number) {\n return new Promise(resolve => setTimeout(resolve, timeMs))\n}\n\nexport function isAudited(event: Event) {\n return !!AuditedEventFriendlyName[event]\n}\n\nexport function hasCircularStructure(json: any) {\n if (typeof json !== \"object\") {\n return false\n }\n try {\n JSON.stringify(json)\n } catch (err) {\n if (err instanceof Error && err?.message.includes(\"circular structure\")) {\n return true\n }\n }\n return false\n}\n", "export function validEmail(value: string) {\n return (\n value &&\n !!value.match(\n /^(([^<>()[\\]\\\\.,;:\\s@\"]+(\\.[^<>()[\\]\\\\.,;:\\s@\"]+)*)|(\".+\"))@((\\[[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}])|(([a-zA-Z\\-0-9]+\\.)+[a-zA-Z]{2,}))$/\n )\n )\n}\n", "export enum DurationType {\n MILLISECONDS = \"milliseconds\",\n SECONDS = \"seconds\",\n MINUTES = \"minutes\",\n HOURS = \"hours\",\n DAYS = \"days\",\n}\n\nconst conversion: Record<DurationType, number> = {\n milliseconds: 1,\n seconds: 1000,\n minutes: 60 * 1000,\n hours: 60 * 60 * 1000,\n days: 24 * 60 * 60 * 1000,\n}\n\nexport class Duration {\n static convert(from: DurationType, to: DurationType, duration: number) {\n const milliseconds = duration * conversion[from]\n return milliseconds / conversion[to]\n }\n\n static from(from: DurationType, duration: number) {\n return {\n to: (to: DurationType) => {\n return Duration.convert(from, to, duration)\n },\n toMs: () => {\n return Duration.convert(from, DurationType.MILLISECONDS, duration)\n },\n toSeconds: () => {\n return Duration.convert(from, DurationType.SECONDS, duration)\n },\n }\n }\n\n static fromSeconds(duration: number) {\n return Duration.from(DurationType.SECONDS, duration)\n }\n\n static fromMinutes(duration: number) {\n return Duration.from(DurationType.MINUTES, duration)\n }\n\n static fromHours(duration: number) {\n return Duration.from(DurationType.HOURS, duration)\n }\n\n static fromDays(duration: number) {\n return Duration.from(DurationType.DAYS, duration)\n }\n\n static fromMilliseconds(duration: number) {\n return Duration.from(DurationType.MILLISECONDS, duration)\n }\n}\n", "export * from \"./accounts\"\n", "import fetch from \"node-fetch\"\nimport * as logging from \"../logging\"\n\nexport default class API {\n host: string\n\n constructor(host: string) {\n this.host = host\n }\n\n async apiCall(method: string, url: string, options?: any) {\n if (!options.headers) {\n options.headers = {}\n }\n\n if 
(!options.headers[\"Content-Type\"]) {\n options.headers = {\n \"Content-Type\": \"application/json\",\n Accept: \"application/json\",\n ...options.headers,\n }\n }\n\n let json = options.headers[\"Content-Type\"] === \"application/json\"\n\n // add x-budibase-correlation-id header\n logging.correlation.setHeader(options.headers)\n\n const requestOptions = {\n method: method,\n body: json ? JSON.stringify(options.body) : options.body,\n headers: options.headers,\n // TODO: See if this is necessary\n credentials: \"include\",\n }\n\n return await fetch(`${this.host}${url}`, requestOptions)\n }\n\n async post(url: string, options?: any) {\n return this.apiCall(\"POST\", url, options)\n }\n\n async get(url: string, options?: any) {\n return this.apiCall(\"GET\", url, options)\n }\n\n async patch(url: string, options?: any) {\n return this.apiCall(\"PATCH\", url, options)\n }\n\n async del(url: string, options?: any) {\n return this.apiCall(\"DELETE\", url, options)\n }\n\n async put(url: string, options?: any) {\n return this.apiCall(\"PUT\", url, options)\n }\n}\n", "import API from \"./api\"\nimport env from \"../environment\"\nimport { Header } from \"../constants\"\nimport { CloudAccount, HealthStatusResponse } from \"@budibase/types\"\n\nconst api = new API(env.ACCOUNT_PORTAL_URL)\n\n/**\n * This client is intended to be used in a cloud hosted deploy only.\n * Rather than relying on each consumer to perform the necessary environmental checks\n * we use the following check to exit early with a undefined response which should be\n * handled by the caller.\n */\nconst EXIT_EARLY = env.SELF_HOSTED || env.DISABLE_ACCOUNT_PORTAL\n\nexport const getAccount = async (\n email: string\n): Promise<CloudAccount | undefined> => {\n if (EXIT_EARLY) {\n return\n }\n const payload = {\n email,\n }\n const response = await api.post(`/api/accounts/search`, {\n body: payload,\n headers: {\n [Header.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,\n },\n })\n\n if (response.status !== 200) {\n throw new Error(`Error getting account by email ${email}`)\n }\n\n const json: CloudAccount[] = await response.json()\n return json[0]\n}\n\nexport const getAccountByTenantId = async (\n tenantId: string\n): Promise<CloudAccount | undefined> => {\n if (EXIT_EARLY) {\n return\n }\n const payload = {\n tenantId,\n }\n const response = await api.post(`/api/accounts/search`, {\n body: payload,\n headers: {\n [Header.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,\n },\n })\n\n if (response.status !== 200) {\n throw new Error(`Error getting account by tenantId ${tenantId}`)\n }\n\n const json: CloudAccount[] = await response.json()\n return json[0]\n}\n\nexport const getStatus = async (): Promise<\n HealthStatusResponse | undefined\n> => {\n if (EXIT_EARLY) {\n return\n }\n const response = await api.get(`/api/status`, {\n headers: {\n [Header.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,\n },\n })\n const json = await response.json()\n\n if (response.status !== 200) {\n throw new Error(`Error getting status`)\n }\n\n return json\n}\n", "export * from \"./users\"\nexport * from \"./utils\"\nexport * from \"./lookup\"\nexport { UserDB } from \"./db\"\n", "import {\n directCouchFind,\n DocumentType,\n generateAppUserID,\n getGlobalUserParams,\n getProdAppID,\n getUsersByAppParams,\n pagination,\n queryGlobalView,\n queryGlobalViewRaw,\n SEPARATOR,\n UNICODE_MAX,\n ViewName,\n} from \"../db\"\nimport {\n BulkDocsResponse,\n SearchQuery,\n SearchQueryOperators,\n SearchUsersRequest,\n User,\n ContextUser,\n DatabaseQueryOpts,\n CouchFindOptions,\n} from 
\"@budibase/types\"\nimport { getGlobalDB } from \"../context\"\nimport * as context from \"../context\"\nimport { isCreator } from \"./utils\"\nimport { UserDB } from \"./db\"\n\ntype GetOpts = { cleanup?: boolean }\n\nfunction removeUserPassword(users: User | User[]) {\n if (Array.isArray(users)) {\n return users.map(user => {\n if (user) {\n delete user.password\n return user\n }\n })\n } else if (users) {\n delete users.password\n return users\n }\n return users\n}\n\nexport function isSupportedUserSearch(query: SearchQuery) {\n const allowed = [\n { op: SearchQueryOperators.STRING, key: \"email\" },\n { op: SearchQueryOperators.EQUAL, key: \"_id\" },\n ]\n for (let [key, operation] of Object.entries(query)) {\n if (typeof operation !== \"object\") {\n return false\n }\n const fields = Object.keys(operation || {})\n // this filter doesn't contain options - ignore\n if (fields.length === 0) {\n continue\n }\n const allowedOperation = allowed.find(\n allow =>\n allow.op === key && fields.length === 1 && fields[0] === allow.key\n )\n if (!allowedOperation) {\n return false\n }\n }\n return true\n}\n\nexport async function bulkGetGlobalUsersById(\n userIds: string[],\n opts?: GetOpts\n) {\n const db = getGlobalDB()\n let users = (\n await db.allDocs({\n keys: userIds,\n include_docs: true,\n })\n ).rows.map(row => row.doc) as User[]\n if (opts?.cleanup) {\n users = removeUserPassword(users) as User[]\n }\n return users\n}\n\nexport async function getAllUserIds() {\n const db = getGlobalDB()\n const startKey = `${DocumentType.USER}${SEPARATOR}`\n const response = await db.allDocs({\n startkey: startKey,\n endkey: `${startKey}${UNICODE_MAX}`,\n })\n return response.rows.map(row => row.id)\n}\n\nexport async function bulkUpdateGlobalUsers(users: User[]) {\n const db = getGlobalDB()\n return (await db.bulkDocs(users)) as BulkDocsResponse\n}\n\nexport async function getById(id: string, opts?: GetOpts): Promise<User> {\n const db = context.getGlobalDB()\n let user = await db.get<User>(id)\n if (opts?.cleanup) {\n user = removeUserPassword(user) as User\n }\n return user\n}\n\n/**\n * Given an email address this will use a view to search through\n * all the users to find one with this email address.\n */\nexport async function getGlobalUserByEmail(\n email: String,\n opts?: GetOpts\n): Promise<User | undefined> {\n if (email == null) {\n throw \"Must supply an email address to view\"\n }\n\n const response = await queryGlobalView<User>(ViewName.USER_BY_EMAIL, {\n key: email.toLowerCase(),\n include_docs: true,\n })\n\n if (Array.isArray(response)) {\n // shouldn't be able to happen, but need to handle just in case\n throw new Error(`Multiple users found with email address: ${email}`)\n }\n\n let user = response as User\n if (opts?.cleanup) {\n user = removeUserPassword(user) as User\n }\n\n return user\n}\n\nexport async function doesUserExist(email: string) {\n try {\n const user = await getGlobalUserByEmail(email)\n if (Array.isArray(user) || user != null) {\n return true\n }\n } catch (err) {\n return false\n }\n return false\n}\n\nexport async function searchGlobalUsersByApp(\n appId: any,\n opts: DatabaseQueryOpts,\n getOpts?: GetOpts\n) {\n if (typeof appId !== \"string\") {\n throw new Error(\"Must provide a string based app ID\")\n }\n const params = getUsersByAppParams(appId, {\n include_docs: true,\n })\n params.startkey = opts && opts.startkey ? 
opts.startkey : params.startkey\n let response = await queryGlobalView<User>(ViewName.USER_BY_APP, params)\n\n if (!response) {\n response = []\n }\n let users: User[] = Array.isArray(response) ? response : [response]\n if (getOpts?.cleanup) {\n users = removeUserPassword(users) as User[]\n }\n return users\n}\n\n/*\n Return any user who potentially has access to the application\n Admins, developers and app users with the explicitly role.\n*/\nexport async function searchGlobalUsersByAppAccess(\n appId: any,\n opts?: { limit?: number }\n) {\n const roleSelector = `roles.${appId}`\n\n let orQuery: any[] = [\n {\n \"builder.global\": true,\n },\n {\n \"admin.global\": true,\n },\n ]\n\n if (appId) {\n const roleCheck = {\n [roleSelector]: {\n $exists: true,\n },\n }\n orQuery.push(roleCheck)\n }\n\n let searchOptions: CouchFindOptions = {\n selector: {\n $or: orQuery,\n _id: {\n $regex: \"^us_\",\n },\n },\n limit: opts?.limit || 50,\n }\n\n const resp = await directCouchFind(context.getGlobalDBName(), searchOptions)\n return resp.rows\n}\n\nexport function getGlobalUserByAppPage(appId: string, user: User) {\n if (!user) {\n return\n }\n return generateAppUserID(getProdAppID(appId)!, user._id!)\n}\n\n/**\n * Performs a starts with search on the global email view.\n */\nexport async function searchGlobalUsersByEmail(\n email: string | unknown,\n opts: any,\n getOpts?: GetOpts\n) {\n if (typeof email !== \"string\") {\n throw new Error(\"Must provide a string to search by\")\n }\n const lcEmail = email.toLowerCase()\n // handle if passing up startkey for pagination\n const startkey = opts && opts.startkey ? opts.startkey : lcEmail\n let response = await queryGlobalView<User>(ViewName.USER_BY_EMAIL, {\n ...opts,\n startkey,\n endkey: `${lcEmail}${UNICODE_MAX}`,\n })\n if (!response) {\n response = []\n }\n let users: User[] = Array.isArray(response) ? response : [response]\n if (getOpts?.cleanup) {\n users = removeUserPassword(users) as User[]\n }\n return users\n}\n\nconst PAGE_LIMIT = 8\nexport async function paginatedUsers({\n bookmark,\n query,\n appId,\n limit,\n}: SearchUsersRequest = {}) {\n const db = getGlobalDB()\n const pageSize = limit ?? 
PAGE_LIMIT\n const pageLimit = pageSize + 1\n // get one extra document, to have the next page\n const opts: DatabaseQueryOpts = {\n include_docs: true,\n limit: pageLimit,\n }\n // add a startkey if the page was specified (anchor)\n if (bookmark) {\n opts.startkey = bookmark\n }\n // property specifies what to use for the page/anchor\n let userList: User[],\n property = \"_id\",\n getKey\n if (query?.equal?._id) {\n userList = [await getById(query.equal._id)]\n } else if (appId) {\n userList = await searchGlobalUsersByApp(appId, opts)\n getKey = (doc: any) => getGlobalUserByAppPage(appId, doc)\n } else if (query?.string?.email) {\n userList = await searchGlobalUsersByEmail(query?.string?.email, opts)\n property = \"email\"\n } else {\n // no search, query allDocs\n const response = await db.allDocs(getGlobalUserParams(null, opts))\n userList = response.rows.map((row: any) => row.doc)\n }\n return pagination(userList, pageSize, {\n paginate: true,\n property,\n getKey,\n })\n}\n\nexport async function getUserCount() {\n const response = await queryGlobalViewRaw(ViewName.USER_BY_EMAIL, {\n limit: 0, // to be as fast as possible - we just want the total rows count\n include_docs: false,\n })\n return response.total_rows\n}\n\nexport async function getCreatorCount() {\n let creators = 0\n async function iterate(startPage?: string) {\n const page = await paginatedUsers({ bookmark: startPage })\n creators += page.data.filter(isCreator).length\n if (page.hasNextPage) {\n await iterate(page.nextPage)\n }\n }\n await iterate()\n return creators\n}\n\n// used to remove the builder/admin permissions, for processing the\n// user as an app user (they may have some specific role/group\nexport function removePortalUserPermissions(user: User | ContextUser) {\n delete user.admin\n delete user.builder\n return user\n}\n\nexport function cleanseUserObject(user: User | ContextUser, base?: User) {\n delete user.admin\n delete user.builder\n delete user.roles\n if (base) {\n user.admin = base.admin\n user.builder = base.builder\n user.roles = base.roles\n }\n return user\n}\n\nexport async function addAppBuilder(user: User, appId: string) {\n const prodAppId = getProdAppID(appId)\n user.builder ??= {}\n user.builder.creator = true\n user.builder.apps ??= []\n user.builder.apps.push(prodAppId)\n await UserDB.save(user, { hashPassword: false })\n}\n\nexport async function removeAppBuilder(user: User, appId: string) {\n const prodAppId = getProdAppID(appId)\n if (user.builder && user.builder.apps?.includes(prodAppId)) {\n user.builder.apps = user.builder.apps.filter(id => id !== prodAppId)\n }\n await UserDB.save(user, { hashPassword: false })\n}\n", "import { CloudAccount } from \"@budibase/types\"\nimport * as accountSdk from \"../accounts\"\nimport env from \"../environment\"\nimport { getPlatformUser } from \"./lookup\"\nimport { EmailUnavailableError } from \"../errors\"\nimport { getTenantId } from \"../context\"\nimport { sdk } from \"@budibase/shared-core\"\nimport { getAccountByTenantId } from \"../accounts\"\n\n// extract from shared-core to make easily accessible from backend-core\nexport const isBuilder = sdk.users.isBuilder\nexport const isAdmin = sdk.users.isAdmin\nexport const isCreator = sdk.users.isCreator\nexport const isGlobalBuilder = sdk.users.isGlobalBuilder\nexport const isAdminOrBuilder = sdk.users.isAdminOrBuilder\nexport const hasAdminPermissions = sdk.users.hasAdminPermissions\nexport const hasBuilderPermissions = sdk.users.hasBuilderPermissions\nexport const hasAppBuilderPermissions = 
sdk.users.hasAppBuilderPermissions\n\nexport async function validateUniqueUser(email: string, tenantId: string) {\n // check budibase users in other tenants\n if (env.MULTI_TENANCY) {\n const tenantUser = await getPlatformUser(email)\n if (tenantUser != null && tenantUser.tenantId !== tenantId) {\n throw new EmailUnavailableError(email)\n }\n }\n\n // check root account users in account portal\n if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {\n const account = await accountSdk.getAccount(email)\n if (account && account.verified && account.tenantId !== tenantId) {\n throw new EmailUnavailableError(email)\n }\n }\n}\n\n/**\n * For the given user id's, return the account holder if it is in the ids.\n */\nexport async function getAccountHolderFromUserIds(\n userIds: string[]\n): Promise<CloudAccount | undefined> {\n if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {\n const tenantId = getTenantId()\n const account = await getAccountByTenantId(tenantId)\n if (!account) {\n throw new Error(`Account not found for tenantId=${tenantId}`)\n }\n\n const budibaseUserId = account.budibaseUserId\n if (userIds.includes(budibaseUserId)) {\n return account\n }\n }\n}\n", "import {\n AccountMetadata,\n PlatformUser,\n PlatformUserByEmail,\n User,\n} from \"@budibase/types\"\nimport * as dbUtils from \"../db\"\nimport { ViewName } from \"../constants\"\nimport { getExistingInvites } from \"../cache/invite\"\n\n/**\n * Apply a system-wide search on emails:\n * - in tenant\n * - cross tenant\n * - accounts\n * return an array of emails that match the supplied emails.\n */\nexport async function searchExistingEmails(emails: string[]) {\n let matchedEmails: string[] = []\n\n const existingTenantUsers = await getExistingTenantUsers(emails)\n matchedEmails.push(...existingTenantUsers.map(user => user.email))\n\n const existingPlatformUsers = await getExistingPlatformUsers(emails)\n matchedEmails.push(...existingPlatformUsers.map(user => user._id!))\n\n const existingAccounts = await getExistingAccounts(emails)\n matchedEmails.push(...existingAccounts.map(account => account.email))\n\n const invitedEmails = await getExistingInvites(emails)\n matchedEmails.push(...invitedEmails.map(invite => invite.email))\n\n return [...new Set(matchedEmails.map(email => email.toLowerCase()))]\n}\n\n// lookup, could be email or userId, either will return a doc\nexport async function getPlatformUser(\n identifier: string\n): Promise<PlatformUser | null> {\n // use the view here and allow to find anyone regardless of casing\n // Use lowercase to ensure email login is case insensitive\n return (await dbUtils.queryPlatformView(ViewName.PLATFORM_USERS_LOWERCASE, {\n keys: [identifier.toLowerCase()],\n include_docs: true,\n })) as PlatformUser\n}\n\nexport async function getExistingTenantUsers(\n emails: string[]\n): Promise<User[]> {\n const lcEmails = emails.map(email => email.toLowerCase())\n const params = {\n keys: lcEmails,\n include_docs: true,\n }\n\n const opts = {\n arrayResponse: true,\n }\n\n return (await dbUtils.queryGlobalView(\n ViewName.USER_BY_EMAIL,\n params,\n undefined,\n opts\n )) as User[]\n}\n\nexport async function getExistingPlatformUsers(\n emails: string[]\n): Promise<PlatformUserByEmail[]> {\n const lcEmails = emails.map(email => email.toLowerCase())\n const params = {\n keys: lcEmails,\n include_docs: true,\n }\n\n const opts = {\n arrayResponse: true,\n }\n return (await dbUtils.queryPlatformView(\n ViewName.PLATFORM_USERS_LOWERCASE,\n params,\n opts\n )) as PlatformUserByEmail[]\n}\n\nexport 
async function getExistingAccounts(\n emails: string[]\n): Promise<AccountMetadata[]> {\n const lcEmails = emails.map(email => email.toLowerCase())\n const params = {\n keys: lcEmails,\n include_docs: true,\n }\n\n const opts = {\n arrayResponse: true,\n }\n\n return (await dbUtils.queryPlatformView(\n ViewName.ACCOUNT_BY_EMAIL,\n params,\n opts\n )) as AccountMetadata[]\n}\n", "import * as utils from \"../utils\"\nimport { Duration, DurationType } from \"../utils\"\nimport env from \"../environment\"\nimport { getTenantId } from \"../context\"\nimport * as redis from \"../redis/init\"\n\nconst TTL_SECONDS = Duration.fromDays(7).toSeconds()\n\ninterface Invite {\n email: string\n info: any\n}\n\ninterface InviteWithCode extends Invite {\n code: string\n}\n\n/**\n * Given an invite code and invite body, allow the update an existing/valid invite in redis\n * @param code The invite code for an invite in redis\n * @param value The body of the updated user invitation\n */\nexport async function updateCode(code: string, value: Invite) {\n const client = await redis.getInviteClient()\n await client.store(code, value, TTL_SECONDS)\n}\n\n/**\n * Generates an invitation code and writes it to redis - which can later be checked for user creation.\n * @param email the email address which the code is being sent to (for use later).\n * @param info Information to be carried along with the invitation.\n * @return returns the code that was stored to redis.\n */\nexport async function createCode(email: string, info: any): Promise<string> {\n const code = utils.newid()\n const client = await redis.getInviteClient()\n await client.store(code, { email, info }, TTL_SECONDS)\n return code\n}\n\n/**\n * Checks that the provided invite code is valid - will return the email address of user that was invited.\n * @param code the invite code that was provided as part of the link.\n * @return If the code is valid then an email address will be returned.\n */\nexport async function getCode(code: string): Promise<Invite> {\n const client = await redis.getInviteClient()\n const value = (await client.get(code)) as Invite | undefined\n if (!value) {\n throw \"Invitation is not valid or has expired, please request a new one.\"\n }\n return value\n}\n\nexport async function deleteCode(code: string) {\n const client = await redis.getInviteClient()\n await client.delete(code)\n}\n\n/**\n Get all currently available user invitations for the current tenant.\n **/\nexport async function getInviteCodes(): Promise<InviteWithCode[]> {\n const client = await redis.getInviteClient()\n const invites: { key: string; value: Invite }[] = await client.scan()\n\n const results: InviteWithCode[] = invites.map(invite => {\n return {\n ...invite.value,\n code: invite.key,\n }\n })\n if (!env.MULTI_TENANCY) {\n return results\n }\n const tenantId = getTenantId()\n return results.filter(invite => tenantId === invite.info.tenantId)\n}\n\nexport async function getExistingInvites(\n emails: string[]\n): Promise<InviteWithCode[]> {\n return (await getInviteCodes()).filter(invite =>\n emails.includes(invite.email)\n )\n}\n", "export * from \"./errors\"\n", "// BASE\n\nexport abstract class BudibaseError extends Error {\n code: string\n\n constructor(message: string, code: ErrorCode) {\n super(message)\n this.code = code\n }\n\n protected getPublicError?(): any\n}\n\n// ERROR HANDLING\n\nexport enum ErrorCode {\n USAGE_LIMIT_EXCEEDED = \"usage_limit_exceeded\",\n FEATURE_DISABLED = \"feature_disabled\",\n INVALID_API_KEY = \"invalid_api_key\",\n HTTP = 
\"http\",\n}\n\n/**\n * For the given error, build the public representation that is safe\n * to be exposed over an api.\n */\nexport const getPublicError = (err: any) => {\n let error\n if (err.code) {\n // add generic error information\n error = {\n code: err.code,\n }\n\n if (err.getPublicError) {\n error = {\n ...error,\n // get any additional context from this error\n ...err.getPublicError(),\n }\n }\n }\n\n return error\n}\n\n// HTTP\n\nexport class HTTPError extends BudibaseError {\n status: number\n\n constructor(message: string, httpStatus: number, code = ErrorCode.HTTP) {\n super(message, code)\n this.status = httpStatus\n }\n}\n\nexport class NotFoundError extends HTTPError {\n constructor(message: string) {\n super(message, 404)\n }\n}\n\nexport class BadRequestError extends HTTPError {\n constructor(message: string) {\n super(message, 400)\n }\n}\n\n// LICENSING\n\nexport class UsageLimitError extends HTTPError {\n limitName: string\n\n constructor(message: string, limitName: string) {\n super(message, 400, ErrorCode.USAGE_LIMIT_EXCEEDED)\n this.limitName = limitName\n }\n\n getPublicError() {\n return {\n limitName: this.limitName,\n }\n }\n}\n\nexport class FeatureDisabledError extends HTTPError {\n featureName: string\n\n constructor(message: string, featureName: string) {\n super(message, 400, ErrorCode.FEATURE_DISABLED)\n this.featureName = featureName\n }\n\n getPublicError() {\n return {\n featureName: this.featureName,\n }\n }\n}\n\n// AUTH\n\nexport class InvalidAPIKeyError extends BudibaseError {\n constructor() {\n super(\n \"Invalid API key - may need re-generated, or user doesn't exist\",\n ErrorCode.INVALID_API_KEY\n )\n }\n}\n\n// USERS\n\nexport class EmailUnavailableError extends Error {\n constructor(email: string) {\n super(`Email already in use: '${email}'`)\n }\n}\n", "import env from \"../environment\"\nimport * as eventHelpers from \"./events\"\nimport * as accountSdk from \"../accounts\"\nimport * as cache from \"../cache\"\nimport { getGlobalDB, getIdentity, getTenantId } from \"../context\"\nimport * as dbUtils from \"../db\"\nimport { EmailUnavailableError, HTTPError } from \"../errors\"\nimport * as platform from \"../platform\"\nimport * as sessions from \"../security/sessions\"\nimport * as usersCore from \"./users\"\nimport {\n Account,\n BulkUserCreated,\n BulkUserDeleted,\n isSSOAccount,\n isSSOUser,\n SaveUserOpts,\n User,\n UserStatus,\n UserGroup,\n} from \"@budibase/types\"\nimport {\n getAccountHolderFromUserIds,\n isAdmin,\n isCreator,\n validateUniqueUser,\n} from \"./utils\"\nimport { searchExistingEmails } from \"./lookup\"\nimport { hash } from \"../utils\"\nimport { validatePassword } from \"../security\"\n\ntype QuotaUpdateFn = (\n change: number,\n creatorsChange: number,\n cb?: () => Promise<any>\n) => Promise<any>\ntype GroupUpdateFn = (groupId: string, userIds: string[]) => Promise<any>\ntype FeatureFn = () => Promise<Boolean>\ntype GroupGetFn = (ids: string[]) => Promise<UserGroup[]>\ntype GroupBuildersFn = (user: User) => Promise<string[]>\ntype QuotaFns = { addUsers: QuotaUpdateFn; removeUsers: QuotaUpdateFn }\ntype GroupFns = {\n addUsers: GroupUpdateFn\n getBulk: GroupGetFn\n getGroupBuilderAppIds: GroupBuildersFn\n}\ntype CreateAdminUserOpts = {\n ssoId?: string\n hashPassword?: boolean\n requirePassword?: boolean\n skipPasswordValidation?: boolean\n}\ntype FeatureFns = { isSSOEnforced: FeatureFn; isAppBuildersEnabled: FeatureFn }\n\nconst bulkDeleteProcessing = async (dbUser: User) => {\n const userId = dbUser._id as 
string\n await platform.users.removeUser(dbUser)\n await eventHelpers.handleDeleteEvents(dbUser)\n await cache.user.invalidateUser(userId)\n await sessions.invalidateSessions(userId, { reason: \"bulk-deletion\" })\n}\n\nexport class UserDB {\n static quotas: QuotaFns\n static groups: GroupFns\n static features: FeatureFns\n\n static init(quotaFns: QuotaFns, groupFns: GroupFns, featureFns: FeatureFns) {\n UserDB.quotas = quotaFns\n UserDB.groups = groupFns\n UserDB.features = featureFns\n }\n\n static async isPreventPasswordActions(user: User, account?: Account) {\n // when in maintenance mode we allow sso users with the admin role\n // to perform any password action - this prevents lockout\n if (env.ENABLE_SSO_MAINTENANCE_MODE && isAdmin(user)) {\n return false\n }\n\n // SSO is enforced for all users\n if (await UserDB.features.isSSOEnforced()) {\n return true\n }\n\n // Check local sso\n if (isSSOUser(user)) {\n return true\n }\n\n // Check account sso\n if (!account) {\n account = await accountSdk.getAccountByTenantId(getTenantId())\n }\n return !!(account && account.email === user.email && isSSOAccount(account))\n }\n\n static async buildUser(\n user: User,\n opts: SaveUserOpts = {\n hashPassword: true,\n requirePassword: true,\n },\n tenantId: string,\n dbUser?: any,\n account?: Account\n ): Promise<User> {\n let { password, _id } = user\n\n // don't require a password if the db user doesn't already have one\n if (dbUser && !dbUser.password) {\n opts.requirePassword = false\n }\n\n let hashedPassword\n if (password) {\n if (await UserDB.isPreventPasswordActions(user, account)) {\n throw new HTTPError(\"Password change is disabled for this user\", 400)\n }\n\n if (!opts.skipPasswordValidation) {\n const passwordValidation = validatePassword(password)\n if (!passwordValidation.valid) {\n throw new HTTPError(passwordValidation.error, 400)\n }\n }\n\n hashedPassword = opts.hashPassword ? 
await hash(password) : password\n } else if (dbUser) {\n hashedPassword = dbUser.password\n }\n\n // passwords are never required if sso is enforced\n const requirePasswords =\n opts.requirePassword && !(await UserDB.features.isSSOEnforced())\n if (!hashedPassword && requirePasswords) {\n throw \"Password must be specified.\"\n }\n\n _id = _id || dbUtils.generateGlobalUserID()\n\n const fullUser = {\n createdAt: Date.now(),\n ...dbUser,\n ...user,\n _id,\n password: hashedPassword,\n tenantId,\n }\n // make sure the roles object is always present\n if (!fullUser.roles) {\n fullUser.roles = {}\n }\n // add the active status to a user if it's not provided\n if (fullUser.status == null) {\n fullUser.status = UserStatus.ACTIVE\n }\n\n return fullUser\n }\n\n static async allUsers() {\n const db = getGlobalDB()\n const response = await db.allDocs<User>(\n dbUtils.getGlobalUserParams(null, {\n include_docs: true,\n })\n )\n return response.rows.map(row => row.doc!)\n }\n\n static async countUsersByApp(appId: string) {\n let response: any = await usersCore.searchGlobalUsersByApp(appId, {})\n return {\n userCount: response.length,\n }\n }\n\n static async getUsersByAppAccess(opts: { appId?: string; limit?: number }) {\n let response: User[] = await usersCore.searchGlobalUsersByAppAccess(\n opts.appId,\n { limit: opts.limit || 50 }\n )\n return response\n }\n\n static async getUserByEmail(email: string) {\n return usersCore.getGlobalUserByEmail(email)\n }\n\n /**\n * Gets a user by ID from the global database, based on the current tenancy.\n */\n static async getUser(userId: string) {\n const user = await usersCore.getById(userId)\n if (user) {\n delete user.password\n }\n return user\n }\n\n static async bulkGet(userIds: string[]) {\n return await usersCore.bulkGetGlobalUsersById(userIds)\n }\n\n static async bulkUpdate(users: User[]) {\n return await usersCore.bulkUpdateGlobalUsers(users)\n }\n\n static async save(user: User, opts: SaveUserOpts = {}): Promise<User> {\n // default booleans to true\n if (opts.hashPassword == null) {\n opts.hashPassword = true\n }\n if (opts.requirePassword == null) {\n opts.requirePassword = true\n }\n const tenantId = getTenantId()\n const db = getGlobalDB()\n\n let { email, _id, userGroups = [], roles } = user\n\n if (!email && !_id) {\n throw new Error(\"_id or email is required\")\n }\n\n let dbUser: User | undefined\n if (_id) {\n // try to get existing user from db\n try {\n dbUser = (await db.get(_id)) as User\n if (email && dbUser.email !== email) {\n throw \"Email address cannot be changed\"\n }\n email = dbUser.email\n } catch (e: any) {\n if (e.status === 404) {\n // do nothing, save this new user with the id specified - required for SSO auth\n } else {\n throw e\n }\n }\n }\n\n if (!dbUser && email) {\n // no id was specified - load from email instead\n dbUser = await usersCore.getGlobalUserByEmail(email)\n if (dbUser && dbUser._id !== _id) {\n throw new EmailUnavailableError(email)\n }\n }\n\n const change = dbUser ? 0 : 1 // no change if there is existing user\n const creatorsChange = isCreator(dbUser) !== isCreator(user) ? 
1 : 0\n return UserDB.quotas.addUsers(change, creatorsChange, async () => {\n await validateUniqueUser(email, tenantId)\n\n let builtUser = await UserDB.buildUser(user, opts, tenantId, dbUser)\n // don't allow a user to update its own roles/perms\n if (opts.currentUserId && opts.currentUserId === dbUser?._id) {\n builtUser = usersCore.cleanseUserObject(builtUser, dbUser) as User\n }\n\n if (!dbUser && roles?.length) {\n builtUser.roles = { ...roles }\n }\n\n // make sure we set the _id field for a new user\n // Also if this is a new user, associate groups with them\n let groupPromises = []\n if (!_id) {\n _id = builtUser._id!\n\n if (userGroups.length > 0) {\n for (let groupId of userGroups) {\n groupPromises.push(UserDB.groups.addUsers(groupId, [_id!]))\n }\n }\n }\n\n try {\n // save the user to db\n let response = await db.put(builtUser)\n builtUser._rev = response.rev\n\n await eventHelpers.handleSaveEvents(builtUser, dbUser)\n await platform.users.addUser(\n tenantId,\n builtUser._id!,\n builtUser.email,\n builtUser.ssoId\n )\n await cache.user.invalidateUser(response.id)\n\n await Promise.all(groupPromises)\n\n // finally returned the saved user from the db\n return db.get(builtUser._id!)\n } catch (err: any) {\n if (err.status === 409) {\n throw \"User exists already\"\n } else {\n throw err\n }\n }\n })\n }\n\n static async bulkCreate(\n newUsersRequested: User[],\n groups?: string[]\n ): Promise<BulkUserCreated> {\n const tenantId = getTenantId()\n\n let usersToSave: any[] = []\n let newUsers: any[] = []\n let newCreators: any[] = []\n\n const emails = newUsersRequested.map((user: User) => user.email)\n const existingEmails = await searchExistingEmails(emails)\n const unsuccessful: { email: string; reason: string }[] = []\n\n for (const newUser of newUsersRequested) {\n if (\n newUsers.find(\n (x: User) => x.email.toLowerCase() === newUser.email.toLowerCase()\n ) ||\n existingEmails.includes(newUser.email.toLowerCase())\n ) {\n unsuccessful.push({\n email: newUser.email,\n reason: `Unavailable`,\n })\n continue\n }\n newUser.userGroups = groups || []\n newUsers.push(newUser)\n if (isCreator(newUser)) {\n newCreators.push(newUser)\n }\n }\n\n const account = await accountSdk.getAccountByTenantId(tenantId)\n return UserDB.quotas.addUsers(\n newUsers.length,\n newCreators.length,\n async () => {\n // create the promises array that will be called by bulkDocs\n newUsers.forEach((user: any) => {\n usersToSave.push(\n UserDB.buildUser(\n user,\n {\n hashPassword: true,\n requirePassword: user.requirePassword,\n },\n tenantId,\n undefined, // no dbUser\n account\n )\n )\n })\n\n const usersToBulkSave = await Promise.all(usersToSave)\n await usersCore.bulkUpdateGlobalUsers(usersToBulkSave)\n\n // Post-processing of bulk added users, e.g. 
events and cache operations\n for (const user of usersToBulkSave) {\n // TODO: Refactor to bulk insert users into the info db\n // instead of relying on looping tenant creation\n await platform.users.addUser(tenantId, user._id, user.email)\n await eventHelpers.handleSaveEvents(user, undefined)\n }\n\n const saved = usersToBulkSave.map(user => {\n return {\n _id: user._id,\n email: user.email,\n }\n })\n\n // now update the groups\n if (Array.isArray(saved) && groups) {\n const groupPromises = []\n const createdUserIds = saved.map(user => user._id)\n for (let groupId of groups) {\n groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds))\n }\n await Promise.all(groupPromises)\n }\n\n return {\n successful: saved,\n unsuccessful,\n }\n }\n )\n }\n\n static async bulkDelete(userIds: string[]): Promise<BulkUserDeleted> {\n const db = getGlobalDB()\n\n const response: BulkUserDeleted = {\n successful: [],\n unsuccessful: [],\n }\n\n // remove the account holder from the delete request if present\n const account = await getAccountHolderFromUserIds(userIds)\n if (account) {\n userIds = userIds.filter(u => u !== account.budibaseUserId)\n // mark user as unsuccessful\n response.unsuccessful.push({\n _id: account.budibaseUserId,\n email: account.email,\n reason: \"Account holder cannot be deleted\",\n })\n }\n\n // Get users and delete\n const allDocsResponse = await db.allDocs<User>({\n include_docs: true,\n keys: userIds,\n })\n const usersToDelete = allDocsResponse.rows.map(user => {\n return user.doc!\n })\n\n // Delete from DB\n const toDelete = usersToDelete.map(user => ({\n ...user,\n _deleted: true,\n }))\n const dbResponse = await usersCore.bulkUpdateGlobalUsers(toDelete)\n const creatorsToDelete = usersToDelete.filter(isCreator)\n\n for (let user of usersToDelete) {\n await bulkDeleteProcessing(user)\n }\n await UserDB.quotas.removeUsers(toDelete.length, creatorsToDelete.length)\n\n // Build Response\n // index users by id\n const userIndex: { [key: string]: User } = {}\n usersToDelete.reduce((prev, current) => {\n prev[current._id!] = current\n return prev\n }, userIndex)\n\n // add the successful and unsuccessful users to response\n dbResponse.forEach(item => {\n const email = userIndex[item.id].email\n if (item.ok) {\n response.successful.push({ _id: item.id, email })\n } else {\n response.unsuccessful.push({\n _id: item.id,\n email,\n reason: \"Database error\",\n })\n }\n })\n\n return response\n }\n\n static async destroy(id: string) {\n const db = getGlobalDB()\n const dbUser = (await db.get(id)) as User\n const userId = dbUser._id as string\n\n if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {\n // root account holder can't be deleted from inside budibase\n const email = dbUser.email\n const account = await accountSdk.getAccount(email)\n if (account) {\n if (dbUser.userId === getIdentity()!._id) {\n throw new HTTPError('Please visit \"Account\" to delete this user', 400)\n } else {\n throw new HTTPError(\"Account holder cannot be deleted\", 400)\n }\n }\n }\n\n await platform.users.removeUser(dbUser)\n\n await db.remove(userId, dbUser._rev)\n\n const creatorsToDelete = isCreator(dbUser) ? 
1 : 0\n await UserDB.quotas.removeUsers(1, creatorsToDelete)\n await eventHelpers.handleDeleteEvents(dbUser)\n await cache.user.invalidateUser(userId)\n await sessions.invalidateSessions(userId, { reason: \"deletion\" })\n }\n\n static async createAdminUser(\n email: string,\n password: string,\n tenantId: string,\n opts?: CreateAdminUserOpts\n ) {\n const user: User = {\n email: email,\n password: password,\n createdAt: Date.now(),\n roles: {},\n builder: {\n global: true,\n },\n admin: {\n global: true,\n },\n tenantId,\n }\n if (opts?.ssoId) {\n user.ssoId = opts.ssoId\n }\n // always bust checklist beforehand, if an error occurs but can proceed, don't get\n // stuck in a cycle\n await cache.bustCache(cache.CacheKey.CHECKLIST)\n return await UserDB.save(user, {\n hashPassword: opts?.hashPassword,\n requirePassword: opts?.requirePassword,\n skipPasswordValidation: opts?.skipPasswordValidation,\n })\n }\n\n static async getGroups(groupIds: string[]) {\n return await this.groups.getBulk(groupIds)\n }\n\n static async getGroupBuilderAppIds(user: User) {\n return await this.groups.getGroupBuilderAppIds(user)\n }\n}\n", "import env from \"../environment\"\nimport * as events from \"../events\"\nimport * as accounts from \"../accounts\"\nimport { getTenantId } from \"../context\"\nimport { User, UserRoles, CloudAccount } from \"@budibase/types\"\nimport { hasBuilderPermissions, hasAdminPermissions } from \"./utils\"\n\nexport const handleDeleteEvents = async (user: any) => {\n await events.user.deleted(user)\n\n if (hasBuilderPermissions(user)) {\n await events.user.permissionBuilderRemoved(user)\n }\n\n if (hasAdminPermissions(user)) {\n await events.user.permissionAdminRemoved(user)\n }\n}\n\nconst assignAppRoleEvents = async (\n user: User,\n roles: UserRoles,\n existingRoles: UserRoles\n) => {\n for (const [appId, role] of Object.entries(roles)) {\n // app role in existing is not same as new\n if (!existingRoles || existingRoles[appId] !== role) {\n await events.role.assigned(user, role)\n }\n }\n}\n\nconst unassignAppRoleEvents = async (\n user: User,\n roles: UserRoles,\n existingRoles: UserRoles\n) => {\n if (!existingRoles) {\n return\n }\n for (const [appId, role] of Object.entries(existingRoles)) {\n // app role in new is not same as existing\n if (!roles || roles[appId] !== role) {\n await events.role.unassigned(user, role)\n }\n }\n}\n\nconst handleAppRoleEvents = async (user: any, existingUser: any) => {\n const roles = user.roles\n const existingRoles = existingUser?.roles\n\n await assignAppRoleEvents(user, roles, existingRoles)\n await unassignAppRoleEvents(user, roles, existingRoles)\n}\n\nexport const handleSaveEvents = async (\n user: User,\n existingUser: User | undefined\n) => {\n const tenantId = getTenantId()\n let tenantAccount: CloudAccount | undefined\n if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {\n tenantAccount = await accounts.getAccountByTenantId(tenantId)\n }\n await events.identification.identifyUser(user, tenantAccount)\n\n if (existingUser) {\n await events.user.updated(user)\n\n if (isRemovingBuilder(user, existingUser)) {\n await events.user.permissionBuilderRemoved(user)\n }\n\n if (isRemovingAdmin(user, existingUser)) {\n await events.user.permissionAdminRemoved(user)\n }\n\n if (isOnboardingComplete(user, existingUser)) {\n await events.user.onboardingComplete(user)\n }\n\n if (\n !existingUser.forceResetPassword &&\n user.forceResetPassword &&\n user.password\n ) {\n await events.user.passwordForceReset(user)\n }\n\n if (user.password !== 
existingUser.password) {\n await events.user.passwordUpdated(user)\n }\n } else {\n await events.user.created(user)\n }\n\n if (isAddingBuilder(user, existingUser)) {\n await events.user.permissionBuilderAssigned(user)\n }\n\n if (isAddingAdmin(user, existingUser)) {\n await events.user.permissionAdminAssigned(user)\n }\n\n await handleAppRoleEvents(user, existingUser)\n}\n\nexport const isAddingBuilder = (user: any, existingUser: any) => {\n return isAddingPermission(user, existingUser, hasBuilderPermissions)\n}\n\nexport const isRemovingBuilder = (user: any, existingUser: any) => {\n return isRemovingPermission(user, existingUser, hasBuilderPermissions)\n}\n\nconst isAddingAdmin = (user: any, existingUser: any) => {\n return isAddingPermission(user, existingUser, hasAdminPermissions)\n}\n\nconst isRemovingAdmin = (user: any, existingUser: any) => {\n return isRemovingPermission(user, existingUser, hasAdminPermissions)\n}\n\nconst isOnboardingComplete = (user: any, existingUser: any) => {\n return !existingUser?.onboardedAt && typeof user.onboardedAt === \"string\"\n}\n\n/**\n * Check if a permission is being added to a new or existing user.\n */\nconst isAddingPermission = (\n user: any,\n existingUser: any,\n hasPermission: any\n) => {\n // new user doesn't have the permission\n if (!hasPermission(user)) {\n return false\n }\n\n // existing user has the permission\n if (existingUser && hasPermission(existingUser)) {\n return false\n }\n\n // permission is being added\n return true\n}\n\n/**\n * Check if a permission is being removed from an existing user.\n */\nconst isRemovingPermission = (\n user: any,\n existingUser: any,\n hasPermission: any\n) => {\n // new user has the permission\n if (hasPermission(user)) {\n return false\n }\n\n // no existing user or existing user doesn't have the permission\n if (!existingUser) {\n return false\n }\n\n // existing user doesn't have the permission\n if (!hasPermission(existingUser)) {\n return false\n }\n\n // permission is being removed\n return true\n}\n", "export * from \"./publishers\"\nexport * as processors from \"./processors\"\nexport * as analytics from \"./analytics\"\nexport { default as identification } from \"./identification\"\nexport * as backfillCache from \"./backfill\"\n\nimport { processors } from \"./processors\"\n\nexport function initAsyncEvents() {}\n\nexport const shutdown = () => {\n processors.shutdown()\n console.log(\"Events shutdown\")\n}\n", "import AnalyticsProcessor from \"./AnalyticsProcessor\"\nimport LoggingProcessor from \"./LoggingProcessor\"\nimport AuditLogsProcessor from \"./AuditLogsProcessor\"\nimport Processors from \"./Processors\"\nimport { AuditLogFn } from \"@budibase/types\"\n\nexport const analyticsProcessor = new AnalyticsProcessor()\nconst loggingProcessor = new LoggingProcessor()\nconst auditLogsProcessor = new AuditLogsProcessor()\n\nexport function init(auditingFn: AuditLogFn) {\n return AuditLogsProcessor.init(auditingFn)\n}\n\nexport const processors = new Processors([\n analyticsProcessor,\n loggingProcessor,\n auditLogsProcessor,\n])\n", "import { Event, Identity, Group, IdentityType } from \"@budibase/types\"\nimport { EventProcessor } from \"./types\"\nimport env from \"../../environment\"\nimport * as analytics from \"../analytics\"\nimport PosthogProcessor from \"./posthog\"\n\n/**\n * Events that are always captured.\n */\nconst EVENT_WHITELIST = [\n Event.INSTALLATION_VERSION_UPGRADED,\n Event.INSTALLATION_VERSION_DOWNGRADED,\n]\nconst IDENTITY_WHITELIST = 
[IdentityType.INSTALLATION, IdentityType.TENANT]\n\nexport default class AnalyticsProcessor implements EventProcessor {\n posthog: PosthogProcessor | undefined\n\n constructor() {\n if (env.POSTHOG_TOKEN && !env.isTest()) {\n this.posthog = new PosthogProcessor(env.POSTHOG_TOKEN)\n }\n }\n\n async processEvent(\n event: Event,\n identity: Identity,\n properties: any,\n timestamp?: string | number\n ): Promise<void> {\n if (!EVENT_WHITELIST.includes(event) && !(await analytics.enabled())) {\n return\n }\n if (this.posthog) {\n await this.posthog.processEvent(event, identity, properties, timestamp)\n }\n }\n\n async identify(identity: Identity, timestamp?: string | number) {\n // Group indentifications (tenant and installation) always on\n if (\n !IDENTITY_WHITELIST.includes(identity.type) &&\n !(await analytics.enabled())\n ) {\n return\n }\n if (this.posthog) {\n await this.posthog.identify(identity, timestamp)\n }\n }\n\n async identifyGroup(group: Group, timestamp?: string | number) {\n // Group indentifications (tenant and installation) always on\n if (this.posthog) {\n await this.posthog.identifyGroup(group, timestamp)\n }\n }\n\n shutdown() {\n if (this.posthog) {\n this.posthog.shutdown()\n }\n }\n}\n", "import * as configs from \"../configs\"\n\n// wrapper utility function\nexport const enabled = async () => {\n return configs.analyticsEnabled()\n}\n", "import PostHog from \"posthog-node\"\nimport { Event, Identity, Group, BaseEvent } from \"@budibase/types\"\nimport { EventProcessor } from \"../types\"\nimport env from \"../../../environment\"\nimport * as context from \"../../../context\"\nimport * as rateLimiting from \"./rateLimiting\"\n\nconst EXCLUDED_EVENTS: Event[] = [\n Event.USER_UPDATED,\n Event.EMAIL_SMTP_UPDATED,\n Event.AUTH_SSO_UPDATED,\n Event.APP_UPDATED,\n Event.ROLE_UPDATED,\n Event.DATASOURCE_UPDATED,\n Event.QUERY_UPDATED,\n Event.TABLE_UPDATED,\n Event.VIEW_UPDATED,\n Event.VIEW_FILTER_UPDATED,\n Event.VIEW_CALCULATION_UPDATED,\n Event.AUTOMATION_TRIGGER_UPDATED,\n Event.USER_GROUP_UPDATED,\n]\n\nexport default class PosthogProcessor implements EventProcessor {\n posthog: PostHog\n\n constructor(token: string | undefined) {\n if (!token) {\n throw new Error(\"Posthog token is not defined\")\n }\n this.posthog = new PostHog(token)\n }\n\n async processEvent(\n event: Event,\n identity: Identity,\n properties: BaseEvent,\n timestamp?: string | number\n ): Promise<void> {\n // don't send excluded events\n if (EXCLUDED_EVENTS.includes(event)) {\n return\n }\n\n if (await rateLimiting.limited(event)) {\n return\n }\n\n properties = this.clearPIIProperties(properties)\n\n properties.version = env.VERSION\n properties.service = env.SERVICE\n properties.environment = identity.environment\n properties.hosting = identity.hosting\n\n const appId = context.getAppId()\n if (appId) {\n properties.appId = appId\n }\n\n const payload: any = { distinctId: identity.id, event, properties }\n\n if (timestamp) {\n payload.timestamp = new Date(timestamp)\n }\n\n // add groups to the event\n if (identity.installationId || identity.tenantId) {\n payload.groups = {}\n if (identity.installationId) {\n payload.groups.installation = identity.installationId\n payload.properties.installationId = identity.installationId\n }\n if (identity.tenantId) {\n payload.groups.tenant = identity.tenantId\n payload.properties.tenantId = identity.tenantId\n }\n }\n\n this.posthog.capture(payload)\n }\n\n clearPIIProperties(properties: any) {\n if (properties.email) {\n delete properties.email\n }\n if 
(properties.audited) {\n delete properties.audited\n }\n return properties\n }\n\n async identify(identity: Identity, timestamp?: string | number) {\n const payload: any = { distinctId: identity.id, properties: identity }\n if (timestamp) {\n payload.timestamp = new Date(timestamp)\n }\n this.posthog.identify(payload)\n }\n\n async identifyGroup(group: Group, timestamp?: string | number) {\n const payload: any = {\n distinctId: group.id,\n groupType: group.type,\n groupKey: group.id,\n properties: group,\n }\n\n if (timestamp) {\n payload.timestamp = new Date(timestamp)\n }\n this.posthog.groupIdentify(payload)\n }\n\n shutdown() {\n this.posthog.shutdown()\n }\n}\n", "import { Event } from \"@budibase/types\"\nimport { CacheKey, TTL } from \"../../../cache/generic\"\nimport * as cache from \"../../../cache/generic\"\nimport * as context from \"../../../context\"\n\ntype RateLimitedEvent =\n | Event.SERVED_BUILDER\n | Event.SERVED_APP_PREVIEW\n | Event.SERVED_APP\n\nconst isRateLimited = (event: Event): event is RateLimitedEvent => {\n return (\n event === Event.SERVED_BUILDER ||\n event === Event.SERVED_APP_PREVIEW ||\n event === Event.SERVED_APP\n )\n}\n\nconst isPerApp = (event: RateLimitedEvent) => {\n return event === Event.SERVED_APP_PREVIEW || event === Event.SERVED_APP\n}\n\ninterface EventProperties {\n timestamp: number\n}\n\nenum RateLimit {\n CALENDAR_DAY = \"calendarDay\",\n}\n\nconst RATE_LIMITS = {\n [Event.SERVED_APP]: RateLimit.CALENDAR_DAY,\n [Event.SERVED_APP_PREVIEW]: RateLimit.CALENDAR_DAY,\n [Event.SERVED_BUILDER]: RateLimit.CALENDAR_DAY,\n}\n\n/**\n * Check if this event should be sent right now\n * Return false to signal the event SHOULD be sent\n * Return true to signal the event should NOT be sent\n */\nexport const limited = async (event: Event): Promise<boolean> => {\n // not a rate limited event -- send\n if (!isRateLimited(event)) {\n return false\n }\n\n const cachedEvent = await readEvent(event)\n if (cachedEvent) {\n const timestamp = new Date(cachedEvent.timestamp)\n const limit = RATE_LIMITS[event]\n switch (limit) {\n case RateLimit.CALENDAR_DAY: {\n // get midnight at the start of the next day for the timestamp\n timestamp.setDate(timestamp.getDate() + 1)\n timestamp.setHours(0, 0, 0, 0)\n\n // if we have passed the threshold into the next day\n if (Date.now() > timestamp.getTime()) {\n // update the timestamp in the event -- send\n await recordEvent(event, { timestamp: Date.now() })\n return false\n } else {\n // still within the limited period -- don't send\n return true\n }\n }\n }\n } else {\n // no event present i.e. 
expired -- send\n await recordEvent(event, { timestamp: Date.now() })\n return false\n }\n}\n\nconst eventKey = (event: RateLimitedEvent) => {\n let key = `${CacheKey.EVENTS_RATE_LIMIT}:${event}`\n if (isPerApp(event)) {\n key = key + \":\" + context.getAppId()\n }\n return key\n}\n\nconst readEvent = async (\n event: RateLimitedEvent\n): Promise<EventProperties | undefined> => {\n const key = eventKey(event)\n const result = await cache.get(key)\n return result as EventProperties\n}\n\nconst recordEvent = async (\n event: RateLimitedEvent,\n properties: EventProperties\n) => {\n const key = eventKey(event)\n const limit = RATE_LIMITS[event]\n let ttl\n switch (limit) {\n case RateLimit.CALENDAR_DAY: {\n ttl = TTL.ONE_DAY\n }\n }\n\n await cache.store(key, properties, ttl)\n}\n", "import PosthogProcessor from \"./PosthogProcessor\"\n\nexport default PosthogProcessor\n", "import { Event, Identity, Group } from \"@budibase/types\"\nimport { EventProcessor } from \"./types\"\nimport env from \"../../environment\"\n\nconst skipLogging = env.SELF_HOSTED && !env.isDev()\n\nexport default class LoggingProcessor implements EventProcessor {\n async processEvent(\n event: Event,\n identity: Identity,\n properties: any,\n timestamp?: string\n ): Promise<void> {\n if (skipLogging) {\n return\n }\n console.log(`[audit] [identityType=${identity.type}] ${event}`, properties)\n }\n\n async identify(identity: Identity, timestamp?: string | number) {\n if (skipLogging) {\n return\n }\n console.log(`[audit] identified`, identity)\n }\n\n async identifyGroup(group: Group, timestamp?: string | number) {\n if (skipLogging) {\n return\n }\n console.log(`[audit] group identified`, group)\n }\n\n shutdown(): void {\n // no-op\n }\n}\n", "import {\n Event,\n Identity,\n Group,\n IdentityType,\n AuditLogQueueEvent,\n AuditLogFn,\n HostInfo,\n} from \"@budibase/types\"\nimport { EventProcessor } from \"./types\"\nimport { getAppId, doInTenant, getTenantId } from \"../../context\"\nimport BullQueue from \"bull\"\nimport { createQueue, JobQueue } from \"../../queue\"\nimport { isAudited } from \"../../utils\"\nimport env from \"../../environment\"\n\nexport default class AuditLogsProcessor implements EventProcessor {\n static auditLogsEnabled = false\n static auditLogQueue: BullQueue.Queue<AuditLogQueueEvent>\n\n // can't use constructor as need to return promise\n static init(fn: AuditLogFn) {\n AuditLogsProcessor.auditLogsEnabled = true\n const writeAuditLogs = fn\n AuditLogsProcessor.auditLogQueue = createQueue<AuditLogQueueEvent>(\n JobQueue.AUDIT_LOG\n )\n return AuditLogsProcessor.auditLogQueue.process(async job => {\n return doInTenant(job.data.tenantId, async () => {\n let properties = job.data.properties\n if (properties.audited) {\n properties = {\n ...properties,\n ...properties.audited,\n }\n delete properties.audited\n }\n\n // this feature is disabled by default due to privacy requirements\n // in some countries - available as env var in-case it is desired\n // in self host deployments\n let hostInfo: HostInfo | undefined = {}\n if (env.ENABLE_AUDIT_LOG_IP_ADDR) {\n hostInfo = job.data.opts.hostInfo\n }\n\n await writeAuditLogs(job.data.event, properties, {\n userId: job.data.opts.userId,\n timestamp: job.data.opts.timestamp,\n appId: job.data.opts.appId,\n hostInfo,\n })\n })\n })\n }\n\n async processEvent(\n event: Event,\n identity: Identity,\n properties: any,\n timestamp?: string\n ): Promise<void> {\n if (AuditLogsProcessor.auditLogsEnabled && isAudited(event)) {\n // only audit log actual events, 
don't include backfills\n const userId =\n identity.type === IdentityType.USER ? identity.id : undefined\n // add to the event queue, rather than just writing immediately\n await AuditLogsProcessor.auditLogQueue.add({\n event,\n properties,\n opts: {\n userId,\n timestamp,\n appId: getAppId(),\n hostInfo: identity.hostInfo,\n },\n tenantId: getTenantId(),\n })\n }\n }\n\n async identify(identity: Identity, timestamp?: string | number) {\n // no-op\n }\n\n async identifyGroup(group: Group, timestamp?: string | number) {\n // no-op\n }\n\n shutdown(): void {\n AuditLogsProcessor.auditLogQueue?.close()\n }\n}\n", "export * from \"./queue\"\nexport * from \"./constants\"\n", "import env from \"../environment\"\nimport { getRedisOptions } from \"../redis/utils\"\nimport { JobQueue } from \"./constants\"\nimport InMemoryQueue from \"./inMemoryQueue\"\nimport BullQueue, { QueueOptions } from \"bull\"\nimport { addListeners, StalledFn } from \"./listeners\"\nimport { Duration } from \"../utils\"\nimport * as timers from \"../timers\"\n\n// the queue lock is held for 5 minutes\nconst QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs()\n// queue lock is refreshed every 30 seconds\nconst QUEUE_LOCK_RENEW_INTERNAL_MS = Duration.fromSeconds(30).toMs()\n// cleanup the queue every 60 seconds\nconst CLEANUP_PERIOD_MS = Duration.fromSeconds(60).toMs()\nlet QUEUES: BullQueue.Queue[] | InMemoryQueue[] = []\nlet cleanupInterval: NodeJS.Timeout\n\nasync function cleanup() {\n for (let queue of QUEUES) {\n await queue.clean(CLEANUP_PERIOD_MS, \"completed\")\n }\n}\n\nexport function createQueue<T>(\n jobQueue: JobQueue,\n opts: { removeStalledCb?: StalledFn } = {}\n): BullQueue.Queue<T> {\n const redisOpts = getRedisOptions()\n const queueConfig: QueueOptions = {\n redis: redisOpts,\n settings: {\n maxStalledCount: 0,\n lockDuration: QUEUE_LOCK_MS,\n lockRenewTime: QUEUE_LOCK_RENEW_INTERNAL_MS,\n },\n }\n let queue: any\n if (!env.isTest()) {\n queue = new BullQueue(jobQueue, queueConfig)\n } else {\n queue = new InMemoryQueue(jobQueue, queueConfig)\n }\n addListeners(queue, jobQueue, opts?.removeStalledCb)\n QUEUES.push(queue)\n if (!cleanupInterval && !env.isTest()) {\n cleanupInterval = timers.set(cleanup, CLEANUP_PERIOD_MS)\n // fire off an initial cleanup\n cleanup().catch(err => {\n console.error(`Unable to cleanup ${jobQueue} initially - ${err}`)\n })\n }\n return queue\n}\n\nexport async function shutdown() {\n if (cleanupInterval) {\n timers.clear(cleanupInterval)\n }\n if (QUEUES.length) {\n for (let queue of QUEUES) {\n await queue.close()\n }\n QUEUES = []\n }\n console.log(\"Queues shutdown\")\n}\n", "import events from \"events\"\nimport { timeout } from \"../utils\"\n\n/**\n * Bull works with a Job wrapper around all messages that contains a lot more information about\n * the state of the message, this object constructor implements the same schema of Bull jobs\n * for the sake of maintaining API consistency.\n * @param queue The name of the queue which the message will be carried on.\n * @param message The JSON message which will be passed back to the consumer.\n * @returns A new job which can now be put onto the queue, this is mostly an\n * internal structure so that an in memory queue can be easily swapped for a Bull queue.\n */\nfunction newJob(queue: string, message: any) {\n return {\n timestamp: Date.now(),\n queue: queue,\n data: message,\n opts: {},\n }\n}\n\n/**\n * This is designed to replicate Bull (https://github.com/OptimalBits/bull) in memory as a sort of mock.\n * It is relatively 
simple, using an event emitter internally to register when messages are available\n * to the consumers - in can support many inputs and many consumers.\n */\nclass InMemoryQueue {\n _name: string\n _opts?: any\n _messages: any[]\n _emitter: EventEmitter\n _runCount: number\n _addCount: number\n /**\n * The constructor the queue, exactly the same as that of Bulls.\n * @param name The name of the queue which is being configured.\n * @param opts This is not used by the in memory queue as there is no real use\n * case when in memory, but is the same API as Bull\n */\n constructor(name: string, opts?: any) {\n this._name = name\n this._opts = opts\n this._messages = []\n this._emitter = new events.EventEmitter()\n this._runCount = 0\n this._addCount = 0\n }\n\n /**\n * Same callback API as Bull, each callback passed to this will consume messages as they are\n * available. Please note this is a queue service, not a notification service, so each\n * consumer will receive different messages.\n * @param func The callback function which will return a \"Job\", the same\n * as the Bull API, within this job the property \"data\" contains the JSON message. Please\n * note this is incredibly limited compared to Bull as in reality the Job would contain\n * a lot more information about the queue and current status of Bull cluster.\n */\n process(func: any) {\n this._emitter.on(\"message\", async () => {\n if (this._messages.length <= 0) {\n return\n }\n let msg = this._messages.shift()\n let resp = func(msg)\n if (resp.then != null) {\n await resp\n }\n this._runCount++\n })\n }\n\n async isReady() {\n return true\n }\n\n // simply puts a message to the queue and emits to the queue for processing\n /**\n * Simple function to replicate the add message functionality of Bull, putting\n * a new message on the queue. 
This then emits an event which will be used to\n * return the message to a consumer (if one is attached).\n * @param msg A message to be transported over the queue, this should be\n * a JSON message as this is required by Bull.\n * @param repeat serves no purpose for the import queue.\n */\n // eslint-disable-next-line no-unused-vars\n add(msg: any, repeat: boolean) {\n if (typeof msg !== \"object\") {\n throw \"Queue only supports carrying JSON.\"\n }\n this._messages.push(newJob(this._name, msg))\n this._addCount++\n this._emitter.emit(\"message\")\n }\n\n /**\n * replicating the close function from bull, which waits for jobs to finish.\n */\n async close() {\n return []\n }\n\n /**\n * This removes a cron which has been implemented, this is part of Bull API.\n * @param cronJobId The cron which is to be removed.\n */\n removeRepeatableByKey(cronJobId: string) {\n // TODO: implement for testing\n console.log(cronJobId)\n }\n\n /**\n * Implemented for tests\n */\n getRepeatableJobs() {\n return []\n }\n\n // eslint-disable-next-line no-unused-vars\n removeJobs(pattern: string) {\n // no-op\n }\n\n /**\n * Implemented for tests\n */\n async clean() {\n return []\n }\n\n async getJob() {\n return {}\n }\n\n on() {\n // do nothing\n return this\n }\n\n async waitForCompletion() {\n do {\n await timeout(50)\n } while (this._addCount < this._runCount)\n }\n}\n\nexport default InMemoryQueue\n", "export enum JobQueue {\n AUTOMATION = \"automationQueue\",\n APP_BACKUP = \"appBackupQueue\",\n AUDIT_LOG = \"auditLogQueue\",\n SYSTEM_EVENT_QUEUE = \"systemEventQueue\",\n APP_MIGRATION = \"appMigration\",\n}\n", "import { Job, JobId, Queue } from \"bull\"\nimport { JobQueue } from \"./constants\"\nimport * as context from \"../context\"\n\nexport type StalledFn = (job: Job) => Promise<void>\n\nexport function addListeners(\n queue: Queue,\n jobQueue: JobQueue,\n removeStalledCb?: StalledFn\n) {\n logging(queue, jobQueue)\n if (removeStalledCb) {\n handleStalled(queue, removeStalledCb)\n }\n}\n\nfunction handleStalled(queue: Queue, removeStalledCb?: StalledFn) {\n queue.on(\"stalled\", async (job: Job) => {\n if (removeStalledCb) {\n await removeStalledCb(job)\n } else if (job.opts.repeat) {\n const jobId = job.id\n const repeatJobs = await queue.getRepeatableJobs()\n for (let repeatJob of repeatJobs) {\n if (repeatJob.id === jobId) {\n await queue.removeRepeatableByKey(repeatJob.key)\n }\n }\n console.log(`jobId=${jobId} disabled`)\n }\n })\n}\n\nfunction getLogParams(\n eventType: QueueEventType,\n event: BullEvent,\n opts: {\n job?: Job\n jobId?: JobId\n error?: Error\n } = {},\n extra: any = {}\n) {\n const message = `[BULL] ${eventType}=${event}`\n const err = opts.error\n\n const bullLog = {\n _logKey: \"bull\",\n eventType,\n event,\n job: opts.job,\n jobId: opts.jobId || opts.job?.id,\n ...extra,\n }\n\n let automationLog\n if (opts.job?.data?.automation) {\n automationLog = {\n _logKey: \"automation\",\n trigger: opts.job\n ? 
opts.job.data.automation.definition.trigger.event\n : undefined,\n }\n }\n\n return [message, err, bullLog, automationLog]\n}\n\nenum BullEvent {\n ERROR = \"error\",\n WAITING = \"waiting\",\n ACTIVE = \"active\",\n STALLED = \"stalled\",\n PROGRESS = \"progress\",\n COMPLETED = \"completed\",\n FAILED = \"failed\",\n PAUSED = \"paused\",\n RESUMED = \"resumed\",\n CLEANED = \"cleaned\",\n DRAINED = \"drained\",\n REMOVED = \"removed\",\n}\n\nenum QueueEventType {\n AUTOMATION_EVENT = \"automation-event\",\n APP_BACKUP_EVENT = \"app-backup-event\",\n AUDIT_LOG_EVENT = \"audit-log-event\",\n SYSTEM_EVENT = \"system-event\",\n APP_MIGRATION = \"app-migration\",\n}\n\nconst EventTypeMap: { [key in JobQueue]: QueueEventType } = {\n [JobQueue.AUTOMATION]: QueueEventType.AUTOMATION_EVENT,\n [JobQueue.APP_BACKUP]: QueueEventType.APP_BACKUP_EVENT,\n [JobQueue.AUDIT_LOG]: QueueEventType.AUDIT_LOG_EVENT,\n [JobQueue.SYSTEM_EVENT_QUEUE]: QueueEventType.SYSTEM_EVENT,\n [JobQueue.APP_MIGRATION]: QueueEventType.APP_MIGRATION,\n}\n\nfunction logging(queue: Queue, jobQueue: JobQueue) {\n const eventType = EventTypeMap[jobQueue]\n\n function doInJobContext(job: Job, task: any) {\n // if this is an automation job try to get the app id\n const appId = job.data.event?.appId\n if (appId) {\n return context.doInContext(appId, task)\n } else {\n task()\n }\n }\n\n queue\n .on(BullEvent.STALLED, async (job: Job) => {\n // A job has been marked as stalled. This is useful for debugging job\n // workers that crash or pause the event loop.\n await doInJobContext(job, () => {\n console.error(...getLogParams(eventType, BullEvent.STALLED, { job }))\n })\n })\n .on(BullEvent.ERROR, (error: any) => {\n // An error occurred.\n console.error(...getLogParams(eventType, BullEvent.ERROR, { error }))\n })\n\n if (process.env.NODE_DEBUG?.includes(\"bull\")) {\n queue\n .on(BullEvent.WAITING, (jobId: JobId) => {\n // A Job is waiting to be processed as soon as a worker is idling.\n console.info(...getLogParams(eventType, BullEvent.WAITING, { jobId }))\n })\n .on(BullEvent.ACTIVE, async (job: Job, jobPromise: any) => {\n // A job has started. You can use `jobPromise.cancel()`` to abort it.\n await doInJobContext(job, () => {\n console.info(...getLogParams(eventType, BullEvent.ACTIVE, { job }))\n })\n })\n .on(BullEvent.PROGRESS, async (job: Job, progress: any) => {\n // A job's progress was updated\n await doInJobContext(job, () => {\n console.info(\n ...getLogParams(\n eventType,\n BullEvent.PROGRESS,\n { job },\n { progress }\n )\n )\n })\n })\n .on(BullEvent.COMPLETED, async (job: Job, result) => {\n // A job successfully completed with a `result`.\n await doInJobContext(job, () => {\n console.info(\n ...getLogParams(eventType, BullEvent.COMPLETED, { job }, { result })\n )\n })\n })\n .on(BullEvent.FAILED, async (job: Job, error: any) => {\n // A job failed with reason `err`!\n await doInJobContext(job, () => {\n console.error(\n ...getLogParams(eventType, BullEvent.FAILED, { job, error })\n )\n })\n })\n .on(BullEvent.PAUSED, () => {\n // The queue has been paused.\n console.info(...getLogParams(eventType, BullEvent.PAUSED))\n })\n .on(BullEvent.RESUMED, () => {\n // The queue has been resumed.\n console.info(...getLogParams(eventType, BullEvent.RESUMED))\n })\n .on(BullEvent.CLEANED, (jobs: Job[], type: string) => {\n // Old jobs have been cleaned from the queue. 
`jobs` is an array of cleaned\n // jobs, and `type` is the type of jobs cleaned.\n console.info(\n ...getLogParams(\n eventType,\n BullEvent.CLEANED,\n {},\n { length: jobs.length, type }\n )\n )\n })\n .on(BullEvent.DRAINED, () => {\n // Emitted every time the queue has processed all the waiting jobs (even if there can be some delayed jobs not yet processed)\n console.info(...getLogParams(eventType, BullEvent.DRAINED))\n })\n .on(BullEvent.REMOVED, (job: Job) => {\n // A job successfully removed.\n console.info(...getLogParams(eventType, BullEvent.REMOVED, { job }))\n })\n }\n}\n", "import { Event, Identity, Group } from \"@budibase/types\"\nimport { EventProcessor } from \"./types\"\n\nexport default class Processor implements EventProcessor {\n initialised: boolean = false\n processors: EventProcessor[] = []\n\n constructor(processors: EventProcessor[]) {\n this.processors = processors\n }\n\n async processEvent(\n event: Event,\n identity: Identity,\n properties: any,\n timestamp?: string | number\n ): Promise<void> {\n for (const eventProcessor of this.processors) {\n await eventProcessor.processEvent(event, identity, properties, timestamp)\n }\n }\n\n async identify(\n identity: Identity,\n timestamp?: string | number\n ): Promise<void> {\n for (const eventProcessor of this.processors) {\n if (eventProcessor.identify) {\n await eventProcessor.identify(identity, timestamp)\n }\n }\n }\n\n async identifyGroup(\n identity: Group,\n timestamp?: string | number\n ): Promise<void> {\n for (const eventProcessor of this.processors) {\n if (eventProcessor.identifyGroup) {\n await eventProcessor.identifyGroup(identity, timestamp)\n }\n }\n }\n\n shutdown() {\n for (const eventProcessor of this.processors) {\n if (eventProcessor.shutdown) {\n eventProcessor.shutdown()\n }\n }\n }\n}\n", "import * as context from \"../context\"\nimport * as identityCtx from \"../context/identity\"\nimport env from \"../environment\"\nimport {\n Hosting,\n User,\n Identity,\n IdentityType,\n Account,\n isCloudAccount,\n isSSOAccount,\n TenantGroup,\n CloudAccount,\n UserIdentity,\n InstallationGroup,\n UserContext,\n Group,\n isSSOUser,\n} from \"@budibase/types\"\nimport { processors } from \"./processors\"\nimport { newid } from \"../utils\"\nimport * as installation from \"../installation\"\nimport * as configs from \"../configs\"\nimport * as users from \"../users\"\nimport { withCache, TTL, CacheKey } from \"../cache/generic\"\n\n/**\n * An identity can be:\n * - account user (Self host)\n * - budibase user\n * - tenant\n * - installation\n */\nconst getCurrentIdentity = async (): Promise<Identity> => {\n let identityContext = identityCtx.getIdentity()\n const environment = getDeploymentEnvironment()\n\n let identityType\n\n if (!identityContext) {\n identityType = IdentityType.TENANT\n } else {\n identityType = identityContext.type\n }\n\n if (identityType === IdentityType.INSTALLATION) {\n const installationId = await getInstallationId()\n const hosting = getHostingFromEnv()\n return {\n id: formatDistinctId(installationId, identityType),\n hosting,\n type: identityType,\n installationId,\n environment,\n }\n } else if (identityType === IdentityType.TENANT) {\n const installationId = await getInstallationId()\n const tenantId = await getEventTenantId(context.getTenantId())\n const hosting = getHostingFromEnv()\n\n return {\n id: formatDistinctId(tenantId, identityType),\n type: identityType,\n hosting,\n installationId,\n tenantId,\n realTenantId: context.getTenantId(),\n environment,\n }\n } else if 
(identityType === IdentityType.USER) {\n const userContext = identityContext as UserContext\n const tenantId = await getEventTenantId(context.getTenantId())\n const installationId = await getInstallationId()\n\n const account = userContext.account\n let hosting\n if (account) {\n hosting = account.hosting\n } else {\n hosting = getHostingFromEnv()\n }\n\n return {\n id: userContext._id,\n type: identityType,\n hosting,\n installationId,\n tenantId,\n environment,\n realTenantId: context.getTenantId(),\n hostInfo: userContext.hostInfo,\n }\n } else {\n throw new Error(\"Unknown identity type\")\n }\n}\n\nconst identifyInstallationGroup = async (\n installId: string,\n timestamp?: string | number\n): Promise<void> => {\n const id = installId\n const type = IdentityType.INSTALLATION\n const hosting = getHostingFromEnv()\n const version = env.VERSION\n const environment = getDeploymentEnvironment()\n\n const group: InstallationGroup = {\n id,\n type,\n hosting,\n version,\n environment,\n }\n\n await identifyGroup(group, timestamp)\n // need to create a normal identity for the group to be able to query it globally\n // match the posthog syntax to link this identity to the empty auto generated one\n await identify({ ...group, id: `$${type}_${id}` }, timestamp)\n}\n\nconst identifyTenantGroup = async (\n tenantId: string,\n account: Account | undefined,\n timestamp?: string | number\n): Promise<void> => {\n const id = await getEventTenantId(tenantId)\n const type = IdentityType.TENANT\n const installationId = await getInstallationId()\n const environment = getDeploymentEnvironment()\n\n let hosting: Hosting\n let profession: string | undefined\n let companySize: string | undefined\n\n if (account) {\n profession = account.profession\n companySize = account.size\n hosting = account.hosting\n } else {\n hosting = getHostingFromEnv()\n }\n\n const group: TenantGroup = {\n id,\n type,\n hosting,\n environment,\n installationId,\n profession,\n companySize,\n }\n\n await identifyGroup(group, timestamp)\n // need to create a normal identity for the group to be able to query it globally\n // match the posthog syntax to link this identity to the auto generated one\n await identify({ ...group, id: `$${type}_${id}` }, timestamp)\n}\n\nconst identifyUser = async (\n user: User,\n account: CloudAccount | undefined,\n timestamp?: string | number\n) => {\n const id = user._id as string\n const tenantId = await getEventTenantId(user.tenantId)\n const type = IdentityType.USER\n let builder = users.hasBuilderPermissions(user)\n let admin = users.hasAdminPermissions(user)\n let providerType\n if (isSSOUser(user)) {\n providerType = user.providerType\n }\n const accountHolder = account?.budibaseUserId === user._id || false\n const verified =\n account && account?.budibaseUserId === user._id ? account.verified : false\n const installationId = await getInstallationId()\n const hosting = account ? account.hosting : getHostingFromEnv()\n const environment = getDeploymentEnvironment()\n\n const identity: UserIdentity = {\n id,\n type,\n hosting,\n installationId,\n tenantId,\n verified,\n accountHolder,\n providerType,\n builder,\n admin,\n environment,\n }\n\n await identify(identity, timestamp)\n}\n\nconst identifyAccount = async (account: Account) => {\n let id = account.accountId\n const tenantId = account.tenantId\n let type = IdentityType.USER\n let providerType = isSSOAccount(account) ? 
account.providerType : undefined\n const verified = account.verified\n const accountHolder = true\n const hosting = account.hosting\n const installationId = await getInstallationId()\n const environment = getDeploymentEnvironment()\n\n if (isCloudAccount(account)) {\n if (account.budibaseUserId) {\n // use the budibase user as the id if set\n id = account.budibaseUserId\n }\n }\n\n const identity: UserIdentity = {\n id,\n type,\n hosting,\n installationId,\n tenantId,\n providerType,\n verified,\n accountHolder,\n environment,\n }\n\n await identify(identity)\n}\n\nconst identify = async (identity: Identity, timestamp?: string | number) => {\n await processors.identify(identity, timestamp)\n}\n\nconst identifyGroup = async (group: Group, timestamp?: string | number) => {\n await processors.identifyGroup(group, timestamp)\n}\n\nconst getDeploymentEnvironment = () => {\n if (env.isDev()) {\n return \"development\"\n } else {\n return env.DEPLOYMENT_ENVIRONMENT\n }\n}\n\nconst getHostingFromEnv = () => {\n return env.SELF_HOSTED ? Hosting.SELF : Hosting.CLOUD\n}\n\nconst getInstallationId = async () => {\n if (isAccountPortal()) {\n return \"account-portal\"\n }\n const install = await installation.getInstall()\n return install.installId\n}\n\nconst getEventTenantId = async (tenantId: string): Promise<string> => {\n if (env.SELF_HOSTED) {\n return getUniqueTenantId(tenantId)\n } else {\n // tenant id's in the cloud are already unique\n return tenantId\n }\n}\n\nexport const getUniqueTenantId = async (tenantId: string): Promise<string> => {\n // make sure this tenantId always matches the tenantId in context\n return context.doInTenant(tenantId, () => {\n return withCache(CacheKey.UNIQUE_TENANT_ID, TTL.ONE_DAY, async () => {\n const db = context.getGlobalDB()\n const config = await configs.getSettingsConfigDoc()\n\n let uniqueTenantId: string\n if (config.config.uniqueTenantId) {\n return config.config.uniqueTenantId\n } else {\n uniqueTenantId = `${newid()}_${tenantId}`\n config.config.uniqueTenantId = uniqueTenantId\n await db.put(config)\n return uniqueTenantId\n }\n })\n })\n}\n\nconst isAccountPortal = () => {\n return env.SERVICE === \"account-portal\"\n}\n\nconst formatDistinctId = (id: string, type: IdentityType) => {\n if (type === IdentityType.INSTALLATION || type === IdentityType.TENANT) {\n return `$${type}_${id}`\n } else {\n return id\n }\n}\n\nexport default {\n getCurrentIdentity,\n identifyInstallationGroup,\n identifyTenantGroup,\n identifyUser,\n identifyAccount,\n identify,\n identifyGroup,\n getInstallationId,\n getUniqueTenantId,\n}\n", "import { newid } from \"./utils\"\nimport * as events from \"./events\"\nimport { StaticDatabases, doWithDB } from \"./db\"\nimport { Installation, IdentityType, Database } from \"@budibase/types\"\nimport * as context from \"./context\"\nimport semver from \"semver\"\nimport { bustCache, withCache, TTL, CacheKey } from \"./cache/generic\"\nimport environment from \"./environment\"\n\nexport const getInstall = async (): Promise<Installation> => {\n return withCache(CacheKey.INSTALLATION, TTL.ONE_DAY, getInstallFromDB, {\n useTenancy: false,\n })\n}\nasync function createInstallDoc(platformDb: Database) {\n const install: Installation = {\n _id: StaticDatabases.PLATFORM_INFO.docs.install,\n installId: newid(),\n version: environment.VERSION,\n }\n try {\n const resp = await platformDb.put(install)\n install._rev = resp.rev\n return install\n } catch (err: any) {\n if (err.status === 409) {\n return getInstallFromDB()\n } else {\n throw 
err\n }\n }\n}\n\nexport const getInstallFromDB = async (): Promise<Installation> => {\n return doWithDB(\n StaticDatabases.PLATFORM_INFO.name,\n async (platformDb: any) => {\n let install: Installation\n try {\n install = await platformDb.get(\n StaticDatabases.PLATFORM_INFO.docs.install\n )\n } catch (e: any) {\n if (e.status === 404) {\n install = await createInstallDoc(platformDb)\n } else {\n throw e\n }\n }\n return install\n }\n )\n}\n\nconst updateVersion = async (version: string): Promise<boolean> => {\n try {\n await doWithDB(\n StaticDatabases.PLATFORM_INFO.name,\n async (platformDb: any) => {\n const install = await getInstall()\n install.version = version\n await platformDb.put(install)\n await bustCache(CacheKey.INSTALLATION)\n }\n )\n } catch (e: any) {\n if (e.status === 409) {\n // do nothing - version has already been updated\n // likely in clustered environment\n return false\n }\n throw e\n }\n return true\n}\n\nexport const checkInstallVersion = async (): Promise<void> => {\n const install = await getInstall()\n\n const currentVersion = install.version\n const newVersion = environment.VERSION\n\n if (currentVersion !== newVersion) {\n const isUpgrade = semver.gt(newVersion, currentVersion)\n const isDowngrade = semver.lt(newVersion, currentVersion)\n\n const success = await updateVersion(newVersion)\n\n if (success) {\n await context.doInIdentityContext(\n {\n _id: install.installId,\n type: IdentityType.INSTALLATION,\n },\n async () => {\n if (isUpgrade) {\n await events.installation.upgraded(currentVersion, newVersion)\n } else if (isDowngrade) {\n await events.installation.downgraded(currentVersion, newVersion)\n }\n }\n )\n await events.identification.identifyInstallationGroup(install.installId)\n }\n }\n}\n", "import {\n Event,\n BackfillMetadata,\n CachedEvent,\n SSOCreatedEvent,\n AutomationCreatedEvent,\n AutomationStepCreatedEvent,\n DatasourceCreatedEvent,\n LayoutCreatedEvent,\n QueryCreatedEvent,\n RoleCreatedEvent,\n ScreenCreatedEvent,\n TableCreatedEvent,\n ViewCreatedEvent,\n ViewCalculationCreatedEvent,\n ViewFilterCreatedEvent,\n AppPublishedEvent,\n UserCreatedEvent,\n RoleAssignedEvent,\n UserPermissionAssignedEvent,\n AppCreatedEvent,\n} from \"@budibase/types\"\nimport * as context from \"../context\"\nimport { CacheKey } from \"../cache/generic\"\nimport * as cache from \"../cache/generic\"\n\n// LIFECYCLE\n\nexport const start = async (events: Event[]) => {\n const metadata: BackfillMetadata = {\n eventWhitelist: events,\n }\n return saveBackfillMetadata(metadata)\n}\n\nexport const recordEvent = async (event: Event, properties: any) => {\n const eventKey = getEventKey(event, properties)\n // don't use a ttl - cleaned up by migration\n // don't use tenancy - already in the key\n await cache.store(eventKey, properties, undefined, { useTenancy: false })\n}\n\nexport const end = async () => {\n await deleteBackfillMetadata()\n await clearEvents()\n}\n\n// CRUD\n\nconst getBackfillMetadata = async (): Promise<BackfillMetadata | null> => {\n return cache.get(CacheKey.BACKFILL_METADATA)\n}\n\nconst saveBackfillMetadata = async (\n backfill: BackfillMetadata\n): Promise<void> => {\n // no TTL - deleted by backfill\n return cache.store(CacheKey.BACKFILL_METADATA, backfill)\n}\n\nconst deleteBackfillMetadata = async (): Promise<void> => {\n await cache.destroy(CacheKey.BACKFILL_METADATA)\n}\n\nconst clearEvents = async () => {\n // wildcard\n const pattern = getEventKey()\n const keys = await cache.keys(pattern)\n\n for (const key of keys) {\n // delete 
each key\n // don't use tenancy, already in the key\n await cache.destroy(key, { useTenancy: false })\n }\n}\n\n// HELPERS\n\nexport const isBackfillingEvent = async (event: Event) => {\n const backfill = await getBackfillMetadata()\n const events = backfill?.eventWhitelist\n if (events && events.includes(event)) {\n return true\n } else {\n return false\n }\n}\n\nexport const isAlreadySent = async (event: Event, properties: any) => {\n const eventKey = getEventKey(event, properties)\n const cachedEvent: CachedEvent = await cache.get(eventKey, {\n useTenancy: false,\n })\n return !!cachedEvent\n}\n\nconst CUSTOM_PROPERTY_SUFFIX: any = {\n // APP EVENTS\n [Event.AUTOMATION_CREATED]: (properties: AutomationCreatedEvent) => {\n return properties.automationId\n },\n [Event.AUTOMATION_STEP_CREATED]: (properties: AutomationStepCreatedEvent) => {\n return properties.stepId\n },\n [Event.DATASOURCE_CREATED]: (properties: DatasourceCreatedEvent) => {\n return properties.datasourceId\n },\n [Event.LAYOUT_CREATED]: (properties: LayoutCreatedEvent) => {\n return properties.layoutId\n },\n [Event.QUERY_CREATED]: (properties: QueryCreatedEvent) => {\n return properties.queryId\n },\n [Event.ROLE_CREATED]: (properties: RoleCreatedEvent) => {\n return properties.roleId\n },\n [Event.SCREEN_CREATED]: (properties: ScreenCreatedEvent) => {\n return properties.screenId\n },\n [Event.TABLE_CREATED]: (properties: TableCreatedEvent) => {\n return properties.tableId\n },\n [Event.VIEW_CREATED]: (properties: ViewCreatedEvent) => {\n return properties.tableId // best uniqueness\n },\n [Event.VIEW_CALCULATION_CREATED]: (\n properties: ViewCalculationCreatedEvent\n ) => {\n return properties.tableId // best uniqueness\n },\n [Event.VIEW_FILTER_CREATED]: (properties: ViewFilterCreatedEvent) => {\n return properties.tableId // best uniqueness\n },\n [Event.APP_CREATED]: (properties: AppCreatedEvent) => {\n return properties.appId // best uniqueness\n },\n [Event.APP_PUBLISHED]: (properties: AppPublishedEvent) => {\n return properties.appId // best uniqueness\n },\n // GLOBAL EVENTS\n [Event.AUTH_SSO_CREATED]: (properties: SSOCreatedEvent) => {\n return properties.type\n },\n [Event.AUTH_SSO_ACTIVATED]: (properties: SSOCreatedEvent) => {\n return properties.type\n },\n [Event.USER_CREATED]: (properties: UserCreatedEvent) => {\n return properties.userId\n },\n [Event.USER_PERMISSION_ADMIN_ASSIGNED]: (\n properties: UserPermissionAssignedEvent\n ) => {\n return properties.userId\n },\n [Event.USER_PERMISSION_BUILDER_ASSIGNED]: (\n properties: UserPermissionAssignedEvent\n ) => {\n return properties.userId\n },\n [Event.ROLE_ASSIGNED]: (properties: RoleAssignedEvent) => {\n return `${properties.roleId}-${properties.userId}`\n },\n}\n\nconst getEventKey = (event?: Event, properties?: any) => {\n let eventKey: string\n\n const tenantId = context.getTenantId()\n if (event) {\n eventKey = `${CacheKey.EVENTS}:${tenantId}:${event}`\n\n // use some properties to make the key more unique\n const custom = CUSTOM_PROPERTY_SUFFIX[event]\n const suffix = custom ? 
custom(properties) : undefined\n if (suffix) {\n eventKey = `${eventKey}:${suffix}`\n }\n } else {\n eventKey = `${CacheKey.EVENTS}:${tenantId}:*`\n }\n\n return eventKey\n}\n", "import BullQueue from \"bull\"\nimport { createQueue, JobQueue } from \"../../queue\"\nimport { Event, Identity } from \"@budibase/types\"\n\nexport interface EventPayload {\n event: Event\n identity: Identity\n properties: any\n timestamp?: string | number\n}\n\nexport let asyncEventQueue: BullQueue.Queue\n\nexport function init() {\n asyncEventQueue = createQueue<EventPayload>(JobQueue.SYSTEM_EVENT_QUEUE)\n}\n\nexport async function shutdown() {\n if (asyncEventQueue) {\n await asyncEventQueue.close()\n }\n}\n", "import { AsyncEvents } from \"@budibase/types\"\nimport { EventPayload, asyncEventQueue, init } from \"./queue\"\n\nexport async function publishAsyncEvent(payload: EventPayload) {\n if (!asyncEventQueue) {\n init()\n }\n const { event, identity } = payload\n if (AsyncEvents.indexOf(event) !== -1 && identity.tenantId) {\n await asyncEventQueue.add(payload)\n }\n}\n", "import { Event } from \"@budibase/types\"\nimport { processors } from \"./processors\"\nimport identification from \"./identification\"\nimport * as backfill from \"./backfill\"\nimport { publishAsyncEvent } from \"./asyncEvents\"\n\nexport const publishEvent = async (\n event: Event,\n properties: any,\n timestamp?: string | number\n) => {\n // in future this should use async events via a distributed queue.\n const identity = await identification.getCurrentIdentity()\n\n const backfilling = await backfill.isBackfillingEvent(event)\n // no backfill - send the event and exit\n if (!backfilling) {\n // send off async events if required\n await publishAsyncEvent({\n event,\n identity,\n properties,\n timestamp,\n })\n // now handle the main sync event processing pipeline\n await processors.processEvent(event, identity, properties, timestamp)\n return\n }\n\n // backfill active - check if the event has been sent already\n const alreadySent = await backfill.isAlreadySent(event, properties)\n if (alreadySent) {\n // do nothing\n return\n } else {\n // send and record the event\n await processors.processEvent(event, identity, properties, timestamp)\n await backfill.recordEvent(event, properties)\n }\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n Account,\n AccountCreatedEvent,\n AccountDeletedEvent,\n AccountVerifiedEvent,\n} from \"@budibase/types\"\n\nasync function created(account: Account) {\n const properties: AccountCreatedEvent = {\n tenantId: account.tenantId,\n }\n await publishEvent(Event.ACCOUNT_CREATED, properties)\n}\n\nasync function deleted(account: Account) {\n const properties: AccountDeletedEvent = {\n tenantId: account.tenantId,\n }\n await publishEvent(Event.ACCOUNT_DELETED, properties)\n}\n\nasync function verified(account: Account) {\n const properties: AccountVerifiedEvent = {\n tenantId: account.tenantId,\n }\n await publishEvent(Event.ACCOUNT_VERIFIED, properties)\n}\n\nexport default {\n created,\n deleted,\n verified,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n App,\n AppCreatedEvent,\n AppUpdatedEvent,\n AppDeletedEvent,\n AppPublishedEvent,\n AppUnpublishedEvent,\n AppFileImportedEvent,\n AppTemplateImportedEvent,\n AppVersionUpdatedEvent,\n AppVersionRevertedEvent,\n AppRevertedEvent,\n AppExportedEvent,\n} from \"@budibase/types\"\n\nconst created = async (app: App, timestamp?: string | number) => {\n const properties: AppCreatedEvent = {\n appId: app.appId,\n 
version: app.version,\n audited: {\n name: app.name,\n },\n }\n await publishEvent(Event.APP_CREATED, properties, timestamp)\n}\n\nasync function updated(app: App) {\n const properties: AppUpdatedEvent = {\n appId: app.appId,\n version: app.version,\n audited: {\n name: app.name,\n },\n }\n await publishEvent(Event.APP_UPDATED, properties)\n}\n\nasync function deleted(app: App) {\n const properties: AppDeletedEvent = {\n appId: app.appId,\n audited: {\n name: app.name,\n },\n }\n await publishEvent(Event.APP_DELETED, properties)\n}\n\nasync function published(app: App, timestamp?: string | number) {\n const properties: AppPublishedEvent = {\n appId: app.appId,\n audited: {\n name: app.name,\n },\n }\n await publishEvent(Event.APP_PUBLISHED, properties, timestamp)\n}\n\nasync function unpublished(app: App) {\n const properties: AppUnpublishedEvent = {\n appId: app.appId,\n audited: {\n name: app.name,\n },\n }\n await publishEvent(Event.APP_UNPUBLISHED, properties)\n}\n\nasync function fileImported(app: App) {\n const properties: AppFileImportedEvent = {\n appId: app.appId,\n audited: {\n name: app.name,\n },\n }\n await publishEvent(Event.APP_FILE_IMPORTED, properties)\n}\n\nasync function templateImported(app: App, templateKey: string) {\n const properties: AppTemplateImportedEvent = {\n appId: app.appId,\n templateKey,\n audited: {\n name: app.name,\n },\n }\n await publishEvent(Event.APP_TEMPLATE_IMPORTED, properties)\n}\n\nasync function versionUpdated(\n app: App,\n currentVersion: string,\n updatedToVersion: string\n) {\n const properties: AppVersionUpdatedEvent = {\n appId: app.appId,\n currentVersion,\n updatedToVersion,\n audited: {\n name: app.name,\n },\n }\n await publishEvent(Event.APP_VERSION_UPDATED, properties)\n}\n\nasync function versionReverted(\n app: App,\n currentVersion: string,\n revertedToVersion: string\n) {\n const properties: AppVersionRevertedEvent = {\n appId: app.appId,\n currentVersion,\n revertedToVersion,\n audited: {\n name: app.name,\n },\n }\n await publishEvent(Event.APP_VERSION_REVERTED, properties)\n}\n\nasync function reverted(app: App) {\n const properties: AppRevertedEvent = {\n appId: app.appId,\n audited: {\n name: app.name,\n },\n }\n await publishEvent(Event.APP_REVERTED, properties)\n}\n\nasync function exported(app: App) {\n const properties: AppExportedEvent = {\n appId: app.appId,\n audited: {\n name: app.name,\n },\n }\n await publishEvent(Event.APP_EXPORTED, properties)\n}\n\nexport default {\n created,\n updated,\n deleted,\n published,\n unpublished,\n fileImported,\n templateImported,\n versionUpdated,\n versionReverted,\n reverted,\n exported,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n LoginEvent,\n LoginSource,\n LogoutEvent,\n SSOActivatedEvent,\n SSOCreatedEvent,\n SSODeactivatedEvent,\n SSOType,\n SSOUpdatedEvent,\n} from \"@budibase/types\"\nimport { identification } from \"..\"\n\nasync function login(source: LoginSource, email: string) {\n const identity = await identification.getCurrentIdentity()\n const properties: LoginEvent = {\n userId: identity.id,\n source,\n audited: {\n email,\n },\n }\n await publishEvent(Event.AUTH_LOGIN, properties)\n}\n\nasync function logout(email?: string) {\n const identity = await identification.getCurrentIdentity()\n const properties: LogoutEvent = {\n userId: identity.id,\n audited: {\n email,\n },\n }\n await publishEvent(Event.AUTH_LOGOUT, properties)\n}\n\nasync function SSOCreated(type: SSOType, timestamp?: string | number) {\n const properties: 
SSOCreatedEvent = {\n type,\n }\n await publishEvent(Event.AUTH_SSO_CREATED, properties, timestamp)\n}\n\nasync function SSOUpdated(type: SSOType) {\n const properties: SSOUpdatedEvent = {\n type,\n }\n await publishEvent(Event.AUTH_SSO_UPDATED, properties)\n}\n\nasync function SSOActivated(type: SSOType, timestamp?: string | number) {\n const properties: SSOActivatedEvent = {\n type,\n }\n await publishEvent(Event.AUTH_SSO_ACTIVATED, properties, timestamp)\n}\n\nasync function SSODeactivated(type: SSOType) {\n const properties: SSODeactivatedEvent = {\n type,\n }\n await publishEvent(Event.AUTH_SSO_DEACTIVATED, properties)\n}\n\nexport default {\n login,\n logout,\n SSOCreated,\n SSOUpdated,\n SSOActivated,\n SSODeactivated,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Automation,\n Event,\n AutomationStep,\n AutomationCreatedEvent,\n AutomationDeletedEvent,\n AutomationTestedEvent,\n AutomationStepCreatedEvent,\n AutomationStepDeletedEvent,\n AutomationTriggerUpdatedEvent,\n AutomationsRunEvent,\n} from \"@budibase/types\"\n\nasync function created(automation: Automation, timestamp?: string | number) {\n const properties: AutomationCreatedEvent = {\n appId: automation.appId,\n automationId: automation._id as string,\n triggerId: automation.definition?.trigger?.id,\n triggerType: automation.definition?.trigger?.stepId,\n audited: {\n name: automation.name,\n },\n }\n await publishEvent(Event.AUTOMATION_CREATED, properties, timestamp)\n}\n\nasync function triggerUpdated(automation: Automation) {\n const properties: AutomationTriggerUpdatedEvent = {\n appId: automation.appId,\n automationId: automation._id as string,\n triggerId: automation.definition?.trigger?.id,\n triggerType: automation.definition?.trigger?.stepId,\n }\n await publishEvent(Event.AUTOMATION_TRIGGER_UPDATED, properties)\n}\n\nasync function deleted(automation: Automation) {\n const properties: AutomationDeletedEvent = {\n appId: automation.appId,\n automationId: automation._id as string,\n triggerId: automation.definition?.trigger?.id,\n triggerType: automation.definition?.trigger?.stepId,\n audited: {\n name: automation.name,\n },\n }\n await publishEvent(Event.AUTOMATION_DELETED, properties)\n}\n\nasync function tested(automation: Automation) {\n const properties: AutomationTestedEvent = {\n appId: automation.appId,\n automationId: automation._id as string,\n triggerId: automation.definition?.trigger?.id,\n triggerType: automation.definition?.trigger?.stepId,\n }\n await publishEvent(Event.AUTOMATION_TESTED, properties)\n}\n\nconst run = async (count: number, timestamp?: string | number) => {\n const properties: AutomationsRunEvent = {\n count,\n }\n await publishEvent(Event.AUTOMATIONS_RUN, properties, timestamp)\n}\n\nasync function stepCreated(\n automation: Automation,\n step: AutomationStep,\n timestamp?: string | number\n) {\n const properties: AutomationStepCreatedEvent = {\n appId: automation.appId,\n automationId: automation._id as string,\n triggerId: automation.definition?.trigger?.id,\n triggerType: automation.definition?.trigger?.stepId,\n stepId: step.id!,\n stepType: step.stepId,\n audited: {\n name: automation.name,\n },\n }\n await publishEvent(Event.AUTOMATION_STEP_CREATED, properties, timestamp)\n}\n\nasync function stepDeleted(automation: Automation, step: AutomationStep) {\n const properties: AutomationStepDeletedEvent = {\n appId: automation.appId,\n automationId: automation._id as string,\n triggerId: automation.definition?.trigger?.id,\n triggerType: 
automation.definition?.trigger?.stepId,\n stepId: step.id!,\n stepType: step.stepId,\n audited: {\n name: automation.name,\n },\n }\n await publishEvent(Event.AUTOMATION_STEP_DELETED, properties)\n}\n\nexport default {\n created,\n triggerUpdated,\n deleted,\n tested,\n run,\n stepCreated,\n stepDeleted,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n Datasource,\n DatasourceCreatedEvent,\n DatasourceUpdatedEvent,\n DatasourceDeletedEvent,\n SourceName,\n} from \"@budibase/types\"\n\nfunction isCustom(datasource: Datasource) {\n const sources = Object.values(SourceName)\n // if not in the base source list, then it must be custom\n return !sources.includes(datasource.source)\n}\n\nasync function created(datasource: Datasource, timestamp?: string | number) {\n const properties: DatasourceCreatedEvent = {\n datasourceId: datasource._id as string,\n source: datasource.source,\n custom: isCustom(datasource),\n }\n await publishEvent(Event.DATASOURCE_CREATED, properties, timestamp)\n}\n\nasync function updated(datasource: Datasource) {\n const properties: DatasourceUpdatedEvent = {\n datasourceId: datasource._id as string,\n source: datasource.source,\n custom: isCustom(datasource),\n }\n await publishEvent(Event.DATASOURCE_UPDATED, properties)\n}\n\nasync function deleted(datasource: Datasource) {\n const properties: DatasourceDeletedEvent = {\n datasourceId: datasource._id as string,\n source: datasource.source,\n custom: isCustom(datasource),\n }\n await publishEvent(Event.DATASOURCE_DELETED, properties)\n}\n\nexport default {\n created,\n updated,\n deleted,\n}\n", "import { publishEvent } from \"../events\"\nimport { Event, SMTPCreatedEvent, SMTPUpdatedEvent } from \"@budibase/types\"\n\nasync function SMTPCreated(timestamp?: string | number) {\n const properties: SMTPCreatedEvent = {}\n await publishEvent(Event.EMAIL_SMTP_CREATED, properties, timestamp)\n}\n\nasync function SMTPUpdated() {\n const properties: SMTPUpdatedEvent = {}\n await publishEvent(Event.EMAIL_SMTP_UPDATED, properties)\n}\n\nexport default {\n SMTPCreated,\n SMTPUpdated,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n LicenseActivatedEvent,\n LicensePlanChangedEvent,\n PlanType,\n Account,\n LicensePortalOpenedEvent,\n LicenseCheckoutSuccessEvent,\n LicenseCheckoutOpenedEvent,\n LicensePaymentFailedEvent,\n LicensePaymentRecoveredEvent,\n PriceDuration,\n} from \"@budibase/types\"\n\nasync function planChanged(\n account: Account,\n opts: {\n from: PlanType\n to: PlanType\n fromQuantity: number | undefined\n toQuantity: number | undefined\n fromDuration: PriceDuration | undefined\n toDuration: PriceDuration | undefined\n }\n) {\n const properties: LicensePlanChangedEvent = {\n accountId: account.accountId,\n ...opts,\n }\n await publishEvent(Event.LICENSE_PLAN_CHANGED, properties)\n}\n\nasync function activated(account: Account) {\n const properties: LicenseActivatedEvent = {\n accountId: account.accountId,\n }\n await publishEvent(Event.LICENSE_ACTIVATED, properties)\n}\n\nasync function checkoutOpened(account: Account) {\n const properties: LicenseCheckoutOpenedEvent = {\n accountId: account.accountId,\n }\n await publishEvent(Event.LICENSE_CHECKOUT_OPENED, properties)\n}\n\nasync function checkoutSuccess(account: Account) {\n const properties: LicenseCheckoutSuccessEvent = {\n accountId: account.accountId,\n }\n await publishEvent(Event.LICENSE_CHECKOUT_SUCCESS, properties)\n}\n\nasync function portalOpened(account: Account) {\n const properties: 
LicensePortalOpenedEvent = {\n accountId: account.accountId,\n }\n await publishEvent(Event.LICENSE_PORTAL_OPENED, properties)\n}\n\nasync function paymentFailed(account: Account) {\n const properties: LicensePaymentFailedEvent = {\n accountId: account.accountId,\n }\n await publishEvent(Event.LICENSE_PAYMENT_FAILED, properties)\n}\n\nasync function paymentRecovered(account: Account) {\n const properties: LicensePaymentRecoveredEvent = {\n accountId: account.accountId,\n }\n await publishEvent(Event.LICENSE_PAYMENT_RECOVERED, properties)\n}\n\nexport default {\n planChanged,\n activated,\n checkoutOpened,\n checkoutSuccess,\n portalOpened,\n paymentFailed,\n paymentRecovered,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n Layout,\n LayoutCreatedEvent,\n LayoutDeletedEvent,\n} from \"@budibase/types\"\n\nasync function created(layout: Layout, timestamp?: string | number) {\n const properties: LayoutCreatedEvent = {\n layoutId: layout._id as string,\n }\n await publishEvent(Event.LAYOUT_CREATED, properties, timestamp)\n}\n\nasync function deleted(layoutId: string) {\n const properties: LayoutDeletedEvent = {\n layoutId,\n }\n await publishEvent(Event.LAYOUT_DELETED, properties)\n}\n\nexport default {\n created,\n deleted,\n}\n", "import { publishEvent } from \"../events\"\nimport { Event } from \"@budibase/types\"\n\nasync function nameUpdated(timestamp?: string | number) {\n const properties = {}\n await publishEvent(Event.ORG_NAME_UPDATED, properties, timestamp)\n}\n\nasync function logoUpdated(timestamp?: string | number) {\n const properties = {}\n await publishEvent(Event.ORG_LOGO_UPDATED, properties, timestamp)\n}\n\nasync function platformURLUpdated(timestamp?: string | number) {\n const properties = {}\n await publishEvent(Event.ORG_PLATFORM_URL_UPDATED, properties, timestamp)\n}\n\n// TODO\n\nasync function analyticsOptOut() {\n const properties = {}\n await publishEvent(Event.ANALYTICS_OPT_OUT, properties)\n}\n\nasync function analyticsOptIn() {\n const properties = {}\n await publishEvent(Event.ANALYTICS_OPT_OUT, properties)\n}\n\nexport default {\n nameUpdated,\n logoUpdated,\n platformURLUpdated,\n analyticsOptOut,\n analyticsOptIn,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n Datasource,\n Query,\n QueryCreatedEvent,\n QueryUpdatedEvent,\n QueryDeletedEvent,\n QueryImportedEvent,\n QueryPreviewedEvent,\n QueriesRunEvent,\n} from \"@budibase/types\"\n\n/* eslint-disable */\n\nconst created = async (\n datasource: Datasource,\n query: Query,\n timestamp?: string | number\n) => {\n const properties: QueryCreatedEvent = {\n queryId: query._id as string,\n datasourceId: datasource._id as string,\n source: datasource.source,\n queryVerb: query.queryVerb,\n }\n await publishEvent(Event.QUERY_CREATED, properties, timestamp)\n}\n\nconst updated = async (datasource: Datasource, query: Query) => {\n const properties: QueryUpdatedEvent = {\n queryId: query._id as string,\n datasourceId: datasource._id as string,\n source: datasource.source,\n queryVerb: query.queryVerb,\n }\n await publishEvent(Event.QUERY_UPDATED, properties)\n}\n\nconst deleted = async (datasource: Datasource, query: Query) => {\n const properties: QueryDeletedEvent = {\n queryId: query._id as string,\n datasourceId: datasource._id as string,\n source: datasource.source,\n queryVerb: query.queryVerb,\n }\n await publishEvent(Event.QUERY_DELETED, properties)\n}\n\nconst imported = async (\n datasource: Datasource,\n importSource: any,\n count: any\n) => {\n const 
properties: QueryImportedEvent = {\n datasourceId: datasource._id as string,\n source: datasource.source,\n count,\n importSource,\n }\n await publishEvent(Event.QUERY_IMPORT, properties)\n}\n\nconst run = async (count: number, timestamp?: string | number) => {\n const properties: QueriesRunEvent = {\n count,\n }\n await publishEvent(Event.QUERIES_RUN, properties, timestamp)\n}\n\nconst previewed = async (datasource: Datasource, query: Query) => {\n const properties: QueryPreviewedEvent = {\n queryId: query._id,\n datasourceId: datasource._id as string,\n source: datasource.source,\n queryVerb: query.queryVerb,\n }\n await publishEvent(Event.QUERY_PREVIEWED, properties)\n}\n\nexport default {\n created,\n updated,\n deleted,\n imported,\n run,\n previewed,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n Role,\n RoleAssignedEvent,\n RoleCreatedEvent,\n RoleDeletedEvent,\n RoleUnassignedEvent,\n RoleUpdatedEvent,\n User,\n} from \"@budibase/types\"\n\nasync function created(role: Role, timestamp?: string | number) {\n const properties: RoleCreatedEvent = {\n roleId: role._id as string,\n permissionId: role.permissionId,\n inherits: role.inherits,\n }\n await publishEvent(Event.ROLE_CREATED, properties, timestamp)\n}\n\nasync function updated(role: Role) {\n const properties: RoleUpdatedEvent = {\n roleId: role._id as string,\n permissionId: role.permissionId,\n inherits: role.inherits,\n }\n await publishEvent(Event.ROLE_UPDATED, properties)\n}\n\nasync function deleted(role: Role) {\n const properties: RoleDeletedEvent = {\n roleId: role._id as string,\n permissionId: role.permissionId,\n inherits: role.inherits,\n }\n await publishEvent(Event.ROLE_DELETED, properties)\n}\n\nasync function assigned(user: User, roleId: string, timestamp?: number) {\n const properties: RoleAssignedEvent = {\n userId: user._id as string,\n roleId,\n }\n await publishEvent(Event.ROLE_ASSIGNED, properties, timestamp)\n}\n\nasync function unassigned(user: User, roleId: string) {\n const properties: RoleUnassignedEvent = {\n userId: user._id as string,\n roleId,\n }\n await publishEvent(Event.ROLE_UNASSIGNED, properties)\n}\n\nexport default {\n created,\n updated,\n deleted,\n assigned,\n unassigned,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n Screen,\n ScreenCreatedEvent,\n ScreenDeletedEvent,\n} from \"@budibase/types\"\n\nasync function created(screen: Screen, timestamp?: string | number) {\n const properties: ScreenCreatedEvent = {\n layoutId: screen.layoutId,\n screenId: screen._id as string,\n roleId: screen.routing.roleId,\n audited: {\n name: screen.routing?.route,\n },\n }\n await publishEvent(Event.SCREEN_CREATED, properties, timestamp)\n}\n\nasync function deleted(screen: Screen) {\n const properties: ScreenDeletedEvent = {\n layoutId: screen.layoutId,\n screenId: screen._id as string,\n roleId: screen.routing.roleId,\n audited: {\n name: screen.routing?.route,\n },\n }\n await publishEvent(Event.SCREEN_DELETED, properties)\n}\n\nexport default {\n created,\n deleted,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n RowsImportedEvent,\n RowsCreatedEvent,\n Table,\n} from \"@budibase/types\"\n\n/* eslint-disable */\n\nconst created = async (count: number, timestamp?: string | number) => {\n const properties: RowsCreatedEvent = {\n count,\n }\n await publishEvent(Event.ROWS_CREATED, properties, timestamp)\n}\n\nconst imported = async (table: Table, count: number) => {\n const properties: RowsImportedEvent = {\n tableId: table._id 
as string,\n count,\n }\n await publishEvent(Event.ROWS_IMPORTED, properties)\n}\n\nexport default {\n created,\n imported,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n TableExportFormat,\n Table,\n TableCreatedEvent,\n TableUpdatedEvent,\n TableDeletedEvent,\n TableExportedEvent,\n TableImportedEvent,\n} from \"@budibase/types\"\n\nasync function created(table: Table, timestamp?: string | number) {\n const properties: TableCreatedEvent = {\n tableId: table._id as string,\n audited: {\n name: table.name,\n },\n }\n await publishEvent(Event.TABLE_CREATED, properties, timestamp)\n}\n\nasync function updated(table: Table) {\n const properties: TableUpdatedEvent = {\n tableId: table._id as string,\n audited: {\n name: table.name,\n },\n }\n await publishEvent(Event.TABLE_UPDATED, properties)\n}\n\nasync function deleted(table: Table) {\n const properties: TableDeletedEvent = {\n tableId: table._id as string,\n audited: {\n name: table.name,\n },\n }\n await publishEvent(Event.TABLE_DELETED, properties)\n}\n\nasync function exported(table: Table, format: TableExportFormat) {\n const properties: TableExportedEvent = {\n tableId: table._id as string,\n format,\n audited: {\n name: table.name,\n },\n }\n await publishEvent(Event.TABLE_EXPORTED, properties)\n}\n\nasync function imported(table: Table) {\n const properties: TableImportedEvent = {\n tableId: table._id as string,\n audited: {\n name: table.name,\n },\n }\n await publishEvent(Event.TABLE_IMPORTED, properties)\n}\n\nexport default {\n created,\n updated,\n deleted,\n exported,\n imported,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n App,\n BuilderServedEvent,\n Event,\n AppPreviewServedEvent,\n AppServedEvent,\n} from \"@budibase/types\"\n\nasync function servedBuilder(timezone: string) {\n const properties: BuilderServedEvent = {\n timezone,\n }\n await publishEvent(Event.SERVED_BUILDER, properties)\n}\n\nasync function servedApp(\n app: App,\n timezone: string,\n embed?: boolean | undefined\n) {\n const properties: AppServedEvent = {\n appVersion: app.version,\n timezone,\n embed: embed === true,\n }\n await publishEvent(Event.SERVED_APP, properties)\n}\n\nasync function servedAppPreview(app: App, timezone: string) {\n const properties: AppPreviewServedEvent = {\n appId: app.appId,\n appVersion: app.version,\n timezone,\n }\n await publishEvent(Event.SERVED_APP_PREVIEW, properties)\n}\n\nexport default {\n servedBuilder,\n servedApp,\n servedAppPreview,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n User,\n UserCreatedEvent,\n UserDataCollaborationEvent,\n UserDeletedEvent,\n UserInviteAcceptedEvent,\n UserInvitedEvent,\n UserPasswordForceResetEvent,\n UserPasswordResetEvent,\n UserPasswordResetRequestedEvent,\n UserPasswordUpdatedEvent,\n UserPermissionAssignedEvent,\n UserPermissionRemovedEvent,\n UserUpdatedEvent,\n UserOnboardingEvent,\n} from \"@budibase/types\"\nimport { isScim } from \"../../context\"\n\nasync function created(user: User, timestamp?: number) {\n const properties: UserCreatedEvent = {\n userId: user._id as string,\n viaScim: isScim(),\n audited: {\n email: user.email,\n },\n }\n await publishEvent(Event.USER_CREATED, properties, timestamp)\n}\n\nasync function updated(user: User) {\n const properties: UserUpdatedEvent = {\n userId: user._id as string,\n viaScim: isScim(),\n audited: {\n email: user.email,\n },\n }\n await publishEvent(Event.USER_UPDATED, properties)\n}\n\nasync function deleted(user: User) {\n const properties: UserDeletedEvent 
= {\n userId: user._id as string,\n viaScim: isScim(),\n audited: {\n email: user.email,\n },\n }\n await publishEvent(Event.USER_DELETED, properties)\n}\n\nexport async function onboardingComplete(user: User) {\n const properties: UserOnboardingEvent = {\n userId: user._id as string,\n audited: {\n email: user.email,\n },\n }\n await publishEvent(Event.USER_ONBOARDING_COMPLETE, properties)\n}\n\n// PERMISSIONS\n\nasync function permissionAdminAssigned(user: User, timestamp?: number) {\n const properties: UserPermissionAssignedEvent = {\n userId: user._id as string,\n audited: {\n email: user.email,\n },\n }\n await publishEvent(\n Event.USER_PERMISSION_ADMIN_ASSIGNED,\n properties,\n timestamp\n )\n}\n\nasync function permissionAdminRemoved(user: User) {\n const properties: UserPermissionRemovedEvent = {\n userId: user._id as string,\n audited: {\n email: user.email,\n },\n }\n await publishEvent(Event.USER_PERMISSION_ADMIN_REMOVED, properties)\n}\n\nasync function permissionBuilderAssigned(user: User, timestamp?: number) {\n const properties: UserPermissionAssignedEvent = {\n userId: user._id as string,\n audited: {\n email: user.email,\n },\n }\n await publishEvent(\n Event.USER_PERMISSION_BUILDER_ASSIGNED,\n properties,\n timestamp\n )\n}\n\nasync function permissionBuilderRemoved(user: User) {\n const properties: UserPermissionRemovedEvent = {\n userId: user._id as string,\n audited: {\n email: user.email,\n },\n }\n await publishEvent(Event.USER_PERMISSION_BUILDER_REMOVED, properties)\n}\n\n// INVITE\n\nasync function invited(email: string) {\n const properties: UserInvitedEvent = {\n audited: {\n email,\n },\n }\n await publishEvent(Event.USER_INVITED, properties)\n}\n\nasync function inviteAccepted(user: User) {\n const properties: UserInviteAcceptedEvent = {\n userId: user._id as string,\n audited: {\n email: user.email,\n },\n }\n await publishEvent(Event.USER_INVITED_ACCEPTED, properties)\n}\n\n// PASSWORD\n\nasync function passwordForceReset(user: User) {\n const properties: UserPasswordForceResetEvent = {\n userId: user._id as string,\n audited: {\n email: user.email,\n },\n }\n await publishEvent(Event.USER_PASSWORD_FORCE_RESET, properties)\n}\n\nasync function passwordUpdated(user: User) {\n const properties: UserPasswordUpdatedEvent = {\n userId: user._id as string,\n audited: {\n email: user.email,\n },\n }\n await publishEvent(Event.USER_PASSWORD_UPDATED, properties)\n}\n\nasync function passwordResetRequested(user: User) {\n const properties: UserPasswordResetRequestedEvent = {\n userId: user._id as string,\n audited: {\n email: user.email,\n },\n }\n await publishEvent(Event.USER_PASSWORD_RESET_REQUESTED, properties)\n}\n\nasync function passwordReset(user: User) {\n const properties: UserPasswordResetEvent = {\n userId: user._id as string,\n audited: {\n email: user.email,\n },\n }\n await publishEvent(Event.USER_PASSWORD_RESET, properties)\n}\n\n// COLLABORATION\n\nasync function dataCollaboration(users: number) {\n const properties: UserDataCollaborationEvent = {\n users,\n }\n await publishEvent(Event.USER_DATA_COLLABORATION, properties)\n}\n\nexport default {\n created,\n updated,\n deleted,\n permissionAdminAssigned,\n permissionAdminRemoved,\n permissionBuilderAssigned,\n permissionBuilderRemoved,\n onboardingComplete,\n invited,\n inviteAccepted,\n passwordForceReset,\n passwordUpdated,\n passwordResetRequested,\n passwordReset,\n dataCollaboration,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n ViewCalculationCreatedEvent,\n 
ViewCalculationDeletedEvent,\n ViewCalculationUpdatedEvent,\n ViewCreatedEvent,\n ViewDeletedEvent,\n ViewExportedEvent,\n ViewFilterCreatedEvent,\n ViewFilterDeletedEvent,\n ViewFilterUpdatedEvent,\n ViewUpdatedEvent,\n View,\n ViewCalculation,\n Table,\n TableExportFormat,\n} from \"@budibase/types\"\n\n/* eslint-disable */\n\nasync function created(view: View, timestamp?: string | number) {\n const properties: ViewCreatedEvent = {\n tableId: view.tableId,\n }\n await publishEvent(Event.VIEW_CREATED, properties, timestamp)\n}\n\nasync function updated(view: View) {\n const properties: ViewUpdatedEvent = {\n tableId: view.tableId,\n }\n await publishEvent(Event.VIEW_UPDATED, properties)\n}\n\nasync function deleted(view: View) {\n const properties: ViewDeletedEvent = {\n tableId: view.tableId,\n }\n await publishEvent(Event.VIEW_DELETED, properties)\n}\n\nasync function exported(table: Table, format: TableExportFormat) {\n const properties: ViewExportedEvent = {\n tableId: table._id as string,\n format,\n }\n await publishEvent(Event.VIEW_EXPORTED, properties)\n}\n\nasync function filterCreated(view: View, timestamp?: string | number) {\n const properties: ViewFilterCreatedEvent = {\n tableId: view.tableId,\n }\n await publishEvent(Event.VIEW_FILTER_CREATED, properties, timestamp)\n}\n\nasync function filterUpdated(view: View) {\n const properties: ViewFilterUpdatedEvent = {\n tableId: view.tableId,\n }\n await publishEvent(Event.VIEW_FILTER_UPDATED, properties)\n}\n\nasync function filterDeleted(view: View) {\n const properties: ViewFilterDeletedEvent = {\n tableId: view.tableId,\n }\n await publishEvent(Event.VIEW_FILTER_DELETED, properties)\n}\n\nasync function calculationCreated(view: View, timestamp?: string | number) {\n const properties: ViewCalculationCreatedEvent = {\n tableId: view.tableId,\n calculation: view.calculation as ViewCalculation,\n }\n await publishEvent(Event.VIEW_CALCULATION_CREATED, properties, timestamp)\n}\n\nasync function calculationUpdated(view: View) {\n const properties: ViewCalculationUpdatedEvent = {\n tableId: view.tableId,\n calculation: view.calculation as ViewCalculation,\n }\n await publishEvent(Event.VIEW_CALCULATION_UPDATED, properties)\n}\n\nasync function calculationDeleted(existingView: View) {\n const properties: ViewCalculationDeletedEvent = {\n tableId: existingView.tableId,\n calculation: existingView.calculation as ViewCalculation,\n }\n await publishEvent(Event.VIEW_CALCULATION_DELETED, properties)\n}\n\nexport default {\n created,\n updated,\n deleted,\n exported,\n filterCreated,\n filterUpdated,\n filterDeleted,\n calculationCreated,\n calculationUpdated,\n calculationDeleted,\n}\n", "import { publishEvent } from \"../events\"\nimport { Event, VersionCheckedEvent, VersionChangeEvent } from \"@budibase/types\"\n\nasync function versionChecked(version: string) {\n const properties: VersionCheckedEvent = {\n currentVersion: version,\n }\n await publishEvent(Event.INSTALLATION_VERSION_CHECKED, properties)\n}\n\nasync function upgraded(from: string, to: string) {\n const properties: VersionChangeEvent = {\n from,\n to,\n }\n\n await publishEvent(Event.INSTALLATION_VERSION_UPGRADED, properties)\n}\n\nasync function downgraded(from: string, to: string) {\n const properties: VersionChangeEvent = {\n from,\n to,\n }\n await publishEvent(Event.INSTALLATION_VERSION_DOWNGRADED, properties)\n}\n\nasync function firstStartup() {\n const properties = {}\n await publishEvent(Event.INSTALLATION_FIRST_STARTUP, properties)\n}\n\nexport default {\n 
versionChecked,\n upgraded,\n downgraded,\n firstStartup,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n AppBackfillSucceededEvent,\n AppBackfillFailedEvent,\n TenantBackfillSucceededEvent,\n TenantBackfillFailedEvent,\n InstallationBackfillSucceededEvent,\n InstallationBackfillFailedEvent,\n} from \"@budibase/types\"\nimport env from \"../../environment\"\n\nconst shouldSkip = !env.SELF_HOSTED && !env.isDev()\n\nasync function appSucceeded(properties: AppBackfillSucceededEvent) {\n if (shouldSkip) {\n return\n }\n await publishEvent(Event.APP_BACKFILL_SUCCEEDED, properties)\n}\n\nasync function appFailed(error: any) {\n if (shouldSkip) {\n return\n }\n const properties: AppBackfillFailedEvent = {\n error: JSON.stringify(error, Object.getOwnPropertyNames(error)),\n }\n await publishEvent(Event.APP_BACKFILL_FAILED, properties)\n}\n\nasync function tenantSucceeded(properties: TenantBackfillSucceededEvent) {\n if (shouldSkip) {\n return\n }\n await publishEvent(Event.TENANT_BACKFILL_SUCCEEDED, properties)\n}\n\nasync function tenantFailed(error: any) {\n if (shouldSkip) {\n return\n }\n const properties: TenantBackfillFailedEvent = {\n error: JSON.stringify(error, Object.getOwnPropertyNames(error)),\n }\n await publishEvent(Event.TENANT_BACKFILL_FAILED, properties)\n}\n\nasync function installationSucceeded() {\n if (shouldSkip) {\n return\n }\n const properties: InstallationBackfillSucceededEvent = {}\n await publishEvent(Event.INSTALLATION_BACKFILL_SUCCEEDED, properties)\n}\n\nasync function installationFailed(error: any) {\n if (shouldSkip) {\n return\n }\n const properties: InstallationBackfillFailedEvent = {\n error: JSON.stringify(error, Object.getOwnPropertyNames(error)),\n }\n await publishEvent(Event.INSTALLATION_BACKFILL_FAILED, properties)\n}\n\nexport default {\n appSucceeded,\n appFailed,\n tenantSucceeded,\n tenantFailed,\n installationSucceeded,\n installationFailed,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n UserGroup,\n GroupCreatedEvent,\n GroupDeletedEvent,\n GroupUpdatedEvent,\n GroupUsersAddedEvent,\n GroupUsersDeletedEvent,\n GroupAddedOnboardingEvent,\n GroupPermissionsEditedEvent,\n} from \"@budibase/types\"\nimport { isScim } from \"../../context\"\n\nasync function created(group: UserGroup, timestamp?: number) {\n const properties: GroupCreatedEvent = {\n groupId: group._id as string,\n viaScim: isScim(),\n audited: {\n name: group.name,\n },\n }\n await publishEvent(Event.USER_GROUP_CREATED, properties, timestamp)\n}\n\nasync function updated(group: UserGroup) {\n const properties: GroupUpdatedEvent = {\n groupId: group._id as string,\n viaScim: isScim(),\n audited: {\n name: group.name,\n },\n }\n await publishEvent(Event.USER_GROUP_UPDATED, properties)\n}\n\nasync function deleted(group: UserGroup) {\n const properties: GroupDeletedEvent = {\n groupId: group._id as string,\n viaScim: isScim(),\n audited: {\n name: group.name,\n },\n }\n await publishEvent(Event.USER_GROUP_DELETED, properties)\n}\n\nasync function usersAdded(count: number, group: UserGroup) {\n const properties: GroupUsersAddedEvent = {\n count,\n groupId: group._id as string,\n viaScim: isScim(),\n audited: {\n name: group.name,\n },\n }\n await publishEvent(Event.USER_GROUP_USERS_ADDED, properties)\n}\n\nasync function usersDeleted(count: number, group: UserGroup) {\n const properties: GroupUsersDeletedEvent = {\n count,\n groupId: group._id as string,\n viaScim: isScim(),\n audited: {\n name: group.name,\n },\n }\n await 
publishEvent(Event.USER_GROUP_USERS_REMOVED, properties)\n}\n\nasync function createdOnboarding(groupId: string) {\n const properties: GroupAddedOnboardingEvent = {\n groupId: groupId,\n onboarding: true,\n }\n await publishEvent(Event.USER_GROUP_ONBOARDING, properties)\n}\n\nasync function permissionsEdited(group: UserGroup) {\n const properties: GroupPermissionsEditedEvent = {\n permissions: group.roles!,\n groupId: group._id as string,\n audited: {\n name: group.name,\n },\n }\n await publishEvent(Event.USER_GROUP_PERMISSIONS_EDITED, properties)\n}\n\nexport default {\n created,\n updated,\n deleted,\n usersAdded,\n usersDeleted,\n createdOnboarding,\n permissionsEdited,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n Plugin,\n PluginDeletedEvent,\n PluginImportedEvent,\n PluginInitEvent,\n} from \"@budibase/types\"\n\nasync function init(plugin: Plugin) {\n const properties: PluginInitEvent = {\n type: plugin.schema.type,\n name: plugin.name,\n description: plugin.description,\n version: plugin.version,\n }\n await publishEvent(Event.PLUGIN_INIT, properties)\n}\n\nasync function imported(plugin: Plugin) {\n const properties: PluginImportedEvent = {\n pluginId: plugin._id as string,\n type: plugin.schema.type,\n source: plugin.source,\n name: plugin.name,\n description: plugin.description,\n version: plugin.version,\n }\n await publishEvent(Event.PLUGIN_IMPORTED, properties)\n}\n\nasync function deleted(plugin: Plugin) {\n const properties: PluginDeletedEvent = {\n pluginId: plugin._id as string,\n type: plugin.schema.type,\n name: plugin.name,\n description: plugin.description,\n version: plugin.version,\n }\n await publishEvent(Event.PLUGIN_DELETED, properties)\n}\n\nexport default {\n init,\n imported,\n deleted,\n}\n", "import {\n AppBackup,\n AppBackupRestoreEvent,\n AppBackupTriggeredEvent,\n AppBackupTrigger,\n AppBackupType,\n Event,\n} from \"@budibase/types\"\nimport { publishEvent } from \"../events\"\n\nasync function appBackupRestored(backup: AppBackup) {\n const properties: AppBackupRestoreEvent = {\n appId: backup.appId,\n restoreId: backup._id!,\n backupCreatedAt: backup.timestamp,\n name: backup.name as string,\n }\n\n await publishEvent(Event.APP_BACKUP_RESTORED, properties)\n}\n\nasync function appBackupTriggered(\n appId: string,\n backupId: string,\n type: AppBackupType,\n trigger: AppBackupTrigger,\n name: string\n) {\n const properties: AppBackupTriggeredEvent = {\n appId: appId,\n backupId,\n type,\n trigger,\n name,\n }\n await publishEvent(Event.APP_BACKUP_TRIGGERED, properties)\n}\n\nexport default {\n appBackupRestored,\n appBackupTriggered,\n}\n", "import {\n Event,\n EnvironmentVariableCreatedEvent,\n EnvironmentVariableDeletedEvent,\n EnvironmentVariableUpgradePanelOpenedEvent,\n} from \"@budibase/types\"\nimport { publishEvent } from \"../events\"\n\nasync function created(name: string, environments: string[]) {\n const properties: EnvironmentVariableCreatedEvent = {\n name,\n environments,\n }\n await publishEvent(Event.ENVIRONMENT_VARIABLE_CREATED, properties)\n}\n\nasync function deleted(name: string) {\n const properties: EnvironmentVariableDeletedEvent = {\n name,\n }\n await publishEvent(Event.ENVIRONMENT_VARIABLE_DELETED, properties)\n}\n\nasync function upgradePanelOpened(userId: string) {\n const properties: EnvironmentVariableUpgradePanelOpenedEvent = {\n userId,\n }\n await publishEvent(\n Event.ENVIRONMENT_VARIABLE_UPGRADE_PANEL_OPENED,\n properties\n )\n}\n\nexport default {\n created,\n deleted,\n 
upgradePanelOpened,\n}\n", "import {\n Event,\n AuditLogSearchParams,\n AuditLogFilteredEvent,\n AuditLogDownloadedEvent,\n} from \"@budibase/types\"\nimport { publishEvent } from \"../events\"\n\nasync function filtered(search: AuditLogSearchParams) {\n const properties: AuditLogFilteredEvent = {\n filters: search,\n }\n await publishEvent(Event.AUDIT_LOGS_FILTERED, properties)\n}\n\nasync function downloaded(search: AuditLogSearchParams) {\n const properties: AuditLogDownloadedEvent = {\n filters: search,\n }\n await publishEvent(Event.AUDIT_LOGS_DOWNLOADED, properties)\n}\n\nexport default {\n filtered,\n downloaded,\n}\n", "const redis = require(\"../redis/init\")\nconst { v4: uuidv4 } = require(\"uuid\")\nconst { logWarn } = require(\"../logging\")\n\nimport env from \"../environment\"\nimport {\n Session,\n ScannedSession,\n SessionKey,\n CreateSession,\n} from \"@budibase/types\"\n\n// a week in seconds\nconst EXPIRY_SECONDS = 86400 * 7\n\nfunction makeSessionID(userId: string, sessionId: string) {\n return `${userId}/${sessionId}`\n}\n\nexport async function getSessionsForUser(userId: string): Promise<Session[]> {\n if (!userId) {\n console.trace(\"Cannot get sessions for undefined userId\")\n return []\n }\n const client = await redis.getSessionClient()\n const sessions: ScannedSession[] = await client.scan(userId)\n return sessions.map(session => session.value)\n}\n\nexport async function invalidateSessions(\n userId: string,\n opts: { sessionIds?: string[]; reason?: string } = {}\n) {\n try {\n const reason = opts?.reason || \"unknown\"\n let sessionIds: string[] = opts.sessionIds || []\n let sessionKeys: SessionKey[]\n\n // If no sessionIds, get all the sessions for the user\n if (sessionIds.length === 0) {\n const sessions = await getSessionsForUser(userId)\n sessionKeys = sessions.map(session => ({\n key: makeSessionID(session.userId, session.sessionId),\n }))\n } else {\n // use the passed array of sessionIds\n sessionIds = Array.isArray(sessionIds) ? sessionIds : [sessionIds]\n sessionKeys = sessionIds.map(sessionId => ({\n key: makeSessionID(userId, sessionId),\n }))\n }\n\n if (sessionKeys && sessionKeys.length > 0) {\n const client = await redis.getSessionClient()\n const promises = []\n for (let sessionKey of sessionKeys) {\n promises.push(client.delete(sessionKey.key))\n }\n if (!env.isTest()) {\n logWarn(\n `Invalidating sessions for ${userId} (reason: ${reason}) - ${sessionKeys\n .map(sessionKey => sessionKey.key)\n .join(\", \")}`\n )\n }\n await Promise.all(promises)\n }\n } catch (err) {\n console.error(`Error invalidating sessions: ${err}`)\n }\n}\n\nexport async function createASession(\n userId: string,\n createSession: CreateSession\n) {\n // invalidate all other sessions\n await invalidateSessions(userId, { reason: \"creation\" })\n\n const client = await redis.getSessionClient()\n const sessionId = createSession.sessionId\n const csrfToken = createSession.csrfToken ? 
createSession.csrfToken : uuidv4()\n const key = makeSessionID(userId, sessionId)\n\n const session: Session = {\n ...createSession,\n csrfToken,\n createdAt: new Date().toISOString(),\n lastAccessedAt: new Date().toISOString(),\n userId,\n }\n await client.store(key, session, EXPIRY_SECONDS)\n return session\n}\n\nexport async function updateSessionTTL(session: Session) {\n const client = await redis.getSessionClient()\n const key = makeSessionID(session.userId, session.sessionId)\n session.lastAccessedAt = new Date().toISOString()\n await client.store(key, session, EXPIRY_SECONDS)\n}\n\nexport async function endSession(userId: string, sessionId: string) {\n const client = await redis.getSessionClient()\n await client.delete(makeSessionID(userId, sessionId))\n}\n\nexport async function getSession(\n userId: string,\n sessionId: string\n): Promise<Session> {\n if (!userId || !sessionId) {\n throw new Error(`Invalid session details - ${userId} - ${sessionId}`)\n }\n const client = await redis.getSessionClient()\n const session = await client.get(makeSessionID(userId, sessionId))\n if (!session) {\n throw new Error(`Session not found - ${userId} - ${sessionId}`)\n }\n return session\n}\n", "export * from \"./auth\"\n", "import env from \"../environment\"\n\nexport const PASSWORD_MIN_LENGTH = +(env.PASSWORD_MIN_LENGTH || 8)\nexport const PASSWORD_MAX_LENGTH = +(env.PASSWORD_MAX_LENGTH || 512)\n\nexport function validatePassword(\n password: string\n): { valid: true } | { valid: false; error: string } {\n if (!password || password.length < PASSWORD_MIN_LENGTH) {\n return {\n valid: false,\n error: `Password invalid. Minimum ${PASSWORD_MIN_LENGTH} characters.`,\n }\n }\n\n if (password.length > PASSWORD_MAX_LENGTH) {\n return {\n valid: false,\n error: `Password invalid. 
Maximum ${PASSWORD_MAX_LENGTH} characters.`,\n }\n }\n\n return { valid: true }\n}\n", "import BaseCache from \"./base\"\nimport { getWritethroughClient } from \"../redis/init\"\nimport { logWarn } from \"../logging\"\nimport { Database, Document, LockName, LockType } from \"@budibase/types\"\nimport * as locks from \"../redis/redlockImpl\"\n\nconst DEFAULT_WRITE_RATE_MS = 10000\nlet CACHE: BaseCache | null = null\n\ninterface CacheItem<T extends Document> {\n doc: any\n lastWrite: number\n}\n\nasync function getCache() {\n if (!CACHE) {\n const client = await getWritethroughClient()\n CACHE = new BaseCache(client)\n }\n return CACHE\n}\n\nfunction makeCacheKey(db: Database, key: string) {\n return db.name + key\n}\n\nfunction makeCacheItem<T extends Document>(\n doc: T,\n lastWrite: number | null = null\n): CacheItem<T> {\n return { doc, lastWrite: lastWrite || Date.now() }\n}\n\nasync function put(\n db: Database,\n doc: Document,\n writeRateMs: number = DEFAULT_WRITE_RATE_MS\n) {\n const cache = await getCache()\n const key = doc._id\n let cacheItem: CacheItem<any> | undefined\n if (key) {\n cacheItem = await cache.get(makeCacheKey(db, key))\n }\n const updateDb = !cacheItem || cacheItem.lastWrite < Date.now() - writeRateMs\n let output = doc\n if (updateDb) {\n const lockResponse = await locks.doWithLock(\n {\n type: LockType.TRY_ONCE,\n name: LockName.PERSIST_WRITETHROUGH,\n resource: key,\n ttl: 15000,\n },\n async () => {\n const writeDb = async (toWrite: any) => {\n // doc should contain the _id and _rev\n const response = await db.put(toWrite, { force: true })\n output._id = response.id\n output._rev = response.rev\n }\n try {\n await writeDb(doc)\n } catch (err: any) {\n if (err.status !== 409) {\n throw err\n } else {\n // Swallow 409s but log them\n logWarn(`Ignoring conflict in write-through cache`)\n }\n }\n }\n )\n\n if (!lockResponse.executed) {\n logWarn(`Ignoring redlock conflict in write-through cache`)\n }\n }\n // if we are updating the DB then need to set the lastWrite to now\n cacheItem = makeCacheItem(output, updateDb ? null : cacheItem?.lastWrite)\n if (output._id) {\n await cache.store(makeCacheKey(db, output._id), cacheItem)\n }\n return { ok: true, id: output._id, rev: output._rev }\n}\n\nasync function get<T extends Document>(db: Database, id: string): Promise<T> {\n const cache = await getCache()\n const cacheKey = makeCacheKey(db, id)\n let cacheItem: CacheItem<T> = await cache.get(cacheKey)\n if (!cacheItem) {\n const doc = await db.get<T>(id)\n cacheItem = makeCacheItem(doc)\n await cache.store(cacheKey, cacheItem)\n }\n return cacheItem.doc\n}\n\nasync function remove(db: Database, docOrId: any, rev?: any): Promise<void> {\n const cache = await getCache()\n if (!docOrId) {\n throw new Error(\"No ID/Rev provided.\")\n }\n const id = typeof docOrId === \"string\" ? docOrId : docOrId._id\n rev = typeof docOrId === \"string\" ? 
rev : docOrId._rev\n try {\n await cache.delete(makeCacheKey(db, id))\n } finally {\n await db.remove(id, rev)\n }\n}\n\nexport class Writethrough {\n db: Database\n writeRateMs: number\n\n constructor(db: Database, writeRateMs: number = DEFAULT_WRITE_RATE_MS) {\n this.db = db\n this.writeRateMs = writeRateMs\n }\n\n async put(doc: any, writeRateMs: number = this.writeRateMs) {\n return put(this.db, doc, writeRateMs)\n }\n\n async get<T extends Document>(id: string) {\n return get<T>(this.db, id)\n }\n\n async remove(docOrId: any, rev?: any) {\n return remove(this.db, docOrId, rev)\n }\n}\n", "import * as redis from \"../redis/init\"\nimport * as utils from \"../utils\"\nimport { Duration } from \"../utils\"\n\nconst TTL_SECONDS = Duration.fromHours(1).toSeconds()\n\ninterface PasswordReset {\n userId: string\n info: any\n}\n\n/**\n * Given a user ID this will store a code (that is returned) for an hour in redis.\n * The user can then return this code for resetting their password (through their reset link).\n * @param userId the ID of the user which is to be reset.\n * @param info Info about the user/the reset process.\n * @return returns the code that was stored to redis.\n */\nexport async function createCode(userId: string, info: any): Promise<string> {\n const code = utils.newid()\n const client = await redis.getPasswordResetClient()\n await client.store(code, { userId, info }, TTL_SECONDS)\n return code\n}\n\n/**\n * Given a reset code this will lookup to redis, check if the code is valid.\n * @param code The code provided via the email link.\n * @return returns the user ID if it is found\n */\nexport async function getCode(code: string): Promise<PasswordReset> {\n const client = await redis.getPasswordResetClient()\n const value = (await client.get(code)) as PasswordReset | undefined\n if (!value) {\n throw new Error(\n \"Provided information is not valid, cannot reset password - please try again.\"\n )\n }\n return value\n}\n\n/**\n * Given a reset code this will invalidate it.\n * @param code The code provided via the email link.\n */\nexport async function invalidateCode(code: string): Promise<void> {\n const client = await redis.getPasswordResetClient()\n await client.delete(code)\n}\n", "export * from \"./migrations\"\nexport * from \"./definitions\"\n", "import { DEFAULT_TENANT_ID } from \"../constants\"\nimport {\n DocumentType,\n StaticDatabases,\n getAllApps,\n getGlobalDBName,\n getDB,\n} from \"../db\"\nimport environment from \"../environment\"\nimport * as platform from \"../platform\"\nimport * as context from \"../context\"\nimport { DEFINITIONS } from \".\"\nimport {\n Migration,\n MigrationOptions,\n MigrationType,\n MigrationNoOpOptions,\n App,\n} from \"@budibase/types\"\n\nexport const getMigrationsDoc = async (db: any) => {\n // get the migrations doc\n try {\n return await db.get(DocumentType.MIGRATIONS)\n } catch (err: any) {\n if (err.status && err.status === 404) {\n return { _id: DocumentType.MIGRATIONS }\n } else {\n console.error(err)\n throw err\n }\n }\n}\n\nexport const backPopulateMigrations = async (opts: MigrationNoOpOptions) => {\n // filter migrations to the type and populate a no-op migration\n const migrations: Migration[] = DEFINITIONS.filter(\n def => def.type === opts.type\n ).map(d => ({ ...d, fn: () => {} }))\n await runMigrations(migrations, { noOp: opts })\n}\n\nexport const runMigration = async (\n migration: Migration,\n options: MigrationOptions = {}\n) => {\n const migrationType = migration.type\n let tenantId: string | undefined\n if 
(migrationType !== MigrationType.INSTALLATION) {\n tenantId = context.getTenantId()\n }\n const migrationName = migration.name\n const silent = migration.silent\n\n const log = (message: string) => {\n if (!silent) {\n console.log(message)\n }\n }\n\n // get the db to store the migration in\n let dbNames: string[]\n if (migrationType === MigrationType.GLOBAL) {\n dbNames = [getGlobalDBName()]\n } else if (migrationType === MigrationType.APP) {\n if (options.noOp) {\n if (!options.noOp.appId) {\n throw new Error(\"appId is required for noOp app migration\")\n }\n dbNames = [options.noOp.appId]\n } else {\n const apps = (await getAllApps(migration.appOpts)) as App[]\n dbNames = apps.map(app => app.appId)\n }\n } else if (migrationType === MigrationType.INSTALLATION) {\n dbNames = [StaticDatabases.PLATFORM_INFO.name]\n } else {\n throw new Error(`Unrecognised migration type [${migrationType}]`)\n }\n\n const length = dbNames.length\n let count = 0\n\n // run the migration against each db\n for (const dbName of dbNames) {\n count++\n const lengthStatement = length > 1 ? `[${count}/${length}]` : \"\"\n\n const db = getDB(dbName)\n\n try {\n const doc = await getMigrationsDoc(db)\n\n // the migration has already been run\n if (doc[migrationName]) {\n // check for force\n if (\n options.force &&\n options.force[migrationType] &&\n options.force[migrationType].includes(migrationName)\n ) {\n log(`[Migration: ${migrationName}] [DB: ${dbName}] Forcing`)\n } else {\n // no force, exit\n return\n }\n }\n\n // check if the migration is not a no-op\n if (!options.noOp) {\n log(\n `[Migration: ${migrationName}] [DB: ${dbName}] Running ${lengthStatement}`\n )\n\n if (migration.preventRetry) {\n // eagerly set the completion date\n // so that we never run this migration twice even upon failure\n doc[migrationName] = Date.now()\n const response = await db.put(doc)\n doc._rev = response.rev\n }\n\n // run the migration\n if (migrationType === MigrationType.APP) {\n await context.doInAppContext(db.name, async () => {\n await migration.fn(db)\n })\n } else {\n await migration.fn(db)\n }\n\n log(`[Migration: ${migrationName}] [DB: ${dbName}] Complete`)\n }\n\n // mark as complete\n doc[migrationName] = Date.now()\n await db.put(doc)\n } catch (err) {\n console.error(\n `[Migration: ${migrationName}] [DB: ${dbName}] Error: `,\n err\n )\n throw err\n }\n }\n}\n\nexport const runMigrations = async (\n migrations: Migration[],\n options: MigrationOptions = {}\n) => {\n let tenantIds\n\n if (environment.MULTI_TENANCY) {\n if (options.noOp) {\n tenantIds = [options.noOp.tenantId]\n } else if (!options.tenantIds || !options.tenantIds.length) {\n // run for all tenants\n tenantIds = await platform.tenants.getTenantIds()\n } else {\n tenantIds = options.tenantIds\n }\n } else {\n // single tenancy\n tenantIds = [DEFAULT_TENANT_ID]\n }\n\n if (tenantIds.length > 1) {\n console.log(`Checking migrations for ${tenantIds.length} tenants`)\n } else {\n console.log(\"Checking migrations\")\n }\n\n let count = 0\n // for all tenants\n for (const tenantId of tenantIds) {\n count++\n if (tenantIds.length > 1) {\n console.log(`Progress [${count}/${tenantIds.length}]`)\n }\n // for all migrations\n for (const migration of migrations) {\n // run the migration\n await context.doInTenant(\n tenantId,\n async () => await runMigration(migration, options)\n )\n }\n }\n console.log(\"Migrations complete\")\n}\n", "import {\n MigrationType,\n MigrationName,\n MigrationDefinition,\n} from \"@budibase/types\"\n\nexport const DEFINITIONS: 
MigrationDefinition[] = [\n {\n type: MigrationType.GLOBAL,\n name: MigrationName.USER_EMAIL_VIEW_CASING,\n },\n {\n type: MigrationType.GLOBAL,\n name: MigrationName.SYNC_QUOTAS,\n },\n {\n type: MigrationType.APP,\n name: MigrationName.APP_URLS,\n },\n {\n type: MigrationType.APP,\n name: MigrationName.EVENT_APP_BACKFILL,\n },\n {\n type: MigrationType.APP,\n name: MigrationName.TABLE_SETTINGS_LINKS_TO_ACTIONS,\n },\n {\n type: MigrationType.GLOBAL,\n name: MigrationName.EVENT_GLOBAL_BACKFILL,\n },\n {\n type: MigrationType.INSTALLATION,\n name: MigrationName.EVENT_INSTALLATION_BACKFILL,\n },\n {\n type: MigrationType.GLOBAL,\n name: MigrationName.GLOBAL_INFO_SYNC_USERS,\n },\n]\n", "import { BuiltinPermissionID, PermissionLevel } from \"./permissions\"\nimport {\n prefixRoleID,\n getRoleParams,\n DocumentType,\n SEPARATOR,\n doWithDB,\n} from \"../db\"\nimport { getAppDB } from \"../context\"\nimport { Screen, Role as RoleDoc } from \"@budibase/types\"\nimport cloneDeep from \"lodash/fp/cloneDeep\"\n\nexport const BUILTIN_ROLE_IDS = {\n ADMIN: \"ADMIN\",\n POWER: \"POWER\",\n BASIC: \"BASIC\",\n PUBLIC: \"PUBLIC\",\n}\n\nconst BUILTIN_IDS = {\n ...BUILTIN_ROLE_IDS,\n BUILDER: \"BUILDER\",\n}\n\n// exclude internal roles like builder\nconst EXTERNAL_BUILTIN_ROLE_IDS = [\n BUILTIN_IDS.ADMIN,\n BUILTIN_IDS.POWER,\n BUILTIN_IDS.BASIC,\n BUILTIN_IDS.PUBLIC,\n]\n\nexport const RoleIDVersion = {\n // original version, with a UUID based ID\n UUID: undefined,\n // new version - with name based ID\n NAME: \"name\",\n}\n\nexport class Role implements RoleDoc {\n _id: string\n _rev?: string\n name: string\n permissionId: string\n inherits?: string\n version?: string\n permissions = {}\n\n constructor(id: string, name: string, permissionId: string) {\n this._id = id\n this.name = name\n this.permissionId = permissionId\n // version for managing the ID - removing the role_ when responding\n this.version = RoleIDVersion.NAME\n }\n\n addInheritance(inherits: string) {\n this.inherits = inherits\n return this\n }\n}\n\nconst BUILTIN_ROLES = {\n ADMIN: new Role(\n BUILTIN_IDS.ADMIN,\n \"Admin\",\n BuiltinPermissionID.ADMIN\n ).addInheritance(BUILTIN_IDS.POWER),\n POWER: new Role(\n BUILTIN_IDS.POWER,\n \"Power\",\n BuiltinPermissionID.POWER\n ).addInheritance(BUILTIN_IDS.BASIC),\n BASIC: new Role(\n BUILTIN_IDS.BASIC,\n \"Basic\",\n BuiltinPermissionID.WRITE\n ).addInheritance(BUILTIN_IDS.PUBLIC),\n PUBLIC: new Role(BUILTIN_IDS.PUBLIC, \"Public\", BuiltinPermissionID.PUBLIC),\n BUILDER: new Role(BUILTIN_IDS.BUILDER, \"Builder\", BuiltinPermissionID.ADMIN),\n}\n\nexport function getBuiltinRoles(): { [key: string]: RoleDoc } {\n return cloneDeep(BUILTIN_ROLES)\n}\n\nexport const BUILTIN_ROLE_ID_ARRAY = Object.values(BUILTIN_ROLES).map(\n role => role._id\n)\n\nexport const BUILTIN_ROLE_NAME_ARRAY = Object.values(BUILTIN_ROLES).map(\n role => role.name\n)\n\nexport function isBuiltin(role?: string) {\n return BUILTIN_ROLE_ID_ARRAY.some(builtin => role?.includes(builtin))\n}\n\n/**\n * Works through the inheritance ranks to see how far up the builtin stack this ID is.\n */\nexport function builtinRoleToNumber(id?: string) {\n if (!id) {\n return 0\n }\n const builtins = getBuiltinRoles()\n const MAX = Object.values(builtins).length + 1\n if (id === BUILTIN_IDS.ADMIN || id === BUILTIN_IDS.BUILDER) {\n return MAX\n }\n let role = builtins[id],\n count = 0\n do {\n if (!role) {\n break\n }\n role = builtins[role.inherits!]\n count++\n } while (role !== null)\n return count\n}\n\n/**\n * Converts any role to a 
number, but has to be async to get the roles from db.\n */\nexport async function roleToNumber(id?: string) {\n if (isBuiltin(id)) {\n return builtinRoleToNumber(id)\n }\n const hierarchy = (await getUserRoleHierarchy(id, {\n defaultPublic: true,\n })) as RoleDoc[]\n for (let role of hierarchy) {\n if (isBuiltin(role?.inherits)) {\n return builtinRoleToNumber(role.inherits) + 1\n }\n }\n return 0\n}\n\n/**\n * Returns whichever builtin roleID is lower.\n */\nexport function lowerBuiltinRoleID(roleId1?: string, roleId2?: string): string {\n if (!roleId1) {\n return roleId2 as string\n }\n if (!roleId2) {\n return roleId1 as string\n }\n return builtinRoleToNumber(roleId1) > builtinRoleToNumber(roleId2)\n ? roleId2\n : roleId1\n}\n\n/**\n * Gets the role object, this is mainly useful for two purposes, to check if the level exists and\n * to check if the role inherits any others.\n * @param roleId The level ID to lookup.\n * @param opts options for the function, like whether to halt errors, instead return public.\n * @returns The role object, which may contain an \"inherits\" property.\n */\nexport async function getRole(\n roleId?: string,\n opts?: { defaultPublic?: boolean }\n): Promise<RoleDoc | undefined> {\n if (!roleId) {\n return undefined\n }\n let role: any = {}\n // built in roles mostly come from the in-code implementation,\n // but can be extended by a doc stored about them (e.g. permissions)\n if (isBuiltin(roleId)) {\n role = cloneDeep(\n Object.values(BUILTIN_ROLES).find(role => role._id === roleId)\n )\n } else {\n // make sure has the prefix (if it has it then it won't be added)\n roleId = prefixRoleID(roleId)\n }\n try {\n const db = getAppDB()\n const dbRole = await db.get(getDBRoleID(roleId))\n role = Object.assign(role, dbRole)\n // finalise the ID\n role._id = getExternalRoleID(role._id, role.version)\n } catch (err) {\n if (!isBuiltin(roleId) && opts?.defaultPublic) {\n return cloneDeep(BUILTIN_ROLES.PUBLIC)\n }\n // only throw an error if there is no role at all\n if (Object.keys(role).length === 0) {\n throw err\n }\n }\n return role\n}\n\n/**\n * Simple function to get all the roles based on the top level user role ID.\n */\nasync function getAllUserRoles(\n userRoleId?: string,\n opts?: { defaultPublic?: boolean }\n): Promise<RoleDoc[]> {\n // admins have access to all roles\n if (userRoleId === BUILTIN_IDS.ADMIN) {\n return getAllRoles()\n }\n let currentRole = await getRole(userRoleId, opts)\n let roles = currentRole ? 
[currentRole] : []\n let roleIds = [userRoleId]\n // get all the inherited roles\n while (\n currentRole &&\n currentRole.inherits &&\n roleIds.indexOf(currentRole.inherits) === -1\n ) {\n roleIds.push(currentRole.inherits)\n currentRole = await getRole(currentRole.inherits)\n if (currentRole) {\n roles.push(currentRole)\n }\n }\n return roles\n}\n\nexport async function getUserRoleIdHierarchy(\n userRoleId?: string\n): Promise<string[]> {\n const roles = await getUserRoleHierarchy(userRoleId)\n return roles.map(role => role._id!)\n}\n\n/**\n * Returns an ordered array of the user's inherited role IDs, this can be used\n * to determine if a user can access something that requires a specific role.\n * @param userRoleId The user's role ID, this can be found in their access token.\n * @param opts optional - if want to default to public use this.\n * @returns returns an ordered array of the roles, with the first being their\n * highest level of access and the last being the lowest level.\n */\nexport async function getUserRoleHierarchy(\n userRoleId?: string,\n opts?: { defaultPublic?: boolean }\n) {\n // special case, if they don't have a role then they are a public user\n return getAllUserRoles(userRoleId, opts)\n}\n\n// this function checks that the provided permissions are in an array format\n// some templates/older apps will use a simple string instead of array for roles\n// convert the string to an array using the theory that write is higher than read\nexport function checkForRoleResourceArray(\n rolePerms: { [key: string]: string[] },\n resourceId: string\n) {\n if (rolePerms && !Array.isArray(rolePerms[resourceId])) {\n const permLevel = rolePerms[resourceId] as any\n rolePerms[resourceId] = [permLevel]\n if (permLevel === PermissionLevel.WRITE) {\n rolePerms[resourceId].push(PermissionLevel.READ)\n }\n }\n return rolePerms\n}\n\nexport async function getAllRoleIds(appId?: string) {\n const roles = await getAllRoles(appId)\n return roles.map(role => role._id)\n}\n\n/**\n * Given an app ID this will retrieve all of the roles that are currently within that app.\n * @return An array of the role objects that were found.\n */\nexport async function getAllRoles(appId?: string): Promise<RoleDoc[]> {\n if (appId) {\n return doWithDB(appId, internal)\n } else {\n let appDB\n try {\n appDB = getAppDB()\n } catch (error) {\n // We don't have any apps, so we'll just use the built-in roles\n }\n return internal(appDB)\n }\n async function internal(db: any) {\n let roles: RoleDoc[] = []\n if (db) {\n const body = await db.allDocs(\n getRoleParams(null, {\n include_docs: true,\n })\n )\n roles = body.rows.map((row: any) => row.doc)\n roles.forEach(\n role => (role._id = getExternalRoleID(role._id!, role.version))\n )\n }\n const builtinRoles = getBuiltinRoles()\n\n // need to combine builtin with any DB record of them (for sake of permissions)\n for (let builtinRoleId of EXTERNAL_BUILTIN_ROLE_IDS) {\n const builtinRole = builtinRoles[builtinRoleId]\n const dbBuiltin = roles.filter(\n dbRole =>\n getExternalRoleID(dbRole._id!, dbRole.version) === builtinRoleId\n )[0]\n if (dbBuiltin == null) {\n roles.push(builtinRole || builtinRoles.BASIC)\n } else {\n // remove role and all back after combining with the builtin\n roles = roles.filter(role => role._id !== dbBuiltin._id)\n dbBuiltin._id = getExternalRoleID(dbBuiltin._id!, dbBuiltin.version)\n roles.push(Object.assign(builtinRole, dbBuiltin))\n }\n }\n // check permissions\n for (let role of roles) {\n if (!role.permissions) {\n continue\n }\n for (let 
resourceId of Object.keys(role.permissions)) {\n role.permissions = checkForRoleResourceArray(\n role.permissions,\n resourceId\n )\n }\n }\n return roles\n }\n}\n\nexport class AccessController {\n userHierarchies: { [key: string]: string[] }\n constructor() {\n this.userHierarchies = {}\n }\n\n async hasAccess(tryingRoleId?: string, userRoleId?: string) {\n // special cases, the screen has no role, the roles are the same or the user\n // is currently in the builder\n if (\n tryingRoleId == null ||\n tryingRoleId === \"\" ||\n tryingRoleId === userRoleId ||\n tryingRoleId === BUILTIN_IDS.BUILDER ||\n userRoleId === BUILTIN_IDS.BUILDER\n ) {\n return true\n }\n let roleIds = userRoleId ? this.userHierarchies[userRoleId] : null\n if (!roleIds && userRoleId) {\n roleIds = await getUserRoleIdHierarchy(userRoleId)\n this.userHierarchies[userRoleId] = roleIds\n }\n\n return roleIds?.indexOf(tryingRoleId) !== -1\n }\n\n async checkScreensAccess(screens: Screen[], userRoleId: string) {\n let accessibleScreens = []\n // don't want to handle this with Promise.all as this would mean all custom roles would be\n // retrieved at same time, it is likely a custom role will be re-used and therefore want\n // to work in sync for performance save\n for (let screen of screens) {\n const accessible = await this.checkScreenAccess(screen, userRoleId)\n if (accessible) {\n accessibleScreens.push(accessible)\n }\n }\n return accessibleScreens\n }\n\n async checkScreenAccess(screen: Screen, userRoleId: string) {\n const roleId = screen && screen.routing ? screen.routing.roleId : undefined\n if (await this.hasAccess(roleId, userRoleId)) {\n return screen\n }\n return null\n }\n}\n\n/**\n * Adds the \"role_\" for builtin role IDs which are to be written to the DB (for permissions).\n */\nexport function getDBRoleID(roleName: string) {\n if (roleName?.startsWith(DocumentType.ROLE)) {\n return roleName\n }\n return prefixRoleID(roleName)\n}\n\n/**\n * Remove the \"role_\" from builtin role IDs that have been written to the DB (for permissions).\n */\nexport function getExternalRoleID(roleId: string, version?: string) {\n // for built-in roles we want to remove the DB role ID element (role_)\n if (\n roleId.startsWith(DocumentType.ROLE) &&\n (isBuiltin(roleId) || version === RoleIDVersion.NAME)\n ) {\n return roleId.split(`${DocumentType.ROLE}${SEPARATOR}`)[1]\n }\n return roleId\n}\n", "import { PermissionLevel, PermissionType } from \"@budibase/types\"\nimport flatten from \"lodash/flatten\"\nimport cloneDeep from \"lodash/fp/cloneDeep\"\n\nexport { PermissionType, PermissionLevel } from \"@budibase/types\"\n\nexport type RoleHierarchy = {\n permissionId: string\n}[]\n\nexport class Permission {\n type: PermissionType\n level: PermissionLevel\n\n constructor(type: PermissionType, level: PermissionLevel) {\n this.type = type\n this.level = level\n }\n}\n\nexport function levelToNumber(perm: PermissionLevel) {\n switch (perm) {\n // not everything has execute privileges\n case PermissionLevel.EXECUTE:\n return 0\n case PermissionLevel.READ:\n return 1\n case PermissionLevel.WRITE:\n return 2\n case PermissionLevel.ADMIN:\n return 3\n default:\n return -1\n }\n}\n\n/**\n * Given the specified permission level for the user return the levels they are allowed to carry out.\n * @param userPermLevel The permission level of the user.\n * @return All the permission levels this user is allowed to carry out.\n */\nexport function getAllowedLevels(userPermLevel: PermissionLevel): string[] {\n switch (userPermLevel) {\n case 
PermissionLevel.EXECUTE:\n return [PermissionLevel.EXECUTE]\n case PermissionLevel.READ:\n return [PermissionLevel.EXECUTE, PermissionLevel.READ]\n case PermissionLevel.WRITE:\n case PermissionLevel.ADMIN:\n return [\n PermissionLevel.EXECUTE,\n PermissionLevel.READ,\n PermissionLevel.WRITE,\n ]\n default:\n return []\n }\n}\n\nexport enum BuiltinPermissionID {\n PUBLIC = \"public\",\n READ_ONLY = \"read_only\",\n WRITE = \"write\",\n ADMIN = \"admin\",\n POWER = \"power\",\n}\n\nexport const BUILTIN_PERMISSIONS = {\n PUBLIC: {\n _id: BuiltinPermissionID.PUBLIC,\n name: \"Public\",\n permissions: [\n new Permission(PermissionType.WEBHOOK, PermissionLevel.EXECUTE),\n ],\n },\n READ_ONLY: {\n _id: BuiltinPermissionID.READ_ONLY,\n name: \"Read only\",\n permissions: [\n new Permission(PermissionType.QUERY, PermissionLevel.READ),\n new Permission(PermissionType.TABLE, PermissionLevel.READ),\n new Permission(PermissionType.APP, PermissionLevel.READ),\n ],\n },\n WRITE: {\n _id: BuiltinPermissionID.WRITE,\n name: \"Read/Write\",\n permissions: [\n new Permission(PermissionType.QUERY, PermissionLevel.WRITE),\n new Permission(PermissionType.TABLE, PermissionLevel.WRITE),\n new Permission(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),\n new Permission(PermissionType.LEGACY_VIEW, PermissionLevel.READ),\n new Permission(PermissionType.APP, PermissionLevel.READ),\n ],\n },\n POWER: {\n _id: BuiltinPermissionID.POWER,\n name: \"Power\",\n permissions: [\n new Permission(PermissionType.TABLE, PermissionLevel.WRITE),\n new Permission(PermissionType.USER, PermissionLevel.READ),\n new Permission(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),\n new Permission(PermissionType.WEBHOOK, PermissionLevel.READ),\n new Permission(PermissionType.LEGACY_VIEW, PermissionLevel.READ),\n new Permission(PermissionType.APP, PermissionLevel.READ),\n ],\n },\n ADMIN: {\n _id: BuiltinPermissionID.ADMIN,\n name: \"Admin\",\n permissions: [\n new Permission(PermissionType.TABLE, PermissionLevel.ADMIN),\n new Permission(PermissionType.USER, PermissionLevel.ADMIN),\n new Permission(PermissionType.AUTOMATION, PermissionLevel.ADMIN),\n new Permission(PermissionType.WEBHOOK, PermissionLevel.READ),\n new Permission(PermissionType.QUERY, PermissionLevel.ADMIN),\n new Permission(PermissionType.LEGACY_VIEW, PermissionLevel.READ),\n new Permission(PermissionType.APP, PermissionLevel.READ),\n ],\n },\n}\n\nexport function getBuiltinPermissions() {\n return cloneDeep(BUILTIN_PERMISSIONS)\n}\n\nexport function getBuiltinPermissionByID(id: string) {\n const perms = Object.values(BUILTIN_PERMISSIONS)\n return perms.find(perm => perm._id === id)\n}\n\nexport function doesHaveBasePermission(\n permType: PermissionType,\n permLevel: PermissionLevel,\n rolesHierarchy: RoleHierarchy\n) {\n const basePermissions = [\n ...new Set(rolesHierarchy.map(role => role.permissionId)),\n ]\n const builtins = Object.values(BUILTIN_PERMISSIONS)\n let permissions = flatten(\n builtins\n .filter(builtin => basePermissions.indexOf(builtin._id) !== -1)\n .map(builtin => builtin.permissions)\n )\n for (let permission of permissions) {\n if (\n permission.type === permType &&\n getAllowedLevels(permission.level).indexOf(permLevel) !== -1\n ) {\n return true\n }\n }\n return false\n}\n\nexport function isPermissionLevelHigherThanRead(level: PermissionLevel) {\n return levelToNumber(level) > 1\n}\n\n// utility as a lot of things need simply the builder permission\nexport const BUILDER = PermissionType.BUILDER\nexport const CREATOR = 
PermissionType.CREATOR\nexport const GLOBAL_BUILDER = PermissionType.GLOBAL_BUILDER\n", "import env from \"../environment\"\nimport * as context from \"../context\"\n\nexport * from \"./installation\"\n\n/**\n * Read the TENANT_FEATURE_FLAGS env var and return an array of features flags for each tenant.\n * The env var is formatted as:\n * tenant1:feature1:feature2,tenant2:feature1\n */\nexport function buildFeatureFlags() {\n if (!env.TENANT_FEATURE_FLAGS) {\n return\n }\n\n const tenantFeatureFlags: Record<string, string[]> = {}\n\n env.TENANT_FEATURE_FLAGS.split(\",\").forEach(tenantToFeatures => {\n const [tenantId, ...features] = tenantToFeatures.split(\":\")\n\n features.forEach(feature => {\n if (!tenantFeatureFlags[tenantId]) {\n tenantFeatureFlags[tenantId] = []\n }\n tenantFeatureFlags[tenantId].push(feature)\n })\n })\n\n return tenantFeatureFlags\n}\n\nexport function isEnabled(featureFlag: string) {\n const tenantId = context.getTenantId()\n const flags = getTenantFeatureFlags(tenantId)\n return flags.includes(featureFlag)\n}\n\nexport function getTenantFeatureFlags(tenantId: string) {\n let flags: string[] = []\n const envFlags = buildFeatureFlags()\n if (envFlags) {\n const globalFlags = envFlags[\"*\"]\n const tenantFlags = envFlags[tenantId] || []\n\n // Explicitly exclude tenants from global features if required.\n // Prefix the tenant flag with '!'\n const tenantOverrides = tenantFlags.reduce(\n (acc: string[], flag: string) => {\n if (flag.startsWith(\"!\")) {\n let stripped = flag.substring(1)\n acc.push(stripped)\n }\n return acc\n },\n []\n )\n\n if (globalFlags) {\n flags.push(...globalFlags)\n }\n if (tenantFlags.length) {\n flags.push(...tenantFlags)\n }\n\n // Purge any tenant specific overrides\n flags = flags.filter(flag => {\n return tenantOverrides.indexOf(flag) == -1 && !flag.startsWith(\"!\")\n })\n }\n\n return flags\n}\n\nexport enum TenantFeatureFlag {\n LICENSING = \"LICENSING\",\n GOOGLE_SHEETS = \"GOOGLE_SHEETS\",\n USER_GROUPS = \"USER_GROUPS\",\n ONBOARDING_TOUR = \"ONBOARDING_TOUR\",\n}\n", "export function processFeatureEnvVar<T>(\n fullList: string[],\n featureList?: string\n) {\n let list\n if (!featureList) {\n list = fullList\n } else {\n list = featureList.split(\",\")\n }\n for (let feature of list) {\n if (!fullList.includes(feature)) {\n throw new Error(`Feature: ${feature} is not an allowed option`)\n }\n }\n return list as unknown as T[]\n}\n", "export * from \"./auth\"\n", "const _passport = require(\"koa-passport\")\nconst LocalStrategy = require(\"passport-local\").Strategy\n\nimport { getGlobalDB } from \"../context\"\nimport { Cookie } from \"../constants\"\nimport { getSessionsForUser, invalidateSessions } from \"../security/sessions\"\nimport {\n authenticated,\n csrf,\n google,\n local,\n oidc,\n tenancy,\n} from \"../middleware\"\nimport * as userCache from \"../cache/user\"\nimport { invalidateUser } from \"../cache/user\"\nimport {\n ConfigType,\n GoogleInnerConfig,\n OIDCInnerConfig,\n PlatformLogoutOpts,\n SessionCookie,\n SSOProviderType,\n} from \"@budibase/types\"\nimport * as events from \"../events\"\nimport * as configs from \"../configs\"\nimport { clearCookie, getCookie } from \"../utils\"\nimport { ssoSaveUserNoOp } from \"../middleware/passport/sso/sso\"\n\nconst refresh = require(\"passport-oauth2-refresh\")\n\nexport {\n auditLog,\n authError,\n internalApi,\n ssoCallbackUrl,\n adminOnly,\n builderOnly,\n builderOrAdmin,\n joiValidator,\n google,\n oidc,\n} from \"../middleware\"\nexport const buildAuthMiddleware 
= authenticated\nexport const buildTenancyMiddleware = tenancy\nexport const buildCsrfMiddleware = csrf\nexport const passport = _passport\n\n// Strategies\n_passport.use(new LocalStrategy(local.options, local.authenticate))\n\nasync function refreshOIDCAccessToken(\n chosenConfig: OIDCInnerConfig,\n refreshToken: string\n): Promise<RefreshResponse> {\n const callbackUrl = await oidc.getCallbackUrl()\n let enrichedConfig: any\n let strategy: any\n\n try {\n enrichedConfig = await oidc.fetchStrategyConfig(chosenConfig, callbackUrl)\n if (!enrichedConfig) {\n throw new Error(\"OIDC Config contents invalid\")\n }\n strategy = await oidc.strategyFactory(enrichedConfig, ssoSaveUserNoOp)\n } catch (err) {\n console.error(err)\n throw new Error(\"Could not refresh OAuth Token\")\n }\n\n refresh.use(strategy, {\n setRefreshOAuth2() {\n return strategy._getOAuth2Client(enrichedConfig)\n },\n })\n\n return new Promise(resolve => {\n refresh.requestNewAccessToken(\n ConfigType.OIDC,\n refreshToken,\n (err: any, accessToken: string, refreshToken: any, params: any) => {\n resolve({ err, accessToken, refreshToken, params })\n }\n )\n })\n}\n\nasync function refreshGoogleAccessToken(\n config: GoogleInnerConfig,\n refreshToken: any\n): Promise<RefreshResponse> {\n let callbackUrl = await google.getCallbackUrl(config)\n\n let strategy\n try {\n strategy = await google.strategyFactory(\n config,\n callbackUrl,\n ssoSaveUserNoOp\n )\n } catch (err: any) {\n console.error(err)\n throw new Error(\n `Error constructing OIDC refresh strategy: message=${err.message}`\n )\n }\n\n refresh.use(strategy)\n\n return new Promise(resolve => {\n refresh.requestNewAccessToken(\n ConfigType.GOOGLE,\n refreshToken,\n (err: any, accessToken: string, refreshToken: string, params: any) => {\n resolve({ err, accessToken, refreshToken, params })\n }\n )\n })\n}\n\ninterface RefreshResponse {\n err?: {\n data?: string\n }\n accessToken?: string\n refreshToken?: string\n params?: any\n}\n\nexport async function refreshOAuthToken(\n refreshToken: string,\n providerType: SSOProviderType,\n configId?: string\n): Promise<RefreshResponse> {\n switch (providerType) {\n case SSOProviderType.OIDC:\n if (!configId) {\n return { err: { data: \"OIDC config id not provided\" } }\n }\n const oidcConfig = await configs.getOIDCConfigById(configId)\n if (!oidcConfig) {\n return { err: { data: \"OIDC configuration not found\" } }\n }\n return refreshOIDCAccessToken(oidcConfig, refreshToken)\n case SSOProviderType.GOOGLE:\n let googleConfig = await configs.getGoogleConfig()\n if (!googleConfig) {\n return { err: { data: \"Google configuration not found\" } }\n }\n return refreshGoogleAccessToken(googleConfig, refreshToken)\n }\n}\n\n// TODO: Refactor to use user save function instead to prevent the need for\n// manually saving and invalidating on callback\nexport async function updateUserOAuth(userId: string, oAuthConfig: any) {\n const details = {\n accessToken: oAuthConfig.accessToken,\n refreshToken: oAuthConfig.refreshToken,\n }\n\n try {\n const db = getGlobalDB()\n const dbUser = await db.get<any>(userId)\n\n //Do not overwrite the refresh token if a valid one is not provided.\n if (typeof details.refreshToken !== \"string\") {\n delete details.refreshToken\n }\n\n dbUser.oauth2 = {\n ...dbUser.oauth2,\n ...details,\n }\n\n await db.put(dbUser)\n\n await invalidateUser(userId)\n } catch (e) {\n console.error(\"Could not update OAuth details for current user\", e)\n }\n}\n\n/**\n * Logs a user out from budibase. 
Re-used across account portal and builder.\n */\nexport async function platformLogout(opts: PlatformLogoutOpts) {\n const ctx = opts.ctx\n const userId = opts.userId\n const keepActiveSession = opts.keepActiveSession\n\n if (!ctx) throw new Error(\"Koa context must be supplied to logout.\")\n\n const currentSession = getCookie<SessionCookie>(ctx, Cookie.Auth)\n let sessions = await getSessionsForUser(userId)\n\n if (currentSession && keepActiveSession) {\n sessions = sessions.filter(\n session => session.sessionId !== currentSession.sessionId\n )\n } else {\n // clear cookies\n clearCookie(ctx, Cookie.Auth)\n }\n\n const sessionIds = sessions.map(({ sessionId }) => sessionId)\n await invalidateSessions(userId, { sessionIds, reason: \"logout\" })\n await events.auth.logout(ctx.user?.email)\n await userCache.invalidateUser(userId)\n}\n", "export * as local from \"./passport/local\"\nexport * as google from \"./passport/sso/google\"\nexport * as oidc from \"./passport/sso/oidc\"\nimport * as datasourceGoogle from \"./passport/datasource/google\"\n\nexport const datasource = {\n google: datasourceGoogle,\n}\nexport { authError, ssoCallbackUrl } from \"./passport/utils\"\nexport { default as authenticated } from \"./authenticated\"\nexport { default as auditLog } from \"./auditLog\"\nexport { default as tenancy } from \"./tenancy\"\nexport { default as internalApi } from \"./internalApi\"\nexport { default as csrf } from \"./csrf\"\nexport { default as adminOnly } from \"./adminOnly\"\nexport { default as builderOrAdmin } from \"./builderOrAdmin\"\nexport { default as builderOnly } from \"./builderOnly\"\nexport { default as pino } from \"../logging/pino/middleware\"\nexport { default as correlation } from \"../logging/correlation/middleware\"\nexport { default as errorHandling } from \"./errorHandling\"\nexport { default as querystringToBody } from \"./querystringToBody\"\nexport * as joiValidator from \"./joi-validator\"\n", "import { UserStatus } from \"../../constants\"\nimport { compare } from \"../../utils\"\nimport * as users from \"../../users\"\nimport { authError } from \"./utils\"\nimport { BBContext } from \"@budibase/types\"\n\nconst INVALID_ERR = \"Invalid credentials\"\nconst EXPIRED = \"This account has expired. 
Please reset your password\"\n\nexport const options = {\n passReqToCallback: true,\n}\n\n/**\n * Passport Local Authentication Middleware.\n * @param ctx the request structure\n * @param email username to login with\n * @param password plain text password to log in with\n * @param done callback from passport to return user information and errors\n * @returns The authenticated user, or errors if they occur\n */\nexport async function authenticate(\n ctx: BBContext,\n email: string,\n password: string,\n done: Function\n) {\n if (!email) return authError(done, \"Email Required\")\n if (!password) return authError(done, \"Password Required\")\n\n const dbUser = await users.getGlobalUserByEmail(email)\n if (dbUser == null) {\n console.info(`user=${email} could not be found`)\n return authError(done, INVALID_ERR)\n }\n\n if (dbUser.status === UserStatus.INACTIVE) {\n console.info(`user=${email} is inactive`, dbUser)\n return authError(done, INVALID_ERR)\n }\n\n if (!dbUser.password) {\n console.info(`user=${email} has no password set`, dbUser)\n return authError(done, EXPIRED)\n }\n\n if (!(await compare(password, dbUser.password))) {\n return authError(done, INVALID_ERR)\n }\n\n // intentionally remove the users password in payload\n delete dbUser.password\n return done(null, dbUser)\n}\n", "import { getTenantId, isMultiTenant } from \"../../context\"\nimport * as configs from \"../../configs\"\nimport { ConfigType, GoogleInnerConfig } from \"@budibase/types\"\n\n/**\n * Utility to handle authentication errors.\n *\n * @param done The passport callback.\n * @param message Message that will be returned in the response body\n * @param err (Optional) error that will be logged\n */\n\nexport function authError(done: Function, message: string, err?: any) {\n return done(\n err,\n null, // never return a user\n { message: message }\n )\n}\n\nexport async function ssoCallbackUrl(\n type: ConfigType,\n config?: GoogleInnerConfig\n) {\n // incase there is a callback URL from before\n if (config && (config as GoogleInnerConfig).callbackURL) {\n return (config as GoogleInnerConfig).callbackURL as string\n }\n const settingsConfig = await configs.getSettingsConfig()\n\n let callbackUrl = `/api/global/auth`\n if (isMultiTenant()) {\n callbackUrl += `/${getTenantId()}`\n }\n callbackUrl += `/${type}/callback`\n\n return `${settingsConfig.platformUrl}${callbackUrl}`\n}\n", "import { ssoCallbackUrl } from \"../utils\"\nimport * as sso from \"./sso\"\nimport {\n ConfigType,\n SSOProfile,\n SSOAuthDetails,\n SSOProviderType,\n SaveSSOUserFunction,\n GoogleInnerConfig,\n} from \"@budibase/types\"\n\nconst GoogleStrategy = require(\"passport-google-oauth\").OAuth2Strategy\n\nexport function buildVerifyFn(saveUserFn: SaveSSOUserFunction) {\n return (\n accessToken: string,\n refreshToken: string,\n profile: SSOProfile,\n done: Function\n ) => {\n const details: SSOAuthDetails = {\n provider: \"google\",\n providerType: SSOProviderType.GOOGLE,\n userId: profile.id,\n profile: profile,\n email: profile._json.email,\n oauth2: {\n accessToken,\n refreshToken,\n },\n }\n\n return sso.authenticate(\n details,\n true, // require local accounts to exist\n done,\n saveUserFn\n )\n }\n}\n\n/**\n * Create an instance of the google passport strategy. 
This wrapper fetches the configuration\n * from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport.\n * @returns Dynamically configured Passport Google Strategy\n */\nexport async function strategyFactory(\n config: GoogleInnerConfig,\n callbackUrl: string,\n saveUserFn: SaveSSOUserFunction\n) {\n try {\n const { clientID, clientSecret } = config\n\n if (!clientID || !clientSecret) {\n throw new Error(\n \"Configuration invalid. Must contain google clientID and clientSecret\"\n )\n }\n\n const verify = buildVerifyFn(saveUserFn)\n return new GoogleStrategy(\n {\n clientID: config.clientID,\n clientSecret: config.clientSecret,\n callbackURL: callbackUrl,\n },\n verify\n )\n } catch (err: any) {\n console.error(err)\n throw new Error(`Error constructing google authentication strategy: ${err}`)\n }\n}\n\nexport async function getCallbackUrl(config: GoogleInnerConfig) {\n return ssoCallbackUrl(ConfigType.GOOGLE, config)\n}\n", "import { generateGlobalUserID } from \"../../../db\"\nimport { authError } from \"../utils\"\nimport * as users from \"../../../users\"\nimport * as context from \"../../../context\"\nimport fetch from \"node-fetch\"\nimport {\n SaveSSOUserFunction,\n SaveUserOpts,\n SSOAuthDetails,\n SSOUser,\n User,\n} from \"@budibase/types\"\n\n// no-op function for user save\n// - this allows datasource auth and access token refresh to work correctly\n// - prefer no-op over an optional argument to ensure function is provided to login flows\nexport const ssoSaveUserNoOp: SaveSSOUserFunction = (\n user: SSOUser,\n opts: SaveUserOpts\n) => Promise.resolve(user)\n\n/**\n * Common authentication logic for third parties. e.g. OAuth, OIDC.\n */\nexport async function authenticate(\n details: SSOAuthDetails,\n requireLocalAccount: boolean = true,\n done: any,\n saveUserFn: SaveSSOUserFunction\n) {\n if (!saveUserFn) {\n throw new Error(\"Save user function must be provided\")\n }\n if (!details.userId) {\n return authError(done, \"sso user id required\")\n }\n if (!details.email) {\n return authError(done, \"sso user email required\")\n }\n\n // use the third party id\n const userId = generateGlobalUserID(details.userId)\n\n let dbUser: User | undefined\n\n // try to load by id\n try {\n dbUser = await users.getById(userId)\n } catch (err: any) {\n // abort when not 404 error\n if (!err.status || err.status !== 404) {\n return authError(\n done,\n \"Unexpected error when retrieving existing user\",\n err\n )\n }\n }\n\n // fallback to loading by email\n if (!dbUser) {\n dbUser = await users.getGlobalUserByEmail(details.email)\n }\n\n // exit early if there is still no user and auto creation is disabled\n if (!dbUser && requireLocalAccount) {\n return authError(\n done,\n \"Email does not yet exist. 
You must set up your local budibase account first.\"\n )\n }\n\n // first time creation\n if (!dbUser) {\n // setup a blank user using the third party id\n dbUser = {\n _id: userId,\n email: details.email,\n roles: {},\n tenantId: context.getTenantId(),\n }\n }\n\n let ssoUser = await syncUser(dbUser, details)\n // never prompt for password reset\n ssoUser.forceResetPassword = false\n\n try {\n // don't try to re-save any existing password\n delete ssoUser.password\n // create or sync the user\n ssoUser = (await saveUserFn(ssoUser, {\n hashPassword: false,\n requirePassword: false,\n })) as SSOUser\n } catch (err: any) {\n return authError(done, \"Error saving user\", err)\n }\n\n return done(null, ssoUser)\n}\n\nasync function getProfilePictureUrl(user: User, details: SSOAuthDetails) {\n const pictureUrl = details.profile?._json.picture\n if (pictureUrl) {\n const response = await fetch(pictureUrl)\n if (response.status === 200) {\n const type = response.headers.get(\"content-type\") as string\n if (type.startsWith(\"image/\")) {\n return pictureUrl\n }\n }\n }\n}\n\n/**\n * @returns a user that has been sync'd with third party information\n */\nasync function syncUser(user: User, details: SSOAuthDetails): Promise<SSOUser> {\n let firstName\n let lastName\n let pictureUrl\n let oauth2\n let thirdPartyProfile\n\n if (details.profile) {\n const profile = details.profile\n\n if (profile.name) {\n const name = profile.name\n // first name\n if (name.givenName) {\n firstName = name.givenName\n }\n // last name\n if (name.familyName) {\n lastName = name.familyName\n }\n }\n\n pictureUrl = await getProfilePictureUrl(user, details)\n\n thirdPartyProfile = {\n ...profile._json,\n }\n }\n\n // oauth tokens for future use\n if (details.oauth2) {\n oauth2 = {\n ...details.oauth2,\n }\n }\n\n return {\n ...user,\n provider: details.provider,\n providerType: details.providerType,\n firstName,\n lastName,\n thirdPartyProfile,\n pictureUrl,\n oauth2,\n }\n}\n", "import fetch from \"node-fetch\"\nimport * as sso from \"./sso\"\nimport { ssoCallbackUrl } from \"../utils\"\nimport { validEmail } from \"../../../utils\"\nimport {\n ConfigType,\n OIDCInnerConfig,\n SSOProfile,\n OIDCStrategyConfiguration,\n SSOAuthDetails,\n SSOProviderType,\n JwtClaims,\n SaveSSOUserFunction,\n} from \"@budibase/types\"\n\nconst OIDCStrategy = require(\"@techpass/passport-openidconnect\").Strategy\n\nexport function buildVerifyFn(saveUserFn: SaveSSOUserFunction) {\n /**\n * @param issuer The identity provider base URL\n * @param sub The user ID\n * @param profile The user profile information. 
Created by passport from the /userinfo response\n * @param jwtClaims The parsed id_token claims\n * @param accessToken The access_token for contacting the identity provider - may or may not be a JWT\n * @param refreshToken The refresh_token for obtaining a new access_token - usually not a JWT\n * @param idToken The id_token - always a JWT\n * @param params The response body from requesting an access_token\n * @param done The passport callback: err, user, info\n */\n return async (\n issuer: string,\n sub: string,\n profile: SSOProfile,\n jwtClaims: JwtClaims,\n accessToken: string,\n refreshToken: string,\n idToken: string,\n params: any,\n done: Function\n ) => {\n const details: SSOAuthDetails = {\n // store the issuer info to enable sync in future\n provider: issuer,\n providerType: SSOProviderType.OIDC,\n userId: profile.id,\n profile: profile,\n email: getEmail(profile, jwtClaims),\n oauth2: {\n accessToken: accessToken,\n refreshToken: refreshToken,\n },\n }\n\n return sso.authenticate(\n details,\n false, // don't require local accounts to exist\n done,\n saveUserFn\n )\n }\n}\n\n/**\n * @param profile The structured profile created by passport using the user info endpoint\n * @param jwtClaims The claims returned in the id token\n */\nfunction getEmail(profile: SSOProfile, jwtClaims: JwtClaims) {\n // profile not guaranteed to contain email e.g. github connected azure ad account\n if (profile._json.email) {\n return profile._json.email\n }\n\n // fallback to id token email\n if (jwtClaims.email) {\n return jwtClaims.email\n }\n\n // fallback to id token preferred username\n const username = jwtClaims.preferred_username\n if (username && validEmail(username)) {\n return username\n }\n\n throw new Error(\n `Could not determine user email from profile ${JSON.stringify(\n profile\n )} and claims ${JSON.stringify(jwtClaims)}`\n )\n}\n\n/**\n * Create an instance of the oidc passport strategy. This wrapper fetches the configuration\n * from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport.\n * @returns Dynamically configured Passport OIDC Strategy\n */\nexport async function strategyFactory(\n config: OIDCStrategyConfiguration,\n saveUserFn: SaveSSOUserFunction\n) {\n try {\n const verify = buildVerifyFn(saveUserFn)\n const strategy = new OIDCStrategy(config, verify)\n strategy.name = \"oidc\"\n return strategy\n } catch (err: any) {\n console.error(err)\n throw new Error(`Error constructing OIDC authentication strategy - ${err}`)\n }\n}\n\nexport async function fetchStrategyConfig(\n oidcConfig: OIDCInnerConfig,\n callbackUrl?: string\n): Promise<OIDCStrategyConfiguration> {\n try {\n const { clientID, clientSecret, configUrl } = oidcConfig\n\n if (!clientID || !clientSecret || !callbackUrl || !configUrl) {\n // check for remote config and all required elements\n throw new Error(\n \"Configuration invalid. 
Must contain clientID, clientSecret, callbackUrl and configUrl\"\n )\n }\n\n const response = await fetch(configUrl)\n\n if (!response.ok) {\n throw new Error(\n `Unexpected response when fetching openid-configuration: ${response.statusText}`\n )\n }\n\n const body = await response.json()\n\n return {\n issuer: body.issuer,\n authorizationURL: body.authorization_endpoint,\n tokenURL: body.token_endpoint,\n userInfoURL: body.userinfo_endpoint,\n clientID: clientID,\n clientSecret: clientSecret,\n callbackURL: callbackUrl,\n }\n } catch (err) {\n console.error(err)\n throw new Error(\n `Error constructing OIDC authentication configuration - ${err}`\n )\n }\n}\n\nexport async function getCallbackUrl() {\n return ssoCallbackUrl(ConfigType.OIDC)\n}\n", "import * as google from \"../sso/google\"\nimport { Cookie } from \"../../../constants\"\nimport * as configs from \"../../../configs\"\nimport * as cache from \"../../../cache\"\nimport * as utils from \"../../../utils\"\nimport { UserCtx, SSOProfile, DatasourceAuthCookie } from \"@budibase/types\"\nimport { ssoSaveUserNoOp } from \"../sso/sso\"\n\nconst GoogleStrategy = require(\"passport-google-oauth\").OAuth2Strategy\n\ntype Passport = {\n authenticate: any\n}\n\nasync function fetchGoogleCreds() {\n let config = await configs.getGoogleDatasourceConfig()\n\n if (!config) {\n throw new Error(\"No google configuration found\")\n }\n return config\n}\n\nexport async function preAuth(\n passport: Passport,\n ctx: UserCtx,\n next: Function\n) {\n // get the relevant config\n const googleConfig = await fetchGoogleCreds()\n const platformUrl = await configs.getPlatformUrl({ tenantAware: false })\n\n let callbackUrl = `${platformUrl}/api/global/auth/datasource/google/callback`\n const strategy = await google.strategyFactory(\n googleConfig,\n callbackUrl,\n ssoSaveUserNoOp\n )\n\n if (!ctx.query.appId) {\n ctx.throw(400, \"appId query param not present.\")\n }\n\n return passport.authenticate(strategy, {\n scope: [\"profile\", \"email\", \"https://www.googleapis.com/auth/spreadsheets\"],\n accessType: \"offline\",\n prompt: \"consent\",\n })(ctx, next)\n}\n\nexport async function postAuth(\n passport: Passport,\n ctx: UserCtx,\n next: Function\n) {\n // get the relevant config\n const config = await fetchGoogleCreds()\n const platformUrl = await configs.getPlatformUrl({ tenantAware: false })\n\n let callbackUrl = `${platformUrl}/api/global/auth/datasource/google/callback`\n const authStateCookie = utils.getCookie<{ appId: string }>(\n ctx,\n Cookie.DatasourceAuth\n )\n\n if (!authStateCookie) {\n throw new Error(\"Unable to fetch datasource auth cookie\")\n }\n\n return passport.authenticate(\n new GoogleStrategy(\n {\n clientID: config.clientID,\n clientSecret: config.clientSecret,\n callbackURL: callbackUrl,\n },\n (\n accessToken: string,\n refreshToken: string,\n _profile: SSOProfile,\n done: Function\n ) => {\n utils.clearCookie(ctx, Cookie.DatasourceAuth)\n done(null, { accessToken, refreshToken })\n }\n ),\n { successRedirect: \"/\", failureRedirect: \"/error\" },\n async (err: any, tokens: string[]) => {\n const baseUrl = `/builder/app/${authStateCookie.appId}/data`\n\n const id = utils.newid()\n await cache.store(\n `datasource:creation:${authStateCookie.appId}:google:${id}`,\n {\n tokens,\n }\n )\n\n ctx.redirect(`${baseUrl}/new?continue_google_setup=${id}`)\n }\n )(ctx, next)\n}\n", "import { Cookie, Header } from \"../constants\"\nimport {\n getCookie,\n clearCookie,\n openJwt,\n isValidInternalAPIKey,\n} from \"../utils\"\nimport { 
getUser } from \"../cache/user\"\nimport { getSession, updateSessionTTL } from \"../security/sessions\"\nimport { buildMatcherRegex, matches } from \"./matchers\"\nimport { SEPARATOR, queryGlobalView, ViewName } from \"../db\"\nimport { getGlobalDB, doInTenant } from \"../context\"\nimport { decrypt } from \"../security/encryption\"\nimport * as identity from \"../context/identity\"\nimport env from \"../environment\"\nimport { Ctx, EndpointMatcher, SessionCookie } from \"@budibase/types\"\nimport { InvalidAPIKeyError, ErrorCode } from \"../errors\"\nimport tracer from \"dd-trace\"\n\nconst ONE_MINUTE = env.SESSION_UPDATE_PERIOD\n ? parseInt(env.SESSION_UPDATE_PERIOD)\n : 60 * 1000\n\ninterface FinaliseOpts {\n authenticated?: boolean\n internal?: boolean\n publicEndpoint?: boolean\n version?: string\n user?: any\n}\n\nfunction timeMinusOneMinute() {\n return new Date(Date.now() - ONE_MINUTE).toISOString()\n}\n\nfunction finalise(ctx: any, opts: FinaliseOpts = {}) {\n ctx.publicEndpoint = opts.publicEndpoint || false\n ctx.isAuthenticated = opts.authenticated || false\n ctx.user = opts.user\n ctx.internal = opts.internal || false\n ctx.version = opts.version\n}\n\nasync function checkApiKey(apiKey: string, populateUser?: Function) {\n // check both the primary and the fallback internal api keys\n // this allows for rotation\n if (isValidInternalAPIKey(apiKey)) {\n return { valid: true, user: undefined }\n }\n const decrypted = decrypt(apiKey)\n const tenantId = decrypted.split(SEPARATOR)[0]\n return doInTenant(tenantId, async () => {\n let userId\n try {\n const db = getGlobalDB()\n // api key is encrypted in the database\n userId = (await queryGlobalView(\n ViewName.BY_API_KEY,\n {\n key: apiKey,\n },\n db\n )) as string\n } catch (err) {\n userId = undefined\n }\n if (userId) {\n return {\n valid: true,\n user: await getUser(userId, tenantId, populateUser),\n }\n } else {\n throw new InvalidAPIKeyError()\n }\n })\n}\n\n/**\n * This middleware is tenancy aware, so that it does not depend on other middlewares being used.\n * The tenancy modules should not be used here and it should be assumed that the tenancy context\n * has not yet been populated.\n */\nexport default function (\n noAuthPatterns: EndpointMatcher[] = [],\n opts: { publicAllowed?: boolean; populateUser?: Function } = {\n publicAllowed: false,\n }\n) {\n const noAuthOptions = noAuthPatterns ? 
buildMatcherRegex(noAuthPatterns) : []\n return async (ctx: Ctx | any, next: any) => {\n let publicEndpoint = false\n const version = ctx.request.headers[Header.API_VER]\n // the path is not authenticated\n const found = matches(ctx, noAuthOptions)\n if (found) {\n publicEndpoint = true\n }\n try {\n // check the actual user is authenticated first, try header or cookie\n let headerToken = ctx.request.headers[Header.TOKEN]\n\n const authCookie =\n getCookie<SessionCookie>(ctx, Cookie.Auth) ||\n openJwt<SessionCookie>(headerToken)\n let apiKey = ctx.request.headers[Header.API_KEY]\n\n if (!apiKey && ctx.request.headers[Header.AUTHORIZATION]) {\n apiKey = ctx.request.headers[Header.AUTHORIZATION].split(\" \")[1]\n }\n\n const tenantId = ctx.request.headers[Header.TENANT_ID]\n let authenticated = false,\n user = null,\n internal = false\n if (authCookie && !apiKey) {\n const sessionId = authCookie.sessionId\n const userId = authCookie.userId\n let session\n try {\n // getting session handles error checking (if session exists etc)\n session = await getSession(userId, sessionId)\n if (opts && opts.populateUser) {\n user = await getUser(\n userId,\n session.tenantId,\n opts.populateUser(ctx)\n )\n } else {\n user = await getUser(userId, session.tenantId)\n }\n user.csrfToken = session.csrfToken\n\n if (session?.lastAccessedAt < timeMinusOneMinute()) {\n // make sure we denote that the session is still in use\n await updateSessionTTL(session)\n }\n authenticated = true\n } catch (err: any) {\n authenticated = false\n console.error(`Auth Error: ${err.message}`)\n console.error(err)\n // remove the cookie as the user does not exist anymore\n clearCookie(ctx, Cookie.Auth)\n }\n }\n // this is an internal request, no user made it\n if (!authenticated && apiKey) {\n const populateUser = opts.populateUser ? 
opts.populateUser(ctx) : null\n const { valid, user: foundUser } = await checkApiKey(\n apiKey,\n populateUser\n )\n if (valid && foundUser) {\n authenticated = true\n user = foundUser\n } else if (valid) {\n authenticated = true\n internal = true\n }\n }\n if (!user && tenantId) {\n user = { tenantId }\n } else if (user) {\n delete user.password\n }\n // be explicit\n if (!authenticated) {\n authenticated = false\n }\n\n if (user) {\n tracer.setUser({\n id: user?._id,\n tenantId: user?.tenantId,\n budibaseAccess: user?.budibaseAccess,\n status: user?.status,\n })\n }\n\n // isAuthenticated is a function, so use a variable to be able to check authed state\n finalise(ctx, { authenticated, user, internal, version, publicEndpoint })\n\n if (user && user.email) {\n return identity.doInUserContext(user, ctx, next)\n } else {\n return next()\n }\n } catch (err: any) {\n console.error(`Auth Error: ${err.message}`)\n console.error(err)\n // invalid token, clear the cookie\n if (err?.name === \"JsonWebTokenError\") {\n clearCookie(ctx, Cookie.Auth)\n } else if (err?.code === ErrorCode.INVALID_API_KEY) {\n ctx.throw(403, err.message)\n }\n // allow configuring for public access\n if ((opts && opts.publicAllowed) || publicEndpoint) {\n finalise(ctx, { authenticated: false, version, publicEndpoint })\n return next()\n } else {\n ctx.throw(err.status || 403, err)\n }\n }\n }\n}\n", "import { BBContext, EndpointMatcher, RegexMatcher } from \"@budibase/types\"\n\nconst PARAM_REGEX = /\\/:(.*?)(\\/.*)?$/g\n\nexport const buildMatcherRegex = (\n patterns: EndpointMatcher[]\n): RegexMatcher[] => {\n if (!patterns) {\n return []\n }\n return patterns.map(pattern => {\n let route = pattern.route\n const method = pattern.method\n const strict = pattern.strict ? pattern.strict : false\n\n // if there is a param in the route\n // use a wildcard pattern\n const matches = route.match(PARAM_REGEX)\n if (matches) {\n for (let match of matches) {\n const suffix = match.endsWith(\"/\") ? \"/\" : \"\"\n const pattern = \"/.*\" + suffix\n route = route.replace(match, pattern)\n }\n }\n\n return { regex: new RegExp(route), method, strict, route }\n })\n}\n\nexport const matches = (ctx: BBContext, options: RegexMatcher[]) => {\n return options.find(({ regex, method, strict, route }) => {\n let urlMatch\n if (strict) {\n urlMatch = ctx.request.url === route\n } else {\n urlMatch = regex.test(ctx.request.url)\n }\n\n const methodMatch =\n method === \"ALL\"\n ? 
true\n : ctx.request.method.toLowerCase() === method.toLowerCase()\n\n return urlMatch && methodMatch\n })\n}\n", "import crypto from \"crypto\"\nimport fs from \"fs\"\nimport zlib from \"zlib\"\nimport env from \"../environment\"\nimport { join } from \"path\"\n\nconst ALGO = \"aes-256-ctr\"\nconst SEPARATOR = \"-\"\nconst ITERATIONS = 10000\nconst STRETCH_LENGTH = 32\n\nconst SALT_LENGTH = 16\nconst IV_LENGTH = 16\n\nexport enum SecretOption {\n API = \"api\",\n ENCRYPTION = \"encryption\",\n}\n\nexport function getSecret(secretOption: SecretOption): string {\n let secret, secretName\n switch (secretOption) {\n case SecretOption.ENCRYPTION:\n secret = env.ENCRYPTION_KEY\n secretName = \"ENCRYPTION_KEY\"\n break\n case SecretOption.API:\n default:\n secret = env.API_ENCRYPTION_KEY\n secretName = \"API_ENCRYPTION_KEY\"\n break\n }\n if (!secret) {\n throw new Error(`Secret \"${secretName}\" has not been set in environment.`)\n }\n return secret\n}\n\nfunction stretchString(secret: string, salt: Buffer) {\n return crypto.pbkdf2Sync(secret, salt, ITERATIONS, STRETCH_LENGTH, \"sha512\")\n}\n\nexport function encrypt(\n input: string,\n secretOption: SecretOption = SecretOption.API\n) {\n const salt = crypto.randomBytes(SALT_LENGTH)\n const stretched = stretchString(getSecret(secretOption), salt)\n const cipher = crypto.createCipheriv(ALGO, stretched, salt)\n const base = cipher.update(input)\n const final = cipher.final()\n const encrypted = Buffer.concat([base, final]).toString(\"hex\")\n return `${salt.toString(\"hex\")}${SEPARATOR}${encrypted}`\n}\n\nexport function decrypt(\n input: string,\n secretOption: SecretOption = SecretOption.API\n) {\n const [salt, encrypted] = input.split(SEPARATOR)\n const saltBuffer = Buffer.from(salt, \"hex\")\n const stretched = stretchString(getSecret(secretOption), saltBuffer)\n const decipher = crypto.createDecipheriv(ALGO, stretched, saltBuffer)\n const base = decipher.update(Buffer.from(encrypted, \"hex\"))\n const final = decipher.final()\n return Buffer.concat([base, final]).toString()\n}\n\nexport async function encryptFile(\n { dir, filename }: { dir: string; filename: string },\n secret: string\n) {\n const outputFileName = `${filename}.enc`\n\n const filePath = join(dir, filename)\n if (fs.lstatSync(filePath).isDirectory()) {\n throw new Error(\"Unable to encrypt directory\")\n }\n const inputFile = fs.createReadStream(filePath)\n const outputFile = fs.createWriteStream(join(dir, outputFileName))\n\n const salt = crypto.randomBytes(SALT_LENGTH)\n const iv = crypto.randomBytes(IV_LENGTH)\n const stretched = stretchString(secret, salt)\n const cipher = crypto.createCipheriv(ALGO, stretched, iv)\n\n outputFile.write(salt)\n outputFile.write(iv)\n\n inputFile.pipe(zlib.createGzip()).pipe(cipher).pipe(outputFile)\n\n return new Promise<{ filename: string; dir: string }>(r => {\n outputFile.on(\"finish\", () => {\n r({\n filename: outputFileName,\n dir,\n })\n })\n })\n}\n\nasync function getSaltAndIV(path: string) {\n const fileStream = fs.createReadStream(path)\n\n const salt = await readBytes(fileStream, SALT_LENGTH)\n const iv = await readBytes(fileStream, IV_LENGTH)\n fileStream.close()\n return { salt, iv }\n}\n\nexport async function decryptFile(\n inputPath: string,\n outputPath: string,\n secret: string\n) {\n if (fs.lstatSync(inputPath).isDirectory()) {\n throw new Error(\"Unable to encrypt directory\")\n }\n const { salt, iv } = await getSaltAndIV(inputPath)\n const inputFile = fs.createReadStream(inputPath, {\n start: SALT_LENGTH + 
IV_LENGTH,\n })\n\n const outputFile = fs.createWriteStream(outputPath)\n\n const stretched = stretchString(secret, salt)\n const decipher = crypto.createDecipheriv(ALGO, stretched, iv)\n\n const unzip = zlib.createGunzip()\n\n inputFile.pipe(decipher).pipe(unzip).pipe(outputFile)\n\n return new Promise<void>((res, rej) => {\n outputFile.on(\"finish\", () => {\n outputFile.close()\n res()\n })\n\n inputFile.on(\"error\", e => {\n outputFile.close()\n rej(e)\n })\n\n decipher.on(\"error\", e => {\n outputFile.close()\n rej(e)\n })\n\n unzip.on(\"error\", e => {\n outputFile.close()\n rej(e)\n })\n\n outputFile.on(\"error\", e => {\n outputFile.close()\n rej(e)\n })\n })\n}\n\nfunction readBytes(stream: fs.ReadStream, length: number) {\n return new Promise<Buffer>((resolve, reject) => {\n let bytesRead = 0\n const data: Buffer[] = []\n\n stream.on(\"readable\", () => {\n let chunk\n\n while ((chunk = stream.read(length - bytesRead)) !== null) {\n data.push(chunk)\n bytesRead += chunk.length\n }\n\n resolve(Buffer.concat(data))\n })\n\n stream.on(\"end\", () => {\n reject(new Error(\"Insufficient data in the stream.\"))\n })\n\n stream.on(\"error\", error => {\n reject(error)\n })\n })\n}\n", "import { BBContext } from \"@budibase/types\"\n\nexport default async (ctx: BBContext | any, next: any) => {\n // Placeholder for audit log middleware\n return next()\n}\n", "import { doInTenant } from \"../context\"\nimport { getTenantIDFromCtx } from \"../tenancy\"\nimport { buildMatcherRegex, matches } from \"./matchers\"\nimport { Header } from \"../constants\"\nimport {\n BBContext,\n EndpointMatcher,\n GetTenantIdOptions,\n TenantResolutionStrategy,\n} from \"@budibase/types\"\n\nexport default function (\n allowQueryStringPatterns: EndpointMatcher[],\n noTenancyPatterns: EndpointMatcher[],\n opts: { noTenancyRequired?: boolean } = { noTenancyRequired: false }\n) {\n const allowQsOptions = buildMatcherRegex(allowQueryStringPatterns)\n const noTenancyOptions = buildMatcherRegex(noTenancyPatterns)\n\n return async function (ctx: BBContext | any, next: any) {\n const allowNoTenant =\n opts.noTenancyRequired || !!matches(ctx, noTenancyOptions)\n const tenantOpts: GetTenantIdOptions = {\n allowNoTenant,\n }\n\n const allowQs = !!matches(ctx, allowQsOptions)\n if (!allowQs) {\n tenantOpts.excludeStrategies = [TenantResolutionStrategy.QUERY]\n }\n\n const tenantId = getTenantIDFromCtx(ctx, tenantOpts)\n ctx.set(Header.TENANT_ID, tenantId as string)\n return doInTenant(tenantId, next)\n }\n}\n", "import { Header } from \"../constants\"\nimport { BBContext } from \"@budibase/types\"\nimport { isValidInternalAPIKey } from \"../utils\"\n\n/**\n * API Key only endpoint.\n */\nexport default async (ctx: BBContext, next: any) => {\n const apiKey = ctx.request.headers[Header.API_KEY]\n if (!apiKey) {\n ctx.throw(403, \"Unauthorized\")\n }\n\n if (Array.isArray(apiKey)) {\n ctx.throw(403, \"Unauthorized\")\n }\n\n if (!isValidInternalAPIKey(apiKey)) {\n ctx.throw(403, \"Unauthorized\")\n }\n\n return next()\n}\n", "import { Header } from \"../constants\"\nimport { buildMatcherRegex, matches } from \"./matchers\"\nimport { BBContext, EndpointMatcher } from \"@budibase/types\"\n\n/**\n * GET, HEAD and OPTIONS methods are considered safe operations\n *\n * POST, PUT, PATCH, and DELETE methods, being state changing verbs,\n * should have a CSRF token attached to the request\n */\nconst EXCLUDED_METHODS = [\"GET\", \"HEAD\", \"OPTIONS\"]\n\n/**\n * There are only three content type values that can be used in cross 
domain requests.\n * If any other value is used, e.g. application/json, the browser will first make a OPTIONS\n * request which will be protected by CORS.\n */\nconst INCLUDED_CONTENT_TYPES = [\n \"application/x-www-form-urlencoded\",\n \"multipart/form-data\",\n \"text/plain\",\n]\n\n/**\n * Validate the CSRF token generated aganst the user session.\n * Compare the token with the x-csrf-token header.\n *\n * If the token is not found within the request or the value provided\n * does not match the value within the user session, the request is rejected.\n *\n * CSRF protection provided using the 'Synchronizer Token Pattern'\n * https://cheatsheetseries.owasp.org/cheatsheets/Cross-Site_Request_Forgery_Prevention_Cheat_Sheet.html#synchronizer-token-pattern\n *\n */\nexport default function (\n opts: { noCsrfPatterns: EndpointMatcher[] } = { noCsrfPatterns: [] }\n) {\n const noCsrfOptions = buildMatcherRegex(opts.noCsrfPatterns)\n return async (ctx: BBContext | any, next: any) => {\n // don't apply for excluded paths\n const found = matches(ctx, noCsrfOptions)\n if (found) {\n return next()\n }\n\n // don't apply for the excluded http methods\n if (EXCLUDED_METHODS.indexOf(ctx.method) !== -1) {\n return next()\n }\n\n // don't apply when the content type isn't supported\n let contentType = ctx.get(\"content-type\")\n ? ctx.get(\"content-type\").toLowerCase()\n : \"\"\n if (\n !INCLUDED_CONTENT_TYPES.filter(type => contentType.includes(type)).length\n ) {\n return next()\n }\n\n // don't apply csrf when the internal api key has been used\n if (ctx.internal) {\n return next()\n }\n\n // apply csrf when there is a token in the session (new logins)\n // in future there should be a hard requirement that the token is present\n const userToken = ctx.user?.csrfToken\n if (!userToken) {\n return next()\n }\n\n // reject if no token in request or mismatch\n const requestToken = ctx.get(Header.CSRF_TOKEN)\n if (!requestToken || requestToken !== userToken) {\n ctx.throw(403, \"Invalid CSRF token\")\n }\n\n return next()\n }\n}\n", "import { UserCtx } from \"@budibase/types\"\nimport { isAdmin } from \"../users\"\n\nexport default async (ctx: UserCtx, next: any) => {\n if (!ctx.internal && !isAdmin(ctx.user)) {\n ctx.throw(403, \"Admin user only endpoint.\")\n }\n return next()\n}\n", "import { UserCtx } from \"@budibase/types\"\nimport { isBuilder, isAdmin, hasBuilderPermissions } from \"../users\"\nimport { getAppId } from \"../context\"\nimport env from \"../environment\"\n\nexport default async (ctx: UserCtx, next: any) => {\n const appId = getAppId()\n const builderFn =\n env.isWorker() || !appId\n ? hasBuilderPermissions\n : env.isApps()\n ? isBuilder\n : undefined\n if (!builderFn) {\n throw new Error(\"Service name unknown - middleware inactive.\")\n }\n if (!ctx.internal && !builderFn(ctx.user, appId) && !isAdmin(ctx.user)) {\n ctx.throw(403, \"Admin/Builder user only endpoint.\")\n }\n return next()\n}\n", "import { UserCtx } from \"@budibase/types\"\nimport { isBuilder, hasBuilderPermissions } from \"../users\"\nimport { getAppId } from \"../context\"\nimport env from \"../environment\"\n\nexport default async (ctx: UserCtx, next: any) => {\n const appId = getAppId()\n const builderFn =\n env.isWorker() || !appId\n ? hasBuilderPermissions\n : env.isApps()\n ? 
isBuilder\n : undefined\n if (!builderFn) {\n throw new Error(\"Service name unknown - middleware inactive.\")\n }\n if (!ctx.internal && !builderFn(ctx.user, appId)) {\n ctx.throw(403, \"Builder user only endpoint.\")\n }\n return next()\n}\n", "import env from \"../../environment\"\nimport { logger } from \"./logger\"\nimport { IncomingMessage } from \"http\"\n\nconst pino = require(\"koa-pino-logger\")\n\nimport { Options } from \"pino-http\"\nimport { Ctx } from \"@budibase/types\"\n\nconst correlator = require(\"correlation-id\")\n\nexport function pinoSettings(): Options {\n return {\n logger,\n genReqId: correlator.getId,\n autoLogging: {\n ignore: (req: IncomingMessage) => !!req.url?.includes(\"/health\"),\n },\n serializers: {\n req: req => {\n return {\n method: req.method,\n url: req.url,\n correlationId: req.id,\n }\n },\n res: res => {\n return {\n status: res.statusCode,\n }\n },\n },\n }\n}\n\nfunction getMiddleware() {\n if (env.HTTP_LOGGING) {\n return pino(pinoSettings())\n } else {\n return (ctx: Ctx, next: any) => {\n return next()\n }\n }\n}\n\nconst pinoMiddleware = getMiddleware()\n\nexport default pinoMiddleware\n", "import { Header } from \"../../constants\"\nimport { v4 as uuid } from \"uuid\"\n\nconst correlator = require(\"correlation-id\")\n\nconst correlation = (ctx: any, next: any) => {\n // use the provided correlation id header if present\n let correlationId = ctx.headers[Header.CORRELATION_ID]\n if (!correlationId) {\n correlationId = uuid()\n }\n\n return correlator.withId(correlationId, () => {\n return next()\n })\n}\n\nexport default correlation\n", "import { APIError } from \"@budibase/types\"\nimport * as errors from \"../errors\"\n\nexport async function errorHandling(ctx: any, next: any) {\n try {\n await next()\n } catch (err: any) {\n const status = err.status || err.statusCode || 500\n ctx.status = status\n\n if (status >= 400 && status < 500) {\n console.warn(err)\n } else {\n console.error(err)\n }\n\n const error = errors.getPublicError(err)\n const body: APIError = {\n message: err.message,\n status: status,\n validationErrors: err.validation,\n error,\n }\n\n ctx.body = body\n }\n}\n\nexport default errorHandling\n", "import { Ctx } from \"@budibase/types\"\n\n/**\n * Expects a standard \"query\" query string property which is the JSON body\n * of the request, which has to be sent via query string due to the requirement\n * of making an endpoint a GET request e.g. downloading a file stream.\n */\nexport default function (ctx: Ctx, next: any) {\n const queryString = ctx.request.query?.query as string | undefined\n if (ctx.request.method.toLowerCase() !== \"get\") {\n ctx.throw(\n 500,\n \"Query to download middleware can only be used for get requests.\"\n )\n }\n if (!queryString) {\n return next()\n }\n const decoded = decodeURIComponent(queryString)\n let json\n try {\n json = JSON.parse(decoded)\n } catch (err) {\n return next()\n }\n ctx.request.body = json\n return next()\n}\n", "import Joi, { ObjectSchema } from \"joi\"\nimport { BBContext } from \"@budibase/types\"\n\nfunction validate(\n schema: Joi.ObjectSchema | Joi.ArraySchema,\n property: string\n) {\n // Return a Koa middleware function\n return (ctx: BBContext, next: any) => {\n if (!schema) {\n return next()\n }\n let params = null\n // @ts-ignore\n let reqProp = ctx.request?.[property]\n if (ctx[property] != null) {\n params = ctx[property]\n } else if (reqProp != null) {\n params = reqProp\n }\n\n // not all schemas have the append property e.g. 
array schemas\n if ((schema as Joi.ObjectSchema).append) {\n schema = (schema as Joi.ObjectSchema).append({\n createdAt: Joi.any().optional(),\n updatedAt: Joi.any().optional(),\n })\n }\n\n const { error } = schema.validate(params)\n if (error) {\n ctx.throw(400, `Invalid ${property} - ${error.message}`)\n return\n }\n return next()\n }\n}\n\nexport function body(schema: Joi.ObjectSchema | Joi.ArraySchema) {\n return validate(schema, \"body\")\n}\n\nexport function params(schema: Joi.ObjectSchema | Joi.ArraySchema) {\n return validate(schema, \"params\")\n}\n", "export * from \"./utils\"\n", "import {\n DatasourceFieldType,\n QueryType,\n PluginType,\n AutomationStepType,\n AutomationStepIdArray,\n AutomationIOType,\n AutomationCustomIOType,\n DatasourceFeature,\n} from \"@budibase/types\"\nimport joi from \"joi\"\n\nconst DATASOURCE_TYPES = [\n \"Relational\",\n \"Non-relational\",\n \"Spreadsheet\",\n \"Object store\",\n \"Graph\",\n \"API\",\n]\n\nfunction runJoi(validator: joi.Schema, schema: any) {\n const { error } = validator.validate(schema)\n if (error) {\n throw error\n }\n}\n\nfunction validateComponent(schema: any) {\n const validator = joi.object({\n type: joi.string().allow(PluginType.COMPONENT).required(),\n metadata: joi.object().unknown(true).required(),\n hash: joi.string().optional(),\n version: joi.string().optional(),\n schema: joi\n .object({\n name: joi.string().required(),\n settings: joi.array().items(joi.object().unknown(true)).required(),\n })\n .unknown(true),\n })\n runJoi(validator, schema)\n}\n\nfunction validateDatasource(schema: any) {\n const fieldValidator = joi.object({\n type: joi\n .string()\n .allow(...Object.values(DatasourceFieldType))\n .required(),\n required: joi.boolean().required(),\n default: joi.any(),\n display: joi.string(),\n })\n\n const queryValidator = joi\n .object({\n type: joi.string().allow(...Object.values(QueryType)),\n readable: joi.boolean(),\n fields: joi.object().pattern(joi.string(), fieldValidator),\n })\n .required()\n\n const validator = joi.object({\n type: joi.string().allow(PluginType.DATASOURCE).required(),\n metadata: joi.object().unknown(true).required(),\n hash: joi.string().optional(),\n version: joi.string().optional(),\n schema: joi.object({\n docs: joi.string(),\n plus: joi.boolean().optional(),\n isSQL: joi.boolean().optional(),\n auth: joi\n .object({\n type: joi.string().required(),\n })\n .optional(),\n features: joi\n .object(\n Object.fromEntries(\n Object.values(DatasourceFeature).map(key => [\n key,\n joi.boolean().optional(),\n ])\n )\n )\n .optional(),\n relationships: joi.boolean().optional(),\n description: joi.string().required(),\n friendlyName: joi.string().required(),\n type: joi.string().allow(...DATASOURCE_TYPES),\n datasource: joi.object().pattern(joi.string(), fieldValidator).required(),\n query: joi\n .object()\n .pattern(joi.string(), queryValidator)\n .unknown(true)\n .required(),\n extra: joi.object().pattern(\n joi.string(),\n joi.object({\n type: joi.string().required(),\n displayName: joi.string().required(),\n required: joi.boolean(),\n data: joi.object(),\n })\n ),\n }),\n })\n runJoi(validator, schema)\n}\n\nfunction validateAutomation(schema: any) {\n const basePropsValidator = joi.object().pattern(joi.string(), {\n type: joi\n .string()\n .allow(...Object.values(AutomationIOType))\n .required(),\n customType: joi.string().allow(...Object.values(AutomationCustomIOType)),\n title: joi.string(),\n description: joi.string(),\n enum: joi.array().items(joi.string()),\n pretty: 
joi.array().items(joi.string()),\n })\n const stepSchemaValidator = joi\n .object({\n properties: basePropsValidator,\n required: joi.array().items(joi.string()),\n })\n .concat(basePropsValidator)\n .required()\n const validator = joi.object({\n type: joi.string().allow(PluginType.AUTOMATION).required(),\n metadata: joi.object().unknown(true).required(),\n hash: joi.string().optional(),\n version: joi.string().optional(),\n schema: joi.object({\n name: joi.string().required(),\n tagline: joi.string().required(),\n icon: joi.string().required(),\n description: joi.string().required(),\n type: joi\n .string()\n .allow(AutomationStepType.ACTION, AutomationStepType.LOGIC)\n .required(),\n stepId: joi\n .string()\n .disallow(...AutomationStepIdArray)\n .required(),\n inputs: joi.object().optional(),\n schema: joi\n .object({\n inputs: stepSchemaValidator,\n outputs: stepSchemaValidator,\n })\n .required(),\n }),\n })\n runJoi(validator, schema)\n}\n\nexport function validate(schema: any) {\n switch (schema?.type) {\n case PluginType.COMPONENT:\n validateComponent(schema)\n break\n case PluginType.DATASOURCE:\n validateDatasource(schema)\n break\n case PluginType.AUTOMATION:\n validateAutomation(schema)\n break\n default:\n throw new Error(`Unknown plugin type - check schema.json: ${schema.type}`)\n }\n}\n", "// Mimic the outer package export for usage in index.ts\n// The outer exports can't be used as they now reference dist directly\nexport { default as Client } from \"./redis\"\nexport * as utils from \"./utils\"\nexport * as clients from \"./init\"\nexport * as locks from \"./redlockImpl\"\n", "export * from \"./blacklist\"\n", "import dns from \"dns\"\nimport net from \"net\"\nimport env from \"../environment\"\nimport { promisify } from \"util\"\n\nlet blackListArray: string[] | undefined\nconst performLookup = promisify(dns.lookup)\n\nasync function lookup(address: string): Promise<string[]> {\n if (!net.isIP(address)) {\n // need this for URL parsing simply\n if (!address.startsWith(\"http\")) {\n address = `https://${address}`\n }\n address = new URL(address).hostname\n }\n const addresses = await performLookup(address, {\n all: true,\n })\n return addresses.map(addr => addr.address)\n}\n\nexport async function refreshBlacklist() {\n const blacklist = env.BLACKLIST_IPS\n const list = blacklist?.split(\",\") || []\n let final: string[] = []\n for (let addr of list) {\n const trimmed = addr.trim()\n if (!net.isIP(trimmed)) {\n const addresses = await lookup(trimmed)\n final = final.concat(addresses)\n } else {\n final.push(trimmed)\n }\n }\n blackListArray = final\n}\n\nexport async function isBlacklisted(address: string): Promise<boolean> {\n if (!blackListArray) {\n await refreshBlacklist()\n }\n if (blackListArray?.length === 0) {\n return false\n }\n // no need for DNS\n let ips: string[]\n if (!net.isIP(address)) {\n ips = await lookup(address)\n } else {\n ips = [address]\n }\n return !!blackListArray?.find(addr => ips.includes(addr))\n}\n", "import { asyncEventQueue, init as initQueue } from \"../events/asyncEvents\"\nimport {\n ProcessorMap,\n default as DocumentUpdateProcessor,\n} from \"../events/processors/async/DocumentUpdateProcessor\"\n\nlet processingPromise: Promise<void>\nlet documentProcessor: DocumentUpdateProcessor\n\nexport function init(processors: ProcessorMap) {\n if (!asyncEventQueue) {\n initQueue()\n }\n if (!documentProcessor) {\n documentProcessor = new DocumentUpdateProcessor(processors)\n }\n // if not processing in this instance, kick it off\n if 
(!processingPromise) {\n processingPromise = asyncEventQueue.process(async job => {\n const { event, identity, properties, timestamp } = job.data\n await documentProcessor.processEvent(\n event,\n identity,\n properties,\n timestamp\n )\n })\n }\n}\n", "import { EventProcessor } from \"../types\"\nimport { Event, Identity, DocUpdateEvent } from \"@budibase/types\"\nimport { doInTenant } from \"../../../context\"\nimport { getDocumentId } from \"../../documentId\"\nimport { shutdown } from \"../../asyncEvents\"\n\nexport type Processor = (update: DocUpdateEvent) => Promise<void>\nexport type ProcessorMap = { events: Event[]; processor: Processor }[]\n\nexport default class DocumentUpdateProcessor implements EventProcessor {\n processors: ProcessorMap = []\n\n constructor(processors: ProcessorMap) {\n this.processors = processors\n }\n\n async processEvent(\n event: Event,\n identity: Identity,\n properties: any,\n timestamp?: string | number\n ) {\n const tenantId = identity.realTenantId\n const docId = getDocumentId(event, properties)\n if (!tenantId || !docId) {\n return\n }\n for (let { events, processor } of this.processors) {\n if (events.includes(event)) {\n await doInTenant(tenantId, async () => {\n await processor({\n id: docId,\n tenantId,\n })\n })\n }\n }\n }\n\n shutdown() {\n return shutdown()\n }\n}\n", "import {\n Event,\n UserCreatedEvent,\n UserUpdatedEvent,\n UserDeletedEvent,\n UserPermissionAssignedEvent,\n UserPermissionRemovedEvent,\n GroupCreatedEvent,\n GroupUpdatedEvent,\n GroupDeletedEvent,\n GroupUsersAddedEvent,\n GroupUsersDeletedEvent,\n GroupPermissionsEditedEvent,\n} from \"@budibase/types\"\n\nconst getEventProperties: Record<\n string,\n (properties: any) => string | undefined\n> = {\n [Event.USER_CREATED]: (properties: UserCreatedEvent) => properties.userId,\n [Event.USER_UPDATED]: (properties: UserUpdatedEvent) => properties.userId,\n [Event.USER_DELETED]: (properties: UserDeletedEvent) => properties.userId,\n [Event.USER_PERMISSION_ADMIN_ASSIGNED]: (\n properties: UserPermissionAssignedEvent\n ) => properties.userId,\n [Event.USER_PERMISSION_ADMIN_REMOVED]: (\n properties: UserPermissionRemovedEvent\n ) => properties.userId,\n [Event.USER_PERMISSION_BUILDER_ASSIGNED]: (\n properties: UserPermissionAssignedEvent\n ) => properties.userId,\n [Event.USER_PERMISSION_BUILDER_REMOVED]: (\n properties: UserPermissionRemovedEvent\n ) => properties.userId,\n [Event.USER_GROUP_CREATED]: (properties: GroupCreatedEvent) =>\n properties.groupId,\n [Event.USER_GROUP_UPDATED]: (properties: GroupUpdatedEvent) =>\n properties.groupId,\n [Event.USER_GROUP_DELETED]: (properties: GroupDeletedEvent) =>\n properties.groupId,\n [Event.USER_GROUP_USERS_ADDED]: (properties: GroupUsersAddedEvent) =>\n properties.groupId,\n [Event.USER_GROUP_USERS_REMOVED]: (properties: GroupUsersDeletedEvent) =>\n properties.groupId,\n [Event.USER_GROUP_PERMISSIONS_EDITED]: (\n properties: GroupPermissionsEditedEvent\n ) => properties.groupId,\n}\n\nexport function getDocumentId(event: Event, properties: any) {\n const extractor = getEventProperties[event]\n if (!extractor) {\n throw new Error(\"Event does not have a method of document ID extraction\")\n }\n return extractor(properties)\n}\n"],
4
+ "sourcesContent": ["import { Automation, AutomationMetadata } from \"../../documents\"\nimport { Job } from \"bull\"\n\nexport interface AutomationDataEvent {\n appId?: string\n metadata?: AutomationMetadata\n automation?: Automation\n timeout?: number\n}\n\nexport interface AutomationData {\n event: AutomationDataEvent\n automation: Automation\n}\n\nexport type AutomationJob = Job<AutomationData>\n", "export enum Hosting {\n CLOUD = \"cloud\",\n SELF = \"self\",\n}\n", "import { User, Account } from \"../documents\"\nimport { IdentityType, HostInfo } from \"./events\"\n\nexport interface BaseContext {\n _id: string\n type: IdentityType\n tenantId?: string\n}\n\nexport interface AccountUserContext extends BaseContext {\n tenantId: string\n account: Account\n}\n\nexport interface UserContext extends BaseContext, User {\n _id: string\n tenantId: string\n account?: Account\n hostInfo: HostInfo\n}\n\nexport type IdentityContext = BaseContext | AccountUserContext | UserContext\n", "import { BaseEvent } from \"./event\"\n\nexport interface AppCreatedEvent extends BaseEvent {\n appId: string\n version: string\n audited: {\n name: string\n }\n}\n\nexport interface AppUpdatedEvent extends BaseEvent {\n appId: string\n version: string\n audited: {\n name: string\n }\n}\n\nexport interface AppDeletedEvent extends BaseEvent {\n appId: string\n audited: {\n name: string\n }\n}\n\nexport interface AppPublishedEvent extends BaseEvent {\n appId: string\n audited: {\n name: string\n }\n}\n\nexport interface AppUnpublishedEvent extends BaseEvent {\n appId: string\n audited: {\n name: string\n }\n}\n\nexport interface AppFileImportedEvent extends BaseEvent {\n appId: string\n audited: {\n name: string\n }\n}\n\nexport interface AppTemplateImportedEvent extends BaseEvent {\n appId: string\n templateKey: string\n audited: {\n name: string\n }\n}\n\nexport interface AppVersionUpdatedEvent extends BaseEvent {\n appId: string\n currentVersion: string\n updatedToVersion: string\n audited: {\n name: string\n }\n}\n\nexport interface AppVersionRevertedEvent extends BaseEvent {\n appId: string\n currentVersion: string\n revertedToVersion: string\n audited: {\n name: string\n }\n}\n\nexport interface AppRevertedEvent extends BaseEvent {\n appId: string\n audited: {\n name: string\n }\n}\n\nexport interface AppExportedEvent extends BaseEvent {\n appId: string\n audited: {\n name: string\n }\n}\n", "import { BaseEvent } from \"./event\"\nimport { ConfigType } from \"../../documents\"\n\nexport type LoginSource = \"local\" | \"google\" | \"oidc\" | \"google-internal\"\nexport type SSOType = ConfigType.OIDC | ConfigType.GOOGLE\n\nexport interface LoginEvent extends BaseEvent {\n userId: string\n source: LoginSource\n audited: {\n email: string\n }\n}\n\nexport interface LogoutEvent extends BaseEvent {\n userId: string\n audited: {\n email?: string\n }\n}\n\nexport interface SSOCreatedEvent extends BaseEvent {\n type: SSOType\n}\n\nexport interface SSOUpdatedEvent extends BaseEvent {\n type: SSOType\n}\n\nexport interface SSOActivatedEvent extends BaseEvent {\n type: SSOType\n}\n\nexport interface SSODeactivatedEvent extends BaseEvent {\n type: SSOType\n}\n", "import { BaseEvent } from \"./event\"\n\nexport interface AutomationCreatedEvent extends BaseEvent {\n appId: string\n automationId: string\n triggerId: string\n triggerType: string\n audited: {\n name: string\n }\n}\n\nexport interface AutomationTriggerUpdatedEvent extends BaseEvent {\n appId: string\n automationId: string\n triggerId: string\n triggerType: 
string\n}\n\nexport interface AutomationDeletedEvent extends BaseEvent {\n appId: string\n automationId: string\n triggerId: string\n triggerType: string\n audited: {\n name: string\n }\n}\n\nexport interface AutomationTestedEvent extends BaseEvent {\n appId: string\n automationId: string\n triggerId: string\n triggerType: string\n}\n\nexport interface AutomationStepCreatedEvent extends BaseEvent {\n appId: string\n automationId: string\n triggerId: string\n triggerType: string\n stepId: string\n stepType: string\n audited: {\n name: string\n }\n}\n\nexport interface AutomationStepDeletedEvent extends BaseEvent {\n appId: string\n automationId: string\n triggerId: string\n triggerType: string\n stepId: string\n stepType: string\n audited: {\n name: string\n }\n}\n\nexport interface AutomationsRunEvent extends BaseEvent {\n count: number\n}\n", "import { BaseEvent } from \"./event\"\n\nexport interface SMTPCreatedEvent extends BaseEvent {}\n\nexport interface SMTPUpdatedEvent extends BaseEvent {}\n", "import { BaseEvent } from \"./event\"\n\nexport interface DatasourceCreatedEvent extends BaseEvent {\n datasourceId: string\n source: string\n custom: boolean\n}\n\nexport interface DatasourceUpdatedEvent extends BaseEvent {\n datasourceId: string\n source: string\n custom: boolean\n}\n\nexport interface DatasourceDeletedEvent extends BaseEvent {\n datasourceId: string\n source: string\n custom: boolean\n}\n", "import { Hosting } from \"../hosting\"\nimport { Group, Identity } from \"./identification\"\n\nexport enum Event {\n // USER\n USER_CREATED = \"user:created\",\n USER_UPDATED = \"user:updated\",\n USER_DELETED = \"user:deleted\",\n\n // USER / ONBOARDING\n USER_ONBOARDING_COMPLETE = \"user:onboarding:complete\",\n\n // USER / PERMISSIONS\n USER_PERMISSION_ADMIN_ASSIGNED = \"user:admin:assigned\",\n USER_PERMISSION_ADMIN_REMOVED = \"user:admin:removed\",\n USER_PERMISSION_BUILDER_ASSIGNED = \"user:builder:assigned\",\n USER_PERMISSION_BUILDER_REMOVED = \"user:builder:removed\",\n\n // USER / INVITE\n USER_INVITED = \"user:invited\",\n USER_INVITED_ACCEPTED = \"user:invite:accepted\",\n\n // USER / PASSWORD\n USER_PASSWORD_FORCE_RESET = \"user:password:force:reset\",\n USER_PASSWORD_UPDATED = \"user:password:updated\",\n USER_PASSWORD_RESET_REQUESTED = \"user:password:reset:requested\",\n USER_PASSWORD_RESET = \"user:password:reset\",\n\n // USER / COLLABORATION\n USER_DATA_COLLABORATION = \"user:data:collaboration\",\n\n // EMAIL\n EMAIL_SMTP_CREATED = \"email:smtp:created\",\n EMAIL_SMTP_UPDATED = \"email:smtp:updated\",\n\n // AUTH\n AUTH_SSO_CREATED = \"auth:sso:created\",\n AUTH_SSO_UPDATED = \"auth:sso:updated\",\n AUTH_SSO_ACTIVATED = \"auth:sso:activated\",\n AUTH_SSO_DEACTIVATED = \"auth:sso:deactivated\",\n AUTH_LOGIN = \"auth:login\",\n AUTH_LOGOUT = \"auth:logout\",\n\n // ORG\n ORG_NAME_UPDATED = \"org:info:name:updated\",\n ORG_LOGO_UPDATED = \"org:info:logo:updated\",\n ORG_PLATFORM_URL_UPDATED = \"org:platformurl:updated\",\n\n // INSTALLATION\n INSTALLATION_VERSION_CHECKED = \"installation:version:checked\",\n INSTALLATION_VERSION_UPGRADED = \"installation:version:upgraded\",\n INSTALLATION_VERSION_DOWNGRADED = \"installation:version:downgraded\",\n INSTALLATION_FIRST_STARTUP = \"installation:firstStartup\",\n\n // ORG / ANALYTICS\n ANALYTICS_OPT_OUT = \"analytics:opt:out\",\n ANALYTICS_OPT_IN = \"analytics:opt:in\",\n\n // APP\n APP_CREATED = \"app:created\",\n APP_UPDATED = \"app:updated\",\n APP_DELETED = \"app:deleted\",\n APP_PUBLISHED = \"app:published\",\n 
APP_UNPUBLISHED = \"app:unpublished\",\n APP_TEMPLATE_IMPORTED = \"app:template:imported\",\n APP_FILE_IMPORTED = \"app:file:imported\",\n APP_VERSION_UPDATED = \"app:version:updated\",\n APP_VERSION_REVERTED = \"app:version:reverted\",\n APP_REVERTED = \"app:reverted\",\n APP_EXPORTED = \"app:exported\",\n\n // ROLE\n ROLE_CREATED = \"role:created\",\n ROLE_UPDATED = \"role:updated\",\n ROLE_DELETED = \"role:deleted\",\n ROLE_ASSIGNED = \"role:assigned\",\n ROLE_UNASSIGNED = \"role:unassigned\",\n\n // SERVE\n SERVED_BUILDER = \"served:builder\",\n SERVED_APP = \"served:app\",\n SERVED_APP_PREVIEW = \"served:app:preview\",\n\n // DATASOURCE\n DATASOURCE_CREATED = \"datasource:created\",\n DATASOURCE_UPDATED = \"datasource:updated\",\n DATASOURCE_DELETED = \"datasource:deleted\",\n\n // QUERY\n QUERY_CREATED = \"query:created\",\n QUERY_UPDATED = \"query:updated\",\n QUERY_DELETED = \"query:deleted\",\n QUERY_IMPORT = \"query:import\",\n QUERIES_RUN = \"queries:run\",\n QUERY_PREVIEWED = \"query:previewed\",\n\n // TABLE\n TABLE_CREATED = \"table:created\",\n TABLE_UPDATED = \"table:updated\",\n TABLE_DELETED = \"table:deleted\",\n TABLE_EXPORTED = \"table:exported\",\n TABLE_IMPORTED = \"table:imported\",\n TABLE_DATA_IMPORTED = \"table:data:imported\",\n\n // VIEW\n VIEW_CREATED = \"view:created\",\n VIEW_UPDATED = \"view:updated\",\n VIEW_DELETED = \"view:deleted\",\n VIEW_EXPORTED = \"view:exported\",\n VIEW_FILTER_CREATED = \"view:filter:created\",\n VIEW_FILTER_UPDATED = \"view:filter:updated\",\n VIEW_FILTER_DELETED = \"view:filter:deleted\",\n VIEW_CALCULATION_CREATED = \"view:calculation:created\",\n VIEW_CALCULATION_UPDATED = \"view:calculation:updated\",\n VIEW_CALCULATION_DELETED = \"view:calculation:deleted\",\n\n // ROWS\n ROWS_CREATED = \"rows:created\",\n ROWS_IMPORTED = \"rows:imported\",\n\n // COMPONENT\n COMPONENT_CREATED = \"component:created\",\n COMPONENT_DELETED = \"component:deleted\",\n\n // SCREEN\n SCREEN_CREATED = \"screen:created\",\n SCREEN_DELETED = \"screen:deleted\",\n\n // LAYOUT\n LAYOUT_CREATED = \"layout:created\",\n LAYOUT_DELETED = \"layout:deleted\",\n\n // AUTOMATION\n AUTOMATION_CREATED = \"automation:created\",\n AUTOMATION_DELETED = \"automation:deleted\",\n AUTOMATION_TESTED = \"automation:tested\",\n AUTOMATIONS_RUN = \"automations:run\",\n AUTOMATION_STEP_CREATED = \"automation:step:created\",\n AUTOMATION_STEP_DELETED = \"automation:step:deleted\",\n AUTOMATION_TRIGGER_UPDATED = \"automation:trigger:updated\",\n\n // LICENSE\n LICENSE_PLAN_CHANGED = \"license:plan:changed\",\n LICENSE_ACTIVATED = \"license:activated\",\n LICENSE_PAYMENT_FAILED = \"license:payment:failed\",\n LICENSE_PAYMENT_RECOVERED = \"license:payment:recovered\",\n LICENSE_CHECKOUT_OPENED = \"license:checkout:opened\",\n LICENSE_CHECKOUT_SUCCESS = \"license:checkout:success\",\n LICENSE_PORTAL_OPENED = \"license:portal:opened\",\n\n // ACCOUNT\n ACCOUNT_CREATED = \"account:created\",\n ACCOUNT_DELETED = \"account:deleted\",\n ACCOUNT_VERIFIED = \"account:verified\",\n\n // BACKFILL\n APP_BACKFILL_SUCCEEDED = \"app:backfill:succeeded\",\n APP_BACKFILL_FAILED = \"app:backfill:failed\",\n TENANT_BACKFILL_SUCCEEDED = \"tenant:backfill:succeeded\",\n TENANT_BACKFILL_FAILED = \"tenant:backfill:failed\",\n INSTALLATION_BACKFILL_SUCCEEDED = \"installation:backfill:succeeded\",\n INSTALLATION_BACKFILL_FAILED = \"installation:backfill:failed\",\n\n // USER\n USER_GROUP_CREATED = \"user_group:created\",\n USER_GROUP_UPDATED = \"user_group:updated\",\n USER_GROUP_DELETED = 
\"user_group:deleted\",\n USER_GROUP_USERS_ADDED = \"user_group:user_added\",\n USER_GROUP_USERS_REMOVED = \"user_group:users_deleted\",\n USER_GROUP_PERMISSIONS_EDITED = \"user_group:permissions_edited\",\n USER_GROUP_ONBOARDING = \"user_group:onboarding_added\",\n\n // PLUGIN\n PLUGIN_INIT = \"plugin:init\",\n PLUGIN_IMPORTED = \"plugin:imported\",\n PLUGIN_DELETED = \"plugin:deleted\",\n\n // BACKUP\n APP_BACKUP_RESTORED = \"app:backup:restored\",\n APP_BACKUP_TRIGGERED = \"app:backup:triggered\",\n\n // ENVIRONMENT VARIABLE\n ENVIRONMENT_VARIABLE_CREATED = \"environment_variable:created\",\n ENVIRONMENT_VARIABLE_DELETED = \"environment_variable:deleted\",\n ENVIRONMENT_VARIABLE_UPGRADE_PANEL_OPENED = \"environment_variable:upgrade_panel_opened\",\n\n // AUDIT LOG\n AUDIT_LOGS_FILTERED = \"audit_log:filtered\",\n AUDIT_LOGS_DOWNLOADED = \"audit_log:downloaded\",\n}\n\nexport const UserGroupSyncEvents: Event[] = [\n Event.USER_CREATED,\n Event.USER_UPDATED,\n Event.USER_DELETED,\n Event.USER_PERMISSION_ADMIN_ASSIGNED,\n Event.USER_PERMISSION_ADMIN_REMOVED,\n Event.USER_PERMISSION_BUILDER_ASSIGNED,\n Event.USER_PERMISSION_BUILDER_REMOVED,\n Event.USER_GROUP_CREATED,\n Event.USER_GROUP_UPDATED,\n Event.USER_GROUP_DELETED,\n Event.USER_GROUP_USERS_ADDED,\n Event.USER_GROUP_USERS_REMOVED,\n Event.USER_GROUP_PERMISSIONS_EDITED,\n]\n\nexport const AsyncEvents: Event[] = [...UserGroupSyncEvents]\n\n// all events that are not audited have been added to this record as undefined, this means\n// that Typescript can protect us against new events being added and auditing of those\n// events not being considered. This might be a little ugly, but provides a level of\n// Typescript build protection for the audit log feature, any new event also needs to be\n// added to this map, during which the developer will need to consider if it should be\n// a user facing event or not.\nexport const AuditedEventFriendlyName: Record<Event, string | undefined> = {\n // USER\n [Event.USER_CREATED]: `User \"{{ email }}\" created{{#if viaScim}} via SCIM{{/if}}`,\n [Event.USER_UPDATED]: `User \"{{ email }}\" updated{{#if viaScim}} via SCIM{{/if}}`,\n [Event.USER_DELETED]: `User \"{{ email }}\" deleted{{#if viaScim}} via SCIM{{/if}}`,\n [Event.USER_PERMISSION_ADMIN_ASSIGNED]: `User \"{{ email }}\" admin role assigned`,\n [Event.USER_PERMISSION_ADMIN_REMOVED]: `User \"{{ email }}\" admin role removed`,\n [Event.USER_PERMISSION_BUILDER_ASSIGNED]: `User \"{{ email }}\" builder role assigned`,\n [Event.USER_PERMISSION_BUILDER_REMOVED]: `User \"{{ email }}\" builder role removed`,\n [Event.USER_INVITED]: `User \"{{ email }}\" invited`,\n [Event.USER_INVITED_ACCEPTED]: `User \"{{ email }}\" accepted invite`,\n [Event.USER_PASSWORD_UPDATED]: `User \"{{ email }}\" password updated`,\n [Event.USER_PASSWORD_RESET_REQUESTED]: `User \"{{ email }}\" password reset requested`,\n [Event.USER_PASSWORD_RESET]: `User \"{{ email }}\" password reset`,\n [Event.USER_GROUP_CREATED]: `User group \"{{ name }}\" created{{#if viaScim}} via SCIM{{/if}}`,\n [Event.USER_GROUP_UPDATED]: `User group \"{{ name }}\" updated{{#if viaScim}} via SCIM{{/if}}`,\n [Event.USER_GROUP_DELETED]: `User group \"{{ name }}\" deleted{{#if viaScim}} via SCIM{{/if}}`,\n [Event.USER_GROUP_USERS_ADDED]: `User group \"{{ name }}\" {{ count }} users added{{#if viaScim}} via SCIM{{/if}}`,\n [Event.USER_GROUP_USERS_REMOVED]: `User group \"{{ name }}\" {{ count }} users removed{{#if viaScim}} via SCIM{{/if}}`,\n [Event.USER_GROUP_PERMISSIONS_EDITED]: `User group \"{{ name }}\" 
permissions edited`,\n [Event.USER_PASSWORD_FORCE_RESET]: undefined,\n [Event.USER_GROUP_ONBOARDING]: undefined,\n [Event.USER_ONBOARDING_COMPLETE]: undefined,\n [Event.USER_DATA_COLLABORATION]: undefined,\n\n // EMAIL\n [Event.EMAIL_SMTP_CREATED]: `Email configuration created`,\n [Event.EMAIL_SMTP_UPDATED]: `Email configuration updated`,\n\n // AUTH\n [Event.AUTH_SSO_CREATED]: `SSO configuration created`,\n [Event.AUTH_SSO_UPDATED]: `SSO configuration updated`,\n [Event.AUTH_SSO_ACTIVATED]: `SSO configuration activated`,\n [Event.AUTH_SSO_DEACTIVATED]: `SSO configuration deactivated`,\n [Event.AUTH_LOGIN]: `User \"{{ email }}\" logged in`,\n [Event.AUTH_LOGOUT]: `User \"{{ email }}\" logged out`,\n\n // ORG\n [Event.ORG_NAME_UPDATED]: `Organisation name updated`,\n [Event.ORG_LOGO_UPDATED]: `Organisation logo updated`,\n [Event.ORG_PLATFORM_URL_UPDATED]: `Organisation platform URL updated`,\n\n // APP\n [Event.APP_CREATED]: `App \"{{ name }}\" created`,\n [Event.APP_UPDATED]: `App \"{{ name }}\" updated`,\n [Event.APP_DELETED]: `App \"{{ name }}\" deleted`,\n [Event.APP_PUBLISHED]: `App \"{{ name }}\" published`,\n [Event.APP_UNPUBLISHED]: `App \"{{ name }}\" unpublished`,\n [Event.APP_TEMPLATE_IMPORTED]: `App \"{{ name }}\" template imported`,\n [Event.APP_FILE_IMPORTED]: `App \"{{ name }}\" file imported`,\n [Event.APP_VERSION_UPDATED]: `App \"{{ name }}\" version updated`,\n [Event.APP_VERSION_REVERTED]: `App \"{{ name }}\" version reverted`,\n [Event.APP_REVERTED]: `App \"{{ name }}\" reverted`,\n [Event.APP_EXPORTED]: `App \"{{ name }}\" exported`,\n [Event.APP_BACKUP_RESTORED]: `App backup \"{{ name }}\" restored`,\n [Event.APP_BACKUP_TRIGGERED]: `App backup \"{{ name }}\" triggered`,\n\n // DATASOURCE\n [Event.DATASOURCE_CREATED]: `Datasource created`,\n [Event.DATASOURCE_UPDATED]: `Datasource updated`,\n [Event.DATASOURCE_DELETED]: `Datasource deleted`,\n\n // QUERY\n [Event.QUERY_CREATED]: `Query created`,\n [Event.QUERY_UPDATED]: `Query updated`,\n [Event.QUERY_DELETED]: `Query deleted`,\n [Event.QUERY_IMPORT]: `Query import`,\n [Event.QUERIES_RUN]: undefined,\n [Event.QUERY_PREVIEWED]: undefined,\n\n // TABLE\n [Event.TABLE_CREATED]: `Table \"{{ name }}\" created`,\n [Event.TABLE_UPDATED]: `Table \"{{ name }}\" updated`,\n [Event.TABLE_DELETED]: `Table \"{{ name }}\" deleted`,\n [Event.TABLE_EXPORTED]: `Table \"{{ name }}\" exported`,\n [Event.TABLE_IMPORTED]: `Table \"{{ name }}\" imported`,\n [Event.TABLE_DATA_IMPORTED]: `Data imported to table`,\n\n // ROWS\n [Event.ROWS_CREATED]: `Rows created`,\n [Event.ROWS_IMPORTED]: `Rows imported`,\n\n // AUTOMATION\n [Event.AUTOMATION_CREATED]: `Automation \"{{ name }}\" created`,\n [Event.AUTOMATION_DELETED]: `Automation \"{{ name }}\" deleted`,\n [Event.AUTOMATION_STEP_CREATED]: `Automation \"{{ name }}\" step added`,\n [Event.AUTOMATION_STEP_DELETED]: `Automation \"{{ name }}\" step removed`,\n [Event.AUTOMATION_TESTED]: undefined,\n [Event.AUTOMATIONS_RUN]: undefined,\n [Event.AUTOMATION_TRIGGER_UPDATED]: undefined,\n\n // SCREEN\n [Event.SCREEN_CREATED]: `Screen \"{{ name }}\" created`,\n [Event.SCREEN_DELETED]: `Screen \"{{ name }}\" deleted`,\n\n // COMPONENT\n [Event.COMPONENT_CREATED]: `Component created`,\n [Event.COMPONENT_DELETED]: `Component deleted`,\n\n // ENVIRONMENT VARIABLE\n [Event.ENVIRONMENT_VARIABLE_CREATED]: `Environment variable created`,\n [Event.ENVIRONMENT_VARIABLE_DELETED]: `Environment variable deleted`,\n [Event.ENVIRONMENT_VARIABLE_UPGRADE_PANEL_OPENED]: undefined,\n\n // PLUGIN\n 
[Event.PLUGIN_IMPORTED]: `Plugin imported`,\n [Event.PLUGIN_DELETED]: `Plugin deleted`,\n [Event.PLUGIN_INIT]: undefined,\n\n // ROLE - NOT AUDITED\n [Event.ROLE_CREATED]: undefined,\n [Event.ROLE_UPDATED]: undefined,\n [Event.ROLE_DELETED]: undefined,\n [Event.ROLE_ASSIGNED]: undefined,\n [Event.ROLE_UNASSIGNED]: undefined,\n\n // LICENSE - NOT AUDITED\n [Event.LICENSE_PLAN_CHANGED]: undefined,\n [Event.LICENSE_ACTIVATED]: undefined,\n [Event.LICENSE_PAYMENT_FAILED]: undefined,\n [Event.LICENSE_PAYMENT_RECOVERED]: undefined,\n [Event.LICENSE_CHECKOUT_OPENED]: undefined,\n [Event.LICENSE_CHECKOUT_SUCCESS]: undefined,\n [Event.LICENSE_PORTAL_OPENED]: undefined,\n\n // ACCOUNT - NOT AUDITED\n [Event.ACCOUNT_CREATED]: undefined,\n [Event.ACCOUNT_DELETED]: undefined,\n [Event.ACCOUNT_VERIFIED]: undefined,\n\n // BACKFILL - NOT AUDITED\n [Event.APP_BACKFILL_SUCCEEDED]: undefined,\n [Event.APP_BACKFILL_FAILED]: undefined,\n [Event.TENANT_BACKFILL_SUCCEEDED]: undefined,\n [Event.TENANT_BACKFILL_FAILED]: undefined,\n [Event.INSTALLATION_BACKFILL_SUCCEEDED]: undefined,\n [Event.INSTALLATION_BACKFILL_FAILED]: undefined,\n\n // LAYOUT - NOT AUDITED\n [Event.LAYOUT_CREATED]: undefined,\n [Event.LAYOUT_DELETED]: undefined,\n\n // VIEW - NOT AUDITED\n [Event.VIEW_CREATED]: undefined,\n [Event.VIEW_UPDATED]: undefined,\n [Event.VIEW_DELETED]: undefined,\n [Event.VIEW_EXPORTED]: undefined,\n [Event.VIEW_FILTER_CREATED]: undefined,\n [Event.VIEW_FILTER_UPDATED]: undefined,\n [Event.VIEW_FILTER_DELETED]: undefined,\n [Event.VIEW_CALCULATION_CREATED]: undefined,\n [Event.VIEW_CALCULATION_UPDATED]: undefined,\n [Event.VIEW_CALCULATION_DELETED]: undefined,\n\n // SERVED - NOT AUDITED\n [Event.SERVED_BUILDER]: undefined,\n [Event.SERVED_APP]: undefined,\n [Event.SERVED_APP_PREVIEW]: undefined,\n\n // ANALYTICS - NOT AUDITED\n [Event.ANALYTICS_OPT_OUT]: undefined,\n [Event.ANALYTICS_OPT_IN]: undefined,\n\n // INSTALLATION - NOT AUDITED\n [Event.INSTALLATION_VERSION_CHECKED]: undefined,\n [Event.INSTALLATION_VERSION_UPGRADED]: undefined,\n [Event.INSTALLATION_VERSION_DOWNGRADED]: undefined,\n [Event.INSTALLATION_FIRST_STARTUP]: undefined,\n\n // AUDIT LOG - NOT AUDITED\n [Event.AUDIT_LOGS_FILTERED]: undefined,\n [Event.AUDIT_LOGS_DOWNLOADED]: undefined,\n}\n\n// properties added at the final stage of the event pipeline\nexport interface BaseEvent {\n version?: string\n service?: string\n environment?: string\n appId?: string\n installationId?: string\n tenantId?: string\n hosting?: Hosting\n // any props in the audited section will be removed before passing events\n // up out of system (purely for use with auditing)\n audited?: {\n [key: string]: any\n }\n}\n\nexport type TableExportFormat = \"json\" | \"csv\"\n\nexport type DocUpdateEvent = {\n id: string\n tenantId: string\n appId?: string\n}\n\nexport interface EventProcessor {\n processEvent(\n event: Event,\n identity: Identity,\n properties: any,\n timestamp?: string | number\n ): Promise<void>\n identify?(identity: Identity, timestamp?: string | number): Promise<void>\n identifyGroup?(group: Group, timestamp?: string | number): Promise<void>\n shutdown?(): void\n}\n", "import { BaseEvent } from \"./event\"\n\nexport interface LayoutCreatedEvent extends BaseEvent {\n layoutId: string\n}\n\nexport interface LayoutDeletedEvent extends BaseEvent {\n layoutId: string\n}\n", "import { PlanType, PriceDuration } from \"../licensing\"\n\nexport interface LicensePlanChangedEvent {\n accountId: string\n from: PlanType\n to: PlanType\n // may not be on historical 
events\n fromDuration: PriceDuration | undefined\n toDuration: PriceDuration | undefined\n fromQuantity: number | undefined\n toQuantity: number | undefined\n}\n\nexport interface LicenseActivatedEvent {\n accountId: string\n}\n\nexport interface LicenseCheckoutOpenedEvent {\n accountId: string\n}\n\nexport interface LicenseCheckoutSuccessEvent {\n accountId: string\n}\n\nexport interface LicensePortalOpenedEvent {\n accountId: string\n}\n\nexport interface LicensePaymentFailedEvent {\n accountId: string\n}\n\nexport interface LicensePaymentRecoveredEvent {\n accountId: string\n}\n", "import { BaseEvent } from \"./event\"\n\nexport interface VersionCheckedEvent extends BaseEvent {\n currentVersion: string\n}\n\nexport interface VersionChangeEvent extends BaseEvent {\n from: string\n to: string\n}\n", "import { BaseEvent } from \"./event\"\n\nexport interface QueryCreatedEvent extends BaseEvent {\n queryId: string\n datasourceId: string\n source: string\n queryVerb: string\n}\n\nexport interface QueryUpdatedEvent extends BaseEvent {\n queryId: string\n datasourceId: string\n source: string\n queryVerb: string\n}\n\nexport interface QueryDeletedEvent extends BaseEvent {\n queryId: string\n datasourceId: string\n source: string\n queryVerb: string\n}\n\nexport interface QueryImportedEvent extends BaseEvent {\n datasourceId: string\n source: string\n count: number\n importSource: string\n}\n\nexport interface QueryPreviewedEvent extends BaseEvent {\n queryId?: string\n datasourceId: string\n source: string\n queryVerb: string\n}\n\nexport interface QueriesRunEvent extends BaseEvent {\n count: number\n}\n", "import { BaseEvent } from \"./event\"\n\nexport interface RoleCreatedEvent extends BaseEvent {\n roleId: string\n permissionId: string\n inherits?: string\n}\n\nexport interface RoleUpdatedEvent extends BaseEvent {\n roleId: string\n permissionId: string\n inherits?: string\n}\n\nexport interface RoleDeletedEvent extends BaseEvent {\n roleId: string\n permissionId: string\n inherits?: string\n}\n\nexport interface RoleAssignedEvent extends BaseEvent {\n userId: string\n roleId: string\n}\n\nexport interface RoleUnassignedEvent extends BaseEvent {\n userId: string\n roleId: string\n}\n", "import { BaseEvent } from \"./event\"\n\nexport interface RowsImportedEvent extends BaseEvent {\n tableId: string\n count: number\n}\n\nexport interface RowsCreatedEvent extends BaseEvent {\n count: number\n}\n", "import { BaseEvent } from \"./event\"\n\nexport interface ScreenCreatedEvent extends BaseEvent {\n screenId: string\n layoutId?: string\n roleId: string\n audited: {\n name: string\n }\n}\n\nexport interface ScreenDeletedEvent extends BaseEvent {\n screenId: string\n layoutId?: string\n roleId: string\n audited: {\n name: string\n }\n}\n", "import { BaseEvent } from \"./event\"\n\nexport interface BuilderServedEvent extends BaseEvent {\n timezone: string\n}\n\nexport interface AppServedEvent extends BaseEvent {\n appVersion: string\n timezone: string\n embed?: boolean\n}\n\nexport interface AppPreviewServedEvent extends BaseEvent {\n appVersion: string\n timezone: string\n}\n", "import { BaseEvent, TableExportFormat } from \"./event\"\n\nexport interface TableCreatedEvent extends BaseEvent {\n tableId: string\n audited: {\n name: string\n }\n}\n\nexport interface TableUpdatedEvent extends BaseEvent {\n tableId: string\n audited: {\n name: string\n }\n}\n\nexport interface TableDeletedEvent extends BaseEvent {\n tableId: string\n audited: {\n name: string\n }\n}\n\nexport interface 
TableExportedEvent extends BaseEvent {\n tableId: string\n format: TableExportFormat\n audited: {\n name: string\n }\n}\n\nexport interface TableImportedEvent extends BaseEvent {\n tableId: string\n audited: {\n name: string\n }\n}\n", "import { BaseEvent } from \"./event\"\n\nexport interface UserCreatedEvent extends BaseEvent {\n userId: string\n viaScim?: boolean\n audited: {\n email: string\n }\n}\n\nexport interface UserUpdatedEvent extends BaseEvent {\n userId: string\n viaScim?: boolean\n audited: {\n email: string\n }\n}\n\nexport interface UserDeletedEvent extends BaseEvent {\n userId: string\n viaScim?: boolean\n audited: {\n email: string\n }\n}\n\nexport interface UserOnboardingEvent extends BaseEvent {\n userId: string\n step?: string\n audited: {\n email: string\n }\n}\n\nexport interface UserPermissionAssignedEvent extends BaseEvent {\n userId: string\n audited: {\n email: string\n }\n}\n\nexport interface UserPermissionRemovedEvent extends BaseEvent {\n userId: string\n audited: {\n email: string\n }\n}\n\nexport interface UserInvitedEvent extends BaseEvent {\n audited: {\n email: string\n }\n}\n\nexport interface UserInviteAcceptedEvent extends BaseEvent {\n userId: string\n audited: {\n email: string\n }\n}\n\nexport interface UserPasswordForceResetEvent extends BaseEvent {\n userId: string\n audited: {\n email: string\n }\n}\n\nexport interface UserPasswordUpdatedEvent extends BaseEvent {\n userId: string\n audited: {\n email: string\n }\n}\n\nexport interface UserPasswordResetRequestedEvent extends BaseEvent {\n userId: string\n audited: {\n email: string\n }\n}\n\nexport interface UserPasswordResetEvent extends BaseEvent {\n userId: string\n audited: {\n email: string\n }\n}\n\nexport interface UserDataCollaborationEvent extends BaseEvent {\n users: number\n}\n", "import { ViewCalculation } from \"../../documents\"\nimport { BaseEvent, TableExportFormat } from \"./event\"\n\nexport interface ViewCreatedEvent extends BaseEvent {\n tableId: string\n}\n\nexport interface ViewUpdatedEvent extends BaseEvent {\n tableId: string\n}\n\nexport interface ViewDeletedEvent extends BaseEvent {\n tableId: string\n}\n\nexport interface ViewExportedEvent extends BaseEvent {\n tableId: string\n format: TableExportFormat\n}\n\nexport interface ViewFilterCreatedEvent extends BaseEvent {\n tableId: string\n}\n\nexport interface ViewFilterUpdatedEvent extends BaseEvent {\n tableId: string\n}\n\nexport interface ViewFilterDeletedEvent extends BaseEvent {\n tableId: string\n}\n\nexport interface ViewCalculationCreatedEvent extends BaseEvent {\n tableId: string\n calculation: ViewCalculation\n}\n\nexport interface ViewCalculationUpdatedEvent extends BaseEvent {\n tableId: string\n calculation: ViewCalculation\n}\n\nexport interface ViewCalculationDeletedEvent extends BaseEvent {\n tableId: string\n calculation: ViewCalculation\n}\n", "import { BaseEvent } from \"./event\"\n\nexport interface AccountCreatedEvent extends BaseEvent {\n tenantId: string\n registrationStep?: string\n}\n\nexport interface AccountDeletedEvent extends BaseEvent {\n tenantId: string\n registrationStep?: string\n}\n\nexport interface AccountVerifiedEvent extends BaseEvent {\n tenantId: string\n}\n", "import { BaseEvent, Event } from \"./event\"\n\nexport interface AppBackfillSucceededEvent extends BaseEvent {\n appId: string\n automations: number\n datasources: number\n layouts: number\n queries: number\n roles: number\n tables: number\n screens: number\n errors?: string[]\n errorCount?: number\n}\n\nexport interface 
AppBackfillFailedEvent extends BaseEvent {\n error: string\n}\n\nexport interface TenantBackfillSucceededEvent extends BaseEvent {\n apps: number\n users: number\n\n usage: any\n errors?: [string]\n errorCount?: number\n}\n\nexport interface TenantBackfillFailedEvent extends BaseEvent {\n error: string\n}\n\nexport interface InstallationBackfillSucceededEvent extends BaseEvent {}\n\nexport interface InstallationBackfillFailedEvent extends BaseEvent {\n error: string\n}\n\nexport interface BackfillMetadata extends BaseEvent {\n eventWhitelist: Event[]\n}\n\nexport interface CachedEvent extends BaseEvent {\n event: Event\n properties: any\n}\n", "import { Hosting } from \"..\"\n\n// GROUPS\n\nexport enum GroupType {\n TENANT = \"tenant\",\n INSTALLATION = \"installation\",\n}\n\nexport interface Group {\n id: string\n type: IdentityType\n environment: string\n hosting: Hosting\n}\n\nexport interface TenantGroup extends Group {\n // account level information is associated with the tenant group\n // as we don't have this at the user level\n profession?: string // only available in cloud\n companySize?: string // only available in cloud\n installationId: string\n}\n\nexport interface InstallationGroup extends Group {\n version: string\n}\n\n// IDENTITIES\n\nexport enum IdentityType {\n USER = \"user\",\n TENANT = \"tenant\",\n INSTALLATION = \"installation\",\n}\n\nexport interface HostInfo {\n ipAddress?: string\n userAgent?: string\n}\n\nexport interface Identity {\n id: string\n type: IdentityType\n hosting: Hosting\n environment: string\n installationId?: string\n tenantId?: string\n // usable - no unique format\n realTenantId?: string\n hostInfo?: HostInfo\n}\n\nexport interface UserIdentity extends Identity {\n verified: boolean\n accountHolder: boolean\n providerType?: string\n builder?: boolean\n admin?: boolean\n}\n", "import { BaseEvent } from \"./event\"\n\nexport interface GroupCreatedEvent extends BaseEvent {\n groupId: string\n viaScim?: boolean\n audited: {\n name: string\n }\n}\n\nexport interface GroupUpdatedEvent extends BaseEvent {\n groupId: string\n viaScim?: boolean\n audited: {\n name: string\n }\n}\n\nexport interface GroupDeletedEvent extends BaseEvent {\n groupId: string\n viaScim?: boolean\n audited: {\n name: string\n }\n}\n\nexport interface GroupUsersAddedEvent extends BaseEvent {\n count: number\n groupId: string\n viaScim?: boolean\n audited: {\n name: string\n }\n}\n\nexport interface GroupUsersDeletedEvent extends BaseEvent {\n count: number\n groupId: string\n viaScim?: boolean\n audited: {\n name: string\n }\n}\n\nexport interface GroupAddedOnboardingEvent extends BaseEvent {\n groupId: string\n onboarding: boolean\n}\n\nexport interface GroupPermissionsEditedEvent extends BaseEvent {\n permissions: Record<string, string>\n groupId: string\n audited: {\n name: string\n }\n}\n", "import { BaseEvent } from \"./event\"\nimport { PluginSource, PluginType } from \"../../\"\n\nexport interface PluginInitEvent extends BaseEvent {\n type: PluginType\n name: string\n version: string\n description: string\n}\n\nexport interface PluginImportedEvent extends BaseEvent {\n pluginId: string\n type: PluginType\n source: PluginSource\n name: string\n version: string\n description: string\n}\n\nexport interface PluginDeletedEvent extends BaseEvent {\n pluginId: string\n type: PluginType\n name: string\n version: string\n description: string\n}\n", "import { BaseEvent } from \"./event\"\nimport { AppBackupTrigger, AppBackupType } from \"../../documents\"\n\nexport interface 
AppBackupRestoreEvent extends BaseEvent {\n appId: string\n restoreId: string\n backupCreatedAt: string\n name: string\n}\n\nexport interface AppBackupTriggeredEvent extends BaseEvent {\n backupId: string\n appId: string\n trigger: AppBackupTrigger\n type: AppBackupType\n name: string\n}\n", "import { BaseEvent } from \"./event\"\n\nexport interface EnvironmentVariableCreatedEvent extends BaseEvent {\n name: string\n environments: string[]\n}\n\nexport interface EnvironmentVariableDeletedEvent extends BaseEvent {\n name: string\n}\n\nexport interface EnvironmentVariableUpgradePanelOpenedEvent extends BaseEvent {\n userId: string\n}\n", "import { BaseEvent } from \"./event\"\nimport { AuditLogSearchParams } from \"../../api\"\n\nexport interface AuditLogFilteredEvent extends BaseEvent {\n filters: AuditLogSearchParams\n}\n\nexport interface AuditLogDownloadedEvent extends BaseEvent {\n filters: AuditLogSearchParams\n}\n", "export * from \"./app\"\nexport * from \"./auth\"\nexport * from \"./automation\"\nexport * from \"./email\"\nexport * from \"./datasource\"\nexport * from \"./event\"\nexport * from \"./layout\"\nexport * from \"./license\"\nexport * from \"./version\"\nexport * from \"./query\"\nexport * from \"./role\"\nexport * from \"./rows\"\nexport * from \"./screen\"\nexport * from \"./serve\"\nexport * from \"./table\"\nexport * from \"./user\"\nexport * from \"./view\"\nexport * from \"./account\"\nexport * from \"./backfill\"\nexport * from \"./identification\"\nexport * from \"./userGroup\"\nexport * from \"./plugin\"\nexport * from \"./backup\"\nexport * from \"./environmentVariable\"\nexport * from \"./auditLog\"\n", "import { PurchasedPlan, Quotas, Feature, Billing } from \".\"\nimport { ISO8601 } from \"../../shared\"\n\nexport interface OfflineIdentifier {\n installId: string\n tenantId: string\n}\n\nexport interface OfflineLicense extends License {\n identifier: OfflineIdentifier\n expireAt: ISO8601\n}\n\nexport interface License {\n features: Feature[]\n quotas: Quotas\n plan: PurchasedPlan\n billing?: Billing\n testClockId?: string\n}\n", "export enum PlanType {\n FREE = \"free\",\n /** @deprecated */\n PRO = \"pro\",\n /** @deprecated */\n TEAM = \"team\",\n /** @deprecated */\n PREMIUM = \"premium\",\n PREMIUM_PLUS = \"premium_plus\",\n /** @deprecated */\n BUSINESS = \"business\",\n ENTERPRISE_BASIC = \"enterprise_basic\",\n ENTERPRISE = \"enterprise\",\n}\n\nexport enum PriceDuration {\n MONTHLY = \"monthly\",\n YEARLY = \"yearly\",\n}\n\nexport interface AvailablePlan {\n type: PlanType\n maxUsers: number\n prices: AvailablePrice[]\n}\n\nexport interface AvailablePrice {\n amount: number\n amountMonthly: number\n currency: string\n duration: PriceDuration\n priceId: string\n type?: string\n}\n\nexport enum PlanModel {\n PER_USER = \"perUser\",\n PER_CREATOR_PER_USER = \"per_creator_per_user\",\n DAY_PASS = \"dayPass\",\n}\n\nexport interface PurchasedPlan {\n type: PlanType\n model: PlanModel\n usesInvoicing: boolean\n price?: PurchasedPrice\n}\n\nexport interface PurchasedPrice extends AvailablePrice {\n dayPasses: number | undefined\n /** @deprecated - now at the plan level via model */\n isPerUser: boolean\n}\n", "import { PlanType } from \".\"\n\nexport enum QuotaUsageType {\n STATIC = \"static\",\n MONTHLY = \"monthly\",\n}\n\nexport enum QuotaType {\n USAGE = \"usage\",\n CONSTANT = \"constant\",\n}\n\nexport enum StaticQuotaName {\n ROWS = \"rows\",\n APPS = \"apps\",\n USERS = \"users\",\n CREATORS = \"creators\",\n USER_GROUPS = \"userGroups\",\n PLUGINS 
= \"plugins\",\n}\n\nexport enum MonthlyQuotaName {\n QUERIES = \"queries\",\n AUTOMATIONS = \"automations\",\n DAY_PASSES = \"dayPasses\",\n}\n\nexport enum ConstantQuotaName {\n AUTOMATION_LOG_RETENTION_DAYS = \"automationLogRetentionDays\",\n APP_BACKUPS_RETENTION_DAYS = \"appBackupRetentionDays\",\n}\n\nexport type MeteredQuotaName = StaticQuotaName | MonthlyQuotaName\nexport type QuotaName = StaticQuotaName | MonthlyQuotaName | ConstantQuotaName\n\nexport const isStaticQuota = (\n quotaType: QuotaType,\n usageType: QuotaUsageType,\n name: QuotaName\n): name is StaticQuotaName => {\n return quotaType === QuotaType.USAGE && usageType === QuotaUsageType.STATIC\n}\n\nexport const isMonthlyQuota = (\n quotaType: QuotaType,\n usageType: QuotaUsageType,\n name: QuotaName\n): name is MonthlyQuotaName => {\n return quotaType === QuotaType.USAGE && usageType === QuotaUsageType.MONTHLY\n}\n\nexport const isConstantQuota = (\n quotaType: QuotaType,\n name: QuotaName\n): name is ConstantQuotaName => {\n return quotaType === QuotaType.CONSTANT\n}\n\nexport type PlanQuotas = { [key in PlanType]: Quotas | undefined }\n\nexport type MonthlyQuotas = {\n [MonthlyQuotaName.QUERIES]: Quota\n [MonthlyQuotaName.AUTOMATIONS]: Quota\n [MonthlyQuotaName.DAY_PASSES]: Quota\n}\n\nexport type StaticQuotas = {\n [StaticQuotaName.ROWS]: Quota\n [StaticQuotaName.APPS]: Quota\n [StaticQuotaName.USERS]: Quota\n [StaticQuotaName.CREATORS]: Quota\n [StaticQuotaName.USER_GROUPS]: Quota\n [StaticQuotaName.PLUGINS]: Quota\n}\n\nexport type ConstantQuotas = {\n [ConstantQuotaName.AUTOMATION_LOG_RETENTION_DAYS]: Quota\n [ConstantQuotaName.APP_BACKUPS_RETENTION_DAYS]: Quota\n}\n\nexport type Quotas = {\n [QuotaType.USAGE]: {\n [QuotaUsageType.MONTHLY]: MonthlyQuotas\n [QuotaUsageType.STATIC]: StaticQuotas\n }\n [QuotaType.CONSTANT]: ConstantQuotas\n}\n\nexport interface Quota {\n name: string\n value: number\n /**\n * Array of whole numbers (1-100) that dictate the percentage that this quota should trigger\n * at in relation to the corresponding usage inside budibase.\n *\n * Triggering results in a budibase installation sending a request to account-portal,\n * which can have subsequent effects such as sending emails to users.\n */\n triggers: number[]\n startDate?: number\n}\n", "import { PlanType } from \"./plan\"\n\nexport enum Feature {\n USER_GROUPS = \"userGroups\",\n APP_BACKUPS = \"appBackups\",\n ENVIRONMENT_VARIABLES = \"environmentVariables\",\n AUDIT_LOGS = \"auditLogs\",\n ENFORCEABLE_SSO = \"enforceableSSO\",\n BRANDING = \"branding\",\n SCIM = \"scim\",\n SYNC_AUTOMATIONS = \"syncAutomations\",\n TRIGGER_AUTOMATION_RUN = \"triggerAutomationRun\",\n APP_BUILDERS = \"appBuilders\",\n OFFLINE = \"offline\",\n EXPANDED_PUBLIC_API = \"expandedPublicApi\",\n VIEW_PERMISSIONS = \"viewPermissions\",\n}\n\nexport type PlanFeatures = { [key in PlanType]: Feature[] | undefined }\n", "import { PriceDuration } from \"./plan\"\n\nexport interface Customer {\n balance: number | null | undefined\n currency: string | null | undefined\n}\n\nexport interface SubscriptionItems {\n user: number | undefined\n creator: number | undefined\n}\n\nexport interface Subscription {\n amount: number\n amounts: SubscriptionItems | undefined\n currency: string\n quantity: number\n quantities: SubscriptionItems | undefined\n duration: PriceDuration\n cancelAt: number | null | undefined\n currentPeriodStart: number\n currentPeriodEnd: number\n status: string\n pastDueAt?: number | null\n downgradeAt?: number\n}\n\nexport interface Billing {\n 
customer: Customer\n subscription?: Subscription\n}\n", "export * from \"./license\"\nexport * from \"./plan\"\nexport * from \"./quota\"\nexport * from \"./feature\"\nexport * from \"./billing\"\n", "export interface Migration extends MigrationDefinition {\n appOpts?: object\n fn: Function\n silent?: boolean\n preventRetry?: boolean\n}\n\nexport enum MigrationType {\n // run once per tenant, recorded in global db, global db is provided as an argument\n GLOBAL = \"global\",\n // run per app, recorded in each app db, app db is provided as an argument\n APP = \"app\",\n // run once, recorded in global info db, global info db is provided as an argument\n INSTALLATION = \"installation\",\n}\n\nexport interface MigrationNoOpOptions {\n type: MigrationType\n tenantId: string\n appId?: string\n}\n\n/**\n * e.g.\n * {\n * tenantIds: ['bb'],\n * force: {\n * global: ['quota_1']\n * }\n * }\n */\nexport interface MigrationOptions {\n tenantIds?: string[]\n force?: {\n [type: string]: string[]\n }\n noOp?: MigrationNoOpOptions\n}\n\nexport enum MigrationName {\n USER_EMAIL_VIEW_CASING = \"user_email_view_casing\",\n APP_URLS = \"app_urls\",\n EVENT_APP_BACKFILL = \"event_app_backfill\",\n EVENT_GLOBAL_BACKFILL = \"event_global_backfill\",\n EVENT_INSTALLATION_BACKFILL = \"event_installation_backfill\",\n GLOBAL_INFO_SYNC_USERS = \"global_info_sync_users\",\n TABLE_SETTINGS_LINKS_TO_ACTIONS = \"table_settings_links_to_actions\",\n // increment this number to re-activate this migration\n SYNC_QUOTAS = \"sync_quotas_2\",\n}\n\nexport interface MigrationDefinition {\n type: MigrationType\n name: MigrationName\n}\n", "import { Table } from \"../documents\"\n\nexport const PASSWORD_REPLACEMENT = \"--secret-value--\"\n\nexport enum Operation {\n CREATE = \"CREATE\",\n READ = \"READ\",\n UPDATE = \"UPDATE\",\n DELETE = \"DELETE\",\n BULK_CREATE = \"BULK_CREATE\",\n CREATE_TABLE = \"CREATE_TABLE\",\n UPDATE_TABLE = \"UPDATE_TABLE\",\n DELETE_TABLE = \"DELETE_TABLE\",\n}\n\nexport enum SortDirection {\n ASCENDING = \"ASCENDING\",\n DESCENDING = \"DESCENDING\",\n}\n\nexport enum QueryType {\n SQL = \"sql\",\n JSON = \"json\",\n FIELDS = \"fields\",\n}\n\nexport enum DatasourceFieldType {\n STRING = \"string\",\n CODE = \"code\",\n LONGFORM = \"longForm\",\n BOOLEAN = \"boolean\",\n NUMBER = \"number\",\n PASSWORD = \"password\",\n LIST = \"list\",\n OBJECT = \"object\",\n JSON = \"json\",\n FILE = \"file\",\n FIELD_GROUP = \"fieldGroup\",\n SELECT = \"select\",\n}\n\nexport enum SourceName {\n POSTGRES = \"POSTGRES\",\n DYNAMODB = \"DYNAMODB\",\n MONGODB = \"MONGODB\",\n ELASTICSEARCH = \"ELASTICSEARCH\",\n COUCHDB = \"COUCHDB\",\n SQL_SERVER = \"SQL_SERVER\",\n S3 = \"S3\",\n AIRTABLE = \"AIRTABLE\",\n MYSQL = \"MYSQL\",\n ARANGODB = \"ARANGODB\",\n REST = \"REST\",\n ORACLE = \"ORACLE\",\n GOOGLE_SHEETS = \"GOOGLE_SHEETS\",\n FIRESTORE = \"FIRESTORE\",\n REDIS = \"REDIS\",\n SNOWFLAKE = \"SNOWFLAKE\",\n}\n\nexport enum IncludeRelationship {\n INCLUDE = 1,\n EXCLUDE = 0,\n}\n\nexport enum FilterType {\n STRING = \"string\",\n FUZZY = \"fuzzy\",\n RANGE = \"range\",\n EQUAL = \"equal\",\n NOT_EQUAL = \"notEqual\",\n EMPTY = \"empty\",\n NOT_EMPTY = \"notEmpty\",\n ONE_OF = \"oneOf\",\n}\n\nexport enum DatasourceFeature {\n CONNECTION_CHECKING = \"connection\",\n FETCH_TABLE_NAMES = \"fetch_table_names\",\n EXPORT_SCHEMA = \"export_schema\",\n}\n\nexport interface StepDefinition {\n key: string\n template: string\n}\n\nexport interface QueryDefinition {\n type: QueryType\n displayName?: string\n readable?: 
boolean\n customisable?: boolean\n fields?: object\n urlDisplay?: boolean\n steps?: Array<StepDefinition>\n}\n\nexport interface ExtraQueryConfig {\n [key: string]: {\n displayName: string\n type: string\n required: boolean\n data?: object\n }\n}\n\ninterface DatasourceBasicFieldConfig {\n type: DatasourceFieldType\n display?: string\n required?: boolean\n default?: any\n deprecated?: boolean\n hidden?: string\n}\n\ninterface DatasourceSelectFieldConfig extends DatasourceBasicFieldConfig {\n type: DatasourceFieldType.SELECT\n config: { options: string[] }\n}\n\ninterface DatasourceFieldGroupConfig extends DatasourceBasicFieldConfig {\n type: DatasourceFieldType.FIELD_GROUP\n config: {\n openByDefault?: boolean\n nestedFields?: boolean\n }\n}\n\ntype DatasourceFieldConfig =\n | DatasourceSelectFieldConfig\n | DatasourceFieldGroupConfig\n | DatasourceBasicFieldConfig\n\nexport interface DatasourceConfig {\n [key: string]: DatasourceFieldConfig & {\n fields?: DatasourceConfig\n }\n}\n\nexport interface Integration {\n docs: string\n plus?: boolean\n isSQL?: boolean\n auth?: { type: string }\n features?: Partial<Record<DatasourceFeature, boolean>>\n relationships?: boolean\n description: string\n friendlyName: string\n type?: string\n iconUrl?: string\n datasource: DatasourceConfig\n query: {\n [key: string]: QueryDefinition\n }\n extra?: ExtraQueryConfig\n}\n\nexport type ConnectionInfo = {\n connected: boolean\n error?: string\n}\n\nexport interface IntegrationBase {\n create?(query: any): Promise<any[] | any>\n read?(query: any): Promise<any[] | any>\n update?(query: any): Promise<any[] | any>\n delete?(query: any): Promise<any[] | any>\n testConnection?(): Promise<ConnectionInfo>\n getExternalSchema?(): Promise<string>\n defineTypeCastingFromSchema?(schema: {\n [key: string]: {\n name: string\n type: string\n }\n }): void\n}\n\nexport interface Schema {\n tables: Record<string, Table>\n errors: Record<string, string>\n}\n\nexport interface DatasourcePlus extends IntegrationBase {\n // if the datasource supports the use of bindings directly (to protect against SQL injection)\n // this returns the format of the identifier\n getBindingIdentifier(): string\n getStringConcat(parts: string[]): string\n buildSchema(\n datasourceId: string,\n entities: Record<string, Table>\n ): Promise<Schema>\n getTableNames(): Promise<string[]>\n}\n", "import { Operation, SortDirection } from \"./datasources\"\nimport { Row, Table } from \"../documents\"\nimport { SortType } from \"../api\"\n\nexport interface SearchFilters {\n allOr?: boolean\n onEmptyFilter?: EmptyFilterOption\n string?: {\n [key: string]: string\n }\n fuzzy?: {\n [key: string]: string\n }\n range?: {\n [key: string]: {\n high: number | string\n low: number | string\n }\n }\n equal?: {\n [key: string]: any\n }\n notEqual?: {\n [key: string]: any\n }\n empty?: {\n [key: string]: any\n }\n notEmpty?: {\n [key: string]: any\n }\n oneOf?: {\n [key: string]: any[]\n }\n contains?: {\n [key: string]: any[] | any\n }\n notContains?: {\n [key: string]: any[]\n }\n containsAny?: {\n [key: string]: any[]\n }\n}\n\nexport interface SortJson {\n [key: string]: {\n direction: SortDirection\n type?: SortType\n }\n}\n\nexport interface PaginationJson {\n limit: number\n page?: string | number\n}\n\nexport interface RenameColumn {\n old: string\n updated: string\n}\n\nexport interface RelationshipsJson {\n through?: string\n from?: string\n to?: string\n fromPrimary?: string\n toPrimary?: string\n tableName: string\n column: string\n}\n\nexport interface 
QueryJson {\n endpoint: {\n datasourceId: string\n entityId: string\n operation: Operation\n schema?: string\n }\n resource?: {\n fields: string[]\n }\n filters?: SearchFilters\n sort?: SortJson\n paginate?: PaginationJson\n body?: Row | Row[]\n table?: Table\n meta?: {\n table?: Table\n tables?: Record<string, Table>\n renamed?: RenameColumn\n }\n extra?: {\n idFilter?: SearchFilters\n }\n relationships?: RelationshipsJson[]\n}\n\nexport interface SqlQuery {\n sql: string\n bindings?: string[]\n}\n\nexport enum EmptyFilterOption {\n RETURN_ALL = \"all\",\n RETURN_NONE = \"none\",\n}\n", "import { Context, Request } from \"koa\"\nimport { User, Role, UserRoles, Account, ConfigType } from \"../documents\"\nimport { FeatureFlag, License } from \"../sdk\"\nimport { Files } from \"formidable\"\n\nexport interface ContextUser extends Omit<User, \"roles\"> {\n globalId?: string\n license?: License\n userId?: string\n roleId?: string | null\n role?: Role\n roles?: UserRoles\n csrfToken?: string\n featureFlags?: FeatureFlag[]\n accountPortalAccess?: boolean\n providerType?: ConfigType\n account?: Account\n}\n\n/**\n * Add support for koa-body in context.\n */\nexport interface BBRequest<RequestBody> extends Request {\n body: RequestBody\n files?: Files\n}\n\n/**\n * Basic context with no user.\n */\nexport interface Ctx<RequestBody = any, ResponseBody = any> extends Context {\n request: BBRequest<RequestBody>\n body: ResponseBody\n}\n\n/**\n * Authenticated context.\n */\nexport interface UserCtx<RequestBody = any, ResponseBody = any>\n extends Ctx<RequestBody, ResponseBody> {\n user: ContextUser\n roleId?: string\n}\n\n/**\n * @deprecated: Use UserCtx / Ctx appropriately\n * Authenticated context.\n */\nexport interface BBContext extends Ctx {\n user?: ContextUser\n}\n", "import { BBContext } from \"./koa\"\nimport { Hosting } from \"./hosting\"\n\nexport interface AuthToken {\n userId: string\n tenantId: string\n sessionId: string\n}\n\nexport interface CreateSession {\n sessionId: string\n tenantId: string\n csrfToken?: string\n hosting?: Hosting\n}\n\nexport interface Session extends CreateSession {\n userId: string\n lastAccessedAt: string\n createdAt: string\n // make optional attributes required\n csrfToken: string\n}\n\nexport interface SessionKey {\n key: string\n}\n\nexport interface ScannedSession {\n value: Session\n}\n\nexport interface PlatformLogoutOpts {\n ctx: BBContext\n userId: string\n keepActiveSession?: boolean\n}\n", "import Redlock from \"redlock\"\n\nexport enum LockType {\n /**\n * If this lock is already held the attempted operation will not be performed.\n * No retries will take place and no error will be thrown.\n */\n TRY_ONCE = \"try_once\",\n TRY_TWICE = \"try_twice\",\n DEFAULT = \"default\",\n DELAY_500 = \"delay_500\",\n CUSTOM = \"custom\",\n AUTO_EXTEND = \"auto_extend\",\n}\n\nexport enum LockName {\n MIGRATIONS = \"migrations\",\n TRIGGER_QUOTA = \"trigger_quota\",\n SYNC_ACCOUNT_LICENSE = \"sync_account_license\",\n UPDATE_TENANTS_DOC = \"update_tenants_doc\",\n PERSIST_WRITETHROUGH = \"persist_writethrough\",\n QUOTA_USAGE_EVENT = \"quota_usage_event\",\n APP_MIGRATION = \"app_migrations\",\n PROCESS_AUTO_COLUMNS = \"process_auto_columns\",\n PROCESS_USER_INVITE = \"process_user_invite\",\n}\n\nexport type LockOptions = {\n /**\n * The lock type determines which client to use\n */\n type: LockType\n /**\n * The custom options to use when creating the redlock instance\n * type must be set to custom for the options to be applied\n */\n customOptions?: 
Redlock.Options\n /**\n * The name for the lock\n */\n name: LockName\n /**\n * The individual resource to lock. This is useful for locking around very specific identifiers, e.g. a document that is prone to conflicts\n */\n resource?: string\n /**\n * This is a system-wide lock - don't use tenancy in lock key\n */\n systemLock?: boolean\n} & (\n | {\n /**\n * The ttl to auto-expire the lock if not unlocked manually\n */\n ttl: number\n type: Exclude<LockType, LockType.AUTO_EXTEND>\n }\n | {\n type: LockType.AUTO_EXTEND\n onExtend?: () => void\n }\n)\n", "import type Nano from \"@budibase/nano\"\nimport { AllDocsResponse, AnyDocument, Document, ViewTemplateOpts } from \"../\"\nimport { Writable } from \"stream\"\n\nexport enum SearchIndex {\n ROWS = \"rows\",\n AUDIT = \"audit\",\n USER = \"user\",\n}\n\nexport type PouchOptions = {\n inMemory?: boolean\n replication?: boolean\n onDisk?: boolean\n find?: boolean\n}\n\nexport enum SortOption {\n ASCENDING = \"asc\",\n DESCENDING = \"desc\",\n}\n\nexport type IndexAnalyzer = {\n name: string\n default?: string\n fields?: Record<string, string>\n}\n\nexport type DBView = {\n name?: string\n map: string\n reduce?: string\n meta?: ViewTemplateOpts\n groupBy?: string\n}\n\nexport interface DesignDocument extends Document {\n // we use this static reference for all design documents\n _id: \"_design/database\"\n language?: string\n // CouchDB views\n views?: {\n [viewName: string]: DBView\n }\n // Lucene indexes\n indexes?: {\n [indexName: string]: {\n index: string\n analyzer?: string | IndexAnalyzer\n }\n }\n}\n\nexport type CouchFindOptions = {\n selector: PouchDB.Find.Selector\n fields?: string[]\n sort?: {\n [key: string]: SortOption\n }[]\n limit?: number\n skip?: number\n bookmark?: string\n}\n\nexport type DatabaseOpts = {\n skip_setup?: boolean\n}\n\nexport type DatabasePutOpts = {\n force?: boolean\n}\n\nexport type DatabaseCreateIndexOpts = {\n index: {\n fields: string[]\n name?: string | undefined\n ddoc?: string | undefined\n type?: string | undefined\n }\n}\n\nexport type DatabaseDeleteIndexOpts = {\n name: string\n ddoc: string\n type?: string | undefined\n}\n\ntype DBPrimitiveKey = string | number | {}\nexport type DatabaseKey = DBPrimitiveKey | DBPrimitiveKey[]\n\nexport type DatabaseQueryOpts = {\n include_docs?: boolean\n startkey?: DatabaseKey\n endkey?: DatabaseKey\n limit?: number\n skip?: number\n descending?: boolean\n key?: DatabaseKey\n keys?: DatabaseKey[]\n group?: boolean\n startkey_docid?: string\n}\n\nexport const isDocument = (doc: any): doc is Document => {\n return typeof doc === \"object\" && doc._id && doc._rev\n}\n\nexport interface DatabaseDumpOpts {\n filter?: (doc: AnyDocument) => boolean\n batch_size?: number\n batch_limit?: number\n style?: \"main_only\" | \"all_docs\"\n timeout?: number\n doc_ids?: string[]\n query_params?: any\n view?: string\n selector?: any\n}\n\nexport interface Database {\n name: string\n\n exists(): Promise<boolean>\n get<T extends Document>(id?: string): Promise<T>\n getMultiple<T extends Document>(\n ids: string[],\n opts?: { allowMissing?: boolean }\n ): Promise<T[]>\n remove(\n id: string | Document,\n rev?: string\n ): Promise<Nano.DocumentDestroyResponse>\n put(\n document: AnyDocument,\n opts?: DatabasePutOpts\n ): Promise<Nano.DocumentInsertResponse>\n bulkDocs(documents: AnyDocument[]): Promise<Nano.DocumentBulkResponse[]>\n allDocs<T extends Document>(\n params: DatabaseQueryOpts\n ): Promise<AllDocsResponse<T>>\n query<T extends Document>(\n viewName: string,\n params: 
DatabaseQueryOpts\n ): Promise<AllDocsResponse<T>>\n destroy(): Promise<Nano.OkResponse | void>\n compact(): Promise<Nano.OkResponse | void>\n // these are all PouchDB related functions that are rarely used - in future\n // should be replaced by better typed/non-pouch implemented methods\n dump(stream: Writable, opts?: DatabaseDumpOpts): Promise<any>\n load(...args: any[]): Promise<any>\n createIndex(...args: any[]): Promise<any>\n deleteIndex(...args: any[]): Promise<any>\n getIndexes(...args: any[]): Promise<any>\n}\n", "export interface EndpointMatcher {\n /**\n * The HTTP Path. e.g. /api/things/:thingId\n */\n route: string\n /**\n * The HTTP Verb. e.g. GET, POST, etc.\n * ALL is also accepted to cover all verbs.\n */\n method: string\n /**\n * The route must match exactly - not just begins with\n */\n strict?: boolean\n}\n\nexport interface RegexMatcher {\n regex: RegExp\n method: string\n strict: boolean\n route: string\n}\n", "export interface GetTenantIdOptions {\n allowNoTenant?: boolean\n excludeStrategies?: TenantResolutionStrategy[]\n includeStrategies?: TenantResolutionStrategy[]\n}\n\nexport enum TenantResolutionStrategy {\n USER = \"user\",\n HEADER = \"header\",\n QUERY = \"query\",\n SUBDOMAIN = \"subdomain\",\n PATH = \"path\",\n}\n", "export * from \"./matchers\"\nexport * from \"./tenancy\"\n", "export enum FeatureFlag {\n LICENSING = \"LICENSING\",\n PER_CREATOR_PER_USER_PRICE = \"PER_CREATOR_PER_USER_PRICE\",\n PER_CREATOR_PER_USER_PRICE_ALERT = \"PER_CREATOR_PER_USER_PRICE_ALERT\",\n}\n\nexport interface TenantFeatureFlags {\n [key: string]: FeatureFlag[]\n}\n", "export enum AppEnvironment {\n PRODUCTION = \"production\",\n DEVELOPMENT = \"development\",\n}\n", "import { Event, HostInfo } from \"./events\"\nimport { AuditLogDoc } from \"../documents\"\n\nexport type AuditWriteOpts = {\n appId?: string\n timestamp?: string | number\n userId?: string\n hostInfo?: HostInfo\n}\n\nexport type AuditLogFn = (\n event: Event,\n metadata: any,\n opts: AuditWriteOpts\n) => Promise<AuditLogDoc | undefined>\n\nexport type AuditLogQueueEvent = {\n event: Event\n properties: any\n opts: AuditWriteOpts\n tenantId: string\n}\n", "import {\n OAuth2,\n SSOProfileJson,\n SSOProviderType,\n SSOUser,\n User,\n} from \"../documents\"\nimport { SaveUserOpts } from \"./user\"\n\nexport interface JwtClaims {\n preferred_username?: string\n email?: string\n}\n\nexport interface SSOAuthDetails {\n oauth2: OAuth2\n provider: string\n providerType: SSOProviderType\n userId: string\n email?: string\n profile?: SSOProfile\n}\n\nexport interface SSOProfile {\n id: string\n name?: {\n givenName?: string\n familyName?: string\n }\n _json: SSOProfileJson\n provider?: string\n}\n\nexport type SaveSSOUserFunction = (\n user: SSOUser,\n opts: SaveUserOpts\n) => Promise<User>\n", "export interface SaveUserOpts {\n hashPassword?: boolean\n requirePassword?: boolean\n currentUserId?: string\n skipPasswordValidation?: boolean\n}\n", "import { Event } from \"../events\"\n\nexport enum CommandWord {\n BACKUPS = \"backups\",\n HOSTING = \"hosting\",\n ANALYTICS = \"analytics\",\n HELP = \"help\",\n PLUGIN = \"plugins\",\n}\n\nexport enum InitType {\n QUICK = \"quick\",\n DIGITAL_OCEAN = \"do\",\n}\n\nexport const AnalyticsEvent = {\n OptOut: \"analytics:opt:out\",\n OptIn: \"analytics:opt:in\",\n SelfHostInit: \"hosting:init\",\n PluginInit: Event.PLUGIN_INIT,\n}\n", "export * from \"./constants\"\n", "export interface SocketSession {\n _id: string\n email: string\n firstName?: string\n lastName?: string\n 
sessionId: string\n room?: string\n connectedAt: number\n}\n", "export enum PermissionLevel {\n READ = \"read\",\n WRITE = \"write\",\n EXECUTE = \"execute\",\n ADMIN = \"admin\",\n}\n\n// these are the global types, that govern the underlying default behaviour\nexport enum PermissionType {\n APP = \"app\",\n TABLE = \"table\",\n USER = \"user\",\n AUTOMATION = \"automation\",\n WEBHOOK = \"webhook\",\n BUILDER = \"builder\",\n CREATOR = \"creator\",\n GLOBAL_BUILDER = \"globalBuilder\",\n QUERY = \"query\",\n VIEW = \"view\",\n LEGACY_VIEW = \"legacy_view\",\n}\n\nexport enum PermissionSource {\n EXPLICIT = \"EXPLICIT\",\n INHERITED = \"INHERITED\",\n BASE = \"BASE\",\n}\n", "import { SortOrder, SortType } from \"../api\"\nimport { SearchFilters } from \"./search\"\n\nexport interface SearchParams {\n tableId: string\n paginate?: boolean\n query: SearchFilters\n bookmark?: string\n limit?: number\n sort?: string\n sortOrder?: SortOrder\n sortType?: SortType\n version?: string\n disableEscaping?: boolean\n fields?: string[]\n}\n", "export * from \"./automations\"\nexport * from \"./hosting\"\nexport * from \"./context\"\nexport * from \"./events\"\nexport * from \"./licensing\"\nexport * from \"./migrations\"\nexport * from \"./datasources\"\nexport * from \"./search\"\nexport * from \"./koa\"\nexport * from \"./auth\"\nexport * from \"./locks\"\nexport * from \"./db\"\nexport * from \"./middleware\"\nexport * from \"./featureFlag\"\nexport * from \"./environmentVariables\"\nexport * from \"./auditLogs\"\nexport * from \"./sso\"\nexport * from \"./user\"\nexport * from \"./cli\"\nexport * from \"./websocket\"\nexport * from \"./permissions\"\nexport * from \"./row\"\n", "import { Feature, Hosting, License, PlanType, Quotas } from \"../../sdk\"\nimport { DeepPartial } from \"../../shared\"\nimport { QuotaUsage } from \"../global\"\n\nexport interface CreateAccount {\n email: string\n tenantId: string\n hosting: Hosting\n authType: AuthType\n // optional fields - for sso based sign ups\n registrationStep?: string\n // profile\n tenantName?: string\n name?: string\n size?: string\n profession?: string\n}\n\nexport interface CreatePassswordAccount extends CreateAccount {\n password: string\n}\n\nexport interface CreateVerifiableSSOAccount extends CreateAccount {\n provider?: AccountSSOProvider\n thirdPartyProfile?: any\n}\n\nexport const isCreatePasswordAccount = (\n account: CreateAccount\n): account is CreatePassswordAccount => account.authType === AuthType.PASSWORD\n\nexport interface LicenseOverrides {\n features?: Feature[]\n quotas?: DeepPartial<Quotas>\n}\n\nexport interface Account extends CreateAccount {\n // generated\n accountId: string\n createdAt: number\n // registration\n verified: boolean\n verificationSent: boolean\n // licensing\n tier: string // deprecated\n planType?: PlanType\n /** @deprecated */\n planTier?: number\n license?: License\n installId?: string\n installTenantId?: string\n installVersion?: string\n stripeCustomerId?: string\n licenseKey?: string\n licenseKeyActivatedAt?: number\n licenseRequestedAt?: number\n licenseOverrides?: LicenseOverrides\n provider?: AccountSSOProvider\n providerType?: AccountSSOProviderType\n quotaUsage?: QuotaUsage\n offlineLicenseToken?: string\n}\n\nexport interface PasswordAccount extends Account {\n password: string\n}\n\nexport const isPasswordAccount = (\n account: Account\n): account is PasswordAccount =>\n account.authType === AuthType.PASSWORD && account.hosting === Hosting.SELF\n\nexport interface CloudAccount extends Account 
{\n password?: string\n budibaseUserId: string\n}\n\nexport const isCloudAccount = (account: Account): account is CloudAccount =>\n account.hosting === Hosting.CLOUD\n\nexport const isSelfHostAccount = (account: Account) =>\n account.hosting === Hosting.SELF\n\nexport const isSSOAccount = (account: Account): account is SSOAccount =>\n account.authType === AuthType.SSO\n\nexport enum AccountSSOProviderType {\n GOOGLE = \"google\",\n MICROSOFT = \"microsoft\",\n}\n\nexport enum AccountSSOProvider {\n GOOGLE = \"google\",\n MICROSOFT = \"microsoft\",\n}\n\nconst verifiableSSOProviders: AccountSSOProvider[] = [\n AccountSSOProvider.MICROSOFT,\n]\nexport function isVerifiableSSOProvider(provider: AccountSSOProvider): boolean {\n return verifiableSSOProviders.includes(provider)\n}\n\nexport interface AccountSSO {\n provider: AccountSSOProvider\n providerType: AccountSSOProviderType\n oauth2?: OAuthTokens\n pictureUrl?: string\n thirdPartyProfile: any // TODO: define what the google profile looks like\n}\n\nexport type SSOAccount = (Account | CloudAccount) & AccountSSO\n\nexport enum AuthType {\n SSO = \"sso\",\n PASSWORD = \"password\",\n}\n\nexport interface OAuthTokens {\n accessToken: string\n refreshToken: string\n}\n", "export interface CreateAccountUserActivity {\n accountId: string\n userId: string\n timestamp: number\n}\n\nexport interface AccountUserActivity extends CreateAccountUserActivity {\n PK: string\n SK: string\n}\n", "import { Document } from \"../../\"\n\nexport interface Flags extends Document {\n [key: string]: any\n}\n", "export * from \"./account\"\nexport * from \"./user\"\nexport * from \"./flag\"\n", "import { User, Document } from \"../\"\nimport { SocketSession } from \"../../sdk\"\n\nexport type AppMetadataErrors = { [key: string]: string[] }\n\nexport interface App extends Document {\n appId: string\n type: string\n version: string\n componentLibraries: string[]\n name: string\n url: string | undefined\n template: string | undefined\n instance: AppInstance\n tenantId: string\n status: string\n theme?: string\n customTheme?: AppCustomTheme\n revertableVersion?: string\n lockedBy?: User\n sessions?: SocketSession[]\n navigation?: AppNavigation\n automationErrors?: AppMetadataErrors\n icon?: AppIcon\n features?: AppFeatures\n}\n\nexport interface AppInstance {\n _id: string\n}\n\nexport interface AppNavigation {\n navigation: string\n title: string\n navWidth: string\n sticky?: boolean\n hideLogo?: boolean\n logoUrl?: string\n hideTitle?: boolean\n navBackground?: string\n navTextColor?: string\n links?: AppNavigationLink[]\n}\n\nexport interface AppNavigationLink {\n text: string\n url: string\n id?: string\n roleId?: string\n}\n\nexport interface AppCustomTheme {\n buttonBorderRadius?: string\n primaryColor?: string\n primaryColorHover?: string\n\n // Used to exist before new design UI\n navTextColor?: string\n navBackground?: string\n}\n\nexport interface AppIcon {\n name: string\n color: string\n}\n\nexport interface AppFeatures {\n componentValidation?: boolean\n disableUserMetadata?: boolean\n}\n", "import { Document } from \"../document\"\nimport { EventEmitter } from \"events\"\nimport { User } from \"../global\"\n\nexport enum AutomationIOType {\n OBJECT = \"object\",\n STRING = \"string\",\n BOOLEAN = \"boolean\",\n NUMBER = \"number\",\n ARRAY = \"array\",\n JSON = \"json\",\n DATE = \"date\",\n}\n\nexport enum AutomationCustomIOType {\n TABLE = \"table\",\n ROW = \"row\",\n ROWS = \"rows\",\n WIDE = \"wide\",\n QUERY = \"query\",\n QUERY_PARAMS = 
\"queryParams\",\n QUERY_LIMIT = \"queryLimit\",\n LOOP_OPTION = \"loopOption\",\n ITEM = \"item\",\n CODE = \"code\",\n FILTERS = \"filters\",\n COLUMN = \"column\",\n TRIGGER_SCHEMA = \"triggerSchema\",\n CRON = \"cron\",\n WEBHOOK_URL = \"webhookUrl\",\n AUTOMATION = \"automation\",\n AUTOMATION_FIELDS = \"automationFields\",\n}\n\nexport enum AutomationTriggerStepId {\n ROW_SAVED = \"ROW_SAVED\",\n ROW_UPDATED = \"ROW_UPDATED\",\n ROW_DELETED = \"ROW_DELETED\",\n WEBHOOK = \"WEBHOOK\",\n APP = \"APP\",\n CRON = \"CRON\",\n}\n\nexport enum AutomationStepType {\n LOGIC = \"LOGIC\",\n ACTION = \"ACTION\",\n TRIGGER = \"TRIGGER\",\n}\n\nexport enum AutomationActionStepId {\n SEND_EMAIL_SMTP = \"SEND_EMAIL_SMTP\",\n CREATE_ROW = \"CREATE_ROW\",\n UPDATE_ROW = \"UPDATE_ROW\",\n DELETE_ROW = \"DELETE_ROW\",\n EXECUTE_BASH = \"EXECUTE_BASH\",\n OUTGOING_WEBHOOK = \"OUTGOING_WEBHOOK\",\n EXECUTE_SCRIPT = \"EXECUTE_SCRIPT\",\n EXECUTE_QUERY = \"EXECUTE_QUERY\",\n SERVER_LOG = \"SERVER_LOG\",\n DELAY = \"DELAY\",\n FILTER = \"FILTER\",\n QUERY_ROWS = \"QUERY_ROWS\",\n LOOP = \"LOOP\",\n COLLECT = \"COLLECT\",\n OPENAI = \"OPENAI\",\n TRIGGER_AUTOMATION_RUN = \"TRIGGER_AUTOMATION_RUN\",\n // these used to be lowercase step IDs, maintain for backwards compat\n discord = \"discord\",\n slack = \"slack\",\n zapier = \"zapier\",\n integromat = \"integromat\",\n}\n\nexport interface EmailInvite {\n startTime: Date\n endTime: Date\n summary: string\n location?: string\n url?: string\n}\n\nexport interface SendEmailOpts {\n // workspaceId If finer grain controls being used then this will lookup config for workspace.\n workspaceId?: string\n // user If sending to an existing user the object can be provided, this is used in the context.\n user: User\n // from If sending from an address that is not what is configured in the SMTP config.\n from?: string\n // contents If sending a custom email then can supply contents which will be added to it.\n contents?: string\n // subject A custom subject can be specified if the config one is not desired.\n subject?: string\n // info Pass in a structure of information to be stored alongside the invitation.\n info?: any\n cc?: boolean\n bcc?: boolean\n automation?: boolean\n invite?: EmailInvite\n}\n\nexport const AutomationStepIdArray = [\n ...Object.values(AutomationActionStepId),\n ...Object.values(AutomationTriggerStepId),\n]\n\nexport interface Automation extends Document {\n definition: {\n steps: AutomationStep[]\n trigger: AutomationTrigger\n }\n screenId?: string\n uiTree?: any\n appId: string\n live?: boolean\n name: string\n internal?: boolean\n type?: string\n}\n\ninterface BaseIOStructure {\n type?: AutomationIOType\n customType?: AutomationCustomIOType\n title?: string\n description?: string\n dependsOn?: string\n enum?: string[]\n pretty?: string[]\n properties?: {\n [key: string]: BaseIOStructure\n }\n required?: string[]\n}\n\ninterface InputOutputBlock {\n properties: {\n [key: string]: BaseIOStructure\n }\n required?: string[]\n}\n\nexport interface AutomationStepSchema {\n name: string\n stepTitle?: string\n tagline: string\n icon: string\n description: string\n type: AutomationStepType\n internal?: boolean\n deprecated?: boolean\n stepId: AutomationTriggerStepId | AutomationActionStepId\n blockToLoop?: string\n inputs: {\n [key: string]: any\n }\n schema: {\n inputs: InputOutputBlock\n outputs: InputOutputBlock\n }\n custom?: boolean\n features?: Partial<Record<AutomationFeature, boolean>>\n}\n\nexport enum AutomationFeature {\n LOOPING = 
\"LOOPING\",\n}\n\nexport interface AutomationStep extends AutomationStepSchema {\n id: string\n}\n\nexport interface AutomationTriggerSchema extends AutomationStepSchema {\n event?: string\n cronJobId?: string\n}\n\nexport interface AutomationTrigger extends AutomationTriggerSchema {\n id: string\n}\n\nexport enum AutomationStepStatus {\n NO_ITERATIONS = \"no_iterations\",\n}\n\nexport enum AutomationStatus {\n SUCCESS = \"success\",\n ERROR = \"error\",\n STOPPED = \"stopped\",\n STOPPED_ERROR = \"stopped_error\",\n}\n\nexport interface AutomationResults {\n automationId?: string\n status?: AutomationStatus\n trigger?: any\n steps: {\n stepId: AutomationTriggerStepId | AutomationActionStepId\n inputs: {\n [key: string]: any\n }\n outputs: {\n [key: string]: any\n }\n }[]\n}\n\nexport interface AutomationLog extends AutomationResults, Document {\n automationName: string\n _rev?: string\n}\n\nexport interface AutomationLogPage {\n data: AutomationLog[]\n hasNextPage: boolean\n nextPage?: string\n}\n\nexport type AutomationStepInput = {\n inputs: Record<string, any>\n context: Record<string, any>\n emitter: EventEmitter\n appId: string\n apiKey?: string\n}\n\nexport interface AutomationMetadata extends Document {\n errorCount?: number\n automationChainCount?: number\n}\n", "import { Document } from \"../document\"\nimport { SourceName } from \"../../sdk\"\nimport { Table } from \"./table\"\n\nexport interface Datasource extends Document {\n type: string\n name?: string\n source: SourceName\n // the config is defined by the schema\n config?: Record<string, any>\n plus?: boolean\n isSQL?: boolean\n entities?: {\n [key: string]: Table\n }\n}\n\nexport enum RestAuthType {\n BASIC = \"basic\",\n BEARER = \"bearer\",\n}\n\nexport interface RestBasicAuthConfig {\n username: string\n password: string\n}\n\nexport interface RestBearerAuthConfig {\n token: string\n}\n\nexport interface RestAuthConfig {\n _id: string\n name: string\n type: RestAuthType\n config: RestBasicAuthConfig | RestBearerAuthConfig\n}\n\nexport interface RestConfig {\n url: string\n rejectUnauthorized: boolean\n defaultHeaders: {\n [key: string]: any\n }\n legacyHttpParser: boolean\n authConfigs: RestAuthConfig[]\n staticVariables: {\n [key: string]: string\n }\n dynamicVariables: [\n {\n name: string\n queryId: string\n value: string\n }\n ]\n}\n", "import { Document } from \"../document\"\n\nexport interface Layout extends Document {\n props: any\n layoutId?: string\n}\n", "import { Document } from \"../document\"\n\nexport interface Query extends Document {\n datasourceId: string\n name: string\n parameters: QueryParameter[]\n fields: RestQueryFields | any\n transformer: string | null\n schema: Record<string, { name?: string; type: string }>\n readable: boolean\n queryVerb: string\n}\n\nexport interface QueryParameter {\n name: string\n default: string\n}\n\nexport interface RestQueryFields {\n path: string\n queryString?: string\n headers: { [key: string]: any }\n disabledHeaders: { [key: string]: any }\n requestBody: any\n bodyType: string\n json: object\n method: string\n authConfigId: string\n pagination: PaginationConfig | null\n paginationValues: PaginationValues | null\n}\n\nexport interface PaginationConfig {\n type: string\n location: string\n pageParam: string\n sizeParam: string | null\n responseParam: string | null\n}\n\nexport interface PaginationValues {\n page: string | number | null\n limit: number | null\n}\n\nexport interface PreviewQueryRequest extends Omit<Query, \"parameters\"> {\n parameters: {}\n 
flags?: {\n urlName?: boolean\n }\n}\n", "import { Document } from \"../document\"\n\nexport interface Role extends Document {\n permissionId: string\n inherits?: string\n permissions: { [key: string]: string[] }\n version?: string\n}\n", "import { Document } from \"../../document\"\nimport { View, ViewV2 } from \"../view\"\nimport { RenameColumn } from \"../../../sdk\"\nimport { TableSchema } from \"./schema\"\n\nexport const INTERNAL_TABLE_SOURCE_ID = \"bb_internal\"\n\nexport enum TableSourceType {\n EXTERNAL = \"external\",\n INTERNAL = \"internal\",\n}\n\nexport interface Table extends Document {\n type: \"table\"\n sourceType: TableSourceType\n views?: { [key: string]: View | ViewV2 }\n name: string\n sourceId: string\n primary?: string[]\n schema: TableSchema\n primaryDisplay?: string\n relatedFormula?: string[]\n constrained?: string[]\n sql?: boolean\n indexes?: { [key: string]: any }\n created?: boolean\n rowHeight?: number\n}\n\nexport interface TableRequest extends Table {\n _rename?: RenameColumn\n created?: boolean\n}\n", "import { Document } from \"../document\"\n\nexport enum FieldType {\n STRING = \"string\",\n LONGFORM = \"longform\",\n OPTIONS = \"options\",\n NUMBER = \"number\",\n BOOLEAN = \"boolean\",\n ARRAY = \"array\",\n DATETIME = \"datetime\",\n ATTACHMENT = \"attachment\",\n LINK = \"link\",\n FORMULA = \"formula\",\n AUTO = \"auto\",\n JSON = \"json\",\n INTERNAL = \"internal\",\n BARCODEQR = \"barcodeqr\",\n BIGINT = \"bigint\",\n BB_REFERENCE = \"bb_reference\",\n}\n\nexport interface RowAttachment {\n size: number\n name: string\n extension: string\n key: string\n // Populated on read\n url?: string\n}\n\nexport interface Row extends Document {\n type?: string\n tableId?: string\n _viewId?: string\n [key: string]: any\n}\n\nexport enum FieldSubtype {\n USER = \"user\",\n USERS = \"users\",\n}\n\nexport const FieldTypeSubtypes = {\n BB_REFERENCE: {\n USER: FieldSubtype.USER,\n USERS: FieldSubtype.USERS,\n },\n}\n", "export enum RelationshipType {\n ONE_TO_MANY = \"one-to-many\",\n MANY_TO_ONE = \"many-to-one\",\n MANY_TO_MANY = \"many-to-many\",\n}\n\nexport enum AutoReason {\n FOREIGN_KEY = \"foreign_key\",\n}\n\nexport enum AutoFieldSubTypes {\n CREATED_BY = \"createdBy\",\n CREATED_AT = \"createdAt\",\n UPDATED_BY = \"updatedBy\",\n UPDATED_AT = \"updatedAt\",\n AUTO_ID = \"autoID\",\n}\n\nexport enum FormulaTypes {\n STATIC = \"static\",\n DYNAMIC = \"dynamic\",\n}\n", "// all added by grid/table when defining the\n// column size, position and whether it can be viewed\nimport { FieldSubtype, FieldType } from \"../row\"\nimport {\n AutoFieldSubTypes,\n AutoReason,\n FormulaTypes,\n RelationshipType,\n} from \"./constants\"\n\nexport interface UIFieldMetadata {\n order?: number\n width?: number\n visible?: boolean\n icon?: string\n}\n\ninterface BaseRelationshipFieldMetadata\n extends Omit<BaseFieldSchema, \"subtype\"> {\n type: FieldType.LINK\n main?: boolean\n fieldName: string\n tableId: string\n tableRev?: string\n subtype?: AutoFieldSubTypes.CREATED_BY | AutoFieldSubTypes.UPDATED_BY\n}\n\n// External tables use junction tables, internal tables don't require them\ntype ManyToManyJunctionTableMetadata =\n | {\n through: string\n throughFrom: string\n throughTo: string\n }\n | {\n through?: never\n throughFrom?: never\n throughTo?: never\n }\n\nexport type ManyToManyRelationshipFieldMetadata =\n BaseRelationshipFieldMetadata & {\n relationshipType: RelationshipType.MANY_TO_MANY\n } & ManyToManyJunctionTableMetadata\n\nexport interface 
OneToManyRelationshipFieldMetadata\n extends BaseRelationshipFieldMetadata {\n relationshipType: RelationshipType.ONE_TO_MANY\n foreignKey?: string\n}\nexport interface ManyToOneRelationshipFieldMetadata\n extends BaseRelationshipFieldMetadata {\n relationshipType: RelationshipType.MANY_TO_ONE\n foreignKey?: string\n}\nexport type RelationshipFieldMetadata =\n | ManyToManyRelationshipFieldMetadata\n | OneToManyRelationshipFieldMetadata\n | ManyToOneRelationshipFieldMetadata\n\nexport interface AutoColumnFieldMetadata\n extends Omit<BaseFieldSchema, \"subtype\"> {\n type: FieldType.AUTO\n autocolumn: true\n subtype?: AutoFieldSubTypes\n lastID?: number\n // if the column was turned to an auto-column for SQL, explains why (primary, foreign etc)\n autoReason?: AutoReason\n}\n\nexport interface NumberFieldMetadata extends Omit<BaseFieldSchema, \"subtype\"> {\n type: FieldType.NUMBER\n subtype?: AutoFieldSubTypes.AUTO_ID\n lastID?: number\n autoReason?: AutoReason.FOREIGN_KEY\n // used specifically when Budibase generates external tables, this denotes if a number field\n // is a foreign key used for a many-to-many relationship\n meta?: {\n toTable: string\n toKey: string\n }\n}\n\nexport interface DateFieldMetadata extends Omit<BaseFieldSchema, \"subtype\"> {\n type: FieldType.DATETIME\n ignoreTimezones?: boolean\n timeOnly?: boolean\n subtype?: AutoFieldSubTypes.CREATED_AT | AutoFieldSubTypes.UPDATED_AT\n}\n\nexport interface LongFormFieldMetadata extends BaseFieldSchema {\n type: FieldType.LONGFORM\n useRichText?: boolean | null\n}\n\nexport interface FormulaFieldMetadata extends BaseFieldSchema {\n type: FieldType.FORMULA\n formula: string\n formulaType?: FormulaTypes\n}\n\nexport interface BBReferenceFieldMetadata\n extends Omit<BaseFieldSchema, \"subtype\"> {\n type: FieldType.BB_REFERENCE\n subtype: FieldSubtype.USER | FieldSubtype.USERS\n relationshipType?: RelationshipType\n}\n\nexport interface FieldConstraints {\n type?: string\n email?: boolean\n inclusion?: string[]\n length?: {\n minimum?: string | number | null\n maximum?: string | number | null\n }\n numericality?: {\n greaterThanOrEqualTo: string | null\n lessThanOrEqualTo: string | null\n }\n presence?:\n | boolean\n | {\n allowEmpty?: boolean\n }\n datetime?: {\n latest: string\n earliest: string\n }\n}\n\ninterface BaseFieldSchema extends UIFieldMetadata {\n type: FieldType\n name: string\n sortable?: boolean\n // only used by external databases, to denote the real type\n externalType?: string\n constraints?: FieldConstraints\n autocolumn?: boolean\n autoReason?: AutoReason.FOREIGN_KEY\n subtype?: never\n}\n\ninterface OtherFieldMetadata extends BaseFieldSchema {\n type: Exclude<\n FieldType,\n | FieldType.DATETIME\n | FieldType.LINK\n | FieldType.AUTO\n | FieldType.FORMULA\n | FieldType.NUMBER\n | FieldType.LONGFORM\n >\n}\n\nexport type FieldSchema =\n | OtherFieldMetadata\n | DateFieldMetadata\n | RelationshipFieldMetadata\n | AutoColumnFieldMetadata\n | FormulaFieldMetadata\n | NumberFieldMetadata\n | LongFormFieldMetadata\n | BBReferenceFieldMetadata\n\nexport interface TableSchema {\n [key: string]: FieldSchema\n}\n\nexport function isRelationshipField(\n field: FieldSchema\n): field is RelationshipFieldMetadata {\n return field.type === FieldType.LINK\n}\n\nexport function isManyToMany(\n field: RelationshipFieldMetadata\n): field is ManyToManyRelationshipFieldMetadata {\n return field.relationshipType === RelationshipType.MANY_TO_MANY\n}\n\nexport function isOneToMany(\n field: RelationshipFieldMetadata\n): field is 
OneToManyRelationshipFieldMetadata {\n return field.relationshipType === RelationshipType.ONE_TO_MANY\n}\n\nexport function isManyToOne(\n field: RelationshipFieldMetadata\n): field is ManyToOneRelationshipFieldMetadata {\n return field.relationshipType === RelationshipType.MANY_TO_ONE\n}\n\nexport function isBBReferenceField(\n field: FieldSchema\n): field is BBReferenceFieldMetadata {\n return field.type === FieldType.BB_REFERENCE\n}\n", "export * from \"./table\"\nexport * from \"./schema\"\nexport * from \"./constants\"\n", "import { Document } from \"../document\"\nimport { Component } from \"./component\"\n\nexport interface ScreenProps extends Component {\n size?: string\n gap?: string\n direction?: string\n vAlign?: string\n hAlign?: string\n}\n\nexport interface ScreenRouting {\n route: string\n roleId: string\n homeScreen?: boolean\n}\n\nexport interface Screen extends Document {\n layoutId?: string\n showNavigation?: boolean\n width?: string\n routing: ScreenRouting\n props: ScreenProps\n name?: string\n}\n", "import { SearchFilter, SortOrder, SortType } from \"../../api\"\nimport { UIFieldMetadata } from \"./table\"\nimport { Document } from \"../document\"\nimport { DBView } from \"../../sdk\"\n\nexport type ViewTemplateOpts = {\n field: string\n tableId: string\n groupBy: string\n filters: ViewFilter[]\n schema: any\n calculation: string\n groupByMulti?: boolean\n}\n\nexport interface InMemoryView extends Document {\n view: DBView\n name: string\n tableId: string\n groupBy?: string\n}\n\nexport interface View {\n name?: string\n tableId: string\n field?: string\n filters: ViewFilter[]\n schema: ViewSchema\n calculation?: ViewCalculation\n map?: string\n reduce?: any\n meta?: ViewTemplateOpts\n}\n\nexport interface ViewV2 {\n version: 2\n id: string\n name: string\n primaryDisplay?: string\n tableId: string\n query?: SearchFilter[]\n sort?: {\n field: string\n order?: SortOrder\n type?: SortType\n }\n schema?: Record<string, UIFieldMetadata>\n}\n\nexport type ViewSchema = ViewCountOrSumSchema | ViewStatisticsSchema\n\nexport interface ViewCountOrSumSchema {\n field: string\n value: string\n}\n\n/**\n e.g:\n \"min\": {\n \"type\": \"number\"\n },\n \"max\": {\n \"type\": \"number\"\n }\n */\nexport interface ViewStatisticsSchema {\n [key: string]: {\n type: string\n }\n}\n\nexport interface ViewFilter {\n value?: any\n condition: string\n key: string\n conjunction?: string\n}\n\nexport enum ViewCalculation {\n SUM = \"sum\",\n COUNT = \"count\",\n STATISTICS = \"stats\",\n}\n", "export const SEPARATOR = \"_\"\nexport const UNICODE_MAX = \"\\ufff0\"\n\nexport const prefixed = (type: DocumentType) => `${type}${SEPARATOR}`\n\nexport enum DocumentType {\n USER = \"us\",\n GROUP = \"gr\",\n WORKSPACE = \"workspace\",\n CONFIG = \"config\",\n TEMPLATE = \"template\",\n APP = \"app\",\n DEV = \"dev\",\n APP_DEV = \"app_dev\",\n APP_METADATA = \"app_metadata\",\n ROLE = \"role\",\n MIGRATIONS = \"migrations\",\n DEV_INFO = \"devinfo\",\n AUTOMATION_LOG = \"log_au\",\n ACCOUNT_METADATA = \"acc_metadata\",\n PLUGIN = \"plg\",\n DATASOURCE = \"datasource\",\n DATASOURCE_PLUS = \"datasource_plus\",\n APP_BACKUP = \"backup\",\n TABLE = \"ta\",\n ROW = \"ro\",\n AUTOMATION = \"au\",\n LINK = \"li\",\n WEBHOOK = \"wh\",\n INSTANCE = \"inst\",\n LAYOUT = \"layout\",\n SCREEN = \"screen\",\n QUERY = \"query\",\n DEPLOYMENTS = \"deployments\",\n METADATA = \"metadata\",\n MEM_VIEW = \"view\",\n USER_FLAG = \"flag\",\n AUTOMATION_METADATA = \"meta_au\",\n AUDIT_LOG = \"al\",\n 
APP_MIGRATION_METADATA = \"_design/migrations\",\n}\n\n// these are the core documents that make up the data, design\n// and automation sections of an app. This excludes any internal\n// rows as we shouldn't import data.\nexport const DocumentTypesToImport: DocumentType[] = [\n DocumentType.ROLE,\n DocumentType.DATASOURCE,\n DocumentType.DATASOURCE_PLUS,\n DocumentType.TABLE,\n DocumentType.AUTOMATION,\n DocumentType.WEBHOOK,\n DocumentType.SCREEN,\n DocumentType.QUERY,\n DocumentType.METADATA,\n DocumentType.MEM_VIEW,\n // Deprecated but still copied\n DocumentType.INSTANCE,\n DocumentType.LAYOUT,\n]\n\nexport enum InternalTable {\n USER_METADATA = \"ta_users\",\n}\n\n// these documents don't really exist, they are part of other\n// documents or enriched into existence as part of get requests\nexport enum VirtualDocumentType {\n VIEW = \"view\",\n}\n\nexport interface Document {\n _id?: string\n _rev?: string\n createdAt?: string | number\n updatedAt?: string\n}\n\nexport interface AnyDocument extends Document {\n [key: string]: any\n}\n", "import { User } from \"../global\"\nimport { Row } from \"./row\"\nimport { ContextUser } from \"../../sdk\"\n\nexport type UserMetadata = User & Row\nexport type ContextUserMetadata = ContextUser & Row\n", "import { Document } from \"../document\"\nimport { User } from \"../../\"\n\nexport enum AppBackupType {\n BACKUP = \"backup\",\n RESTORE = \"restore\",\n}\n\nexport enum AppBackupStatus {\n STARTED = \"started\",\n PENDING = \"pending\",\n COMPLETE = \"complete\",\n FAILED = \"failed\",\n}\n\nexport enum AppBackupTrigger {\n PUBLISH = \"publish\",\n MANUAL = \"manual\",\n SCHEDULED = \"scheduled\",\n RESTORING = \"restoring\",\n}\n\nexport interface AppBackupContents {\n datasources: string[]\n screens: string[]\n automations: string[]\n}\n\nexport interface AppBackupMetadata {\n appId: string\n trigger?: AppBackupTrigger\n type: AppBackupType\n status: AppBackupStatus\n name?: string\n createdBy?: string | User\n timestamp: string\n finishedAt?: string\n startedAt?: string\n contents?: AppBackupContents\n}\n\nexport interface AppBackup extends Document, AppBackupMetadata {\n _id: string\n filename?: string\n}\n\nexport type AppBackupFetchOpts = {\n trigger?: AppBackupTrigger\n type?: AppBackupType\n limit?: number\n page?: string\n paginate?: boolean\n startDate?: string\n endDate?: string\n}\n\nexport interface AppBackupQueueData {\n appId: string\n docId: string\n docRev: string\n export?: {\n trigger: AppBackupTrigger\n name?: string\n createdBy?: string\n }\n import?: {\n backupId: string\n nameForBackup: string\n createdBy?: string\n }\n}\n", "import { Document } from \"../document\"\n\nexport enum WebhookActionType {\n AUTOMATION = \"automation\",\n}\n\nexport interface Webhook extends Document {\n live: boolean\n name: string\n action: {\n type: WebhookActionType\n target: string\n }\n bodySchema?: any\n}\n", "import { Document } from \"../document\"\n\nexport interface LinkInfo {\n rowId: string\n fieldName: string\n tableId: string\n}\n\nexport interface LinkDocument extends Document {\n type: string\n doc1: LinkInfo\n doc2: LinkInfo\n}\n\nexport interface LinkDocumentValue {\n id: string\n thisId: string\n fieldName: string\n}\n", "import { Document } from \"../document\"\n\nexport interface Component extends Document {\n _instanceName: string\n _styles: { [key: string]: any }\n _component: string\n _children?: Component[]\n [key: string]: any\n}\n", "export * from \"./app\"\nexport * from \"./automation\"\nexport * from 
\"./datasource\"\nexport * from \"./layout\"\nexport * from \"./query\"\nexport * from \"./role\"\nexport * from \"./table\"\nexport * from \"./screen\"\nexport * from \"./view\"\nexport * from \"../document\"\nexport * from \"./row\"\nexport * from \"./user\"\nexport * from \"./backup\"\nexport * from \"./webhook\"\nexport * from \"./links\"\nexport * from \"./component\"\n", "import { Document } from \"../document\"\n\nexport interface Config<T = any> extends Document {\n type: ConfigType\n config: T\n}\n\nexport interface SMTPInnerConfig {\n port: number\n host: string\n from: string\n subject?: string\n secure: boolean\n auth?: {\n user: string\n pass: string\n }\n connectionTimeout?: any\n}\n\nexport interface SMTPConfig extends Config<SMTPInnerConfig> {}\n\n/**\n * Accessible only via pro.\n */\nexport interface SettingsBrandingConfig {\n faviconUrl?: string\n faviconUrlEtag?: string\n\n emailBrandingEnabled?: boolean\n testimonialsEnabled?: boolean\n platformTitle?: string\n loginHeading?: string\n loginButton?: string\n\n metaDescription?: string\n metaImageUrl?: string\n metaTitle?: string\n}\n\nexport interface SettingsInnerConfig {\n platformUrl?: string\n company?: string\n logoUrl?: string // Populated on read\n logoUrlEtag?: string\n uniqueTenantId?: string\n analyticsEnabled?: boolean\n isSSOEnforced?: boolean\n}\n\nexport interface SettingsConfig extends Config<SettingsInnerConfig> {}\n\nexport type SSOConfigType = ConfigType.GOOGLE | ConfigType.OIDC\nexport type SSOConfig = GoogleInnerConfig | OIDCInnerConfig\n\nexport interface GoogleInnerConfig {\n clientID: string\n clientSecret: string\n activated: boolean\n /**\n * @deprecated read only\n */\n callbackURL?: string\n}\n\nexport interface GoogleConfig extends Config<GoogleInnerConfig> {}\n\nexport interface OIDCStrategyConfiguration {\n issuer: string\n authorizationURL: string\n tokenURL: string\n userInfoURL: string\n clientID: string\n clientSecret: string\n callbackURL: string\n}\n\nexport interface OIDCConfigs {\n configs: OIDCInnerConfig[]\n}\n\nexport interface OIDCLogosInnerConfig {\n [key: string]: string\n}\n\nexport interface OIDCLogosConfig extends Config<OIDCLogosInnerConfig> {}\n\nexport interface OIDCInnerConfig {\n configUrl: string\n clientID: string\n clientSecret: string\n logo: string\n name: string\n uuid: string\n activated: boolean\n scopes: string[]\n}\n\nexport interface OIDCConfig extends Config<OIDCConfigs> {}\n\nexport interface OIDCWellKnownConfig {\n issuer: string\n authorization_endpoint: string\n token_endpoint: string\n userinfo_endpoint: string\n}\n\nexport interface SCIMInnerConfig {\n enabled: boolean\n}\n\nexport interface SCIMConfig extends Config<SCIMInnerConfig> {}\n\nexport const isSettingsConfig = (config: Config): config is SettingsConfig =>\n config.type === ConfigType.SETTINGS\n\nexport const isSMTPConfig = (config: Config): config is SMTPConfig =>\n config.type === ConfigType.SMTP\n\nexport const isGoogleConfig = (config: Config): config is GoogleConfig =>\n config.type === ConfigType.GOOGLE\n\nexport const isOIDCConfig = (config: Config): config is OIDCConfig =>\n config.type === ConfigType.OIDC\n\nexport const isSCIMConfig = (config: Config): config is SCIMConfig =>\n config.type === ConfigType.SCIM\n\nexport enum ConfigType {\n SETTINGS = \"settings\",\n ACCOUNT = \"account\",\n SMTP = \"smtp\",\n GOOGLE = \"google\",\n OIDC = \"oidc\",\n OIDC_LOGOS = \"logos_oidc\",\n SCIM = \"scim\",\n}\n", "import { Document } from \"../document\"\n\n// SSO\n\nexport interface 
SSOProfileJson {\n email?: string\n picture?: string\n}\n\nexport interface OAuth2 {\n accessToken: string\n refreshToken?: string\n}\n\nexport enum SSOProviderType {\n OIDC = \"oidc\",\n GOOGLE = \"google\",\n}\n\nexport interface UserSSO {\n provider: string // the individual provider e.g. Okta, Auth0, Google\n providerType: SSOProviderType\n oauth2?: OAuth2\n thirdPartyProfile?: SSOProfileJson\n}\n\nexport type SSOUser = User & UserSSO\n\nexport function isSSOUser(user: User): user is SSOUser {\n return !!(user as SSOUser).providerType\n}\n\n// USER\n\nexport interface User extends Document {\n tenantId: string\n email: string\n userId?: string\n firstName?: string\n lastName?: string\n pictureUrl?: string\n forceResetPassword?: boolean\n roles: UserRoles\n builder?: {\n global?: boolean\n apps?: string[]\n creator?: boolean\n }\n admin?: {\n global: boolean\n }\n password?: string\n status?: UserStatus\n createdAt?: number // override the default createdAt behaviour - users sdk historically set this to Date.now()\n dayPassRecordedAt?: string\n userGroups?: string[]\n onboardedAt?: string\n scimInfo?: { isSync: true } & Record<string, any>\n ssoId?: string\n}\n\nexport enum UserStatus {\n ACTIVE = \"active\",\n INACTIVE = \"inactive\",\n}\n\nexport interface UserRoles {\n [key: string]: string\n}\n\n// UTILITY TYPES\n\nexport interface BuilderUser extends User {\n builder: {\n global?: boolean\n apps?: string[]\n }\n}\n\nexport interface AdminUser extends User {\n admin: {\n global: boolean\n }\n builder: {\n global: boolean\n }\n}\n\nexport interface AdminOnlyUser extends User {\n admin: {\n global: boolean\n }\n}\n\nexport function isUser(user: object): user is User {\n return !!(user as User).roles\n}\n", "import { PaginationResponse } from \"../../api\"\nimport { Document } from \"../document\"\n\nexport interface UserGroup extends Document {\n name: string\n icon: string\n color: string\n users?: GroupUser[]\n roles?: UserGroupRoles\n // same structure as users\n builder?: {\n apps: string[]\n }\n createdAt?: number\n scimInfo?: {\n externalId: string\n isSync: boolean\n }\n}\n\nexport interface GroupUser {\n _id: string\n email: string\n}\n\nexport interface UserGroupRoles {\n [key: string]: string\n}\n\nexport interface SearchGroupRequest {}\nexport interface SearchGroupResponse {\n data: UserGroup[]\n}\n\nexport interface SearchUserGroupResponse extends PaginationResponse {\n users: {\n _id: any\n email: any\n }[]\n}\n", "import { Document } from \"../document\"\n\nexport enum PluginType {\n DATASOURCE = \"datasource\",\n COMPONENT = \"component\",\n AUTOMATION = \"automation\",\n}\n\nexport enum PluginSource {\n NPM = \"NPM\",\n GITHUB = \"Github\",\n URL = \"URL\",\n FILE = \"File Upload\",\n}\nexport interface FileType {\n path: string\n name: string\n}\n\nexport interface Plugin extends Document {\n description: string\n name: string\n version: string\n source: PluginSource\n package: { [key: string]: any }\n hash: string\n schema: {\n type: PluginType\n [key: string]: any\n }\n iconFileName?: string\n // Populated on read\n jsUrl?: string\n // Populated on read\n iconUrl?: string\n}\n\nexport const PLUGIN_TYPE_ARR = Object.values(PluginType)\n", "import { MonthlyQuotaName, StaticQuotaName } from \"../../sdk\"\n\nexport enum BreakdownQuotaName {\n ROW_QUERIES = \"rowQueries\",\n DATASOURCE_QUERIES = \"datasourceQueries\",\n AUTOMATIONS = \"automations\",\n}\n\nexport const APP_QUOTA_NAMES = [\n StaticQuotaName.ROWS,\n MonthlyQuotaName.QUERIES,\n 
MonthlyQuotaName.AUTOMATIONS,\n]\n\nexport const BREAKDOWN_QUOTA_NAMES = [\n MonthlyQuotaName.QUERIES,\n MonthlyQuotaName.AUTOMATIONS,\n]\n\nexport interface UsageBreakdown {\n parent: MonthlyQuotaName\n values: {\n [key: string]: number\n }\n}\n\nexport type QuotaTriggers = {\n [key: string]: string | undefined\n}\n\nexport interface StaticUsage {\n [StaticQuotaName.APPS]: number\n [StaticQuotaName.PLUGINS]: number\n [StaticQuotaName.USERS]: number\n [StaticQuotaName.CREATORS]: number\n [StaticQuotaName.USER_GROUPS]: number\n [StaticQuotaName.ROWS]: number\n triggers: {\n [key in StaticQuotaName]?: QuotaTriggers\n }\n}\n\nexport interface MonthlyUsage {\n [MonthlyQuotaName.QUERIES]: number\n [MonthlyQuotaName.AUTOMATIONS]: number\n [MonthlyQuotaName.DAY_PASSES]: number\n triggers: {\n [key in MonthlyQuotaName]?: QuotaTriggers\n }\n breakdown?: {\n [key in BreakdownQuotaName]?: UsageBreakdown\n }\n}\n\nexport interface BaseQuotaUsage {\n usageQuota: StaticUsage\n monthly: {\n [key: string]: MonthlyUsage\n }\n}\n\nexport interface QuotaUsage extends BaseQuotaUsage {\n _id: string\n _rev?: string\n quotaReset: string\n apps?: {\n [key: string]: BaseQuotaUsage\n }\n}\n\nexport type SetUsageValues = {\n total: number\n app?: number\n breakdown?: number\n triggers?: QuotaTriggers\n}\n\nexport type UsageValues = {\n total: number\n app?: number\n breakdown?: number\n}\n", "import { Document } from \"../document\"\n\nexport enum ScheduleType {\n APP_BACKUP = \"app_backup\",\n}\n\nexport enum ScheduleRepeatPeriod {\n DAILY = \"daily\",\n WEEKLY = \"weekly\",\n MONTHLY = \"monthly\",\n}\n\nexport interface Schedule extends Document {\n type: ScheduleType\n name: string\n startDate: string\n repeat: ScheduleRepeatPeriod\n metadata: ScheduleMetadata\n}\n\nexport type ScheduleMetadata = AppBackupScheduleMetadata\n\nexport const isAppBackupMetadata = (\n type: ScheduleType,\n metadata: ScheduleMetadata\n): metadata is AppBackupScheduleMetadata => {\n return type === ScheduleType.APP_BACKUP\n}\n\nexport interface AppBackupScheduleMetadata {\n apps: string[]\n}\n", "import { Document } from \"../document\"\n\nexport interface Template extends Document {\n ownerId?: string\n name?: string\n contents: string\n purpose: string\n type?: string\n}\n", "import { Document } from \"../document\"\n\nexport interface EnvironmentVariablesDoc extends Document {\n variables: string\n}\n\nexport type EnvironmentVariableValue = {\n production: string\n development: string\n}\n\n// what comes out of the \"variables\" when it is decrypted\nexport type EnvironmentVariablesDecrypted = Record<\n string,\n EnvironmentVariableValue\n>\n\nexport interface EnvironmentVariablesDocDecrypted extends Document {\n variables: EnvironmentVariablesDecrypted\n}\n", "import { Document } from \"../document\"\nimport { Event } from \"../../sdk\"\n\nexport const AuditLogSystemUser = \"SYSTEM\"\n\nexport type FallbackInfo = {\n appName?: string\n email?: string\n}\n\nexport interface AuditLogDoc extends Document {\n appId?: string\n event: Event\n userId: string\n timestamp: string\n metadata: any\n name: string\n fallback?: FallbackInfo\n}\n", "export * from \"./config\"\nexport * from \"./user\"\nexport * from \"./userGroup\"\nexport * from \"./plugin\"\nexport * from \"./quotas\"\nexport * from \"./schedule\"\nexport * from \"./templates\"\nexport * from \"./environmentVariables\"\nexport * from \"./auditLogs\"\n", "import { Document } from \"../document\"\n\nexport interface GlobalInfo {}\n\nexport interface Installation extends Document 
{\n _id: string\n installId: string\n version: string\n}\n", "import { Document } from \"../document\"\n\n/**\n * doc id is user email\n */\nexport interface PlatformUserByEmail extends Document {\n tenantId: string\n userId: string\n}\n\n/**\n * doc id is userId\n */\nexport interface PlatformUserById extends Document {\n tenantId: string\n}\n\n/**\n * doc id is a unique SSO provider ID for the user\n */\nexport interface PlatformUserBySsoId extends Document {\n tenantId: string\n userId: string\n email: string\n}\n\nexport type PlatformUser =\n | PlatformUserByEmail\n | PlatformUserById\n | PlatformUserBySsoId\n", "import { Document } from \"../document\"\n\nexport interface AccountMetadata extends Document {\n email: string\n}\n", "import { Document } from \"../document\"\n\nexport interface Tenants extends Document {\n tenantIds: string[]\n}\n", "export * from \"./info\"\nexport * from \"./users\"\nexport * from \"./accounts\"\nexport * from \"./tenants\"\n", "import { Document } from \"../\"\n\nexport interface RowValue {\n rev: string\n deleted: boolean\n}\n\nexport interface RowResponse<T extends Document> {\n id: string\n key: string\n error: string\n value: T | RowValue\n doc?: T\n}\n\nexport interface AllDocsResponse<T extends Document> {\n offset: number\n total_rows: number\n rows: RowResponse<T>[]\n}\n\nexport type BulkDocsResponse = BulkDocResponse[]\n\ninterface BulkDocResponse {\n ok: boolean\n id: string\n rev: string\n}\n\nexport interface PutResponse {\n ok: boolean\n id: string\n rev: string\n}\n", "export * from \"./account\"\nexport * from \"./app\"\nexport * from \"./global\"\nexport * from \"./platform\"\nexport * from \"./document\"\nexport * from \"./pouch\"\n", "import { Account, AccountSSOProvider } from \"../../documents\"\nimport { Hosting } from \"../../sdk\"\n\nexport interface CreateAccountRequest {\n email: string\n tenantId: string\n hosting: Hosting\n size: string\n profession: string\n // optional fields\n tenantName?: string\n name?: string\n password: string\n provider?: AccountSSOProvider\n thirdPartyProfile: object\n}\n\nexport interface SearchAccountsRequest {\n // one or the other - not both\n email?: string\n tenantId?: string\n}\n\nexport type SearchAccountsResponse = Account[]\n", "export interface PostAccountUserActivity {\n timestamp: number\n}\n\nexport interface PostAccountUserActivityResponse {\n userId: string\n timestamp: number\n}\n", "import { LicenseOverrides, QuotaUsage } from \"../../documents\"\nimport { OfflineLicense, PlanType } from \"../../sdk\"\nimport { ISO8601 } from \"../../shared\"\n\nexport interface GetLicenseRequest {\n // All fields should be optional to cater for\n // historical versions of budibase\n quotaUsage?: QuotaUsage\n install: {\n id: string\n tenantId: string\n version: string\n }\n}\n\nexport interface QuotaTriggeredRequest {\n percentage: number\n name: string\n resetDate?: string\n}\n\nexport interface LicenseActivateRequest {\n installVersion?: string\n}\n\nexport interface UpdateLicenseRequest {\n planType?: PlanType\n overrides?: LicenseOverrides\n}\n\nexport interface CreateOfflineLicenseRequest {\n installationIdentifierBase64: string\n expireAt: ISO8601\n}\n\nexport interface GetOfflineLicenseResponse {\n offlineLicenseToken: string\n license: OfflineLicense\n}\n", "export interface HealthStatusResponse {\n passing: boolean\n checks: {\n login: boolean\n search: boolean\n }\n}\n", "export * from \"./accounts\"\nexport * from \"./user\"\nexport * from \"./license\"\nexport * from \"./status\"\n", 
"export enum PingSource {\n BUILDER = \"builder\",\n APP = \"app\",\n}\n\nexport interface AnalyticsPingRequest {\n source: PingSource\n timezone: string\n embedded?: boolean\n}\n", "export interface LoginRequest {\n username: string\n password: string\n}\n\nexport interface PasswordResetRequest {\n email: string\n}\n\nexport interface PasswordResetUpdateRequest {\n resetCode: string\n password: string\n}\n\nexport interface UpdateSelfRequest {\n firstName?: string\n lastName?: string\n password?: string\n forceResetPassword?: boolean\n onboardedAt?: string\n}\n\nexport interface UpdateSelfResponse {\n _id: string\n _rev: string\n}\n", "import { User } from \"../../documents\"\nimport { SearchQuery } from \"./searchFilter\"\n\nexport interface SaveUserResponse {\n _id: string\n _rev: string\n email: string\n}\n\nexport interface UserDetails {\n _id: string\n email: string\n password?: string\n}\n\nexport interface BulkUserRequest {\n delete?: {\n userIds: string[]\n }\n create?: {\n roles?: any[]\n users: User[]\n groups: any[]\n }\n}\n\nexport interface BulkUserCreated {\n successful: UserDetails[]\n unsuccessful: { email: string; reason: string }[]\n}\n\nexport interface BulkUserDeleted {\n successful: UserDetails[]\n unsuccessful: { _id: string; email: string; reason: string }[]\n}\n\nexport interface BulkUserResponse {\n created?: BulkUserCreated\n deleted?: BulkUserDeleted\n message?: string\n}\n\nexport interface InviteUserRequest {\n email: string\n userInfo: any\n}\n\nexport type InviteUsersRequest = InviteUserRequest[]\n\nexport interface InviteUsersResponse {\n successful: { email: string }[]\n unsuccessful: { email: string; reason: string }[]\n created?: boolean\n}\n\nexport interface SearchUsersRequest {\n bookmark?: string\n query?: SearchQuery\n appId?: string\n limit?: number\n paginate?: boolean\n}\n\nexport interface CreateAdminUserRequest {\n email: string\n password: string\n tenantId: string\n ssoId?: string\n}\n\nexport interface CreateAdminUserResponse {\n _id: string\n _rev: string\n email: string\n}\n\nexport interface AcceptUserInviteRequest {\n inviteCode: string\n password: string\n firstName: string\n lastName: string\n}\n\nexport interface AcceptUserInviteResponse {\n _id: string\n _rev: string\n email: string\n}\n\nexport interface SyncUserRequest {\n previousUser?: User\n}\n", "export interface APIError {\n message: string\n status: number\n error?: any\n validationErrors?: any\n}\n", "export interface GetDiagnosticsResponse {\n budibaseVersion: string\n hosting: string\n nodeVersion: string\n platform: string\n cpuArch: string\n cpuCores: number\n cpuInfo: string\n totalMemory: string\n uptime: string\n}\n", "import {\n ScheduleMetadata,\n ScheduleRepeatPeriod,\n ScheduleType,\n} from \"../../documents\"\n\nexport interface CreateScheduleRequest {\n type: ScheduleType\n name: string\n startDate: string\n repeat: ScheduleRepeatPeriod\n metadata: ScheduleMetadata\n}\n\nexport interface UpdateScheduleRequest extends CreateScheduleRequest {}\n", "export interface GetEnvironmentResponse {\n multiTenancy: boolean\n cloud: boolean\n accountPortalUrl: string\n baseUrl: string\n disableAccountPortal: boolean\n isDev: boolean\n}\n", "export * from \"./environment\"\n", "import { AppBackupTrigger, AppBackupType } from \"../../../documents\"\n\nexport interface SearchAppBackupsRequest {\n trigger: AppBackupTrigger\n type: AppBackupType\n startDate: string\n endDate: string\n page?: string\n}\n\nexport interface CreateAppBackupRequest {\n name: string\n}\n\nexport 
interface CreateAppBackupResponse {\n backupId: string\n message: string\n}\n\nexport interface UpdateAppBackupRequest {\n name: string\n}\n\nexport interface ImportAppBackupResponse {\n restoreId: string\n message: string\n}\n", "import { Datasource } from \"../../../documents\"\n\nexport interface CreateDatasourceResponse {\n datasource: Datasource\n errors: Record<string, string>\n}\n\nexport interface UpdateDatasourceResponse {\n datasource: Datasource\n}\n\nexport interface CreateDatasourceRequest {\n datasource: Datasource\n fetchSchema?: boolean\n tablesFilter: string[]\n}\n\nexport interface VerifyDatasourceRequest {\n datasource: Datasource\n}\n\nexport interface VerifyDatasourceResponse {\n connected: boolean\n error?: string\n}\n\nexport interface FetchDatasourceInfoRequest {\n datasource: Datasource\n}\n\nexport interface FetchDatasourceInfoResponse {\n tableNames: string[]\n}\n\nexport interface UpdateDatasourceRequest extends Datasource {\n datasource: Datasource\n}\n\nexport interface BuildSchemaFromSourceRequest {\n tablesFilter?: string[]\n}\n\nexport interface BuildSchemaFromSourceResponse {\n datasource: Datasource\n errors: Record<string, string>\n}\n", "import { Row } from \"../../../documents/app/row\"\n\nexport interface GetRowResponse extends Row {}\n\nexport interface DeleteRows {\n rows: (Row | string)[]\n}\n\nexport interface DeleteRow {\n _id: string\n}\n\nexport type DeleteRowRequest = DeleteRows | DeleteRow\n\nexport interface ValidateResponse {\n valid: boolean\n errors: Record<string, any>\n}\n", "import { ViewV2, UIFieldMetadata } from \"../../../documents\"\n\nexport interface ViewResponse {\n data: ViewV2\n}\n\nexport interface CreateViewRequest\n extends Omit<ViewV2, \"version\" | \"id\" | \"schema\"> {\n schema?: Record<string, UIFieldMetadata>\n}\n\nexport interface UpdateViewRequest extends Omit<ViewV2, \"schema\"> {\n schema?: Record<string, UIFieldMetadata>\n}\n", "import { SearchFilters, SearchParams } from \"../../../sdk\"\nimport { Row } from \"../../../documents\"\nimport { SortOrder } from \"../../../api\"\nimport { ReadStream } from \"fs\"\n\nexport interface SaveRowRequest extends Row {}\n\nexport interface PatchRowRequest extends Row {\n _id: string\n _rev: string\n tableId: string\n}\n\nexport interface PatchRowResponse extends Row {}\n\nexport interface SearchRowRequest extends Omit<SearchParams, \"tableId\"> {}\n\nexport interface SearchViewRowRequest\n extends Pick<\n SearchRowRequest,\n | \"sort\"\n | \"sortOrder\"\n | \"sortType\"\n | \"limit\"\n | \"bookmark\"\n | \"paginate\"\n | \"query\"\n > {}\n\nexport interface SearchRowResponse {\n rows: any[]\n}\n\nexport interface ExportRowsRequest {\n rows: string[]\n columns?: string[]\n query?: SearchFilters\n sort?: string\n sortOrder?: SortOrder\n}\n\nexport type ExportRowsResponse = ReadStream\n", "import {\n FieldSchema,\n Row,\n Table,\n TableRequest,\n TableSchema,\n View,\n ViewV2,\n} from \"../../../documents\"\n\ninterface ViewV2Response extends ViewV2 {\n schema: TableSchema\n}\n\nexport type TableViewsResponse = { [key: string]: View | ViewV2Response }\n\nexport interface TableResponse extends Table {\n views?: TableViewsResponse\n}\n\nexport type FetchTablesResponse = TableResponse[]\n\nexport interface SaveTableRequest extends TableRequest {\n rows?: Row[]\n}\n\nexport type SaveTableResponse = Table\n\nexport interface BulkImportRequest {\n rows: Row[]\n identifierFields?: Array<string>\n}\n\nexport interface BulkImportResponse {\n message: string\n}\n\nexport interface 
MigrateRequest {\n oldColumn: FieldSchema\n newColumn: FieldSchema\n}\n\nexport interface MigrateResponse {\n message: string\n}\n", "import { PlanType } from \"../../../sdk\"\n\nexport interface ResourcePermissionInfo {\n role: string\n permissionType: string\n inheritablePermission?: string\n}\n\nexport interface GetResourcePermsResponse {\n permissions: Record<string, ResourcePermissionInfo>\n requiresPlanToModify?: PlanType\n}\n\nexport interface GetDependantResourcesResponse {\n resourceByType?: Record<string, number>\n}\n", "export interface Upload {\n size: number\n name: string\n url: string\n extension: string\n key: string\n}\n\nexport type ProcessAttachmentResponse = Upload[]\n", "import { ContextUserMetadata } from \"../../../\"\n\nexport type FetchUserMetadataResponse = ContextUserMetadata[]\nexport type FindUserMetadataResponse = ContextUserMetadata\n\nexport interface SetFlagRequest {\n flag: string\n value: any\n}\n", "export * from \"./backup\"\nexport * from \"./datasource\"\nexport * from \"./row\"\nexport * from \"./view\"\nexport * from \"./rows\"\nexport * from \"./table\"\nexport * from \"./permission\"\nexport * from \"./attachment\"\nexport * from \"./user\"\n", "export interface StatusEnvironmentVariableResponse {\n encryptionKeyAvailable: boolean\n}\n\nexport interface CreateEnvironmentVariableRequest {\n name: string\n production: string\n development: string\n}\n\nexport interface UpdateEnvironmentVariableRequest {\n production: string\n development: string\n}\n\nexport interface GetEnvironmentVariablesResponse {\n variables: string[]\n}\n", "import { Event, AuditedEventFriendlyName } from \"../../../sdk\"\nimport {\n PaginationResponse,\n PaginationRequest,\n BasicPaginationRequest,\n} from \"../\"\nimport { User, App } from \"../../../\"\n\nexport interface AuditLogSearchParams {\n userIds?: string[]\n appIds?: string[]\n events?: Event[]\n startDate?: string\n endDate?: string\n fullSearch?: string\n bookmark?: string\n}\n\nexport interface DownloadAuditLogsRequest extends AuditLogSearchParams {}\n\nexport interface SearchAuditLogsRequest\n extends BasicPaginationRequest,\n AuditLogSearchParams {}\n\nexport enum AuditLogResourceStatus {\n DELETED = \"deleted\",\n}\n\nexport type DeletedResourceInfo = {\n _id: string\n status: AuditLogResourceStatus\n email?: string\n name?: string\n}\n\nexport interface AuditLogEnriched {\n app?: App | DeletedResourceInfo\n user: User | DeletedResourceInfo\n event: Event\n timestamp: string\n name: string\n metadata: any\n}\n\nexport interface SearchAuditLogsResponse extends PaginationResponse {\n data: AuditLogEnriched[]\n}\n\nexport interface DefinitionsAuditLogsResponse {\n events: Record<string, string>\n}\n", "export enum EventPublishType {\n ENVIRONMENT_VARIABLE_UPGRADE_PANEL_OPENED = \"environment_variable_upgrade_panel_opened\",\n}\n\nexport interface PostEventPublishRequest {\n type: EventPublishType\n}\n", "import { SettingsConfig, SettingsInnerConfig } from \"../../../documents\"\n\n/**\n * Settings that aren't stored in the database - enriched at runtime.\n */\nexport interface PublicSettingsInnerConfig extends SettingsInnerConfig {\n google: boolean\n googleDatasourceConfigured: boolean\n oidc: boolean\n oidcCallbackUrl: string\n googleCallbackUrl: string\n}\n\nexport interface GetPublicSettingsResponse extends SettingsConfig {\n config: PublicSettingsInnerConfig\n}\n\nexport interface PublicOIDCConfig {\n logo?: string\n name?: string\n uuid?: string\n}\n\nexport type GetPublicOIDCConfigResponse = 
PublicOIDCConfig[]\n", "import { ScimResource, ScimMeta } from \"scim-patch\"\nimport { ScimListResponse } from \"./shared\"\n\ntype BooleanString = boolean | \"True\" | \"true\" | \"False\" | \"false\"\n\ntype Emails =\n | {\n value: string\n type: \"work\"\n primary: boolean\n }[]\n\nexport interface ScimUserResponse extends ScimResource {\n schemas: [\"urn:ietf:params:scim:schemas:core:2.0:User\"]\n id: string\n externalId: string\n meta: ScimMeta & {\n resourceType: \"User\"\n }\n userName: string\n displayName?: string\n name?: {\n formatted?: string\n familyName?: string\n givenName?: string\n }\n active: BooleanString\n emails?: Emails\n}\n\nexport interface ScimCreateUserRequest {\n schemas: [\n \"urn:ietf:params:scim:schemas:core:2.0:User\",\n \"urn:ietf:params:scim:schemas:extension:enterprise:2.0:User\"\n ]\n externalId: string\n userName: string\n active: BooleanString\n emails?: Emails\n meta: {\n resourceType: \"User\"\n }\n displayName?: string\n name?: {\n formatted: string\n familyName: string\n givenName: string\n }\n roles: []\n}\n\nexport interface ScimUserListResponse\n extends ScimListResponse<ScimUserResponse> {}\n", "import { ScimResource, ScimMeta } from \"scim-patch\"\nimport { ScimListResponse } from \"./shared\"\n\nexport interface ScimGroupResponse extends ScimResource {\n schemas: [\"urn:ietf:params:scim:schemas:core:2.0:Group\"]\n id: string\n externalId: string\n displayName: string\n meta: ScimMeta & {\n resourceType: \"Group\"\n }\n members?: {\n value: string\n }[]\n}\n\nexport interface ScimCreateGroupRequest {\n schemas: [\n \"urn:ietf:params:scim:schemas:core:2.0:Group\",\n \"http://schemas.microsoft.com/2006/11/ResourceManagement/ADSCIM/2.0/Group\"\n ]\n externalId: string\n displayName: string\n meta: ScimMeta & {\n resourceType: \"Group\"\n }\n}\n\nexport interface ScimGroupListResponse\n extends ScimListResponse<ScimGroupResponse> {}\n", "import { ScimPatchOperation } from \"scim-patch\"\n\nexport interface ScimListResponse<T> {\n schemas: [\"urn:ietf:params:scim:api:messages:2.0:ListResponse\"]\n totalResults: number\n Resources: T[]\n startIndex: number\n itemsPerPage: number\n}\n\nexport interface ScimUpdateRequest {\n schemas: [\"urn:ietf:params:scim:api:messages:2.0:PatchOp\"]\n Operations: ScimPatchOperation[]\n}\n", "export * from \"./users\"\nexport * from \"./groups\"\nexport * from \"./shared\"\n", "// LICENSE KEY\n\nexport interface ActivateLicenseKeyRequest {\n licenseKey: string\n}\n\nexport interface GetLicenseKeyResponse {\n licenseKey: string\n}\n\n// OFFLINE LICENSE\n\nexport interface ActivateOfflineLicenseTokenRequest {\n offlineLicenseToken: string\n}\n\nexport interface GetOfflineLicenseTokenResponse {\n offlineLicenseToken: string\n}\n\n// IDENTIFIER\n\nexport interface GetOfflineIdentifierResponse {\n identifierBase64: string\n}\n", "export * from \"./environmentVariables\"\nexport * from \"./auditLogs\"\nexport * from \"./events\"\nexport * from \"./configs\"\nexport * from \"./scim\"\nexport * from \"./license\"\n", "export enum SortOrder {\n ASCENDING = \"ascending\",\n DESCENDING = \"descending\",\n}\n\nexport enum SortType {\n STRING = \"string\",\n number = \"number\",\n}\n\nexport interface BasicPaginationRequest {\n bookmark?: string\n}\n\nexport interface PaginationRequest extends BasicPaginationRequest {\n limit?: number\n sort?: {\n order: SortOrder\n column: string\n type: SortType\n }\n}\n\nexport interface PaginationResponse {\n bookmark: string | undefined\n hasNextPage: boolean\n}\n", "import { FieldType } 
from \"../../documents\"\nimport { EmptyFilterOption } from \"../../sdk\"\n\nexport type SearchFilter = {\n operator: keyof SearchQuery\n onEmptyFilter?: EmptyFilterOption\n field: string\n type?: FieldType\n value: any\n externalType?: string\n}\n\nexport enum SearchQueryOperators {\n STRING = \"string\",\n FUZZY = \"fuzzy\",\n RANGE = \"range\",\n EQUAL = \"equal\",\n NOT_EQUAL = \"notEqual\",\n EMPTY = \"empty\",\n NOT_EMPTY = \"notEmpty\",\n ONE_OF = \"oneOf\",\n CONTAINS = \"contains\",\n NOT_CONTAINS = \"notContains\",\n CONTAINS_ANY = \"containsAny\",\n}\n\nexport type SearchQuery = {\n allOr?: boolean\n onEmptyFilter?: EmptyFilterOption\n [SearchQueryOperators.STRING]?: {\n [key: string]: string\n }\n [SearchQueryOperators.FUZZY]?: {\n [key: string]: string\n }\n [SearchQueryOperators.RANGE]?: {\n [key: string]: {\n high: number | string\n low: number | string\n }\n }\n [SearchQueryOperators.EQUAL]?: {\n [key: string]: any\n }\n [SearchQueryOperators.NOT_EQUAL]?: {\n [key: string]: any\n }\n [SearchQueryOperators.EMPTY]?: {\n [key: string]: any\n }\n [SearchQueryOperators.NOT_EMPTY]?: {\n [key: string]: any\n }\n [SearchQueryOperators.ONE_OF]?: {\n [key: string]: any[]\n }\n [SearchQueryOperators.CONTAINS]?: {\n [key: string]: any[]\n }\n [SearchQueryOperators.NOT_CONTAINS]?: {\n [key: string]: any[]\n }\n [SearchQueryOperators.CONTAINS_ANY]?: {\n [key: string]: any[]\n }\n}\n\nexport type SearchQueryFields = Omit<SearchQuery, \"allOr\" | \"onEmptyFilter\">\n", "export interface DatasourceAuthCookie {\n appId: string\n provider: string\n}\n\nexport interface SessionCookie {\n sessionId: string\n userId: string\n}\n", "export * from \"./analytics\"\nexport * from \"./auth\"\nexport * from \"./user\"\nexport * from \"./errors\"\nexport * from \"./debug\"\nexport * from \"./schedule\"\nexport * from \"./system\"\nexport * from \"./app\"\nexport * from \"./global\"\nexport * from \"./pagination\"\nexport * from \"./searchFilter\"\nexport * from \"./cookies\"\n", "export * from \"./account\"\nexport * from \"./web\"\n", "export enum ServiceType {\n WORKER = \"worker\",\n APPS = \"apps\",\n}\n", "export * from \"./installation\"\n", "export type DeepPartial<T> = {\n [P in keyof T]?: T[P] extends object ? 
DeepPartial<T[P]> : T[P]\n}\n\nexport type ISO8601 = string\n\nexport type RequiredKeys<T> = {\n [K in keyof Required<T>]: T[K]\n}\n", "export * from \"./typeUtils\"\n", "export * from \"./documents\"\nexport * from \"./sdk\"\nexport * from \"./api\"\nexport * from \"./core\"\nexport * from \"./shared\"\n", "import { prefixed, DocumentType } from \"@budibase/types\"\n\nexport {\n SEPARATOR,\n UNICODE_MAX,\n DocumentType,\n InternalTable,\n} from \"@budibase/types\"\n\n/**\n * Can be used to create a few different forms of querying a view.\n */\nexport enum AutomationViewMode {\n ALL = \"all\",\n AUTOMATION = \"automation\",\n STATUS = \"status\",\n}\n\nexport enum ViewName {\n USER_BY_APP = \"by_app\",\n USER_BY_EMAIL = \"by_email2\",\n BY_API_KEY = \"by_api_key\",\n LINK = \"by_link\",\n ROUTING = \"screen_routes\",\n AUTOMATION_LOGS = \"automation_logs\",\n ACCOUNT_BY_EMAIL = \"account_by_email\",\n PLATFORM_USERS_LOWERCASE = \"platform_users_lowercase_2\",\n USER_BY_GROUP = \"user_by_group\",\n APP_BACKUP_BY_TRIGGER = \"by_trigger\",\n}\n\nexport const DeprecatedViews: Record<string, string[]> = {\n [ViewName.USER_BY_EMAIL]: [\n // removed due to inaccuracy in view doc filter logic\n \"by_email\",\n ],\n}\n\nexport const StaticDatabases = {\n GLOBAL: {\n name: \"global-db\",\n docs: {\n apiKeys: \"apikeys\",\n usageQuota: \"usage_quota\",\n licenseInfo: \"license_info\",\n environmentVariables: \"environmentvariables\",\n },\n },\n // contains information about tenancy and so on\n PLATFORM_INFO: {\n name: \"global-info\",\n docs: {\n tenants: \"tenants\",\n install: \"install\",\n },\n },\n AUDIT_LOGS: {\n name: \"audit-logs\",\n },\n}\n\nexport const APP_PREFIX = prefixed(DocumentType.APP)\nexport const APP_DEV = prefixed(DocumentType.APP_DEV)\nexport const APP_DEV_PREFIX = APP_DEV\nexport const BUDIBASE_DATASOURCE_TYPE = \"budibase\"\n", "export enum Header {\n API_KEY = \"x-budibase-api-key\",\n LICENSE_KEY = \"x-budibase-license-key\",\n API_VER = \"x-budibase-api-version\",\n APP_ID = \"x-budibase-app-id\",\n SESSION_ID = \"x-budibase-session-id\",\n TYPE = \"x-budibase-type\",\n PREVIEW_ROLE = \"x-budibase-role\",\n TENANT_ID = \"x-budibase-tenant-id\",\n VERIFICATION_CODE = \"x-budibase-verification-code\",\n RETURN_VERIFICATION_CODE = \"x-budibase-return-verification-code\",\n RESET_PASSWORD_CODE = \"x-budibase-reset-password-code\",\n RETURN_RESET_PASSWORD_CODE = \"x-budibase-return-reset-password-code\",\n TOKEN = \"x-budibase-token\",\n CSRF_TOKEN = \"x-csrf-token\",\n CORRELATION_ID = \"x-budibase-correlation-id\",\n AUTHORIZATION = \"authorization\",\n MIGRATING_APP = \"x-budibase-migrating-app\",\n}\n", "export * from \"./api\"\n\nexport const OperatorOptions = {\n Equals: {\n value: \"equal\",\n label: \"Equals\",\n },\n NotEquals: {\n value: \"notEqual\",\n label: \"Not equals\",\n },\n Empty: {\n value: \"empty\",\n label: \"Is empty\",\n },\n NotEmpty: {\n value: \"notEmpty\",\n label: \"Is not empty\",\n },\n StartsWith: {\n value: \"string\",\n label: \"Starts with\",\n },\n Like: {\n value: \"fuzzy\",\n label: \"Like\",\n },\n MoreThan: {\n value: \"rangeLow\",\n label: \"More than or equal to\",\n },\n LessThan: {\n value: \"rangeHigh\",\n label: \"Less than or equal to\",\n },\n Contains: {\n value: \"contains\",\n label: \"Contains\",\n },\n NotContains: {\n value: \"notContains\",\n label: \"Does not contain\",\n },\n In: {\n value: \"oneOf\",\n label: \"Is in\",\n },\n ContainsAny: {\n value: \"containsAny\",\n label: \"Has any\",\n },\n}\n\nexport const 
SqlNumberTypeRangeMap = {\n integer: {\n max: 2147483647,\n min: -2147483648,\n },\n int: {\n max: 2147483647,\n min: -2147483648,\n },\n smallint: {\n max: 32767,\n min: -32768,\n },\n mediumint: {\n max: 8388607,\n min: -8388608,\n },\n}\n\nexport enum SocketEvent {\n UserUpdate = \"UserUpdate\",\n UserDisconnect = \"UserDisconnect\",\n Heartbeat = \"Heartbeat\",\n}\n\nexport enum GridSocketEvent {\n RowChange = \"RowChange\",\n DatasourceChange = \"DatasourceChange\",\n SelectDatasource = \"SelectDatasource\",\n SelectCell = \"SelectCell\",\n}\n\nexport enum BuilderSocketEvent {\n SelectApp = \"SelectApp\",\n TableChange = \"TableChange\",\n DatasourceChange = \"DatasourceChange\",\n LockTransfer = \"LockTransfer\",\n ScreenChange = \"ScreenChange\",\n AppMetadataChange = \"AppMetadataChange\",\n SelectResource = \"SelectResource\",\n AppPublishChange = \"AppPublishChange\",\n AutomationChange = \"AutomationChange\",\n}\n\nexport const SocketSessionTTL = 60\nexport const ValidQueryNameRegex = /^[^()]*$/\nexport const ValidColumnNameRegex = /^[_a-zA-Z0-9\\s]*$/g\n\nexport const InvalidFileExtensions = [\n \"7z\",\n \"action\",\n \"apk\",\n \"app\",\n \"bat\",\n \"bin\",\n \"cab\",\n \"cmd\",\n \"com\",\n \"command\",\n \"cpl\",\n \"csh\",\n \"ex_\",\n \"exe\",\n \"gadget\",\n \"inf1\",\n \"ins\",\n \"inx\",\n \"ipa\",\n \"isu\",\n \"job\",\n \"js\",\n \"jse\",\n \"ksh\",\n \"lnk\",\n \"msc\",\n \"msi\",\n \"msp\",\n \"mst\",\n \"osx\",\n \"out\",\n \"paf\",\n \"php\",\n \"pif\",\n \"prg\",\n \"ps1\",\n \"reg\",\n \"rgs\",\n \"run\",\n \"scr\",\n \"sct\",\n \"shb\",\n \"shs\",\n \"tar\",\n \"u3p\",\n \"vb\",\n \"vbe\",\n \"vbs\",\n \"vbscript\",\n \"wasm\",\n \"workflow\",\n \"ws\",\n \"wsf\",\n \"wsh\",\n \"zip\",\n]\n", "!function(t,e){\"object\"==typeof exports&&\"undefined\"!=typeof module?module.exports=e():\"function\"==typeof define&&define.amd?define(e):(t=\"undefined\"!=typeof globalThis?globalThis:t||self).dayjs=e()}(this,(function(){\"use strict\";var t=1e3,e=6e4,n=36e5,r=\"millisecond\",i=\"second\",s=\"minute\",u=\"hour\",a=\"day\",o=\"week\",c=\"month\",f=\"quarter\",h=\"year\",d=\"date\",l=\"Invalid Date\",$=/^(\\d{4})[-/]?(\\d{1,2})?[-/]?(\\d{0,2})[Tt\\s]*(\\d{1,2})?:?(\\d{1,2})?:?(\\d{1,2})?[.:]?(\\d+)?$/,y=/\\[([^\\]]+)]|Y{1,4}|M{1,4}|D{1,2}|d{1,4}|H{1,2}|h{1,2}|a|A|m{1,2}|s{1,2}|Z{1,2}|SSS/g,M={name:\"en\",weekdays:\"Sunday_Monday_Tuesday_Wednesday_Thursday_Friday_Saturday\".split(\"_\"),months:\"January_February_March_April_May_June_July_August_September_October_November_December\".split(\"_\"),ordinal:function(t){var e=[\"th\",\"st\",\"nd\",\"rd\"],n=t%100;return\"[\"+t+(e[(n-20)%10]||e[n]||e[0])+\"]\"}},m=function(t,e,n){var r=String(t);return!r||r.length>=e?t:\"\"+Array(e+1-r.length).join(n)+t},v={s:m,z:function(t){var e=-t.utcOffset(),n=Math.abs(e),r=Math.floor(n/60),i=n%60;return(e<=0?\"+\":\"-\")+m(r,2,\"0\")+\":\"+m(i,2,\"0\")},m:function t(e,n){if(e.date()<n.date())return-t(n,e);var r=12*(n.year()-e.year())+(n.month()-e.month()),i=e.clone().add(r,c),s=n-i<0,u=e.clone().add(r+(s?-1:1),c);return+(-(r+(n-i)/(s?i-u:u-i))||0)},a:function(t){return t<0?Math.ceil(t)||0:Math.floor(t)},p:function(t){return{M:c,y:h,w:o,d:a,D:d,h:u,m:s,s:i,ms:r,Q:f}[t]||String(t||\"\").toLowerCase().replace(/s$/,\"\")},u:function(t){return void 0===t}},g=\"en\",D={};D[g]=M;var p=\"$isDayjsObject\",S=function(t){return t instanceof _||!(!t||!t[p])},w=function t(e,n,r){var i;if(!e)return g;if(\"string\"==typeof e){var s=e.toLowerCase();D[s]&&(i=s),n&&(D[s]=n,i=s);var 
u=e.split(\"-\");if(!i&&u.length>1)return t(u[0])}else{var a=e.name;D[a]=e,i=a}return!r&&i&&(g=i),i||!r&&g},O=function(t,e){if(S(t))return t.clone();var n=\"object\"==typeof e?e:{};return n.date=t,n.args=arguments,new _(n)},b=v;b.l=w,b.i=S,b.w=function(t,e){return O(t,{locale:e.$L,utc:e.$u,x:e.$x,$offset:e.$offset})};var _=function(){function M(t){this.$L=w(t.locale,null,!0),this.parse(t),this.$x=this.$x||t.x||{},this[p]=!0}var m=M.prototype;return m.parse=function(t){this.$d=function(t){var e=t.date,n=t.utc;if(null===e)return new Date(NaN);if(b.u(e))return new Date;if(e instanceof Date)return new Date(e);if(\"string\"==typeof e&&!/Z$/i.test(e)){var r=e.match($);if(r){var i=r[2]-1||0,s=(r[7]||\"0\").substring(0,3);return n?new Date(Date.UTC(r[1],i,r[3]||1,r[4]||0,r[5]||0,r[6]||0,s)):new Date(r[1],i,r[3]||1,r[4]||0,r[5]||0,r[6]||0,s)}}return new Date(e)}(t),this.init()},m.init=function(){var t=this.$d;this.$y=t.getFullYear(),this.$M=t.getMonth(),this.$D=t.getDate(),this.$W=t.getDay(),this.$H=t.getHours(),this.$m=t.getMinutes(),this.$s=t.getSeconds(),this.$ms=t.getMilliseconds()},m.$utils=function(){return b},m.isValid=function(){return!(this.$d.toString()===l)},m.isSame=function(t,e){var n=O(t);return this.startOf(e)<=n&&n<=this.endOf(e)},m.isAfter=function(t,e){return O(t)<this.startOf(e)},m.isBefore=function(t,e){return this.endOf(e)<O(t)},m.$g=function(t,e,n){return b.u(t)?this[e]:this.set(n,t)},m.unix=function(){return Math.floor(this.valueOf()/1e3)},m.valueOf=function(){return this.$d.getTime()},m.startOf=function(t,e){var n=this,r=!!b.u(e)||e,f=b.p(t),l=function(t,e){var i=b.w(n.$u?Date.UTC(n.$y,e,t):new Date(n.$y,e,t),n);return r?i:i.endOf(a)},$=function(t,e){return b.w(n.toDate()[t].apply(n.toDate(\"s\"),(r?[0,0,0,0]:[23,59,59,999]).slice(e)),n)},y=this.$W,M=this.$M,m=this.$D,v=\"set\"+(this.$u?\"UTC\":\"\");switch(f){case h:return r?l(1,0):l(31,11);case c:return r?l(1,M):l(0,M+1);case o:var g=this.$locale().weekStart||0,D=(y<g?y+7:y)-g;return l(r?m-D:m+(6-D),M);case a:case d:return $(v+\"Hours\",0);case u:return $(v+\"Minutes\",1);case s:return $(v+\"Seconds\",2);case i:return $(v+\"Milliseconds\",3);default:return this.clone()}},m.endOf=function(t){return this.startOf(t,!1)},m.$set=function(t,e){var n,o=b.p(t),f=\"set\"+(this.$u?\"UTC\":\"\"),l=(n={},n[a]=f+\"Date\",n[d]=f+\"Date\",n[c]=f+\"Month\",n[h]=f+\"FullYear\",n[u]=f+\"Hours\",n[s]=f+\"Minutes\",n[i]=f+\"Seconds\",n[r]=f+\"Milliseconds\",n)[o],$=o===a?this.$D+(e-this.$W):e;if(o===c||o===h){var y=this.clone().set(d,1);y.$d[l]($),y.init(),this.$d=y.set(d,Math.min(this.$D,y.daysInMonth())).$d}else l&&this.$d[l]($);return this.init(),this},m.set=function(t,e){return this.clone().$set(t,e)},m.get=function(t){return this[b.p(t)]()},m.add=function(r,f){var d,l=this;r=Number(r);var $=b.p(f),y=function(t){var e=O(l);return b.w(e.date(e.date()+Math.round(t*r)),l)};if($===c)return this.set(c,this.$M+r);if($===h)return this.set(h,this.$y+r);if($===a)return y(1);if($===o)return y(7);var M=(d={},d[s]=e,d[u]=n,d[i]=t,d)[$]||1,m=this.$d.getTime()+r*M;return b.w(m,this)},m.subtract=function(t,e){return this.add(-1*t,e)},m.format=function(t){var e=this,n=this.$locale();if(!this.isValid())return n.invalidDate||l;var r=t||\"YYYY-MM-DDTHH:mm:ssZ\",i=b.z(this),s=this.$H,u=this.$m,a=this.$M,o=n.weekdays,c=n.months,f=n.meridiem,h=function(t,n,i,s){return t&&(t[n]||t(e,r))||i[n].slice(0,s)},d=function(t){return b.s(s%12||12,t,\"0\")},$=f||function(t,e,n){var r=t<12?\"AM\":\"PM\";return n?r.toLowerCase():r};return r.replace(y,(function(t,r){return 
r||function(t){switch(t){case\"YY\":return String(e.$y).slice(-2);case\"YYYY\":return b.s(e.$y,4,\"0\");case\"M\":return a+1;case\"MM\":return b.s(a+1,2,\"0\");case\"MMM\":return h(n.monthsShort,a,c,3);case\"MMMM\":return h(c,a);case\"D\":return e.$D;case\"DD\":return b.s(e.$D,2,\"0\");case\"d\":return String(e.$W);case\"dd\":return h(n.weekdaysMin,e.$W,o,2);case\"ddd\":return h(n.weekdaysShort,e.$W,o,3);case\"dddd\":return o[e.$W];case\"H\":return String(s);case\"HH\":return b.s(s,2,\"0\");case\"h\":return d(1);case\"hh\":return d(2);case\"a\":return $(s,u,!0);case\"A\":return $(s,u,!1);case\"m\":return String(u);case\"mm\":return b.s(u,2,\"0\");case\"s\":return String(e.$s);case\"ss\":return b.s(e.$s,2,\"0\");case\"SSS\":return b.s(e.$ms,3,\"0\");case\"Z\":return i}return null}(t)||i.replace(\":\",\"\")}))},m.utcOffset=function(){return 15*-Math.round(this.$d.getTimezoneOffset()/15)},m.diff=function(r,d,l){var $,y=this,M=b.p(d),m=O(r),v=(m.utcOffset()-this.utcOffset())*e,g=this-m,D=function(){return b.m(y,m)};switch(M){case h:$=D()/12;break;case c:$=D();break;case f:$=D()/3;break;case o:$=(g-v)/6048e5;break;case a:$=(g-v)/864e5;break;case u:$=g/n;break;case s:$=g/e;break;case i:$=g/t;break;default:$=g}return l?$:b.a($)},m.daysInMonth=function(){return this.endOf(c).$D},m.$locale=function(){return D[this.$L]},m.locale=function(t,e){if(!t)return this.$L;var n=this.clone(),r=w(t,e,!0);return r&&(n.$L=r),n},m.clone=function(){return b.w(this.$d,this)},m.toDate=function(){return new Date(this.valueOf())},m.toJSON=function(){return this.isValid()?this.toISOString():null},m.toISOString=function(){return this.$d.toISOString()},m.toString=function(){return this.$d.toUTCString()},M}(),k=_.prototype;return O.prototype=k,[[\"$ms\",r],[\"$s\",i],[\"$m\",s],[\"$H\",u],[\"$W\",a],[\"$M\",c],[\"$y\",h],[\"$D\",d]].forEach((function(t){k[t[1]]=function(e){return this.$g(e,t[0],t[1])}})),O.extend=function(t,e){return t.$i||(t(e,_,O),t.$i=!0),O},O.locale=w,O.isDayjs=S,O.unix=function(t){return O(1e3*t)},O.en=D[g],O.Ls=D,O.p={},O}));", "import { User } from \"@budibase/types\"\n\n/**\n * Gets a key within an object. The key supports dot syntax for retrieving deep\n * fields - e.g. \"a.b.c\".\n * Exact matches of keys with dots in them take precedence over nested keys of\n * the same path - e.g. getting \"a.b\" from { \"a.b\": \"foo\", a: { b: \"bar\" } }\n * will return \"foo\" over \"bar\".\n * @param obj the object\n * @param key the key\n * @return the value or null if a value was not found for this key\n */\nexport const deepGet = (obj: { [x: string]: any }, key: string) => {\n if (!obj || !key) {\n return null\n }\n if (Object.prototype.hasOwnProperty.call(obj, key)) {\n return obj[key]\n }\n const split = key.split(\".\")\n for (let i = 0; i < split.length; i++) {\n obj = obj?.[split[i]]\n }\n return obj\n}\n\n/**\n * Gets the initials to show in a user avatar.\n * @param user the user\n */\nexport const getUserInitials = (user: User) => {\n if (!user) {\n return \"?\"\n }\n let initials = \"\"\n initials += user.firstName ? user.firstName[0] : \"\"\n initials += user.lastName ? 
user.lastName[0] : \"\"\n if (initials !== \"\") {\n return initials\n }\n return user.email?.[0] || \"U\"\n}\n\n/**\n * Gets a deterministic colour for a particular user\n * @param user the user\n */\nexport const getUserColor = (user: User) => {\n let id = user?._id\n if (!id) {\n return \"var(--spectrum-global-color-blue-400)\"\n }\n\n // In order to generate the same color for global users as app users, we need\n // to remove the app-specific table prefix\n id = id.replace(\"ro_ta_users_\", \"\")\n\n // Generate a hue based on the ID\n let hue = 1\n for (let i = 0; i < id.length; i++) {\n hue += id.charCodeAt(i)\n hue = hue % 36\n }\n return `hsl(${hue * 10}, 50%, 40%)`\n}\n\n/**\n * Gets a friendly label to describe who a user is.\n * @param user the user\n */\nexport const getUserLabel = (user: User) => {\n if (!user) {\n return \"\"\n }\n const { firstName, lastName, email } = user\n if (firstName && lastName) {\n return `${firstName} ${lastName}`\n } else if (firstName) {\n return firstName\n } else if (lastName) {\n return lastName\n } else {\n return email\n }\n}\n", "import { Datasource, SourceName } from \"@budibase/types\"\n\nexport function isGoogleSheets(type: SourceName) {\n return type === SourceName.GOOGLE_SHEETS\n}\n\nexport function isSQL(datasource: Datasource): boolean {\n if (!datasource || !datasource.source) {\n return false\n }\n const SQL = [\n SourceName.POSTGRES,\n SourceName.SQL_SERVER,\n SourceName.MYSQL,\n SourceName.ORACLE,\n ]\n return SQL.indexOf(datasource.source) !== -1 || datasource.isSQL === true\n}\n", "export * from \"./helpers\"\nexport * from \"./integrations\"\n", "import {\n Datasource,\n FieldSubtype,\n FieldType,\n SearchFilter,\n SearchQuery,\n SearchQueryFields,\n SearchQueryOperators,\n SortDirection,\n SortType,\n} from \"@budibase/types\"\nimport dayjs from \"dayjs\"\nimport { OperatorOptions, SqlNumberTypeRangeMap } from \"./constants\"\nimport { deepGet } from \"./helpers\"\n\nconst HBS_REGEX = /{{([^{].*?)}}/g\n\n/**\n * Returns the valid operator options for a certain data type\n */\nexport const getValidOperatorsForType = (\n fieldType: { type: FieldType; subtype?: FieldSubtype },\n field: string,\n datasource: Datasource & { tableId: any } // TODO: is this table id ever populated?\n) => {\n const Op = OperatorOptions\n const stringOps = [\n Op.Equals,\n Op.NotEquals,\n Op.StartsWith,\n Op.Like,\n Op.Empty,\n Op.NotEmpty,\n Op.In,\n ]\n const numOps = [\n Op.Equals,\n Op.NotEquals,\n Op.MoreThan,\n Op.LessThan,\n Op.Empty,\n Op.NotEmpty,\n Op.In,\n ]\n let ops: {\n value: string\n label: string\n }[] = []\n const { type, subtype } = fieldType\n if (type === FieldType.STRING) {\n ops = stringOps\n } else if (type === FieldType.NUMBER || type === FieldType.BIGINT) {\n ops = numOps\n } else if (type === FieldType.OPTIONS) {\n ops = [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty, Op.In]\n } else if (type === FieldType.ARRAY) {\n ops = [Op.Contains, Op.NotContains, Op.Empty, Op.NotEmpty, Op.ContainsAny]\n } else if (type === FieldType.BOOLEAN) {\n ops = [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty]\n } else if (type === FieldType.LONGFORM) {\n ops = stringOps\n } else if (type === FieldType.DATETIME) {\n ops = numOps\n } else if (type === FieldType.FORMULA) {\n ops = stringOps.concat([Op.MoreThan, Op.LessThan])\n } else if (type === FieldType.BB_REFERENCE && subtype == FieldSubtype.USER) {\n ops = [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty, Op.In]\n } else if (type === FieldType.BB_REFERENCE && subtype == FieldSubtype.USERS) {\n 
ops = [Op.Contains, Op.NotContains, Op.ContainsAny, Op.Empty, Op.NotEmpty]\n }\n\n // Only allow equal/not equal for _id in SQL tables\n const externalTable = datasource?.tableId?.includes(\"datasource_plus\")\n if (field === \"_id\" && externalTable) {\n ops = [Op.Equals, Op.NotEquals, Op.In]\n }\n\n return ops\n}\n\n/**\n * Operators which do not support empty strings as values\n */\nexport const NoEmptyFilterStrings = [\n OperatorOptions.StartsWith.value,\n OperatorOptions.Like.value,\n OperatorOptions.Equals.value,\n OperatorOptions.NotEquals.value,\n OperatorOptions.Contains.value,\n OperatorOptions.NotContains.value,\n] as (keyof SearchQueryFields)[]\n\n/**\n * Removes any fields that contain empty strings that would cause inconsistent\n * behaviour with how backend tables are filtered (no value means no filter).\n */\nconst cleanupQuery = (query: SearchQuery) => {\n if (!query) {\n return query\n }\n for (let filterField of NoEmptyFilterStrings) {\n if (!query[filterField]) {\n continue\n }\n\n for (let [key, value] of Object.entries(query[filterField]!)) {\n if (value == null || value === \"\") {\n delete query[filterField]![key]\n }\n }\n }\n return query\n}\n\n/**\n * Removes a numeric prefix on field names designed to give fields uniqueness\n */\nconst removeKeyNumbering = (key: string) => {\n if (typeof key === \"string\" && key.match(/\\d[0-9]*:/g) != null) {\n const parts = key.split(\":\")\n parts.shift()\n return parts.join(\":\")\n } else {\n return key\n }\n}\n\n/**\n * Builds a lucene JSON query from the filter structure generated in the builder\n * @param filter the builder filter structure\n */\nexport const buildLuceneQuery = (filter: SearchFilter[]) => {\n let query: SearchQuery = {\n string: {},\n fuzzy: {},\n range: {},\n equal: {},\n notEqual: {},\n empty: {},\n notEmpty: {},\n contains: {},\n notContains: {},\n oneOf: {},\n containsAny: {},\n }\n\n if (!Array.isArray(filter)) {\n return query\n }\n\n filter.forEach(expression => {\n let { operator, field, type, value, externalType, onEmptyFilter } =\n expression\n const isHbs =\n typeof value === \"string\" && (value.match(HBS_REGEX) || []).length > 0\n // Parse all values into correct types\n if (operator === \"allOr\") {\n query.allOr = true\n return\n }\n if (onEmptyFilter) {\n query.onEmptyFilter = onEmptyFilter\n return\n }\n if (\n type === \"datetime\" &&\n !isHbs &&\n operator !== \"empty\" &&\n operator !== \"notEmpty\"\n ) {\n // Ensure date value is a valid date and parse into correct format\n if (!value) {\n return\n }\n try {\n value = new Date(value).toISOString()\n } catch (error) {\n return\n }\n }\n if (type === \"number\" && typeof value === \"string\" && !isHbs) {\n if (operator === \"oneOf\") {\n value = value.split(\",\").map(item => parseFloat(item))\n } else {\n value = parseFloat(value)\n }\n }\n if (type === \"boolean\") {\n value = `${value}`?.toLowerCase() === \"true\"\n }\n if (\n [\"contains\", \"notContains\", \"containsAny\"].includes(operator) &&\n type === \"array\" &&\n typeof value === \"string\"\n ) {\n value = value.split(\",\")\n }\n if (operator.startsWith(\"range\") && query.range) {\n const minint =\n SqlNumberTypeRangeMap[\n externalType as keyof typeof SqlNumberTypeRangeMap\n ]?.min || Number.MIN_SAFE_INTEGER\n const maxint =\n SqlNumberTypeRangeMap[\n externalType as keyof typeof SqlNumberTypeRangeMap\n ]?.max || Number.MAX_SAFE_INTEGER\n if (!query.range[field]) {\n query.range[field] = {\n low: type === \"number\" ? 
minint : \"0000-00-00T00:00:00.000Z\",\n high: type === \"number\" ? maxint : \"9999-00-00T00:00:00.000Z\",\n }\n }\n if ((operator as any) === \"rangeLow\" && value != null && value !== \"\") {\n query.range[field].low = value\n } else if (\n (operator as any) === \"rangeHigh\" &&\n value != null &&\n value !== \"\"\n ) {\n query.range[field].high = value\n }\n } else if (query[operator] && operator !== \"onEmptyFilter\") {\n if (type === \"boolean\") {\n // Transform boolean filters to cope with null.\n // \"equals false\" needs to be \"not equals true\"\n // \"not equals false\" needs to be \"equals true\"\n if (operator === \"equal\" && value === false) {\n query.notEqual = query.notEqual || {}\n query.notEqual[field] = true\n } else if (operator === \"notEqual\" && value === false) {\n query.equal = query.equal || {}\n query.equal[field] = true\n } else {\n query[operator] = query[operator] || {}\n query[operator]![field] = value\n }\n } else {\n query[operator] = query[operator] || {}\n query[operator]![field] = value\n }\n }\n })\n\n return query\n}\n\n/**\n * Performs a client-side lucene search on an array of data\n * @param docs the data\n * @param query the JSON lucene query\n */\nexport const runLuceneQuery = (docs: any[], query?: SearchQuery) => {\n if (!docs || !Array.isArray(docs)) {\n return []\n }\n if (!query) {\n return docs\n }\n\n // Make query consistent first\n query = cleanupQuery(query)\n\n // Iterates over a set of filters and evaluates a fail function against a doc\n const match =\n (\n type: keyof SearchQueryFields,\n failFn: (docValue: any, testValue: any) => boolean\n ) =>\n (doc: any) => {\n const filters = Object.entries(query![type] || {})\n for (let i = 0; i < filters.length; i++) {\n const [key, testValue] = filters[i]\n const docValue = deepGet(doc, removeKeyNumbering(key))\n if (failFn(docValue, testValue)) {\n return false\n }\n }\n return true\n }\n\n // Process a string match (fails if the value does not start with the string)\n const stringMatch = match(\n SearchQueryOperators.STRING,\n (docValue: string, testValue: string) => {\n return (\n !docValue ||\n !docValue?.toLowerCase().startsWith(testValue?.toLowerCase())\n )\n }\n )\n\n // Process a fuzzy match (treat the same as starts with when running locally)\n const fuzzyMatch = match(\n SearchQueryOperators.FUZZY,\n (docValue: string, testValue: string) => {\n return (\n !docValue ||\n !docValue?.toLowerCase().startsWith(testValue?.toLowerCase())\n )\n }\n )\n\n // Process a range match\n const rangeMatch = match(\n SearchQueryOperators.RANGE,\n (\n docValue: string | number | null,\n testValue: { low: number; high: number }\n ) => {\n if (docValue == null || docValue === \"\") {\n return true\n }\n if (!isNaN(+docValue)) {\n return +docValue < testValue.low || +docValue > testValue.high\n }\n if (dayjs(docValue).isValid()) {\n return (\n new Date(docValue).getTime() < new Date(testValue.low).getTime() ||\n new Date(docValue).getTime() > new Date(testValue.high).getTime()\n )\n }\n return false\n }\n )\n\n // Process an equal match (fails if the value is different)\n const equalMatch = match(\n SearchQueryOperators.EQUAL,\n (docValue: any, testValue: string | null) => {\n return testValue != null && testValue !== \"\" && docValue !== testValue\n }\n )\n\n // Process a not-equal match (fails if the value is the same)\n const notEqualMatch = match(\n SearchQueryOperators.NOT_EQUAL,\n (docValue: any, testValue: string | null) => {\n return testValue != null && testValue !== \"\" && docValue === 
testValue\n }\n )\n\n // Process an empty match (fails if the value is not empty)\n const emptyMatch = match(\n SearchQueryOperators.EMPTY,\n (docValue: string | null) => {\n return docValue != null && docValue !== \"\"\n }\n )\n\n // Process a not-empty match (fails is the value is empty)\n const notEmptyMatch = match(\n SearchQueryOperators.NOT_EMPTY,\n (docValue: string | null) => {\n return docValue == null || docValue === \"\"\n }\n )\n\n // Process an includes match (fails if the value is not included)\n const oneOf = match(\n SearchQueryOperators.ONE_OF,\n (docValue: any, testValue: any) => {\n if (typeof testValue === \"string\") {\n testValue = testValue.split(\",\")\n if (typeof docValue === \"number\") {\n testValue = testValue.map((item: string) => parseFloat(item))\n }\n }\n return !testValue?.includes(docValue)\n }\n )\n\n const containsAny = match(\n SearchQueryOperators.CONTAINS_ANY,\n (docValue: any, testValue: any) => {\n return !docValue?.includes(...testValue)\n }\n )\n\n const contains = match(\n SearchQueryOperators.CONTAINS,\n (docValue: string | any[], testValue: any[]) => {\n return !testValue?.every((item: any) => docValue?.includes(item))\n }\n )\n\n const notContains = match(\n SearchQueryOperators.NOT_CONTAINS,\n (docValue: string | any[], testValue: any[]) => {\n return testValue?.every((item: any) => docValue?.includes(item))\n }\n )\n\n // Match a document against all criteria\n const docMatch = (doc: any) => {\n return (\n stringMatch(doc) &&\n fuzzyMatch(doc) &&\n rangeMatch(doc) &&\n equalMatch(doc) &&\n notEqualMatch(doc) &&\n emptyMatch(doc) &&\n notEmptyMatch(doc) &&\n oneOf(doc) &&\n contains(doc) &&\n containsAny(doc) &&\n notContains(doc)\n )\n }\n\n // Process all docs\n return docs.filter(docMatch)\n}\n\n/**\n * Performs a client-side sort from the equivalent server-side lucene sort\n * parameters.\n * @param docs the data\n * @param sort the sort column\n * @param sortOrder the sort order (\"ascending\" or \"descending\")\n * @param sortType the type of sort (\"string\" or \"number\")\n */\nexport const luceneSort = (\n docs: any[],\n sort: string,\n sortOrder: SortDirection,\n sortType = SortType.STRING\n) => {\n if (!sort || !sortOrder || !sortType) {\n return docs\n }\n const parse =\n sortType === \"string\" ? (x: any) => `${x}` : (x: string) => parseFloat(x)\n return docs\n .slice()\n .sort((a: { [x: string]: any }, b: { [x: string]: any }) => {\n const colA = parse(a[sort])\n const colB = parse(b[sort])\n if (sortOrder.toLowerCase() === \"descending\") {\n return colA > colB ? -1 : 1\n } else {\n return colA > colB ? 
1 : -1\n }\n })\n}\n\n/**\n * Limits the specified docs to the specified number of rows from the equivalent\n * server-side lucene limit parameters.\n * @param docs the data\n * @param limit the number of docs to limit to\n */\nexport const luceneLimit = (docs: any[], limit: string) => {\n const numLimit = parseFloat(limit)\n if (isNaN(numLimit)) {\n return docs\n }\n return docs.slice(0, numLimit)\n}\n\nexport const hasFilters = (query?: SearchQuery) => {\n if (!query) {\n return false\n }\n const skipped = [\"allOr\", \"onEmptyFilter\"]\n for (let [key, value] of Object.entries(query)) {\n if (skipped.includes(key) || typeof value !== \"object\") {\n continue\n }\n if (Object.keys(value || {}).length !== 0) {\n return true\n }\n }\n return false\n}\n", "import * as Constants from \"./constants\"\n\nexport function unreachable(\n value: never,\n message = `No such case in exhaustive switch: ${value}`\n) {\n throw new Error(message)\n}\n\nexport async function parallelForeach<T>(\n items: T[],\n task: (item: T) => Promise<void>,\n maxConcurrency: number\n): Promise<void> {\n const promises: Promise<void>[] = []\n let index = 0\n\n const processItem = async (item: T) => {\n try {\n await task(item)\n } finally {\n processNext()\n }\n }\n\n const processNext = () => {\n if (index >= items.length) {\n // No more items to process\n return\n }\n\n const item = items[index]\n index++\n\n const promise = processItem(item)\n promises.push(promise)\n\n if (promises.length >= maxConcurrency) {\n Promise.race(promises).then(processNext)\n } else {\n processNext()\n }\n }\n processNext()\n\n await Promise.all(promises)\n}\n\nexport function filterValueToLabel() {\n return Object.keys(Constants.OperatorOptions).reduce(\n (acc: { [key: string]: string }, key: string) => {\n const ops: { [key: string]: any } = Constants.OperatorOptions\n const op: { [key: string]: string } = ops[key]\n acc[op[\"value\"]] = op.label\n return acc\n },\n {}\n )\n}\n", "import { DocumentType, prefixed } from \"@budibase/types\"\n\nconst APP_PREFIX = prefixed(DocumentType.APP)\nconst APP_DEV_PREFIX = prefixed(DocumentType.APP_DEV)\n\nexport function getDevAppID(appId: string) {\n if (!appId) {\n throw new Error(\"No app ID provided\")\n }\n if (appId.startsWith(APP_DEV_PREFIX)) {\n return appId\n }\n // split to take off the app_ element, then join it together incase any other app_ exist\n const split = appId.split(APP_PREFIX)\n split.shift()\n const rest = split.join(APP_PREFIX)\n return `${APP_DEV_PREFIX}${rest}`\n}\n\n/**\n * Convert a development app ID to a deployed app ID.\n */\nexport function getProdAppID(appId: string) {\n if (!appId) {\n throw new Error(\"No app ID provided\")\n }\n if (!appId.startsWith(APP_DEV_PREFIX)) {\n return appId\n }\n // split to take off the app_dev element, then join it together incase any other app_ exist\n const split = appId.split(APP_DEV_PREFIX)\n split.shift()\n const rest = split.join(APP_DEV_PREFIX)\n return `${APP_PREFIX}${rest}`\n}\n", "import {\n ContextUser,\n DocumentType,\n SEPARATOR,\n User,\n InternalTable,\n} from \"@budibase/types\"\nimport { getProdAppID } from \"./applications\"\nimport * as _ from \"lodash/fp\"\n\n// checks if a user is specifically a builder, given an app ID\nexport function isBuilder(user: User | ContextUser, appId?: string): boolean {\n if (!user) {\n return false\n }\n if (user.builder?.global) {\n return true\n } else if (appId && user.builder?.apps?.includes(getProdAppID(appId))) {\n return true\n }\n return false\n}\n\nexport function 
isGlobalBuilder(user: User | ContextUser): boolean {\n return (isBuilder(user) && !hasAppBuilderPermissions(user)) || isAdmin(user)\n}\n\nexport function canCreateApps(user: User | ContextUser): boolean {\n return isGlobalBuilder(user) || hasCreatorPermissions(user)\n}\n\n// alias for hasAdminPermission, currently do the same thing\n// in future whether someone has admin permissions and whether they are\n// an admin for a specific resource could be separated\nexport function isAdmin(user: User | ContextUser): boolean {\n if (!user) {\n return false\n }\n return hasAdminPermissions(user)\n}\n\nexport function isAdminOrBuilder(\n user: User | ContextUser,\n appId?: string\n): boolean {\n return isBuilder(user, appId) || isAdmin(user)\n}\n\nexport function isAdminOrGlobalBuilder(\n user: User | ContextUser,\n appId?: string\n): boolean {\n return isGlobalBuilder(user) || isAdmin(user)\n}\n\n// check if they are a builder within an app (not necessarily a global builder)\nexport function hasAppBuilderPermissions(user?: User | ContextUser): boolean {\n if (!user) {\n return false\n }\n const appLength = user.builder?.apps?.length\n const isGlobalBuilder = !!user.builder?.global\n return !isGlobalBuilder && appLength != null && appLength > 0\n}\n\nexport function hasAppCreatorPermissions(user?: User | ContextUser): boolean {\n if (!user) {\n return false\n }\n return _.flow(\n _.get(\"roles\"),\n _.values,\n _.find(x => x === \"CREATOR\"),\n x => !!x\n )(user)\n}\n\n// checks if a user is capable of building any app\nexport function hasBuilderPermissions(user?: User | ContextUser): boolean {\n if (!user) {\n return false\n }\n return (\n user.builder?.global ||\n hasAppBuilderPermissions(user) ||\n hasCreatorPermissions(user)\n )\n}\n\n// checks if a user is capable of being an admin\nexport function hasAdminPermissions(user?: User | ContextUser): boolean {\n if (!user) {\n return false\n }\n return !!user.admin?.global\n}\n\nexport function hasCreatorPermissions(user?: User | ContextUser): boolean {\n if (!user) {\n return false\n }\n return !!user.builder?.creator\n}\n\nexport function isCreator(user?: User | ContextUser): boolean {\n if (!user) {\n return false\n }\n return (\n isGlobalBuilder(user!) 
||\n hasAdminPermissions(user) ||\n hasCreatorPermissions(user) ||\n hasAppBuilderPermissions(user) ||\n hasAppCreatorPermissions(user)\n )\n}\n\nexport function getGlobalUserID(userId?: string): string | undefined {\n if (typeof userId !== \"string\") {\n return userId\n }\n const prefix = `${DocumentType.ROW}${SEPARATOR}${InternalTable.USER_METADATA}${SEPARATOR}`\n if (!userId.startsWith(prefix)) {\n return userId\n }\n return userId.split(prefix)[1]\n}\n\nexport function containsUserID(value: string | undefined): boolean {\n if (typeof value !== \"string\") {\n return false\n }\n return value.includes(`${DocumentType.USER}${SEPARATOR}`)\n}\n", "export * as applications from \"./applications\"\nexport * as users from \"./users\"\n", "export * from \"./documents\"\n", "import { FieldType } from \"@budibase/types\"\n\nconst allowDisplayColumnByType: Record<FieldType, boolean> = {\n [FieldType.STRING]: true,\n [FieldType.LONGFORM]: true,\n [FieldType.OPTIONS]: true,\n [FieldType.NUMBER]: true,\n [FieldType.DATETIME]: true,\n [FieldType.FORMULA]: true,\n [FieldType.AUTO]: true,\n [FieldType.INTERNAL]: true,\n [FieldType.BARCODEQR]: true,\n [FieldType.BIGINT]: true,\n\n [FieldType.BOOLEAN]: false,\n [FieldType.ARRAY]: false,\n [FieldType.ATTACHMENT]: false,\n [FieldType.LINK]: false,\n [FieldType.JSON]: false,\n [FieldType.BB_REFERENCE]: false,\n}\n\nconst allowSortColumnByType: Record<FieldType, boolean> = {\n [FieldType.STRING]: true,\n [FieldType.LONGFORM]: true,\n [FieldType.OPTIONS]: true,\n [FieldType.NUMBER]: true,\n [FieldType.DATETIME]: true,\n [FieldType.AUTO]: true,\n [FieldType.INTERNAL]: true,\n [FieldType.BARCODEQR]: true,\n [FieldType.BIGINT]: true,\n [FieldType.BOOLEAN]: true,\n [FieldType.JSON]: true,\n\n [FieldType.FORMULA]: false,\n [FieldType.ATTACHMENT]: false,\n [FieldType.ARRAY]: false,\n [FieldType.LINK]: false,\n [FieldType.BB_REFERENCE]: false,\n}\n\nexport function canBeDisplayColumn(type: FieldType): boolean {\n return !!allowDisplayColumnByType[type]\n}\n\nexport function canBeSortColumn(type: FieldType): boolean {\n return !!allowSortColumnByType[type]\n}\n", "export * from \"./constants\"\nexport * as dataFilters from \"./filters\"\nexport * as helpers from \"./helpers\"\nexport * as utils from \"./utils\"\nexport * as sdk from \"./sdk\"\nexport * from \"./table\"\n", "export enum UserStatus {\n ACTIVE = \"active\",\n INACTIVE = \"inactive\",\n}\n\nexport enum Cookie {\n Auth = \"budibase:auth\",\n Init = \"budibase:init\",\n ACCOUNT_RETURN_URL = \"budibase:account:returnurl\",\n DatasourceAuth = \"budibase:datasourceauth\",\n OIDC_CONFIG = \"budibase:oidc:config\",\n}\n\nexport { Header } from \"@budibase/shared-core\"\n\nexport enum GlobalRole {\n OWNER = \"owner\",\n ADMIN = \"admin\",\n BUILDER = \"builder\",\n WORKSPACE_MANAGER = \"workspace_manager\",\n}\n\nexport enum Config {\n SETTINGS = \"settings\",\n ACCOUNT = \"account\",\n SMTP = \"smtp\",\n GOOGLE = \"google\",\n OIDC = \"oidc\",\n OIDC_LOGOS = \"logos_oidc\",\n SCIM = \"scim\",\n}\n\nexport const MIN_VALID_DATE = new Date(-2147483647000)\nexport const MAX_VALID_DATE = new Date(2147483647000)\nexport const DEFAULT_TENANT_ID = \"default\"\n", "export * from \"./db\"\nexport * from \"./misc\"\n", "import {\n IdentityContext,\n IdentityType,\n User,\n isCloudAccount,\n Account,\n AccountUserContext,\n UserContext,\n Ctx,\n} from \"@budibase/types\"\nimport * as context from \".\"\n\nexport function getIdentity(): IdentityContext | undefined {\n return context.getIdentity()\n}\n\nexport function 
doInIdentityContext(identity: IdentityContext, task: any) {\n return context.doInIdentityContext(identity, task)\n}\n\n// used in server/worker\nexport function doInUserContext(user: User, ctx: Ctx, task: any) {\n const userContext: UserContext = {\n ...user,\n _id: user._id as string,\n type: IdentityType.USER,\n hostInfo: {\n ipAddress: ctx.request.ip,\n // filled in by koa-useragent package\n userAgent: ctx.userAgent._agent.source,\n },\n }\n return doInIdentityContext(userContext, task)\n}\n\n// used in account portal\nexport function doInAccountContext(account: Account, task: any) {\n const _id = getAccountUserId(account)\n const tenantId = account.tenantId\n const accountContext: AccountUserContext = {\n _id,\n type: IdentityType.USER,\n tenantId,\n account,\n }\n return doInIdentityContext(accountContext, task)\n}\n\nexport function getAccountUserId(account: Account) {\n let userId: string\n if (isCloudAccount(account)) {\n userId = account.budibaseUserId\n } else {\n // use account id as user id for self-hosting\n userId = account.accountId\n }\n return userId\n}\n", "import { existsSync, readFileSync } from \"fs\"\nimport { ServiceType } from \"@budibase/types\"\n\nfunction isTest() {\n return isJest()\n}\n\nfunction isJest() {\n return (\n process.env.NODE_ENV === \"jest\" ||\n (process.env.JEST_WORKER_ID != null &&\n process.env.JEST_WORKER_ID !== \"null\")\n )\n}\n\nfunction isDev() {\n return process.env.NODE_ENV !== \"production\"\n}\n\nlet LOADED = false\nif (!LOADED && isDev() && !isTest()) {\n require(\"dotenv\").config()\n LOADED = true\n}\n\nconst DefaultBucketName = {\n BACKUPS: \"backups\",\n APPS: \"prod-budi-app-assets\",\n TEMPLATES: \"templates\",\n GLOBAL: \"global\",\n PLUGINS: \"plugins\",\n}\n\nconst selfHosted = !!parseInt(process.env.SELF_HOSTED || \"\")\n\nfunction getAPIEncryptionKey() {\n return process.env.API_ENCRYPTION_KEY\n ? 
process.env.API_ENCRYPTION_KEY\n : process.env.JWT_SECRET // fallback to the JWT_SECRET used historically\n}\n\nfunction httpLogging() {\n if (process.env.HTTP_LOGGING === undefined) {\n // on by default unless otherwise specified\n return true\n }\n\n return process.env.HTTP_LOGGING\n}\n\nfunction getPackageJsonFields(): {\n VERSION: string\n SERVICE_NAME: string\n} {\n function findFileInAncestors(\n fileName: string,\n currentDir: string\n ): string | null {\n const filePath = `${currentDir}/${fileName}`\n if (existsSync(filePath)) {\n return filePath\n }\n\n const parentDir = `${currentDir}/..`\n if (parentDir === currentDir) {\n // reached root directory\n return null\n }\n\n return findFileInAncestors(fileName, parentDir)\n }\n\n try {\n const packageJsonFile = findFileInAncestors(\"package.json\", process.cwd())\n const content = readFileSync(packageJsonFile!, \"utf-8\")\n const parsedContent = JSON.parse(content)\n return {\n VERSION: process.env.BUDIBASE_VERSION || parsedContent.version,\n SERVICE_NAME: parsedContent.name,\n }\n } catch {\n // throwing an error here is confusing/causes backend-core to be hard to import\n return { VERSION: process.env.BUDIBASE_VERSION || \"\", SERVICE_NAME: \"\" }\n }\n}\n\nfunction isWorker() {\n return environment.SERVICE_TYPE === ServiceType.WORKER\n}\n\nfunction isApps() {\n return environment.SERVICE_TYPE === ServiceType.APPS\n}\n\nconst environment = {\n isTest,\n isJest,\n isDev,\n isWorker,\n isApps,\n isProd: () => {\n return !isDev()\n },\n JS_BCRYPT: process.env.JS_BCRYPT,\n JWT_SECRET: process.env.JWT_SECRET,\n JWT_SECRET_FALLBACK: process.env.JWT_SECRET_FALLBACK,\n ENCRYPTION_KEY: process.env.ENCRYPTION_KEY,\n API_ENCRYPTION_KEY: getAPIEncryptionKey(),\n COUCH_DB_URL: process.env.COUCH_DB_URL || \"http://localhost:4005\",\n COUCH_DB_SQL_URL: process.env.COUCH_DB_SQL_URL || \"http://localhost:4984\",\n COUCH_DB_USERNAME: process.env.COUCH_DB_USER,\n COUCH_DB_PASSWORD: process.env.COUCH_DB_PASSWORD,\n GOOGLE_CLIENT_ID: process.env.GOOGLE_CLIENT_ID,\n GOOGLE_CLIENT_SECRET: process.env.GOOGLE_CLIENT_SECRET,\n SALT_ROUNDS: process.env.SALT_ROUNDS,\n REDIS_URL: process.env.REDIS_URL || \"localhost:6379\",\n REDIS_PASSWORD: process.env.REDIS_PASSWORD,\n REDIS_CLUSTERED: process.env.REDIS_CLUSTERED,\n MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,\n MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,\n AWS_REGION: process.env.AWS_REGION,\n MINIO_URL: process.env.MINIO_URL,\n MINIO_ENABLED: process.env.MINIO_ENABLED || 1,\n INTERNAL_API_KEY: process.env.INTERNAL_API_KEY,\n INTERNAL_API_KEY_FALLBACK: process.env.INTERNAL_API_KEY_FALLBACK,\n MULTI_TENANCY: process.env.MULTI_TENANCY,\n ACCOUNT_PORTAL_URL:\n process.env.ACCOUNT_PORTAL_URL || \"https://account.budibase.app\",\n ACCOUNT_PORTAL_API_KEY: process.env.ACCOUNT_PORTAL_API_KEY || \"\",\n DISABLE_ACCOUNT_PORTAL: process.env.DISABLE_ACCOUNT_PORTAL,\n SELF_HOSTED: selfHosted,\n COOKIE_DOMAIN: process.env.COOKIE_DOMAIN,\n PLATFORM_URL: process.env.PLATFORM_URL || \"\",\n POSTHOG_TOKEN: process.env.POSTHOG_TOKEN,\n ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,\n TENANT_FEATURE_FLAGS: process.env.TENANT_FEATURE_FLAGS,\n CLOUDFRONT_CDN: process.env.CLOUDFRONT_CDN,\n CLOUDFRONT_PRIVATE_KEY_64: process.env.CLOUDFRONT_PRIVATE_KEY_64,\n CLOUDFRONT_PUBLIC_KEY_ID: process.env.CLOUDFRONT_PUBLIC_KEY_ID,\n BACKUPS_BUCKET_NAME:\n process.env.BACKUPS_BUCKET_NAME || DefaultBucketName.BACKUPS,\n APPS_BUCKET_NAME: process.env.APPS_BUCKET_NAME || DefaultBucketName.APPS,\n TEMPLATES_BUCKET_NAME:\n 
process.env.TEMPLATES_BUCKET_NAME || DefaultBucketName.TEMPLATES,\n GLOBAL_BUCKET_NAME:\n process.env.GLOBAL_BUCKET_NAME || DefaultBucketName.GLOBAL,\n PLUGIN_BUCKET_NAME:\n process.env.PLUGIN_BUCKET_NAME || DefaultBucketName.PLUGINS,\n USE_COUCH: process.env.USE_COUCH || true,\n MOCK_REDIS: process.env.MOCK_REDIS,\n DEFAULT_LICENSE: process.env.DEFAULT_LICENSE,\n SERVICE: process.env.SERVICE || \"budibase\",\n LOG_LEVEL: process.env.LOG_LEVEL || \"info\",\n SESSION_UPDATE_PERIOD: process.env.SESSION_UPDATE_PERIOD,\n DEPLOYMENT_ENVIRONMENT:\n process.env.DEPLOYMENT_ENVIRONMENT || \"docker-compose\",\n HTTP_LOGGING: httpLogging(),\n ENABLE_AUDIT_LOG_IP_ADDR: process.env.ENABLE_AUDIT_LOG_IP_ADDR,\n // smtp\n SMTP_FALLBACK_ENABLED: process.env.SMTP_FALLBACK_ENABLED,\n SMTP_USER: process.env.SMTP_USER,\n SMTP_PASSWORD: process.env.SMTP_PASSWORD,\n SMTP_HOST: process.env.SMTP_HOST,\n SMTP_PORT: parseInt(process.env.SMTP_PORT || \"\"),\n SMTP_FROM_ADDRESS: process.env.SMTP_FROM_ADDRESS,\n DISABLE_JWT_WARNING: process.env.DISABLE_JWT_WARNING,\n BLACKLIST_IPS: process.env.BLACKLIST_IPS,\n SERVICE_TYPE: \"unknown\",\n PASSWORD_MIN_LENGTH: process.env.PASSWORD_MIN_LENGTH,\n PASSWORD_MAX_LENGTH: process.env.PASSWORD_MAX_LENGTH,\n /**\n * Enable to allow an admin user to login using a password.\n * This can be useful to prevent lockout when configuring SSO.\n * However, this should be turned OFF by default for security purposes.\n */\n ENABLE_SSO_MAINTENANCE_MODE: selfHosted\n ? process.env.ENABLE_SSO_MAINTENANCE_MODE\n : false,\n ...getPackageJsonFields(),\n DISABLE_PINO_LOGGER: process.env.DISABLE_PINO_LOGGER,\n OFFLINE_MODE: process.env.OFFLINE_MODE,\n _set(key: any, value: any) {\n process.env[key] = value\n // @ts-ignore\n environment[key] = value\n },\n ROLLING_LOG_MAX_SIZE: process.env.ROLLING_LOG_MAX_SIZE || \"10M\",\n}\n\n// clean up any environment variable edge cases\nfor (let [key, value] of Object.entries(environment)) {\n // handle the edge case of \"0\" to disable an environment variable\n if (value === \"0\") {\n // @ts-ignore\n environment[key] = 0\n }\n // handle the edge case of \"false\" to disable an environment variable\n if (value === \"false\") {\n // @ts-ignore\n environment[key] = 0\n }\n}\n\nexport default environment\n", "import { AsyncLocalStorage } from \"async_hooks\"\nimport { ContextMap } from \"./types\"\n\nexport default class Context {\n static storage = new AsyncLocalStorage<ContextMap>()\n\n static run<T>(context: ContextMap, func: () => T) {\n return Context.storage.run(context, () => func())\n }\n\n static get(): ContextMap {\n return Context.storage.getStore() as ContextMap\n }\n}\n", "import { APP_DEV_PREFIX, APP_PREFIX } from \"../constants\"\nimport { App } from \"@budibase/types\"\n\nconst NO_APP_ERROR = \"No app provided\"\n\nexport function isDevAppID(appId?: string) {\n if (!appId) {\n throw NO_APP_ERROR\n }\n return appId.startsWith(APP_DEV_PREFIX)\n}\n\nexport function isProdAppID(appId?: string) {\n if (!appId) {\n throw NO_APP_ERROR\n }\n return appId.startsWith(APP_PREFIX) && !isDevAppID(appId)\n}\n\nexport function isDevApp(app: App) {\n if (!app) {\n throw NO_APP_ERROR\n }\n return isDevAppID(app.appId)\n}\n\n/**\n * Generates a development app ID from a real app ID.\n * @returns the dev app ID which can be used for dev database.\n */\nexport function getDevelopmentAppID(appId: string) {\n if (!appId || appId.startsWith(APP_DEV_PREFIX)) {\n return appId\n }\n // split to take off the app_ element, then join it together incase any other app_ 
exist\n const split = appId.split(APP_PREFIX)\n split.shift()\n const rest = split.join(APP_PREFIX)\n return `${APP_DEV_PREFIX}${rest}`\n}\nexport const getDevAppID = getDevelopmentAppID\n\n/**\n * Convert a development app ID to a deployed app ID.\n */\nexport function getProdAppID(appId: string) {\n if (!appId || !appId.startsWith(APP_DEV_PREFIX)) {\n return appId\n }\n // split to take off the app_dev element, then join it together incase any other app_ exist\n const split = appId.split(APP_DEV_PREFIX)\n split.shift()\n const rest = split.join(APP_DEV_PREFIX)\n return `${APP_PREFIX}${rest}`\n}\n\nexport function extractAppUUID(id: string) {\n const split = id?.split(\"_\") || []\n return split.length ? split[split.length - 1] : null\n}\n", "import env from \"../../environment\"\n\nexport const getCouchInfo = (connection?: string) => {\n const urlInfo = getUrlInfo(connection)\n let username\n let password\n if (urlInfo.auth?.username) {\n // set from url\n username = urlInfo.auth.username\n } else if (env.COUCH_DB_USERNAME) {\n // set from env\n username = env.COUCH_DB_USERNAME\n } else if (!env.isTest()) {\n throw new Error(\"CouchDB username not set\")\n }\n if (urlInfo.auth?.password) {\n // set from url\n password = urlInfo.auth.password\n } else if (env.COUCH_DB_PASSWORD) {\n // set from env\n password = env.COUCH_DB_PASSWORD\n } else if (!env.isTest()) {\n throw new Error(\"CouchDB password not set\")\n }\n const authCookie = Buffer.from(`${username}:${password}`).toString(\"base64\")\n return {\n url: urlInfo.url!,\n auth: {\n username: username,\n password: password,\n },\n cookie: `Basic ${authCookie}`,\n }\n}\n\nexport const getUrlInfo = (url = env.COUCH_DB_URL) => {\n let cleanUrl, username, password, host\n if (url) {\n // Ensure the URL starts with a protocol\n const protoRegex = /^https?:\\/\\//i\n if (!protoRegex.test(url)) {\n url = `http://${url}`\n }\n\n // Split into protocol and remainder\n const split = url.split(\"://\")\n const protocol = split[0]\n const rest = split.slice(1).join(\"://\")\n\n // Extract auth if specified\n if (url.includes(\"@\")) {\n // Split into host and remainder\n let parts = rest.split(\"@\")\n host = parts[parts.length - 1]\n let auth = parts.slice(0, -1).join(\"@\")\n\n // Split auth into username and password\n if (auth.includes(\":\")) {\n const authParts = auth.split(\":\")\n username = authParts[0]\n password = authParts.slice(1).join(\":\")\n } else {\n username = auth\n }\n } else {\n host = rest\n }\n cleanUrl = `${protocol}://${host}`\n }\n return {\n url: cleanUrl,\n auth: {\n username,\n password,\n },\n }\n}\n", "/**\n * Makes sure that a URL has the correct number of slashes, while maintaining the\n * http(s):// double slashes.\n * @param url The URL to test and remove any extra double slashes.\n * @return The updated url.\n */\nexport function checkSlashesInUrl(url: string) {\n return url.replace(/(https?:\\/\\/)|(\\/)+/g, \"$1$2\")\n}\n", "import { getCouchInfo } from \"./connections\"\nimport fetch from \"node-fetch\"\nimport { checkSlashesInUrl } from \"../../helpers\"\n\nexport async function directCouchCall(\n path: string,\n method: string = \"GET\",\n body?: any\n) {\n let { url, cookie } = getCouchInfo()\n const couchUrl = `${url}/${path}`\n return await directCouchUrlCall({ url: couchUrl, cookie, method, body })\n}\n\nexport async function directCouchUrlCall({\n url,\n cookie,\n method,\n body,\n}: {\n url: string\n cookie: string\n method: string\n body?: any\n}) {\n const params: any = {\n method: method,\n 
headers: {\n Authorization: cookie,\n },\n }\n if (body && method !== \"GET\") {\n params.body = JSON.stringify(body)\n params.headers[\"Content-Type\"] = \"application/json\"\n }\n return await fetch(checkSlashesInUrl(encodeURI(url)), params)\n}\n\nexport async function directCouchQuery(\n path: string,\n method: string = \"GET\",\n body?: any\n) {\n const response = await directCouchCall(path, method, body)\n if (response.status < 300) {\n return await response.json()\n } else {\n throw \"Cannot connect to CouchDB instance\"\n }\n}\n", "import PouchDB from \"pouchdb\"\nimport env from \"../../environment\"\nimport { PouchOptions } from \"@budibase/types\"\nimport { getCouchInfo } from \"./connections\"\n\nlet Pouch: any\nlet initialised = false\n\n/**\n * Return a constructor for PouchDB.\n * This should be rarely used outside of the main application config.\n * Exposed for exceptional cases such as in-memory views.\n */\nexport const getPouch = (opts: PouchOptions = {}) => {\n let { url, cookie } = getCouchInfo()\n let POUCH_DB_DEFAULTS = {\n prefix: url,\n fetch: (url: string, opts: any) => {\n // use a specific authorization cookie - be very explicit about how we authenticate\n opts.headers.set(\"Authorization\", cookie)\n return PouchDB.fetch(url, opts)\n },\n }\n\n if (opts.inMemory) {\n const inMemory = require(\"pouchdb-adapter-memory\")\n PouchDB.plugin(inMemory)\n POUCH_DB_DEFAULTS = {\n // @ts-ignore\n adapter: \"memory\",\n }\n }\n\n if (opts.onDisk) {\n POUCH_DB_DEFAULTS = {\n // @ts-ignore\n adapter: \"leveldb\",\n }\n }\n\n if (opts.replication) {\n const replicationStream = require(\"@budibase/pouchdb-replication-stream\")\n PouchDB.plugin(replicationStream.plugin)\n // @ts-ignore\n PouchDB.adapter(\"writableStream\", replicationStream.adapters.writableStream)\n }\n\n if (opts.find) {\n const find = require(\"pouchdb-find\")\n PouchDB.plugin(find)\n }\n\n return PouchDB.defaults(POUCH_DB_DEFAULTS)\n}\n\nexport function init(opts?: PouchOptions) {\n Pouch = getPouch(opts)\n initialised = true\n}\n\nconst checkInitialised = () => {\n if (!initialised) {\n throw new Error(\"init has not been called\")\n }\n}\n\nexport function getPouchDB(dbName: string, opts?: any): PouchDB.Database {\n checkInitialised()\n const db = new Pouch(dbName, opts)\n const dbPut = db.put\n db.put = async (doc: any, options = {}) => {\n if (!doc.createdAt) {\n doc.createdAt = new Date().toISOString()\n }\n doc.updatedAt = new Date().toISOString()\n return dbPut(doc, options)\n }\n db.exists = async () => {\n const info = await db.info()\n return !info.error\n }\n return db\n}\n\n// use this function if you have called getPouchDB - close\n// the databases you've opened once finished\nexport async function closePouchDB(db: PouchDB.Database) {\n if (!db || env.isTest()) {\n return\n }\n try {\n // specifically await so that if there is an error, it can be ignored\n return await db.close()\n } catch (err) {\n // ignore error, already closed\n }\n}\n", "import { v4 } from \"uuid\"\n\nexport function newid() {\n return v4().replace(/-/g, \"\")\n}\n", "import {\n DocumentScope,\n DocumentDestroyResponse,\n DocumentInsertResponse,\n DocumentBulkResponse,\n OkResponse,\n} from \"@budibase/nano\"\nimport {\n AllDocsResponse,\n AnyDocument,\n Database,\n DatabaseDumpOpts,\n DatabasePutOpts,\n DatabaseQueryOpts,\n Document,\n} from \"@budibase/types\"\nimport tracer from \"dd-trace\"\nimport { Writable } from \"stream\"\n\nexport class DDInstrumentedDatabase implements Database {\n constructor(private readonly 
db: Database) {}\n\n get name(): string {\n return this.db.name\n }\n\n exists(): Promise<boolean> {\n return tracer.trace(\"db.exists\", span => {\n span?.addTags({ db_name: this.name })\n return this.db.exists()\n })\n }\n\n get<T extends Document>(id?: string | undefined): Promise<T> {\n return tracer.trace(\"db.get\", span => {\n span?.addTags({ db_name: this.name, doc_id: id })\n return this.db.get(id)\n })\n }\n\n getMultiple<T extends Document>(\n ids: string[],\n opts?: { allowMissing?: boolean | undefined } | undefined\n ): Promise<T[]> {\n return tracer.trace(\"db.getMultiple\", span => {\n span?.addTags({\n db_name: this.name,\n num_docs: ids.length,\n allow_missing: opts?.allowMissing,\n })\n return this.db.getMultiple(ids, opts)\n })\n }\n\n remove(\n id: string | Document,\n rev?: string | undefined\n ): Promise<DocumentDestroyResponse> {\n return tracer.trace(\"db.remove\", span => {\n span?.addTags({ db_name: this.name, doc_id: id })\n return this.db.remove(id, rev)\n })\n }\n\n put(\n document: AnyDocument,\n opts?: DatabasePutOpts | undefined\n ): Promise<DocumentInsertResponse> {\n return tracer.trace(\"db.put\", span => {\n span?.addTags({ db_name: this.name, doc_id: document._id })\n return this.db.put(document, opts)\n })\n }\n\n bulkDocs(documents: AnyDocument[]): Promise<DocumentBulkResponse[]> {\n return tracer.trace(\"db.bulkDocs\", span => {\n span?.addTags({ db_name: this.name, num_docs: documents.length })\n return this.db.bulkDocs(documents)\n })\n }\n\n allDocs<T extends Document>(\n params: DatabaseQueryOpts\n ): Promise<AllDocsResponse<T>> {\n return tracer.trace(\"db.allDocs\", span => {\n span?.addTags({ db_name: this.name })\n return this.db.allDocs(params)\n })\n }\n\n query<T extends Document>(\n viewName: string,\n params: DatabaseQueryOpts\n ): Promise<AllDocsResponse<T>> {\n return tracer.trace(\"db.query\", span => {\n span?.addTags({ db_name: this.name, view_name: viewName })\n return this.db.query(viewName, params)\n })\n }\n\n destroy(): Promise<void | OkResponse> {\n return tracer.trace(\"db.destroy\", span => {\n span?.addTags({ db_name: this.name })\n return this.db.destroy()\n })\n }\n\n compact(): Promise<void | OkResponse> {\n return tracer.trace(\"db.compact\", span => {\n span?.addTags({ db_name: this.name })\n return this.db.compact()\n })\n }\n\n dump(stream: Writable, opts?: DatabaseDumpOpts | undefined): Promise<any> {\n return tracer.trace(\"db.dump\", span => {\n span?.addTags({ db_name: this.name })\n return this.db.dump(stream, opts)\n })\n }\n\n load(...args: any[]): Promise<any> {\n return tracer.trace(\"db.load\", span => {\n span?.addTags({ db_name: this.name })\n return this.db.load(...args)\n })\n }\n\n createIndex(...args: any[]): Promise<any> {\n return tracer.trace(\"db.createIndex\", span => {\n span?.addTags({ db_name: this.name })\n return this.db.createIndex(...args)\n })\n }\n\n deleteIndex(...args: any[]): Promise<any> {\n return tracer.trace(\"db.deleteIndex\", span => {\n span?.addTags({ db_name: this.name })\n return this.db.deleteIndex(...args)\n })\n }\n\n getIndexes(...args: any[]): Promise<any> {\n return tracer.trace(\"db.getIndexes\", span => {\n span?.addTags({ db_name: this.name })\n return this.db.getIndexes(...args)\n })\n }\n}\n", "import Nano from \"@budibase/nano\"\nimport {\n AllDocsResponse,\n AnyDocument,\n Database,\n DatabaseOpts,\n DatabaseQueryOpts,\n DatabasePutOpts,\n DatabaseCreateIndexOpts,\n DatabaseDeleteIndexOpts,\n Document,\n isDocument,\n RowResponse,\n} from 
\"@budibase/types\"\nimport { getCouchInfo } from \"./connections\"\nimport { directCouchUrlCall } from \"./utils\"\nimport { getPouchDB } from \"./pouchDB\"\nimport { WriteStream, ReadStream } from \"fs\"\nimport { newid } from \"../../docIds/newid\"\nimport { DDInstrumentedDatabase } from \"../instrumentation\"\n\nconst DATABASE_NOT_FOUND = \"Database does not exist.\"\n\nfunction buildNano(couchInfo: { url: string; cookie: string }) {\n return Nano({\n url: couchInfo.url,\n requestDefaults: {\n headers: {\n Authorization: couchInfo.cookie,\n },\n },\n parseUrl: false,\n })\n}\n\ntype DBCall<T> = () => Promise<T>\n\nexport function DatabaseWithConnection(\n dbName: string,\n connection: string,\n opts?: DatabaseOpts\n) {\n const db = new DatabaseImpl(dbName, opts, connection)\n return new DDInstrumentedDatabase(db)\n}\n\nexport class DatabaseImpl implements Database {\n public readonly name: string\n private static nano: Nano.ServerScope\n private readonly instanceNano?: Nano.ServerScope\n private readonly pouchOpts: DatabaseOpts\n\n private readonly couchInfo = getCouchInfo()\n\n constructor(dbName: string, opts?: DatabaseOpts, connection?: string) {\n this.name = dbName\n this.pouchOpts = opts || {}\n if (connection) {\n this.couchInfo = getCouchInfo(connection)\n this.instanceNano = buildNano(this.couchInfo)\n }\n if (!DatabaseImpl.nano) {\n DatabaseImpl.init()\n }\n }\n\n static init() {\n const couchInfo = getCouchInfo()\n DatabaseImpl.nano = buildNano(couchInfo)\n }\n\n async exists() {\n const response = await directCouchUrlCall({\n url: `${this.couchInfo.url}/${this.name}`,\n method: \"HEAD\",\n cookie: this.couchInfo.cookie,\n })\n return response.status === 200\n }\n\n private nano() {\n return this.instanceNano || DatabaseImpl.nano\n }\n\n private getDb() {\n return this.nano().db.use(this.name)\n }\n\n private async checkAndCreateDb() {\n let shouldCreate = !this.pouchOpts?.skip_setup\n // check exists in a lightweight fashion\n let exists = await this.exists()\n if (!shouldCreate && !exists) {\n throw new Error(\"DB does not exist\")\n }\n if (!exists) {\n try {\n await this.nano().db.create(this.name)\n } catch (err: any) {\n // Handling race conditions\n if (err.statusCode !== 412) {\n throw err\n }\n }\n }\n return this.getDb()\n }\n\n // this function fetches the DB and handles if DB creation is needed\n private async performCall<T>(\n call: (db: Nano.DocumentScope<any>) => Promise<DBCall<T>> | DBCall<T>\n ): Promise<any> {\n const db = this.getDb()\n const fnc = await call(db)\n try {\n return await fnc()\n } catch (err: any) {\n if (err.statusCode === 404 && err.reason === DATABASE_NOT_FOUND) {\n await this.checkAndCreateDb()\n return await this.performCall(call)\n } else if (err.statusCode) {\n err.status = err.statusCode\n }\n throw err\n }\n }\n\n async get<T extends Document>(id?: string): Promise<T> {\n return this.performCall(db => {\n if (!id) {\n throw new Error(\"Unable to get doc without a valid _id.\")\n }\n return () => db.get(id)\n })\n }\n\n async getMultiple<T extends Document>(\n ids: string[],\n opts?: { allowMissing?: boolean }\n ): Promise<T[]> {\n // get unique\n ids = [...new Set(ids)]\n const response = await this.allDocs<T>({\n keys: ids,\n include_docs: true,\n })\n const rowUnavailable = (row: RowResponse<T>) => {\n // row is deleted - key lookup can return this\n if (row.doc == null || (\"deleted\" in row.value && row.value.deleted)) {\n return true\n }\n return row.error === \"not_found\"\n }\n\n const rows = response.rows.filter(row => 
!rowUnavailable(row))\n const someMissing = rows.length !== response.rows.length\n // some were filtered out - means some missing\n if (!opts?.allowMissing && someMissing) {\n const missing = response.rows.filter(row => rowUnavailable(row))\n const missingIds = missing.map(row => row.key).join(\", \")\n throw new Error(`Unable to get documents: ${missingIds}`)\n }\n return rows.map(row => row.doc!)\n }\n\n async remove(idOrDoc: string | Document, rev?: string) {\n return this.performCall(db => {\n let _id: string\n let _rev: string\n\n if (isDocument(idOrDoc)) {\n _id = idOrDoc._id!\n _rev = idOrDoc._rev!\n } else {\n _id = idOrDoc\n _rev = rev!\n }\n\n if (!_id || !_rev) {\n throw new Error(\"Unable to remove doc without a valid _id and _rev.\")\n }\n return () => db.destroy(_id, _rev)\n })\n }\n\n async post(document: AnyDocument, opts?: DatabasePutOpts) {\n if (!document._id) {\n document._id = newid()\n }\n return this.put(document, opts)\n }\n\n async put(document: AnyDocument, opts?: DatabasePutOpts) {\n if (!document._id) {\n throw new Error(\"Cannot store document without _id field.\")\n }\n return this.performCall(async db => {\n if (!document.createdAt) {\n document.createdAt = new Date().toISOString()\n }\n document.updatedAt = new Date().toISOString()\n if (opts?.force && document._id) {\n try {\n const existing = await this.get(document._id)\n if (existing) {\n document._rev = existing._rev\n }\n } catch (err: any) {\n if (err.status !== 404) {\n throw err\n }\n }\n }\n return () => db.insert(document)\n })\n }\n\n async bulkDocs(documents: AnyDocument[]) {\n return this.performCall(db => {\n return () => db.bulk({ docs: documents })\n })\n }\n\n async allDocs<T extends Document>(\n params: DatabaseQueryOpts\n ): Promise<AllDocsResponse<T>> {\n return this.performCall(db => {\n return () => db.list(params)\n })\n }\n\n async query<T extends Document>(\n viewName: string,\n params: DatabaseQueryOpts\n ): Promise<AllDocsResponse<T>> {\n return this.performCall(db => {\n const [database, view] = viewName.split(\"/\")\n return () => db.view(database, view, params)\n })\n }\n\n async destroy() {\n try {\n return await this.nano().db.destroy(this.name)\n } catch (err: any) {\n // didn't exist, don't worry\n if (err.statusCode === 404) {\n return\n } else {\n throw { ...err, status: err.statusCode }\n }\n }\n }\n\n async compact() {\n return this.performCall(db => {\n return () => db.compact()\n })\n }\n\n // All below functions are in-frequently called, just utilise PouchDB\n // for them as it implements them better than we can\n async dump(stream: WriteStream, opts?: { filter?: any }) {\n const pouch = getPouchDB(this.name)\n // @ts-ignore\n return pouch.dump(stream, opts)\n }\n\n async load(stream: ReadStream) {\n const pouch = getPouchDB(this.name)\n // @ts-ignore\n return pouch.load(stream)\n }\n\n async createIndex(opts: DatabaseCreateIndexOpts) {\n const pouch = getPouchDB(this.name)\n return pouch.createIndex(opts)\n }\n\n async deleteIndex(opts: DatabaseDeleteIndexOpts) {\n const pouch = getPouchDB(this.name)\n return pouch.deleteIndex(opts)\n }\n\n async getIndexes() {\n const pouch = getPouchDB(this.name)\n return pouch.getIndexes()\n }\n}\n", "export const CONSTANT_INTERNAL_ROW_COLS = [\n \"_id\",\n \"_rev\",\n \"type\",\n \"createdAt\",\n \"updatedAt\",\n \"tableId\",\n] as const\n\nexport const CONSTANT_EXTERNAL_ROW_COLS = [\"_id\", \"_rev\", \"tableId\"] as const\n\nexport function isInternalColumnName(name: string): boolean {\n return (CONSTANT_INTERNAL_ROW_COLS as 
readonly string[]).includes(name)\n}\n", "export * from \"./connections\"\nexport * from \"./DatabaseImpl\"\nexport * from \"./utils\"\nexport { init, getPouch, getPouchDB, closePouchDB } from \"./pouchDB\"\nexport * from \"../constants\"\n", "import { directCouchQuery, DatabaseImpl } from \"./couch\"\nimport { CouchFindOptions, Database, DatabaseOpts } from \"@budibase/types\"\nimport { DDInstrumentedDatabase } from \"./instrumentation\"\n\nexport function getDB(dbName: string, opts?: DatabaseOpts): Database {\n return new DDInstrumentedDatabase(new DatabaseImpl(dbName, opts))\n}\n\n// we have to use a callback for this so that we can close\n// the DB when we're done, without this manual requests would\n// need to close the database when done with it to avoid memory leaks\nexport async function doWithDB<T>(\n dbName: string,\n cb: (db: Database) => Promise<T>,\n opts?: DatabaseOpts\n) {\n const db = getDB(dbName, opts)\n // need this to be async so that we can correctly close DB after all\n // async operations have been completed\n return await cb(db)\n}\n\nexport async function directCouchAllDbs(queryString?: string) {\n let couchPath = \"/_all_dbs\"\n if (queryString) {\n couchPath += `?${queryString}`\n }\n return await directCouchQuery(couchPath)\n}\n\nexport async function directCouchFind(dbName: string, opts: CouchFindOptions) {\n const json = await directCouchQuery(`${dbName}/_find`, \"POST\", opts)\n return { rows: json.docs, bookmark: json.bookmark }\n}\n", "// some test cases call functions directly, need to\n// store an app ID to pretend there is a context\nimport env from \"../environment\"\nimport Context from \"./Context\"\nimport * as conversions from \"../docIds/conversions\"\nimport { getDB } from \"../db/db\"\nimport {\n DocumentType,\n SEPARATOR,\n StaticDatabases,\n DEFAULT_TENANT_ID,\n} from \"../constants\"\nimport { Database, IdentityContext } from \"@budibase/types\"\nimport { ContextMap } from \"./types\"\n\nlet TEST_APP_ID: string | null = null\n\nexport function getGlobalDBName(tenantId?: string) {\n // tenant ID can be set externally, for example user API where\n // new tenants are being created, this may be the case\n if (!tenantId) {\n tenantId = getTenantId()\n }\n return baseGlobalDBName(tenantId)\n}\n\nexport function getAuditLogDBName(tenantId?: string) {\n if (!tenantId) {\n tenantId = getTenantId()\n }\n if (tenantId === DEFAULT_TENANT_ID) {\n return StaticDatabases.AUDIT_LOGS.name\n } else {\n return `${tenantId}${SEPARATOR}${StaticDatabases.AUDIT_LOGS.name}`\n }\n}\n\nexport function baseGlobalDBName(tenantId: string | undefined | null) {\n if (!tenantId || tenantId === DEFAULT_TENANT_ID) {\n return StaticDatabases.GLOBAL.name\n } else {\n return `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`\n }\n}\n\nexport function getPlatformURL() {\n return env.PLATFORM_URL\n}\n\nexport function isMultiTenant() {\n return !!env.MULTI_TENANCY\n}\n\nexport function isTenantIdSet() {\n const context = Context.get()\n return !!context?.tenantId\n}\n\nexport function isTenancyEnabled() {\n return env.MULTI_TENANCY\n}\n\n/**\n * Given an app ID this will attempt to retrieve the tenant ID from it.\n * @return The tenant ID found within the app ID.\n */\nexport function getTenantIDFromAppID(appId: string) {\n if (!appId) {\n return undefined\n }\n if (!isMultiTenant()) {\n return DEFAULT_TENANT_ID\n }\n const split = appId.split(SEPARATOR)\n const hasDev = split[1] === DocumentType.DEV\n if ((hasDev && split.length === 3) || (!hasDev && split.length === 2)) {\n 
return undefined\n }\n if (hasDev) {\n return split[2]\n } else {\n return split[1]\n }\n}\n\nfunction updateContext(updates: ContextMap): ContextMap {\n let context: ContextMap\n try {\n context = Context.get()\n } catch (err) {\n // no context, start empty\n context = {}\n }\n context = {\n ...context,\n ...updates,\n }\n return context\n}\n\nasync function newContext<T>(updates: ContextMap, task: () => T) {\n guardMigration()\n\n // see if there already is a context setup\n let context: ContextMap = updateContext(updates)\n return Context.run(context, task)\n}\n\nexport async function doInAutomationContext<T>(params: {\n appId: string\n automationId: string\n task: () => T\n}): Promise<T> {\n const tenantId = getTenantIDFromAppID(params.appId)\n return newContext(\n {\n tenantId,\n appId: params.appId,\n automationId: params.automationId,\n },\n params.task\n )\n}\n\nexport async function doInContext(appId: string, task: any): Promise<any> {\n const tenantId = getTenantIDFromAppID(appId)\n return newContext(\n {\n tenantId,\n appId,\n },\n task\n )\n}\n\nexport async function doInTenant<T>(\n tenantId: string | undefined,\n task: () => T\n): Promise<T> {\n // make sure default always selected in single tenancy\n if (!env.MULTI_TENANCY) {\n tenantId = tenantId || DEFAULT_TENANT_ID\n }\n\n const updates = tenantId ? { tenantId } : {}\n return newContext(updates, task)\n}\n\nexport async function doInAppContext<T>(\n appId: string,\n task: () => T\n): Promise<T> {\n return _doInAppContext(appId, task)\n}\n\nasync function _doInAppContext<T>(\n appId: string,\n task: () => T,\n extraContextSettings?: ContextMap\n): Promise<T> {\n if (!appId) {\n throw new Error(\"appId is required\")\n }\n\n const tenantId = getTenantIDFromAppID(appId)\n const updates: ContextMap = { appId, ...extraContextSettings }\n if (tenantId) {\n updates.tenantId = tenantId\n }\n\n return newContext(updates, task)\n}\n\nexport async function doInIdentityContext<T>(\n identity: IdentityContext,\n task: () => T\n): Promise<T> {\n if (!identity) {\n throw new Error(\"identity is required\")\n }\n\n const context: ContextMap = {\n identity,\n }\n if (identity.tenantId) {\n context.tenantId = identity.tenantId\n }\n return newContext(context, task)\n}\n\nfunction guardMigration() {\n const context = Context.get()\n if (context?.isMigrating) {\n throw new Error(\n \"The context cannot be changed, a migration is currently running\"\n )\n }\n}\n\nexport async function doInAppMigrationContext<T>(\n appId: string,\n task: () => T\n): Promise<T> {\n return _doInAppContext(appId, task, {\n isMigrating: true,\n })\n}\n\nexport function getIdentity(): IdentityContext | undefined {\n try {\n const context = Context.get()\n return context?.identity\n } catch (e) {\n // do nothing - identity is not in context\n }\n}\n\nexport function getTenantId(): string {\n if (!isMultiTenant()) {\n return DEFAULT_TENANT_ID\n }\n const context = Context.get()\n const tenantId = context?.tenantId\n if (!tenantId) {\n throw new Error(\"Tenant id not found\")\n }\n return tenantId\n}\n\nexport function getAutomationId(): string | undefined {\n const context = Context.get()\n return context?.automationId\n}\n\nexport function getAppId(): string | undefined {\n const context = Context.get()\n const foundId = context?.appId\n if (!foundId && env.isTest() && TEST_APP_ID) {\n return TEST_APP_ID\n } else {\n return foundId\n }\n}\n\nexport const getProdAppId = () => {\n const appId = getAppId()\n if (!appId) {\n throw new Error(\"Could not get appId\")\n 
}\n return conversions.getProdAppID(appId)\n}\n\nexport function doInEnvironmentContext(\n values: Record<string, string>,\n task: any\n) {\n if (!values) {\n throw new Error(\"Must supply environment variables.\")\n }\n const updates = {\n environmentVariables: values,\n }\n return newContext(updates, task)\n}\n\nexport function doInScimContext(task: any) {\n const updates: ContextMap = {\n isScim: true,\n }\n return newContext(updates, task)\n}\n\nexport function getEnvironmentVariables() {\n const context = Context.get()\n if (!context.environmentVariables) {\n return null\n } else {\n return context.environmentVariables\n }\n}\n\nexport function getGlobalDB(): Database {\n const context = Context.get()\n if (!context || (env.MULTI_TENANCY && !context.tenantId)) {\n throw new Error(\"Global DB not found\")\n }\n return getDB(baseGlobalDBName(context?.tenantId))\n}\n\nexport function getAuditLogsDB(): Database {\n if (!getTenantId()) {\n throw new Error(\"No tenant ID found - cannot open audit log DB\")\n }\n return getDB(getAuditLogDBName())\n}\n\n/**\n * Gets the app database based on whatever the request\n * contained, dev or prod.\n */\nexport function getAppDB(opts?: any): Database {\n const appId = getAppId()\n if (!appId) {\n throw new Error(\"Unable to retrieve app DB - no app ID.\")\n }\n return getDB(appId, opts)\n}\n\n/**\n * This specifically gets the prod app ID, if the request\n * contained a development app ID, this will get the prod one.\n */\nexport function getProdAppDB(opts?: any): Database {\n const appId = getAppId()\n if (!appId) {\n throw new Error(\"Unable to retrieve prod DB - no app ID.\")\n }\n return getDB(conversions.getProdAppID(appId), opts)\n}\n\n/**\n * This specifically gets the dev app ID, if the request\n * contained a prod app ID, this will get the dev one.\n */\nexport function getDevAppDB(opts?: any): Database {\n const appId = getAppId()\n if (!appId) {\n throw new Error(\"Unable to retrieve dev DB - no app ID.\")\n }\n return getDB(conversions.getDevelopmentAppID(appId), opts)\n}\n\nexport function isScim(): boolean {\n const context = Context.get()\n const scimCall = context?.isScim\n return !!scimCall\n}\n\nexport function getCurrentContext(): ContextMap | undefined {\n try {\n return Context.get()\n } catch (e) {\n return undefined\n }\n}\n", "export { DEFAULT_TENANT_ID } from \"../constants\"\nexport * as identity from \"./identity\"\nexport * from \"./mainContext\"\n", "import env from \"../environment\"\nimport * as Redis from \"ioredis\"\n\nconst SLOT_REFRESH_MS = 2000\nconst CONNECT_TIMEOUT_MS = 10000\nexport const SEPARATOR = \"-\"\n\n/**\n * These Redis databases help us to segment up a Redis keyspace by prepending the\n * specified database name onto the cache key. 
This means that a single real Redis database\n * can be split up a bit; allowing us to use scans on small databases to find some particular\n * keys within.\n * If writing a very large volume of keys is expected (say 10K+) then it is better to keep these out\n * of the default keyspace and use a separate one - the SelectableDatabase can be used for this.\n */\nexport enum Databases {\n PW_RESETS = \"pwReset\",\n VERIFICATIONS = \"verification\",\n INVITATIONS = \"invitation\",\n DEV_LOCKS = \"devLocks\",\n DEBOUNCE = \"debounce\",\n SESSIONS = \"session\",\n USER_CACHE = \"users\",\n FLAGS = \"flags\",\n APP_METADATA = \"appMetadata\",\n QUERY_VARS = \"queryVars\",\n LICENSES = \"license\",\n GENERIC_CACHE = \"data_cache\",\n WRITE_THROUGH = \"writeThrough\",\n LOCKS = \"locks\",\n SOCKET_IO = \"socket_io\",\n}\n\n/**\n * These define the numeric Redis databases that can be access with the SELECT command -\n * (https://redis.io/commands/select/). By default a Redis server/cluster will have 16 selectable\n * databases, increasing this count increases the amount of CPU/memory required to run the server.\n * Ideally new Redis keyspaces should be used sparingly, only when absolutely necessary for performance\n * to be maintained. Generally a keyspace can grow to be very large is scans are not needed or desired,\n * but if you need to walk through all values in a database periodically then a separate selectable\n * keyspace should be used.\n */\nexport enum SelectableDatabase {\n DEFAULT = 0,\n SOCKET_IO = 1,\n RATE_LIMITING = 2,\n UNUSED_2 = 3,\n UNUSED_3 = 4,\n UNUSED_4 = 5,\n UNUSED_5 = 6,\n UNUSED_6 = 7,\n UNUSED_7 = 8,\n UNUSED_8 = 9,\n UNUSED_9 = 10,\n UNUSED_10 = 11,\n UNUSED_11 = 12,\n UNUSED_12 = 13,\n UNUSED_13 = 14,\n UNUSED_14 = 15,\n}\n\nexport function getRedisConnectionDetails() {\n let password = env.REDIS_PASSWORD\n let url: string[] | string = env.REDIS_URL.split(\"//\")\n // get rid of the protocol\n url = url.length > 1 ? url[1] : url[0]\n // check for a password etc\n url = url.split(\"@\")\n if (url.length > 1) {\n // get the password\n password = url[0].split(\":\")[1]\n url = url[1]\n } else {\n url = url[0]\n }\n const [host, port] = url.split(\":\")\n\n const portNumber = parseInt(port)\n return {\n host,\n password,\n // assume default port for redis if invalid found\n port: isNaN(portNumber) ? 
6379 : portNumber,\n }\n}\n\nexport function getRedisOptions() {\n const { host, password, port } = getRedisConnectionDetails()\n let redisOpts: Redis.RedisOptions = {\n connectTimeout: CONNECT_TIMEOUT_MS,\n port: port,\n host,\n password,\n }\n let opts: Redis.ClusterOptions | Redis.RedisOptions = redisOpts\n if (env.REDIS_CLUSTERED) {\n opts = {\n connectTimeout: CONNECT_TIMEOUT_MS,\n redisOptions: {\n ...redisOpts,\n tls: {},\n },\n slotsRefreshTimeout: SLOT_REFRESH_MS,\n dnsLookup: (address: string, callback: any) => callback(null, address),\n } as Redis.ClusterOptions\n }\n return opts\n}\n\nexport function addDbPrefix(db: string, key: string) {\n if (key.includes(db)) {\n return key\n }\n return `${db}${SEPARATOR}${key}`\n}\n\nexport function removeDbPrefix(key: string) {\n let parts = key.split(SEPARATOR)\n if (parts.length >= 2) {\n parts.shift()\n return parts.join(SEPARATOR)\n } else {\n // return the only part\n return parts[0]\n }\n}\n", "import { Header } from \"../../constants\"\n\nconst correlator = require(\"correlation-id\")\n\nexport const setHeader = (headers: any) => {\n const correlationId = correlator.getId()\n if (correlationId) {\n headers[Header.CORRELATION_ID] = correlationId\n }\n}\n\nexport function getId() {\n return correlator.getId()\n}\n", "export * from \"./correlation\"\n", "import { join } from \"path\"\nimport { tmpdir } from \"os\"\nimport fs from \"fs\"\nimport env from \"../environment\"\n\n/****************************************************\n * NOTE: When adding a new bucket - name *\n * sure that S3 usages (like budibase-infra) *\n * have been updated to have a unique bucket name. *\n ****************************************************/\n// can't be an enum - only numbers can be used for computed types\nexport const ObjectStoreBuckets = {\n BACKUPS: env.BACKUPS_BUCKET_NAME,\n APPS: env.APPS_BUCKET_NAME,\n TEMPLATES: env.TEMPLATES_BUCKET_NAME,\n GLOBAL: env.GLOBAL_BUCKET_NAME,\n PLUGINS: env.PLUGIN_BUCKET_NAME,\n}\n\nconst bbTmp = join(tmpdir(), \".budibase\")\ntry {\n fs.mkdirSync(bbTmp)\n} catch (e: any) {\n if (e.code !== \"EEXIST\") {\n throw e\n }\n}\n\nexport function budibaseTempDir() {\n return bbTmp\n}\n", "import { getAppClient } from \"../redis/init\"\nimport { doWithDB, DocumentType } from \"../db\"\nimport { Database, App } from \"@budibase/types\"\n\nexport enum AppState {\n INVALID = \"invalid\",\n}\n\nexport interface DeletedApp {\n state: AppState\n}\n\nconst EXPIRY_SECONDS = 3600\n\n/**\n * The default populate app metadata function\n */\nasync function populateFromDB(appId: string) {\n return doWithDB(\n appId,\n (db: Database) => {\n return db.get<App>(DocumentType.APP_METADATA)\n },\n { skip_setup: true }\n )\n}\n\nfunction isInvalid(metadata?: { state: string }) {\n return !metadata || metadata.state === AppState.INVALID\n}\n\n/**\n * Get the requested app metadata by id.\n * Use redis cache to first read the app metadata.\n * If not present fallback to loading the app metadata directly and re-caching.\n * @param appId the id of the app to get metadata from.\n * @returns the app metadata.\n */\nexport async function getAppMetadata(appId: string): Promise<App | DeletedApp> {\n const client = await getAppClient()\n // try cache\n let metadata = await client.get(appId)\n if (!metadata) {\n let expiry: number | undefined = EXPIRY_SECONDS\n try {\n metadata = await populateFromDB(appId)\n } catch (err: any) {\n // app DB left around, but no metadata, it is invalid\n if (err && err.status === 404) {\n metadata = { state: 
AppState.INVALID }\n // don't expire the reference to an invalid app, it'll only be\n // updated if a metadata doc actually gets stored (app is remade/reverted)\n expiry = undefined\n } else {\n throw err\n }\n }\n // needed for some scenarios where the caching happens\n // so quickly the requests can get slightly out of sync\n // might store its invalid just before it stores its valid\n if (isInvalid(metadata)) {\n const temp = await client.get(appId)\n if (temp) {\n metadata = temp\n }\n }\n await client.store(appId, metadata, expiry)\n }\n\n return metadata\n}\n\n/**\n * Invalidate/reset the cached metadata when a change occurs in the db.\n * @param appId the cache key to bust/update.\n * @param newMetadata optional - can simply provide the new metadata to update with.\n * @return will respond with success when cache is updated.\n */\nexport async function invalidateAppMetadata(appId: string, newMetadata?: any) {\n if (!appId) {\n throw \"Cannot invalidate if no app ID provided.\"\n }\n const client = await getAppClient()\n await client.delete(appId)\n if (newMetadata) {\n await client.store(appId, newMetadata, EXPIRY_SECONDS)\n }\n}\n", "import {\n APP_PREFIX,\n DocumentType,\n InternalTable,\n SEPARATOR,\n} from \"../constants\"\nimport { newid } from \"./newid\"\n\n/**\n * Generates a new app ID.\n * @returns The new app ID which the app doc can be stored under.\n */\nexport const generateAppID = (tenantId?: string | null) => {\n let id = APP_PREFIX\n if (tenantId) {\n id += `${tenantId}${SEPARATOR}`\n }\n return `${id}${newid()}`\n}\n\n/**\n * Gets a new row ID for the specified table.\n * @param tableId The table which the row is being created for.\n * @param id If an ID is to be used then the UUID can be substituted for this.\n * @returns The new ID which a row doc can be stored under.\n */\nexport function generateRowID(tableId: string, id?: string) {\n id = id || newid()\n return `${DocumentType.ROW}${SEPARATOR}${tableId}${SEPARATOR}${id}`\n}\n\n/**\n * Generates a new workspace ID.\n * @returns The new workspace ID which the workspace doc can be stored under.\n */\nexport function generateWorkspaceID() {\n return `${DocumentType.WORKSPACE}${SEPARATOR}${newid()}`\n}\n\n/**\n * Generates a new global user ID.\n * @returns The new user ID which the user doc can be stored under.\n */\nexport function generateGlobalUserID(id?: any) {\n return `${DocumentType.USER}${SEPARATOR}${id || newid()}`\n}\n\nconst isGlobalUserIDRegex = new RegExp(`^${DocumentType.USER}${SEPARATOR}.+`)\nexport function isGlobalUserID(id: string) {\n return isGlobalUserIDRegex.test(id)\n}\n\n/**\n * Generates a new user ID based on the passed in global ID.\n * @param globalId The ID of the global user.\n * @returns The new user ID which the user doc can be stored under.\n */\nexport function generateUserMetadataID(globalId: string) {\n return generateRowID(InternalTable.USER_METADATA, globalId)\n}\n\n/**\n * Breaks up the ID to get the global ID.\n */\nexport function getGlobalIDFromUserMetadataID(id: string) {\n const prefix = `${DocumentType.ROW}${SEPARATOR}${InternalTable.USER_METADATA}${SEPARATOR}`\n if (!id || !id.includes(prefix)) {\n return id\n }\n return id.split(prefix)[1]\n}\n\n/**\n * Generates a template ID.\n * @param ownerId The owner/user of the template, this could be global or a workspace level.\n */\nexport function generateTemplateID(ownerId: any) {\n return `${DocumentType.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}${newid()}`\n}\n\nexport function generateAppUserID(prodAppId: string, 
userId: string) {\n return `${prodAppId}${SEPARATOR}${userId}`\n}\n\n/**\n * Generates a new role ID.\n * @returns The new role ID which the role doc can be stored under.\n */\nexport function generateRoleID(name: string) {\n const prefix = `${DocumentType.ROLE}${SEPARATOR}`\n if (name.startsWith(prefix)) {\n return name\n }\n return `${prefix}${name}`\n}\n\n/**\n * Utility function to be more verbose.\n */\nexport function prefixRoleID(name: string) {\n return generateRoleID(name)\n}\n\n/**\n * Generates a new dev info document ID - this is scoped to a user.\n * @returns The new dev info ID which info for dev (like api key) can be stored under.\n */\nexport const generateDevInfoID = (userId: any) => {\n return `${DocumentType.DEV_INFO}${SEPARATOR}${userId}`\n}\n\n/**\n * Generates a new plugin ID - to be used in the global DB.\n * @returns The new plugin ID which a plugin metadata document can be stored under.\n */\nexport const generatePluginID = (name: string) => {\n return `${DocumentType.PLUGIN}${SEPARATOR}${name}`\n}\n", "import {\n DocumentType,\n InternalTable,\n SEPARATOR,\n UNICODE_MAX,\n ViewName,\n} from \"../constants\"\nimport { getProdAppID } from \"./conversions\"\nimport { DatabaseQueryOpts } from \"@budibase/types\"\n\n/**\n * If creating DB allDocs/query params with only a single top level ID this can be used, this\n * is usually the case as most of our docs are top level e.g. tables, automations, users and so on.\n * More complex cases such as link docs and rows which have multiple levels of IDs that their\n * ID consists of need their own functions to build the allDocs parameters.\n * @param docType The type of document which input params are being built for, e.g. user,\n * link, app, table and so on.\n * @param docId The ID of the document minus its type - this is only needed if looking\n * for a singular document.\n * @param otherProps Add any other properties onto the request, e.g. include_docs.\n * @returns Parameters which can then be used with an allDocs request.\n */\nexport function getDocParams(\n docType: string,\n docId?: string | null,\n otherProps: Partial<DatabaseQueryOpts> = {}\n): DatabaseQueryOpts {\n if (docId == null) {\n docId = \"\"\n }\n return {\n ...otherProps,\n startkey: `${docType}${SEPARATOR}${docId}`,\n endkey: `${docType}${SEPARATOR}${docId}${UNICODE_MAX}`,\n }\n}\n\n/**\n * Gets the DB allDocs/query params for retrieving a row.\n * @param tableId The table in which the rows have been stored.\n * @param rowId The ID of the row which is being specifically queried for. This can be\n * left null to get all the rows in the table.\n * @param otherProps Any other properties to add to the request.\n * @returns Parameters which can then be used with an allDocs request.\n */\nexport function getRowParams(\n tableId?: string | null,\n rowId?: string | null,\n otherProps: Partial<DatabaseQueryOpts> = {}\n): DatabaseQueryOpts {\n if (tableId == null) {\n return getDocParams(DocumentType.ROW, null, otherProps)\n }\n\n const endOfKey = rowId == null ? 
`${tableId}${SEPARATOR}` : rowId\n\n return getDocParams(DocumentType.ROW, endOfKey, otherProps)\n}\n\n/**\n * Retrieve the correct index for a view based on default design DB.\n */\nexport function getQueryIndex(viewName: ViewName) {\n return `database/${viewName}`\n}\n\n/**\n * Check if a given ID is that of a table.\n * @returns {boolean}\n */\nexport const isTableId = (id: string) => {\n // this includes datasource plus tables\n return (\n id &&\n (id.startsWith(`${DocumentType.TABLE}${SEPARATOR}`) ||\n id.startsWith(`${DocumentType.DATASOURCE_PLUS}${SEPARATOR}`))\n )\n}\n\n/**\n * Check if a given ID is that of a datasource or datasource plus.\n * @returns {boolean}\n */\nexport const isDatasourceId = (id: string) => {\n // this covers both datasources and datasource plus\n return id && id.startsWith(`${DocumentType.DATASOURCE}${SEPARATOR}`)\n}\n\n/**\n * Gets parameters for retrieving workspaces.\n */\nexport function getWorkspaceParams(\n id = \"\",\n otherProps: Partial<DatabaseQueryOpts> = {}\n): DatabaseQueryOpts {\n return {\n ...otherProps,\n startkey: `${DocumentType.WORKSPACE}${SEPARATOR}${id}`,\n endkey: `${DocumentType.WORKSPACE}${SEPARATOR}${id}${UNICODE_MAX}`,\n }\n}\n\n/**\n * Gets parameters for retrieving users.\n */\nexport function getGlobalUserParams(\n globalId: any,\n otherProps: Partial<DatabaseQueryOpts> = {}\n): DatabaseQueryOpts {\n if (!globalId) {\n globalId = \"\"\n }\n const startkey = otherProps?.startkey\n return {\n ...otherProps,\n // need to include this incase pagination\n startkey: startkey\n ? startkey\n : `${DocumentType.USER}${SEPARATOR}${globalId}`,\n endkey: `${DocumentType.USER}${SEPARATOR}${globalId}${UNICODE_MAX}`,\n }\n}\n\n/**\n * Gets parameters for retrieving users, this is a utility function for the getDocParams function.\n */\nexport function getUserMetadataParams(\n userId?: string | null,\n otherProps: Partial<DatabaseQueryOpts> = {}\n): DatabaseQueryOpts {\n return getRowParams(InternalTable.USER_METADATA, userId, otherProps)\n}\n\nexport function getUsersByAppParams(\n appId: any,\n otherProps: Partial<DatabaseQueryOpts> = {}\n): DatabaseQueryOpts {\n const prodAppId = getProdAppID(appId)\n return {\n ...otherProps,\n startkey: prodAppId,\n endkey: `${prodAppId}${UNICODE_MAX}`,\n }\n}\n\n/**\n * Gets parameters for retrieving templates. Owner ID must be specified, either global or a workspace level.\n */\nexport function getTemplateParams(\n ownerId: any,\n templateId: any,\n otherProps = {}\n) {\n if (!templateId) {\n templateId = \"\"\n }\n let final\n if (templateId) {\n final = templateId\n } else {\n final = `${DocumentType.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}`\n }\n return {\n ...otherProps,\n startkey: final,\n endkey: `${final}${UNICODE_MAX}`,\n }\n}\n\n/**\n * Gets parameters for retrieving a role, this is a utility function for the getDocParams function.\n */\nexport function getRoleParams(roleId?: string | null, otherProps = {}) {\n return getDocParams(DocumentType.ROLE, roleId, otherProps)\n}\n\nexport function getStartEndKeyURL(baseKey: any, tenantId?: string) {\n const tenancy = tenantId ? 
`${SEPARATOR}${tenantId}` : \"\"\n return `startkey=\"${baseKey}${tenancy}\"&endkey=\"${baseKey}${tenancy}${UNICODE_MAX}\"`\n}\n\n/**\n * Gets parameters for retrieving automations, this is a utility function for the getDocParams function.\n */\nexport const getPluginParams = (pluginId?: string | null, otherProps = {}) => {\n return getDocParams(DocumentType.PLUGIN, pluginId, otherProps)\n}\n", "export * from \"./ids\"\nexport * from \"./params\"\n", "import env from \"../environment\"\nimport { DEFAULT_TENANT_ID, SEPARATOR, DocumentType } from \"../constants\"\nimport { getTenantId, getGlobalDBName } from \"../context\"\nimport { doWithDB, directCouchAllDbs } from \"./db\"\nimport { AppState, DeletedApp, getAppMetadata } from \"../cache/appMetadata\"\nimport { isDevApp, isDevAppID, getProdAppID } from \"../docIds/conversions\"\nimport { App, Database } from \"@budibase/types\"\nimport { getStartEndKeyURL } from \"../docIds\"\n\nexport * from \"../docIds\"\n\n/**\n * if in production this will use the CouchDB _all_dbs call to retrieve a list of databases. If testing\n * when using Pouch it will use the pouchdb-all-dbs package.\n * opts.efficient can be provided to make sure this call is always quick in a multi-tenant environment,\n * but it may not be 100% accurate in full efficiency mode (some tenantless apps may be missed).\n */\nexport async function getAllDbs(opts = { efficient: false }) {\n const efficient = opts && opts.efficient\n\n let dbs: any[] = []\n async function addDbs(queryString?: string) {\n const json = await directCouchAllDbs(queryString)\n dbs = dbs.concat(json)\n }\n let tenantId = getTenantId()\n if (!env.MULTI_TENANCY || (!efficient && tenantId === DEFAULT_TENANT_ID)) {\n // just get all DBs when:\n // - single tenancy\n // - default tenant\n // - apps dbs don't contain tenant id\n // - non-default tenant dbs are filtered out application side in getAllApps\n await addDbs()\n } else {\n // get prod apps\n await addDbs(getStartEndKeyURL(DocumentType.APP, tenantId))\n // get dev apps\n await addDbs(getStartEndKeyURL(DocumentType.APP_DEV, tenantId))\n // add global db name\n dbs.push(getGlobalDBName(tenantId))\n }\n return dbs\n}\n\n/**\n * Lots of different points in the system need to find the full list of apps, this will\n * enumerate the entire CouchDB cluster and get the list of databases (every app).\n *\n * @return returns the app information document stored in each app database.\n */\nexport async function getAllApps({\n dev,\n all,\n idsOnly,\n efficient,\n}: any = {}): Promise<App[] | string[]> {\n let tenantId = getTenantId()\n if (!env.MULTI_TENANCY && !tenantId) {\n tenantId = DEFAULT_TENANT_ID\n }\n let dbs = await getAllDbs({ efficient })\n const appDbNames = dbs.filter((dbName: any) => {\n if (env.isTest() && !dbName) {\n return false\n }\n\n const split = dbName.split(SEPARATOR)\n // it is an app, check the tenantId\n if (split[0] === DocumentType.APP) {\n // tenantId is always right before the UUID\n const possibleTenantId = split[split.length - 2]\n\n const noTenantId =\n split.length === 2 || possibleTenantId === DocumentType.DEV\n\n return (\n (tenantId === DEFAULT_TENANT_ID && noTenantId) ||\n possibleTenantId === tenantId\n )\n }\n return false\n })\n if (idsOnly) {\n const devAppIds = appDbNames.filter(appId => isDevAppID(appId))\n const prodAppIds = appDbNames.filter(appId => !isDevAppID(appId))\n switch (dev) {\n case true:\n return devAppIds\n case false:\n return prodAppIds\n default:\n return appDbNames\n }\n }\n const appPromises = 
appDbNames.map((app: any) =>\n // skip setup otherwise databases could be re-created\n getAppMetadata(app)\n )\n if (appPromises.length === 0) {\n return []\n } else {\n const response = await Promise.allSettled(appPromises)\n const apps = response\n .filter(\n (result: any) =>\n result.status === \"fulfilled\" &&\n result.value?.state !== AppState.INVALID\n )\n .map(({ value }: any) => value)\n if (!all) {\n return apps.filter((app: any) => {\n if (dev) {\n return isDevApp(app)\n }\n return !isDevApp(app)\n })\n } else {\n return apps.map((app: any) => ({\n ...app,\n status: isDevApp(app) ? \"development\" : \"published\",\n }))\n }\n }\n}\n\nexport async function getAppsByIDs(appIds: string[]) {\n const settled = await Promise.allSettled(\n appIds.map(appId => getAppMetadata(appId))\n )\n // have to list the apps which exist, some may have been deleted\n return settled\n .filter(\n promise =>\n promise.status === \"fulfilled\" &&\n (promise.value as DeletedApp).state !== AppState.INVALID\n )\n .map(promise => (promise as PromiseFulfilledResult<App>).value)\n}\n\n/**\n * Utility function for getAllApps but filters to production apps only.\n */\nexport async function getProdAppIDs() {\n const apps = (await getAllApps({ idsOnly: true })) as string[]\n return apps.filter((id: any) => !isDevAppID(id))\n}\n\n/**\n * Utility function for the inverse of above.\n */\nexport async function getDevAppIDs() {\n const apps = (await getAllApps({ idsOnly: true })) as string[]\n return apps.filter((id: any) => isDevAppID(id))\n}\n\nexport function isSameAppID(\n appId1: string | undefined,\n appId2: string | undefined\n) {\n if (appId1 == undefined || appId2 == undefined) {\n return false\n }\n return getProdAppID(appId1) === getProdAppID(appId2)\n}\n\nexport async function dbExists(dbName: any) {\n return doWithDB(\n dbName,\n async (db: Database) => {\n return await db.exists()\n },\n { skip_setup: true }\n )\n}\n\nexport function pagination<T>(\n data: T[],\n pageSize: number,\n {\n paginate,\n property,\n getKey,\n }: {\n paginate: boolean\n property: string\n getKey?: (doc: T) => string | undefined\n } = {\n paginate: true,\n property: \"_id\",\n }\n) {\n if (!paginate) {\n return { data, hasNextPage: false }\n }\n const hasNextPage = data.length > pageSize\n let nextPage = undefined\n if (!getKey) {\n getKey = (doc: any) => (property ? 
doc?.[property] : doc?._id)\n }\n if (hasNextPage) {\n nextPage = getKey(data[pageSize])\n }\n return {\n data: data.slice(0, pageSize),\n hasNextPage,\n nextPage,\n }\n}\n", "import {\n DeprecatedViews,\n DocumentType,\n SEPARATOR,\n StaticDatabases,\n ViewName,\n} from \"../constants\"\nimport { getGlobalDB } from \"../context\"\nimport { doWithDB } from \"./\"\nimport {\n AllDocsResponse,\n Database,\n DatabaseQueryOpts,\n Document,\n DesignDocument,\n DBView,\n} from \"@budibase/types\"\nimport env from \"../environment\"\n\nconst DESIGN_DB = \"_design/database\"\n\nfunction DesignDoc(): DesignDocument {\n return {\n _id: DESIGN_DB,\n // view collation information, read before writing any complex views:\n // https://docs.couchdb.org/en/master/ddocs/views/collation.html#collation-specification\n views: {},\n }\n}\n\nasync function removeDeprecated(db: Database, viewName: ViewName) {\n if (!DeprecatedViews[viewName]) {\n return\n }\n try {\n const designDoc = await db.get<DesignDocument>(DESIGN_DB)\n for (let deprecatedNames of DeprecatedViews[viewName]) {\n delete designDoc.views?.[deprecatedNames]\n }\n await db.put(designDoc)\n } catch (err) {\n // doesn't exist, ignore\n }\n}\n\nexport async function createView(\n db: Database,\n viewJs: string,\n viewName: string\n): Promise<void> {\n let designDoc\n try {\n designDoc = await db.get<DesignDocument>(DESIGN_DB)\n } catch (err) {\n // no design doc, make one\n designDoc = DesignDoc()\n }\n const view: DBView = {\n map: viewJs,\n }\n designDoc.views = {\n ...designDoc.views,\n [viewName]: view,\n }\n try {\n await db.put(designDoc)\n } catch (err: any) {\n if (err.status === 409) {\n return await createView(db, viewJs, viewName)\n } else {\n throw err\n }\n }\n}\n\nexport const createNewUserEmailView = async () => {\n const db = getGlobalDB()\n const viewJs = `function(doc) {\n if (doc._id.startsWith(\"${DocumentType.USER}${SEPARATOR}\")) {\n emit(doc.email.toLowerCase(), doc._id)\n }\n }`\n await createView(db, viewJs, ViewName.USER_BY_EMAIL)\n}\n\nexport const createUserAppView = async () => {\n const db = getGlobalDB()\n const viewJs = `function(doc) {\n if (doc._id.startsWith(\"${DocumentType.USER}${SEPARATOR}\") && doc.roles) {\n for (let prodAppId of Object.keys(doc.roles)) {\n let emitted = prodAppId + \"${SEPARATOR}\" + doc._id\n emit(emitted, null)\n }\n }\n }`\n await createView(db, viewJs, ViewName.USER_BY_APP)\n}\n\nexport const createApiKeyView = async () => {\n const db = getGlobalDB()\n const viewJs = `function(doc) {\n if (doc._id.startsWith(\"${DocumentType.DEV_INFO}\") && doc.apiKey) {\n emit(doc.apiKey, doc.userId)\n }\n }`\n await createView(db, viewJs, ViewName.BY_API_KEY)\n}\n\nexport interface QueryViewOptions {\n arrayResponse?: boolean\n}\n\nexport async function queryViewRaw<T extends Document>(\n viewName: ViewName,\n params: DatabaseQueryOpts,\n db: Database,\n createFunc: any,\n opts?: QueryViewOptions\n): Promise<AllDocsResponse<T>> {\n try {\n const response = await db.query<T>(`database/${viewName}`, params)\n // await to catch error\n return response\n } catch (err: any) {\n const pouchNotFound = err && err.name === \"not_found\"\n const couchNotFound = err && err.status === 404\n if (pouchNotFound || couchNotFound) {\n await removeDeprecated(db, viewName)\n await createFunc()\n return queryViewRaw(viewName, params, db, createFunc, opts)\n } else if (err.status === 409) {\n // can happen when multiple queries occur at once, view couldn't be created\n // other design docs being updated, re-run\n return 
queryViewRaw(viewName, params, db, createFunc, opts)\n } else {\n throw err\n }\n }\n}\n\nexport const queryView = async <T extends Document>(\n viewName: ViewName,\n params: DatabaseQueryOpts,\n db: Database,\n createFunc: any,\n opts?: QueryViewOptions\n): Promise<T[] | T> => {\n const response = await queryViewRaw<T>(viewName, params, db, createFunc, opts)\n const rows = response.rows\n const docs = rows.map(row => (params.include_docs ? row.doc! : row.value))\n\n // if arrayResponse has been requested, always return array regardless of length\n if (opts?.arrayResponse) {\n return docs as T[]\n } else {\n // return the single document if there is only one\n return docs.length <= 1 ? (docs[0] as T) : (docs as T[])\n }\n}\n\n// PLATFORM\n\nasync function createPlatformView(viewJs: string, viewName: ViewName) {\n try {\n await doWithDB(StaticDatabases.PLATFORM_INFO.name, async (db: Database) => {\n await createView(db, viewJs, viewName)\n })\n } catch (e: any) {\n if (e.status === 409 && env.isTest()) {\n // multiple tests can try to initialise platforms views\n // at once - safe to exit on conflict\n return\n }\n throw e\n }\n}\n\nexport const createPlatformAccountEmailView = async () => {\n const viewJs = `function(doc) {\n if (doc._id.startsWith(\"${DocumentType.ACCOUNT_METADATA}${SEPARATOR}\")) {\n emit(doc.email.toLowerCase(), doc._id)\n }\n }`\n await createPlatformView(viewJs, ViewName.ACCOUNT_BY_EMAIL)\n}\n\nexport const createPlatformUserView = async () => {\n const viewJs = `function(doc) {\n if (doc.tenantId) {\n emit(doc._id.toLowerCase(), doc._id)\n }\n\n if (doc.ssoId) {\n emit(doc.ssoId, doc._id)\n }\n }`\n await createPlatformView(viewJs, ViewName.PLATFORM_USERS_LOWERCASE)\n}\n\nexport const queryPlatformView = async <T extends Document>(\n viewName: ViewName,\n params: DatabaseQueryOpts,\n opts?: QueryViewOptions\n): Promise<T[] | T> => {\n const CreateFuncByName: any = {\n [ViewName.ACCOUNT_BY_EMAIL]: createPlatformAccountEmailView,\n [ViewName.PLATFORM_USERS_LOWERCASE]: createPlatformUserView,\n }\n\n return doWithDB(StaticDatabases.PLATFORM_INFO.name, async (db: Database) => {\n const createFn = CreateFuncByName[viewName]\n return queryView(viewName, params, db, createFn, opts)\n })\n}\n\nconst CreateFuncByName: any = {\n [ViewName.USER_BY_EMAIL]: createNewUserEmailView,\n [ViewName.BY_API_KEY]: createApiKeyView,\n [ViewName.USER_BY_APP]: createUserAppView,\n}\n\nexport const queryGlobalView = async <T extends Document>(\n viewName: ViewName,\n params: DatabaseQueryOpts,\n db?: Database,\n opts?: QueryViewOptions\n): Promise<T[] | T | undefined> => {\n // can pass DB in if working with something specific\n if (!db) {\n db = getGlobalDB()\n }\n const createFn = CreateFuncByName[viewName]\n return queryView<T>(viewName, params, db!, createFn, opts)\n}\n\nexport async function queryGlobalViewRaw<T extends Document>(\n viewName: ViewName,\n params: DatabaseQueryOpts,\n opts?: QueryViewOptions\n) {\n const db = getGlobalDB()\n const createFn = CreateFuncByName[viewName]\n return queryViewRaw<T>(viewName, params, db, createFn, opts)\n}\n", "import { getPouchDB, closePouchDB } from \"./couch\"\nimport { DocumentType } from \"../constants\"\n\nclass Replication {\n source: any\n target: any\n replication: any\n\n /**\n *\n * @param source - the DB you want to replicate or rollback to\n * @param target - the DB you want to replicate to, or rollback from\n */\n constructor({ source, target }: any) {\n this.source = getPouchDB(source)\n this.target = getPouchDB(target)\n }\n\n 
close() {\n return Promise.all([closePouchDB(this.source), closePouchDB(this.target)])\n }\n\n promisify(operation: any, opts = {}) {\n return new Promise(resolve => {\n operation(this.target, opts)\n .on(\"denied\", function (err: any) {\n // a document failed to replicate (e.g. due to permissions)\n throw new Error(`Denied: Document failed to replicate ${err}`)\n })\n .on(\"complete\", function (info: any) {\n return resolve(info)\n })\n .on(\"error\", function (err: any) {\n throw new Error(`Replication Error: ${err}`)\n })\n })\n }\n\n /**\n * Two way replication operation, intended to be promise based.\n * @param opts - PouchDB replication options\n */\n sync(opts = {}) {\n this.replication = this.promisify(this.source.sync, opts)\n return this.replication\n }\n\n /**\n * One way replication operation, intended to be promise based.\n * @param opts - PouchDB replication options\n */\n replicate(opts = {}) {\n this.replication = this.promisify(this.source.replicate.to, opts)\n return this.replication\n }\n\n appReplicateOpts() {\n return {\n filter: (doc: any) => {\n if (doc._id && doc._id.startsWith(DocumentType.AUTOMATION_LOG)) {\n return false\n }\n return doc._id !== DocumentType.APP_METADATA\n },\n }\n }\n\n /**\n * Rollback the target DB back to the state of the source DB\n */\n async rollback() {\n await this.target.destroy()\n // Recreate the DB again\n this.target = getPouchDB(this.target.name)\n // take the opportunity to remove deleted tombstones\n await this.replicate()\n }\n\n cancel() {\n this.replication.cancel()\n }\n}\n\nexport default Replication\n", "import fetch from \"node-fetch\"\nimport { getCouchInfo } from \"./couch\"\nimport { SearchFilters, Row, EmptyFilterOption } from \"@budibase/types\"\n\nconst QUERY_START_REGEX = /\\d[0-9]*:/g\n\ninterface SearchResponse<T> {\n rows: T[] | any[]\n bookmark?: string\n totalRows: number\n}\n\ninterface PaginatedSearchResponse<T> extends SearchResponse<T> {\n hasNextPage: boolean\n}\n\nexport type SearchParams<T> = {\n tableId?: string\n sort?: string\n sortOrder?: string\n sortType?: string\n limit?: number\n bookmark?: string\n version?: string\n indexer?: () => Promise<any>\n disableEscaping?: boolean\n rows?: T | Row[]\n}\n\nexport function removeKeyNumbering(key: any): string {\n if (typeof key === \"string\" && key.match(QUERY_START_REGEX) != null) {\n const parts = key.split(\":\")\n // remove the number\n parts.shift()\n return parts.join(\":\")\n } else {\n return key\n }\n}\n\n/**\n * Class to build lucene query URLs.\n * Optionally takes a base lucene query object.\n */\nexport class QueryBuilder<T> {\n #dbName: string\n #index: string\n #query: SearchFilters\n #limit: number\n #sort?: string\n #bookmark?: string\n #sortOrder: string\n #sortType: string\n #includeDocs: boolean\n #version?: string\n #indexBuilder?: () => Promise<any>\n #noEscaping = false\n #skip?: number\n\n static readonly maxLimit = 200\n\n constructor(dbName: string, index: string, base?: SearchFilters) {\n this.#dbName = dbName\n this.#index = index\n this.#query = {\n allOr: false,\n onEmptyFilter: EmptyFilterOption.RETURN_ALL,\n string: {},\n fuzzy: {},\n range: {},\n equal: {},\n notEqual: {},\n empty: {},\n notEmpty: {},\n oneOf: {},\n contains: {},\n notContains: {},\n containsAny: {},\n ...base,\n }\n this.#limit = 50\n this.#sortOrder = \"ascending\"\n this.#sortType = \"string\"\n this.#includeDocs = true\n }\n\n disableEscaping() {\n this.#noEscaping = true\n return this\n }\n\n setIndexBuilder(builderFn: () => Promise<any>) {\n 
this.#indexBuilder = builderFn\n return this\n }\n\n setVersion(version?: string) {\n if (version != null) {\n this.#version = version\n }\n return this\n }\n\n setTable(tableId: string) {\n this.#query.equal!.tableId = tableId\n return this\n }\n\n setLimit(limit?: number) {\n if (limit != null) {\n this.#limit = limit\n }\n return this\n }\n\n setSort(sort?: string) {\n if (sort != null) {\n this.#sort = sort\n }\n return this\n }\n\n setSortOrder(sortOrder?: string) {\n if (sortOrder != null) {\n this.#sortOrder = sortOrder\n }\n return this\n }\n\n setSortType(sortType?: string) {\n if (sortType != null) {\n this.#sortType = sortType\n }\n return this\n }\n\n setBookmark(bookmark?: string) {\n if (bookmark != null) {\n this.#bookmark = bookmark\n }\n return this\n }\n\n setSkip(skip: number | undefined) {\n this.#skip = skip\n return this\n }\n\n excludeDocs() {\n this.#includeDocs = false\n return this\n }\n\n includeDocs() {\n this.#includeDocs = true\n return this\n }\n\n addString(key: string, partial: string) {\n this.#query.string![key] = partial\n return this\n }\n\n addFuzzy(key: string, fuzzy: string) {\n this.#query.fuzzy![key] = fuzzy\n return this\n }\n\n addRange(key: string, low: string | number, high: string | number) {\n this.#query.range![key] = {\n low,\n high,\n }\n return this\n }\n\n addEqual(key: string, value: any) {\n this.#query.equal![key] = value\n return this\n }\n\n addNotEqual(key: string, value: any) {\n this.#query.notEqual![key] = value\n return this\n }\n\n addEmpty(key: string, value: any) {\n this.#query.empty![key] = value\n return this\n }\n\n addNotEmpty(key: string, value: any) {\n this.#query.notEmpty![key] = value\n return this\n }\n\n addOneOf(key: string, value: any) {\n this.#query.oneOf![key] = value\n return this\n }\n\n addContains(key: string, value: any) {\n this.#query.contains![key] = value\n return this\n }\n\n addNotContains(key: string, value: any) {\n this.#query.notContains![key] = value\n return this\n }\n\n addContainsAny(key: string, value: any) {\n this.#query.containsAny![key] = value\n return this\n }\n\n setAllOr() {\n this.#query.allOr = true\n }\n\n setOnEmptyFilter(value: EmptyFilterOption) {\n this.#query.onEmptyFilter = value\n }\n\n handleSpaces(input: string) {\n if (this.#noEscaping) {\n return input\n } else {\n return input.replace(/ /g, \"_\")\n }\n }\n\n /**\n * Preprocesses a value before going into a lucene search.\n * Transforms strings to lowercase and wraps strings and bools in quotes.\n * @param value The value to process\n * @param options The preprocess options\n * @returns {string|*}\n */\n preprocess(value: any, { escape, lowercase, wrap, type }: any = {}) {\n const hasVersion = !!this.#version\n // Determine if type needs wrapped\n const originalType = typeof value\n // Convert to lowercase\n if (value && lowercase) {\n value = value.toLowerCase ? value.toLowerCase() : value\n }\n // Escape characters\n if (!this.#noEscaping && escape && originalType === \"string\") {\n value = `${value}`.replace(/[ \\/#+\\-&|!(){}\\]^\"~*?:\\\\]/g, \"\\\\$&\")\n }\n\n // Wrap in quotes\n if (originalType === \"string\" && !isNaN(value) && !type) {\n value = `\"${value}\"`\n } else if (hasVersion && wrap) {\n value = originalType === \"number\" ? 
value : `\"${value}\"`\n }\n return value\n }\n\n isMultiCondition() {\n let count = 0\n for (let filters of Object.values(this.#query)) {\n // not contains is one massive filter in allOr mode\n if (typeof filters === \"object\") {\n count += Object.keys(filters).length\n }\n }\n return count > 1\n }\n\n compressFilters(filters: Record<string, string[]>) {\n const compressed: typeof filters = {}\n for (let key of Object.keys(filters)) {\n const finalKey = removeKeyNumbering(key)\n if (compressed[finalKey]) {\n compressed[finalKey] = compressed[finalKey].concat(filters[key])\n } else {\n compressed[finalKey] = filters[key]\n }\n }\n // add prefixes back\n const final: typeof filters = {}\n let count = 1\n for (let [key, value] of Object.entries(compressed)) {\n final[`${count++}:${key}`] = value\n }\n return final\n }\n\n buildSearchQuery() {\n const builder = this\n let allOr = this.#query && this.#query.allOr\n let query = allOr ? \"\" : \"*:*\"\n let allFiltersEmpty = true\n const allPreProcessingOpts = { escape: true, lowercase: true, wrap: true }\n let tableId: string = \"\"\n if (this.#query.equal!.tableId) {\n tableId = this.#query.equal!.tableId\n delete this.#query.equal!.tableId\n }\n\n const equal = (key: string, value: any) => {\n // 0 evaluates to false, which means we would return all rows if we don't check it\n if (!value && value !== 0) {\n return null\n }\n return `${key}:${builder.preprocess(value, allPreProcessingOpts)}`\n }\n\n const contains = (key: string, value: any, mode = \"AND\") => {\n if (!value || (Array.isArray(value) && value.length === 0)) {\n return null\n }\n if (!Array.isArray(value)) {\n return `${key}:${value}`\n }\n let statement = `${builder.preprocess(value[0], { escape: true })}`\n for (let i = 1; i < value.length; i++) {\n statement += ` ${mode} ${builder.preprocess(value[i], {\n escape: true,\n })}`\n }\n return `${key}:(${statement})`\n }\n\n const fuzzy = (key: string, value: any) => {\n if (!value) {\n return null\n }\n value = builder.preprocess(value, {\n escape: true,\n lowercase: true,\n type: \"fuzzy\",\n })\n return `${key}:/.*${value}.*/`\n }\n\n const notContains = (key: string, value: any) => {\n const allPrefix = allOr ? \"*:* AND \" : \"\"\n const mode = allOr ? \"AND\" : undefined\n return allPrefix + \"NOT \" + contains(key, value, mode)\n }\n\n const containsAny = (key: string, value: any) => {\n return contains(key, value, \"OR\")\n }\n\n const oneOf = (key: string, value: any) => {\n if (!value) {\n return `*:*`\n }\n if (!Array.isArray(value)) {\n if (typeof value === \"string\") {\n value = value.split(\",\")\n } else {\n return \"\"\n }\n }\n let orStatement = `${builder.preprocess(value[0], allPreProcessingOpts)}`\n for (let i = 1; i < value.length; i++) {\n orStatement += ` OR ${builder.preprocess(\n value[i],\n allPreProcessingOpts\n )}`\n }\n return `${key}:(${orStatement})`\n }\n\n function build(\n structure: any,\n queryFn: (key: string, value: any) => string | null,\n opts?: { returnBuilt?: boolean; mode?: string }\n ) {\n let built = \"\"\n for (let [key, value] of Object.entries(structure)) {\n // check for new format - remove numbering if needed\n key = removeKeyNumbering(key)\n key = builder.preprocess(builder.handleSpaces(key), {\n escape: true,\n })\n let expression = queryFn(key, value)\n if (expression == null) {\n continue\n }\n if (built.length > 0 || query.length > 0) {\n const mode = opts?.mode ? opts.mode : allOr ? 
\"OR\" : \"AND\"\n built += ` ${mode} `\n }\n built += expression\n if (\n (typeof value !== \"string\" && value != null) ||\n (typeof value === \"string\" && value !== tableId && value !== \"\")\n ) {\n allFiltersEmpty = false\n }\n }\n if (opts?.returnBuilt) {\n return built\n } else {\n query += built\n }\n }\n\n // Construct the actual lucene search query string from JSON structure\n if (this.#query.string) {\n build(this.#query.string, (key: string, value: any) => {\n if (!value) {\n return null\n }\n value = builder.preprocess(value, {\n escape: true,\n lowercase: true,\n type: \"string\",\n })\n return `${key}:${value}*`\n })\n }\n if (this.#query.range) {\n build(this.#query.range, (key: string, value: any) => {\n if (!value) {\n return null\n }\n if (value.low == null || value.low === \"\") {\n return null\n }\n if (value.high == null || value.high === \"\") {\n return null\n }\n const low = builder.preprocess(value.low, allPreProcessingOpts)\n const high = builder.preprocess(value.high, allPreProcessingOpts)\n return `${key}:[${low} TO ${high}]`\n })\n }\n if (this.#query.fuzzy) {\n build(this.#query.fuzzy, fuzzy)\n }\n if (this.#query.equal) {\n build(this.#query.equal, equal)\n }\n if (this.#query.notEqual) {\n build(this.#query.notEqual, (key: string, value: any) => {\n if (!value) {\n return null\n }\n if (typeof value === \"boolean\") {\n return `(*:* AND !${key}:${value})`\n }\n return `!${key}:${builder.preprocess(value, allPreProcessingOpts)}`\n })\n }\n if (this.#query.empty) {\n build(this.#query.empty, (key: string) => `(*:* -${key}:[\"\" TO *])`)\n }\n if (this.#query.notEmpty) {\n build(this.#query.notEmpty, (key: string) => `${key}:[\"\" TO *]`)\n }\n if (this.#query.oneOf) {\n build(this.#query.oneOf, oneOf)\n }\n if (this.#query.contains) {\n build(this.#query.contains, contains)\n }\n if (this.#query.notContains) {\n build(this.compressFilters(this.#query.notContains), notContains)\n }\n if (this.#query.containsAny) {\n build(this.#query.containsAny, containsAny)\n }\n // make sure table ID is always added as an AND\n if (tableId) {\n query = this.isMultiCondition() ? `(${query})` : query\n allOr = false\n build({ tableId }, equal)\n }\n if (allFiltersEmpty) {\n if (this.#query.onEmptyFilter === EmptyFilterOption.RETURN_NONE) {\n return \"\"\n } else if (this.#query?.allOr) {\n return query.replace(\"()\", \"(*:*)\")\n }\n }\n return query\n }\n\n buildSearchBody() {\n let body: any = {\n q: this.buildSearchQuery(),\n limit: Math.min(this.#limit, QueryBuilder.maxLimit),\n include_docs: this.#includeDocs,\n }\n if (this.#bookmark) {\n body.bookmark = this.#bookmark\n }\n if (this.#sort) {\n const order = this.#sortOrder === \"descending\" ? 
\"-\" : \"\"\n const type = `<${this.#sortType}>`\n body.sort = `${order}${this.handleSpaces(this.#sort)}${type}`\n }\n return body\n }\n\n async run() {\n if (this.#skip) {\n await this.#skipItems(this.#skip)\n }\n return await this.#execute()\n }\n\n /**\n * Lucene queries do not support pagination and use bookmarks instead.\n * For the given builder, walk through pages using bookmarks until the desired\n * page has been met.\n */\n async #skipItems(skip: number) {\n // Lucene does not support pagination.\n // Handle pagination by finding the right bookmark\n const prevIncludeDocs = this.#includeDocs\n const prevLimit = this.#limit\n\n this.excludeDocs()\n let skipRemaining = skip\n let iterationFetched = 0\n do {\n const toSkip = Math.min(QueryBuilder.maxLimit, skipRemaining)\n this.setLimit(toSkip)\n const { bookmark, rows } = await this.#execute()\n this.setBookmark(bookmark)\n iterationFetched = rows.length\n skipRemaining -= rows.length\n } while (skipRemaining > 0 && iterationFetched > 0)\n\n this.#includeDocs = prevIncludeDocs\n this.#limit = prevLimit\n }\n\n async #execute() {\n const { url, cookie } = getCouchInfo()\n const fullPath = `${url}/${this.#dbName}/_design/database/_search/${\n this.#index\n }`\n const body = this.buildSearchBody()\n try {\n return await runQuery<T>(fullPath, body, cookie)\n } catch (err: any) {\n if (err.status === 404 && this.#indexBuilder) {\n await this.#indexBuilder()\n return await runQuery<T>(fullPath, body, cookie)\n } else {\n throw err\n }\n }\n }\n}\n\n/**\n * Executes a lucene search query.\n * @param url The query URL\n * @param body The request body defining search criteria\n * @param cookie The auth cookie for CouchDB\n * @returns {Promise<{rows: []}>}\n */\nasync function runQuery<T>(\n url: string,\n body: any,\n cookie: string\n): Promise<SearchResponse<T>> {\n const response = await fetch(url, {\n body: JSON.stringify(body),\n method: \"POST\",\n headers: {\n Authorization: cookie,\n },\n })\n\n if (response.status === 404) {\n throw response\n }\n const json = await response.json()\n\n let output: SearchResponse<T> = {\n rows: [],\n totalRows: 0,\n }\n if (json.rows != null && json.rows.length > 0) {\n output.rows = json.rows.map((row: any) => row.doc)\n }\n if (json.bookmark) {\n output.bookmark = json.bookmark\n }\n if (json.total_rows) {\n output.totalRows = json.total_rows\n }\n return output\n}\n\n/**\n * Gets round the fixed limit of 200 results from a query by fetching as many\n * pages as required and concatenating the results. This recursively operates\n * until enough results have been found.\n * @param dbName Which database to run a lucene query on\n * @param index Which search index to utilise\n * @param query The JSON query structure\n * @param params The search params including:\n * tableId {string} The table ID to search\n * sort {string} The sort column\n * sortOrder {string} The sort order (\"ascending\" or \"descending\")\n * sortType {string} Whether to treat sortable values as strings or\n * numbers. 
(\"string\" or \"number\")\n * limit {number} The number of results to fetch\n * bookmark {string|null} Current bookmark in the recursive search\n * rows {array|null} Current results in the recursive search\n * @returns {Promise<*[]|*>}\n */\nasync function recursiveSearch<T>(\n dbName: string,\n index: string,\n query: any,\n params: any\n): Promise<any> {\n const bookmark = params.bookmark\n const rows = params.rows || []\n if (rows.length >= params.limit) {\n return rows\n }\n let pageSize = QueryBuilder.maxLimit\n if (rows.length > params.limit - QueryBuilder.maxLimit) {\n pageSize = params.limit - rows.length\n }\n const page = await new QueryBuilder<T>(dbName, index, query)\n .setVersion(params.version)\n .setTable(params.tableId)\n .setBookmark(bookmark)\n .setLimit(pageSize)\n .setSort(params.sort)\n .setSortOrder(params.sortOrder)\n .setSortType(params.sortType)\n .run()\n if (!page.rows.length) {\n return rows\n }\n if (page.rows.length < QueryBuilder.maxLimit) {\n return [...rows, ...page.rows]\n }\n const newParams = {\n ...params,\n bookmark: page.bookmark,\n rows: [...rows, ...page.rows],\n }\n return await recursiveSearch(dbName, index, query, newParams)\n}\n\n/**\n * Performs a paginated search. A bookmark will be returned to allow the next\n * page to be fetched. There is a max limit off 200 results per page in a\n * paginated search.\n * @param dbName Which database to run a lucene query on\n * @param index Which search index to utilise\n * @param query The JSON query structure\n * @param params The search params including:\n * tableId {string} The table ID to search\n * sort {string} The sort column\n * sortOrder {string} The sort order (\"ascending\" or \"descending\")\n * sortType {string} Whether to treat sortable values as strings or\n * numbers. (\"string\" or \"number\")\n * limit {number} The desired page size\n * bookmark {string} The bookmark to resume from\n * @returns {Promise<{hasNextPage: boolean, rows: *[]}>}\n */\nexport async function paginatedSearch<T>(\n dbName: string,\n index: string,\n query: SearchFilters,\n params: SearchParams<T>\n) {\n let limit = params.limit\n if (limit == null || isNaN(limit) || limit < 0) {\n limit = 50\n }\n limit = Math.min(limit, QueryBuilder.maxLimit)\n const search = new QueryBuilder<T>(dbName, index, query)\n if (params.version) {\n search.setVersion(params.version)\n }\n if (params.tableId) {\n search.setTable(params.tableId)\n }\n if (params.sort) {\n search\n .setSort(params.sort)\n .setSortOrder(params.sortOrder)\n .setSortType(params.sortType)\n }\n if (params.indexer) {\n search.setIndexBuilder(params.indexer)\n }\n if (params.disableEscaping) {\n search.disableEscaping()\n }\n const searchResults = await search\n .setBookmark(params.bookmark)\n .setLimit(limit)\n .run()\n\n // Try fetching 1 row in the next page to see if another page of results\n // exists or not\n search.setBookmark(searchResults.bookmark).setLimit(1)\n if (params.tableId) {\n search.setTable(params.tableId)\n }\n const nextResults = await search.run()\n\n return {\n ...searchResults,\n hasNextPage: nextResults.rows && nextResults.rows.length > 0,\n }\n}\n\n/**\n * Performs a full search, fetching multiple pages if required to return the\n * desired amount of results. 
There is a limit of 1000 results to avoid\n * heavy performance hits, and to avoid client components breaking from\n * handling too much data.\n * @param dbName Which database to run a lucene query on\n * @param index Which search index to utilise\n * @param query The JSON query structure\n * @param params The search params including:\n * tableId {string} The table ID to search\n * sort {string} The sort column\n * sortOrder {string} The sort order (\"ascending\" or \"descending\")\n * sortType {string} Whether to treat sortable values as strings or\n * numbers. (\"string\" or \"number\")\n * limit {number} The desired number of results\n * @returns {Promise<{rows: *}>}\n */\nexport async function fullSearch<T>(\n dbName: string,\n index: string,\n query: SearchFilters,\n params: SearchParams<T>\n) {\n let limit = params.limit\n if (limit == null || isNaN(limit) || limit < 0) {\n limit = 1000\n }\n params.limit = Math.min(limit, 1000)\n const rows = await recursiveSearch<T>(dbName, index, query, params)\n return { rows }\n}\n", "import { User, SearchIndex } from \"@budibase/types\"\nimport { getGlobalDB } from \"../../context\"\n\nexport async function createUserIndex() {\n const db = getGlobalDB()\n let designDoc\n try {\n designDoc = await db.get<any>(\"_design/database\")\n } catch (err: any) {\n if (err.status === 404) {\n designDoc = { _id: \"_design/database\" }\n }\n }\n\n const fn = function (user: User) {\n if (user._id && !user._id.startsWith(\"us_\")) {\n return\n }\n const ignoredFields = [\n \"_id\",\n \"_rev\",\n \"password\",\n \"account\",\n \"license\",\n \"budibaseAccess\",\n \"accountPortalAccess\",\n \"csrfToken\",\n ]\n\n function idx(input: Record<string, any>, prev?: string) {\n for (let key of Object.keys(input)) {\n if (ignoredFields.includes(key)) {\n continue\n }\n let idxKey = prev != null ? 
`${prev}.${key}` : key\n if (typeof input[key] === \"string\") {\n // eslint-disable-next-line no-undef\n // @ts-ignore\n index(idxKey, input[key].toLowerCase(), { facet: true })\n } else if (typeof input[key] !== \"object\") {\n // eslint-disable-next-line no-undef\n // @ts-ignore\n index(idxKey, input[key], { facet: true })\n } else {\n idx(input[key], idxKey)\n }\n }\n }\n idx(user)\n }\n\n designDoc.indexes = {\n [SearchIndex.USER]: {\n index: fn.toString(),\n analyzer: {\n default: \"keyword\",\n name: \"perfield\",\n },\n },\n }\n await db.put(designDoc)\n}\n", "export * from \"./searchIndexes\"\n", "export function checkErrorCode(error: any, code: number) {\n const stringCode = code.toString()\n if (typeof error === \"object\") {\n return error.status === code || error.message?.includes(stringCode)\n } else if (typeof error === \"number\") {\n return error === code\n } else if (typeof error === \"string\") {\n return error.includes(stringCode)\n }\n}\n\nexport function isDocumentConflictError(error: any) {\n return checkErrorCode(error, 409)\n}\n", "export * from \"./couch\"\nexport * from \"./db\"\nexport * from \"./utils\"\nexport * from \"./views\"\nexport * from \"../docIds/conversions\"\nexport { default as Replication } from \"./Replication\"\n// exports to support old export structure\nexport * from \"../constants/db\"\nexport { getGlobalDBName, baseGlobalDBName } from \"../context\"\nexport * from \"./lucene\"\nexport * as searchIndexes from \"./searchIndexes\"\nexport * from \"./errors\"\n", "const sanitize = require(\"sanitize-s3-objectkey\")\n\nimport AWS from \"aws-sdk\"\nimport stream, { Readable } from \"stream\"\nimport fetch from \"node-fetch\"\nimport tar from \"tar-fs\"\nimport zlib from \"zlib\"\nimport { promisify } from \"util\"\nimport { join } from \"path\"\nimport fs from \"fs\"\nimport env from \"../environment\"\nimport { budibaseTempDir } from \"./utils\"\nimport { v4 } from \"uuid\"\nimport { APP_PREFIX, APP_DEV_PREFIX } from \"../db\"\n\nconst streamPipeline = promisify(stream.pipeline)\n// use this as a temporary store of buckets that are being created\nconst STATE = {\n bucketCreationPromises: {},\n}\n\ntype ListParams = {\n ContinuationToken?: string\n}\n\ntype UploadParams = {\n bucket: string\n filename: string\n path: string\n type?: string | null\n // can be undefined, we will remove it\n metadata?: {\n [key: string]: string | undefined\n }\n}\n\nconst CONTENT_TYPE_MAP: any = {\n txt: \"text/plain\",\n html: \"text/html\",\n css: \"text/css\",\n js: \"application/javascript\",\n json: \"application/json\",\n gz: \"application/gzip\",\n}\n\nconst STRING_CONTENT_TYPES = [\n CONTENT_TYPE_MAP.html,\n CONTENT_TYPE_MAP.css,\n CONTENT_TYPE_MAP.js,\n CONTENT_TYPE_MAP.json,\n]\n\n// does normal sanitization and then swaps dev apps to apps\nexport function sanitizeKey(input: string) {\n return sanitize(sanitizeBucket(input)).replace(/\\\\/g, \"/\")\n}\n\n// simply handles the dev app to app conversion\nexport function sanitizeBucket(input: string) {\n return input.replace(new RegExp(APP_DEV_PREFIX, \"g\"), APP_PREFIX)\n}\n\n/**\n * Gets a connection to the object store using the S3 SDK.\n * @param bucket the name of the bucket which blobs will be uploaded/retrieved from.\n * @param opts configuration for the object store.\n * @return an S3 object store object, check S3 Nodejs SDK for usage.\n * @constructor\n */\nexport function ObjectStore(\n bucket: string,\n opts: { presigning: boolean } = { presigning: false }\n) {\n const config: any = {\n 
s3ForcePathStyle: true,\n signatureVersion: \"v4\",\n apiVersion: \"2006-03-01\",\n accessKeyId: env.MINIO_ACCESS_KEY,\n secretAccessKey: env.MINIO_SECRET_KEY,\n region: env.AWS_REGION,\n }\n if (bucket) {\n config.params = {\n Bucket: sanitizeBucket(bucket),\n }\n }\n\n // custom S3 is in use i.e. minio\n if (env.MINIO_URL) {\n if (opts.presigning && env.MINIO_ENABLED) {\n // IMPORTANT: Signed urls will inspect the host header of the request.\n // Normally a signed url will need to be generated with a specified host in mind.\n // To support dynamic hosts, e.g. some unknown self-hosted installation url,\n // use a predefined host. The host 'minio-service' is also forwarded to minio requests via nginx\n config.endpoint = \"minio-service\"\n } else {\n config.endpoint = env.MINIO_URL\n }\n }\n\n return new AWS.S3(config)\n}\n\n/**\n * Given an object store and a bucket name this will make sure the bucket exists,\n * if it does not exist then it will create it.\n */\nexport async function makeSureBucketExists(client: any, bucketName: string) {\n bucketName = sanitizeBucket(bucketName)\n try {\n await client\n .headBucket({\n Bucket: bucketName,\n })\n .promise()\n } catch (err: any) {\n const promises: any = STATE.bucketCreationPromises\n const doesntExist = err.statusCode === 404,\n noAccess = err.statusCode === 403\n if (promises[bucketName]) {\n await promises[bucketName]\n } else if (doesntExist || noAccess) {\n if (doesntExist) {\n // bucket doesn't exist create it\n promises[bucketName] = client\n .createBucket({\n Bucket: bucketName,\n })\n .promise()\n await promises[bucketName]\n delete promises[bucketName]\n }\n } else {\n throw new Error(\"Unable to write to object store bucket.\")\n }\n }\n}\n\n/**\n * Uploads the contents of a file given the required parameters, useful when\n * temp files in use (for example file uploaded as an attachment).\n */\nexport async function upload({\n bucket: bucketName,\n filename,\n path,\n type,\n metadata,\n}: UploadParams) {\n const extension = filename.split(\".\").pop()\n const fileBytes = fs.readFileSync(path)\n\n const objectStore = ObjectStore(bucketName)\n await makeSureBucketExists(objectStore, bucketName)\n\n let contentType = type\n if (!contentType) {\n contentType = extension\n ? 
CONTENT_TYPE_MAP[extension.toLowerCase()]\n : CONTENT_TYPE_MAP.txt\n }\n const config: any = {\n // windows file paths need to be converted to forward slashes for s3\n Key: sanitizeKey(filename),\n Body: fileBytes,\n ContentType: contentType,\n }\n if (metadata && typeof metadata === \"object\") {\n // remove any nullish keys from the metadata object, as these may be considered invalid\n for (let key of Object.keys(metadata)) {\n if (!metadata[key] || typeof metadata[key] !== \"string\") {\n delete metadata[key]\n }\n }\n config.Metadata = metadata\n }\n return objectStore.upload(config).promise()\n}\n\n/**\n * Similar to the upload function but can be used to send a file stream\n * through to the object store.\n */\nexport async function streamUpload(\n bucketName: string,\n filename: string,\n stream: any,\n extra = {}\n) {\n const objectStore = ObjectStore(bucketName)\n await makeSureBucketExists(objectStore, bucketName)\n\n // Set content type for certain known extensions\n if (filename?.endsWith(\".js\")) {\n extra = {\n ...extra,\n ContentType: \"application/javascript\",\n }\n } else if (filename?.endsWith(\".svg\")) {\n extra = {\n ...extra,\n ContentType: \"image\",\n }\n }\n\n const params = {\n Bucket: sanitizeBucket(bucketName),\n Key: sanitizeKey(filename),\n Body: stream,\n ...extra,\n }\n return objectStore.upload(params).promise()\n}\n\n/**\n * retrieves the contents of a file from the object store, if it is a known content type it\n * will be converted, otherwise it will be returned as a buffer stream.\n */\nexport async function retrieve(bucketName: string, filepath: string) {\n const objectStore = ObjectStore(bucketName)\n const params = {\n Bucket: sanitizeBucket(bucketName),\n Key: sanitizeKey(filepath),\n }\n const response: any = await objectStore.getObject(params).promise()\n // currently these are all strings\n if (STRING_CONTENT_TYPES.includes(response.ContentType)) {\n return response.Body.toString(\"utf8\")\n } else {\n return response.Body\n }\n}\n\nexport async function listAllObjects(bucketName: string, path: string) {\n const objectStore = ObjectStore(bucketName)\n const list = (params: ListParams = {}) => {\n return objectStore\n .listObjectsV2({\n ...params,\n Bucket: sanitizeBucket(bucketName),\n Prefix: sanitizeKey(path),\n })\n .promise()\n }\n let isTruncated = false,\n token,\n objects: AWS.S3.Types.Object[] = []\n do {\n let params: ListParams = {}\n if (token) {\n params.ContinuationToken = token\n }\n const response = await list(params)\n if (response.Contents) {\n objects = objects.concat(response.Contents)\n }\n isTruncated = !!response.IsTruncated\n } while (isTruncated)\n return objects\n}\n\n/**\n * Generate a presigned url with a default TTL of 1 hour\n */\nexport function getPresignedUrl(\n bucketName: string,\n key: string,\n durationSeconds: number = 3600\n) {\n const objectStore = ObjectStore(bucketName, { presigning: true })\n const params = {\n Bucket: sanitizeBucket(bucketName),\n Key: sanitizeKey(key),\n Expires: durationSeconds,\n }\n const url = objectStore.getSignedUrl(\"getObject\", params)\n\n if (!env.MINIO_ENABLED) {\n // return the full URL to the client\n return url\n } else {\n // return the path only to the client\n // use the presigned url route to ensure the static\n // hostname will be used in the request\n const signedUrl = new URL(url)\n const path = signedUrl.pathname\n const query = signedUrl.search\n return `/files/signed${path}${query}`\n }\n}\n\n/**\n * Same as retrieval function but puts to a temporary file.\n 
*/\nexport async function retrieveToTmp(bucketName: string, filepath: string) {\n bucketName = sanitizeBucket(bucketName)\n filepath = sanitizeKey(filepath)\n const data = await retrieve(bucketName, filepath)\n const outputPath = join(budibaseTempDir(), v4())\n fs.writeFileSync(outputPath, data)\n return outputPath\n}\n\nexport async function retrieveDirectory(bucketName: string, path: string) {\n let writePath = join(budibaseTempDir(), v4())\n fs.mkdirSync(writePath)\n const objects = await listAllObjects(bucketName, path)\n let streams = await Promise.all(\n objects.map(obj => getReadStream(bucketName, obj.Key!))\n )\n let count = 0\n const writePromises: Promise<Error>[] = []\n for (let obj of objects) {\n const filename = obj.Key!\n const stream = streams[count++]\n const possiblePath = filename.split(\"/\")\n const dirs = possiblePath.slice(0, possiblePath.length - 1)\n const possibleDir = join(writePath, ...dirs)\n if (possiblePath.length > 1 && !fs.existsSync(possibleDir)) {\n fs.mkdirSync(possibleDir, { recursive: true })\n }\n const writeStream = fs.createWriteStream(join(writePath, ...possiblePath), {\n mode: 0o644,\n })\n stream.pipe(writeStream)\n writePromises.push(\n new Promise((resolve, reject) => {\n stream.on(\"finish\", resolve)\n stream.on(\"error\", reject)\n writeStream.on(\"error\", reject)\n })\n )\n }\n await Promise.all(writePromises)\n return writePath\n}\n\n/**\n * Delete a single file.\n */\nexport async function deleteFile(bucketName: string, filepath: string) {\n const objectStore = ObjectStore(bucketName)\n await makeSureBucketExists(objectStore, bucketName)\n const params = {\n Bucket: bucketName,\n Key: sanitizeKey(filepath),\n }\n return objectStore.deleteObject(params).promise()\n}\n\nexport async function deleteFiles(bucketName: string, filepaths: string[]) {\n const objectStore = ObjectStore(bucketName)\n await makeSureBucketExists(objectStore, bucketName)\n const params = {\n Bucket: bucketName,\n Delete: {\n Objects: filepaths.map((path: any) => ({ Key: sanitizeKey(path) })),\n },\n }\n return objectStore.deleteObjects(params).promise()\n}\n\n/**\n * Delete a path, including everything within.\n */\nexport async function deleteFolder(\n bucketName: string,\n folder: string\n): Promise<any> {\n bucketName = sanitizeBucket(bucketName)\n folder = sanitizeKey(folder)\n const client = ObjectStore(bucketName)\n const listParams = {\n Bucket: bucketName,\n Prefix: folder,\n }\n\n const existingObjectsResponse = await client.listObjects(listParams).promise()\n if (existingObjectsResponse.Contents?.length === 0) {\n return\n }\n const deleteParams: any = {\n Bucket: bucketName,\n Delete: {\n Objects: [],\n },\n }\n\n existingObjectsResponse.Contents?.forEach((content: any) => {\n deleteParams.Delete.Objects.push({ Key: content.Key })\n })\n\n const deleteResponse = await client.deleteObjects(deleteParams).promise()\n // can only empty 1000 items at once\n if (deleteResponse.Deleted?.length === 1000) {\n return deleteFolder(bucketName, folder)\n }\n}\n\nexport async function uploadDirectory(\n bucketName: string,\n localPath: string,\n bucketPath: string\n) {\n bucketName = sanitizeBucket(bucketName)\n let uploads = []\n const files = fs.readdirSync(localPath, { withFileTypes: true })\n for (let file of files) {\n const path = sanitizeKey(join(bucketPath, file.name))\n const local = join(localPath, file.name)\n if (file.isDirectory()) {\n uploads.push(uploadDirectory(bucketName, local, path))\n } else {\n uploads.push(streamUpload(bucketName, path, 
fs.createReadStream(local)))\n }\n }\n await Promise.all(uploads)\n return files\n}\n\nexport async function downloadTarballDirect(\n url: string,\n path: string,\n headers = {}\n) {\n path = sanitizeKey(path)\n const response = await fetch(url, { headers })\n if (!response.ok) {\n throw new Error(`unexpected response ${response.statusText}`)\n }\n\n await streamPipeline(response.body, zlib.createUnzip(), tar.extract(path))\n}\n\nexport async function downloadTarball(\n url: string,\n bucketName: string,\n path: string\n) {\n bucketName = sanitizeBucket(bucketName)\n path = sanitizeKey(path)\n const response = await fetch(url)\n if (!response.ok) {\n throw new Error(`unexpected response ${response.statusText}`)\n }\n\n const tmpPath = join(budibaseTempDir(), path)\n await streamPipeline(response.body, zlib.createUnzip(), tar.extract(tmpPath))\n if (!env.isTest() && env.SELF_HOSTED) {\n await uploadDirectory(bucketName, tmpPath, path)\n }\n // return the temporary path incase there is a use for it\n return tmpPath\n}\n\nexport async function getReadStream(\n bucketName: string,\n path: string\n): Promise<Readable> {\n bucketName = sanitizeBucket(bucketName)\n path = sanitizeKey(path)\n const client = ObjectStore(bucketName)\n const params = {\n Bucket: bucketName,\n Key: path,\n }\n return client.getObject(params).createReadStream()\n}\n", "import env from \"../environment\"\nimport * as cfsign from \"aws-cloudfront-sign\"\n\nlet PRIVATE_KEY: string | undefined\n\nfunction getPrivateKey() {\n if (!env.CLOUDFRONT_PRIVATE_KEY_64) {\n throw new Error(\"CLOUDFRONT_PRIVATE_KEY_64 is not set\")\n }\n\n if (PRIVATE_KEY) {\n return PRIVATE_KEY\n }\n\n PRIVATE_KEY = Buffer.from(env.CLOUDFRONT_PRIVATE_KEY_64, \"base64\").toString(\n \"utf-8\"\n )\n\n return PRIVATE_KEY\n}\n\nconst getCloudfrontSignParams = () => {\n return {\n keypairId: env.CLOUDFRONT_PUBLIC_KEY_ID!,\n privateKeyString: getPrivateKey(),\n expireTime: new Date().getTime() + 1000 * 60 * 60, // 1 hour\n }\n}\n\nexport const getPresignedUrl = (s3Key: string) => {\n const url = getUrl(s3Key)\n return cfsign.getSignedUrl(url, getCloudfrontSignParams())\n}\n\nexport const getUrl = (s3Key: string) => {\n let prefix = \"/\"\n if (s3Key.startsWith(\"/\")) {\n prefix = \"\"\n }\n return `${env.CLOUDFRONT_CDN}${prefix}${s3Key}`\n}\n", "import env from \"../../environment\"\nimport * as objectStore from \"../objectStore\"\nimport * as cloudfront from \"../cloudfront\"\nimport qs from \"querystring\"\nimport { DEFAULT_TENANT_ID, getTenantId } from \"../../context\"\n\nexport function clientLibraryPath(appId: string) {\n return `${objectStore.sanitizeKey(appId)}/budibase-client.js`\n}\n\n/**\n * Previously we used to serve the client library directly from Cloudfront, however\n * due to issues with the domain we were unable to continue doing this - keeping\n * incase we are able to switch back to CDN path again in future.\n */\nexport function clientLibraryCDNUrl(appId: string, version: string) {\n let file = clientLibraryPath(appId)\n if (env.CLOUDFRONT_CDN) {\n // append app version to bust the cache\n if (version) {\n file += `?v=${version}`\n }\n // don't need to use presigned for client with cloudfront\n // file is public\n return cloudfront.getUrl(file)\n } else {\n return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file)\n }\n}\n\nexport function clientLibraryUrl(appId: string, version: string) {\n let tenantId, qsParams: { appId: string; version: string; tenantId?: string }\n try {\n tenantId = getTenantId()\n } finally {\n qsParams = 
{\n appId,\n version,\n }\n }\n if (tenantId && tenantId !== DEFAULT_TENANT_ID) {\n qsParams.tenantId = tenantId\n }\n return `/api/assets/client?${qs.encode(qsParams)}`\n}\n\nexport function getAppFileUrl(s3Key: string) {\n if (env.CLOUDFRONT_CDN) {\n return cloudfront.getPresignedUrl(s3Key)\n } else {\n return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, s3Key)\n }\n}\n", "import env from \"../../environment\"\nimport * as context from \"../../context\"\nimport * as objectStore from \"../objectStore\"\nimport * as cloudfront from \"../cloudfront\"\n\n// URLs\n\nexport const getGlobalFileUrl = (type: string, name: string, etag?: string) => {\n let file = getGlobalFileS3Key(type, name)\n if (env.CLOUDFRONT_CDN) {\n if (etag) {\n file = `${file}?etag=${etag}`\n }\n return cloudfront.getPresignedUrl(file)\n } else {\n return objectStore.getPresignedUrl(env.GLOBAL_BUCKET_NAME, file)\n }\n}\n\n// KEYS\n\nexport const getGlobalFileS3Key = (type: string, name: string) => {\n let file = `${type}/${name}`\n if (env.MULTI_TENANCY) {\n const tenantId = context.getTenantId()\n file = `${tenantId}/${file}`\n }\n return file\n}\n", "import env from \"../../environment\"\nimport * as objectStore from \"../objectStore\"\nimport * as context from \"../../context\"\nimport * as cloudfront from \"../cloudfront\"\nimport { Plugin } from \"@budibase/types\"\n\n// URLS\n\nexport function enrichPluginURLs(plugins: Plugin[]) {\n if (!plugins || !plugins.length) {\n return []\n }\n return plugins.map(plugin => {\n const jsUrl = getPluginJSUrl(plugin)\n const iconUrl = getPluginIconUrl(plugin)\n return { ...plugin, jsUrl, iconUrl }\n })\n}\n\nfunction getPluginJSUrl(plugin: Plugin) {\n const s3Key = getPluginJSKey(plugin)\n return getPluginUrl(s3Key)\n}\n\nfunction getPluginIconUrl(plugin: Plugin): string | undefined {\n const s3Key = getPluginIconKey(plugin)\n if (!s3Key) {\n return\n }\n return getPluginUrl(s3Key)\n}\n\nfunction getPluginUrl(s3Key: string) {\n if (env.CLOUDFRONT_CDN) {\n return cloudfront.getPresignedUrl(s3Key)\n } else {\n return objectStore.getPresignedUrl(env.PLUGIN_BUCKET_NAME, s3Key)\n }\n}\n\n// S3 KEYS\n\nexport function getPluginJSKey(plugin: Plugin) {\n return getPluginS3Key(plugin, \"plugin.min.js\")\n}\n\nexport function getPluginIconKey(plugin: Plugin) {\n // stored iconUrl is deprecated - hardcode to icon.svg in this case\n const iconFileName = plugin.iconUrl ? 
\"icon.svg\" : plugin.iconFileName\n if (!iconFileName) {\n return\n }\n return getPluginS3Key(plugin, iconFileName)\n}\n\nfunction getPluginS3Key(plugin: Plugin, fileName: string) {\n const s3Key = getPluginS3Dir(plugin.name)\n return `${s3Key}/${fileName}`\n}\n\nexport function getPluginS3Dir(pluginName: string) {\n let s3Key = `${pluginName}`\n if (env.MULTI_TENANCY) {\n const tenantId = context.getTenantId()\n s3Key = `${tenantId}/${s3Key}`\n }\n if (env.CLOUDFRONT_CDN) {\n s3Key = `plugins/${s3Key}`\n }\n return s3Key\n}\n", "export * from \"./app\"\nexport * from \"./global\"\nexport * from \"./plugins\"\n", "export * from \"./objectStore\"\nexport * from \"./utils\"\nexport * from \"./buckets\"\n", "import fs from \"fs\"\nimport path from \"path\"\nimport * as rfs from \"rotating-file-stream\"\n\nimport env from \"../environment\"\nimport { budibaseTempDir } from \"../objectStore\"\n\nconst logsFileName = `budibase.log`\nconst budibaseLogsHistoryFileName = \"budibase-logs-history.txt\"\n\nconst logsPath = path.join(budibaseTempDir(), \"systemlogs\")\n\nfunction getFullPath(fileName: string) {\n return path.join(logsPath, fileName)\n}\n\nexport function getSingleFileMaxSizeInfo(totalMaxSize: string) {\n const regex = /(\\d+)([A-Za-z])/\n const match = totalMaxSize?.match(regex)\n if (!match) {\n console.warn(`totalMaxSize does not have a valid value`, {\n totalMaxSize,\n })\n return undefined\n }\n\n const size = +match[1]\n const unit = match[2]\n if (size === 1) {\n switch (unit) {\n case \"B\":\n return { size: `${size}B`, totalHistoryFiles: 1 }\n case \"K\":\n return { size: `${(size * 1000) / 2}B`, totalHistoryFiles: 1 }\n case \"M\":\n return { size: `${(size * 1000) / 2}K`, totalHistoryFiles: 1 }\n case \"G\":\n return { size: `${(size * 1000) / 2}M`, totalHistoryFiles: 1 }\n default:\n return undefined\n }\n }\n\n if (size % 2 === 0) {\n return { size: `${size / 2}${unit}`, totalHistoryFiles: 1 }\n }\n\n return { size: `1${unit}`, totalHistoryFiles: size - 1 }\n}\n\nexport function localFileDestination() {\n const fileInfo = getSingleFileMaxSizeInfo(env.ROLLING_LOG_MAX_SIZE)\n const outFile = rfs.createStream(logsFileName, {\n // As we have a rolling size, we want to half the max size\n size: fileInfo?.size,\n path: logsPath,\n maxFiles: fileInfo?.totalHistoryFiles || 1,\n immutable: true,\n history: budibaseLogsHistoryFileName,\n initialRotation: false,\n })\n\n return outFile\n}\n\nexport function getLogReadStream() {\n const streams = []\n const historyFile = getFullPath(budibaseLogsHistoryFileName)\n if (fs.existsSync(historyFile)) {\n const fileContent = fs.readFileSync(historyFile, \"utf-8\")\n const historyFiles = fileContent.split(\"\\n\")\n for (const historyFile of historyFiles.filter(x => x)) {\n streams.push(fs.readFileSync(historyFile))\n }\n }\n\n streams.push(fs.readFileSync(getFullPath(logsFileName)))\n\n const combinedContent = Buffer.concat(streams)\n return combinedContent\n}\n", "import pino, { LoggerOptions } from \"pino\"\nimport pinoPretty from \"pino-pretty\"\n\nimport { IdentityType } from \"@budibase/types\"\nimport env from \"../../environment\"\nimport * as context from \"../../context\"\nimport * as correlation from \"../correlation\"\nimport tracer from \"dd-trace\"\nimport { formats } from \"dd-trace/ext\"\n\nimport { localFileDestination } from \"../system\"\n\n// LOGGER\n\nlet pinoInstance: pino.Logger | undefined\nif (!env.DISABLE_PINO_LOGGER) {\n const level = env.LOG_LEVEL\n const pinoOptions: LoggerOptions = {\n level,\n formatters: {\n 
level: level => {\n return { level: level.toUpperCase() }\n },\n bindings: () => {\n if (env.SELF_HOSTED) {\n // \"service\" is being injected in datadog using the pod names,\n // so we should leave it blank to allow the default behaviour if it's not running self-hosted\n return {\n service: env.SERVICE_NAME,\n }\n } else {\n return {}\n }\n },\n },\n timestamp: () => `,\"timestamp\":\"${new Date(Date.now()).toISOString()}\"`,\n }\n\n const destinations: pino.StreamEntry[] = []\n\n destinations.push(\n env.isDev()\n ? {\n stream: pinoPretty({ singleLine: true }),\n level: level as pino.Level,\n }\n : { stream: process.stdout, level: level as pino.Level }\n )\n\n if (env.SELF_HOSTED) {\n destinations.push({\n stream: localFileDestination(),\n level: level as pino.Level,\n })\n }\n\n pinoInstance = destinations.length\n ? pino(pinoOptions, pino.multistream(destinations))\n : pino(pinoOptions)\n\n // CONSOLE OVERRIDES\n\n interface MergingObject {\n objects?: any[]\n tenantId?: string\n appId?: string\n automationId?: string\n identityId?: string\n identityType?: IdentityType\n correlationId?: string\n err?: Error\n }\n\n function isPlainObject(obj: any) {\n return typeof obj === \"object\" && obj !== null && !(obj instanceof Error)\n }\n\n function isError(obj: any) {\n return obj instanceof Error\n }\n\n function isMessage(obj: any) {\n return typeof obj === \"string\"\n }\n\n /**\n * Backwards compatibility between console logging statements\n * and pino logging requirements.\n */\n function getLogParams(args: any[]): [MergingObject, string] {\n let error = undefined\n let objects: any[] = []\n let message = \"\"\n\n args.forEach(arg => {\n if (isMessage(arg)) {\n message = `${message} ${arg}`.trimStart()\n }\n if (isPlainObject(arg)) {\n objects.push(arg)\n }\n if (isError(arg)) {\n error = arg\n }\n })\n\n const identity = getIdentity()\n\n let contextObject = {}\n\n contextObject = {\n tenantId: getTenantId(),\n appId: getAppId(),\n automationId: getAutomationId(),\n identityId: identity?._id,\n identityType: identity?.type,\n correlationId: correlation.getId(),\n }\n\n const span = tracer.scope().active()\n if (span) {\n tracer.inject(span.context(), formats.LOG, contextObject)\n }\n\n const mergingObject: any = {\n err: error,\n pid: process.pid,\n ...contextObject,\n }\n\n if (objects.length) {\n // init generic data object for params supplied that don't have a\n // '_logKey' field. This prints an object using argument index as the key\n // e.g. 
{ 0: {}, 1: {} }\n const data: any = {}\n let dataIndex = 0\n\n for (let i = 0; i < objects.length; i++) {\n const object = objects[i]\n // the object has specified a log key\n // use this instead of generic key\n const logKey = object._logKey\n if (logKey) {\n delete object._logKey\n mergingObject[logKey] = object\n } else {\n data[dataIndex] = object\n dataIndex++\n }\n }\n\n if (Object.keys(data).length) {\n mergingObject.data = data\n }\n }\n\n return [mergingObject, message]\n }\n\n console.log = (...arg: any[]) => {\n const [obj, msg] = getLogParams(arg)\n pinoInstance?.info(obj, msg)\n }\n console.info = (...arg: any[]) => {\n const [obj, msg] = getLogParams(arg)\n pinoInstance?.info(obj, msg)\n }\n console.warn = (...arg: any[]) => {\n const [obj, msg] = getLogParams(arg)\n pinoInstance?.warn(obj, msg)\n }\n console.error = (...arg: any[]) => {\n const [obj, msg] = getLogParams(arg)\n pinoInstance?.error(obj, msg)\n }\n\n /**\n * custom trace impl - this resembles the node trace behaviour rather\n * than traditional trace logging\n * @param arg\n */\n console.trace = (...arg: any[]) => {\n const [obj, msg] = getLogParams(arg)\n if (!obj.err) {\n // to get stack trace\n obj.err = new Error()\n }\n pinoInstance?.trace(obj, msg)\n }\n\n console.debug = (...arg: any) => {\n const [obj, msg] = getLogParams(arg)\n pinoInstance?.debug(obj, msg)\n }\n\n // CONTEXT\n\n const getTenantId = () => {\n let tenantId\n try {\n tenantId = context.getTenantId()\n } catch (e: any) {\n // do nothing\n }\n return tenantId\n }\n\n const getAppId = () => {\n let appId\n try {\n appId = context.getAppId()\n } catch (e) {\n // do nothing\n }\n return appId\n }\n\n const getAutomationId = () => {\n let appId\n try {\n appId = context.getAutomationId()\n } catch (e) {\n // do nothing\n }\n return appId\n }\n\n const getIdentity = () => {\n let identity\n try {\n identity = context.getIdentity()\n } catch (e) {\n // do nothing\n }\n return identity\n }\n}\n\nexport const logger = pinoInstance\n", "const NonErrors = [\"AccountError\"]\n\nfunction isSuppressed(e?: any) {\n return e && e[\"suppressAlert\"]\n}\n\nexport function logAlert(message: string, e?: any) {\n if (e && NonErrors.includes(e.name) && isSuppressed(e)) {\n return\n }\n console.error(`bb-alert: ${message}`, e)\n}\n\nexport function logAlertWithInfo(\n message: string,\n db: string,\n id: string,\n error: any\n) {\n message = `${message} - db: ${db} - doc: ${id} - error: `\n logAlert(message, error)\n}\n\nexport function logWarn(message: string, e?: any) {\n console.warn(`bb-warn: ${message}`, e)\n}\n", "export * as correlation from \"./correlation/correlation\"\nexport { logger } from \"./pino/logger\"\nexport * from \"./alerts\"\nexport * as system from \"./system\"\n", "let intervals: NodeJS.Timeout[] = []\n\nexport function set(callback: () => any, period: number) {\n const interval = setInterval(callback, period)\n intervals.push(interval)\n return interval\n}\n\nexport function clear(interval: NodeJS.Timeout) {\n const idx = intervals.indexOf(interval)\n if (idx !== -1) {\n intervals.splice(idx, 1)\n }\n clearInterval(interval)\n}\n\nexport function cleanup() {\n for (let interval of intervals) {\n clearInterval(interval)\n }\n intervals = []\n}\n\nexport class ExecutionTimeoutError extends Error {\n public readonly name = \"ExecutionTimeoutError\"\n}\n\nexport class ExecutionTimeTracker {\n static withLimit(limitMs: number) {\n return new ExecutionTimeTracker(limitMs)\n }\n\n constructor(readonly limitMs: number) {}\n\n private 
totalTimeMs = 0\n\n track<T>(f: () => T): T {\n this.checkLimit()\n const start = process.hrtime.bigint()\n try {\n return f()\n } finally {\n const end = process.hrtime.bigint()\n this.totalTimeMs += Number(end - start) / 1e6\n this.checkLimit()\n }\n }\n\n get elapsedMS() {\n return this.totalTimeMs\n }\n\n checkLimit() {\n if (this.totalTimeMs > this.limitMs) {\n throw new ExecutionTimeoutError(\n `Execution time limit of ${this.limitMs}ms exceeded: ${this.totalTimeMs}ms`\n )\n }\n }\n}\n", "export * from \"./timers\"\n", "import env from \"../environment\"\nimport Redis from \"ioredis\"\n// mock-redis doesn't have any typing\nlet MockRedis: any | undefined\nif (env.MOCK_REDIS) {\n try {\n // ioredis mock is all in memory\n MockRedis = require(\"ioredis-mock\")\n } catch (err) {\n console.log(\"Mock redis unavailable\")\n }\n}\nimport {\n addDbPrefix,\n removeDbPrefix,\n getRedisOptions,\n SEPARATOR,\n SelectableDatabase,\n getRedisConnectionDetails,\n} from \"./utils\"\nimport { logAlert } from \"../logging\"\nimport * as timers from \"../timers\"\n\nconst RETRY_PERIOD_MS = 2000\nconst STARTUP_TIMEOUT_MS = 5000\nconst CLUSTERED = env.REDIS_CLUSTERED\nconst DEFAULT_SELECT_DB = SelectableDatabase.DEFAULT\n\n// for testing just generate the client once\nlet CLOSED = false\nlet CLIENTS: { [key: number]: any } = {}\nlet CONNECTED = false\n\n// mock redis always connected\nif (env.MOCK_REDIS) {\n CONNECTED = true\n}\n\nfunction pickClient(selectDb: number): any {\n return CLIENTS[selectDb]\n}\n\nfunction connectionError(timeout: NodeJS.Timeout, err: Error | string) {\n // manually shut down, ignore errors\n if (CLOSED) {\n return\n }\n CLOSED = true\n // always clear this on error\n clearTimeout(timeout)\n CONNECTED = false\n logAlert(\"Redis connection failed\", err)\n setTimeout(() => {\n init()\n }, RETRY_PERIOD_MS)\n}\n\n/**\n * Inits the system, will error if unable to connect to redis cluster (may take up to 10 seconds) otherwise\n * will return the ioredis client which will be ready to use.\n */\nfunction init(selectDb = DEFAULT_SELECT_DB) {\n const RedisCore = env.MOCK_REDIS && MockRedis ? 
MockRedis : Redis\n let timeout: NodeJS.Timeout\n CLOSED = false\n let client = pickClient(selectDb)\n // already connected, ignore\n if (client && CONNECTED) {\n return\n }\n // testing uses a single in memory client\n if (env.MOCK_REDIS) {\n CLIENTS[selectDb] = new RedisCore(getRedisOptions())\n }\n // start the timer - only allowed 5 seconds to connect\n timeout = setTimeout(() => {\n if (!CONNECTED) {\n connectionError(timeout, \"Did not successfully connect in timeout\")\n }\n }, STARTUP_TIMEOUT_MS)\n\n // disconnect any lingering client\n if (client) {\n client.disconnect()\n }\n const { host, port } = getRedisConnectionDetails()\n const opts = getRedisOptions()\n\n if (CLUSTERED) {\n client = new RedisCore.Cluster([{ host, port }], opts)\n } else {\n client = new RedisCore(opts)\n }\n // attach handlers\n client.on(\"end\", (err: Error) => {\n if (env.isTest()) {\n // don't try to re-connect in test env\n // allow the process to exit\n return\n }\n connectionError(timeout, err)\n })\n client.on(\"error\", (err: Error) => {\n connectionError(timeout, err)\n })\n client.on(\"connect\", () => {\n console.log(`Connected to Redis DB: ${selectDb}`)\n clearTimeout(timeout)\n CONNECTED = true\n })\n CLIENTS[selectDb] = client\n}\n\nfunction waitForConnection(selectDb: number = DEFAULT_SELECT_DB) {\n return new Promise(resolve => {\n if (pickClient(selectDb) == null) {\n init()\n } else if (CONNECTED) {\n resolve(\"\")\n return\n }\n // check if the connection is ready\n const interval = timers.set(() => {\n if (CONNECTED) {\n timers.clear(interval)\n resolve(\"\")\n }\n }, 500)\n })\n}\n\n/**\n * Utility function, takes a redis stream and converts it to a promisified response -\n * this can only be done with redis streams because they will have an end.\n * @param stream A redis stream, specifically as this type of stream will have an end.\n * @param client The client to use for further lookups.\n * @return The final output of the stream\n */\nfunction promisifyStream(stream: any, client: RedisWrapper) {\n return new Promise((resolve, reject) => {\n const outputKeys = new Set()\n stream.on(\"data\", (keys: string[]) => {\n keys.forEach(key => {\n outputKeys.add(key)\n })\n })\n stream.on(\"error\", (err: Error) => {\n reject(err)\n })\n stream.on(\"end\", async () => {\n const keysArray: string[] = Array.from(outputKeys) as string[]\n try {\n let getPromises = []\n for (let key of keysArray) {\n getPromises.push(client.get(key))\n }\n const jsonArray = await Promise.all(getPromises)\n resolve(\n keysArray.map(key => ({\n key: removeDbPrefix(key),\n value: JSON.parse(jsonArray.shift()),\n }))\n )\n } catch (err) {\n reject(err)\n }\n })\n })\n}\n\nclass RedisWrapper {\n _db: string\n _select: number\n\n constructor(db: string, selectDb: number | null = null) {\n this._db = db\n this._select = selectDb || DEFAULT_SELECT_DB\n }\n\n getClient() {\n return pickClient(this._select)\n }\n\n async init() {\n CLOSED = false\n init(this._select)\n await waitForConnection(this._select)\n if (this._select && !env.isTest()) {\n this.getClient().select(this._select)\n }\n return this\n }\n\n async finish() {\n CLOSED = true\n this.getClient().disconnect()\n }\n\n async scan(key = \"\"): Promise<any> {\n const db = this._db\n key = `${db}${SEPARATOR}${key}`\n let stream\n if (CLUSTERED) {\n let node = this.getClient().nodes(\"master\")\n stream = node[0].scanStream({ match: key + \"*\", count: 100 })\n } else {\n stream = this.getClient().scanStream({ match: key + \"*\", count: 100 })\n }\n return 
promisifyStream(stream, this.getClient())\n }\n\n async keys(pattern: string) {\n const db = this._db\n return this.getClient().keys(addDbPrefix(db, pattern))\n }\n\n async exists(key: string) {\n const db = this._db\n return await this.getClient().exists(addDbPrefix(db, key))\n }\n\n async get(key: string) {\n const db = this._db\n let response = await this.getClient().get(addDbPrefix(db, key))\n // overwrite the prefixed key\n if (response != null && response.key) {\n response.key = key\n }\n // if its not an object just return the response\n try {\n return JSON.parse(response)\n } catch (err) {\n return response\n }\n }\n\n async bulkGet<T>(keys: string[]) {\n const db = this._db\n if (keys.length === 0) {\n return {}\n }\n const prefixedKeys = keys.map(key => addDbPrefix(db, key))\n let response = await this.getClient().mget(prefixedKeys)\n if (Array.isArray(response)) {\n let final: Record<string, T> = {}\n let count = 0\n for (let result of response) {\n if (result) {\n let parsed\n try {\n parsed = JSON.parse(result)\n } catch (err) {\n parsed = result\n }\n final[keys[count]] = parsed\n }\n count++\n }\n return final\n } else {\n throw new Error(`Invalid response: ${response}`)\n }\n }\n\n async store(key: string, value: any, expirySeconds: number | null = null) {\n const db = this._db\n if (typeof value === \"object\") {\n value = JSON.stringify(value)\n }\n const prefixedKey = addDbPrefix(db, key)\n await this.getClient().set(prefixedKey, value)\n if (expirySeconds) {\n await this.getClient().expire(prefixedKey, expirySeconds)\n }\n }\n\n async getTTL(key: string) {\n const db = this._db\n const prefixedKey = addDbPrefix(db, key)\n return this.getClient().ttl(prefixedKey)\n }\n\n async setExpiry(key: string, expirySeconds: number | null) {\n const db = this._db\n const prefixedKey = addDbPrefix(db, key)\n await this.getClient().expire(prefixedKey, expirySeconds)\n }\n\n async delete(key: string) {\n const db = this._db\n await this.getClient().del(addDbPrefix(db, key))\n }\n\n async clear() {\n let items = await this.scan()\n await Promise.all(items.map((obj: any) => this.delete(obj.key)))\n }\n}\n\nexport default RedisWrapper\n", "import Client from \"./redis\"\nimport * as utils from \"./utils\"\n\nlet userClient: Client,\n sessionClient: Client,\n appClient: Client,\n cacheClient: Client,\n writethroughClient: Client,\n lockClient: Client,\n socketClient: Client,\n inviteClient: Client,\n passwordResetClient: Client\n\nexport async function init() {\n userClient = await new Client(utils.Databases.USER_CACHE).init()\n sessionClient = await new Client(utils.Databases.SESSIONS).init()\n appClient = await new Client(utils.Databases.APP_METADATA).init()\n cacheClient = await new Client(utils.Databases.GENERIC_CACHE).init()\n lockClient = await new Client(utils.Databases.LOCKS).init()\n writethroughClient = await new Client(utils.Databases.WRITE_THROUGH).init()\n inviteClient = await new Client(utils.Databases.INVITATIONS).init()\n passwordResetClient = await new Client(utils.Databases.PW_RESETS).init()\n socketClient = await new Client(\n utils.Databases.SOCKET_IO,\n utils.SelectableDatabase.SOCKET_IO\n ).init()\n}\n\nexport async function shutdown() {\n if (userClient) await userClient.finish()\n if (sessionClient) await sessionClient.finish()\n if (appClient) await appClient.finish()\n if (cacheClient) await cacheClient.finish()\n if (writethroughClient) await writethroughClient.finish()\n if (lockClient) await lockClient.finish()\n if (inviteClient) await inviteClient.finish()\n 
if (passwordResetClient) await passwordResetClient.finish()\n if (socketClient) await socketClient.finish()\n}\n\nprocess.on(\"exit\", async () => {\n await shutdown()\n})\n\nexport async function getUserClient() {\n if (!userClient) {\n await init()\n }\n return userClient\n}\n\nexport async function getSessionClient() {\n if (!sessionClient) {\n await init()\n }\n return sessionClient\n}\n\nexport async function getAppClient() {\n if (!appClient) {\n await init()\n }\n return appClient\n}\n\nexport async function getCacheClient() {\n if (!cacheClient) {\n await init()\n }\n return cacheClient\n}\n\nexport async function getWritethroughClient() {\n if (!writethroughClient) {\n await init()\n }\n return writethroughClient\n}\n\nexport async function getLockClient() {\n if (!lockClient) {\n await init()\n }\n return lockClient\n}\n\nexport async function getSocketClient() {\n if (!socketClient) {\n await init()\n }\n return socketClient\n}\n\nexport async function getInviteClient() {\n if (!inviteClient) {\n await init()\n }\n return inviteClient\n}\n\nexport async function getPasswordResetClient() {\n if (!passwordResetClient) {\n await init()\n }\n return passwordResetClient\n}\n", "export * as configs from \"./configs\"\nexport * as events from \"./events\"\nexport * as migrations from \"./migrations\"\nexport * as users from \"./users\"\nexport * as roles from \"./security/roles\"\nexport * as permissions from \"./security/permissions\"\nexport * as accounts from \"./accounts\"\nexport * as installation from \"./installation\"\nexport * as featureFlags from \"./features\"\nexport * as features from \"./features/installation\"\nexport * as sessions from \"./security/sessions\"\nexport * as platform from \"./platform\"\nexport * as auth from \"./auth\"\nexport * as constants from \"./constants\"\nexport * as logging from \"./logging\"\nexport * as middleware from \"./middleware\"\nexport * as plugins from \"./plugin\"\nexport * as encryption from \"./security/encryption\"\nexport * as queue from \"./queue\"\nexport * as db from \"./db\"\nexport * as context from \"./context\"\nexport * as cache from \"./cache\"\nexport * as objectStore from \"./objectStore\"\nexport * as redis from \"./redis\"\nexport { Client as RedisClient } from \"./redis\"\nexport * as locks from \"./redis/redlockImpl\"\nexport * as utils from \"./utils\"\nexport * as errors from \"./errors\"\nexport * as timers from \"./timers\"\nexport { default as env } from \"./environment\"\nexport * as blacklist from \"./blacklist\"\nexport * as docUpdates from \"./docUpdates\"\nexport * from \"./utils/Duration\"\nexport { SearchParams } from \"./db\"\nexport * as docIds from \"./docIds\"\nexport * as security from \"./security\"\n// Add context to tenancy for backwards compatibility\n// only do this for external usages to prevent internal\n// circular dependencies\nimport * as context from \"./context\"\nimport * as _tenancy from \"./tenancy\"\n\nexport const tenancy = {\n ..._tenancy,\n ...context,\n}\n\n// expose error classes directly\nexport * from \"./errors\"\n\n// expose constants directly\nexport * from \"./constants\"\n\n// expose package init function\nimport * as db from \"./db\"\n\nexport const init = (opts: any = {}) => {\n db.init(opts.db)\n}\n", "export * from \"./configs\"\n", "import {\n Config,\n ConfigType,\n GoogleConfig,\n GoogleInnerConfig,\n OIDCConfig,\n OIDCInnerConfig,\n OIDCLogosConfig,\n SCIMConfig,\n SCIMInnerConfig,\n SettingsConfig,\n SettingsInnerConfig,\n SMTPConfig,\n SMTPInnerConfig,\n} from 
\"@budibase/types\"\nimport { DocumentType, SEPARATOR } from \"../constants\"\nimport { CacheKey, TTL, withCache } from \"../cache\"\nimport * as context from \"../context\"\nimport env from \"../environment\"\n\n// UTILS\n\n/**\n * Generates a new configuration ID.\n * @returns The new configuration ID which the config doc can be stored under.\n */\nexport function generateConfigID(type: ConfigType) {\n return `${DocumentType.CONFIG}${SEPARATOR}${type}`\n}\n\nexport async function getConfig<T extends Config>(\n type: ConfigType\n): Promise<T | undefined> {\n const db = context.getGlobalDB()\n try {\n // await to catch error\n return (await db.get(generateConfigID(type))) as T\n } catch (e: any) {\n if (e.status === 404) {\n return\n }\n throw e\n }\n}\n\nexport async function save(\n config: Config\n): Promise<{ id: string; rev: string }> {\n const db = context.getGlobalDB()\n return db.put(config)\n}\n\n// SETTINGS\n\nexport async function getSettingsConfigDoc(): Promise<SettingsConfig> {\n let config = await getConfig<SettingsConfig>(ConfigType.SETTINGS)\n\n if (!config) {\n config = {\n _id: generateConfigID(ConfigType.SETTINGS),\n type: ConfigType.SETTINGS,\n config: {},\n }\n }\n\n // overridden fields\n config.config.platformUrl = await getPlatformUrl({\n tenantAware: true,\n config: config.config,\n })\n config.config.analyticsEnabled = await analyticsEnabled({\n config: config.config,\n })\n\n return config\n}\n\nexport async function getSettingsConfig(): Promise<SettingsInnerConfig> {\n return (await getSettingsConfigDoc()).config\n}\n\nexport async function getPlatformUrl(\n opts: { tenantAware: boolean; config?: SettingsInnerConfig } = {\n tenantAware: true,\n }\n) {\n let platformUrl = env.PLATFORM_URL || \"http://localhost:10000\"\n\n if (!env.SELF_HOSTED && env.MULTI_TENANCY && opts.tenantAware) {\n // cloud and multi tenant - add the tenant to the default platform url\n const tenantId = context.getTenantId()\n if (!platformUrl.includes(\"localhost:\")) {\n platformUrl = platformUrl.replace(\"://\", `://${tenantId}.`)\n }\n } else if (env.SELF_HOSTED) {\n const config = opts?.config\n ? opts.config\n : // direct to db to prevent infinite loop\n (await getConfig<SettingsConfig>(ConfigType.SETTINGS))?.config\n if (config?.platformUrl) {\n platformUrl = config.platformUrl\n }\n }\n\n return platformUrl\n}\n\nexport const analyticsEnabled = async (opts?: {\n config?: SettingsInnerConfig\n}) => {\n // cloud - always use the environment variable\n if (!env.SELF_HOSTED) {\n return !!env.ENABLE_ANALYTICS\n }\n\n // self host - prefer the settings doc\n // use cache as events have high throughput\n const enabledInDB = await withCache(\n CacheKey.ANALYTICS_ENABLED,\n TTL.ONE_DAY,\n async () => {\n const config = opts?.config\n ? 
opts.config\n : // direct to db to prevent infinite loop\n (await getConfig<SettingsConfig>(ConfigType.SETTINGS))?.config\n\n // need to do explicit checks in case the field is not set\n if (config?.analyticsEnabled === false) {\n return false\n } else if (config?.analyticsEnabled === true) {\n return true\n }\n }\n )\n\n if (enabledInDB !== undefined) {\n return enabledInDB\n }\n\n // fallback to the environment variable\n // explicitly check for 0 or false here, undefined or otherwise is treated as true\n const envEnabled: any = env.ENABLE_ANALYTICS\n if (envEnabled === 0 || envEnabled === false) {\n return false\n } else {\n return true\n }\n}\n\n// GOOGLE\n\nasync function getGoogleConfigDoc(): Promise<GoogleConfig | undefined> {\n return await getConfig<GoogleConfig>(ConfigType.GOOGLE)\n}\n\nexport async function getGoogleConfig(): Promise<\n GoogleInnerConfig | undefined\n> {\n const config = await getGoogleConfigDoc()\n return config?.config\n}\n\nexport async function getGoogleDatasourceConfig(): Promise<\n GoogleInnerConfig | undefined\n> {\n if (!env.SELF_HOSTED) {\n // always use the env vars in cloud\n return getDefaultGoogleConfig()\n }\n\n // prefer the config in self-host\n let config = await getGoogleConfig()\n\n // fallback to env vars\n if (!config || !config.activated) {\n config = getDefaultGoogleConfig()\n }\n\n return config\n}\n\nexport function getDefaultGoogleConfig(): GoogleInnerConfig | undefined {\n if (env.GOOGLE_CLIENT_ID && env.GOOGLE_CLIENT_SECRET) {\n return {\n clientID: env.GOOGLE_CLIENT_ID!,\n clientSecret: env.GOOGLE_CLIENT_SECRET!,\n activated: true,\n }\n }\n}\n\n// OIDC\n\nexport async function getOIDCLogosDoc(): Promise<OIDCLogosConfig | undefined> {\n return getConfig<OIDCLogosConfig>(ConfigType.OIDC_LOGOS)\n}\n\nasync function getOIDCConfigDoc(): Promise<OIDCConfig | undefined> {\n return getConfig<OIDCConfig>(ConfigType.OIDC)\n}\n\nexport async function getOIDCConfig(): Promise<OIDCInnerConfig | undefined> {\n const config = (await getOIDCConfigDoc())?.config\n // default to the 0th config\n return config?.configs && config.configs[0]\n}\n\n/**\n * @param configId The config id of the inner config to retrieve\n */\nexport async function getOIDCConfigById(\n configId: string\n): Promise<OIDCInnerConfig | undefined> {\n const config = (await getConfig<OIDCConfig>(ConfigType.OIDC))?.config\n return config && config.configs.filter((c: any) => c.uuid === configId)[0]\n}\n\n// SMTP\n\nexport async function getSMTPConfigDoc(): Promise<SMTPConfig | undefined> {\n return getConfig<SMTPConfig>(ConfigType.SMTP)\n}\n\nexport async function getSMTPConfig(\n isAutomation?: boolean\n): Promise<SMTPInnerConfig | undefined> {\n const config = await getSMTPConfigDoc()\n if (config) {\n return config.config\n }\n\n // always allow fallback in self host\n // in cloud don't allow for automations\n const allowFallback = env.SELF_HOSTED || !isAutomation\n\n // Use an SMTP fallback configuration from env variables\n if (env.SMTP_FALLBACK_ENABLED && allowFallback) {\n return {\n port: env.SMTP_PORT,\n host: env.SMTP_HOST!,\n secure: false,\n from: env.SMTP_FROM_ADDRESS!,\n auth: {\n user: env.SMTP_USER!,\n pass: env.SMTP_PASSWORD!,\n },\n }\n }\n}\n\n// SCIM\n\nexport async function getSCIMConfig(): Promise<SCIMInnerConfig | undefined> {\n const config = await getConfig<SCIMConfig>(ConfigType.SCIM)\n return config?.config\n}\n", "export * as generic from \"./generic\"\nexport * as user from \"./user\"\nexport * as app from \"./appMetadata\"\nexport * as writethrough from 
\"./writethrough\"\nexport * as invite from \"./invite\"\nexport * as passwordReset from \"./passwordReset\"\nexport * from \"./generic\"\n", "import BaseCache from \"./base\"\n\nconst GENERIC = new BaseCache()\n\nexport enum CacheKey {\n CHECKLIST = \"checklist\",\n INSTALLATION = \"installation\",\n ANALYTICS_ENABLED = \"analyticsEnabled\",\n UNIQUE_TENANT_ID = \"uniqueTenantId\",\n EVENTS = \"events\",\n BACKFILL_METADATA = \"backfillMetadata\",\n EVENTS_RATE_LIMIT = \"eventsRateLimit\",\n}\n\nexport enum TTL {\n ONE_MINUTE = 600,\n ONE_HOUR = 3600,\n ONE_DAY = 86400,\n}\n\nexport const keys = (...args: Parameters<typeof GENERIC.keys>) =>\n GENERIC.keys(...args)\nexport const get = (...args: Parameters<typeof GENERIC.get>) =>\n GENERIC.get(...args)\nexport const store = (...args: Parameters<typeof GENERIC.store>) =>\n GENERIC.store(...args)\nexport const destroy = (...args: Parameters<typeof GENERIC.delete>) =>\n GENERIC.delete(...args)\nexport const withCache = (...args: Parameters<typeof GENERIC.withCache>) =>\n GENERIC.withCache(...args)\nexport const bustCache = (...args: Parameters<typeof GENERIC.bustCache>) =>\n GENERIC.bustCache(...args)\n", "import { getTenantId } from \"../../context\"\nimport * as redis from \"../../redis/init\"\nimport { Client } from \"../../redis\"\n\nfunction generateTenantKey(key: string) {\n const tenantId = getTenantId()\n return `${key}:${tenantId}`\n}\n\nexport default class BaseCache {\n client: Client | undefined\n\n constructor(client: Client | undefined = undefined) {\n this.client = client\n }\n\n async getClient() {\n return !this.client ? await redis.getCacheClient() : this.client\n }\n\n async keys(pattern: string) {\n const client = await this.getClient()\n return client.keys(pattern)\n }\n\n /**\n * Read only from the cache.\n */\n async get(key: string, opts = { useTenancy: true }) {\n key = opts.useTenancy ? generateTenantKey(key) : key\n const client = await this.getClient()\n return client.get(key)\n }\n\n /**\n * Write to the cache.\n */\n async store(\n key: string,\n value: any,\n ttl: number | null = null,\n opts = { useTenancy: true }\n ) {\n key = opts.useTenancy ? generateTenantKey(key) : key\n const client = await this.getClient()\n await client.store(key, value, ttl)\n }\n\n /**\n * Remove from cache.\n */\n async delete(key: string, opts = { useTenancy: true }) {\n key = opts.useTenancy ? generateTenantKey(key) : key\n const client = await this.getClient()\n return client.delete(key)\n }\n\n /**\n * Read from the cache. 
Write to the cache if not exists.\n */\n async withCache(\n key: string,\n ttl: number,\n fetchFn: any,\n opts = { useTenancy: true }\n ) {\n const cachedValue = await this.get(key, opts)\n if (cachedValue) {\n return cachedValue\n }\n\n try {\n const fetchedValue = await fetchFn()\n\n await this.store(key, fetchedValue, ttl, opts)\n return fetchedValue\n } catch (err) {\n console.error(\"Error fetching before cache - \", err)\n throw err\n }\n }\n\n async bustCache(key: string, opts = { client: null }) {\n const client = await this.getClient()\n try {\n await client.delete(generateTenantKey(key))\n } catch (err) {\n console.error(\"Error busting cache - \", err)\n throw err\n }\n }\n}\n", "import * as redis from \"../redis/init\"\nimport * as tenancy from \"../tenancy\"\nimport * as context from \"../context\"\nimport * as platform from \"../platform\"\nimport env from \"../environment\"\nimport * as accounts from \"../accounts\"\nimport { UserDB } from \"../users\"\nimport { sdk } from \"@budibase/shared-core\"\nimport { User } from \"@budibase/types\"\n\nconst EXPIRY_SECONDS = 3600\n\n/**\n * The default populate user function\n */\nasync function populateFromDB(userId: string, tenantId: string) {\n const db = tenancy.getTenantDB(tenantId)\n const user = await db.get<any>(userId)\n user.budibaseAccess = true\n if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {\n const account = await accounts.getAccount(user.email)\n if (account) {\n user.account = account\n user.accountPortalAccess = true\n }\n }\n\n return user\n}\n\nasync function populateUsersFromDB(\n userIds: string[]\n): Promise<{ users: User[]; notFoundIds?: string[] }> {\n const getUsersResponse = await UserDB.bulkGet(userIds)\n\n // Handle missed user ids\n const notFoundIds = userIds.filter((uid, i) => !getUsersResponse[i])\n\n const users = getUsersResponse.filter(x => x)\n\n await Promise.all(\n users.map(async (user: any) => {\n user.budibaseAccess = true\n if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {\n const account = await accounts.getAccount(user.email)\n if (account) {\n user.account = account\n user.accountPortalAccess = true\n }\n }\n })\n )\n\n if (notFoundIds.length) {\n return { users, notFoundIds }\n }\n return { users }\n}\n\n/**\n * Get the requested user by id.\n * Use redis cache to first read the user.\n * If not present fallback to loading the user directly and re-caching.\n * @param userId the id of the user to get\n * @param tenantId the tenant of the user to get\n * @param populateUser function to provide the user for re-caching. 
default to couch db\n * @returns\n */\nexport async function getUser(\n userId: string,\n tenantId?: string,\n populateUser?: any\n) {\n if (!populateUser) {\n populateUser = populateFromDB\n }\n if (!tenantId) {\n try {\n tenantId = context.getTenantId()\n } catch (err) {\n tenantId = await platform.users.lookupTenantId(userId)\n }\n }\n const client = await redis.getUserClient()\n // try cache\n let user = await client.get(userId)\n if (!user) {\n user = await populateUser(userId, tenantId)\n await client.store(userId, user, EXPIRY_SECONDS)\n }\n if (user && !user.tenantId && tenantId) {\n // make sure the tenant ID is always correct/set\n user.tenantId = tenantId\n }\n // if has groups, could have builder permissions granted by a group\n if (user.userGroups && !sdk.users.isGlobalBuilder(user)) {\n await context.doInTenant(tenantId, async () => {\n const appIds = await UserDB.getGroupBuilderAppIds(user)\n if (appIds.length) {\n const existing = user.builder?.apps || []\n user.builder = {\n apps: [...new Set(existing.concat(appIds))],\n }\n }\n })\n }\n return user\n}\n\n/**\n * Get the requested users by id.\n * Use redis cache to first read the users.\n * If not present fallback to loading the users directly and re-caching.\n * @param userIds the ids of the user to get\n * @param tenantId the tenant of the users to get\n * @returns\n */\nexport async function getUsers(\n userIds: string[]\n): Promise<{ users: User[]; notFoundIds?: string[] }> {\n const client = await redis.getUserClient()\n // try cache\n let usersFromCache = await client.bulkGet<User>(userIds)\n const missingUsersFromCache = userIds.filter(uid => !usersFromCache[uid])\n const users = Object.values(usersFromCache)\n let notFoundIds\n\n if (missingUsersFromCache.length) {\n const usersFromDb = await populateUsersFromDB(missingUsersFromCache)\n\n notFoundIds = usersFromDb.notFoundIds\n for (const userToCache of usersFromDb.users) {\n await client.store(userToCache._id!, userToCache, EXPIRY_SECONDS)\n }\n users.push(...usersFromDb.users)\n }\n return { users, notFoundIds: notFoundIds }\n}\n\nexport async function invalidateUser(userId: string) {\n const client = await redis.getUserClient()\n await client.delete(userId)\n}\n", "export * from \"./db\"\nexport * from \"./tenancy\"\n", "import { getDB } from \"../db/db\"\nimport { getGlobalDBName } from \"../context\"\n\nexport function getTenantDB(tenantId: string) {\n return getDB(getGlobalDBName(tenantId))\n}\n", "import {\n DEFAULT_TENANT_ID,\n getTenantId,\n getTenantIDFromAppID,\n isMultiTenant,\n getPlatformURL,\n} from \"../context\"\nimport {\n BBContext,\n TenantResolutionStrategy,\n GetTenantIdOptions,\n} from \"@budibase/types\"\nimport { Header } from \"../constants\"\n\nexport function addTenantToUrl(url: string) {\n const tenantId = getTenantId()\n\n if (isMultiTenant()) {\n const char = url.indexOf(\"?\") === -1 ? 
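The cached user lookup above can be exercised roughly as follows, a sketch assuming `cache` and `context` are the backend-core re-exports of the modules shown in this map and that the user cache is exposed as `cache.user`:

```ts
import { cache, context } from "@budibase/backend-core" // assumed re-export names

async function loadUser(userId: string, tenantId: string) {
  return context.doInTenant(tenantId, async () => {
    // Redis first; on a miss the user is populated from CouchDB and
    // re-cached for EXPIRY_SECONDS (one hour).
    return cache.user.getUser(userId, tenantId)
  })
}

// After updating a user document, drop the cached copy so the next
// read repopulates it:
//   await cache.user.invalidateUser(userId)
```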
\"?\" : \"&\"\n url += `${char}tenantId=${tenantId}`\n }\n\n return url\n}\n\nexport const isUserInAppTenant = (appId: string, user?: any) => {\n let userTenantId\n if (user) {\n userTenantId = user.tenantId || DEFAULT_TENANT_ID\n } else {\n userTenantId = getTenantId()\n }\n const tenantId = getTenantIDFromAppID(appId) || DEFAULT_TENANT_ID\n return tenantId === userTenantId\n}\n\nconst ALL_STRATEGIES = Object.values(TenantResolutionStrategy)\n\nexport const getTenantIDFromCtx = (\n ctx: BBContext,\n opts: GetTenantIdOptions\n): string | undefined => {\n // exit early if not multi-tenant\n if (!isMultiTenant()) {\n return DEFAULT_TENANT_ID\n }\n\n // opt defaults\n if (opts.allowNoTenant === undefined) {\n opts.allowNoTenant = false\n }\n if (!opts.includeStrategies) {\n opts.includeStrategies = ALL_STRATEGIES\n }\n if (!opts.excludeStrategies) {\n opts.excludeStrategies = []\n }\n\n const isAllowed = (strategy: TenantResolutionStrategy) => {\n // excluded takes precedence\n if (opts.excludeStrategies?.includes(strategy)) {\n return false\n }\n if (opts.includeStrategies?.includes(strategy)) {\n return true\n }\n }\n\n // always use user first\n if (isAllowed(TenantResolutionStrategy.USER)) {\n const userTenantId = ctx.user?.tenantId\n if (userTenantId) {\n return userTenantId\n }\n }\n\n // header\n if (isAllowed(TenantResolutionStrategy.HEADER)) {\n const headerTenantId = ctx.request.headers[Header.TENANT_ID]\n if (headerTenantId) {\n return headerTenantId as string\n }\n }\n\n // query param\n if (isAllowed(TenantResolutionStrategy.QUERY)) {\n const queryTenantId = ctx.request.query.tenantId\n if (queryTenantId) {\n return queryTenantId as string\n }\n }\n\n // subdomain\n if (isAllowed(TenantResolutionStrategy.SUBDOMAIN)) {\n // e.g. budibase.app or local.com:10000\n let platformHost\n try {\n platformHost = new URL(getPlatformURL()).host.split(\":\")[0]\n } catch (err: any) {\n // if invalid URL, just don't try to process subdomain\n if (err.code !== \"ERR_INVALID_URL\") {\n throw err\n }\n }\n // e.g. 
tenant.budibase.app or tenant.local.com\n const requestHost = ctx.host\n // parse the tenant id from the difference\n if (platformHost && requestHost.includes(platformHost)) {\n const tenantId = requestHost.substring(\n 0,\n requestHost.indexOf(`.${platformHost}`)\n )\n if (tenantId) {\n return tenantId\n }\n }\n }\n\n // path\n if (isAllowed(TenantResolutionStrategy.PATH)) {\n // params - have to parse manually due to koa-router not run yet\n const match = ctx.matched.find(\n (m: any) => !!m.paramNames.find((p: any) => p.name === \"tenantId\")\n )\n\n // get the raw path url - without any query params\n const ctxUrl = ctx.originalUrl\n let url\n if (ctxUrl.includes(\"?\")) {\n url = ctxUrl.split(\"?\")[0]\n } else {\n url = ctxUrl\n }\n\n if (match) {\n const params = match.params(url, match.captures(url), {})\n if (params.tenantId) {\n return params.tenantId\n }\n }\n }\n\n if (!opts.allowNoTenant) {\n ctx.throw(403, \"Tenant id not set\")\n }\n\n return undefined\n}\n", "export * as users from \"./users\"\nexport * as tenants from \"./tenants\"\nexport * from \"./platformDb\"\n", "import { getPlatformDB } from \"./platformDb\"\nimport { DEFAULT_TENANT_ID } from \"../constants\"\nimport env from \"../environment\"\nimport {\n PlatformUser,\n PlatformUserByEmail,\n PlatformUserById,\n PlatformUserBySsoId,\n User,\n} from \"@budibase/types\"\n\n// READ\n\nexport async function lookupTenantId(userId: string) {\n if (!env.MULTI_TENANCY) {\n return DEFAULT_TENANT_ID\n }\n\n const user = await getUserDoc(userId)\n return user.tenantId\n}\n\nasync function getUserDoc(emailOrId: string): Promise<PlatformUser> {\n const db = getPlatformDB()\n return db.get(emailOrId)\n}\n\n// CREATE\n\nfunction newUserIdDoc(id: string, tenantId: string): PlatformUserById {\n return {\n _id: id,\n tenantId,\n }\n}\n\nfunction newUserEmailDoc(\n userId: string,\n email: string,\n tenantId: string\n): PlatformUserByEmail {\n return {\n _id: email,\n userId,\n tenantId,\n }\n}\n\nfunction newUserSsoIdDoc(\n ssoId: string,\n email: string,\n userId: string,\n tenantId: string\n): PlatformUserBySsoId {\n return {\n _id: ssoId,\n userId,\n email,\n tenantId,\n }\n}\n\n/**\n * Add a new user id or email doc if it doesn't exist.\n */\nasync function addUserDoc(emailOrId: string, newDocFn: () => PlatformUser) {\n const db = getPlatformDB()\n let user: PlatformUser\n\n try {\n await db.get(emailOrId)\n } catch (e: any) {\n if (e.status === 404) {\n user = newDocFn()\n await db.put(user)\n } else {\n throw e\n }\n }\n}\n\nexport async function addUser(\n tenantId: string,\n userId: string,\n email: string,\n ssoId?: string\n) {\n const promises = [\n addUserDoc(userId, () => newUserIdDoc(userId, tenantId)),\n addUserDoc(email, () => newUserEmailDoc(userId, email, tenantId)),\n ]\n\n if (ssoId) {\n promises.push(\n addUserDoc(ssoId, () => newUserSsoIdDoc(ssoId, email, userId, tenantId))\n )\n }\n\n await Promise.all(promises)\n}\n\n// DELETE\n\nexport async function removeUser(user: User) {\n const db = getPlatformDB()\n const keys = [user._id!, user.email]\n const userDocs = await db.allDocs({\n keys,\n include_docs: true,\n })\n const toDelete = userDocs.rows.map((row: any) => {\n return {\n ...row.doc,\n _deleted: true,\n }\n })\n await db.bulkDocs(toDelete)\n}\n", "import { StaticDatabases } from \"../constants\"\nimport { getDB } from \"../db/db\"\n\nexport function getPlatformDB() {\n return getDB(StaticDatabases.PLATFORM_INFO.name)\n}\n", "import { StaticDatabases } from \"../constants\"\nimport { getPlatformDB } from 
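A sketch of using the strategy-based resolver above from Koa middleware, assuming the tenancy module is re-exported as `tenancy` from `@budibase/backend-core`; `TenantResolutionStrategy` comes from `@budibase/types` as in the source:

```ts
import { tenancy } from "@budibase/backend-core" // assumed re-export name
import { TenantResolutionStrategy } from "@budibase/types"

export const tenantMiddleware = async (ctx: any, next: any) => {
  // Resolution order in the source: user, header, query param, subdomain, path.
  // SUBDOMAIN is excluded here; allowNoTenant: false makes the helper throw a
  // 403 when no strategy matches.
  ctx.tenantId = tenancy.getTenantIDFromCtx(ctx, {
    allowNoTenant: false,
    excludeStrategies: [TenantResolutionStrategy.SUBDOMAIN],
  })
  return next()
}
```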
\"./platformDb\"\nimport { LockName, LockOptions, LockType, Tenants } from \"@budibase/types\"\nimport * as locks from \"../redis/redlockImpl\"\n\nconst TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants\n\nexport const tenacyLockOptions: LockOptions = {\n type: LockType.DEFAULT,\n name: LockName.UPDATE_TENANTS_DOC,\n ttl: 10 * 1000, // auto expire after 10 seconds\n systemLock: true,\n}\n\n// READ\n\nexport async function getTenantIds(): Promise<string[]> {\n const tenants = await getTenants()\n return tenants.tenantIds\n}\n\nasync function getTenants(): Promise<Tenants> {\n const db = getPlatformDB()\n let tenants: Tenants\n\n try {\n tenants = await db.get(TENANT_DOC)\n } catch (e: any) {\n // doesn't exist yet - create\n if (e.status === 404) {\n tenants = await createTenantsDoc()\n } else {\n throw e\n }\n }\n\n return tenants\n}\n\nexport async function exists(tenantId: string) {\n const tenants = await getTenants()\n return tenants.tenantIds.indexOf(tenantId) !== -1\n}\n\n// CREATE / UPDATE\n\nfunction newTenantsDoc(): Tenants {\n return {\n _id: TENANT_DOC,\n tenantIds: [],\n }\n}\n\nasync function createTenantsDoc(): Promise<Tenants> {\n const db = getPlatformDB()\n let tenants = newTenantsDoc()\n\n try {\n const response = await db.put(tenants)\n tenants._rev = response.rev\n } catch (e: any) {\n // don't throw 409 is doc has already been created\n if (e.status === 409) {\n return db.get(TENANT_DOC)\n }\n throw e\n }\n\n return tenants\n}\n\nexport async function addTenant(tenantId: string) {\n const db = getPlatformDB()\n\n // use a lock as tenant creation is conflict prone\n await locks.doWithLock(tenacyLockOptions, async () => {\n const tenants = await getTenants()\n\n // write the new tenant if it doesn't already exist\n if (tenants.tenantIds.indexOf(tenantId) === -1) {\n tenants.tenantIds.push(tenantId)\n await db.put(tenants)\n }\n })\n}\n\n// DELETE\n\nexport async function removeTenant(tenantId: string) {\n try {\n await locks.doWithLock(tenacyLockOptions, async () => {\n const db = getPlatformDB()\n const tenants = await getTenants()\n tenants.tenantIds = tenants.tenantIds.filter(id => id !== tenantId)\n await db.put(tenants)\n })\n } catch (err) {\n console.error(`Error removing tenant ${tenantId} from info db`, err)\n throw err\n }\n}\n", "import Redlock from \"redlock\"\nimport { getLockClient } from \"./init\"\nimport { LockOptions, LockType } from \"@budibase/types\"\nimport * as context from \"../context\"\nimport { utils } from \"@budibase/shared-core\"\nimport { Duration } from \"../utils\"\n\nasync function getClient(\n type: LockType,\n opts?: Redlock.Options\n): Promise<Redlock> {\n if (type === LockType.CUSTOM) {\n return newRedlock(opts)\n }\n\n switch (type) {\n case LockType.TRY_ONCE: {\n return newRedlock(OPTIONS.TRY_ONCE)\n }\n case LockType.TRY_TWICE: {\n return newRedlock(OPTIONS.TRY_TWICE)\n }\n case LockType.DEFAULT: {\n return newRedlock(OPTIONS.DEFAULT)\n }\n case LockType.DELAY_500: {\n return newRedlock(OPTIONS.DELAY_500)\n }\n case LockType.AUTO_EXTEND: {\n return newRedlock(OPTIONS.AUTO_EXTEND)\n }\n default: {\n throw utils.unreachable(type)\n }\n }\n}\n\nconst OPTIONS: Record<keyof typeof LockType, Redlock.Options> = {\n TRY_ONCE: {\n // immediately throws an error if the lock is already held\n retryCount: 0,\n },\n TRY_TWICE: {\n retryCount: 1,\n },\n DEFAULT: {\n // the expected clock drift; for more details\n // see http://redis.io/topics/distlock\n driftFactor: 0.01, // multiplied by lock ttl to determine drift time\n\n // the max 
number of times Redlock will attempt\n // to lock a resource before erroring\n retryCount: 10,\n\n // the time in ms between attempts\n retryDelay: 200, // time in ms\n\n // the max time in ms randomly added to retries\n // to improve performance under high contention\n // see https://www.awsarchitectureblog.com/2015/03/backoff.html\n retryJitter: 100, // time in ms\n },\n DELAY_500: {\n retryDelay: 500,\n },\n CUSTOM: {},\n AUTO_EXTEND: {\n retryCount: -1,\n },\n}\n\nexport async function newRedlock(opts: Redlock.Options = {}) {\n const options = { ...OPTIONS.DEFAULT, ...opts }\n const redisWrapper = await getLockClient()\n const client = redisWrapper.getClient()\n return new Redlock([client], options)\n}\n\ntype SuccessfulRedlockExecution<T> = {\n executed: true\n result: T\n}\ntype UnsuccessfulRedlockExecution = {\n executed: false\n}\n\ntype RedlockExecution<T> =\n | SuccessfulRedlockExecution<T>\n | UnsuccessfulRedlockExecution\n\nfunction getLockName(opts: LockOptions) {\n // determine lock name\n // by default use the tenantId for uniqueness, unless using a system lock\n const prefix = opts.systemLock ? \"system\" : context.getTenantId()\n let name: string = `lock:${prefix}_${opts.name}`\n // add additional unique name if required\n if (opts.resource) {\n name = name + `_${opts.resource}`\n }\n return name\n}\n\nexport const AUTO_EXTEND_POLLING_MS = Duration.fromSeconds(10).toMs()\n\nexport async function doWithLock<T>(\n opts: LockOptions,\n task: () => Promise<T>\n): Promise<RedlockExecution<T>> {\n const redlock = await getClient(opts.type, opts.customOptions)\n let lock: Redlock.Lock | undefined\n let timeout\n try {\n const name = getLockName(opts)\n\n const ttl =\n opts.type === LockType.AUTO_EXTEND ? AUTO_EXTEND_POLLING_MS : opts.ttl\n\n // create the lock\n lock = await redlock.lock(name, ttl)\n\n if (opts.type === LockType.AUTO_EXTEND) {\n // We keep extending the lock while the task is running\n const extendInIntervals = (): void => {\n timeout = setTimeout(async () => {\n lock = await lock!.extend(ttl, () => opts.onExtend && opts.onExtend())\n\n extendInIntervals()\n }, ttl / 2)\n }\n\n extendInIntervals()\n }\n\n // perform locked task\n // need to await to ensure completion before unlocking\n const result = await task()\n return { executed: true, result }\n } catch (e: any) {\n // lock limit exceeded\n if (e.name === \"LockError\") {\n if (opts.type === LockType.TRY_ONCE) {\n // don't throw for try-once locks, they will always error\n // due to retry count (0) exceeded\n return { executed: false }\n } else {\n throw e\n }\n } else {\n throw e\n }\n } finally {\n clearTimeout(timeout)\n await lock?.unlock()\n }\n}\n", "export * from \"./hashing\"\nexport * from \"./utils\"\nexport * from \"./stringUtils\"\nexport * from \"./Duration\"\n", "import env from \"../environment\"\n\nexport * from \"../docIds/newid\"\nconst bcrypt = env.JS_BCRYPT ? 
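A hedged sketch of the lock wrapper above. The `locks` re-export name is an assumption; `LockName` and `LockType` come from `@budibase/types` as in the source, and `UPDATE_TENANTS_DOC` is reused here only because it is the member visible in this map:

```ts
import { locks } from "@budibase/backend-core" // assumed re-export name
import { LockName, LockType } from "@budibase/types"

async function runOnOneNodeOnly<T>(task: () => Promise<T>) {
  const result = await locks.doWithLock(
    {
      type: LockType.TRY_ONCE, // retryCount: 0, never waits for the lock
      name: LockName.UPDATE_TENANTS_DOC, // any LockName member; this one is visible above
      ttl: 30_000, // ms before the lock auto-expires
      systemLock: true, // skip the tenant-id prefix, no tenant context needed
    },
    task
  )
  if (!result.executed) {
    // TRY_ONCE resolves with { executed: false } instead of throwing
    // when another process already holds the lock.
    return undefined
  }
  return result.result
}
```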
require(\"bcryptjs\") : require(\"bcrypt\")\n\nconst SALT_ROUNDS = env.SALT_ROUNDS || 10\n\nexport async function hash(data: string) {\n const salt = await bcrypt.genSalt(SALT_ROUNDS)\n return bcrypt.hash(data, salt)\n}\n\nexport async function compare(data: string, encrypted: string) {\n return bcrypt.compare(data, encrypted)\n}\n", "import { getAllApps } from \"../db\"\nimport { Header, MAX_VALID_DATE, DocumentType, SEPARATOR } from \"../constants\"\nimport env from \"../environment\"\nimport * as tenancy from \"../tenancy\"\nimport * as context from \"../context\"\nimport {\n App,\n AuditedEventFriendlyName,\n Ctx,\n Event,\n TenantResolutionStrategy,\n} from \"@budibase/types\"\nimport type { SetOption } from \"cookies\"\nimport jwt, { Secret } from \"jsonwebtoken\"\n\nconst APP_PREFIX = DocumentType.APP + SEPARATOR\nconst PROD_APP_PREFIX = \"/app/\"\n\nconst BUILDER_PREVIEW_PATH = \"/app/preview\"\nconst BUILDER_PREFIX = \"/builder\"\nconst BUILDER_APP_PREFIX = `${BUILDER_PREFIX}/app/`\nconst PUBLIC_API_PREFIX = \"/api/public/v\"\n\nfunction confirmAppId(possibleAppId: string | undefined) {\n return possibleAppId && possibleAppId.startsWith(APP_PREFIX)\n ? possibleAppId\n : undefined\n}\n\nexport async function resolveAppUrl(ctx: Ctx) {\n const appUrl = ctx.path.split(\"/\")[2]\n let possibleAppUrl = `/${appUrl.toLowerCase()}`\n\n let tenantId: string | undefined = context.getTenantId()\n if (!env.isDev() && env.MULTI_TENANCY) {\n // always use the tenant id from the subdomain in multi tenancy\n // this ensures the logged-in user tenant id doesn't overwrite\n // e.g. in the case of viewing a public app while already logged-in to another tenant\n tenantId = tenancy.getTenantIDFromCtx(ctx, {\n includeStrategies: [TenantResolutionStrategy.SUBDOMAIN],\n })\n }\n\n // search prod apps for an url that matches\n const apps: App[] = await context.doInTenant(\n tenantId,\n () => getAllApps({ dev: false }) as Promise<App[]>\n )\n const app = apps.filter(\n a => a.url && a.url.toLowerCase() === possibleAppUrl\n )[0]\n\n return app && app.appId ? 
app.appId : undefined\n}\n\nexport function isServingApp(ctx: Ctx) {\n // dev app\n if (ctx.path.startsWith(`/${APP_PREFIX}`)) {\n return true\n }\n // prod app\n return ctx.path.startsWith(PROD_APP_PREFIX)\n}\n\nexport function isServingBuilder(ctx: Ctx): boolean {\n return ctx.path.startsWith(BUILDER_APP_PREFIX)\n}\n\nexport function isServingBuilderPreview(ctx: Ctx): boolean {\n return ctx.path.startsWith(BUILDER_PREVIEW_PATH)\n}\n\nexport function isPublicApiRequest(ctx: Ctx): boolean {\n return ctx.path.startsWith(PUBLIC_API_PREFIX)\n}\n\n/**\n * Given a request tries to find the appId, which can be located in various places\n * @param ctx The main request body to look through.\n * @returns If an appId was found it will be returned.\n */\nexport async function getAppIdFromCtx(ctx: Ctx) {\n // look in headers\n const options = [ctx.request.headers[Header.APP_ID]]\n let appId\n for (let option of options) {\n appId = confirmAppId(option as string)\n if (appId) {\n break\n }\n }\n\n // look in body\n if (!appId && ctx.request.body && ctx.request.body.appId) {\n appId = confirmAppId(ctx.request.body.appId)\n }\n\n // look in the path\n const pathId = parseAppIdFromUrlPath(ctx.path)\n if (!appId && pathId) {\n appId = confirmAppId(pathId)\n }\n\n // lookup using custom url - prod apps only\n // filter out the builder preview path which collides with the prod app path\n // to ensure we don't load all apps excessively\n const isBuilderPreview = ctx.path.startsWith(BUILDER_PREVIEW_PATH)\n const isViewingProdApp =\n ctx.path.startsWith(PROD_APP_PREFIX) && !isBuilderPreview\n if (!appId && isViewingProdApp) {\n appId = confirmAppId(await resolveAppUrl(ctx))\n }\n\n // look in the referer - builder only\n // make sure this is performed after prod app url resolution, in case the\n // referer header is present from a builder redirect\n const referer = ctx.request.headers.referer\n if (!appId && referer?.includes(BUILDER_APP_PREFIX)) {\n const refererId = parseAppIdFromUrlPath(ctx.request.headers.referer)\n appId = confirmAppId(refererId)\n }\n\n return appId\n}\n\nfunction parseAppIdFromUrlPath(url?: string) {\n if (!url) {\n return\n }\n return url\n .split(\"?\")[0] // Remove any possible query string\n .split(\"/\")\n .find(subPath => subPath.startsWith(APP_PREFIX))\n}\n\n/**\n * opens the contents of the specified encrypted JWT.\n * @return the contents of the token.\n */\nexport function openJwt<T>(token?: string): T | undefined {\n if (!token) {\n return undefined\n }\n try {\n return jwt.verify(token, env.JWT_SECRET as Secret) as T\n } catch (e) {\n if (env.JWT_SECRET_FALLBACK) {\n // fallback to enable rotation\n return jwt.verify(token, env.JWT_SECRET_FALLBACK) as T\n } else {\n throw e\n }\n }\n}\n\nexport function isValidInternalAPIKey(apiKey: string) {\n if (env.INTERNAL_API_KEY && env.INTERNAL_API_KEY === apiKey) {\n return true\n }\n // fallback to enable rotation\n return !!(\n env.INTERNAL_API_KEY_FALLBACK && env.INTERNAL_API_KEY_FALLBACK === apiKey\n )\n}\n\n/**\n * Get a cookie from context, and decrypt if necessary.\n * @param ctx The request which is to be manipulated.\n * @param name The name of the cookie to get.\n */\nexport function getCookie<T>(ctx: Ctx, name: string) {\n const cookie = ctx.cookies.get(name)\n\n if (!cookie) {\n return undefined\n }\n\n return openJwt<T>(cookie)\n}\n\n/**\n * Store a cookie for the request - it will not expire.\n * @param ctx The request which is to be manipulated.\n * @param name The name of the cookie to set.\n * @param value The value 
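getAppIdFromCtx above checks, in order, the app id header, the request body, the URL path, the custom prod-app URL and finally the builder referer. A middleware sketch under the same `utils` re-export assumption:

```ts
import { utils } from "@budibase/backend-core" // assumed re-export name

export const appIdMiddleware = async (ctx: any, next: any) => {
  const appId = await utils.getAppIdFromCtx(ctx)
  if (!appId) {
    ctx.throw(400, "No appId could be resolved from the request")
  }
  ctx.appId = appId
  return next()
}
```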
of cookie which will be set.\n * @param opts options like whether to sign.\n */\nexport function setCookie(\n ctx: Ctx,\n value: any,\n name = \"builder\",\n opts = { sign: true }\n) {\n if (value && opts && opts.sign) {\n value = jwt.sign(value, env.JWT_SECRET as Secret)\n }\n\n const config: SetOption = {\n expires: MAX_VALID_DATE,\n path: \"/\",\n httpOnly: false,\n overwrite: true,\n }\n\n if (env.COOKIE_DOMAIN) {\n config.domain = env.COOKIE_DOMAIN\n }\n\n ctx.cookies.set(name, value, config)\n}\n\n/**\n * Utility function, simply calls setCookie with an empty string for value\n */\nexport function clearCookie(ctx: Ctx, name: string) {\n setCookie(ctx, null, name)\n}\n\n/**\n * Checks if the API call being made (based on the provided ctx object) is from the client. If\n * the call is not from a client app then it is from the builder.\n * @param ctx The koa context object to be tested.\n * @return returns true if the call is from the client lib (a built app rather than the builder).\n */\nexport function isClient(ctx: Ctx) {\n return ctx.headers[Header.TYPE] === \"client\"\n}\n\nexport function timeout(timeMs: number) {\n return new Promise(resolve => setTimeout(resolve, timeMs))\n}\n\nexport function isAudited(event: Event) {\n return !!AuditedEventFriendlyName[event]\n}\n\nexport function hasCircularStructure(json: any) {\n if (typeof json !== \"object\") {\n return false\n }\n try {\n JSON.stringify(json)\n } catch (err) {\n if (err instanceof Error && err?.message.includes(\"circular structure\")) {\n return true\n }\n }\n return false\n}\n", "export function validEmail(value: string) {\n return (\n value &&\n !!value.match(\n /^(([^<>()[\\]\\\\.,;:\\s@\"]+(\\.[^<>()[\\]\\\\.,;:\\s@\"]+)*)|(\".+\"))@((\\[[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}])|(([a-zA-Z\\-0-9]+\\.)+[a-zA-Z]{2,}))$/\n )\n )\n}\n", "export enum DurationType {\n MILLISECONDS = \"milliseconds\",\n SECONDS = \"seconds\",\n MINUTES = \"minutes\",\n HOURS = \"hours\",\n DAYS = \"days\",\n}\n\nconst conversion: Record<DurationType, number> = {\n milliseconds: 1,\n seconds: 1000,\n minutes: 60 * 1000,\n hours: 60 * 60 * 1000,\n days: 24 * 60 * 60 * 1000,\n}\n\nexport class Duration {\n static convert(from: DurationType, to: DurationType, duration: number) {\n const milliseconds = duration * conversion[from]\n return milliseconds / conversion[to]\n }\n\n static from(from: DurationType, duration: number) {\n return {\n to: (to: DurationType) => {\n return Duration.convert(from, to, duration)\n },\n toMs: () => {\n return Duration.convert(from, DurationType.MILLISECONDS, duration)\n },\n toSeconds: () => {\n return Duration.convert(from, DurationType.SECONDS, duration)\n },\n }\n }\n\n static fromSeconds(duration: number) {\n return Duration.from(DurationType.SECONDS, duration)\n }\n\n static fromMinutes(duration: number) {\n return Duration.from(DurationType.MINUTES, duration)\n }\n\n static fromHours(duration: number) {\n return Duration.from(DurationType.HOURS, duration)\n }\n\n static fromDays(duration: number) {\n return Duration.from(DurationType.DAYS, duration)\n }\n\n static fromMilliseconds(duration: number) {\n return Duration.from(DurationType.MILLISECONDS, duration)\n }\n}\n", "export * from \"./accounts\"\n", "import fetch from \"node-fetch\"\nimport * as logging from \"../logging\"\n\nexport default class API {\n host: string\n\n constructor(host: string) {\n this.host = host\n }\n\n async apiCall(method: string, url: string, options?: any) {\n if (!options.headers) {\n options.headers = {}\n }\n\n if 
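The Duration helper above is a plain unit-conversion table. A quick worked sketch (same `utils` re-export assumption; `Duration` and `DurationType` are runtime exports of the utils index shown earlier):

```ts
import { utils } from "@budibase/backend-core" // assumed re-export name
const { Duration, DurationType } = utils

const inviteTtlSeconds = Duration.fromDays(7).toSeconds() // 604800
const pollIntervalMs = Duration.fromSeconds(10).toMs()    // 10000
const hoursInTwoDays = Duration.convert(DurationType.DAYS, DurationType.HOURS, 2) // 48
```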
(!options.headers[\"Content-Type\"]) {\n options.headers = {\n \"Content-Type\": \"application/json\",\n Accept: \"application/json\",\n ...options.headers,\n }\n }\n\n let json = options.headers[\"Content-Type\"] === \"application/json\"\n\n // add x-budibase-correlation-id header\n logging.correlation.setHeader(options.headers)\n\n const requestOptions = {\n method: method,\n body: json ? JSON.stringify(options.body) : options.body,\n headers: options.headers,\n // TODO: See if this is necessary\n credentials: \"include\",\n }\n\n return await fetch(`${this.host}${url}`, requestOptions)\n }\n\n async post(url: string, options?: any) {\n return this.apiCall(\"POST\", url, options)\n }\n\n async get(url: string, options?: any) {\n return this.apiCall(\"GET\", url, options)\n }\n\n async patch(url: string, options?: any) {\n return this.apiCall(\"PATCH\", url, options)\n }\n\n async del(url: string, options?: any) {\n return this.apiCall(\"DELETE\", url, options)\n }\n\n async put(url: string, options?: any) {\n return this.apiCall(\"PUT\", url, options)\n }\n}\n", "import API from \"./api\"\nimport env from \"../environment\"\nimport { Header } from \"../constants\"\nimport { CloudAccount, HealthStatusResponse } from \"@budibase/types\"\n\nconst api = new API(env.ACCOUNT_PORTAL_URL)\n\n/**\n * This client is intended to be used in a cloud hosted deploy only.\n * Rather than relying on each consumer to perform the necessary environmental checks\n * we use the following check to exit early with a undefined response which should be\n * handled by the caller.\n */\nconst EXIT_EARLY = env.SELF_HOSTED || env.DISABLE_ACCOUNT_PORTAL\n\nexport const getAccount = async (\n email: string\n): Promise<CloudAccount | undefined> => {\n if (EXIT_EARLY) {\n return\n }\n const payload = {\n email,\n }\n const response = await api.post(`/api/accounts/search`, {\n body: payload,\n headers: {\n [Header.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,\n },\n })\n\n if (response.status !== 200) {\n throw new Error(`Error getting account by email ${email}`)\n }\n\n const json: CloudAccount[] = await response.json()\n return json[0]\n}\n\nexport const getAccountByTenantId = async (\n tenantId: string\n): Promise<CloudAccount | undefined> => {\n if (EXIT_EARLY) {\n return\n }\n const payload = {\n tenantId,\n }\n const response = await api.post(`/api/accounts/search`, {\n body: payload,\n headers: {\n [Header.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,\n },\n })\n\n if (response.status !== 200) {\n throw new Error(`Error getting account by tenantId ${tenantId}`)\n }\n\n const json: CloudAccount[] = await response.json()\n return json[0]\n}\n\nexport const getStatus = async (): Promise<\n HealthStatusResponse | undefined\n> => {\n if (EXIT_EARLY) {\n return\n }\n const response = await api.get(`/api/status`, {\n headers: {\n [Header.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,\n },\n })\n const json = await response.json()\n\n if (response.status !== 200) {\n throw new Error(`Error getting status`)\n }\n\n return json\n}\n", "export * from \"./users\"\nexport * from \"./utils\"\nexport * from \"./lookup\"\nexport { UserDB } from \"./db\"\n", "import {\n directCouchFind,\n DocumentType,\n generateAppUserID,\n getGlobalUserParams,\n getProdAppID,\n getUsersByAppParams,\n pagination,\n queryGlobalView,\n queryGlobalViewRaw,\n SEPARATOR,\n UNICODE_MAX,\n ViewName,\n} from \"../db\"\nimport {\n BulkDocsResponse,\n SearchQuery,\n SearchQueryOperators,\n SearchUsersRequest,\n User,\n ContextUser,\n DatabaseQueryOpts,\n CouchFindOptions,\n} from 
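The account-portal client above exits early with undefined when SELF_HOSTED or DISABLE_ACCOUNT_PORTAL is set, so callers must handle a missing account. A sketch, assuming the module is re-exported as `accounts`:

```ts
import { accounts } from "@budibase/backend-core" // assumed re-export name

async function accountHolderId(tenantId: string): Promise<string | undefined> {
  const account = await accounts.getAccountByTenantId(tenantId)
  // undefined when self-hosted, portal disabled, or no account exists
  return account?.budibaseUserId
}
```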
\"@budibase/types\"\nimport { getGlobalDB } from \"../context\"\nimport * as context from \"../context\"\nimport { isCreator } from \"./utils\"\nimport { UserDB } from \"./db\"\n\ntype GetOpts = { cleanup?: boolean }\n\nfunction removeUserPassword(users: User | User[]) {\n if (Array.isArray(users)) {\n return users.map(user => {\n if (user) {\n delete user.password\n return user\n }\n })\n } else if (users) {\n delete users.password\n return users\n }\n return users\n}\n\nexport function isSupportedUserSearch(query: SearchQuery) {\n const allowed = [\n { op: SearchQueryOperators.STRING, key: \"email\" },\n { op: SearchQueryOperators.EQUAL, key: \"_id\" },\n ]\n for (let [key, operation] of Object.entries(query)) {\n if (typeof operation !== \"object\") {\n return false\n }\n const fields = Object.keys(operation || {})\n // this filter doesn't contain options - ignore\n if (fields.length === 0) {\n continue\n }\n const allowedOperation = allowed.find(\n allow =>\n allow.op === key && fields.length === 1 && fields[0] === allow.key\n )\n if (!allowedOperation) {\n return false\n }\n }\n return true\n}\n\nexport async function bulkGetGlobalUsersById(\n userIds: string[],\n opts?: GetOpts\n) {\n const db = getGlobalDB()\n let users = (\n await db.allDocs({\n keys: userIds,\n include_docs: true,\n })\n ).rows.map(row => row.doc) as User[]\n if (opts?.cleanup) {\n users = removeUserPassword(users) as User[]\n }\n return users\n}\n\nexport async function getAllUserIds() {\n const db = getGlobalDB()\n const startKey = `${DocumentType.USER}${SEPARATOR}`\n const response = await db.allDocs({\n startkey: startKey,\n endkey: `${startKey}${UNICODE_MAX}`,\n })\n return response.rows.map(row => row.id)\n}\n\nexport async function bulkUpdateGlobalUsers(users: User[]) {\n const db = getGlobalDB()\n return (await db.bulkDocs(users)) as BulkDocsResponse\n}\n\nexport async function getById(id: string, opts?: GetOpts): Promise<User> {\n const db = context.getGlobalDB()\n let user = await db.get<User>(id)\n if (opts?.cleanup) {\n user = removeUserPassword(user) as User\n }\n return user\n}\n\n/**\n * Given an email address this will use a view to search through\n * all the users to find one with this email address.\n */\nexport async function getGlobalUserByEmail(\n email: String,\n opts?: GetOpts\n): Promise<User | undefined> {\n if (email == null) {\n throw \"Must supply an email address to view\"\n }\n\n const response = await queryGlobalView<User>(ViewName.USER_BY_EMAIL, {\n key: email.toLowerCase(),\n include_docs: true,\n })\n\n if (Array.isArray(response)) {\n // shouldn't be able to happen, but need to handle just in case\n throw new Error(`Multiple users found with email address: ${email}`)\n }\n\n let user = response as User\n if (opts?.cleanup) {\n user = removeUserPassword(user) as User\n }\n\n return user\n}\n\nexport async function doesUserExist(email: string) {\n try {\n const user = await getGlobalUserByEmail(email)\n if (Array.isArray(user) || user != null) {\n return true\n }\n } catch (err) {\n return false\n }\n return false\n}\n\nexport async function searchGlobalUsersByApp(\n appId: any,\n opts: DatabaseQueryOpts,\n getOpts?: GetOpts\n) {\n if (typeof appId !== \"string\") {\n throw new Error(\"Must provide a string based app ID\")\n }\n const params = getUsersByAppParams(appId, {\n include_docs: true,\n })\n params.startkey = opts && opts.startkey ? 
opts.startkey : params.startkey\n let response = await queryGlobalView<User>(ViewName.USER_BY_APP, params)\n\n if (!response) {\n response = []\n }\n let users: User[] = Array.isArray(response) ? response : [response]\n if (getOpts?.cleanup) {\n users = removeUserPassword(users) as User[]\n }\n return users\n}\n\n/*\n Return any user who potentially has access to the application\n Admins, developers and app users with the explicitly role.\n*/\nexport async function searchGlobalUsersByAppAccess(\n appId: any,\n opts?: { limit?: number }\n) {\n const roleSelector = `roles.${appId}`\n\n let orQuery: any[] = [\n {\n \"builder.global\": true,\n },\n {\n \"admin.global\": true,\n },\n ]\n\n if (appId) {\n const roleCheck = {\n [roleSelector]: {\n $exists: true,\n },\n }\n orQuery.push(roleCheck)\n }\n\n let searchOptions: CouchFindOptions = {\n selector: {\n $or: orQuery,\n _id: {\n $regex: \"^us_\",\n },\n },\n limit: opts?.limit || 50,\n }\n\n const resp = await directCouchFind(context.getGlobalDBName(), searchOptions)\n return resp.rows\n}\n\nexport function getGlobalUserByAppPage(appId: string, user: User) {\n if (!user) {\n return\n }\n return generateAppUserID(getProdAppID(appId)!, user._id!)\n}\n\n/**\n * Performs a starts with search on the global email view.\n */\nexport async function searchGlobalUsersByEmail(\n email: string | unknown,\n opts: any,\n getOpts?: GetOpts\n) {\n if (typeof email !== \"string\") {\n throw new Error(\"Must provide a string to search by\")\n }\n const lcEmail = email.toLowerCase()\n // handle if passing up startkey for pagination\n const startkey = opts && opts.startkey ? opts.startkey : lcEmail\n let response = await queryGlobalView<User>(ViewName.USER_BY_EMAIL, {\n ...opts,\n startkey,\n endkey: `${lcEmail}${UNICODE_MAX}`,\n })\n if (!response) {\n response = []\n }\n let users: User[] = Array.isArray(response) ? response : [response]\n if (getOpts?.cleanup) {\n users = removeUserPassword(users) as User[]\n }\n return users\n}\n\nconst PAGE_LIMIT = 8\nexport async function paginatedUsers({\n bookmark,\n query,\n appId,\n limit,\n}: SearchUsersRequest = {}) {\n const db = getGlobalDB()\n const pageSize = limit ?? 
PAGE_LIMIT\n const pageLimit = pageSize + 1\n // get one extra document, to have the next page\n const opts: DatabaseQueryOpts = {\n include_docs: true,\n limit: pageLimit,\n }\n // add a startkey if the page was specified (anchor)\n if (bookmark) {\n opts.startkey = bookmark\n }\n // property specifies what to use for the page/anchor\n let userList: User[],\n property = \"_id\",\n getKey\n if (query?.equal?._id) {\n userList = [await getById(query.equal._id)]\n } else if (appId) {\n userList = await searchGlobalUsersByApp(appId, opts)\n getKey = (doc: any) => getGlobalUserByAppPage(appId, doc)\n } else if (query?.string?.email) {\n userList = await searchGlobalUsersByEmail(query?.string?.email, opts)\n property = \"email\"\n } else {\n // no search, query allDocs\n const response = await db.allDocs(getGlobalUserParams(null, opts))\n userList = response.rows.map((row: any) => row.doc)\n }\n return pagination(userList, pageSize, {\n paginate: true,\n property,\n getKey,\n })\n}\n\nexport async function getUserCount() {\n const response = await queryGlobalViewRaw(ViewName.USER_BY_EMAIL, {\n limit: 0, // to be as fast as possible - we just want the total rows count\n include_docs: false,\n })\n return response.total_rows\n}\n\nexport async function getCreatorCount() {\n let creators = 0\n async function iterate(startPage?: string) {\n const page = await paginatedUsers({ bookmark: startPage })\n creators += page.data.filter(isCreator).length\n if (page.hasNextPage) {\n await iterate(page.nextPage)\n }\n }\n await iterate()\n return creators\n}\n\n// used to remove the builder/admin permissions, for processing the\n// user as an app user (they may have some specific role/group\nexport function removePortalUserPermissions(user: User | ContextUser) {\n delete user.admin\n delete user.builder\n return user\n}\n\nexport function cleanseUserObject(user: User | ContextUser, base?: User) {\n delete user.admin\n delete user.builder\n delete user.roles\n if (base) {\n user.admin = base.admin\n user.builder = base.builder\n user.roles = base.roles\n }\n return user\n}\n\nexport async function addAppBuilder(user: User, appId: string) {\n const prodAppId = getProdAppID(appId)\n user.builder ??= {}\n user.builder.creator = true\n user.builder.apps ??= []\n user.builder.apps.push(prodAppId)\n await UserDB.save(user, { hashPassword: false })\n}\n\nexport async function removeAppBuilder(user: User, appId: string) {\n const prodAppId = getProdAppID(appId)\n if (user.builder && user.builder.apps?.includes(prodAppId)) {\n user.builder.apps = user.builder.apps.filter(id => id !== prodAppId)\n }\n await UserDB.save(user, { hashPassword: false })\n}\n", "import { CloudAccount } from \"@budibase/types\"\nimport * as accountSdk from \"../accounts\"\nimport env from \"../environment\"\nimport { getPlatformUser } from \"./lookup\"\nimport { EmailUnavailableError } from \"../errors\"\nimport { getTenantId } from \"../context\"\nimport { sdk } from \"@budibase/shared-core\"\nimport { getAccountByTenantId } from \"../accounts\"\n\n// extract from shared-core to make easily accessible from backend-core\nexport const isBuilder = sdk.users.isBuilder\nexport const isAdmin = sdk.users.isAdmin\nexport const isCreator = sdk.users.isCreator\nexport const isGlobalBuilder = sdk.users.isGlobalBuilder\nexport const isAdminOrBuilder = sdk.users.isAdminOrBuilder\nexport const hasAdminPermissions = sdk.users.hasAdminPermissions\nexport const hasBuilderPermissions = sdk.users.hasBuilderPermissions\nexport const hasAppBuilderPermissions = 
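paginatedUsers above fetches one document more than the page size and uses the extra row as the next-page bookmark. A consumer-side sketch, assuming the users module is re-exported as `users`:

```ts
import { users } from "@budibase/backend-core" // assumed re-export name

async function listUsersByEmailPrefix(prefix: string, bookmark?: string) {
  const page = await users.paginatedUsers({
    query: { string: { email: prefix } }, // starts-with search on the email view
    bookmark,                             // startkey anchor from the previous page
    limit: 8,
  })
  return {
    data: page.data,
    nextBookmark: page.hasNextPage ? page.nextPage : undefined,
  }
}
```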
sdk.users.hasAppBuilderPermissions\n\nexport async function validateUniqueUser(email: string, tenantId: string) {\n // check budibase users in other tenants\n if (env.MULTI_TENANCY) {\n const tenantUser = await getPlatformUser(email)\n if (tenantUser != null && tenantUser.tenantId !== tenantId) {\n throw new EmailUnavailableError(email)\n }\n }\n\n // check root account users in account portal\n if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {\n const account = await accountSdk.getAccount(email)\n if (account && account.verified && account.tenantId !== tenantId) {\n throw new EmailUnavailableError(email)\n }\n }\n}\n\n/**\n * For the given user id's, return the account holder if it is in the ids.\n */\nexport async function getAccountHolderFromUserIds(\n userIds: string[]\n): Promise<CloudAccount | undefined> {\n if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {\n const tenantId = getTenantId()\n const account = await getAccountByTenantId(tenantId)\n if (!account) {\n throw new Error(`Account not found for tenantId=${tenantId}`)\n }\n\n const budibaseUserId = account.budibaseUserId\n if (userIds.includes(budibaseUserId)) {\n return account\n }\n }\n}\n", "import {\n AccountMetadata,\n PlatformUser,\n PlatformUserByEmail,\n User,\n} from \"@budibase/types\"\nimport * as dbUtils from \"../db\"\nimport { ViewName } from \"../constants\"\nimport { getExistingInvites } from \"../cache/invite\"\n\n/**\n * Apply a system-wide search on emails:\n * - in tenant\n * - cross tenant\n * - accounts\n * return an array of emails that match the supplied emails.\n */\nexport async function searchExistingEmails(emails: string[]) {\n let matchedEmails: string[] = []\n\n const existingTenantUsers = await getExistingTenantUsers(emails)\n matchedEmails.push(...existingTenantUsers.map(user => user.email))\n\n const existingPlatformUsers = await getExistingPlatformUsers(emails)\n matchedEmails.push(...existingPlatformUsers.map(user => user._id!))\n\n const existingAccounts = await getExistingAccounts(emails)\n matchedEmails.push(...existingAccounts.map(account => account.email))\n\n const invitedEmails = await getExistingInvites(emails)\n matchedEmails.push(...invitedEmails.map(invite => invite.email))\n\n return [...new Set(matchedEmails.map(email => email.toLowerCase()))]\n}\n\n// lookup, could be email or userId, either will return a doc\nexport async function getPlatformUser(\n identifier: string\n): Promise<PlatformUser | null> {\n // use the view here and allow to find anyone regardless of casing\n // Use lowercase to ensure email login is case insensitive\n return (await dbUtils.queryPlatformView(ViewName.PLATFORM_USERS_LOWERCASE, {\n keys: [identifier.toLowerCase()],\n include_docs: true,\n })) as PlatformUser\n}\n\nexport async function getExistingTenantUsers(\n emails: string[]\n): Promise<User[]> {\n const lcEmails = emails.map(email => email.toLowerCase())\n const params = {\n keys: lcEmails,\n include_docs: true,\n }\n\n const opts = {\n arrayResponse: true,\n }\n\n return (await dbUtils.queryGlobalView(\n ViewName.USER_BY_EMAIL,\n params,\n undefined,\n opts\n )) as User[]\n}\n\nexport async function getExistingPlatformUsers(\n emails: string[]\n): Promise<PlatformUserByEmail[]> {\n const lcEmails = emails.map(email => email.toLowerCase())\n const params = {\n keys: lcEmails,\n include_docs: true,\n }\n\n const opts = {\n arrayResponse: true,\n }\n return (await dbUtils.queryPlatformView(\n ViewName.PLATFORM_USERS_LOWERCASE,\n params,\n opts\n )) as PlatformUserByEmail[]\n}\n\nexport 
async function getExistingAccounts(\n emails: string[]\n): Promise<AccountMetadata[]> {\n const lcEmails = emails.map(email => email.toLowerCase())\n const params = {\n keys: lcEmails,\n include_docs: true,\n }\n\n const opts = {\n arrayResponse: true,\n }\n\n return (await dbUtils.queryPlatformView(\n ViewName.ACCOUNT_BY_EMAIL,\n params,\n opts\n )) as AccountMetadata[]\n}\n", "import * as utils from \"../utils\"\nimport { Duration, DurationType } from \"../utils\"\nimport env from \"../environment\"\nimport { getTenantId } from \"../context\"\nimport * as redis from \"../redis/init\"\n\nconst TTL_SECONDS = Duration.fromDays(7).toSeconds()\n\ninterface Invite {\n email: string\n info: any\n}\n\ninterface InviteWithCode extends Invite {\n code: string\n}\n\n/**\n * Given an invite code and invite body, allow the update an existing/valid invite in redis\n * @param code The invite code for an invite in redis\n * @param value The body of the updated user invitation\n */\nexport async function updateCode(code: string, value: Invite) {\n const client = await redis.getInviteClient()\n await client.store(code, value, TTL_SECONDS)\n}\n\n/**\n * Generates an invitation code and writes it to redis - which can later be checked for user creation.\n * @param email the email address which the code is being sent to (for use later).\n * @param info Information to be carried along with the invitation.\n * @return returns the code that was stored to redis.\n */\nexport async function createCode(email: string, info: any): Promise<string> {\n const code = utils.newid()\n const client = await redis.getInviteClient()\n await client.store(code, { email, info }, TTL_SECONDS)\n return code\n}\n\n/**\n * Checks that the provided invite code is valid - will return the email address of user that was invited.\n * @param code the invite code that was provided as part of the link.\n * @return If the code is valid then an email address will be returned.\n */\nexport async function getCode(code: string): Promise<Invite> {\n const client = await redis.getInviteClient()\n const value = (await client.get(code)) as Invite | undefined\n if (!value) {\n throw \"Invitation is not valid or has expired, please request a new one.\"\n }\n return value\n}\n\nexport async function deleteCode(code: string) {\n const client = await redis.getInviteClient()\n await client.delete(code)\n}\n\n/**\n Get all currently available user invitations for the current tenant.\n **/\nexport async function getInviteCodes(): Promise<InviteWithCode[]> {\n const client = await redis.getInviteClient()\n const invites: { key: string; value: Invite }[] = await client.scan()\n\n const results: InviteWithCode[] = invites.map(invite => {\n return {\n ...invite.value,\n code: invite.key,\n }\n })\n if (!env.MULTI_TENANCY) {\n return results\n }\n const tenantId = getTenantId()\n return results.filter(invite => tenantId === invite.info.tenantId)\n}\n\nexport async function getExistingInvites(\n emails: string[]\n): Promise<InviteWithCode[]> {\n return (await getInviteCodes()).filter(invite =>\n emails.includes(invite.email)\n )\n}\n", "export * from \"./errors\"\n", "// BASE\n\nexport abstract class BudibaseError extends Error {\n code: string\n\n constructor(message: string, code: ErrorCode) {\n super(message)\n this.code = code\n }\n\n protected getPublicError?(): any\n}\n\n// ERROR HANDLING\n\nexport enum ErrorCode {\n USAGE_LIMIT_EXCEEDED = \"usage_limit_exceeded\",\n FEATURE_DISABLED = \"feature_disabled\",\n INVALID_API_KEY = \"invalid_api_key\",\n HTTP = 
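The invitation cache above stores `{ email, info }` under a random code in Redis for seven days. A sketch of the create/redeem flow, assuming the cache module re-export so that the invite helpers are reachable as `cache.invite`:

```ts
import { cache } from "@budibase/backend-core" // assumed re-export name

async function inviteUser(email: string, tenantId: string) {
  // createCode stores { email, info } under a fresh code with the 7 day TTL
  return cache.invite.createCode(email, { tenantId })
}

async function redeemInvite(code: string) {
  const invite = await cache.invite.getCode(code) // throws if expired or unknown
  await cache.invite.deleteCode(code)             // single use
  return invite // { email, info }
}
```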
\"http\",\n}\n\n/**\n * For the given error, build the public representation that is safe\n * to be exposed over an api.\n */\nexport const getPublicError = (err: any) => {\n let error\n if (err.code) {\n // add generic error information\n error = {\n code: err.code,\n }\n\n if (err.getPublicError) {\n error = {\n ...error,\n // get any additional context from this error\n ...err.getPublicError(),\n }\n }\n }\n\n return error\n}\n\n// HTTP\n\nexport class HTTPError extends BudibaseError {\n status: number\n\n constructor(message: string, httpStatus: number, code = ErrorCode.HTTP) {\n super(message, code)\n this.status = httpStatus\n }\n}\n\nexport class NotFoundError extends HTTPError {\n constructor(message: string) {\n super(message, 404)\n }\n}\n\nexport class BadRequestError extends HTTPError {\n constructor(message: string) {\n super(message, 400)\n }\n}\n\n// LICENSING\n\nexport class UsageLimitError extends HTTPError {\n limitName: string\n\n constructor(message: string, limitName: string) {\n super(message, 400, ErrorCode.USAGE_LIMIT_EXCEEDED)\n this.limitName = limitName\n }\n\n getPublicError() {\n return {\n limitName: this.limitName,\n }\n }\n}\n\nexport class FeatureDisabledError extends HTTPError {\n featureName: string\n\n constructor(message: string, featureName: string) {\n super(message, 400, ErrorCode.FEATURE_DISABLED)\n this.featureName = featureName\n }\n\n getPublicError() {\n return {\n featureName: this.featureName,\n }\n }\n}\n\n// AUTH\n\nexport class InvalidAPIKeyError extends BudibaseError {\n constructor() {\n super(\n \"Invalid API key - may need re-generated, or user doesn't exist\",\n ErrorCode.INVALID_API_KEY\n )\n }\n}\n\n// USERS\n\nexport class EmailUnavailableError extends Error {\n constructor(email: string) {\n super(`Email already in use: '${email}'`)\n }\n}\n", "import env from \"../environment\"\nimport * as eventHelpers from \"./events\"\nimport * as accountSdk from \"../accounts\"\nimport * as cache from \"../cache\"\nimport { getGlobalDB, getIdentity, getTenantId } from \"../context\"\nimport * as dbUtils from \"../db\"\nimport { EmailUnavailableError, HTTPError } from \"../errors\"\nimport * as platform from \"../platform\"\nimport * as sessions from \"../security/sessions\"\nimport * as usersCore from \"./users\"\nimport {\n Account,\n BulkUserCreated,\n BulkUserDeleted,\n isSSOAccount,\n isSSOUser,\n SaveUserOpts,\n User,\n UserStatus,\n UserGroup,\n} from \"@budibase/types\"\nimport {\n getAccountHolderFromUserIds,\n isAdmin,\n isCreator,\n validateUniqueUser,\n} from \"./utils\"\nimport { searchExistingEmails } from \"./lookup\"\nimport { hash } from \"../utils\"\nimport { validatePassword } from \"../security\"\n\ntype QuotaUpdateFn = (\n change: number,\n creatorsChange: number,\n cb?: () => Promise<any>\n) => Promise<any>\ntype GroupUpdateFn = (groupId: string, userIds: string[]) => Promise<any>\ntype FeatureFn = () => Promise<Boolean>\ntype GroupGetFn = (ids: string[]) => Promise<UserGroup[]>\ntype GroupBuildersFn = (user: User) => Promise<string[]>\ntype QuotaFns = { addUsers: QuotaUpdateFn; removeUsers: QuotaUpdateFn }\ntype GroupFns = {\n addUsers: GroupUpdateFn\n getBulk: GroupGetFn\n getGroupBuilderAppIds: GroupBuildersFn\n}\ntype CreateAdminUserOpts = {\n ssoId?: string\n hashPassword?: boolean\n requirePassword?: boolean\n skipPasswordValidation?: boolean\n}\ntype FeatureFns = { isSSOEnforced: FeatureFn; isAppBuildersEnabled: FeatureFn }\n\nconst bulkDeleteProcessing = async (dbUser: User) => {\n const userId = dbUser._id as 
string\n await platform.users.removeUser(dbUser)\n await eventHelpers.handleDeleteEvents(dbUser)\n await cache.user.invalidateUser(userId)\n await sessions.invalidateSessions(userId, { reason: \"bulk-deletion\" })\n}\n\nexport class UserDB {\n static quotas: QuotaFns\n static groups: GroupFns\n static features: FeatureFns\n\n static init(quotaFns: QuotaFns, groupFns: GroupFns, featureFns: FeatureFns) {\n UserDB.quotas = quotaFns\n UserDB.groups = groupFns\n UserDB.features = featureFns\n }\n\n static async isPreventPasswordActions(user: User, account?: Account) {\n // when in maintenance mode we allow sso users with the admin role\n // to perform any password action - this prevents lockout\n if (env.ENABLE_SSO_MAINTENANCE_MODE && isAdmin(user)) {\n return false\n }\n\n // SSO is enforced for all users\n if (await UserDB.features.isSSOEnforced()) {\n return true\n }\n\n // Check local sso\n if (isSSOUser(user)) {\n return true\n }\n\n // Check account sso\n if (!account) {\n account = await accountSdk.getAccountByTenantId(getTenantId())\n }\n return !!(account && account.email === user.email && isSSOAccount(account))\n }\n\n static async buildUser(\n user: User,\n opts: SaveUserOpts = {\n hashPassword: true,\n requirePassword: true,\n },\n tenantId: string,\n dbUser?: any,\n account?: Account\n ): Promise<User> {\n let { password, _id } = user\n\n // don't require a password if the db user doesn't already have one\n if (dbUser && !dbUser.password) {\n opts.requirePassword = false\n }\n\n let hashedPassword\n if (password) {\n if (await UserDB.isPreventPasswordActions(user, account)) {\n throw new HTTPError(\"Password change is disabled for this user\", 400)\n }\n\n if (!opts.skipPasswordValidation) {\n const passwordValidation = validatePassword(password)\n if (!passwordValidation.valid) {\n throw new HTTPError(passwordValidation.error, 400)\n }\n }\n\n hashedPassword = opts.hashPassword ? 
await hash(password) : password\n } else if (dbUser) {\n hashedPassword = dbUser.password\n }\n\n // passwords are never required if sso is enforced\n const requirePasswords =\n opts.requirePassword && !(await UserDB.features.isSSOEnforced())\n if (!hashedPassword && requirePasswords) {\n throw \"Password must be specified.\"\n }\n\n _id = _id || dbUtils.generateGlobalUserID()\n\n const fullUser = {\n createdAt: Date.now(),\n ...dbUser,\n ...user,\n _id,\n password: hashedPassword,\n tenantId,\n }\n // make sure the roles object is always present\n if (!fullUser.roles) {\n fullUser.roles = {}\n }\n // add the active status to a user if it's not provided\n if (fullUser.status == null) {\n fullUser.status = UserStatus.ACTIVE\n }\n\n return fullUser\n }\n\n static async allUsers() {\n const db = getGlobalDB()\n const response = await db.allDocs<User>(\n dbUtils.getGlobalUserParams(null, {\n include_docs: true,\n })\n )\n return response.rows.map(row => row.doc!)\n }\n\n static async countUsersByApp(appId: string) {\n let response: any = await usersCore.searchGlobalUsersByApp(appId, {})\n return {\n userCount: response.length,\n }\n }\n\n static async getUsersByAppAccess(opts: { appId?: string; limit?: number }) {\n let response: User[] = await usersCore.searchGlobalUsersByAppAccess(\n opts.appId,\n { limit: opts.limit || 50 }\n )\n return response\n }\n\n static async getUserByEmail(email: string) {\n return usersCore.getGlobalUserByEmail(email)\n }\n\n /**\n * Gets a user by ID from the global database, based on the current tenancy.\n */\n static async getUser(userId: string) {\n const user = await usersCore.getById(userId)\n if (user) {\n delete user.password\n }\n return user\n }\n\n static async bulkGet(userIds: string[]) {\n return await usersCore.bulkGetGlobalUsersById(userIds)\n }\n\n static async bulkUpdate(users: User[]) {\n return await usersCore.bulkUpdateGlobalUsers(users)\n }\n\n static async save(user: User, opts: SaveUserOpts = {}): Promise<User> {\n // default booleans to true\n if (opts.hashPassword == null) {\n opts.hashPassword = true\n }\n if (opts.requirePassword == null) {\n opts.requirePassword = true\n }\n const tenantId = getTenantId()\n const db = getGlobalDB()\n\n let { email, _id, userGroups = [], roles } = user\n\n if (!email && !_id) {\n throw new Error(\"_id or email is required\")\n }\n\n let dbUser: User | undefined\n if (_id) {\n // try to get existing user from db\n try {\n dbUser = (await db.get(_id)) as User\n if (email && dbUser.email !== email) {\n throw \"Email address cannot be changed\"\n }\n email = dbUser.email\n } catch (e: any) {\n if (e.status === 404) {\n // do nothing, save this new user with the id specified - required for SSO auth\n } else {\n throw e\n }\n }\n }\n\n if (!dbUser && email) {\n // no id was specified - load from email instead\n dbUser = await usersCore.getGlobalUserByEmail(email)\n if (dbUser && dbUser._id !== _id) {\n throw new EmailUnavailableError(email)\n }\n }\n\n const change = dbUser ? 0 : 1 // no change if there is existing user\n const creatorsChange = isCreator(dbUser) !== isCreator(user) ? 
1 : 0\n return UserDB.quotas.addUsers(change, creatorsChange, async () => {\n await validateUniqueUser(email, tenantId)\n\n let builtUser = await UserDB.buildUser(user, opts, tenantId, dbUser)\n // don't allow a user to update its own roles/perms\n if (opts.currentUserId && opts.currentUserId === dbUser?._id) {\n builtUser = usersCore.cleanseUserObject(builtUser, dbUser) as User\n }\n\n if (!dbUser && roles?.length) {\n builtUser.roles = { ...roles }\n }\n\n // make sure we set the _id field for a new user\n // Also if this is a new user, associate groups with them\n let groupPromises = []\n if (!_id) {\n _id = builtUser._id!\n\n if (userGroups.length > 0) {\n for (let groupId of userGroups) {\n groupPromises.push(UserDB.groups.addUsers(groupId, [_id!]))\n }\n }\n }\n\n try {\n // save the user to db\n let response = await db.put(builtUser)\n builtUser._rev = response.rev\n\n await eventHelpers.handleSaveEvents(builtUser, dbUser)\n await platform.users.addUser(\n tenantId,\n builtUser._id!,\n builtUser.email,\n builtUser.ssoId\n )\n await cache.user.invalidateUser(response.id)\n\n await Promise.all(groupPromises)\n\n // finally returned the saved user from the db\n return db.get(builtUser._id!)\n } catch (err: any) {\n if (err.status === 409) {\n throw \"User exists already\"\n } else {\n throw err\n }\n }\n })\n }\n\n static async bulkCreate(\n newUsersRequested: User[],\n groups?: string[]\n ): Promise<BulkUserCreated> {\n const tenantId = getTenantId()\n\n let usersToSave: any[] = []\n let newUsers: any[] = []\n let newCreators: any[] = []\n\n const emails = newUsersRequested.map((user: User) => user.email)\n const existingEmails = await searchExistingEmails(emails)\n const unsuccessful: { email: string; reason: string }[] = []\n\n for (const newUser of newUsersRequested) {\n if (\n newUsers.find(\n (x: User) => x.email.toLowerCase() === newUser.email.toLowerCase()\n ) ||\n existingEmails.includes(newUser.email.toLowerCase())\n ) {\n unsuccessful.push({\n email: newUser.email,\n reason: `Unavailable`,\n })\n continue\n }\n newUser.userGroups = groups || []\n newUsers.push(newUser)\n if (isCreator(newUser)) {\n newCreators.push(newUser)\n }\n }\n\n const account = await accountSdk.getAccountByTenantId(tenantId)\n return UserDB.quotas.addUsers(\n newUsers.length,\n newCreators.length,\n async () => {\n // create the promises array that will be called by bulkDocs\n newUsers.forEach((user: any) => {\n usersToSave.push(\n UserDB.buildUser(\n user,\n {\n hashPassword: true,\n requirePassword: user.requirePassword,\n },\n tenantId,\n undefined, // no dbUser\n account\n )\n )\n })\n\n const usersToBulkSave = await Promise.all(usersToSave)\n await usersCore.bulkUpdateGlobalUsers(usersToBulkSave)\n\n // Post-processing of bulk added users, e.g. 
events and cache operations\n for (const user of usersToBulkSave) {\n // TODO: Refactor to bulk insert users into the info db\n // instead of relying on looping tenant creation\n await platform.users.addUser(tenantId, user._id, user.email)\n await eventHelpers.handleSaveEvents(user, undefined)\n }\n\n const saved = usersToBulkSave.map(user => {\n return {\n _id: user._id,\n email: user.email,\n }\n })\n\n // now update the groups\n if (Array.isArray(saved) && groups) {\n const groupPromises = []\n const createdUserIds = saved.map(user => user._id)\n for (let groupId of groups) {\n groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds))\n }\n await Promise.all(groupPromises)\n }\n\n return {\n successful: saved,\n unsuccessful,\n }\n }\n )\n }\n\n static async bulkDelete(userIds: string[]): Promise<BulkUserDeleted> {\n const db = getGlobalDB()\n\n const response: BulkUserDeleted = {\n successful: [],\n unsuccessful: [],\n }\n\n // remove the account holder from the delete request if present\n const account = await getAccountHolderFromUserIds(userIds)\n if (account) {\n userIds = userIds.filter(u => u !== account.budibaseUserId)\n // mark user as unsuccessful\n response.unsuccessful.push({\n _id: account.budibaseUserId,\n email: account.email,\n reason: \"Account holder cannot be deleted\",\n })\n }\n\n // Get users and delete\n const allDocsResponse = await db.allDocs<User>({\n include_docs: true,\n keys: userIds,\n })\n const usersToDelete = allDocsResponse.rows.map(user => {\n return user.doc!\n })\n\n // Delete from DB\n const toDelete = usersToDelete.map(user => ({\n ...user,\n _deleted: true,\n }))\n const dbResponse = await usersCore.bulkUpdateGlobalUsers(toDelete)\n const creatorsToDelete = usersToDelete.filter(isCreator)\n\n for (let user of usersToDelete) {\n await bulkDeleteProcessing(user)\n }\n await UserDB.quotas.removeUsers(toDelete.length, creatorsToDelete.length)\n\n // Build Response\n // index users by id\n const userIndex: { [key: string]: User } = {}\n usersToDelete.reduce((prev, current) => {\n prev[current._id!] = current\n return prev\n }, userIndex)\n\n // add the successful and unsuccessful users to response\n dbResponse.forEach(item => {\n const email = userIndex[item.id].email\n if (item.ok) {\n response.successful.push({ _id: item.id, email })\n } else {\n response.unsuccessful.push({\n _id: item.id,\n email,\n reason: \"Database error\",\n })\n }\n })\n\n return response\n }\n\n static async destroy(id: string) {\n const db = getGlobalDB()\n const dbUser = (await db.get(id)) as User\n const userId = dbUser._id as string\n\n if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {\n // root account holder can't be deleted from inside budibase\n const email = dbUser.email\n const account = await accountSdk.getAccount(email)\n if (account) {\n if (dbUser.userId === getIdentity()!._id) {\n throw new HTTPError('Please visit \"Account\" to delete this user', 400)\n } else {\n throw new HTTPError(\"Account holder cannot be deleted\", 400)\n }\n }\n }\n\n await platform.users.removeUser(dbUser)\n\n await db.remove(userId, dbUser._rev)\n\n const creatorsToDelete = isCreator(dbUser) ? 
1 : 0\n await UserDB.quotas.removeUsers(1, creatorsToDelete)\n await eventHelpers.handleDeleteEvents(dbUser)\n await cache.user.invalidateUser(userId)\n await sessions.invalidateSessions(userId, { reason: \"deletion\" })\n }\n\n static async createAdminUser(\n email: string,\n password: string,\n tenantId: string,\n opts?: CreateAdminUserOpts\n ) {\n const user: User = {\n email: email,\n password: password,\n createdAt: Date.now(),\n roles: {},\n builder: {\n global: true,\n },\n admin: {\n global: true,\n },\n tenantId,\n }\n if (opts?.ssoId) {\n user.ssoId = opts.ssoId\n }\n // always bust checklist beforehand, if an error occurs but can proceed, don't get\n // stuck in a cycle\n await cache.bustCache(cache.CacheKey.CHECKLIST)\n return await UserDB.save(user, {\n hashPassword: opts?.hashPassword,\n requirePassword: opts?.requirePassword,\n skipPasswordValidation: opts?.skipPasswordValidation,\n })\n }\n\n static async getGroups(groupIds: string[]) {\n return await this.groups.getBulk(groupIds)\n }\n\n static async getGroupBuilderAppIds(user: User) {\n return await this.groups.getGroupBuilderAppIds(user)\n }\n}\n", "import env from \"../environment\"\nimport * as events from \"../events\"\nimport * as accounts from \"../accounts\"\nimport { getTenantId } from \"../context\"\nimport { User, UserRoles, CloudAccount } from \"@budibase/types\"\nimport { hasBuilderPermissions, hasAdminPermissions } from \"./utils\"\n\nexport const handleDeleteEvents = async (user: any) => {\n await events.user.deleted(user)\n\n if (hasBuilderPermissions(user)) {\n await events.user.permissionBuilderRemoved(user)\n }\n\n if (hasAdminPermissions(user)) {\n await events.user.permissionAdminRemoved(user)\n }\n}\n\nconst assignAppRoleEvents = async (\n user: User,\n roles: UserRoles,\n existingRoles: UserRoles\n) => {\n for (const [appId, role] of Object.entries(roles)) {\n // app role in existing is not same as new\n if (!existingRoles || existingRoles[appId] !== role) {\n await events.role.assigned(user, role)\n }\n }\n}\n\nconst unassignAppRoleEvents = async (\n user: User,\n roles: UserRoles,\n existingRoles: UserRoles\n) => {\n if (!existingRoles) {\n return\n }\n for (const [appId, role] of Object.entries(existingRoles)) {\n // app role in new is not same as existing\n if (!roles || roles[appId] !== role) {\n await events.role.unassigned(user, role)\n }\n }\n}\n\nconst handleAppRoleEvents = async (user: any, existingUser: any) => {\n const roles = user.roles\n const existingRoles = existingUser?.roles\n\n await assignAppRoleEvents(user, roles, existingRoles)\n await unassignAppRoleEvents(user, roles, existingRoles)\n}\n\nexport const handleSaveEvents = async (\n user: User,\n existingUser: User | undefined\n) => {\n const tenantId = getTenantId()\n let tenantAccount: CloudAccount | undefined\n if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {\n tenantAccount = await accounts.getAccountByTenantId(tenantId)\n }\n await events.identification.identifyUser(user, tenantAccount)\n\n if (existingUser) {\n await events.user.updated(user)\n\n if (isRemovingBuilder(user, existingUser)) {\n await events.user.permissionBuilderRemoved(user)\n }\n\n if (isRemovingAdmin(user, existingUser)) {\n await events.user.permissionAdminRemoved(user)\n }\n\n if (isOnboardingComplete(user, existingUser)) {\n await events.user.onboardingComplete(user)\n }\n\n if (\n !existingUser.forceResetPassword &&\n user.forceResetPassword &&\n user.password\n ) {\n await events.user.passwordForceReset(user)\n }\n\n if (user.password !== 
existingUser.password) {\n await events.user.passwordUpdated(user)\n }\n } else {\n await events.user.created(user)\n }\n\n if (isAddingBuilder(user, existingUser)) {\n await events.user.permissionBuilderAssigned(user)\n }\n\n if (isAddingAdmin(user, existingUser)) {\n await events.user.permissionAdminAssigned(user)\n }\n\n await handleAppRoleEvents(user, existingUser)\n}\n\nexport const isAddingBuilder = (user: any, existingUser: any) => {\n return isAddingPermission(user, existingUser, hasBuilderPermissions)\n}\n\nexport const isRemovingBuilder = (user: any, existingUser: any) => {\n return isRemovingPermission(user, existingUser, hasBuilderPermissions)\n}\n\nconst isAddingAdmin = (user: any, existingUser: any) => {\n return isAddingPermission(user, existingUser, hasAdminPermissions)\n}\n\nconst isRemovingAdmin = (user: any, existingUser: any) => {\n return isRemovingPermission(user, existingUser, hasAdminPermissions)\n}\n\nconst isOnboardingComplete = (user: any, existingUser: any) => {\n return !existingUser?.onboardedAt && typeof user.onboardedAt === \"string\"\n}\n\n/**\n * Check if a permission is being added to a new or existing user.\n */\nconst isAddingPermission = (\n user: any,\n existingUser: any,\n hasPermission: any\n) => {\n // new user doesn't have the permission\n if (!hasPermission(user)) {\n return false\n }\n\n // existing user has the permission\n if (existingUser && hasPermission(existingUser)) {\n return false\n }\n\n // permission is being added\n return true\n}\n\n/**\n * Check if a permission is being removed from an existing user.\n */\nconst isRemovingPermission = (\n user: any,\n existingUser: any,\n hasPermission: any\n) => {\n // new user has the permission\n if (hasPermission(user)) {\n return false\n }\n\n // no existing user or existing user doesn't have the permission\n if (!existingUser) {\n return false\n }\n\n // existing user doesn't have the permission\n if (!hasPermission(existingUser)) {\n return false\n }\n\n // permission is being removed\n return true\n}\n", "export * from \"./publishers\"\nexport * as processors from \"./processors\"\nexport * as analytics from \"./analytics\"\nexport { default as identification } from \"./identification\"\nexport * as backfillCache from \"./backfill\"\n\nimport { processors } from \"./processors\"\n\nexport function initAsyncEvents() {}\n\nexport const shutdown = () => {\n processors.shutdown()\n console.log(\"Events shutdown\")\n}\n", "import AnalyticsProcessor from \"./AnalyticsProcessor\"\nimport LoggingProcessor from \"./LoggingProcessor\"\nimport AuditLogsProcessor from \"./AuditLogsProcessor\"\nimport Processors from \"./Processors\"\nimport { AuditLogFn } from \"@budibase/types\"\n\nexport const analyticsProcessor = new AnalyticsProcessor()\nconst loggingProcessor = new LoggingProcessor()\nconst auditLogsProcessor = new AuditLogsProcessor()\n\nexport function init(auditingFn: AuditLogFn) {\n return AuditLogsProcessor.init(auditingFn)\n}\n\nexport const processors = new Processors([\n analyticsProcessor,\n loggingProcessor,\n auditLogsProcessor,\n])\n", "import { Event, Identity, Group, IdentityType } from \"@budibase/types\"\nimport { EventProcessor } from \"./types\"\nimport env from \"../../environment\"\nimport * as analytics from \"../analytics\"\nimport PosthogProcessor from \"./posthog\"\n\n/**\n * Events that are always captured.\n */\nconst EVENT_WHITELIST = [\n Event.INSTALLATION_VERSION_UPGRADED,\n Event.INSTALLATION_VERSION_DOWNGRADED,\n]\nconst IDENTITY_WHITELIST = 
[IdentityType.INSTALLATION, IdentityType.TENANT]\n\nexport default class AnalyticsProcessor implements EventProcessor {\n posthog: PosthogProcessor | undefined\n\n constructor() {\n if (env.POSTHOG_TOKEN && !env.isTest()) {\n this.posthog = new PosthogProcessor(env.POSTHOG_TOKEN)\n }\n }\n\n async processEvent(\n event: Event,\n identity: Identity,\n properties: any,\n timestamp?: string | number\n ): Promise<void> {\n if (!EVENT_WHITELIST.includes(event) && !(await analytics.enabled())) {\n return\n }\n if (this.posthog) {\n await this.posthog.processEvent(event, identity, properties, timestamp)\n }\n }\n\n async identify(identity: Identity, timestamp?: string | number) {\n // Group indentifications (tenant and installation) always on\n if (\n !IDENTITY_WHITELIST.includes(identity.type) &&\n !(await analytics.enabled())\n ) {\n return\n }\n if (this.posthog) {\n await this.posthog.identify(identity, timestamp)\n }\n }\n\n async identifyGroup(group: Group, timestamp?: string | number) {\n // Group indentifications (tenant and installation) always on\n if (this.posthog) {\n await this.posthog.identifyGroup(group, timestamp)\n }\n }\n\n shutdown() {\n if (this.posthog) {\n this.posthog.shutdown()\n }\n }\n}\n", "import * as configs from \"../configs\"\n\n// wrapper utility function\nexport const enabled = async () => {\n return configs.analyticsEnabled()\n}\n", "import PostHog from \"posthog-node\"\nimport { Event, Identity, Group, BaseEvent } from \"@budibase/types\"\nimport { EventProcessor } from \"../types\"\nimport env from \"../../../environment\"\nimport * as context from \"../../../context\"\nimport * as rateLimiting from \"./rateLimiting\"\n\nconst EXCLUDED_EVENTS: Event[] = [\n Event.USER_UPDATED,\n Event.EMAIL_SMTP_UPDATED,\n Event.AUTH_SSO_UPDATED,\n Event.APP_UPDATED,\n Event.ROLE_UPDATED,\n Event.DATASOURCE_UPDATED,\n Event.QUERY_UPDATED,\n Event.TABLE_UPDATED,\n Event.VIEW_UPDATED,\n Event.VIEW_FILTER_UPDATED,\n Event.VIEW_CALCULATION_UPDATED,\n Event.AUTOMATION_TRIGGER_UPDATED,\n Event.USER_GROUP_UPDATED,\n]\n\nexport default class PosthogProcessor implements EventProcessor {\n posthog: PostHog\n\n constructor(token: string | undefined) {\n if (!token) {\n throw new Error(\"Posthog token is not defined\")\n }\n this.posthog = new PostHog(token)\n }\n\n async processEvent(\n event: Event,\n identity: Identity,\n properties: BaseEvent,\n timestamp?: string | number\n ): Promise<void> {\n // don't send excluded events\n if (EXCLUDED_EVENTS.includes(event)) {\n return\n }\n\n if (await rateLimiting.limited(event)) {\n return\n }\n\n properties = this.clearPIIProperties(properties)\n\n properties.version = env.VERSION\n properties.service = env.SERVICE\n properties.environment = identity.environment\n properties.hosting = identity.hosting\n\n const appId = context.getAppId()\n if (appId) {\n properties.appId = appId\n }\n\n const payload: any = { distinctId: identity.id, event, properties }\n\n if (timestamp) {\n payload.timestamp = new Date(timestamp)\n }\n\n // add groups to the event\n if (identity.installationId || identity.tenantId) {\n payload.groups = {}\n if (identity.installationId) {\n payload.groups.installation = identity.installationId\n payload.properties.installationId = identity.installationId\n }\n if (identity.tenantId) {\n payload.groups.tenant = identity.tenantId\n payload.properties.tenantId = identity.tenantId\n }\n }\n\n this.posthog.capture(payload)\n }\n\n clearPIIProperties(properties: any) {\n if (properties.email) {\n delete properties.email\n }\n if 
(properties.audited) {\n delete properties.audited\n }\n return properties\n }\n\n async identify(identity: Identity, timestamp?: string | number) {\n const payload: any = { distinctId: identity.id, properties: identity }\n if (timestamp) {\n payload.timestamp = new Date(timestamp)\n }\n this.posthog.identify(payload)\n }\n\n async identifyGroup(group: Group, timestamp?: string | number) {\n const payload: any = {\n distinctId: group.id,\n groupType: group.type,\n groupKey: group.id,\n properties: group,\n }\n\n if (timestamp) {\n payload.timestamp = new Date(timestamp)\n }\n this.posthog.groupIdentify(payload)\n }\n\n shutdown() {\n this.posthog.shutdown()\n }\n}\n", "import { Event } from \"@budibase/types\"\nimport { CacheKey, TTL } from \"../../../cache/generic\"\nimport * as cache from \"../../../cache/generic\"\nimport * as context from \"../../../context\"\n\ntype RateLimitedEvent =\n | Event.SERVED_BUILDER\n | Event.SERVED_APP_PREVIEW\n | Event.SERVED_APP\n\nconst isRateLimited = (event: Event): event is RateLimitedEvent => {\n return (\n event === Event.SERVED_BUILDER ||\n event === Event.SERVED_APP_PREVIEW ||\n event === Event.SERVED_APP\n )\n}\n\nconst isPerApp = (event: RateLimitedEvent) => {\n return event === Event.SERVED_APP_PREVIEW || event === Event.SERVED_APP\n}\n\ninterface EventProperties {\n timestamp: number\n}\n\nenum RateLimit {\n CALENDAR_DAY = \"calendarDay\",\n}\n\nconst RATE_LIMITS = {\n [Event.SERVED_APP]: RateLimit.CALENDAR_DAY,\n [Event.SERVED_APP_PREVIEW]: RateLimit.CALENDAR_DAY,\n [Event.SERVED_BUILDER]: RateLimit.CALENDAR_DAY,\n}\n\n/**\n * Check if this event should be sent right now\n * Return false to signal the event SHOULD be sent\n * Return true to signal the event should NOT be sent\n */\nexport const limited = async (event: Event): Promise<boolean> => {\n // not a rate limited event -- send\n if (!isRateLimited(event)) {\n return false\n }\n\n const cachedEvent = await readEvent(event)\n if (cachedEvent) {\n const timestamp = new Date(cachedEvent.timestamp)\n const limit = RATE_LIMITS[event]\n switch (limit) {\n case RateLimit.CALENDAR_DAY: {\n // get midnight at the start of the next day for the timestamp\n timestamp.setDate(timestamp.getDate() + 1)\n timestamp.setHours(0, 0, 0, 0)\n\n // if we have passed the threshold into the next day\n if (Date.now() > timestamp.getTime()) {\n // update the timestamp in the event -- send\n await recordEvent(event, { timestamp: Date.now() })\n return false\n } else {\n // still within the limited period -- don't send\n return true\n }\n }\n }\n } else {\n // no event present i.e. 
expired -- send\n await recordEvent(event, { timestamp: Date.now() })\n return false\n }\n}\n\nconst eventKey = (event: RateLimitedEvent) => {\n let key = `${CacheKey.EVENTS_RATE_LIMIT}:${event}`\n if (isPerApp(event)) {\n key = key + \":\" + context.getAppId()\n }\n return key\n}\n\nconst readEvent = async (\n event: RateLimitedEvent\n): Promise<EventProperties | undefined> => {\n const key = eventKey(event)\n const result = await cache.get(key)\n return result as EventProperties\n}\n\nconst recordEvent = async (\n event: RateLimitedEvent,\n properties: EventProperties\n) => {\n const key = eventKey(event)\n const limit = RATE_LIMITS[event]\n let ttl\n switch (limit) {\n case RateLimit.CALENDAR_DAY: {\n ttl = TTL.ONE_DAY\n }\n }\n\n await cache.store(key, properties, ttl)\n}\n", "import PosthogProcessor from \"./PosthogProcessor\"\n\nexport default PosthogProcessor\n", "import { Event, Identity, Group } from \"@budibase/types\"\nimport { EventProcessor } from \"./types\"\nimport env from \"../../environment\"\n\nconst skipLogging = env.SELF_HOSTED && !env.isDev()\n\nexport default class LoggingProcessor implements EventProcessor {\n async processEvent(\n event: Event,\n identity: Identity,\n properties: any,\n timestamp?: string\n ): Promise<void> {\n if (skipLogging) {\n return\n }\n console.log(`[audit] [identityType=${identity.type}] ${event}`, properties)\n }\n\n async identify(identity: Identity, timestamp?: string | number) {\n if (skipLogging) {\n return\n }\n console.log(`[audit] identified`, identity)\n }\n\n async identifyGroup(group: Group, timestamp?: string | number) {\n if (skipLogging) {\n return\n }\n console.log(`[audit] group identified`, group)\n }\n\n shutdown(): void {\n // no-op\n }\n}\n", "import {\n Event,\n Identity,\n Group,\n IdentityType,\n AuditLogQueueEvent,\n AuditLogFn,\n HostInfo,\n} from \"@budibase/types\"\nimport { EventProcessor } from \"./types\"\nimport { getAppId, doInTenant, getTenantId } from \"../../context\"\nimport BullQueue from \"bull\"\nimport { createQueue, JobQueue } from \"../../queue\"\nimport { isAudited } from \"../../utils\"\nimport env from \"../../environment\"\n\nexport default class AuditLogsProcessor implements EventProcessor {\n static auditLogsEnabled = false\n static auditLogQueue: BullQueue.Queue<AuditLogQueueEvent>\n\n // can't use constructor as need to return promise\n static init(fn: AuditLogFn) {\n AuditLogsProcessor.auditLogsEnabled = true\n const writeAuditLogs = fn\n AuditLogsProcessor.auditLogQueue = createQueue<AuditLogQueueEvent>(\n JobQueue.AUDIT_LOG\n )\n return AuditLogsProcessor.auditLogQueue.process(async job => {\n return doInTenant(job.data.tenantId, async () => {\n let properties = job.data.properties\n if (properties.audited) {\n properties = {\n ...properties,\n ...properties.audited,\n }\n delete properties.audited\n }\n\n // this feature is disabled by default due to privacy requirements\n // in some countries - available as env var in-case it is desired\n // in self host deployments\n let hostInfo: HostInfo | undefined = {}\n if (env.ENABLE_AUDIT_LOG_IP_ADDR) {\n hostInfo = job.data.opts.hostInfo\n }\n\n await writeAuditLogs(job.data.event, properties, {\n userId: job.data.opts.userId,\n timestamp: job.data.opts.timestamp,\n appId: job.data.opts.appId,\n hostInfo,\n })\n })\n })\n }\n\n async processEvent(\n event: Event,\n identity: Identity,\n properties: any,\n timestamp?: string\n ): Promise<void> {\n if (AuditLogsProcessor.auditLogsEnabled && isAudited(event)) {\n // only audit log actual events, 
don't include backfills\n const userId =\n identity.type === IdentityType.USER ? identity.id : undefined\n // add to the event queue, rather than just writing immediately\n await AuditLogsProcessor.auditLogQueue.add({\n event,\n properties,\n opts: {\n userId,\n timestamp,\n appId: getAppId(),\n hostInfo: identity.hostInfo,\n },\n tenantId: getTenantId(),\n })\n }\n }\n\n async identify(identity: Identity, timestamp?: string | number) {\n // no-op\n }\n\n async identifyGroup(group: Group, timestamp?: string | number) {\n // no-op\n }\n\n shutdown(): void {\n AuditLogsProcessor.auditLogQueue?.close()\n }\n}\n", "export * from \"./queue\"\nexport * from \"./constants\"\n", "import env from \"../environment\"\nimport { getRedisOptions } from \"../redis/utils\"\nimport { JobQueue } from \"./constants\"\nimport InMemoryQueue from \"./inMemoryQueue\"\nimport BullQueue, { QueueOptions } from \"bull\"\nimport { addListeners, StalledFn } from \"./listeners\"\nimport { Duration } from \"../utils\"\nimport * as timers from \"../timers\"\n\n// the queue lock is held for 5 minutes\nconst QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs()\n// queue lock is refreshed every 30 seconds\nconst QUEUE_LOCK_RENEW_INTERNAL_MS = Duration.fromSeconds(30).toMs()\n// cleanup the queue every 60 seconds\nconst CLEANUP_PERIOD_MS = Duration.fromSeconds(60).toMs()\nlet QUEUES: BullQueue.Queue[] | InMemoryQueue[] = []\nlet cleanupInterval: NodeJS.Timeout\n\nasync function cleanup() {\n for (let queue of QUEUES) {\n await queue.clean(CLEANUP_PERIOD_MS, \"completed\")\n }\n}\n\nexport function createQueue<T>(\n jobQueue: JobQueue,\n opts: { removeStalledCb?: StalledFn } = {}\n): BullQueue.Queue<T> {\n const redisOpts = getRedisOptions()\n const queueConfig: QueueOptions = {\n redis: redisOpts,\n settings: {\n maxStalledCount: 0,\n lockDuration: QUEUE_LOCK_MS,\n lockRenewTime: QUEUE_LOCK_RENEW_INTERNAL_MS,\n },\n }\n let queue: any\n if (!env.isTest()) {\n queue = new BullQueue(jobQueue, queueConfig)\n } else {\n queue = new InMemoryQueue(jobQueue, queueConfig)\n }\n addListeners(queue, jobQueue, opts?.removeStalledCb)\n QUEUES.push(queue)\n if (!cleanupInterval && !env.isTest()) {\n cleanupInterval = timers.set(cleanup, CLEANUP_PERIOD_MS)\n // fire off an initial cleanup\n cleanup().catch(err => {\n console.error(`Unable to cleanup ${jobQueue} initially - ${err}`)\n })\n }\n return queue\n}\n\nexport async function shutdown() {\n if (cleanupInterval) {\n timers.clear(cleanupInterval)\n }\n if (QUEUES.length) {\n for (let queue of QUEUES) {\n await queue.close()\n }\n QUEUES = []\n }\n console.log(\"Queues shutdown\")\n}\n", "import events from \"events\"\nimport { timeout } from \"../utils\"\n\n/**\n * Bull works with a Job wrapper around all messages that contains a lot more information about\n * the state of the message, this object constructor implements the same schema of Bull jobs\n * for the sake of maintaining API consistency.\n * @param queue The name of the queue which the message will be carried on.\n * @param message The JSON message which will be passed back to the consumer.\n * @returns A new job which can now be put onto the queue, this is mostly an\n * internal structure so that an in memory queue can be easily swapped for a Bull queue.\n */\nfunction newJob(queue: string, message: any) {\n return {\n timestamp: Date.now(),\n queue: queue,\n data: message,\n opts: {},\n }\n}\n\n/**\n * This is designed to replicate Bull (https://github.com/OptimalBits/bull) in memory as a sort of mock.\n * It is relatively 
simple, using an event emitter internally to register when messages are available\n * to the consumers - in can support many inputs and many consumers.\n */\nclass InMemoryQueue {\n _name: string\n _opts?: any\n _messages: any[]\n _emitter: EventEmitter\n _runCount: number\n _addCount: number\n /**\n * The constructor the queue, exactly the same as that of Bulls.\n * @param name The name of the queue which is being configured.\n * @param opts This is not used by the in memory queue as there is no real use\n * case when in memory, but is the same API as Bull\n */\n constructor(name: string, opts?: any) {\n this._name = name\n this._opts = opts\n this._messages = []\n this._emitter = new events.EventEmitter()\n this._runCount = 0\n this._addCount = 0\n }\n\n /**\n * Same callback API as Bull, each callback passed to this will consume messages as they are\n * available. Please note this is a queue service, not a notification service, so each\n * consumer will receive different messages.\n * @param func The callback function which will return a \"Job\", the same\n * as the Bull API, within this job the property \"data\" contains the JSON message. Please\n * note this is incredibly limited compared to Bull as in reality the Job would contain\n * a lot more information about the queue and current status of Bull cluster.\n */\n process(func: any) {\n this._emitter.on(\"message\", async () => {\n if (this._messages.length <= 0) {\n return\n }\n let msg = this._messages.shift()\n let resp = func(msg)\n if (resp.then != null) {\n await resp\n }\n this._runCount++\n })\n }\n\n async isReady() {\n return true\n }\n\n // simply puts a message to the queue and emits to the queue for processing\n /**\n * Simple function to replicate the add message functionality of Bull, putting\n * a new message on the queue. 
This then emits an event which will be used to\n * return the message to a consumer (if one is attached).\n * @param msg A message to be transported over the queue, this should be\n * a JSON message as this is required by Bull.\n * @param repeat serves no purpose for the import queue.\n */\n // eslint-disable-next-line no-unused-vars\n add(msg: any, repeat: boolean) {\n if (typeof msg !== \"object\") {\n throw \"Queue only supports carrying JSON.\"\n }\n this._messages.push(newJob(this._name, msg))\n this._addCount++\n this._emitter.emit(\"message\")\n }\n\n /**\n * replicating the close function from bull, which waits for jobs to finish.\n */\n async close() {\n return []\n }\n\n /**\n * This removes a cron which has been implemented, this is part of Bull API.\n * @param cronJobId The cron which is to be removed.\n */\n removeRepeatableByKey(cronJobId: string) {\n // TODO: implement for testing\n console.log(cronJobId)\n }\n\n /**\n * Implemented for tests\n */\n getRepeatableJobs() {\n return []\n }\n\n // eslint-disable-next-line no-unused-vars\n removeJobs(pattern: string) {\n // no-op\n }\n\n /**\n * Implemented for tests\n */\n async clean() {\n return []\n }\n\n async getJob() {\n return {}\n }\n\n on() {\n // do nothing\n return this\n }\n\n async waitForCompletion() {\n do {\n await timeout(50)\n } while (this._addCount < this._runCount)\n }\n}\n\nexport default InMemoryQueue\n", "export enum JobQueue {\n AUTOMATION = \"automationQueue\",\n APP_BACKUP = \"appBackupQueue\",\n AUDIT_LOG = \"auditLogQueue\",\n SYSTEM_EVENT_QUEUE = \"systemEventQueue\",\n APP_MIGRATION = \"appMigration\",\n}\n", "import { Job, JobId, Queue } from \"bull\"\nimport { JobQueue } from \"./constants\"\nimport * as context from \"../context\"\n\nexport type StalledFn = (job: Job) => Promise<void>\n\nexport function addListeners(\n queue: Queue,\n jobQueue: JobQueue,\n removeStalledCb?: StalledFn\n) {\n logging(queue, jobQueue)\n if (removeStalledCb) {\n handleStalled(queue, removeStalledCb)\n }\n}\n\nfunction handleStalled(queue: Queue, removeStalledCb?: StalledFn) {\n queue.on(\"stalled\", async (job: Job) => {\n if (removeStalledCb) {\n await removeStalledCb(job)\n } else if (job.opts.repeat) {\n const jobId = job.id\n const repeatJobs = await queue.getRepeatableJobs()\n for (let repeatJob of repeatJobs) {\n if (repeatJob.id === jobId) {\n await queue.removeRepeatableByKey(repeatJob.key)\n }\n }\n console.log(`jobId=${jobId} disabled`)\n }\n })\n}\n\nfunction getLogParams(\n eventType: QueueEventType,\n event: BullEvent,\n opts: {\n job?: Job\n jobId?: JobId\n error?: Error\n } = {},\n extra: any = {}\n) {\n const message = `[BULL] ${eventType}=${event}`\n const err = opts.error\n\n const bullLog = {\n _logKey: \"bull\",\n eventType,\n event,\n job: opts.job,\n jobId: opts.jobId || opts.job?.id,\n ...extra,\n }\n\n let automationLog\n if (opts.job?.data?.automation) {\n automationLog = {\n _logKey: \"automation\",\n trigger: opts.job\n ? 
opts.job.data.automation.definition.trigger.event\n : undefined,\n }\n }\n\n return [message, err, bullLog, automationLog]\n}\n\nenum BullEvent {\n ERROR = \"error\",\n WAITING = \"waiting\",\n ACTIVE = \"active\",\n STALLED = \"stalled\",\n PROGRESS = \"progress\",\n COMPLETED = \"completed\",\n FAILED = \"failed\",\n PAUSED = \"paused\",\n RESUMED = \"resumed\",\n CLEANED = \"cleaned\",\n DRAINED = \"drained\",\n REMOVED = \"removed\",\n}\n\nenum QueueEventType {\n AUTOMATION_EVENT = \"automation-event\",\n APP_BACKUP_EVENT = \"app-backup-event\",\n AUDIT_LOG_EVENT = \"audit-log-event\",\n SYSTEM_EVENT = \"system-event\",\n APP_MIGRATION = \"app-migration\",\n}\n\nconst EventTypeMap: { [key in JobQueue]: QueueEventType } = {\n [JobQueue.AUTOMATION]: QueueEventType.AUTOMATION_EVENT,\n [JobQueue.APP_BACKUP]: QueueEventType.APP_BACKUP_EVENT,\n [JobQueue.AUDIT_LOG]: QueueEventType.AUDIT_LOG_EVENT,\n [JobQueue.SYSTEM_EVENT_QUEUE]: QueueEventType.SYSTEM_EVENT,\n [JobQueue.APP_MIGRATION]: QueueEventType.APP_MIGRATION,\n}\n\nfunction logging(queue: Queue, jobQueue: JobQueue) {\n const eventType = EventTypeMap[jobQueue]\n\n function doInJobContext(job: Job, task: any) {\n // if this is an automation job try to get the app id\n const appId = job.data.event?.appId\n if (appId) {\n return context.doInContext(appId, task)\n } else {\n task()\n }\n }\n\n queue\n .on(BullEvent.STALLED, async (job: Job) => {\n // A job has been marked as stalled. This is useful for debugging job\n // workers that crash or pause the event loop.\n await doInJobContext(job, () => {\n console.error(...getLogParams(eventType, BullEvent.STALLED, { job }))\n })\n })\n .on(BullEvent.ERROR, (error: any) => {\n // An error occurred.\n console.error(...getLogParams(eventType, BullEvent.ERROR, { error }))\n })\n\n if (process.env.NODE_DEBUG?.includes(\"bull\")) {\n queue\n .on(BullEvent.WAITING, (jobId: JobId) => {\n // A Job is waiting to be processed as soon as a worker is idling.\n console.info(...getLogParams(eventType, BullEvent.WAITING, { jobId }))\n })\n .on(BullEvent.ACTIVE, async (job: Job, jobPromise: any) => {\n // A job has started. You can use `jobPromise.cancel()`` to abort it.\n await doInJobContext(job, () => {\n console.info(...getLogParams(eventType, BullEvent.ACTIVE, { job }))\n })\n })\n .on(BullEvent.PROGRESS, async (job: Job, progress: any) => {\n // A job's progress was updated\n await doInJobContext(job, () => {\n console.info(\n ...getLogParams(\n eventType,\n BullEvent.PROGRESS,\n { job },\n { progress }\n )\n )\n })\n })\n .on(BullEvent.COMPLETED, async (job: Job, result) => {\n // A job successfully completed with a `result`.\n await doInJobContext(job, () => {\n console.info(\n ...getLogParams(eventType, BullEvent.COMPLETED, { job }, { result })\n )\n })\n })\n .on(BullEvent.FAILED, async (job: Job, error: any) => {\n // A job failed with reason `err`!\n await doInJobContext(job, () => {\n console.error(\n ...getLogParams(eventType, BullEvent.FAILED, { job, error })\n )\n })\n })\n .on(BullEvent.PAUSED, () => {\n // The queue has been paused.\n console.info(...getLogParams(eventType, BullEvent.PAUSED))\n })\n .on(BullEvent.RESUMED, () => {\n // The queue has been resumed.\n console.info(...getLogParams(eventType, BullEvent.RESUMED))\n })\n .on(BullEvent.CLEANED, (jobs: Job[], type: string) => {\n // Old jobs have been cleaned from the queue. 
`jobs` is an array of cleaned\n // jobs, and `type` is the type of jobs cleaned.\n console.info(\n ...getLogParams(\n eventType,\n BullEvent.CLEANED,\n {},\n { length: jobs.length, type }\n )\n )\n })\n .on(BullEvent.DRAINED, () => {\n // Emitted every time the queue has processed all the waiting jobs (even if there can be some delayed jobs not yet processed)\n console.info(...getLogParams(eventType, BullEvent.DRAINED))\n })\n .on(BullEvent.REMOVED, (job: Job) => {\n // A job successfully removed.\n console.info(...getLogParams(eventType, BullEvent.REMOVED, { job }))\n })\n }\n}\n", "import { Event, Identity, Group } from \"@budibase/types\"\nimport { EventProcessor } from \"./types\"\n\nexport default class Processor implements EventProcessor {\n initialised: boolean = false\n processors: EventProcessor[] = []\n\n constructor(processors: EventProcessor[]) {\n this.processors = processors\n }\n\n async processEvent(\n event: Event,\n identity: Identity,\n properties: any,\n timestamp?: string | number\n ): Promise<void> {\n for (const eventProcessor of this.processors) {\n await eventProcessor.processEvent(event, identity, properties, timestamp)\n }\n }\n\n async identify(\n identity: Identity,\n timestamp?: string | number\n ): Promise<void> {\n for (const eventProcessor of this.processors) {\n if (eventProcessor.identify) {\n await eventProcessor.identify(identity, timestamp)\n }\n }\n }\n\n async identifyGroup(\n identity: Group,\n timestamp?: string | number\n ): Promise<void> {\n for (const eventProcessor of this.processors) {\n if (eventProcessor.identifyGroup) {\n await eventProcessor.identifyGroup(identity, timestamp)\n }\n }\n }\n\n shutdown() {\n for (const eventProcessor of this.processors) {\n if (eventProcessor.shutdown) {\n eventProcessor.shutdown()\n }\n }\n }\n}\n", "import * as context from \"../context\"\nimport * as identityCtx from \"../context/identity\"\nimport env from \"../environment\"\nimport {\n Hosting,\n User,\n Identity,\n IdentityType,\n Account,\n isCloudAccount,\n isSSOAccount,\n TenantGroup,\n CloudAccount,\n UserIdentity,\n InstallationGroup,\n UserContext,\n Group,\n isSSOUser,\n} from \"@budibase/types\"\nimport { processors } from \"./processors\"\nimport { newid } from \"../utils\"\nimport * as installation from \"../installation\"\nimport * as configs from \"../configs\"\nimport * as users from \"../users\"\nimport { withCache, TTL, CacheKey } from \"../cache/generic\"\n\n/**\n * An identity can be:\n * - account user (Self host)\n * - budibase user\n * - tenant\n * - installation\n */\nconst getCurrentIdentity = async (): Promise<Identity> => {\n let identityContext = identityCtx.getIdentity()\n const environment = getDeploymentEnvironment()\n\n let identityType\n\n if (!identityContext) {\n identityType = IdentityType.TENANT\n } else {\n identityType = identityContext.type\n }\n\n if (identityType === IdentityType.INSTALLATION) {\n const installationId = await getInstallationId()\n const hosting = getHostingFromEnv()\n return {\n id: formatDistinctId(installationId, identityType),\n hosting,\n type: identityType,\n installationId,\n environment,\n }\n } else if (identityType === IdentityType.TENANT) {\n const installationId = await getInstallationId()\n const tenantId = await getEventTenantId(context.getTenantId())\n const hosting = getHostingFromEnv()\n\n return {\n id: formatDistinctId(tenantId, identityType),\n type: identityType,\n hosting,\n installationId,\n tenantId,\n realTenantId: context.getTenantId(),\n environment,\n }\n } else if 
(identityType === IdentityType.USER) {\n const userContext = identityContext as UserContext\n const tenantId = await getEventTenantId(context.getTenantId())\n const installationId = await getInstallationId()\n\n const account = userContext.account\n let hosting\n if (account) {\n hosting = account.hosting\n } else {\n hosting = getHostingFromEnv()\n }\n\n return {\n id: userContext._id,\n type: identityType,\n hosting,\n installationId,\n tenantId,\n environment,\n realTenantId: context.getTenantId(),\n hostInfo: userContext.hostInfo,\n }\n } else {\n throw new Error(\"Unknown identity type\")\n }\n}\n\nconst identifyInstallationGroup = async (\n installId: string,\n timestamp?: string | number\n): Promise<void> => {\n const id = installId\n const type = IdentityType.INSTALLATION\n const hosting = getHostingFromEnv()\n const version = env.VERSION\n const environment = getDeploymentEnvironment()\n\n const group: InstallationGroup = {\n id,\n type,\n hosting,\n version,\n environment,\n }\n\n await identifyGroup(group, timestamp)\n // need to create a normal identity for the group to be able to query it globally\n // match the posthog syntax to link this identity to the empty auto generated one\n await identify({ ...group, id: `$${type}_${id}` }, timestamp)\n}\n\nconst identifyTenantGroup = async (\n tenantId: string,\n account: Account | undefined,\n timestamp?: string | number\n): Promise<void> => {\n const id = await getEventTenantId(tenantId)\n const type = IdentityType.TENANT\n const installationId = await getInstallationId()\n const environment = getDeploymentEnvironment()\n\n let hosting: Hosting\n let profession: string | undefined\n let companySize: string | undefined\n\n if (account) {\n profession = account.profession\n companySize = account.size\n hosting = account.hosting\n } else {\n hosting = getHostingFromEnv()\n }\n\n const group: TenantGroup = {\n id,\n type,\n hosting,\n environment,\n installationId,\n profession,\n companySize,\n }\n\n await identifyGroup(group, timestamp)\n // need to create a normal identity for the group to be able to query it globally\n // match the posthog syntax to link this identity to the auto generated one\n await identify({ ...group, id: `$${type}_${id}` }, timestamp)\n}\n\nconst identifyUser = async (\n user: User,\n account: CloudAccount | undefined,\n timestamp?: string | number\n) => {\n const id = user._id as string\n const tenantId = await getEventTenantId(user.tenantId)\n const type = IdentityType.USER\n let builder = users.hasBuilderPermissions(user)\n let admin = users.hasAdminPermissions(user)\n let providerType\n if (isSSOUser(user)) {\n providerType = user.providerType\n }\n const accountHolder = account?.budibaseUserId === user._id || false\n const verified =\n account && account?.budibaseUserId === user._id ? account.verified : false\n const installationId = await getInstallationId()\n const hosting = account ? account.hosting : getHostingFromEnv()\n const environment = getDeploymentEnvironment()\n\n const identity: UserIdentity = {\n id,\n type,\n hosting,\n installationId,\n tenantId,\n verified,\n accountHolder,\n providerType,\n builder,\n admin,\n environment,\n }\n\n await identify(identity, timestamp)\n}\n\nconst identifyAccount = async (account: Account) => {\n let id = account.accountId\n const tenantId = account.tenantId\n let type = IdentityType.USER\n let providerType = isSSOAccount(account) ? 
account.providerType : undefined\n const verified = account.verified\n const accountHolder = true\n const hosting = account.hosting\n const installationId = await getInstallationId()\n const environment = getDeploymentEnvironment()\n\n if (isCloudAccount(account)) {\n if (account.budibaseUserId) {\n // use the budibase user as the id if set\n id = account.budibaseUserId\n }\n }\n\n const identity: UserIdentity = {\n id,\n type,\n hosting,\n installationId,\n tenantId,\n providerType,\n verified,\n accountHolder,\n environment,\n }\n\n await identify(identity)\n}\n\nconst identify = async (identity: Identity, timestamp?: string | number) => {\n await processors.identify(identity, timestamp)\n}\n\nconst identifyGroup = async (group: Group, timestamp?: string | number) => {\n await processors.identifyGroup(group, timestamp)\n}\n\nconst getDeploymentEnvironment = () => {\n if (env.isDev()) {\n return \"development\"\n } else {\n return env.DEPLOYMENT_ENVIRONMENT\n }\n}\n\nconst getHostingFromEnv = () => {\n return env.SELF_HOSTED ? Hosting.SELF : Hosting.CLOUD\n}\n\nconst getInstallationId = async () => {\n if (isAccountPortal()) {\n return \"account-portal\"\n }\n const install = await installation.getInstall()\n return install.installId\n}\n\nconst getEventTenantId = async (tenantId: string): Promise<string> => {\n if (env.SELF_HOSTED) {\n return getUniqueTenantId(tenantId)\n } else {\n // tenant id's in the cloud are already unique\n return tenantId\n }\n}\n\nexport const getUniqueTenantId = async (tenantId: string): Promise<string> => {\n // make sure this tenantId always matches the tenantId in context\n return context.doInTenant(tenantId, () => {\n return withCache(CacheKey.UNIQUE_TENANT_ID, TTL.ONE_DAY, async () => {\n const db = context.getGlobalDB()\n const config = await configs.getSettingsConfigDoc()\n\n let uniqueTenantId: string\n if (config.config.uniqueTenantId) {\n return config.config.uniqueTenantId\n } else {\n uniqueTenantId = `${newid()}_${tenantId}`\n config.config.uniqueTenantId = uniqueTenantId\n await db.put(config)\n return uniqueTenantId\n }\n })\n })\n}\n\nconst isAccountPortal = () => {\n return env.SERVICE === \"account-portal\"\n}\n\nconst formatDistinctId = (id: string, type: IdentityType) => {\n if (type === IdentityType.INSTALLATION || type === IdentityType.TENANT) {\n return `$${type}_${id}`\n } else {\n return id\n }\n}\n\nexport default {\n getCurrentIdentity,\n identifyInstallationGroup,\n identifyTenantGroup,\n identifyUser,\n identifyAccount,\n identify,\n identifyGroup,\n getInstallationId,\n getUniqueTenantId,\n}\n", "import { newid } from \"./utils\"\nimport * as events from \"./events\"\nimport { StaticDatabases, doWithDB } from \"./db\"\nimport { Installation, IdentityType, Database } from \"@budibase/types\"\nimport * as context from \"./context\"\nimport semver from \"semver\"\nimport { bustCache, withCache, TTL, CacheKey } from \"./cache/generic\"\nimport environment from \"./environment\"\n\nexport const getInstall = async (): Promise<Installation> => {\n return withCache(CacheKey.INSTALLATION, TTL.ONE_DAY, getInstallFromDB, {\n useTenancy: false,\n })\n}\nasync function createInstallDoc(platformDb: Database) {\n const install: Installation = {\n _id: StaticDatabases.PLATFORM_INFO.docs.install,\n installId: newid(),\n version: environment.VERSION,\n }\n try {\n const resp = await platformDb.put(install)\n install._rev = resp.rev\n return install\n } catch (err: any) {\n if (err.status === 409) {\n return getInstallFromDB()\n } else {\n throw 
err\n }\n }\n}\n\nexport const getInstallFromDB = async (): Promise<Installation> => {\n return doWithDB(\n StaticDatabases.PLATFORM_INFO.name,\n async (platformDb: any) => {\n let install: Installation\n try {\n install = await platformDb.get(\n StaticDatabases.PLATFORM_INFO.docs.install\n )\n } catch (e: any) {\n if (e.status === 404) {\n install = await createInstallDoc(platformDb)\n } else {\n throw e\n }\n }\n return install\n }\n )\n}\n\nconst updateVersion = async (version: string): Promise<boolean> => {\n try {\n await doWithDB(\n StaticDatabases.PLATFORM_INFO.name,\n async (platformDb: any) => {\n const install = await getInstall()\n install.version = version\n await platformDb.put(install)\n await bustCache(CacheKey.INSTALLATION)\n }\n )\n } catch (e: any) {\n if (e.status === 409) {\n // do nothing - version has already been updated\n // likely in clustered environment\n return false\n }\n throw e\n }\n return true\n}\n\nexport const checkInstallVersion = async (): Promise<void> => {\n const install = await getInstall()\n\n const currentVersion = install.version\n const newVersion = environment.VERSION\n\n if (currentVersion !== newVersion) {\n const isUpgrade = semver.gt(newVersion, currentVersion)\n const isDowngrade = semver.lt(newVersion, currentVersion)\n\n const success = await updateVersion(newVersion)\n\n if (success) {\n await context.doInIdentityContext(\n {\n _id: install.installId,\n type: IdentityType.INSTALLATION,\n },\n async () => {\n if (isUpgrade) {\n await events.installation.upgraded(currentVersion, newVersion)\n } else if (isDowngrade) {\n await events.installation.downgraded(currentVersion, newVersion)\n }\n }\n )\n await events.identification.identifyInstallationGroup(install.installId)\n }\n }\n}\n", "import {\n Event,\n BackfillMetadata,\n CachedEvent,\n SSOCreatedEvent,\n AutomationCreatedEvent,\n AutomationStepCreatedEvent,\n DatasourceCreatedEvent,\n LayoutCreatedEvent,\n QueryCreatedEvent,\n RoleCreatedEvent,\n ScreenCreatedEvent,\n TableCreatedEvent,\n ViewCreatedEvent,\n ViewCalculationCreatedEvent,\n ViewFilterCreatedEvent,\n AppPublishedEvent,\n UserCreatedEvent,\n RoleAssignedEvent,\n UserPermissionAssignedEvent,\n AppCreatedEvent,\n} from \"@budibase/types\"\nimport * as context from \"../context\"\nimport { CacheKey } from \"../cache/generic\"\nimport * as cache from \"../cache/generic\"\n\n// LIFECYCLE\n\nexport const start = async (events: Event[]) => {\n const metadata: BackfillMetadata = {\n eventWhitelist: events,\n }\n return saveBackfillMetadata(metadata)\n}\n\nexport const recordEvent = async (event: Event, properties: any) => {\n const eventKey = getEventKey(event, properties)\n // don't use a ttl - cleaned up by migration\n // don't use tenancy - already in the key\n await cache.store(eventKey, properties, undefined, { useTenancy: false })\n}\n\nexport const end = async () => {\n await deleteBackfillMetadata()\n await clearEvents()\n}\n\n// CRUD\n\nconst getBackfillMetadata = async (): Promise<BackfillMetadata | null> => {\n return cache.get(CacheKey.BACKFILL_METADATA)\n}\n\nconst saveBackfillMetadata = async (\n backfill: BackfillMetadata\n): Promise<void> => {\n // no TTL - deleted by backfill\n return cache.store(CacheKey.BACKFILL_METADATA, backfill)\n}\n\nconst deleteBackfillMetadata = async (): Promise<void> => {\n await cache.destroy(CacheKey.BACKFILL_METADATA)\n}\n\nconst clearEvents = async () => {\n // wildcard\n const pattern = getEventKey()\n const keys = await cache.keys(pattern)\n\n for (const key of keys) {\n // delete 
each key\n // don't use tenancy, already in the key\n await cache.destroy(key, { useTenancy: false })\n }\n}\n\n// HELPERS\n\nexport const isBackfillingEvent = async (event: Event) => {\n const backfill = await getBackfillMetadata()\n const events = backfill?.eventWhitelist\n if (events && events.includes(event)) {\n return true\n } else {\n return false\n }\n}\n\nexport const isAlreadySent = async (event: Event, properties: any) => {\n const eventKey = getEventKey(event, properties)\n const cachedEvent: CachedEvent = await cache.get(eventKey, {\n useTenancy: false,\n })\n return !!cachedEvent\n}\n\nconst CUSTOM_PROPERTY_SUFFIX: any = {\n // APP EVENTS\n [Event.AUTOMATION_CREATED]: (properties: AutomationCreatedEvent) => {\n return properties.automationId\n },\n [Event.AUTOMATION_STEP_CREATED]: (properties: AutomationStepCreatedEvent) => {\n return properties.stepId\n },\n [Event.DATASOURCE_CREATED]: (properties: DatasourceCreatedEvent) => {\n return properties.datasourceId\n },\n [Event.LAYOUT_CREATED]: (properties: LayoutCreatedEvent) => {\n return properties.layoutId\n },\n [Event.QUERY_CREATED]: (properties: QueryCreatedEvent) => {\n return properties.queryId\n },\n [Event.ROLE_CREATED]: (properties: RoleCreatedEvent) => {\n return properties.roleId\n },\n [Event.SCREEN_CREATED]: (properties: ScreenCreatedEvent) => {\n return properties.screenId\n },\n [Event.TABLE_CREATED]: (properties: TableCreatedEvent) => {\n return properties.tableId\n },\n [Event.VIEW_CREATED]: (properties: ViewCreatedEvent) => {\n return properties.tableId // best uniqueness\n },\n [Event.VIEW_CALCULATION_CREATED]: (\n properties: ViewCalculationCreatedEvent\n ) => {\n return properties.tableId // best uniqueness\n },\n [Event.VIEW_FILTER_CREATED]: (properties: ViewFilterCreatedEvent) => {\n return properties.tableId // best uniqueness\n },\n [Event.APP_CREATED]: (properties: AppCreatedEvent) => {\n return properties.appId // best uniqueness\n },\n [Event.APP_PUBLISHED]: (properties: AppPublishedEvent) => {\n return properties.appId // best uniqueness\n },\n // GLOBAL EVENTS\n [Event.AUTH_SSO_CREATED]: (properties: SSOCreatedEvent) => {\n return properties.type\n },\n [Event.AUTH_SSO_ACTIVATED]: (properties: SSOCreatedEvent) => {\n return properties.type\n },\n [Event.USER_CREATED]: (properties: UserCreatedEvent) => {\n return properties.userId\n },\n [Event.USER_PERMISSION_ADMIN_ASSIGNED]: (\n properties: UserPermissionAssignedEvent\n ) => {\n return properties.userId\n },\n [Event.USER_PERMISSION_BUILDER_ASSIGNED]: (\n properties: UserPermissionAssignedEvent\n ) => {\n return properties.userId\n },\n [Event.ROLE_ASSIGNED]: (properties: RoleAssignedEvent) => {\n return `${properties.roleId}-${properties.userId}`\n },\n}\n\nconst getEventKey = (event?: Event, properties?: any) => {\n let eventKey: string\n\n const tenantId = context.getTenantId()\n if (event) {\n eventKey = `${CacheKey.EVENTS}:${tenantId}:${event}`\n\n // use some properties to make the key more unique\n const custom = CUSTOM_PROPERTY_SUFFIX[event]\n const suffix = custom ? 
custom(properties) : undefined\n if (suffix) {\n eventKey = `${eventKey}:${suffix}`\n }\n } else {\n eventKey = `${CacheKey.EVENTS}:${tenantId}:*`\n }\n\n return eventKey\n}\n", "import BullQueue from \"bull\"\nimport { createQueue, JobQueue } from \"../../queue\"\nimport { Event, Identity } from \"@budibase/types\"\n\nexport interface EventPayload {\n event: Event\n identity: Identity\n properties: any\n timestamp?: string | number\n}\n\nexport let asyncEventQueue: BullQueue.Queue\n\nexport function init() {\n asyncEventQueue = createQueue<EventPayload>(JobQueue.SYSTEM_EVENT_QUEUE)\n}\n\nexport async function shutdown() {\n if (asyncEventQueue) {\n await asyncEventQueue.close()\n }\n}\n", "import { AsyncEvents } from \"@budibase/types\"\nimport { EventPayload, asyncEventQueue, init } from \"./queue\"\n\nexport async function publishAsyncEvent(payload: EventPayload) {\n if (!asyncEventQueue) {\n init()\n }\n const { event, identity } = payload\n if (AsyncEvents.indexOf(event) !== -1 && identity.tenantId) {\n await asyncEventQueue.add(payload)\n }\n}\n", "import { Event } from \"@budibase/types\"\nimport { processors } from \"./processors\"\nimport identification from \"./identification\"\nimport * as backfill from \"./backfill\"\nimport { publishAsyncEvent } from \"./asyncEvents\"\n\nexport const publishEvent = async (\n event: Event,\n properties: any,\n timestamp?: string | number\n) => {\n // in future this should use async events via a distributed queue.\n const identity = await identification.getCurrentIdentity()\n\n const backfilling = await backfill.isBackfillingEvent(event)\n // no backfill - send the event and exit\n if (!backfilling) {\n // send off async events if required\n await publishAsyncEvent({\n event,\n identity,\n properties,\n timestamp,\n })\n // now handle the main sync event processing pipeline\n await processors.processEvent(event, identity, properties, timestamp)\n return\n }\n\n // backfill active - check if the event has been sent already\n const alreadySent = await backfill.isAlreadySent(event, properties)\n if (alreadySent) {\n // do nothing\n return\n } else {\n // send and record the event\n await processors.processEvent(event, identity, properties, timestamp)\n await backfill.recordEvent(event, properties)\n }\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n Account,\n AccountCreatedEvent,\n AccountDeletedEvent,\n AccountVerifiedEvent,\n} from \"@budibase/types\"\n\nasync function created(account: Account) {\n const properties: AccountCreatedEvent = {\n tenantId: account.tenantId,\n }\n await publishEvent(Event.ACCOUNT_CREATED, properties)\n}\n\nasync function deleted(account: Account) {\n const properties: AccountDeletedEvent = {\n tenantId: account.tenantId,\n }\n await publishEvent(Event.ACCOUNT_DELETED, properties)\n}\n\nasync function verified(account: Account) {\n const properties: AccountVerifiedEvent = {\n tenantId: account.tenantId,\n }\n await publishEvent(Event.ACCOUNT_VERIFIED, properties)\n}\n\nexport default {\n created,\n deleted,\n verified,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n App,\n AppCreatedEvent,\n AppUpdatedEvent,\n AppDeletedEvent,\n AppPublishedEvent,\n AppUnpublishedEvent,\n AppFileImportedEvent,\n AppTemplateImportedEvent,\n AppVersionUpdatedEvent,\n AppVersionRevertedEvent,\n AppRevertedEvent,\n AppExportedEvent,\n} from \"@budibase/types\"\n\nconst created = async (app: App, timestamp?: string | number) => {\n const properties: AppCreatedEvent = {\n appId: app.appId,\n 
version: app.version,\n audited: {\n name: app.name,\n },\n }\n await publishEvent(Event.APP_CREATED, properties, timestamp)\n}\n\nasync function updated(app: App) {\n const properties: AppUpdatedEvent = {\n appId: app.appId,\n version: app.version,\n audited: {\n name: app.name,\n },\n }\n await publishEvent(Event.APP_UPDATED, properties)\n}\n\nasync function deleted(app: App) {\n const properties: AppDeletedEvent = {\n appId: app.appId,\n audited: {\n name: app.name,\n },\n }\n await publishEvent(Event.APP_DELETED, properties)\n}\n\nasync function published(app: App, timestamp?: string | number) {\n const properties: AppPublishedEvent = {\n appId: app.appId,\n audited: {\n name: app.name,\n },\n }\n await publishEvent(Event.APP_PUBLISHED, properties, timestamp)\n}\n\nasync function unpublished(app: App) {\n const properties: AppUnpublishedEvent = {\n appId: app.appId,\n audited: {\n name: app.name,\n },\n }\n await publishEvent(Event.APP_UNPUBLISHED, properties)\n}\n\nasync function fileImported(app: App) {\n const properties: AppFileImportedEvent = {\n appId: app.appId,\n audited: {\n name: app.name,\n },\n }\n await publishEvent(Event.APP_FILE_IMPORTED, properties)\n}\n\nasync function templateImported(app: App, templateKey: string) {\n const properties: AppTemplateImportedEvent = {\n appId: app.appId,\n templateKey,\n audited: {\n name: app.name,\n },\n }\n await publishEvent(Event.APP_TEMPLATE_IMPORTED, properties)\n}\n\nasync function versionUpdated(\n app: App,\n currentVersion: string,\n updatedToVersion: string\n) {\n const properties: AppVersionUpdatedEvent = {\n appId: app.appId,\n currentVersion,\n updatedToVersion,\n audited: {\n name: app.name,\n },\n }\n await publishEvent(Event.APP_VERSION_UPDATED, properties)\n}\n\nasync function versionReverted(\n app: App,\n currentVersion: string,\n revertedToVersion: string\n) {\n const properties: AppVersionRevertedEvent = {\n appId: app.appId,\n currentVersion,\n revertedToVersion,\n audited: {\n name: app.name,\n },\n }\n await publishEvent(Event.APP_VERSION_REVERTED, properties)\n}\n\nasync function reverted(app: App) {\n const properties: AppRevertedEvent = {\n appId: app.appId,\n audited: {\n name: app.name,\n },\n }\n await publishEvent(Event.APP_REVERTED, properties)\n}\n\nasync function exported(app: App) {\n const properties: AppExportedEvent = {\n appId: app.appId,\n audited: {\n name: app.name,\n },\n }\n await publishEvent(Event.APP_EXPORTED, properties)\n}\n\nexport default {\n created,\n updated,\n deleted,\n published,\n unpublished,\n fileImported,\n templateImported,\n versionUpdated,\n versionReverted,\n reverted,\n exported,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n LoginEvent,\n LoginSource,\n LogoutEvent,\n SSOActivatedEvent,\n SSOCreatedEvent,\n SSODeactivatedEvent,\n SSOType,\n SSOUpdatedEvent,\n} from \"@budibase/types\"\nimport { identification } from \"..\"\n\nasync function login(source: LoginSource, email: string) {\n const identity = await identification.getCurrentIdentity()\n const properties: LoginEvent = {\n userId: identity.id,\n source,\n audited: {\n email,\n },\n }\n await publishEvent(Event.AUTH_LOGIN, properties)\n}\n\nasync function logout(email?: string) {\n const identity = await identification.getCurrentIdentity()\n const properties: LogoutEvent = {\n userId: identity.id,\n audited: {\n email,\n },\n }\n await publishEvent(Event.AUTH_LOGOUT, properties)\n}\n\nasync function SSOCreated(type: SSOType, timestamp?: string | number) {\n const properties: 
SSOCreatedEvent = {\n type,\n }\n await publishEvent(Event.AUTH_SSO_CREATED, properties, timestamp)\n}\n\nasync function SSOUpdated(type: SSOType) {\n const properties: SSOUpdatedEvent = {\n type,\n }\n await publishEvent(Event.AUTH_SSO_UPDATED, properties)\n}\n\nasync function SSOActivated(type: SSOType, timestamp?: string | number) {\n const properties: SSOActivatedEvent = {\n type,\n }\n await publishEvent(Event.AUTH_SSO_ACTIVATED, properties, timestamp)\n}\n\nasync function SSODeactivated(type: SSOType) {\n const properties: SSODeactivatedEvent = {\n type,\n }\n await publishEvent(Event.AUTH_SSO_DEACTIVATED, properties)\n}\n\nexport default {\n login,\n logout,\n SSOCreated,\n SSOUpdated,\n SSOActivated,\n SSODeactivated,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Automation,\n Event,\n AutomationStep,\n AutomationCreatedEvent,\n AutomationDeletedEvent,\n AutomationTestedEvent,\n AutomationStepCreatedEvent,\n AutomationStepDeletedEvent,\n AutomationTriggerUpdatedEvent,\n AutomationsRunEvent,\n} from \"@budibase/types\"\n\nasync function created(automation: Automation, timestamp?: string | number) {\n const properties: AutomationCreatedEvent = {\n appId: automation.appId,\n automationId: automation._id as string,\n triggerId: automation.definition?.trigger?.id,\n triggerType: automation.definition?.trigger?.stepId,\n audited: {\n name: automation.name,\n },\n }\n await publishEvent(Event.AUTOMATION_CREATED, properties, timestamp)\n}\n\nasync function triggerUpdated(automation: Automation) {\n const properties: AutomationTriggerUpdatedEvent = {\n appId: automation.appId,\n automationId: automation._id as string,\n triggerId: automation.definition?.trigger?.id,\n triggerType: automation.definition?.trigger?.stepId,\n }\n await publishEvent(Event.AUTOMATION_TRIGGER_UPDATED, properties)\n}\n\nasync function deleted(automation: Automation) {\n const properties: AutomationDeletedEvent = {\n appId: automation.appId,\n automationId: automation._id as string,\n triggerId: automation.definition?.trigger?.id,\n triggerType: automation.definition?.trigger?.stepId,\n audited: {\n name: automation.name,\n },\n }\n await publishEvent(Event.AUTOMATION_DELETED, properties)\n}\n\nasync function tested(automation: Automation) {\n const properties: AutomationTestedEvent = {\n appId: automation.appId,\n automationId: automation._id as string,\n triggerId: automation.definition?.trigger?.id,\n triggerType: automation.definition?.trigger?.stepId,\n }\n await publishEvent(Event.AUTOMATION_TESTED, properties)\n}\n\nconst run = async (count: number, timestamp?: string | number) => {\n const properties: AutomationsRunEvent = {\n count,\n }\n await publishEvent(Event.AUTOMATIONS_RUN, properties, timestamp)\n}\n\nasync function stepCreated(\n automation: Automation,\n step: AutomationStep,\n timestamp?: string | number\n) {\n const properties: AutomationStepCreatedEvent = {\n appId: automation.appId,\n automationId: automation._id as string,\n triggerId: automation.definition?.trigger?.id,\n triggerType: automation.definition?.trigger?.stepId,\n stepId: step.id!,\n stepType: step.stepId,\n audited: {\n name: automation.name,\n },\n }\n await publishEvent(Event.AUTOMATION_STEP_CREATED, properties, timestamp)\n}\n\nasync function stepDeleted(automation: Automation, step: AutomationStep) {\n const properties: AutomationStepDeletedEvent = {\n appId: automation.appId,\n automationId: automation._id as string,\n triggerId: automation.definition?.trigger?.id,\n triggerType: 
automation.definition?.trigger?.stepId,\n stepId: step.id!,\n stepType: step.stepId,\n audited: {\n name: automation.name,\n },\n }\n await publishEvent(Event.AUTOMATION_STEP_DELETED, properties)\n}\n\nexport default {\n created,\n triggerUpdated,\n deleted,\n tested,\n run,\n stepCreated,\n stepDeleted,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n Datasource,\n DatasourceCreatedEvent,\n DatasourceUpdatedEvent,\n DatasourceDeletedEvent,\n SourceName,\n} from \"@budibase/types\"\n\nfunction isCustom(datasource: Datasource) {\n const sources = Object.values(SourceName)\n // if not in the base source list, then it must be custom\n return !sources.includes(datasource.source)\n}\n\nasync function created(datasource: Datasource, timestamp?: string | number) {\n const properties: DatasourceCreatedEvent = {\n datasourceId: datasource._id as string,\n source: datasource.source,\n custom: isCustom(datasource),\n }\n await publishEvent(Event.DATASOURCE_CREATED, properties, timestamp)\n}\n\nasync function updated(datasource: Datasource) {\n const properties: DatasourceUpdatedEvent = {\n datasourceId: datasource._id as string,\n source: datasource.source,\n custom: isCustom(datasource),\n }\n await publishEvent(Event.DATASOURCE_UPDATED, properties)\n}\n\nasync function deleted(datasource: Datasource) {\n const properties: DatasourceDeletedEvent = {\n datasourceId: datasource._id as string,\n source: datasource.source,\n custom: isCustom(datasource),\n }\n await publishEvent(Event.DATASOURCE_DELETED, properties)\n}\n\nexport default {\n created,\n updated,\n deleted,\n}\n", "import { publishEvent } from \"../events\"\nimport { Event, SMTPCreatedEvent, SMTPUpdatedEvent } from \"@budibase/types\"\n\nasync function SMTPCreated(timestamp?: string | number) {\n const properties: SMTPCreatedEvent = {}\n await publishEvent(Event.EMAIL_SMTP_CREATED, properties, timestamp)\n}\n\nasync function SMTPUpdated() {\n const properties: SMTPUpdatedEvent = {}\n await publishEvent(Event.EMAIL_SMTP_UPDATED, properties)\n}\n\nexport default {\n SMTPCreated,\n SMTPUpdated,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n LicenseActivatedEvent,\n LicensePlanChangedEvent,\n PlanType,\n Account,\n LicensePortalOpenedEvent,\n LicenseCheckoutSuccessEvent,\n LicenseCheckoutOpenedEvent,\n LicensePaymentFailedEvent,\n LicensePaymentRecoveredEvent,\n PriceDuration,\n} from \"@budibase/types\"\n\nasync function planChanged(\n account: Account,\n opts: {\n from: PlanType\n to: PlanType\n fromQuantity: number | undefined\n toQuantity: number | undefined\n fromDuration: PriceDuration | undefined\n toDuration: PriceDuration | undefined\n }\n) {\n const properties: LicensePlanChangedEvent = {\n accountId: account.accountId,\n ...opts,\n }\n await publishEvent(Event.LICENSE_PLAN_CHANGED, properties)\n}\n\nasync function activated(account: Account) {\n const properties: LicenseActivatedEvent = {\n accountId: account.accountId,\n }\n await publishEvent(Event.LICENSE_ACTIVATED, properties)\n}\n\nasync function checkoutOpened(account: Account) {\n const properties: LicenseCheckoutOpenedEvent = {\n accountId: account.accountId,\n }\n await publishEvent(Event.LICENSE_CHECKOUT_OPENED, properties)\n}\n\nasync function checkoutSuccess(account: Account) {\n const properties: LicenseCheckoutSuccessEvent = {\n accountId: account.accountId,\n }\n await publishEvent(Event.LICENSE_CHECKOUT_SUCCESS, properties)\n}\n\nasync function portalOpened(account: Account) {\n const properties: 
LicensePortalOpenedEvent = {\n accountId: account.accountId,\n }\n await publishEvent(Event.LICENSE_PORTAL_OPENED, properties)\n}\n\nasync function paymentFailed(account: Account) {\n const properties: LicensePaymentFailedEvent = {\n accountId: account.accountId,\n }\n await publishEvent(Event.LICENSE_PAYMENT_FAILED, properties)\n}\n\nasync function paymentRecovered(account: Account) {\n const properties: LicensePaymentRecoveredEvent = {\n accountId: account.accountId,\n }\n await publishEvent(Event.LICENSE_PAYMENT_RECOVERED, properties)\n}\n\nexport default {\n planChanged,\n activated,\n checkoutOpened,\n checkoutSuccess,\n portalOpened,\n paymentFailed,\n paymentRecovered,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n Layout,\n LayoutCreatedEvent,\n LayoutDeletedEvent,\n} from \"@budibase/types\"\n\nasync function created(layout: Layout, timestamp?: string | number) {\n const properties: LayoutCreatedEvent = {\n layoutId: layout._id as string,\n }\n await publishEvent(Event.LAYOUT_CREATED, properties, timestamp)\n}\n\nasync function deleted(layoutId: string) {\n const properties: LayoutDeletedEvent = {\n layoutId,\n }\n await publishEvent(Event.LAYOUT_DELETED, properties)\n}\n\nexport default {\n created,\n deleted,\n}\n", "import { publishEvent } from \"../events\"\nimport { Event } from \"@budibase/types\"\n\nasync function nameUpdated(timestamp?: string | number) {\n const properties = {}\n await publishEvent(Event.ORG_NAME_UPDATED, properties, timestamp)\n}\n\nasync function logoUpdated(timestamp?: string | number) {\n const properties = {}\n await publishEvent(Event.ORG_LOGO_UPDATED, properties, timestamp)\n}\n\nasync function platformURLUpdated(timestamp?: string | number) {\n const properties = {}\n await publishEvent(Event.ORG_PLATFORM_URL_UPDATED, properties, timestamp)\n}\n\n// TODO\n\nasync function analyticsOptOut() {\n const properties = {}\n await publishEvent(Event.ANALYTICS_OPT_OUT, properties)\n}\n\nasync function analyticsOptIn() {\n const properties = {}\n await publishEvent(Event.ANALYTICS_OPT_OUT, properties)\n}\n\nexport default {\n nameUpdated,\n logoUpdated,\n platformURLUpdated,\n analyticsOptOut,\n analyticsOptIn,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n Datasource,\n Query,\n QueryCreatedEvent,\n QueryUpdatedEvent,\n QueryDeletedEvent,\n QueryImportedEvent,\n QueryPreviewedEvent,\n QueriesRunEvent,\n} from \"@budibase/types\"\n\n/* eslint-disable */\n\nconst created = async (\n datasource: Datasource,\n query: Query,\n timestamp?: string | number\n) => {\n const properties: QueryCreatedEvent = {\n queryId: query._id as string,\n datasourceId: datasource._id as string,\n source: datasource.source,\n queryVerb: query.queryVerb,\n }\n await publishEvent(Event.QUERY_CREATED, properties, timestamp)\n}\n\nconst updated = async (datasource: Datasource, query: Query) => {\n const properties: QueryUpdatedEvent = {\n queryId: query._id as string,\n datasourceId: datasource._id as string,\n source: datasource.source,\n queryVerb: query.queryVerb,\n }\n await publishEvent(Event.QUERY_UPDATED, properties)\n}\n\nconst deleted = async (datasource: Datasource, query: Query) => {\n const properties: QueryDeletedEvent = {\n queryId: query._id as string,\n datasourceId: datasource._id as string,\n source: datasource.source,\n queryVerb: query.queryVerb,\n }\n await publishEvent(Event.QUERY_DELETED, properties)\n}\n\nconst imported = async (\n datasource: Datasource,\n importSource: any,\n count: any\n) => {\n const 
properties: QueryImportedEvent = {\n datasourceId: datasource._id as string,\n source: datasource.source,\n count,\n importSource,\n }\n await publishEvent(Event.QUERY_IMPORT, properties)\n}\n\nconst run = async (count: number, timestamp?: string | number) => {\n const properties: QueriesRunEvent = {\n count,\n }\n await publishEvent(Event.QUERIES_RUN, properties, timestamp)\n}\n\nconst previewed = async (datasource: Datasource, query: Query) => {\n const properties: QueryPreviewedEvent = {\n queryId: query._id,\n datasourceId: datasource._id as string,\n source: datasource.source,\n queryVerb: query.queryVerb,\n }\n await publishEvent(Event.QUERY_PREVIEWED, properties)\n}\n\nexport default {\n created,\n updated,\n deleted,\n imported,\n run,\n previewed,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n Role,\n RoleAssignedEvent,\n RoleCreatedEvent,\n RoleDeletedEvent,\n RoleUnassignedEvent,\n RoleUpdatedEvent,\n User,\n} from \"@budibase/types\"\n\nasync function created(role: Role, timestamp?: string | number) {\n const properties: RoleCreatedEvent = {\n roleId: role._id as string,\n permissionId: role.permissionId,\n inherits: role.inherits,\n }\n await publishEvent(Event.ROLE_CREATED, properties, timestamp)\n}\n\nasync function updated(role: Role) {\n const properties: RoleUpdatedEvent = {\n roleId: role._id as string,\n permissionId: role.permissionId,\n inherits: role.inherits,\n }\n await publishEvent(Event.ROLE_UPDATED, properties)\n}\n\nasync function deleted(role: Role) {\n const properties: RoleDeletedEvent = {\n roleId: role._id as string,\n permissionId: role.permissionId,\n inherits: role.inherits,\n }\n await publishEvent(Event.ROLE_DELETED, properties)\n}\n\nasync function assigned(user: User, roleId: string, timestamp?: number) {\n const properties: RoleAssignedEvent = {\n userId: user._id as string,\n roleId,\n }\n await publishEvent(Event.ROLE_ASSIGNED, properties, timestamp)\n}\n\nasync function unassigned(user: User, roleId: string) {\n const properties: RoleUnassignedEvent = {\n userId: user._id as string,\n roleId,\n }\n await publishEvent(Event.ROLE_UNASSIGNED, properties)\n}\n\nexport default {\n created,\n updated,\n deleted,\n assigned,\n unassigned,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n Screen,\n ScreenCreatedEvent,\n ScreenDeletedEvent,\n} from \"@budibase/types\"\n\nasync function created(screen: Screen, timestamp?: string | number) {\n const properties: ScreenCreatedEvent = {\n layoutId: screen.layoutId,\n screenId: screen._id as string,\n roleId: screen.routing.roleId,\n audited: {\n name: screen.routing?.route,\n },\n }\n await publishEvent(Event.SCREEN_CREATED, properties, timestamp)\n}\n\nasync function deleted(screen: Screen) {\n const properties: ScreenDeletedEvent = {\n layoutId: screen.layoutId,\n screenId: screen._id as string,\n roleId: screen.routing.roleId,\n audited: {\n name: screen.routing?.route,\n },\n }\n await publishEvent(Event.SCREEN_DELETED, properties)\n}\n\nexport default {\n created,\n deleted,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n RowsImportedEvent,\n RowsCreatedEvent,\n Table,\n} from \"@budibase/types\"\n\n/* eslint-disable */\n\nconst created = async (count: number, timestamp?: string | number) => {\n const properties: RowsCreatedEvent = {\n count,\n }\n await publishEvent(Event.ROWS_CREATED, properties, timestamp)\n}\n\nconst imported = async (table: Table, count: number) => {\n const properties: RowsImportedEvent = {\n tableId: table._id 
as string,\n count,\n }\n await publishEvent(Event.ROWS_IMPORTED, properties)\n}\n\nexport default {\n created,\n imported,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n TableExportFormat,\n Table,\n TableCreatedEvent,\n TableUpdatedEvent,\n TableDeletedEvent,\n TableExportedEvent,\n TableImportedEvent,\n} from \"@budibase/types\"\n\nasync function created(table: Table, timestamp?: string | number) {\n const properties: TableCreatedEvent = {\n tableId: table._id as string,\n audited: {\n name: table.name,\n },\n }\n await publishEvent(Event.TABLE_CREATED, properties, timestamp)\n}\n\nasync function updated(table: Table) {\n const properties: TableUpdatedEvent = {\n tableId: table._id as string,\n audited: {\n name: table.name,\n },\n }\n await publishEvent(Event.TABLE_UPDATED, properties)\n}\n\nasync function deleted(table: Table) {\n const properties: TableDeletedEvent = {\n tableId: table._id as string,\n audited: {\n name: table.name,\n },\n }\n await publishEvent(Event.TABLE_DELETED, properties)\n}\n\nasync function exported(table: Table, format: TableExportFormat) {\n const properties: TableExportedEvent = {\n tableId: table._id as string,\n format,\n audited: {\n name: table.name,\n },\n }\n await publishEvent(Event.TABLE_EXPORTED, properties)\n}\n\nasync function imported(table: Table) {\n const properties: TableImportedEvent = {\n tableId: table._id as string,\n audited: {\n name: table.name,\n },\n }\n await publishEvent(Event.TABLE_IMPORTED, properties)\n}\n\nexport default {\n created,\n updated,\n deleted,\n exported,\n imported,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n App,\n BuilderServedEvent,\n Event,\n AppPreviewServedEvent,\n AppServedEvent,\n} from \"@budibase/types\"\n\nasync function servedBuilder(timezone: string) {\n const properties: BuilderServedEvent = {\n timezone,\n }\n await publishEvent(Event.SERVED_BUILDER, properties)\n}\n\nasync function servedApp(\n app: App,\n timezone: string,\n embed?: boolean | undefined\n) {\n const properties: AppServedEvent = {\n appVersion: app.version,\n timezone,\n embed: embed === true,\n }\n await publishEvent(Event.SERVED_APP, properties)\n}\n\nasync function servedAppPreview(app: App, timezone: string) {\n const properties: AppPreviewServedEvent = {\n appId: app.appId,\n appVersion: app.version,\n timezone,\n }\n await publishEvent(Event.SERVED_APP_PREVIEW, properties)\n}\n\nexport default {\n servedBuilder,\n servedApp,\n servedAppPreview,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n User,\n UserCreatedEvent,\n UserDataCollaborationEvent,\n UserDeletedEvent,\n UserInviteAcceptedEvent,\n UserInvitedEvent,\n UserPasswordForceResetEvent,\n UserPasswordResetEvent,\n UserPasswordResetRequestedEvent,\n UserPasswordUpdatedEvent,\n UserPermissionAssignedEvent,\n UserPermissionRemovedEvent,\n UserUpdatedEvent,\n UserOnboardingEvent,\n} from \"@budibase/types\"\nimport { isScim } from \"../../context\"\n\nasync function created(user: User, timestamp?: number) {\n const properties: UserCreatedEvent = {\n userId: user._id as string,\n viaScim: isScim(),\n audited: {\n email: user.email,\n },\n }\n await publishEvent(Event.USER_CREATED, properties, timestamp)\n}\n\nasync function updated(user: User) {\n const properties: UserUpdatedEvent = {\n userId: user._id as string,\n viaScim: isScim(),\n audited: {\n email: user.email,\n },\n }\n await publishEvent(Event.USER_UPDATED, properties)\n}\n\nasync function deleted(user: User) {\n const properties: UserDeletedEvent 
= {\n userId: user._id as string,\n viaScim: isScim(),\n audited: {\n email: user.email,\n },\n }\n await publishEvent(Event.USER_DELETED, properties)\n}\n\nexport async function onboardingComplete(user: User) {\n const properties: UserOnboardingEvent = {\n userId: user._id as string,\n audited: {\n email: user.email,\n },\n }\n await publishEvent(Event.USER_ONBOARDING_COMPLETE, properties)\n}\n\n// PERMISSIONS\n\nasync function permissionAdminAssigned(user: User, timestamp?: number) {\n const properties: UserPermissionAssignedEvent = {\n userId: user._id as string,\n audited: {\n email: user.email,\n },\n }\n await publishEvent(\n Event.USER_PERMISSION_ADMIN_ASSIGNED,\n properties,\n timestamp\n )\n}\n\nasync function permissionAdminRemoved(user: User) {\n const properties: UserPermissionRemovedEvent = {\n userId: user._id as string,\n audited: {\n email: user.email,\n },\n }\n await publishEvent(Event.USER_PERMISSION_ADMIN_REMOVED, properties)\n}\n\nasync function permissionBuilderAssigned(user: User, timestamp?: number) {\n const properties: UserPermissionAssignedEvent = {\n userId: user._id as string,\n audited: {\n email: user.email,\n },\n }\n await publishEvent(\n Event.USER_PERMISSION_BUILDER_ASSIGNED,\n properties,\n timestamp\n )\n}\n\nasync function permissionBuilderRemoved(user: User) {\n const properties: UserPermissionRemovedEvent = {\n userId: user._id as string,\n audited: {\n email: user.email,\n },\n }\n await publishEvent(Event.USER_PERMISSION_BUILDER_REMOVED, properties)\n}\n\n// INVITE\n\nasync function invited(email: string) {\n const properties: UserInvitedEvent = {\n audited: {\n email,\n },\n }\n await publishEvent(Event.USER_INVITED, properties)\n}\n\nasync function inviteAccepted(user: User) {\n const properties: UserInviteAcceptedEvent = {\n userId: user._id as string,\n audited: {\n email: user.email,\n },\n }\n await publishEvent(Event.USER_INVITED_ACCEPTED, properties)\n}\n\n// PASSWORD\n\nasync function passwordForceReset(user: User) {\n const properties: UserPasswordForceResetEvent = {\n userId: user._id as string,\n audited: {\n email: user.email,\n },\n }\n await publishEvent(Event.USER_PASSWORD_FORCE_RESET, properties)\n}\n\nasync function passwordUpdated(user: User) {\n const properties: UserPasswordUpdatedEvent = {\n userId: user._id as string,\n audited: {\n email: user.email,\n },\n }\n await publishEvent(Event.USER_PASSWORD_UPDATED, properties)\n}\n\nasync function passwordResetRequested(user: User) {\n const properties: UserPasswordResetRequestedEvent = {\n userId: user._id as string,\n audited: {\n email: user.email,\n },\n }\n await publishEvent(Event.USER_PASSWORD_RESET_REQUESTED, properties)\n}\n\nasync function passwordReset(user: User) {\n const properties: UserPasswordResetEvent = {\n userId: user._id as string,\n audited: {\n email: user.email,\n },\n }\n await publishEvent(Event.USER_PASSWORD_RESET, properties)\n}\n\n// COLLABORATION\n\nasync function dataCollaboration(users: number) {\n const properties: UserDataCollaborationEvent = {\n users,\n }\n await publishEvent(Event.USER_DATA_COLLABORATION, properties)\n}\n\nexport default {\n created,\n updated,\n deleted,\n permissionAdminAssigned,\n permissionAdminRemoved,\n permissionBuilderAssigned,\n permissionBuilderRemoved,\n onboardingComplete,\n invited,\n inviteAccepted,\n passwordForceReset,\n passwordUpdated,\n passwordResetRequested,\n passwordReset,\n dataCollaboration,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n ViewCalculationCreatedEvent,\n 
ViewCalculationDeletedEvent,\n ViewCalculationUpdatedEvent,\n ViewCreatedEvent,\n ViewDeletedEvent,\n ViewExportedEvent,\n ViewFilterCreatedEvent,\n ViewFilterDeletedEvent,\n ViewFilterUpdatedEvent,\n ViewUpdatedEvent,\n View,\n ViewCalculation,\n Table,\n TableExportFormat,\n} from \"@budibase/types\"\n\n/* eslint-disable */\n\nasync function created(view: View, timestamp?: string | number) {\n const properties: ViewCreatedEvent = {\n tableId: view.tableId,\n }\n await publishEvent(Event.VIEW_CREATED, properties, timestamp)\n}\n\nasync function updated(view: View) {\n const properties: ViewUpdatedEvent = {\n tableId: view.tableId,\n }\n await publishEvent(Event.VIEW_UPDATED, properties)\n}\n\nasync function deleted(view: View) {\n const properties: ViewDeletedEvent = {\n tableId: view.tableId,\n }\n await publishEvent(Event.VIEW_DELETED, properties)\n}\n\nasync function exported(table: Table, format: TableExportFormat) {\n const properties: ViewExportedEvent = {\n tableId: table._id as string,\n format,\n }\n await publishEvent(Event.VIEW_EXPORTED, properties)\n}\n\nasync function filterCreated(view: View, timestamp?: string | number) {\n const properties: ViewFilterCreatedEvent = {\n tableId: view.tableId,\n }\n await publishEvent(Event.VIEW_FILTER_CREATED, properties, timestamp)\n}\n\nasync function filterUpdated(view: View) {\n const properties: ViewFilterUpdatedEvent = {\n tableId: view.tableId,\n }\n await publishEvent(Event.VIEW_FILTER_UPDATED, properties)\n}\n\nasync function filterDeleted(view: View) {\n const properties: ViewFilterDeletedEvent = {\n tableId: view.tableId,\n }\n await publishEvent(Event.VIEW_FILTER_DELETED, properties)\n}\n\nasync function calculationCreated(view: View, timestamp?: string | number) {\n const properties: ViewCalculationCreatedEvent = {\n tableId: view.tableId,\n calculation: view.calculation as ViewCalculation,\n }\n await publishEvent(Event.VIEW_CALCULATION_CREATED, properties, timestamp)\n}\n\nasync function calculationUpdated(view: View) {\n const properties: ViewCalculationUpdatedEvent = {\n tableId: view.tableId,\n calculation: view.calculation as ViewCalculation,\n }\n await publishEvent(Event.VIEW_CALCULATION_UPDATED, properties)\n}\n\nasync function calculationDeleted(existingView: View) {\n const properties: ViewCalculationDeletedEvent = {\n tableId: existingView.tableId,\n calculation: existingView.calculation as ViewCalculation,\n }\n await publishEvent(Event.VIEW_CALCULATION_DELETED, properties)\n}\n\nexport default {\n created,\n updated,\n deleted,\n exported,\n filterCreated,\n filterUpdated,\n filterDeleted,\n calculationCreated,\n calculationUpdated,\n calculationDeleted,\n}\n", "import { publishEvent } from \"../events\"\nimport { Event, VersionCheckedEvent, VersionChangeEvent } from \"@budibase/types\"\n\nasync function versionChecked(version: string) {\n const properties: VersionCheckedEvent = {\n currentVersion: version,\n }\n await publishEvent(Event.INSTALLATION_VERSION_CHECKED, properties)\n}\n\nasync function upgraded(from: string, to: string) {\n const properties: VersionChangeEvent = {\n from,\n to,\n }\n\n await publishEvent(Event.INSTALLATION_VERSION_UPGRADED, properties)\n}\n\nasync function downgraded(from: string, to: string) {\n const properties: VersionChangeEvent = {\n from,\n to,\n }\n await publishEvent(Event.INSTALLATION_VERSION_DOWNGRADED, properties)\n}\n\nasync function firstStartup() {\n const properties = {}\n await publishEvent(Event.INSTALLATION_FIRST_STARTUP, properties)\n}\n\nexport default {\n 
versionChecked,\n upgraded,\n downgraded,\n firstStartup,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n AppBackfillSucceededEvent,\n AppBackfillFailedEvent,\n TenantBackfillSucceededEvent,\n TenantBackfillFailedEvent,\n InstallationBackfillSucceededEvent,\n InstallationBackfillFailedEvent,\n} from \"@budibase/types\"\nimport env from \"../../environment\"\n\nconst shouldSkip = !env.SELF_HOSTED && !env.isDev()\n\nasync function appSucceeded(properties: AppBackfillSucceededEvent) {\n if (shouldSkip) {\n return\n }\n await publishEvent(Event.APP_BACKFILL_SUCCEEDED, properties)\n}\n\nasync function appFailed(error: any) {\n if (shouldSkip) {\n return\n }\n const properties: AppBackfillFailedEvent = {\n error: JSON.stringify(error, Object.getOwnPropertyNames(error)),\n }\n await publishEvent(Event.APP_BACKFILL_FAILED, properties)\n}\n\nasync function tenantSucceeded(properties: TenantBackfillSucceededEvent) {\n if (shouldSkip) {\n return\n }\n await publishEvent(Event.TENANT_BACKFILL_SUCCEEDED, properties)\n}\n\nasync function tenantFailed(error: any) {\n if (shouldSkip) {\n return\n }\n const properties: TenantBackfillFailedEvent = {\n error: JSON.stringify(error, Object.getOwnPropertyNames(error)),\n }\n await publishEvent(Event.TENANT_BACKFILL_FAILED, properties)\n}\n\nasync function installationSucceeded() {\n if (shouldSkip) {\n return\n }\n const properties: InstallationBackfillSucceededEvent = {}\n await publishEvent(Event.INSTALLATION_BACKFILL_SUCCEEDED, properties)\n}\n\nasync function installationFailed(error: any) {\n if (shouldSkip) {\n return\n }\n const properties: InstallationBackfillFailedEvent = {\n error: JSON.stringify(error, Object.getOwnPropertyNames(error)),\n }\n await publishEvent(Event.INSTALLATION_BACKFILL_FAILED, properties)\n}\n\nexport default {\n appSucceeded,\n appFailed,\n tenantSucceeded,\n tenantFailed,\n installationSucceeded,\n installationFailed,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n UserGroup,\n GroupCreatedEvent,\n GroupDeletedEvent,\n GroupUpdatedEvent,\n GroupUsersAddedEvent,\n GroupUsersDeletedEvent,\n GroupAddedOnboardingEvent,\n GroupPermissionsEditedEvent,\n} from \"@budibase/types\"\nimport { isScim } from \"../../context\"\n\nasync function created(group: UserGroup, timestamp?: number) {\n const properties: GroupCreatedEvent = {\n groupId: group._id as string,\n viaScim: isScim(),\n audited: {\n name: group.name,\n },\n }\n await publishEvent(Event.USER_GROUP_CREATED, properties, timestamp)\n}\n\nasync function updated(group: UserGroup) {\n const properties: GroupUpdatedEvent = {\n groupId: group._id as string,\n viaScim: isScim(),\n audited: {\n name: group.name,\n },\n }\n await publishEvent(Event.USER_GROUP_UPDATED, properties)\n}\n\nasync function deleted(group: UserGroup) {\n const properties: GroupDeletedEvent = {\n groupId: group._id as string,\n viaScim: isScim(),\n audited: {\n name: group.name,\n },\n }\n await publishEvent(Event.USER_GROUP_DELETED, properties)\n}\n\nasync function usersAdded(count: number, group: UserGroup) {\n const properties: GroupUsersAddedEvent = {\n count,\n groupId: group._id as string,\n viaScim: isScim(),\n audited: {\n name: group.name,\n },\n }\n await publishEvent(Event.USER_GROUP_USERS_ADDED, properties)\n}\n\nasync function usersDeleted(count: number, group: UserGroup) {\n const properties: GroupUsersDeletedEvent = {\n count,\n groupId: group._id as string,\n viaScim: isScim(),\n audited: {\n name: group.name,\n },\n }\n await 
publishEvent(Event.USER_GROUP_USERS_REMOVED, properties)\n}\n\nasync function createdOnboarding(groupId: string) {\n const properties: GroupAddedOnboardingEvent = {\n groupId: groupId,\n onboarding: true,\n }\n await publishEvent(Event.USER_GROUP_ONBOARDING, properties)\n}\n\nasync function permissionsEdited(group: UserGroup) {\n const properties: GroupPermissionsEditedEvent = {\n permissions: group.roles!,\n groupId: group._id as string,\n audited: {\n name: group.name,\n },\n }\n await publishEvent(Event.USER_GROUP_PERMISSIONS_EDITED, properties)\n}\n\nexport default {\n created,\n updated,\n deleted,\n usersAdded,\n usersDeleted,\n createdOnboarding,\n permissionsEdited,\n}\n", "import { publishEvent } from \"../events\"\nimport {\n Event,\n Plugin,\n PluginDeletedEvent,\n PluginImportedEvent,\n PluginInitEvent,\n} from \"@budibase/types\"\n\nasync function init(plugin: Plugin) {\n const properties: PluginInitEvent = {\n type: plugin.schema.type,\n name: plugin.name,\n description: plugin.description,\n version: plugin.version,\n }\n await publishEvent(Event.PLUGIN_INIT, properties)\n}\n\nasync function imported(plugin: Plugin) {\n const properties: PluginImportedEvent = {\n pluginId: plugin._id as string,\n type: plugin.schema.type,\n source: plugin.source,\n name: plugin.name,\n description: plugin.description,\n version: plugin.version,\n }\n await publishEvent(Event.PLUGIN_IMPORTED, properties)\n}\n\nasync function deleted(plugin: Plugin) {\n const properties: PluginDeletedEvent = {\n pluginId: plugin._id as string,\n type: plugin.schema.type,\n name: plugin.name,\n description: plugin.description,\n version: plugin.version,\n }\n await publishEvent(Event.PLUGIN_DELETED, properties)\n}\n\nexport default {\n init,\n imported,\n deleted,\n}\n", "import {\n AppBackup,\n AppBackupRestoreEvent,\n AppBackupTriggeredEvent,\n AppBackupTrigger,\n AppBackupType,\n Event,\n} from \"@budibase/types\"\nimport { publishEvent } from \"../events\"\n\nasync function appBackupRestored(backup: AppBackup) {\n const properties: AppBackupRestoreEvent = {\n appId: backup.appId,\n restoreId: backup._id!,\n backupCreatedAt: backup.timestamp,\n name: backup.name as string,\n }\n\n await publishEvent(Event.APP_BACKUP_RESTORED, properties)\n}\n\nasync function appBackupTriggered(\n appId: string,\n backupId: string,\n type: AppBackupType,\n trigger: AppBackupTrigger,\n name: string\n) {\n const properties: AppBackupTriggeredEvent = {\n appId: appId,\n backupId,\n type,\n trigger,\n name,\n }\n await publishEvent(Event.APP_BACKUP_TRIGGERED, properties)\n}\n\nexport default {\n appBackupRestored,\n appBackupTriggered,\n}\n", "import {\n Event,\n EnvironmentVariableCreatedEvent,\n EnvironmentVariableDeletedEvent,\n EnvironmentVariableUpgradePanelOpenedEvent,\n} from \"@budibase/types\"\nimport { publishEvent } from \"../events\"\n\nasync function created(name: string, environments: string[]) {\n const properties: EnvironmentVariableCreatedEvent = {\n name,\n environments,\n }\n await publishEvent(Event.ENVIRONMENT_VARIABLE_CREATED, properties)\n}\n\nasync function deleted(name: string) {\n const properties: EnvironmentVariableDeletedEvent = {\n name,\n }\n await publishEvent(Event.ENVIRONMENT_VARIABLE_DELETED, properties)\n}\n\nasync function upgradePanelOpened(userId: string) {\n const properties: EnvironmentVariableUpgradePanelOpenedEvent = {\n userId,\n }\n await publishEvent(\n Event.ENVIRONMENT_VARIABLE_UPGRADE_PANEL_OPENED,\n properties\n )\n}\n\nexport default {\n created,\n deleted,\n 
upgradePanelOpened,\n}\n", "import {\n Event,\n AuditLogSearchParams,\n AuditLogFilteredEvent,\n AuditLogDownloadedEvent,\n} from \"@budibase/types\"\nimport { publishEvent } from \"../events\"\n\nasync function filtered(search: AuditLogSearchParams) {\n const properties: AuditLogFilteredEvent = {\n filters: search,\n }\n await publishEvent(Event.AUDIT_LOGS_FILTERED, properties)\n}\n\nasync function downloaded(search: AuditLogSearchParams) {\n const properties: AuditLogDownloadedEvent = {\n filters: search,\n }\n await publishEvent(Event.AUDIT_LOGS_DOWNLOADED, properties)\n}\n\nexport default {\n filtered,\n downloaded,\n}\n", "const redis = require(\"../redis/init\")\nconst { v4: uuidv4 } = require(\"uuid\")\nconst { logWarn } = require(\"../logging\")\n\nimport env from \"../environment\"\nimport {\n Session,\n ScannedSession,\n SessionKey,\n CreateSession,\n} from \"@budibase/types\"\n\n// a week in seconds\nconst EXPIRY_SECONDS = 86400 * 7\n\nfunction makeSessionID(userId: string, sessionId: string) {\n return `${userId}/${sessionId}`\n}\n\nexport async function getSessionsForUser(userId: string): Promise<Session[]> {\n if (!userId) {\n console.trace(\"Cannot get sessions for undefined userId\")\n return []\n }\n const client = await redis.getSessionClient()\n const sessions: ScannedSession[] = await client.scan(userId)\n return sessions.map(session => session.value)\n}\n\nexport async function invalidateSessions(\n userId: string,\n opts: { sessionIds?: string[]; reason?: string } = {}\n) {\n try {\n const reason = opts?.reason || \"unknown\"\n let sessionIds: string[] = opts.sessionIds || []\n let sessionKeys: SessionKey[]\n\n // If no sessionIds, get all the sessions for the user\n if (sessionIds.length === 0) {\n const sessions = await getSessionsForUser(userId)\n sessionKeys = sessions.map(session => ({\n key: makeSessionID(session.userId, session.sessionId),\n }))\n } else {\n // use the passed array of sessionIds\n sessionIds = Array.isArray(sessionIds) ? sessionIds : [sessionIds]\n sessionKeys = sessionIds.map(sessionId => ({\n key: makeSessionID(userId, sessionId),\n }))\n }\n\n if (sessionKeys && sessionKeys.length > 0) {\n const client = await redis.getSessionClient()\n const promises = []\n for (let sessionKey of sessionKeys) {\n promises.push(client.delete(sessionKey.key))\n }\n if (!env.isTest()) {\n logWarn(\n `Invalidating sessions for ${userId} (reason: ${reason}) - ${sessionKeys\n .map(sessionKey => sessionKey.key)\n .join(\", \")}`\n )\n }\n await Promise.all(promises)\n }\n } catch (err) {\n console.error(`Error invalidating sessions: ${err}`)\n }\n}\n\nexport async function createASession(\n userId: string,\n createSession: CreateSession\n) {\n // invalidate all other sessions\n await invalidateSessions(userId, { reason: \"creation\" })\n\n const client = await redis.getSessionClient()\n const sessionId = createSession.sessionId\n const csrfToken = createSession.csrfToken ? 
createSession.csrfToken : uuidv4()\n const key = makeSessionID(userId, sessionId)\n\n const session: Session = {\n ...createSession,\n csrfToken,\n createdAt: new Date().toISOString(),\n lastAccessedAt: new Date().toISOString(),\n userId,\n }\n await client.store(key, session, EXPIRY_SECONDS)\n return session\n}\n\nexport async function updateSessionTTL(session: Session) {\n const client = await redis.getSessionClient()\n const key = makeSessionID(session.userId, session.sessionId)\n session.lastAccessedAt = new Date().toISOString()\n await client.store(key, session, EXPIRY_SECONDS)\n}\n\nexport async function endSession(userId: string, sessionId: string) {\n const client = await redis.getSessionClient()\n await client.delete(makeSessionID(userId, sessionId))\n}\n\nexport async function getSession(\n userId: string,\n sessionId: string\n): Promise<Session> {\n if (!userId || !sessionId) {\n throw new Error(`Invalid session details - ${userId} - ${sessionId}`)\n }\n const client = await redis.getSessionClient()\n const session = await client.get(makeSessionID(userId, sessionId))\n if (!session) {\n throw new Error(`Session not found - ${userId} - ${sessionId}`)\n }\n return session\n}\n", "export * from \"./auth\"\n", "import env from \"../environment\"\n\nexport const PASSWORD_MIN_LENGTH = +(env.PASSWORD_MIN_LENGTH || 8)\nexport const PASSWORD_MAX_LENGTH = +(env.PASSWORD_MAX_LENGTH || 512)\n\nexport function validatePassword(\n password: string\n): { valid: true } | { valid: false; error: string } {\n if (!password || password.length < PASSWORD_MIN_LENGTH) {\n return {\n valid: false,\n error: `Password invalid. Minimum ${PASSWORD_MIN_LENGTH} characters.`,\n }\n }\n\n if (password.length > PASSWORD_MAX_LENGTH) {\n return {\n valid: false,\n error: `Password invalid. 
Maximum ${PASSWORD_MAX_LENGTH} characters.`,\n }\n }\n\n return { valid: true }\n}\n", "import BaseCache from \"./base\"\nimport { getWritethroughClient } from \"../redis/init\"\nimport { logWarn } from \"../logging\"\nimport { Database, Document, LockName, LockType } from \"@budibase/types\"\nimport * as locks from \"../redis/redlockImpl\"\n\nconst DEFAULT_WRITE_RATE_MS = 10000\nlet CACHE: BaseCache | null = null\n\ninterface CacheItem<T extends Document> {\n doc: any\n lastWrite: number\n}\n\nasync function getCache() {\n if (!CACHE) {\n const client = await getWritethroughClient()\n CACHE = new BaseCache(client)\n }\n return CACHE\n}\n\nfunction makeCacheKey(db: Database, key: string) {\n return db.name + key\n}\n\nfunction makeCacheItem<T extends Document>(\n doc: T,\n lastWrite: number | null = null\n): CacheItem<T> {\n return { doc, lastWrite: lastWrite || Date.now() }\n}\n\nasync function put(\n db: Database,\n doc: Document,\n writeRateMs: number = DEFAULT_WRITE_RATE_MS\n) {\n const cache = await getCache()\n const key = doc._id\n let cacheItem: CacheItem<any> | undefined\n if (key) {\n cacheItem = await cache.get(makeCacheKey(db, key))\n }\n const updateDb = !cacheItem || cacheItem.lastWrite < Date.now() - writeRateMs\n let output = doc\n if (updateDb) {\n const lockResponse = await locks.doWithLock(\n {\n type: LockType.TRY_ONCE,\n name: LockName.PERSIST_WRITETHROUGH,\n resource: key,\n ttl: 15000,\n },\n async () => {\n const writeDb = async (toWrite: any) => {\n // doc should contain the _id and _rev\n const response = await db.put(toWrite, { force: true })\n output._id = response.id\n output._rev = response.rev\n }\n try {\n await writeDb(doc)\n } catch (err: any) {\n if (err.status !== 409) {\n throw err\n } else {\n // Swallow 409s but log them\n logWarn(`Ignoring conflict in write-through cache`)\n }\n }\n }\n )\n\n if (!lockResponse.executed) {\n logWarn(`Ignoring redlock conflict in write-through cache`)\n }\n }\n // if we are updating the DB then need to set the lastWrite to now\n cacheItem = makeCacheItem(output, updateDb ? null : cacheItem?.lastWrite)\n if (output._id) {\n await cache.store(makeCacheKey(db, output._id), cacheItem)\n }\n return { ok: true, id: output._id, rev: output._rev }\n}\n\nasync function get<T extends Document>(db: Database, id: string): Promise<T> {\n const cache = await getCache()\n const cacheKey = makeCacheKey(db, id)\n let cacheItem: CacheItem<T> = await cache.get(cacheKey)\n if (!cacheItem) {\n const doc = await db.get<T>(id)\n cacheItem = makeCacheItem(doc)\n await cache.store(cacheKey, cacheItem)\n }\n return cacheItem.doc\n}\n\nasync function remove(db: Database, docOrId: any, rev?: any): Promise<void> {\n const cache = await getCache()\n if (!docOrId) {\n throw new Error(\"No ID/Rev provided.\")\n }\n const id = typeof docOrId === \"string\" ? docOrId : docOrId._id\n rev = typeof docOrId === \"string\" ? 
rev : docOrId._rev\n try {\n await cache.delete(makeCacheKey(db, id))\n } finally {\n await db.remove(id, rev)\n }\n}\n\nexport class Writethrough {\n db: Database\n writeRateMs: number\n\n constructor(db: Database, writeRateMs: number = DEFAULT_WRITE_RATE_MS) {\n this.db = db\n this.writeRateMs = writeRateMs\n }\n\n async put(doc: any, writeRateMs: number = this.writeRateMs) {\n return put(this.db, doc, writeRateMs)\n }\n\n async get<T extends Document>(id: string) {\n return get<T>(this.db, id)\n }\n\n async remove(docOrId: any, rev?: any) {\n return remove(this.db, docOrId, rev)\n }\n}\n", "import * as redis from \"../redis/init\"\nimport * as utils from \"../utils\"\nimport { Duration } from \"../utils\"\n\nconst TTL_SECONDS = Duration.fromHours(1).toSeconds()\n\ninterface PasswordReset {\n userId: string\n info: any\n}\n\n/**\n * Given a user ID this will store a code (that is returned) for an hour in redis.\n * The user can then return this code for resetting their password (through their reset link).\n * @param userId the ID of the user which is to be reset.\n * @param info Info about the user/the reset process.\n * @return returns the code that was stored to redis.\n */\nexport async function createCode(userId: string, info: any): Promise<string> {\n const code = utils.newid()\n const client = await redis.getPasswordResetClient()\n await client.store(code, { userId, info }, TTL_SECONDS)\n return code\n}\n\n/**\n * Given a reset code this will lookup to redis, check if the code is valid.\n * @param code The code provided via the email link.\n * @return returns the user ID if it is found\n */\nexport async function getCode(code: string): Promise<PasswordReset> {\n const client = await redis.getPasswordResetClient()\n const value = (await client.get(code)) as PasswordReset | undefined\n if (!value) {\n throw new Error(\n \"Provided information is not valid, cannot reset password - please try again.\"\n )\n }\n return value\n}\n\n/**\n * Given a reset code this will invalidate it.\n * @param code The code provided via the email link.\n */\nexport async function invalidateCode(code: string): Promise<void> {\n const client = await redis.getPasswordResetClient()\n await client.delete(code)\n}\n", "export * from \"./migrations\"\nexport * from \"./definitions\"\n", "import { DEFAULT_TENANT_ID } from \"../constants\"\nimport {\n DocumentType,\n StaticDatabases,\n getAllApps,\n getGlobalDBName,\n getDB,\n} from \"../db\"\nimport environment from \"../environment\"\nimport * as platform from \"../platform\"\nimport * as context from \"../context\"\nimport { DEFINITIONS } from \".\"\nimport {\n Migration,\n MigrationOptions,\n MigrationType,\n MigrationNoOpOptions,\n App,\n} from \"@budibase/types\"\n\nexport const getMigrationsDoc = async (db: any) => {\n // get the migrations doc\n try {\n return await db.get(DocumentType.MIGRATIONS)\n } catch (err: any) {\n if (err.status && err.status === 404) {\n return { _id: DocumentType.MIGRATIONS }\n } else {\n console.error(err)\n throw err\n }\n }\n}\n\nexport const backPopulateMigrations = async (opts: MigrationNoOpOptions) => {\n // filter migrations to the type and populate a no-op migration\n const migrations: Migration[] = DEFINITIONS.filter(\n def => def.type === opts.type\n ).map(d => ({ ...d, fn: () => {} }))\n await runMigrations(migrations, { noOp: opts })\n}\n\nexport const runMigration = async (\n migration: Migration,\n options: MigrationOptions = {}\n) => {\n const migrationType = migration.type\n let tenantId: string | undefined\n if 
(migrationType !== MigrationType.INSTALLATION) {\n tenantId = context.getTenantId()\n }\n const migrationName = migration.name\n const silent = migration.silent\n\n const log = (message: string) => {\n if (!silent) {\n console.log(message)\n }\n }\n\n // get the db to store the migration in\n let dbNames: string[]\n if (migrationType === MigrationType.GLOBAL) {\n dbNames = [getGlobalDBName()]\n } else if (migrationType === MigrationType.APP) {\n if (options.noOp) {\n if (!options.noOp.appId) {\n throw new Error(\"appId is required for noOp app migration\")\n }\n dbNames = [options.noOp.appId]\n } else {\n const apps = (await getAllApps(migration.appOpts)) as App[]\n dbNames = apps.map(app => app.appId)\n }\n } else if (migrationType === MigrationType.INSTALLATION) {\n dbNames = [StaticDatabases.PLATFORM_INFO.name]\n } else {\n throw new Error(`Unrecognised migration type [${migrationType}]`)\n }\n\n const length = dbNames.length\n let count = 0\n\n // run the migration against each db\n for (const dbName of dbNames) {\n count++\n const lengthStatement = length > 1 ? `[${count}/${length}]` : \"\"\n\n const db = getDB(dbName)\n\n try {\n const doc = await getMigrationsDoc(db)\n\n // the migration has already been run\n if (doc[migrationName]) {\n // check for force\n if (\n options.force &&\n options.force[migrationType] &&\n options.force[migrationType].includes(migrationName)\n ) {\n log(`[Migration: ${migrationName}] [DB: ${dbName}] Forcing`)\n } else {\n // no force, exit\n return\n }\n }\n\n // check if the migration is not a no-op\n if (!options.noOp) {\n log(\n `[Migration: ${migrationName}] [DB: ${dbName}] Running ${lengthStatement}`\n )\n\n if (migration.preventRetry) {\n // eagerly set the completion date\n // so that we never run this migration twice even upon failure\n doc[migrationName] = Date.now()\n const response = await db.put(doc)\n doc._rev = response.rev\n }\n\n // run the migration\n if (migrationType === MigrationType.APP) {\n await context.doInAppContext(db.name, async () => {\n await migration.fn(db)\n })\n } else {\n await migration.fn(db)\n }\n\n log(`[Migration: ${migrationName}] [DB: ${dbName}] Complete`)\n }\n\n // mark as complete\n doc[migrationName] = Date.now()\n await db.put(doc)\n } catch (err) {\n console.error(\n `[Migration: ${migrationName}] [DB: ${dbName}] Error: `,\n err\n )\n throw err\n }\n }\n}\n\nexport const runMigrations = async (\n migrations: Migration[],\n options: MigrationOptions = {}\n) => {\n let tenantIds\n\n if (environment.MULTI_TENANCY) {\n if (options.noOp) {\n tenantIds = [options.noOp.tenantId]\n } else if (!options.tenantIds || !options.tenantIds.length) {\n // run for all tenants\n tenantIds = await platform.tenants.getTenantIds()\n } else {\n tenantIds = options.tenantIds\n }\n } else {\n // single tenancy\n tenantIds = [DEFAULT_TENANT_ID]\n }\n\n if (tenantIds.length > 1) {\n console.log(`Checking migrations for ${tenantIds.length} tenants`)\n } else {\n console.log(\"Checking migrations\")\n }\n\n let count = 0\n // for all tenants\n for (const tenantId of tenantIds) {\n count++\n if (tenantIds.length > 1) {\n console.log(`Progress [${count}/${tenantIds.length}]`)\n }\n // for all migrations\n for (const migration of migrations) {\n // run the migration\n await context.doInTenant(\n tenantId,\n async () => await runMigration(migration, options)\n )\n }\n }\n console.log(\"Migrations complete\")\n}\n", "import {\n MigrationType,\n MigrationName,\n MigrationDefinition,\n} from \"@budibase/types\"\n\nexport const DEFINITIONS: 
MigrationDefinition[] = [\n {\n type: MigrationType.GLOBAL,\n name: MigrationName.USER_EMAIL_VIEW_CASING,\n },\n {\n type: MigrationType.GLOBAL,\n name: MigrationName.SYNC_QUOTAS,\n },\n {\n type: MigrationType.APP,\n name: MigrationName.APP_URLS,\n },\n {\n type: MigrationType.APP,\n name: MigrationName.EVENT_APP_BACKFILL,\n },\n {\n type: MigrationType.APP,\n name: MigrationName.TABLE_SETTINGS_LINKS_TO_ACTIONS,\n },\n {\n type: MigrationType.GLOBAL,\n name: MigrationName.EVENT_GLOBAL_BACKFILL,\n },\n {\n type: MigrationType.INSTALLATION,\n name: MigrationName.EVENT_INSTALLATION_BACKFILL,\n },\n {\n type: MigrationType.GLOBAL,\n name: MigrationName.GLOBAL_INFO_SYNC_USERS,\n },\n]\n", "import { BuiltinPermissionID, PermissionLevel } from \"./permissions\"\nimport {\n prefixRoleID,\n getRoleParams,\n DocumentType,\n SEPARATOR,\n doWithDB,\n} from \"../db\"\nimport { getAppDB } from \"../context\"\nimport { Screen, Role as RoleDoc } from \"@budibase/types\"\nimport cloneDeep from \"lodash/fp/cloneDeep\"\n\nexport const BUILTIN_ROLE_IDS = {\n ADMIN: \"ADMIN\",\n POWER: \"POWER\",\n BASIC: \"BASIC\",\n PUBLIC: \"PUBLIC\",\n}\n\nconst BUILTIN_IDS = {\n ...BUILTIN_ROLE_IDS,\n BUILDER: \"BUILDER\",\n}\n\n// exclude internal roles like builder\nconst EXTERNAL_BUILTIN_ROLE_IDS = [\n BUILTIN_IDS.ADMIN,\n BUILTIN_IDS.POWER,\n BUILTIN_IDS.BASIC,\n BUILTIN_IDS.PUBLIC,\n]\n\nexport const RoleIDVersion = {\n // original version, with a UUID based ID\n UUID: undefined,\n // new version - with name based ID\n NAME: \"name\",\n}\n\nexport class Role implements RoleDoc {\n _id: string\n _rev?: string\n name: string\n permissionId: string\n inherits?: string\n version?: string\n permissions = {}\n\n constructor(id: string, name: string, permissionId: string) {\n this._id = id\n this.name = name\n this.permissionId = permissionId\n // version for managing the ID - removing the role_ when responding\n this.version = RoleIDVersion.NAME\n }\n\n addInheritance(inherits: string) {\n this.inherits = inherits\n return this\n }\n}\n\nconst BUILTIN_ROLES = {\n ADMIN: new Role(\n BUILTIN_IDS.ADMIN,\n \"Admin\",\n BuiltinPermissionID.ADMIN\n ).addInheritance(BUILTIN_IDS.POWER),\n POWER: new Role(\n BUILTIN_IDS.POWER,\n \"Power\",\n BuiltinPermissionID.POWER\n ).addInheritance(BUILTIN_IDS.BASIC),\n BASIC: new Role(\n BUILTIN_IDS.BASIC,\n \"Basic\",\n BuiltinPermissionID.WRITE\n ).addInheritance(BUILTIN_IDS.PUBLIC),\n PUBLIC: new Role(BUILTIN_IDS.PUBLIC, \"Public\", BuiltinPermissionID.PUBLIC),\n BUILDER: new Role(BUILTIN_IDS.BUILDER, \"Builder\", BuiltinPermissionID.ADMIN),\n}\n\nexport function getBuiltinRoles(): { [key: string]: RoleDoc } {\n return cloneDeep(BUILTIN_ROLES)\n}\n\nexport const BUILTIN_ROLE_ID_ARRAY = Object.values(BUILTIN_ROLES).map(\n role => role._id\n)\n\nexport const BUILTIN_ROLE_NAME_ARRAY = Object.values(BUILTIN_ROLES).map(\n role => role.name\n)\n\nexport function isBuiltin(role?: string) {\n return BUILTIN_ROLE_ID_ARRAY.some(builtin => role?.includes(builtin))\n}\n\n/**\n * Works through the inheritance ranks to see how far up the builtin stack this ID is.\n */\nexport function builtinRoleToNumber(id?: string) {\n if (!id) {\n return 0\n }\n const builtins = getBuiltinRoles()\n const MAX = Object.values(builtins).length + 1\n if (id === BUILTIN_IDS.ADMIN || id === BUILTIN_IDS.BUILDER) {\n return MAX\n }\n let role = builtins[id],\n count = 0\n do {\n if (!role) {\n break\n }\n role = builtins[role.inherits!]\n count++\n } while (role !== null)\n return count\n}\n\n/**\n * Converts any role to a 
number, but has to be async to get the roles from db.\n */\nexport async function roleToNumber(id?: string) {\n if (isBuiltin(id)) {\n return builtinRoleToNumber(id)\n }\n const hierarchy = (await getUserRoleHierarchy(id, {\n defaultPublic: true,\n })) as RoleDoc[]\n for (let role of hierarchy) {\n if (isBuiltin(role?.inherits)) {\n return builtinRoleToNumber(role.inherits) + 1\n }\n }\n return 0\n}\n\n/**\n * Returns whichever builtin roleID is lower.\n */\nexport function lowerBuiltinRoleID(roleId1?: string, roleId2?: string): string {\n if (!roleId1) {\n return roleId2 as string\n }\n if (!roleId2) {\n return roleId1 as string\n }\n return builtinRoleToNumber(roleId1) > builtinRoleToNumber(roleId2)\n ? roleId2\n : roleId1\n}\n\n/**\n * Gets the role object, this is mainly useful for two purposes, to check if the level exists and\n * to check if the role inherits any others.\n * @param roleId The level ID to lookup.\n * @param opts options for the function, like whether to halt errors, instead return public.\n * @returns The role object, which may contain an \"inherits\" property.\n */\nexport async function getRole(\n roleId?: string,\n opts?: { defaultPublic?: boolean }\n): Promise<RoleDoc | undefined> {\n if (!roleId) {\n return undefined\n }\n let role: any = {}\n // built in roles mostly come from the in-code implementation,\n // but can be extended by a doc stored about them (e.g. permissions)\n if (isBuiltin(roleId)) {\n role = cloneDeep(\n Object.values(BUILTIN_ROLES).find(role => role._id === roleId)\n )\n } else {\n // make sure has the prefix (if it has it then it won't be added)\n roleId = prefixRoleID(roleId)\n }\n try {\n const db = getAppDB()\n const dbRole = await db.get(getDBRoleID(roleId))\n role = Object.assign(role, dbRole)\n // finalise the ID\n role._id = getExternalRoleID(role._id, role.version)\n } catch (err) {\n if (!isBuiltin(roleId) && opts?.defaultPublic) {\n return cloneDeep(BUILTIN_ROLES.PUBLIC)\n }\n // only throw an error if there is no role at all\n if (Object.keys(role).length === 0) {\n throw err\n }\n }\n return role\n}\n\n/**\n * Simple function to get all the roles based on the top level user role ID.\n */\nasync function getAllUserRoles(\n userRoleId?: string,\n opts?: { defaultPublic?: boolean }\n): Promise<RoleDoc[]> {\n // admins have access to all roles\n if (userRoleId === BUILTIN_IDS.ADMIN) {\n return getAllRoles()\n }\n let currentRole = await getRole(userRoleId, opts)\n let roles = currentRole ? 
[currentRole] : []\n let roleIds = [userRoleId]\n // get all the inherited roles\n while (\n currentRole &&\n currentRole.inherits &&\n roleIds.indexOf(currentRole.inherits) === -1\n ) {\n roleIds.push(currentRole.inherits)\n currentRole = await getRole(currentRole.inherits)\n if (currentRole) {\n roles.push(currentRole)\n }\n }\n return roles\n}\n\nexport async function getUserRoleIdHierarchy(\n userRoleId?: string\n): Promise<string[]> {\n const roles = await getUserRoleHierarchy(userRoleId)\n return roles.map(role => role._id!)\n}\n\n/**\n * Returns an ordered array of the user's inherited role IDs, this can be used\n * to determine if a user can access something that requires a specific role.\n * @param userRoleId The user's role ID, this can be found in their access token.\n * @param opts optional - if want to default to public use this.\n * @returns returns an ordered array of the roles, with the first being their\n * highest level of access and the last being the lowest level.\n */\nexport async function getUserRoleHierarchy(\n userRoleId?: string,\n opts?: { defaultPublic?: boolean }\n) {\n // special case, if they don't have a role then they are a public user\n return getAllUserRoles(userRoleId, opts)\n}\n\n// this function checks that the provided permissions are in an array format\n// some templates/older apps will use a simple string instead of array for roles\n// convert the string to an array using the theory that write is higher than read\nexport function checkForRoleResourceArray(\n rolePerms: { [key: string]: string[] },\n resourceId: string\n) {\n if (rolePerms && !Array.isArray(rolePerms[resourceId])) {\n const permLevel = rolePerms[resourceId] as any\n rolePerms[resourceId] = [permLevel]\n if (permLevel === PermissionLevel.WRITE) {\n rolePerms[resourceId].push(PermissionLevel.READ)\n }\n }\n return rolePerms\n}\n\nexport async function getAllRoleIds(appId?: string) {\n const roles = await getAllRoles(appId)\n return roles.map(role => role._id)\n}\n\n/**\n * Given an app ID this will retrieve all of the roles that are currently within that app.\n * @return An array of the role objects that were found.\n */\nexport async function getAllRoles(appId?: string): Promise<RoleDoc[]> {\n if (appId) {\n return doWithDB(appId, internal)\n } else {\n let appDB\n try {\n appDB = getAppDB()\n } catch (error) {\n // We don't have any apps, so we'll just use the built-in roles\n }\n return internal(appDB)\n }\n async function internal(db: any) {\n let roles: RoleDoc[] = []\n if (db) {\n const body = await db.allDocs(\n getRoleParams(null, {\n include_docs: true,\n })\n )\n roles = body.rows.map((row: any) => row.doc)\n roles.forEach(\n role => (role._id = getExternalRoleID(role._id!, role.version))\n )\n }\n const builtinRoles = getBuiltinRoles()\n\n // need to combine builtin with any DB record of them (for sake of permissions)\n for (let builtinRoleId of EXTERNAL_BUILTIN_ROLE_IDS) {\n const builtinRole = builtinRoles[builtinRoleId]\n const dbBuiltin = roles.filter(\n dbRole =>\n getExternalRoleID(dbRole._id!, dbRole.version) === builtinRoleId\n )[0]\n if (dbBuiltin == null) {\n roles.push(builtinRole || builtinRoles.BASIC)\n } else {\n // remove role and all back after combining with the builtin\n roles = roles.filter(role => role._id !== dbBuiltin._id)\n dbBuiltin._id = getExternalRoleID(dbBuiltin._id!, dbBuiltin.version)\n roles.push(Object.assign(builtinRole, dbBuiltin))\n }\n }\n // check permissions\n for (let role of roles) {\n if (!role.permissions) {\n continue\n }\n for (let 
resourceId of Object.keys(role.permissions)) {\n role.permissions = checkForRoleResourceArray(\n role.permissions,\n resourceId\n )\n }\n }\n return roles\n }\n}\n\nexport class AccessController {\n userHierarchies: { [key: string]: string[] }\n constructor() {\n this.userHierarchies = {}\n }\n\n async hasAccess(tryingRoleId?: string, userRoleId?: string) {\n // special cases, the screen has no role, the roles are the same or the user\n // is currently in the builder\n if (\n tryingRoleId == null ||\n tryingRoleId === \"\" ||\n tryingRoleId === userRoleId ||\n tryingRoleId === BUILTIN_IDS.BUILDER ||\n userRoleId === BUILTIN_IDS.BUILDER\n ) {\n return true\n }\n let roleIds = userRoleId ? this.userHierarchies[userRoleId] : null\n if (!roleIds && userRoleId) {\n roleIds = await getUserRoleIdHierarchy(userRoleId)\n this.userHierarchies[userRoleId] = roleIds\n }\n\n return roleIds?.indexOf(tryingRoleId) !== -1\n }\n\n async checkScreensAccess(screens: Screen[], userRoleId: string) {\n let accessibleScreens = []\n // don't want to handle this with Promise.all as this would mean all custom roles would be\n // retrieved at same time, it is likely a custom role will be re-used and therefore want\n // to work in sync for performance save\n for (let screen of screens) {\n const accessible = await this.checkScreenAccess(screen, userRoleId)\n if (accessible) {\n accessibleScreens.push(accessible)\n }\n }\n return accessibleScreens\n }\n\n async checkScreenAccess(screen: Screen, userRoleId: string) {\n const roleId = screen && screen.routing ? screen.routing.roleId : undefined\n if (await this.hasAccess(roleId, userRoleId)) {\n return screen\n }\n return null\n }\n}\n\n/**\n * Adds the \"role_\" for builtin role IDs which are to be written to the DB (for permissions).\n */\nexport function getDBRoleID(roleName: string) {\n if (roleName?.startsWith(DocumentType.ROLE)) {\n return roleName\n }\n return prefixRoleID(roleName)\n}\n\n/**\n * Remove the \"role_\" from builtin role IDs that have been written to the DB (for permissions).\n */\nexport function getExternalRoleID(roleId: string, version?: string) {\n // for built-in roles we want to remove the DB role ID element (role_)\n if (\n roleId.startsWith(DocumentType.ROLE) &&\n (isBuiltin(roleId) || version === RoleIDVersion.NAME)\n ) {\n return roleId.split(`${DocumentType.ROLE}${SEPARATOR}`)[1]\n }\n return roleId\n}\n", "import { PermissionLevel, PermissionType } from \"@budibase/types\"\nimport flatten from \"lodash/flatten\"\nimport cloneDeep from \"lodash/fp/cloneDeep\"\n\nexport { PermissionType, PermissionLevel } from \"@budibase/types\"\n\nexport type RoleHierarchy = {\n permissionId: string\n}[]\n\nexport class Permission {\n type: PermissionType\n level: PermissionLevel\n\n constructor(type: PermissionType, level: PermissionLevel) {\n this.type = type\n this.level = level\n }\n}\n\nexport function levelToNumber(perm: PermissionLevel) {\n switch (perm) {\n // not everything has execute privileges\n case PermissionLevel.EXECUTE:\n return 0\n case PermissionLevel.READ:\n return 1\n case PermissionLevel.WRITE:\n return 2\n case PermissionLevel.ADMIN:\n return 3\n default:\n return -1\n }\n}\n\n/**\n * Given the specified permission level for the user return the levels they are allowed to carry out.\n * @param userPermLevel The permission level of the user.\n * @return All the permission levels this user is allowed to carry out.\n */\nexport function getAllowedLevels(userPermLevel: PermissionLevel): string[] {\n switch (userPermLevel) {\n case 
PermissionLevel.EXECUTE:\n return [PermissionLevel.EXECUTE]\n case PermissionLevel.READ:\n return [PermissionLevel.EXECUTE, PermissionLevel.READ]\n case PermissionLevel.WRITE:\n case PermissionLevel.ADMIN:\n return [\n PermissionLevel.EXECUTE,\n PermissionLevel.READ,\n PermissionLevel.WRITE,\n ]\n default:\n return []\n }\n}\n\nexport enum BuiltinPermissionID {\n PUBLIC = \"public\",\n READ_ONLY = \"read_only\",\n WRITE = \"write\",\n ADMIN = \"admin\",\n POWER = \"power\",\n}\n\nexport const BUILTIN_PERMISSIONS = {\n PUBLIC: {\n _id: BuiltinPermissionID.PUBLIC,\n name: \"Public\",\n permissions: [\n new Permission(PermissionType.WEBHOOK, PermissionLevel.EXECUTE),\n ],\n },\n READ_ONLY: {\n _id: BuiltinPermissionID.READ_ONLY,\n name: \"Read only\",\n permissions: [\n new Permission(PermissionType.QUERY, PermissionLevel.READ),\n new Permission(PermissionType.TABLE, PermissionLevel.READ),\n new Permission(PermissionType.APP, PermissionLevel.READ),\n ],\n },\n WRITE: {\n _id: BuiltinPermissionID.WRITE,\n name: \"Read/Write\",\n permissions: [\n new Permission(PermissionType.QUERY, PermissionLevel.WRITE),\n new Permission(PermissionType.TABLE, PermissionLevel.WRITE),\n new Permission(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),\n new Permission(PermissionType.LEGACY_VIEW, PermissionLevel.READ),\n new Permission(PermissionType.APP, PermissionLevel.READ),\n ],\n },\n POWER: {\n _id: BuiltinPermissionID.POWER,\n name: \"Power\",\n permissions: [\n new Permission(PermissionType.TABLE, PermissionLevel.WRITE),\n new Permission(PermissionType.USER, PermissionLevel.READ),\n new Permission(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),\n new Permission(PermissionType.WEBHOOK, PermissionLevel.READ),\n new Permission(PermissionType.LEGACY_VIEW, PermissionLevel.READ),\n new Permission(PermissionType.APP, PermissionLevel.READ),\n ],\n },\n ADMIN: {\n _id: BuiltinPermissionID.ADMIN,\n name: \"Admin\",\n permissions: [\n new Permission(PermissionType.TABLE, PermissionLevel.ADMIN),\n new Permission(PermissionType.USER, PermissionLevel.ADMIN),\n new Permission(PermissionType.AUTOMATION, PermissionLevel.ADMIN),\n new Permission(PermissionType.WEBHOOK, PermissionLevel.READ),\n new Permission(PermissionType.QUERY, PermissionLevel.ADMIN),\n new Permission(PermissionType.LEGACY_VIEW, PermissionLevel.READ),\n new Permission(PermissionType.APP, PermissionLevel.READ),\n ],\n },\n}\n\nexport function getBuiltinPermissions() {\n return cloneDeep(BUILTIN_PERMISSIONS)\n}\n\nexport function getBuiltinPermissionByID(id: string) {\n const perms = Object.values(BUILTIN_PERMISSIONS)\n return perms.find(perm => perm._id === id)\n}\n\nexport function doesHaveBasePermission(\n permType: PermissionType,\n permLevel: PermissionLevel,\n rolesHierarchy: RoleHierarchy\n) {\n const basePermissions = [\n ...new Set(rolesHierarchy.map(role => role.permissionId)),\n ]\n const builtins = Object.values(BUILTIN_PERMISSIONS)\n let permissions = flatten(\n builtins\n .filter(builtin => basePermissions.indexOf(builtin._id) !== -1)\n .map(builtin => builtin.permissions)\n )\n for (let permission of permissions) {\n if (\n permission.type === permType &&\n getAllowedLevels(permission.level).indexOf(permLevel) !== -1\n ) {\n return true\n }\n }\n return false\n}\n\nexport function isPermissionLevelHigherThanRead(level: PermissionLevel) {\n return levelToNumber(level) > 1\n}\n\n// utility as a lot of things need simply the builder permission\nexport const BUILDER = PermissionType.BUILDER\nexport const CREATOR = 
PermissionType.CREATOR\nexport const GLOBAL_BUILDER = PermissionType.GLOBAL_BUILDER\n", "import env from \"../environment\"\nimport * as context from \"../context\"\n\nexport * from \"./installation\"\n\n/**\n * Read the TENANT_FEATURE_FLAGS env var and return an array of features flags for each tenant.\n * The env var is formatted as:\n * tenant1:feature1:feature2,tenant2:feature1\n */\nexport function buildFeatureFlags() {\n if (!env.TENANT_FEATURE_FLAGS) {\n return\n }\n\n const tenantFeatureFlags: Record<string, string[]> = {}\n\n env.TENANT_FEATURE_FLAGS.split(\",\").forEach(tenantToFeatures => {\n const [tenantId, ...features] = tenantToFeatures.split(\":\")\n\n features.forEach(feature => {\n if (!tenantFeatureFlags[tenantId]) {\n tenantFeatureFlags[tenantId] = []\n }\n tenantFeatureFlags[tenantId].push(feature)\n })\n })\n\n return tenantFeatureFlags\n}\n\nexport function isEnabled(featureFlag: string) {\n const tenantId = context.getTenantId()\n const flags = getTenantFeatureFlags(tenantId)\n return flags.includes(featureFlag)\n}\n\nexport function getTenantFeatureFlags(tenantId: string) {\n let flags: string[] = []\n const envFlags = buildFeatureFlags()\n if (envFlags) {\n const globalFlags = envFlags[\"*\"]\n const tenantFlags = envFlags[tenantId] || []\n\n // Explicitly exclude tenants from global features if required.\n // Prefix the tenant flag with '!'\n const tenantOverrides = tenantFlags.reduce(\n (acc: string[], flag: string) => {\n if (flag.startsWith(\"!\")) {\n let stripped = flag.substring(1)\n acc.push(stripped)\n }\n return acc\n },\n []\n )\n\n if (globalFlags) {\n flags.push(...globalFlags)\n }\n if (tenantFlags.length) {\n flags.push(...tenantFlags)\n }\n\n // Purge any tenant specific overrides\n flags = flags.filter(flag => {\n return tenantOverrides.indexOf(flag) == -1 && !flag.startsWith(\"!\")\n })\n }\n\n return flags\n}\n\nexport enum TenantFeatureFlag {\n LICENSING = \"LICENSING\",\n GOOGLE_SHEETS = \"GOOGLE_SHEETS\",\n USER_GROUPS = \"USER_GROUPS\",\n ONBOARDING_TOUR = \"ONBOARDING_TOUR\",\n}\n", "export function processFeatureEnvVar<T>(\n fullList: string[],\n featureList?: string\n) {\n let list\n if (!featureList) {\n list = fullList\n } else {\n list = featureList.split(\",\")\n }\n for (let feature of list) {\n if (!fullList.includes(feature)) {\n throw new Error(`Feature: ${feature} is not an allowed option`)\n }\n }\n return list as unknown as T[]\n}\n", "export * from \"./auth\"\n", "const _passport = require(\"koa-passport\")\nconst LocalStrategy = require(\"passport-local\").Strategy\n\nimport { getGlobalDB } from \"../context\"\nimport { Cookie } from \"../constants\"\nimport { getSessionsForUser, invalidateSessions } from \"../security/sessions\"\nimport {\n authenticated,\n csrf,\n google,\n local,\n oidc,\n tenancy,\n} from \"../middleware\"\nimport * as userCache from \"../cache/user\"\nimport { invalidateUser } from \"../cache/user\"\nimport {\n ConfigType,\n GoogleInnerConfig,\n OIDCInnerConfig,\n PlatformLogoutOpts,\n SessionCookie,\n SSOProviderType,\n} from \"@budibase/types\"\nimport * as events from \"../events\"\nimport * as configs from \"../configs\"\nimport { clearCookie, getCookie } from \"../utils\"\nimport { ssoSaveUserNoOp } from \"../middleware/passport/sso/sso\"\n\nconst refresh = require(\"passport-oauth2-refresh\")\n\nexport {\n auditLog,\n authError,\n internalApi,\n ssoCallbackUrl,\n adminOnly,\n builderOnly,\n builderOrAdmin,\n joiValidator,\n google,\n oidc,\n} from \"../middleware\"\nexport const buildAuthMiddleware 
= authenticated\nexport const buildTenancyMiddleware = tenancy\nexport const buildCsrfMiddleware = csrf\nexport const passport = _passport\n\n// Strategies\n_passport.use(new LocalStrategy(local.options, local.authenticate))\n\nasync function refreshOIDCAccessToken(\n chosenConfig: OIDCInnerConfig,\n refreshToken: string\n): Promise<RefreshResponse> {\n const callbackUrl = await oidc.getCallbackUrl()\n let enrichedConfig: any\n let strategy: any\n\n try {\n enrichedConfig = await oidc.fetchStrategyConfig(chosenConfig, callbackUrl)\n if (!enrichedConfig) {\n throw new Error(\"OIDC Config contents invalid\")\n }\n strategy = await oidc.strategyFactory(enrichedConfig, ssoSaveUserNoOp)\n } catch (err) {\n console.error(err)\n throw new Error(\"Could not refresh OAuth Token\")\n }\n\n refresh.use(strategy, {\n setRefreshOAuth2() {\n return strategy._getOAuth2Client(enrichedConfig)\n },\n })\n\n return new Promise(resolve => {\n refresh.requestNewAccessToken(\n ConfigType.OIDC,\n refreshToken,\n (err: any, accessToken: string, refreshToken: any, params: any) => {\n resolve({ err, accessToken, refreshToken, params })\n }\n )\n })\n}\n\nasync function refreshGoogleAccessToken(\n config: GoogleInnerConfig,\n refreshToken: any\n): Promise<RefreshResponse> {\n let callbackUrl = await google.getCallbackUrl(config)\n\n let strategy\n try {\n strategy = await google.strategyFactory(\n config,\n callbackUrl,\n ssoSaveUserNoOp\n )\n } catch (err: any) {\n console.error(err)\n throw new Error(\n `Error constructing OIDC refresh strategy: message=${err.message}`\n )\n }\n\n refresh.use(strategy)\n\n return new Promise(resolve => {\n refresh.requestNewAccessToken(\n ConfigType.GOOGLE,\n refreshToken,\n (err: any, accessToken: string, refreshToken: string, params: any) => {\n resolve({ err, accessToken, refreshToken, params })\n }\n )\n })\n}\n\ninterface RefreshResponse {\n err?: {\n data?: string\n }\n accessToken?: string\n refreshToken?: string\n params?: any\n}\n\nexport async function refreshOAuthToken(\n refreshToken: string,\n providerType: SSOProviderType,\n configId?: string\n): Promise<RefreshResponse> {\n switch (providerType) {\n case SSOProviderType.OIDC:\n if (!configId) {\n return { err: { data: \"OIDC config id not provided\" } }\n }\n const oidcConfig = await configs.getOIDCConfigById(configId)\n if (!oidcConfig) {\n return { err: { data: \"OIDC configuration not found\" } }\n }\n return refreshOIDCAccessToken(oidcConfig, refreshToken)\n case SSOProviderType.GOOGLE:\n let googleConfig = await configs.getGoogleConfig()\n if (!googleConfig) {\n return { err: { data: \"Google configuration not found\" } }\n }\n return refreshGoogleAccessToken(googleConfig, refreshToken)\n }\n}\n\n// TODO: Refactor to use user save function instead to prevent the need for\n// manually saving and invalidating on callback\nexport async function updateUserOAuth(userId: string, oAuthConfig: any) {\n const details = {\n accessToken: oAuthConfig.accessToken,\n refreshToken: oAuthConfig.refreshToken,\n }\n\n try {\n const db = getGlobalDB()\n const dbUser = await db.get<any>(userId)\n\n //Do not overwrite the refresh token if a valid one is not provided.\n if (typeof details.refreshToken !== \"string\") {\n delete details.refreshToken\n }\n\n dbUser.oauth2 = {\n ...dbUser.oauth2,\n ...details,\n }\n\n await db.put(dbUser)\n\n await invalidateUser(userId)\n } catch (e) {\n console.error(\"Could not update OAuth details for current user\", e)\n }\n}\n\n/**\n * Logs a user out from budibase. 
Re-used across account portal and builder.\n */\nexport async function platformLogout(opts: PlatformLogoutOpts) {\n const ctx = opts.ctx\n const userId = opts.userId\n const keepActiveSession = opts.keepActiveSession\n\n if (!ctx) throw new Error(\"Koa context must be supplied to logout.\")\n\n const currentSession = getCookie<SessionCookie>(ctx, Cookie.Auth)\n let sessions = await getSessionsForUser(userId)\n\n if (currentSession && keepActiveSession) {\n sessions = sessions.filter(\n session => session.sessionId !== currentSession.sessionId\n )\n } else {\n // clear cookies\n clearCookie(ctx, Cookie.Auth)\n }\n\n const sessionIds = sessions.map(({ sessionId }) => sessionId)\n await invalidateSessions(userId, { sessionIds, reason: \"logout\" })\n await events.auth.logout(ctx.user?.email)\n await userCache.invalidateUser(userId)\n}\n", "export * as local from \"./passport/local\"\nexport * as google from \"./passport/sso/google\"\nexport * as oidc from \"./passport/sso/oidc\"\nimport * as datasourceGoogle from \"./passport/datasource/google\"\n\nexport const datasource = {\n google: datasourceGoogle,\n}\nexport { authError, ssoCallbackUrl } from \"./passport/utils\"\nexport { default as authenticated } from \"./authenticated\"\nexport { default as auditLog } from \"./auditLog\"\nexport { default as tenancy } from \"./tenancy\"\nexport { default as internalApi } from \"./internalApi\"\nexport { default as csrf } from \"./csrf\"\nexport { default as adminOnly } from \"./adminOnly\"\nexport { default as builderOrAdmin } from \"./builderOrAdmin\"\nexport { default as builderOnly } from \"./builderOnly\"\nexport { default as pino } from \"../logging/pino/middleware\"\nexport { default as correlation } from \"../logging/correlation/middleware\"\nexport { default as errorHandling } from \"./errorHandling\"\nexport { default as querystringToBody } from \"./querystringToBody\"\nexport * as joiValidator from \"./joi-validator\"\n", "import { UserStatus } from \"../../constants\"\nimport { compare } from \"../../utils\"\nimport * as users from \"../../users\"\nimport { authError } from \"./utils\"\nimport { BBContext } from \"@budibase/types\"\n\nconst INVALID_ERR = \"Invalid credentials\"\nconst EXPIRED = \"This account has expired. 
Please reset your password\"\n\nexport const options = {\n passReqToCallback: true,\n}\n\n/**\n * Passport Local Authentication Middleware.\n * @param ctx the request structure\n * @param email username to login with\n * @param password plain text password to log in with\n * @param done callback from passport to return user information and errors\n * @returns The authenticated user, or errors if they occur\n */\nexport async function authenticate(\n ctx: BBContext,\n email: string,\n password: string,\n done: Function\n) {\n if (!email) return authError(done, \"Email Required\")\n if (!password) return authError(done, \"Password Required\")\n\n const dbUser = await users.getGlobalUserByEmail(email)\n if (dbUser == null) {\n console.info(`user=${email} could not be found`)\n return authError(done, INVALID_ERR)\n }\n\n if (dbUser.status === UserStatus.INACTIVE) {\n console.info(`user=${email} is inactive`, dbUser)\n return authError(done, INVALID_ERR)\n }\n\n if (!dbUser.password) {\n console.info(`user=${email} has no password set`, dbUser)\n return authError(done, EXPIRED)\n }\n\n if (!(await compare(password, dbUser.password))) {\n return authError(done, INVALID_ERR)\n }\n\n // intentionally remove the users password in payload\n delete dbUser.password\n return done(null, dbUser)\n}\n", "import { getTenantId, isMultiTenant } from \"../../context\"\nimport * as configs from \"../../configs\"\nimport { ConfigType, GoogleInnerConfig } from \"@budibase/types\"\n\n/**\n * Utility to handle authentication errors.\n *\n * @param done The passport callback.\n * @param message Message that will be returned in the response body\n * @param err (Optional) error that will be logged\n */\n\nexport function authError(done: Function, message: string, err?: any) {\n return done(\n err,\n null, // never return a user\n { message: message }\n )\n}\n\nexport async function ssoCallbackUrl(\n type: ConfigType,\n config?: GoogleInnerConfig\n) {\n // incase there is a callback URL from before\n if (config && (config as GoogleInnerConfig).callbackURL) {\n return (config as GoogleInnerConfig).callbackURL as string\n }\n const settingsConfig = await configs.getSettingsConfig()\n\n let callbackUrl = `/api/global/auth`\n if (isMultiTenant()) {\n callbackUrl += `/${getTenantId()}`\n }\n callbackUrl += `/${type}/callback`\n\n return `${settingsConfig.platformUrl}${callbackUrl}`\n}\n", "import { ssoCallbackUrl } from \"../utils\"\nimport * as sso from \"./sso\"\nimport {\n ConfigType,\n SSOProfile,\n SSOAuthDetails,\n SSOProviderType,\n SaveSSOUserFunction,\n GoogleInnerConfig,\n} from \"@budibase/types\"\n\nconst GoogleStrategy = require(\"passport-google-oauth\").OAuth2Strategy\n\nexport function buildVerifyFn(saveUserFn: SaveSSOUserFunction) {\n return (\n accessToken: string,\n refreshToken: string,\n profile: SSOProfile,\n done: Function\n ) => {\n const details: SSOAuthDetails = {\n provider: \"google\",\n providerType: SSOProviderType.GOOGLE,\n userId: profile.id,\n profile: profile,\n email: profile._json.email,\n oauth2: {\n accessToken,\n refreshToken,\n },\n }\n\n return sso.authenticate(\n details,\n true, // require local accounts to exist\n done,\n saveUserFn\n )\n }\n}\n\n/**\n * Create an instance of the google passport strategy. 
This wrapper fetches the configuration\n * from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport.\n * @returns Dynamically configured Passport Google Strategy\n */\nexport async function strategyFactory(\n config: GoogleInnerConfig,\n callbackUrl: string,\n saveUserFn: SaveSSOUserFunction\n) {\n try {\n const { clientID, clientSecret } = config\n\n if (!clientID || !clientSecret) {\n throw new Error(\n \"Configuration invalid. Must contain google clientID and clientSecret\"\n )\n }\n\n const verify = buildVerifyFn(saveUserFn)\n return new GoogleStrategy(\n {\n clientID: config.clientID,\n clientSecret: config.clientSecret,\n callbackURL: callbackUrl,\n },\n verify\n )\n } catch (err: any) {\n console.error(err)\n throw new Error(`Error constructing google authentication strategy: ${err}`)\n }\n}\n\nexport async function getCallbackUrl(config: GoogleInnerConfig) {\n return ssoCallbackUrl(ConfigType.GOOGLE, config)\n}\n", "import { generateGlobalUserID } from \"../../../db\"\nimport { authError } from \"../utils\"\nimport * as users from \"../../../users\"\nimport * as context from \"../../../context\"\nimport fetch from \"node-fetch\"\nimport {\n SaveSSOUserFunction,\n SaveUserOpts,\n SSOAuthDetails,\n SSOUser,\n User,\n} from \"@budibase/types\"\n\n// no-op function for user save\n// - this allows datasource auth and access token refresh to work correctly\n// - prefer no-op over an optional argument to ensure function is provided to login flows\nexport const ssoSaveUserNoOp: SaveSSOUserFunction = (\n user: SSOUser,\n opts: SaveUserOpts\n) => Promise.resolve(user)\n\n/**\n * Common authentication logic for third parties. e.g. OAuth, OIDC.\n */\nexport async function authenticate(\n details: SSOAuthDetails,\n requireLocalAccount: boolean = true,\n done: any,\n saveUserFn: SaveSSOUserFunction\n) {\n if (!saveUserFn) {\n throw new Error(\"Save user function must be provided\")\n }\n if (!details.userId) {\n return authError(done, \"sso user id required\")\n }\n if (!details.email) {\n return authError(done, \"sso user email required\")\n }\n\n // use the third party id\n const userId = generateGlobalUserID(details.userId)\n\n let dbUser: User | undefined\n\n // try to load by id\n try {\n dbUser = await users.getById(userId)\n } catch (err: any) {\n // abort when not 404 error\n if (!err.status || err.status !== 404) {\n return authError(\n done,\n \"Unexpected error when retrieving existing user\",\n err\n )\n }\n }\n\n // fallback to loading by email\n if (!dbUser) {\n dbUser = await users.getGlobalUserByEmail(details.email)\n }\n\n // exit early if there is still no user and auto creation is disabled\n if (!dbUser && requireLocalAccount) {\n return authError(\n done,\n \"Email does not yet exist. 
You must set up your local budibase account first.\"\n )\n }\n\n // first time creation\n if (!dbUser) {\n // setup a blank user using the third party id\n dbUser = {\n _id: userId,\n email: details.email,\n roles: {},\n tenantId: context.getTenantId(),\n }\n }\n\n let ssoUser = await syncUser(dbUser, details)\n // never prompt for password reset\n ssoUser.forceResetPassword = false\n\n try {\n // don't try to re-save any existing password\n delete ssoUser.password\n // create or sync the user\n ssoUser = (await saveUserFn(ssoUser, {\n hashPassword: false,\n requirePassword: false,\n })) as SSOUser\n } catch (err: any) {\n return authError(done, \"Error saving user\", err)\n }\n\n return done(null, ssoUser)\n}\n\nasync function getProfilePictureUrl(user: User, details: SSOAuthDetails) {\n const pictureUrl = details.profile?._json.picture\n if (pictureUrl) {\n const response = await fetch(pictureUrl)\n if (response.status === 200) {\n const type = response.headers.get(\"content-type\") as string\n if (type.startsWith(\"image/\")) {\n return pictureUrl\n }\n }\n }\n}\n\n/**\n * @returns a user that has been sync'd with third party information\n */\nasync function syncUser(user: User, details: SSOAuthDetails): Promise<SSOUser> {\n let firstName\n let lastName\n let pictureUrl\n let oauth2\n let thirdPartyProfile\n\n if (details.profile) {\n const profile = details.profile\n\n if (profile.name) {\n const name = profile.name\n // first name\n if (name.givenName) {\n firstName = name.givenName\n }\n // last name\n if (name.familyName) {\n lastName = name.familyName\n }\n }\n\n pictureUrl = await getProfilePictureUrl(user, details)\n\n thirdPartyProfile = {\n ...profile._json,\n }\n }\n\n // oauth tokens for future use\n if (details.oauth2) {\n oauth2 = {\n ...details.oauth2,\n }\n }\n\n return {\n ...user,\n provider: details.provider,\n providerType: details.providerType,\n firstName,\n lastName,\n thirdPartyProfile,\n pictureUrl,\n oauth2,\n }\n}\n", "import fetch from \"node-fetch\"\nimport * as sso from \"./sso\"\nimport { ssoCallbackUrl } from \"../utils\"\nimport { validEmail } from \"../../../utils\"\nimport {\n ConfigType,\n OIDCInnerConfig,\n SSOProfile,\n OIDCStrategyConfiguration,\n SSOAuthDetails,\n SSOProviderType,\n JwtClaims,\n SaveSSOUserFunction,\n} from \"@budibase/types\"\n\nconst OIDCStrategy = require(\"@techpass/passport-openidconnect\").Strategy\n\nexport function buildVerifyFn(saveUserFn: SaveSSOUserFunction) {\n /**\n * @param issuer The identity provider base URL\n * @param sub The user ID\n * @param profile The user profile information. 
Created by passport from the /userinfo response\n * @param jwtClaims The parsed id_token claims\n * @param accessToken The access_token for contacting the identity provider - may or may not be a JWT\n * @param refreshToken The refresh_token for obtaining a new access_token - usually not a JWT\n * @param idToken The id_token - always a JWT\n * @param params The response body from requesting an access_token\n * @param done The passport callback: err, user, info\n */\n return async (\n issuer: string,\n sub: string,\n profile: SSOProfile,\n jwtClaims: JwtClaims,\n accessToken: string,\n refreshToken: string,\n idToken: string,\n params: any,\n done: Function\n ) => {\n const details: SSOAuthDetails = {\n // store the issuer info to enable sync in future\n provider: issuer,\n providerType: SSOProviderType.OIDC,\n userId: profile.id,\n profile: profile,\n email: getEmail(profile, jwtClaims),\n oauth2: {\n accessToken: accessToken,\n refreshToken: refreshToken,\n },\n }\n\n return sso.authenticate(\n details,\n false, // don't require local accounts to exist\n done,\n saveUserFn\n )\n }\n}\n\n/**\n * @param profile The structured profile created by passport using the user info endpoint\n * @param jwtClaims The claims returned in the id token\n */\nfunction getEmail(profile: SSOProfile, jwtClaims: JwtClaims) {\n // profile not guaranteed to contain email e.g. github connected azure ad account\n if (profile._json.email) {\n return profile._json.email\n }\n\n // fallback to id token email\n if (jwtClaims.email) {\n return jwtClaims.email\n }\n\n // fallback to id token preferred username\n const username = jwtClaims.preferred_username\n if (username && validEmail(username)) {\n return username\n }\n\n throw new Error(\n `Could not determine user email from profile ${JSON.stringify(\n profile\n )} and claims ${JSON.stringify(jwtClaims)}`\n )\n}\n\n/**\n * Create an instance of the oidc passport strategy. This wrapper fetches the configuration\n * from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport.\n * @returns Dynamically configured Passport OIDC Strategy\n */\nexport async function strategyFactory(\n config: OIDCStrategyConfiguration,\n saveUserFn: SaveSSOUserFunction\n) {\n try {\n const verify = buildVerifyFn(saveUserFn)\n const strategy = new OIDCStrategy(config, verify)\n strategy.name = \"oidc\"\n return strategy\n } catch (err: any) {\n console.error(err)\n throw new Error(`Error constructing OIDC authentication strategy - ${err}`)\n }\n}\n\nexport async function fetchStrategyConfig(\n oidcConfig: OIDCInnerConfig,\n callbackUrl?: string\n): Promise<OIDCStrategyConfiguration> {\n try {\n const { clientID, clientSecret, configUrl } = oidcConfig\n\n if (!clientID || !clientSecret || !callbackUrl || !configUrl) {\n // check for remote config and all required elements\n throw new Error(\n \"Configuration invalid. 
Must contain clientID, clientSecret, callbackUrl and configUrl\"\n )\n }\n\n const response = await fetch(configUrl)\n\n if (!response.ok) {\n throw new Error(\n `Unexpected response when fetching openid-configuration: ${response.statusText}`\n )\n }\n\n const body = await response.json()\n\n return {\n issuer: body.issuer,\n authorizationURL: body.authorization_endpoint,\n tokenURL: body.token_endpoint,\n userInfoURL: body.userinfo_endpoint,\n clientID: clientID,\n clientSecret: clientSecret,\n callbackURL: callbackUrl,\n }\n } catch (err) {\n console.error(err)\n throw new Error(\n `Error constructing OIDC authentication configuration - ${err}`\n )\n }\n}\n\nexport async function getCallbackUrl() {\n return ssoCallbackUrl(ConfigType.OIDC)\n}\n", "import * as google from \"../sso/google\"\nimport { Cookie } from \"../../../constants\"\nimport * as configs from \"../../../configs\"\nimport * as cache from \"../../../cache\"\nimport * as utils from \"../../../utils\"\nimport { UserCtx, SSOProfile, DatasourceAuthCookie } from \"@budibase/types\"\nimport { ssoSaveUserNoOp } from \"../sso/sso\"\n\nconst GoogleStrategy = require(\"passport-google-oauth\").OAuth2Strategy\n\ntype Passport = {\n authenticate: any\n}\n\nasync function fetchGoogleCreds() {\n let config = await configs.getGoogleDatasourceConfig()\n\n if (!config) {\n throw new Error(\"No google configuration found\")\n }\n return config\n}\n\nexport async function preAuth(\n passport: Passport,\n ctx: UserCtx,\n next: Function\n) {\n // get the relevant config\n const googleConfig = await fetchGoogleCreds()\n const platformUrl = await configs.getPlatformUrl({ tenantAware: false })\n\n let callbackUrl = `${platformUrl}/api/global/auth/datasource/google/callback`\n const strategy = await google.strategyFactory(\n googleConfig,\n callbackUrl,\n ssoSaveUserNoOp\n )\n\n if (!ctx.query.appId) {\n ctx.throw(400, \"appId query param not present.\")\n }\n\n return passport.authenticate(strategy, {\n scope: [\"profile\", \"email\", \"https://www.googleapis.com/auth/spreadsheets\"],\n accessType: \"offline\",\n prompt: \"consent\",\n })(ctx, next)\n}\n\nexport async function postAuth(\n passport: Passport,\n ctx: UserCtx,\n next: Function\n) {\n // get the relevant config\n const config = await fetchGoogleCreds()\n const platformUrl = await configs.getPlatformUrl({ tenantAware: false })\n\n let callbackUrl = `${platformUrl}/api/global/auth/datasource/google/callback`\n const authStateCookie = utils.getCookie<{ appId: string }>(\n ctx,\n Cookie.DatasourceAuth\n )\n\n if (!authStateCookie) {\n throw new Error(\"Unable to fetch datasource auth cookie\")\n }\n\n return passport.authenticate(\n new GoogleStrategy(\n {\n clientID: config.clientID,\n clientSecret: config.clientSecret,\n callbackURL: callbackUrl,\n },\n (\n accessToken: string,\n refreshToken: string,\n _profile: SSOProfile,\n done: Function\n ) => {\n utils.clearCookie(ctx, Cookie.DatasourceAuth)\n done(null, { accessToken, refreshToken })\n }\n ),\n { successRedirect: \"/\", failureRedirect: \"/error\" },\n async (err: any, tokens: string[]) => {\n const baseUrl = `/builder/app/${authStateCookie.appId}/data`\n\n const id = utils.newid()\n await cache.store(\n `datasource:creation:${authStateCookie.appId}:google:${id}`,\n {\n tokens,\n }\n )\n\n ctx.redirect(`${baseUrl}/new?continue_google_setup=${id}`)\n }\n )(ctx, next)\n}\n", "import { Cookie, Header } from \"../constants\"\nimport {\n getCookie,\n clearCookie,\n openJwt,\n isValidInternalAPIKey,\n} from \"../utils\"\nimport { 
getUser } from \"../cache/user\"\nimport { getSession, updateSessionTTL } from \"../security/sessions\"\nimport { buildMatcherRegex, matches } from \"./matchers\"\nimport { SEPARATOR, queryGlobalView, ViewName } from \"../db\"\nimport { getGlobalDB, doInTenant } from \"../context\"\nimport { decrypt } from \"../security/encryption\"\nimport * as identity from \"../context/identity\"\nimport env from \"../environment\"\nimport { Ctx, EndpointMatcher, SessionCookie } from \"@budibase/types\"\nimport { InvalidAPIKeyError, ErrorCode } from \"../errors\"\nimport tracer from \"dd-trace\"\n\nconst ONE_MINUTE = env.SESSION_UPDATE_PERIOD\n ? parseInt(env.SESSION_UPDATE_PERIOD)\n : 60 * 1000\n\ninterface FinaliseOpts {\n authenticated?: boolean\n internal?: boolean\n publicEndpoint?: boolean\n version?: string\n user?: any\n}\n\nfunction timeMinusOneMinute() {\n return new Date(Date.now() - ONE_MINUTE).toISOString()\n}\n\nfunction finalise(ctx: any, opts: FinaliseOpts = {}) {\n ctx.publicEndpoint = opts.publicEndpoint || false\n ctx.isAuthenticated = opts.authenticated || false\n ctx.user = opts.user\n ctx.internal = opts.internal || false\n ctx.version = opts.version\n}\n\nasync function checkApiKey(apiKey: string, populateUser?: Function) {\n // check both the primary and the fallback internal api keys\n // this allows for rotation\n if (isValidInternalAPIKey(apiKey)) {\n return { valid: true, user: undefined }\n }\n const decrypted = decrypt(apiKey)\n const tenantId = decrypted.split(SEPARATOR)[0]\n return doInTenant(tenantId, async () => {\n let userId\n try {\n const db = getGlobalDB()\n // api key is encrypted in the database\n userId = (await queryGlobalView(\n ViewName.BY_API_KEY,\n {\n key: apiKey,\n },\n db\n )) as string\n } catch (err) {\n userId = undefined\n }\n if (userId) {\n return {\n valid: true,\n user: await getUser(userId, tenantId, populateUser),\n }\n } else {\n throw new InvalidAPIKeyError()\n }\n })\n}\n\n/**\n * This middleware is tenancy aware, so that it does not depend on other middlewares being used.\n * The tenancy modules should not be used here and it should be assumed that the tenancy context\n * has not yet been populated.\n */\nexport default function (\n noAuthPatterns: EndpointMatcher[] = [],\n opts: { publicAllowed?: boolean; populateUser?: Function } = {\n publicAllowed: false,\n }\n) {\n const noAuthOptions = noAuthPatterns ? 
buildMatcherRegex(noAuthPatterns) : []\n return async (ctx: Ctx | any, next: any) => {\n let publicEndpoint = false\n const version = ctx.request.headers[Header.API_VER]\n // the path is not authenticated\n const found = matches(ctx, noAuthOptions)\n if (found) {\n publicEndpoint = true\n }\n try {\n // check the actual user is authenticated first, try header or cookie\n let headerToken = ctx.request.headers[Header.TOKEN]\n\n const authCookie =\n getCookie<SessionCookie>(ctx, Cookie.Auth) ||\n openJwt<SessionCookie>(headerToken)\n let apiKey = ctx.request.headers[Header.API_KEY]\n\n if (!apiKey && ctx.request.headers[Header.AUTHORIZATION]) {\n apiKey = ctx.request.headers[Header.AUTHORIZATION].split(\" \")[1]\n }\n\n const tenantId = ctx.request.headers[Header.TENANT_ID]\n let authenticated = false,\n user = null,\n internal = false\n if (authCookie && !apiKey) {\n const sessionId = authCookie.sessionId\n const userId = authCookie.userId\n let session\n try {\n // getting session handles error checking (if session exists etc)\n session = await getSession(userId, sessionId)\n if (opts && opts.populateUser) {\n user = await getUser(\n userId,\n session.tenantId,\n opts.populateUser(ctx)\n )\n } else {\n user = await getUser(userId, session.tenantId)\n }\n user.csrfToken = session.csrfToken\n\n if (session?.lastAccessedAt < timeMinusOneMinute()) {\n // make sure we denote that the session is still in use\n await updateSessionTTL(session)\n }\n authenticated = true\n } catch (err: any) {\n authenticated = false\n console.error(`Auth Error: ${err.message}`)\n console.error(err)\n // remove the cookie as the user does not exist anymore\n clearCookie(ctx, Cookie.Auth)\n }\n }\n // this is an internal request, no user made it\n if (!authenticated && apiKey) {\n const populateUser = opts.populateUser ? 
opts.populateUser(ctx) : null\n const { valid, user: foundUser } = await checkApiKey(\n apiKey,\n populateUser\n )\n if (valid && foundUser) {\n authenticated = true\n user = foundUser\n } else if (valid) {\n authenticated = true\n internal = true\n }\n }\n if (!user && tenantId) {\n user = { tenantId }\n } else if (user) {\n delete user.password\n }\n // be explicit\n if (!authenticated) {\n authenticated = false\n }\n\n if (user) {\n tracer.setUser({\n id: user?._id,\n tenantId: user?.tenantId,\n budibaseAccess: user?.budibaseAccess,\n status: user?.status,\n })\n }\n\n // isAuthenticated is a function, so use a variable to be able to check authed state\n finalise(ctx, { authenticated, user, internal, version, publicEndpoint })\n\n if (user && user.email) {\n return identity.doInUserContext(user, ctx, next)\n } else {\n return next()\n }\n } catch (err: any) {\n console.error(`Auth Error: ${err.message}`)\n console.error(err)\n // invalid token, clear the cookie\n if (err?.name === \"JsonWebTokenError\") {\n clearCookie(ctx, Cookie.Auth)\n } else if (err?.code === ErrorCode.INVALID_API_KEY) {\n ctx.throw(403, err.message)\n }\n // allow configuring for public access\n if ((opts && opts.publicAllowed) || publicEndpoint) {\n finalise(ctx, { authenticated: false, version, publicEndpoint })\n return next()\n } else {\n ctx.throw(err.status || 403, err)\n }\n }\n }\n}\n", "import { BBContext, EndpointMatcher, RegexMatcher } from \"@budibase/types\"\n\nconst PARAM_REGEX = /\\/:(.*?)(\\/.*)?$/g\n\nexport const buildMatcherRegex = (\n patterns: EndpointMatcher[]\n): RegexMatcher[] => {\n if (!patterns) {\n return []\n }\n return patterns.map(pattern => {\n let route = pattern.route\n const method = pattern.method\n const strict = pattern.strict ? pattern.strict : false\n\n // if there is a param in the route\n // use a wildcard pattern\n const matches = route.match(PARAM_REGEX)\n if (matches) {\n for (let match of matches) {\n const suffix = match.endsWith(\"/\") ? \"/\" : \"\"\n const pattern = \"/.*\" + suffix\n route = route.replace(match, pattern)\n }\n }\n\n return { regex: new RegExp(route), method, strict, route }\n })\n}\n\nexport const matches = (ctx: BBContext, options: RegexMatcher[]) => {\n return options.find(({ regex, method, strict, route }) => {\n let urlMatch\n if (strict) {\n urlMatch = ctx.request.url === route\n } else {\n urlMatch = regex.test(ctx.request.url)\n }\n\n const methodMatch =\n method === \"ALL\"\n ? 
true\n : ctx.request.method.toLowerCase() === method.toLowerCase()\n\n return urlMatch && methodMatch\n })\n}\n", "import crypto from \"crypto\"\nimport fs from \"fs\"\nimport zlib from \"zlib\"\nimport env from \"../environment\"\nimport { join } from \"path\"\n\nconst ALGO = \"aes-256-ctr\"\nconst SEPARATOR = \"-\"\nconst ITERATIONS = 10000\nconst STRETCH_LENGTH = 32\n\nconst SALT_LENGTH = 16\nconst IV_LENGTH = 16\n\nexport enum SecretOption {\n API = \"api\",\n ENCRYPTION = \"encryption\",\n}\n\nexport function getSecret(secretOption: SecretOption): string {\n let secret, secretName\n switch (secretOption) {\n case SecretOption.ENCRYPTION:\n secret = env.ENCRYPTION_KEY\n secretName = \"ENCRYPTION_KEY\"\n break\n case SecretOption.API:\n default:\n secret = env.API_ENCRYPTION_KEY\n secretName = \"API_ENCRYPTION_KEY\"\n break\n }\n if (!secret) {\n throw new Error(`Secret \"${secretName}\" has not been set in environment.`)\n }\n return secret\n}\n\nfunction stretchString(secret: string, salt: Buffer) {\n return crypto.pbkdf2Sync(secret, salt, ITERATIONS, STRETCH_LENGTH, \"sha512\")\n}\n\nexport function encrypt(\n input: string,\n secretOption: SecretOption = SecretOption.API\n) {\n const salt = crypto.randomBytes(SALT_LENGTH)\n const stretched = stretchString(getSecret(secretOption), salt)\n const cipher = crypto.createCipheriv(ALGO, stretched, salt)\n const base = cipher.update(input)\n const final = cipher.final()\n const encrypted = Buffer.concat([base, final]).toString(\"hex\")\n return `${salt.toString(\"hex\")}${SEPARATOR}${encrypted}`\n}\n\nexport function decrypt(\n input: string,\n secretOption: SecretOption = SecretOption.API\n) {\n const [salt, encrypted] = input.split(SEPARATOR)\n const saltBuffer = Buffer.from(salt, \"hex\")\n const stretched = stretchString(getSecret(secretOption), saltBuffer)\n const decipher = crypto.createDecipheriv(ALGO, stretched, saltBuffer)\n const base = decipher.update(Buffer.from(encrypted, \"hex\"))\n const final = decipher.final()\n return Buffer.concat([base, final]).toString()\n}\n\nexport async function encryptFile(\n { dir, filename }: { dir: string; filename: string },\n secret: string\n) {\n const outputFileName = `${filename}.enc`\n\n const filePath = join(dir, filename)\n if (fs.lstatSync(filePath).isDirectory()) {\n throw new Error(\"Unable to encrypt directory\")\n }\n const inputFile = fs.createReadStream(filePath)\n const outputFile = fs.createWriteStream(join(dir, outputFileName))\n\n const salt = crypto.randomBytes(SALT_LENGTH)\n const iv = crypto.randomBytes(IV_LENGTH)\n const stretched = stretchString(secret, salt)\n const cipher = crypto.createCipheriv(ALGO, stretched, iv)\n\n outputFile.write(salt)\n outputFile.write(iv)\n\n inputFile.pipe(zlib.createGzip()).pipe(cipher).pipe(outputFile)\n\n return new Promise<{ filename: string; dir: string }>(r => {\n outputFile.on(\"finish\", () => {\n r({\n filename: outputFileName,\n dir,\n })\n })\n })\n}\n\nasync function getSaltAndIV(path: string) {\n const fileStream = fs.createReadStream(path)\n\n const salt = await readBytes(fileStream, SALT_LENGTH)\n const iv = await readBytes(fileStream, IV_LENGTH)\n fileStream.close()\n return { salt, iv }\n}\n\nexport async function decryptFile(\n inputPath: string,\n outputPath: string,\n secret: string\n) {\n if (fs.lstatSync(inputPath).isDirectory()) {\n throw new Error(\"Unable to encrypt directory\")\n }\n const { salt, iv } = await getSaltAndIV(inputPath)\n const inputFile = fs.createReadStream(inputPath, {\n start: SALT_LENGTH + 
IV_LENGTH,\n })\n\n const outputFile = fs.createWriteStream(outputPath)\n\n const stretched = stretchString(secret, salt)\n const decipher = crypto.createDecipheriv(ALGO, stretched, iv)\n\n const unzip = zlib.createGunzip()\n\n inputFile.pipe(decipher).pipe(unzip).pipe(outputFile)\n\n return new Promise<void>((res, rej) => {\n outputFile.on(\"finish\", () => {\n outputFile.close()\n res()\n })\n\n inputFile.on(\"error\", e => {\n outputFile.close()\n rej(e)\n })\n\n decipher.on(\"error\", e => {\n outputFile.close()\n rej(e)\n })\n\n unzip.on(\"error\", e => {\n outputFile.close()\n rej(e)\n })\n\n outputFile.on(\"error\", e => {\n outputFile.close()\n rej(e)\n })\n })\n}\n\nfunction readBytes(stream: fs.ReadStream, length: number) {\n return new Promise<Buffer>((resolve, reject) => {\n let bytesRead = 0\n const data: Buffer[] = []\n\n stream.on(\"readable\", () => {\n let chunk\n\n while ((chunk = stream.read(length - bytesRead)) !== null) {\n data.push(chunk)\n bytesRead += chunk.length\n }\n\n resolve(Buffer.concat(data))\n })\n\n stream.on(\"end\", () => {\n reject(new Error(\"Insufficient data in the stream.\"))\n })\n\n stream.on(\"error\", error => {\n reject(error)\n })\n })\n}\n", "import { BBContext } from \"@budibase/types\"\n\nexport default async (ctx: BBContext | any, next: any) => {\n // Placeholder for audit log middleware\n return next()\n}\n", "import { doInTenant } from \"../context\"\nimport { getTenantIDFromCtx } from \"../tenancy\"\nimport { buildMatcherRegex, matches } from \"./matchers\"\nimport { Header } from \"../constants\"\nimport {\n BBContext,\n EndpointMatcher,\n GetTenantIdOptions,\n TenantResolutionStrategy,\n} from \"@budibase/types\"\n\nexport default function (\n allowQueryStringPatterns: EndpointMatcher[],\n noTenancyPatterns: EndpointMatcher[],\n opts: { noTenancyRequired?: boolean } = { noTenancyRequired: false }\n) {\n const allowQsOptions = buildMatcherRegex(allowQueryStringPatterns)\n const noTenancyOptions = buildMatcherRegex(noTenancyPatterns)\n\n return async function (ctx: BBContext | any, next: any) {\n const allowNoTenant =\n opts.noTenancyRequired || !!matches(ctx, noTenancyOptions)\n const tenantOpts: GetTenantIdOptions = {\n allowNoTenant,\n }\n\n const allowQs = !!matches(ctx, allowQsOptions)\n if (!allowQs) {\n tenantOpts.excludeStrategies = [TenantResolutionStrategy.QUERY]\n }\n\n const tenantId = getTenantIDFromCtx(ctx, tenantOpts)\n ctx.set(Header.TENANT_ID, tenantId as string)\n return doInTenant(tenantId, next)\n }\n}\n", "import { Header } from \"../constants\"\nimport { BBContext } from \"@budibase/types\"\nimport { isValidInternalAPIKey } from \"../utils\"\n\n/**\n * API Key only endpoint.\n */\nexport default async (ctx: BBContext, next: any) => {\n const apiKey = ctx.request.headers[Header.API_KEY]\n if (!apiKey) {\n ctx.throw(403, \"Unauthorized\")\n }\n\n if (Array.isArray(apiKey)) {\n ctx.throw(403, \"Unauthorized\")\n }\n\n if (!isValidInternalAPIKey(apiKey)) {\n ctx.throw(403, \"Unauthorized\")\n }\n\n return next()\n}\n", "import { Header } from \"../constants\"\nimport { buildMatcherRegex, matches } from \"./matchers\"\nimport { BBContext, EndpointMatcher } from \"@budibase/types\"\n\n/**\n * GET, HEAD and OPTIONS methods are considered safe operations\n *\n * POST, PUT, PATCH, and DELETE methods, being state changing verbs,\n * should have a CSRF token attached to the request\n */\nconst EXCLUDED_METHODS = [\"GET\", \"HEAD\", \"OPTIONS\"]\n\n/**\n * There are only three content type values that can be used in cross 
domain requests.\n * If any other value is used, e.g. application/json, the browser will first make a OPTIONS\n * request which will be protected by CORS.\n */\nconst INCLUDED_CONTENT_TYPES = [\n \"application/x-www-form-urlencoded\",\n \"multipart/form-data\",\n \"text/plain\",\n]\n\n/**\n * Validate the CSRF token generated aganst the user session.\n * Compare the token with the x-csrf-token header.\n *\n * If the token is not found within the request or the value provided\n * does not match the value within the user session, the request is rejected.\n *\n * CSRF protection provided using the 'Synchronizer Token Pattern'\n * https://cheatsheetseries.owasp.org/cheatsheets/Cross-Site_Request_Forgery_Prevention_Cheat_Sheet.html#synchronizer-token-pattern\n *\n */\nexport default function (\n opts: { noCsrfPatterns: EndpointMatcher[] } = { noCsrfPatterns: [] }\n) {\n const noCsrfOptions = buildMatcherRegex(opts.noCsrfPatterns)\n return async (ctx: BBContext | any, next: any) => {\n // don't apply for excluded paths\n const found = matches(ctx, noCsrfOptions)\n if (found) {\n return next()\n }\n\n // don't apply for the excluded http methods\n if (EXCLUDED_METHODS.indexOf(ctx.method) !== -1) {\n return next()\n }\n\n // don't apply when the content type isn't supported\n let contentType = ctx.get(\"content-type\")\n ? ctx.get(\"content-type\").toLowerCase()\n : \"\"\n if (\n !INCLUDED_CONTENT_TYPES.filter(type => contentType.includes(type)).length\n ) {\n return next()\n }\n\n // don't apply csrf when the internal api key has been used\n if (ctx.internal) {\n return next()\n }\n\n // apply csrf when there is a token in the session (new logins)\n // in future there should be a hard requirement that the token is present\n const userToken = ctx.user?.csrfToken\n if (!userToken) {\n return next()\n }\n\n // reject if no token in request or mismatch\n const requestToken = ctx.get(Header.CSRF_TOKEN)\n if (!requestToken || requestToken !== userToken) {\n ctx.throw(403, \"Invalid CSRF token\")\n }\n\n return next()\n }\n}\n", "import { UserCtx } from \"@budibase/types\"\nimport { isAdmin } from \"../users\"\n\nexport default async (ctx: UserCtx, next: any) => {\n if (!ctx.internal && !isAdmin(ctx.user)) {\n ctx.throw(403, \"Admin user only endpoint.\")\n }\n return next()\n}\n", "import { UserCtx } from \"@budibase/types\"\nimport { isBuilder, isAdmin, hasBuilderPermissions } from \"../users\"\nimport { getAppId } from \"../context\"\nimport env from \"../environment\"\n\nexport default async (ctx: UserCtx, next: any) => {\n const appId = getAppId()\n const builderFn =\n env.isWorker() || !appId\n ? hasBuilderPermissions\n : env.isApps()\n ? isBuilder\n : undefined\n if (!builderFn) {\n throw new Error(\"Service name unknown - middleware inactive.\")\n }\n if (!ctx.internal && !builderFn(ctx.user, appId) && !isAdmin(ctx.user)) {\n ctx.throw(403, \"Admin/Builder user only endpoint.\")\n }\n return next()\n}\n", "import { UserCtx } from \"@budibase/types\"\nimport { isBuilder, hasBuilderPermissions } from \"../users\"\nimport { getAppId } from \"../context\"\nimport env from \"../environment\"\n\nexport default async (ctx: UserCtx, next: any) => {\n const appId = getAppId()\n const builderFn =\n env.isWorker() || !appId\n ? hasBuilderPermissions\n : env.isApps()\n ? 
isBuilder\n : undefined\n if (!builderFn) {\n throw new Error(\"Service name unknown - middleware inactive.\")\n }\n if (!ctx.internal && !builderFn(ctx.user, appId)) {\n ctx.throw(403, \"Builder user only endpoint.\")\n }\n return next()\n}\n", "import env from \"../../environment\"\nimport { logger } from \"./logger\"\nimport { IncomingMessage } from \"http\"\n\nconst pino = require(\"koa-pino-logger\")\n\nimport { Options } from \"pino-http\"\nimport { Ctx } from \"@budibase/types\"\n\nconst correlator = require(\"correlation-id\")\n\nexport function pinoSettings(): Options {\n return {\n logger,\n genReqId: correlator.getId,\n autoLogging: {\n ignore: (req: IncomingMessage) => !!req.url?.includes(\"/health\"),\n },\n serializers: {\n req: req => {\n return {\n method: req.method,\n url: req.url,\n correlationId: req.id,\n }\n },\n res: res => {\n return {\n status: res.statusCode,\n }\n },\n },\n }\n}\n\nfunction getMiddleware() {\n if (env.HTTP_LOGGING) {\n return pino(pinoSettings())\n } else {\n return (ctx: Ctx, next: any) => {\n return next()\n }\n }\n}\n\nconst pinoMiddleware = getMiddleware()\n\nexport default pinoMiddleware\n", "import { Header } from \"../../constants\"\nimport { v4 as uuid } from \"uuid\"\n\nconst correlator = require(\"correlation-id\")\n\nconst correlation = (ctx: any, next: any) => {\n // use the provided correlation id header if present\n let correlationId = ctx.headers[Header.CORRELATION_ID]\n if (!correlationId) {\n correlationId = uuid()\n }\n\n return correlator.withId(correlationId, () => {\n return next()\n })\n}\n\nexport default correlation\n", "import { APIError } from \"@budibase/types\"\nimport * as errors from \"../errors\"\n\nexport async function errorHandling(ctx: any, next: any) {\n try {\n await next()\n } catch (err: any) {\n const status = err.status || err.statusCode || 500\n ctx.status = status\n\n if (status >= 400 && status < 500) {\n console.warn(err)\n } else {\n console.error(err)\n }\n\n const error = errors.getPublicError(err)\n const body: APIError = {\n message: err.message,\n status: status,\n validationErrors: err.validation,\n error,\n }\n\n ctx.body = body\n }\n}\n\nexport default errorHandling\n", "import { Ctx } from \"@budibase/types\"\n\n/**\n * Expects a standard \"query\" query string property which is the JSON body\n * of the request, which has to be sent via query string due to the requirement\n * of making an endpoint a GET request e.g. downloading a file stream.\n */\nexport default function (ctx: Ctx, next: any) {\n const queryString = ctx.request.query?.query as string | undefined\n if (ctx.request.method.toLowerCase() !== \"get\") {\n ctx.throw(\n 500,\n \"Query to download middleware can only be used for get requests.\"\n )\n }\n if (!queryString) {\n return next()\n }\n const decoded = decodeURIComponent(queryString)\n let json\n try {\n json = JSON.parse(decoded)\n } catch (err) {\n return next()\n }\n ctx.request.body = json\n return next()\n}\n", "import Joi, { ObjectSchema } from \"joi\"\nimport { BBContext } from \"@budibase/types\"\n\nfunction validate(\n schema: Joi.ObjectSchema | Joi.ArraySchema,\n property: string\n) {\n // Return a Koa middleware function\n return (ctx: BBContext, next: any) => {\n if (!schema) {\n return next()\n }\n let params = null\n // @ts-ignore\n let reqProp = ctx.request?.[property]\n if (ctx[property] != null) {\n params = ctx[property]\n } else if (reqProp != null) {\n params = reqProp\n }\n\n // not all schemas have the append property e.g. 
array schemas\n if ((schema as Joi.ObjectSchema).append) {\n schema = (schema as Joi.ObjectSchema).append({\n createdAt: Joi.any().optional(),\n updatedAt: Joi.any().optional(),\n })\n }\n\n const { error } = schema.validate(params)\n if (error) {\n ctx.throw(400, `Invalid ${property} - ${error.message}`)\n return\n }\n return next()\n }\n}\n\nexport function body(schema: Joi.ObjectSchema | Joi.ArraySchema) {\n return validate(schema, \"body\")\n}\n\nexport function params(schema: Joi.ObjectSchema | Joi.ArraySchema) {\n return validate(schema, \"params\")\n}\n", "export * from \"./utils\"\n", "import {\n DatasourceFieldType,\n QueryType,\n PluginType,\n AutomationStepType,\n AutomationStepIdArray,\n AutomationIOType,\n AutomationCustomIOType,\n DatasourceFeature,\n} from \"@budibase/types\"\nimport joi from \"joi\"\n\nconst DATASOURCE_TYPES = [\n \"Relational\",\n \"Non-relational\",\n \"Spreadsheet\",\n \"Object store\",\n \"Graph\",\n \"API\",\n]\n\nfunction runJoi(validator: joi.Schema, schema: any) {\n const { error } = validator.validate(schema)\n if (error) {\n throw error\n }\n}\n\nfunction validateComponent(schema: any) {\n const validator = joi.object({\n type: joi.string().allow(PluginType.COMPONENT).required(),\n metadata: joi.object().unknown(true).required(),\n hash: joi.string().optional(),\n version: joi.string().optional(),\n schema: joi\n .object({\n name: joi.string().required(),\n settings: joi.array().items(joi.object().unknown(true)).required(),\n })\n .unknown(true),\n })\n runJoi(validator, schema)\n}\n\nfunction validateDatasource(schema: any) {\n const fieldValidator = joi.object({\n type: joi\n .string()\n .allow(...Object.values(DatasourceFieldType))\n .required(),\n required: joi.boolean().required(),\n default: joi.any(),\n display: joi.string(),\n })\n\n const queryValidator = joi\n .object({\n type: joi.string().allow(...Object.values(QueryType)),\n readable: joi.boolean(),\n fields: joi.object().pattern(joi.string(), fieldValidator),\n })\n .required()\n\n const validator = joi.object({\n type: joi.string().allow(PluginType.DATASOURCE).required(),\n metadata: joi.object().unknown(true).required(),\n hash: joi.string().optional(),\n version: joi.string().optional(),\n schema: joi.object({\n docs: joi.string(),\n plus: joi.boolean().optional(),\n isSQL: joi.boolean().optional(),\n auth: joi\n .object({\n type: joi.string().required(),\n })\n .optional(),\n features: joi\n .object(\n Object.fromEntries(\n Object.values(DatasourceFeature).map(key => [\n key,\n joi.boolean().optional(),\n ])\n )\n )\n .optional(),\n relationships: joi.boolean().optional(),\n description: joi.string().required(),\n friendlyName: joi.string().required(),\n type: joi.string().allow(...DATASOURCE_TYPES),\n datasource: joi.object().pattern(joi.string(), fieldValidator).required(),\n query: joi\n .object()\n .pattern(joi.string(), queryValidator)\n .unknown(true)\n .required(),\n extra: joi.object().pattern(\n joi.string(),\n joi.object({\n type: joi.string().required(),\n displayName: joi.string().required(),\n required: joi.boolean(),\n data: joi.object(),\n })\n ),\n }),\n })\n runJoi(validator, schema)\n}\n\nfunction validateAutomation(schema: any) {\n const basePropsValidator = joi.object().pattern(joi.string(), {\n type: joi\n .string()\n .allow(...Object.values(AutomationIOType))\n .required(),\n customType: joi.string().allow(...Object.values(AutomationCustomIOType)),\n title: joi.string(),\n description: joi.string(),\n enum: joi.array().items(joi.string()),\n pretty: 
joi.array().items(joi.string()),\n })\n const stepSchemaValidator = joi\n .object({\n properties: basePropsValidator,\n required: joi.array().items(joi.string()),\n })\n .concat(basePropsValidator)\n .required()\n const validator = joi.object({\n type: joi.string().allow(PluginType.AUTOMATION).required(),\n metadata: joi.object().unknown(true).required(),\n hash: joi.string().optional(),\n version: joi.string().optional(),\n schema: joi.object({\n name: joi.string().required(),\n tagline: joi.string().required(),\n icon: joi.string().required(),\n description: joi.string().required(),\n type: joi\n .string()\n .allow(AutomationStepType.ACTION, AutomationStepType.LOGIC)\n .required(),\n stepId: joi\n .string()\n .disallow(...AutomationStepIdArray)\n .required(),\n inputs: joi.object().optional(),\n schema: joi\n .object({\n inputs: stepSchemaValidator,\n outputs: stepSchemaValidator,\n })\n .required(),\n }),\n })\n runJoi(validator, schema)\n}\n\nexport function validate(schema: any) {\n switch (schema?.type) {\n case PluginType.COMPONENT:\n validateComponent(schema)\n break\n case PluginType.DATASOURCE:\n validateDatasource(schema)\n break\n case PluginType.AUTOMATION:\n validateAutomation(schema)\n break\n default:\n throw new Error(`Unknown plugin type - check schema.json: ${schema.type}`)\n }\n}\n", "// Mimic the outer package export for usage in index.ts\n// The outer exports can't be used as they now reference dist directly\nexport { default as Client } from \"./redis\"\nexport * as utils from \"./utils\"\nexport * as clients from \"./init\"\nexport * as locks from \"./redlockImpl\"\n", "export * from \"./blacklist\"\n", "import dns from \"dns\"\nimport net from \"net\"\nimport env from \"../environment\"\nimport { promisify } from \"util\"\n\nlet blackListArray: string[] | undefined\nconst performLookup = promisify(dns.lookup)\n\nasync function lookup(address: string): Promise<string[]> {\n if (!net.isIP(address)) {\n // need this for URL parsing simply\n if (!address.startsWith(\"http\")) {\n address = `https://${address}`\n }\n address = new URL(address).hostname\n }\n const addresses = await performLookup(address, {\n all: true,\n })\n return addresses.map(addr => addr.address)\n}\n\nexport async function refreshBlacklist() {\n const blacklist = env.BLACKLIST_IPS\n const list = blacklist?.split(\",\") || []\n let final: string[] = []\n for (let addr of list) {\n const trimmed = addr.trim()\n if (!net.isIP(trimmed)) {\n const addresses = await lookup(trimmed)\n final = final.concat(addresses)\n } else {\n final.push(trimmed)\n }\n }\n blackListArray = final\n}\n\nexport async function isBlacklisted(address: string): Promise<boolean> {\n if (!blackListArray) {\n await refreshBlacklist()\n }\n if (blackListArray?.length === 0) {\n return false\n }\n // no need for DNS\n let ips: string[]\n if (!net.isIP(address)) {\n ips = await lookup(address)\n } else {\n ips = [address]\n }\n return !!blackListArray?.find(addr => ips.includes(addr))\n}\n", "import { asyncEventQueue, init as initQueue } from \"../events/asyncEvents\"\nimport {\n ProcessorMap,\n default as DocumentUpdateProcessor,\n} from \"../events/processors/async/DocumentUpdateProcessor\"\n\nlet processingPromise: Promise<void>\nlet documentProcessor: DocumentUpdateProcessor\n\nexport function init(processors: ProcessorMap) {\n if (!asyncEventQueue) {\n initQueue()\n }\n if (!documentProcessor) {\n documentProcessor = new DocumentUpdateProcessor(processors)\n }\n // if not processing in this instance, kick it off\n if 
(!processingPromise) {\n processingPromise = asyncEventQueue.process(async job => {\n const { event, identity, properties, timestamp } = job.data\n await documentProcessor.processEvent(\n event,\n identity,\n properties,\n timestamp\n )\n })\n }\n}\n", "import { EventProcessor } from \"../types\"\nimport { Event, Identity, DocUpdateEvent } from \"@budibase/types\"\nimport { doInTenant } from \"../../../context\"\nimport { getDocumentId } from \"../../documentId\"\nimport { shutdown } from \"../../asyncEvents\"\n\nexport type Processor = (update: DocUpdateEvent) => Promise<void>\nexport type ProcessorMap = { events: Event[]; processor: Processor }[]\n\nexport default class DocumentUpdateProcessor implements EventProcessor {\n processors: ProcessorMap = []\n\n constructor(processors: ProcessorMap) {\n this.processors = processors\n }\n\n async processEvent(\n event: Event,\n identity: Identity,\n properties: any,\n timestamp?: string | number\n ) {\n const tenantId = identity.realTenantId\n const docId = getDocumentId(event, properties)\n if (!tenantId || !docId) {\n return\n }\n for (let { events, processor } of this.processors) {\n if (events.includes(event)) {\n await doInTenant(tenantId, async () => {\n await processor({\n id: docId,\n tenantId,\n })\n })\n }\n }\n }\n\n shutdown() {\n return shutdown()\n }\n}\n", "import {\n Event,\n UserCreatedEvent,\n UserUpdatedEvent,\n UserDeletedEvent,\n UserPermissionAssignedEvent,\n UserPermissionRemovedEvent,\n GroupCreatedEvent,\n GroupUpdatedEvent,\n GroupDeletedEvent,\n GroupUsersAddedEvent,\n GroupUsersDeletedEvent,\n GroupPermissionsEditedEvent,\n} from \"@budibase/types\"\n\nconst getEventProperties: Record<\n string,\n (properties: any) => string | undefined\n> = {\n [Event.USER_CREATED]: (properties: UserCreatedEvent) => properties.userId,\n [Event.USER_UPDATED]: (properties: UserUpdatedEvent) => properties.userId,\n [Event.USER_DELETED]: (properties: UserDeletedEvent) => properties.userId,\n [Event.USER_PERMISSION_ADMIN_ASSIGNED]: (\n properties: UserPermissionAssignedEvent\n ) => properties.userId,\n [Event.USER_PERMISSION_ADMIN_REMOVED]: (\n properties: UserPermissionRemovedEvent\n ) => properties.userId,\n [Event.USER_PERMISSION_BUILDER_ASSIGNED]: (\n properties: UserPermissionAssignedEvent\n ) => properties.userId,\n [Event.USER_PERMISSION_BUILDER_REMOVED]: (\n properties: UserPermissionRemovedEvent\n ) => properties.userId,\n [Event.USER_GROUP_CREATED]: (properties: GroupCreatedEvent) =>\n properties.groupId,\n [Event.USER_GROUP_UPDATED]: (properties: GroupUpdatedEvent) =>\n properties.groupId,\n [Event.USER_GROUP_DELETED]: (properties: GroupDeletedEvent) =>\n properties.groupId,\n [Event.USER_GROUP_USERS_ADDED]: (properties: GroupUsersAddedEvent) =>\n properties.groupId,\n [Event.USER_GROUP_USERS_REMOVED]: (properties: GroupUsersDeletedEvent) =>\n properties.groupId,\n [Event.USER_GROUP_PERMISSIONS_EDITED]: (\n properties: GroupPermissionsEditedEvent\n ) => properties.groupId,\n}\n\nexport function getDocumentId(event: Event, properties: any) {\n const extractor = getEventProperties[event]\n if (!extractor) {\n throw new Error(\"Event does not have a method of document ID extraction\")\n }\n return extractor(properties)\n}\n"],
5
5
  "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IA+La,qBAgBA,aAQA;AAvNb;AAAA;AAAA;AA+LO,IAAM,sBAA+B;AAAA,MAC1C;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEO,IAAM,cAAuB,CAAC,GAAG,mBAAmB;AAQpD,IAAM,2BAA8D;AAAA;AAAA,MAEzE,CAAC,iCAAkB,GAAG;AAAA,MACtB,CAAC,iCAAkB,GAAG;AAAA,MACtB,CAAC,iCAAkB,GAAG;AAAA,MACtB,CAAC,0DAAoC,GAAG;AAAA,MACxC,CAAC,wDAAmC,GAAG;AAAA,MACvC,CAAC,8DAAsC,GAAG;AAAA,MAC1C,CAAC,4DAAqC,GAAG;AAAA,MACzC,CAAC,iCAAkB,GAAG;AAAA,MACtB,CAAC,kDAA2B,GAAG;AAAA,MAC/B,CAAC,mDAA2B,GAAG;AAAA,MAC/B,CAAC,mEAAmC,GAAG;AAAA,MACvC,CAAC,+CAAyB,GAAG;AAAA,MAC7B,CAAC,6CAAwB,GAAG;AAAA,MAC5B,CAAC,6CAAwB,GAAG;AAAA,MAC5B,CAAC,6CAAwB,GAAG;AAAA,MAC5B,CAAC,oDAA4B,GAAG;AAAA,MAChC,CAAC,yDAA8B,GAAG;AAAA,MAClC,CAAC,mEAAmC,GAAG;AAAA,MACvC,CAAC,2DAA+B,GAAG;AAAA,MACnC,CAAC,yDAA2B,GAAG;AAAA,MAC/B,CAAC,yDAA8B,GAAG;AAAA,MAClC,CAAC,uDAA6B,GAAG;AAAA;AAAA,MAGjC,CAAC,6CAAwB,GAAG;AAAA,MAC5B,CAAC,6CAAwB,GAAG;AAAA;AAAA,MAG5B,CAAC,yCAAsB,GAAG;AAAA,MAC1B,CAAC,yCAAsB,GAAG;AAAA,MAC1B,CAAC,6CAAwB,GAAG;AAAA,MAC5B,CAAC,iDAA0B,GAAG;AAAA,MAC9B,CAAC,6BAAgB,GAAG;AAAA,MACpB,CAAC,+BAAiB,GAAG;AAAA;AAAA,MAGrB,CAAC,8CAAsB,GAAG;AAAA,MAC1B,CAAC,8CAAsB,GAAG;AAAA,MAC1B,CAAC,wDAA8B,GAAG;AAAA;AAAA,MAGlC,CAAC,+BAAiB,GAAG;AAAA,MACrB,CAAC,+BAAiB,GAAG;AAAA,MACrB,CAAC,+BAAiB,GAAG;AAAA,MACrB,CAAC,mCAAmB,GAAG;AAAA,MACvB,CAAC,uCAAqB,GAAG;AAAA,MACzB,CAAC,mDAA2B,GAAG;AAAA,MAC/B,CAAC,2CAAuB,GAAG;AAAA,MAC3B,CAAC,+CAAyB,GAAG;AAAA,MAC7B,CAAC,iDAA0B,GAAG;AAAA,MAC9B,CAAC,iCAAkB,GAAG;AAAA,MACtB,CAAC,iCAAkB,GAAG;AAAA,MACtB,CAAC,+CAAyB,GAAG;AAAA,MAC7B,CAAC,iDAA0B,GAAG;AAAA;AAAA,MAG9B,CAAC,6CAAwB,GAAG;AAAA,MAC5B,CAAC,6CAAwB,GAAG;AAAA,MAC5B,CAAC,6CAAwB,GAAG;AAAA;AAAA,MAG5B,CAAC,mCAAmB,GAAG;AAAA,MACvB,CAAC,mCAAmB,GAAG;AAAA,MACvB,CAAC,mCAAmB,GAAG;AAAA,MACvB,CAAC,iCAAkB,GAAG;AAAA,MACtB,CAAC,+BAAiB,GAAG;AAAA,MACrB,CAAC,uCAAqB,GAAG;AAAA;AAAA,MAGzB,CAAC,mCAAmB,GAAG;AAAA,MACvB,CAAC,mCAAmB,GAAG;AAAA,MACvB,CAAC,mCAAmB,GAAG;AAAA,MACvB,CAAC,qCAAoB,GAAG;AAAA,MACxB,CAAC,qCAAoB,GAAG;AAAA,MACxB,CAAC,+CAAyB,GAAG;AAAA;AAAA,MAG7B,CAAC,iCAAkB,GAAG;AAAA,MACtB,CAAC,mCAAmB,GAAG;AAAA;AAAA,MAGvB,CAAC,6CAAwB,GAAG;AAAA,MAC5B,CAAC,6CAAwB,GAAG;AAAA,MAC5B,CAAC,uDAA6B,GAAG;AAAA,MACjC,CAAC,uDAA6B,GAAG;AAAA,MACjC,CAAC,2CAAuB,GAAG;AAAA,MAC3B,CAAC,uCAAqB,GAAG;AAAA,MACzB,CAAC,6DAAgC,GAAG;AAAA;AAAA,MAGpC,CAAC,qCAAoB,GAAG;AAAA,MACxB,CAAC,qCAAoB,GAAG;AAAA;AAAA,MAGxB,CAAC,2CAAuB,GAAG;AAAA,MAC3B,CAAC,2CAAuB,GAAG;AAAA;AAAA,MAG3B,CAAC,iEAAkC,GAAG;AAAA,MACtC,CAAC,iEAAkC,GAAG;AAAA,MACtC,CAAC,2FAA+C,GAAG;AAAA;AAAA,MAGnD,CAAC,uCAAqB,GAAG;AAAA,MACzB,CAAC,qCAAoB,GAAG;AAAA,MACxB,CAAC,+BAAiB,GAAG;AAAA;AAAA,MAGrB,CAAC,iCAAkB,GAAG;AAAA,MACtB,CAAC,iCAAkB,GAAG;AAAA,MACtB,CAAC,iCAAkB,GAAG;AAAA,MACtB,CAAC,mCAAmB,GAAG;AAAA,MACvB,CAAC,uCAAqB,GAAG;AAAA;AAAA,MAGzB,CAAC,iDAA0B,GAAG;AAAA,MAC9B,CAAC,2CAAuB,GAAG;AAAA,MAC3B,CAAC,qDAA4B,GAAG;AAAA,MAChC,CAAC,2DAA+B,GAAG;AAAA,MACnC,CAAC,uDAA6B,GAAG;AAAA,MACjC,CAAC,yDAA8B,GAAG;AAAA,MAClC,CAAC,mDAA2B,GAAG;AAAA;AAAA,MAG/B,CAAC,uCAAqB,GAAG;AAAA,MACzB,CAAC,uCAAqB,GAAG;AAAA,MACzB,CAAC,yCAAsB,GAAG;AAAA;AAAA,MAG1B,CAAC,qDAA4B,GAAG;AAAA,MAChC,CAAC,+CAAyB,GAAG;AAAA,MAC7B,CAAC,2DAA+B,GAAG;AAAA,MACnC,CAAC,qDAA4B,GAAG;AAAA,MAChC,CAAC,uEAAqC,GAAG;AAAA,MACzC,CAAC,iEAAkC,GAAG;AAAA;AAAA,MAGtC,CAAC,qCAAoB,GAAG;AAAA,MACxB,CAAC,qCAAoB,GAAG;AAAA;AAAA,MAGxB,CAAC,iCAAkB,GAAG;AAAA,MACtB,CAAC,iCAAkB,GAAG
;AAAA,MACtB,CAAC,iCAAkB,GAAG;AAAA,MACtB,CAAC,mCAAmB,GAAG;AAAA,MACvB,CAAC,+CAAyB,GAAG;AAAA,MAC7B,CAAC,+CAAyB,GAAG;AAAA,MAC7B,CAAC,+CAAyB,GAAG;AAAA,MAC7B,CAAC,yDAA8B,GAAG;AAAA,MAClC,CAAC,yDAA8B,GAAG;AAAA,MAClC,CAAC,yDAA8B,GAAG;AAAA;AAAA,MAGlC,CAAC,qCAAoB,GAAG;AAAA,MACxB,CAAC,6BAAgB,GAAG;AAAA,MACpB,CAAC,6CAAwB,GAAG;AAAA;AAAA,MAG5B,CAAC,2CAAuB,GAAG;AAAA,MAC3B,CAAC,yCAAsB,GAAG;AAAA;AAAA,MAG1B,CAAC,iEAAkC,GAAG;AAAA,MACtC,CAAC,mEAAmC,GAAG;AAAA,MACvC,CAAC,uEAAqC,GAAG;AAAA,MACzC,CAAC,4DAAgC,GAAG;AAAA;AAAA,MAGpC,CAAC,8CAAyB,GAAG;AAAA,MAC7B,CAAC,kDAA2B,GAAG;AAAA,IACjC;AAAA;AAAA;;;ACnYA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AAAA;;;ACxBA,IAAAA,gBAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA,IAAAC;AACA;AACA;AACA;AACA;AAAA;AAAA;;;ACJA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAoBY,WAMA,qBAeA,YAmCA;AA5EZ;AAAA;AAAA;AAoBO,IAAK,YAAL,kBAAKC,eAAL;AACL,MAAAA,WAAA,SAAM;AACN,MAAAA,WAAA,UAAO;AACP,MAAAA,WAAA,YAAS;AAHC,aAAAA;AAAA,OAAA;AAML,IAAK,sBAAL,kBAAKC,yBAAL;AACL,MAAAA,qBAAA,YAAS;AACT,MAAAA,qBAAA,UAAO;AACP,MAAAA,qBAAA,cAAW;AACX,MAAAA,qBAAA,aAAU;AACV,MAAAA,qBAAA,YAAS;AACT,MAAAA,qBAAA,cAAW;AACX,MAAAA,qBAAA,UAAO;AACP,MAAAA,qBAAA,YAAS;AACT,MAAAA,qBAAA,UAAO;AACP,MAAAA,qBAAA,UAAO;AACP,MAAAA,qBAAA,iBAAc;AACd,MAAAA,qBAAA,YAAS;AAZC,aAAAA;AAAA,OAAA;AAeL,IAAK,aAAL,kBAAKC,gBAAL;AACL,MAAAA,YAAA,cAAW;AACX,MAAAA,YAAA,cAAW;AACX,MAAAA,YAAA,aAAU;AACV,MAAAA,YAAA,mBAAgB;AAChB,MAAAA,YAAA,aAAU;AACV,MAAAA,YAAA,gBAAa;AACb,MAAAA,YAAA,QAAK;AACL,MAAAA,YAAA,cAAW;AACX,MAAAA,YAAA,WAAQ;AACR,MAAAA,YAAA,cAAW;AACX,MAAAA,YAAA,UAAO;AACP,MAAAA,YAAA,YAAS;AACT,MAAAA,YAAA,mBAAgB;AAChB,MAAAA,YAAA,eAAY;AACZ,MAAAA,YAAA,WAAQ;AACR,MAAAA,YAAA,eAAY;AAhBF,aAAAA;AAAA,OAAA;AAmCL,IAAK,oBAAL,kBAAKC,uBAAL;AACL,MAAAA,mBAAA,yBAAsB;AACtB,MAAAA,mBAAA,uBAAoB;AACpB,MAAAA,mBAAA,mBAAgB;AAHN,aAAAA;AAAA,OAAA;AAAA;AAAA;;;AC5EZ;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,aAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAuGa;AAvGb;AAAA;AAAA;AAuGO,IAAM,aAAa,CAAC,QAA8B;AACvD,aAAO,OAAO,QAAQ,YAAY,IAAI,OAAO,IAAI;AAAA,IACnD;AAAA;AAAA;;;ACzGA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAMY;AANZ;AAAA;AAAA;AAMO,IAAK,2BAAL,kBAAKC,8BAAL;AACL,MAAAA,0BAAA,UAAO;AACP,MAAAA,0BAAA,YAAS;AACT,MAAAA,0BAAA,WAAQ;AACR,MAAAA,0BAAA,eAAY;AACZ,MAAAA,0BAAA,UAAO;AALG,aAAAA;AAAA,OAAA;AAAA;AAAA;;;ACNZ;AAAA;AAAA;AAAA;AACA;AAAA;AAAA;;;ACDA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,aAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAea;AAfb;AAAA;AAAA;AAAA;AAeO,IAAM,iBAAiB;AAAA,MAC5B,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,cAAc;AAAA,MACd;AAAA,IACF;AAAA;AAAA;;;ACpBA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAY,iBAQA;AARZ;AAAA;AAAA;AAAO,IAAK,kBAAL,kBAAKC,qBAAL;AACL,MAAAA,iBAAA,UAAO;AACP,MAAAA,iBAAA,WAAQ;AACR,MAAAA,iBAAA,aAAU;AACV,MAAAA,iBAAA,WAAQ;AAJE,aAAAA;AAAA,OAAA;AAQL,IAAK,iBAAL,kBAAKC,oBAAL;AACL,MAAAA,gBAAA,SAAM;AACN,MAAAA,gBAAA,WAAQ;AACR,M
AAAA,gBAAA,UAAO;AACP,MAAAA,gBAAA,gBAAa;AACb,MAAAA,gBAAA,aAAU;AACV,MAAAA,gBAAA,aAAU;AACV,MAAAA,gBAAA,aAAU;AACV,MAAAA,gBAAA,oBAAiB;AACjB,MAAAA,gBAAA,WAAQ;AACR,MAAAA,gBAAA,UAAO;AACP,MAAAA,gBAAA,iBAAc;AAXJ,aAAAA;AAAA,OAAA;AAAA;AAAA;;;ACRZ;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAAC;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAAC;AACA;AACA;AACA;AACA;AAAA;AAAA;;;ACrBA,IA6Ea,gBAMA;AAnFb,IAAAC,gBAAA;AAAA;AAAA;AAAA;AA6EO,IAAM,iBAAiB,CAAC,YAC7B,QAAQ;AAKH,IAAM,eAAe,CAAC,YAC3B,QAAQ,aAAa;AAAA;AAAA;;;ACpFvB,IAAAC,aAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,gBAAA;AAAA;AAAA;AAAA,IAAAA;AACA,IAAAC;AACA;AAAA;AAAA;;;ACFA,IAAAC,YAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAIY,kBAUA,wBAoBA,yBAeA,wBAmDC;AApGb,IAAAC,mBAAA;AAAA;AAAA;AAIO,IAAK,mBAAL,kBAAKC,sBAAL;AACL,MAAAA,kBAAA,YAAS;AACT,MAAAA,kBAAA,YAAS;AACT,MAAAA,kBAAA,aAAU;AACV,MAAAA,kBAAA,YAAS;AACT,MAAAA,kBAAA,WAAQ;AACR,MAAAA,kBAAA,UAAO;AACP,MAAAA,kBAAA,UAAO;AAPG,aAAAA;AAAA,OAAA;AAUL,IAAK,yBAAL,kBAAKC,4BAAL;AACL,MAAAA,wBAAA,WAAQ;AACR,MAAAA,wBAAA,SAAM;AACN,MAAAA,wBAAA,UAAO;AACP,MAAAA,wBAAA,UAAO;AACP,MAAAA,wBAAA,WAAQ;AACR,MAAAA,wBAAA,kBAAe;AACf,MAAAA,wBAAA,iBAAc;AACd,MAAAA,wBAAA,iBAAc;AACd,MAAAA,wBAAA,UAAO;AACP,MAAAA,wBAAA,UAAO;AACP,MAAAA,wBAAA,aAAU;AACV,MAAAA,wBAAA,YAAS;AACT,MAAAA,wBAAA,oBAAiB;AACjB,MAAAA,wBAAA,UAAO;AACP,MAAAA,wBAAA,iBAAc;AACd,MAAAA,wBAAA,gBAAa;AACb,MAAAA,wBAAA,uBAAoB;AAjBV,aAAAA;AAAA,OAAA;AAoBL,IAAK,0BAAL,kBAAKC,6BAAL;AACL,MAAAA,yBAAA,eAAY;AACZ,MAAAA,yBAAA,iBAAc;AACd,MAAAA,yBAAA,iBAAc;AACd,MAAAA,yBAAA,aAAU;AACV,MAAAA,yBAAA,SAAM;AACN,MAAAA,yBAAA,UAAO;AANG,aAAAA;AAAA,OAAA;AAeL,IAAK,yBAAL,kBAAKC,4BAAL;AACL,MAAAA,wBAAA,qBAAkB;AAClB,MAAAA,wBAAA,gBAAa;AACb,MAAAA,wBAAA,gBAAa;AACb,MAAAA,wBAAA,gBAAa;AACb,MAAAA,wBAAA,kBAAe;AACf,MAAAA,wBAAA,sBAAmB;AACnB,MAAAA,wBAAA,oBAAiB;AACjB,MAAAA,wBAAA,mBAAgB;AAChB,MAAAA,wBAAA,gBAAa;AACb,MAAAA,wBAAA,WAAQ;AACR,MAAAA,wBAAA,YAAS;AACT,MAAAA,wBAAA,gBAAa;AACb,MAAAA,wBAAA,UAAO;AACP,MAAAA,wBAAA,aAAU;AACV,MAAAA,wBAAA,YAAS;AACT,MAAAA,wBAAA,4BAAyB;AAEzB,MAAAA,wBAAA,aAAU;AACV,MAAAA,wBAAA,WAAQ;AACR,MAAAA,wBAAA,YAAS;AACT,MAAAA,wBAAA,gBAAa;AArBH,aAAAA;AAAA,OAAA;AAmDL,IAAM,wBAAwB;AAAA,MACnC,GAAG,OAAO,OAAO,sBAAsB;AAAA,MACvC,GAAG,OAAO,OAAO,uBAAuB;AAAA,IAC1C;AAAA;AAAA;;;ACvGA,IAAAC,mBAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,eAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,cAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,aAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,cAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,YAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,kBAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAEA,IAAAC;AACA,IAAAC;AAAA;AAAA;;;ACHA,IAAAC,cAAA;AAAA;AAAA;AAAA,IAAAA;AACA;AACA,IAAAC;AAAA;AAAA;;;ACFA,IAAAC,eAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,aAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAa,WACA,aAEA,UAED,cAwDA;AA7DZ;AAAA;AAAA;AAAO,IAAM,YAAY;AAClB,IAAM,cAAc;AAEpB,IAAM,WAAW,CAAC,SAAuB,GAAG,IAAI,GAAG,SAAS;AAE5D,IAAK,eAAL,kBAAKC,kBAAL;AACL,MAAAA,cAAA,UAAO;AACP,MAAAA,cAAA,WAAQ;AACR,MAAAA,cAAA,eAAY;AACZ,MAAAA,cAAA,YAAS;AACT,MAAAA,cAAA,cAAW;AACX,MAAAA,cAAA,SAAM;AACN,MAAAA,cAAA,SAAM;AACN,MAAAA,cAAA,aAAU;AACV,MAAAA,cAAA,kBAAe;AACf,MAAAA,cAAA,UAAO;AACP,MAAAA,cAAA,gBAAa;AACb,MAAAA,cAAA,cAAW;AACX,MAAAA,cAAA,oBAAiB;AACjB,MAAAA,cAAA,sBAAmB;AACnB,MAAAA,cAAA,YAAS;AACT,MAAAA,cAAA,gBAAa;AACb,MAAAA,cAAA,qBAAkB;AAClB,MAAAA,cAAA,gBAAa;AACb,MAAAA,cAAA,WAAQ;AACR,MAAAA,cAAA,SAAM;AACN,MAAAA,cAAA,gBAAa;AACb,MAAAA,cAAA,UAAO;AACP,MAAAA,cAAA,aAAU;AACV,MAAAA,cAAA,cAAW;AACX,MAAAA,cAAA,YAAS;AACT,MAAAA,cAAA,YAAS;AACT,MAAAA,cAAA,WAAQ;AACR,MAAAA,cAAA,iBAAc;AACd,MAAAA,cAAA,cAAW;AACX,MAAAA,cAAA,cAAW;AACX,MAAAA,cAAA,eAAY;AACZ,MAAAA,cAAA,yBAAsB;AACtB,MAAAA,cAAA,eAAY;AACZ,MAAAA,cA
AA,4BAAyB;AAlCf,aAAAA;AAAA,OAAA;AAwDL,IAAK,gBAAL,kBAAKC,mBAAL;AACL,MAAAA,eAAA,mBAAgB;AADN,aAAAA;AAAA,OAAA;AAAA;AAAA;;;AC7DZ,IAAAC,aAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,eAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,YAAA;AAAA;AAAA;AAAA,IAAAA;AACA,IAAAC;AACA,IAAAC;AACA,IAAAC;AACA,IAAAC;AACA,IAAAC;AACA,IAAAC;AACA,IAAAC;AACA,IAAAC;AACA;AACA,IAAAC;AACA,IAAAC;AACA,IAAAC;AACA;AACA;AACA;AAAA;AAAA;;;ACfA;AAAA;AAAA;AAAA;AAAA;;;AC4BO,SAAS,UAAU,MAA6B;AACrD,SAAO,CAAC,CAAE,KAAiB;AAC7B;AA9BA,IAAAC,aAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,kBAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAEY,YAmCC;AArCb,IAAAC,eAAA;AAAA;AAAA;AAEO,IAAK,aAAL,kBAAKC,gBAAL;AACL,MAAAA,YAAA,gBAAa;AACb,MAAAA,YAAA,eAAY;AACZ,MAAAA,YAAA,gBAAa;AAHH,aAAAA;AAAA,OAAA;AAmCL,IAAM,kBAAkB,OAAO,OAAO,UAAU;AAAA;AAAA;;;ACrCvD,IAQa,iBAMA;AAdb;AAAA;AAAA;AAAA;AAQO,IAAM,kBAAkB;AAAA;AAAA;AAAA;AAAA,IAI/B;AAEO,IAAM,wBAAwB;AAAA;AAAA;AAAA,IAGrC;AAAA;AAAA;;;ACjBA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,6BAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,kBAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AACA,IAAAC;AACA,IAAAC;AACA,IAAAC;AACA;AACA;AACA;AACA,IAAAC;AACA,IAAAC;AAAA;AAAA;;;ACRA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AAAA;AAAA;;;ACHA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA,IAAAC;AACA,IAAAC;AACA;AACA;AACA;AACA;AAAA;AAAA;;;ACLA,IAAAC,iBAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,aAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,gBAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,gBAAA;AAAA;AAAA;AAAA,IAAAC;AACA,IAAAC;AACA,IAAAC;AACA;AAAA;AAAA;;;ACHA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,aAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,aAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,iBAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,eAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,mBAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,YAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,aAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,aAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,cAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,aAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,YAAA;AAAA;AAAA;AAAA,IAAAC;AACA,IAAAC;AACA,IAAAC;AACA,IAAAC;AACA,IAAAC;AACA,IAAAC;AACA;AACA;AACA,IAAAC;AAAA;AAAA;;;ACRA,IAAAC,6BAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,kBAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,eAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,cAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA,IAAAC;AACA;AACA;AAAA;AAAA;;;ACFA,IAAAC,gBAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,eAAA;AAAA;AAAA;AAAA,IAAAC;AACA,IAAAC;AACA,IAAAC;AACA;AACA;AACA,IAAAC;AAAA;AAAA;;;ACLA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AACA,IAAAC;AACA,IAAAC;AACA;AACA;AACA,IAAAC;AACA;AACA,IAAAC;AACA,IAAAC;AACA;AACA;AACA;AAAA;AAAA;;;ACXA;AAAA;AAAA;AAAA,IAAAC;AACA;AAAA;AAAA;;;ACDA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,eAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AACA,IAAAC;AAAA;AAAA;;;ACJA,IAYY,oBAMA,UAaC,iBAOA,iBAuBA,YACA,SACA,gBACA;AAhEb,IAAAC,WAAA;AAAA;AAAA;AAAA;AAEA;AAUO,IAAK,qBAAL,kBAAKC,wBAAL;AACL,MAAAA,oBAAA,SAAM;AACN,MAAAA,oBAAA,gBAAa;AACb,MAAAA,oBAAA,YAAS;AAHC,aAAAA;AAAA,OAAA;AAML,IAAK,WAAL,kBAAKC,cAAL;AACL,MAAAA,UAAA,iBAAc;AACd,MAAAA,UAAA,mBAAgB;AAChB,MAAAA,UAAA,gBAAa;AACb,MAAAA,UAAA,UAAO;AACP,MAAAA,UAAA,aAAU;AACV,M
AAAA,UAAA,qBAAkB;AAClB,MAAAA,UAAA,sBAAmB;AACnB,MAAAA,UAAA,8BAA2B;AAC3B,MAAAA,UAAA,mBAAgB;AAChB,MAAAA,UAAA,2BAAwB;AAVd,aAAAA;AAAA,OAAA;AAaL,IAAM,kBAA4C;AAAA,MACvD,CAAC,+BAAsB,GAAG;AAAA;AAAA,QAExB;AAAA,MACF;AAAA,IACF;AAEO,IAAM,kBAAkB;AAAA,MAC7B,QAAQ;AAAA,QACN,MAAM;AAAA,QACN,MAAM;AAAA,UACJ,SAAS;AAAA,UACT,YAAY;AAAA,UACZ,aAAa;AAAA,UACb,sBAAsB;AAAA,QACxB;AAAA,MACF;AAAA;AAAA,MAEA,eAAe;AAAA,QACb,MAAM;AAAA,QACN,MAAM;AAAA,UACJ,SAAS;AAAA,UACT,SAAS;AAAA,QACX;AAAA,MACF;AAAA,MACA,YAAY;AAAA,QACV,MAAM;AAAA,MACR;AAAA,IACF;AAEO,IAAM,aAAa,wBAAyB;AAC5C,IAAM,UAAU,gCAA6B;AAC7C,IAAM,iBAAiB;AACvB,IAAM,2BAA2B;AAAA;AAAA;;;AChExC,IAAY;AAAZ,IAAAC,YAAA;AAAA;AAAA;AAAO,IAAK,SAAL,kBAAKC,YAAL;AACL,MAAAA,QAAA,aAAU;AACV,MAAAA,QAAA,iBAAc;AACd,MAAAA,QAAA,aAAU;AACV,MAAAA,QAAA,YAAS;AACT,MAAAA,QAAA,gBAAa;AACb,MAAAA,QAAA,UAAO;AACP,MAAAA,QAAA,kBAAe;AACf,MAAAA,QAAA,eAAY;AACZ,MAAAA,QAAA,uBAAoB;AACpB,MAAAA,QAAA,8BAA2B;AAC3B,MAAAA,QAAA,yBAAsB;AACtB,MAAAA,QAAA,gCAA6B;AAC7B,MAAAA,QAAA,WAAQ;AACR,MAAAA,QAAA,gBAAa;AACb,MAAAA,QAAA,oBAAiB;AACjB,MAAAA,QAAA,mBAAgB;AAChB,MAAAA,QAAA,mBAAgB;AAjBN,aAAAA;AAAA,OAAA;AAAA;AAAA;;;ACAZ,IAEa;AAFb,IAAAC,kBAAA;AAAA;AAAA;AAAA,IAAAC;AAEO,IAAM,kBAAkB;AAAA,MAC7B,QAAQ;AAAA,QACN,OAAO;AAAA,QACP,OAAO;AAAA,MACT;AAAA,MACA,WAAW;AAAA,QACT,OAAO;AAAA,QACP,OAAO;AAAA,MACT;AAAA,MACA,OAAO;AAAA,QACL,OAAO;AAAA,QACP,OAAO;AAAA,MACT;AAAA,MACA,UAAU;AAAA,QACR,OAAO;AAAA,QACP,OAAO;AAAA,MACT;AAAA,MACA,YAAY;AAAA,QACV,OAAO;AAAA,QACP,OAAO;AAAA,MACT;AAAA,MACA,MAAM;AAAA,QACJ,OAAO;AAAA,QACP,OAAO;AAAA,MACT;AAAA,MACA,UAAU;AAAA,QACR,OAAO;AAAA,QACP,OAAO;AAAA,MACT;AAAA,MACA,UAAU;AAAA,QACR,OAAO;AAAA,QACP,OAAO;AAAA,MACT;AAAA,MACA,UAAU;AAAA,QACR,OAAO;AAAA,QACP,OAAO;AAAA,MACT;AAAA,MACA,aAAa;AAAA,QACX,OAAO;AAAA,QACP,OAAO;AAAA,MACT;AAAA,MACA,IAAI;AAAA,QACF,OAAO;AAAA,QACP,OAAO;AAAA,MACT;AAAA,MACA,aAAa;AAAA,QACX,OAAO;AAAA,QACP,OAAO;AAAA,MACT;AAAA,IACF;AAAA;AAAA;;;ACnDA;AAAA,mDAAAC,SAAA;AAAA;AAAA,KAAC,SAAS,GAAE,GAAE;AAAC,kBAAU,OAAO,WAAS,eAAa,OAAOA,UAAOA,QAAO,UAAQ,EAAE,IAAE,cAAY,OAAO,UAAQ,OAAO,MAAI,OAAO,CAAC,KAAG,IAAE,eAAa,OAAO,aAAW,aAAW,KAAG,MAAM,QAAM,EAAE;AAAA,IAAC,EAAE,SAAM,WAAU;AAAC;AAAa,UAAI,IAAE,KAAI,IAAE,KAAI,IAAE,MAAK,IAAE,eAAc,IAAE,UAAS,IAAE,UAAS,IAAE,QAAO,IAAE,OAAM,IAAE,QAAO,IAAE,SAAQ,IAAE,WAAU,IAAE,QAAO,IAAE,QAAO,IAAE,gBAAe,IAAE,8FAA6F,IAAE,uFAAsF,IAAE,EAAC,MAAK,MAAK,UAAS,2DAA2D,MAAM,GAAG,GAAE,QAAO,wFAAwF,MAAM,GAAG,GAAE,SAAQ,SAASC,IAAE;AAAC,YAAIC,KAAE,CAAC,MAAK,MAAK,MAAK,IAAI,GAAEC,KAAEF,KAAE;AAAI,eAAM,MAAIA,MAAGC,IAAGC,KAAE,MAAI,EAAE,KAAGD,GAAEC,EAAC,KAAGD,GAAE,CAAC,KAAG;AAAA,MAAG,EAAC,GAAE,IAAE,SAASD,IAAEC,IAAEC,IAAE;AAAC,YAAIC,KAAE,OAAOH,EAAC;AAAE,eAAM,CAACG,MAAGA,GAAE,UAAQF,KAAED,KAAE,KAAG,MAAMC,KAAE,IAAEE,GAAE,MAAM,EAAE,KAAKD,EAAC,IAAEF;AAAA,MAAC,GAAE,IAAE,EAAC,GAAE,GAAE,GAAE,SAASA,IAAE;AAAC,YAAIC,KAAE,CAACD,GAAE,UAAU,GAAEE,KAAE,KAAK,IAAID,EAAC,GAAEE,KAAE,KAAK,MAAMD,KAAE,EAAE,GAAEE,KAAEF,KAAE;AAAG,gBAAOD,MAAG,IAAE,MAAI,OAAK,EAAEE,IAAE,GAAE,GAAG,IAAE,MAAI,EAAEC,IAAE,GAAE,GAAG;AAAA,MAAC,GAAE,GAAE,SAASJ,GAAEC,IAAEC,IAAE;AAAC,YAAGD,GAAE,KAAK,IAAEC,GAAE,KAAK;AAAE,iBAAM,CAACF,GAAEE,IAAED,EAAC;AAAE,YAAIE,KAAE,MAAID,GAAE,KAAK,IAAED,GAAE,KAAK,MAAIC,GAAE,MAAM,IAAED,GAAE,MAAM,IAAGG,KAAEH,GAAE,MAAM,EAAE,IAAIE,IAAE,CAAC,GAAEE,KAAEH,KAAEE,KAAE,GAAEE,KAAEL,GAAE,MAAM,EAAE,IAAIE,MAAGE,KAAE,KAAG,IAAG,CAAC;AAAE,eAAM,EAAE,EAAEF,MAAGD,KAAEE,OAAIC,KAAED,KAAEE,KAAEA,KAAEF,QAAK;AAAA,MAAE,GAAE,GAAE,SAASJ,IAAE;AAAC,eAAOA,KAAE,IAAE,KAAK,KAAKA,EAAC,KAAG,IAAE,KAAK,MAAMA,EAAC;AAAA,MAAC,GAAE,GAAE,SAASA,IAAE;AAAC,eAAM,EAAC,GAAE,GAAE,GAAE,GAAE,GAAE,GAAE,GAAE,GAAE,GAAE,GAAE,GAAE,GAAE,GAAE,GAAE,GAAE,GAAE,IAAG,GAAE,GAAE,EAAC,EAAEA,EAAC,KAAG,OAAOA,MAAG,EAAE,EAAE,YAAY,EAAE,QAA
Q,MAAK,EAAE;AAAA,MAAC,GAAE,GAAE,SAASA,IAAE;AAAC,eAAO,WAASA;AAAA,MAAC,EAAC,GAAE,IAAE,MAAK,IAAE,CAAC;AAAE,QAAE,CAAC,IAAE;AAAE,UAAI,IAAE,kBAAiB,IAAE,SAASA,IAAE;AAAC,eAAOA,cAAaO,MAAG,EAAE,CAACP,MAAG,CAACA,GAAE,CAAC;AAAA,MAAE,GAAE,IAAE,SAASA,GAAEC,IAAEC,IAAEC,IAAE;AAAC,YAAIC;AAAE,YAAG,CAACH;AAAE,iBAAO;AAAE,YAAG,YAAU,OAAOA,IAAE;AAAC,cAAII,KAAEJ,GAAE,YAAY;AAAE,YAAEI,EAAC,MAAID,KAAEC,KAAGH,OAAI,EAAEG,EAAC,IAAEH,IAAEE,KAAEC;AAAG,cAAIC,KAAEL,GAAE,MAAM,GAAG;AAAE,cAAG,CAACG,MAAGE,GAAE,SAAO;AAAE,mBAAON,GAAEM,GAAE,CAAC,CAAC;AAAA,QAAC,OAAK;AAAC,cAAIE,KAAEP,GAAE;AAAK,YAAEO,EAAC,IAAEP,IAAEG,KAAEI;AAAA,QAAC;AAAC,eAAM,CAACL,MAAGC,OAAI,IAAEA,KAAGA,MAAG,CAACD,MAAG;AAAA,MAAC,GAAE,IAAE,SAASH,IAAEC,IAAE;AAAC,YAAG,EAAED,EAAC;AAAE,iBAAOA,GAAE,MAAM;AAAE,YAAIE,KAAE,YAAU,OAAOD,KAAEA,KAAE,CAAC;AAAE,eAAOC,GAAE,OAAKF,IAAEE,GAAE,OAAK,WAAU,IAAIK,GAAEL,EAAC;AAAA,MAAC,GAAE,IAAE;AAAE,QAAE,IAAE,GAAE,EAAE,IAAE,GAAE,EAAE,IAAE,SAASF,IAAEC,IAAE;AAAC,eAAO,EAAED,IAAE,EAAC,QAAOC,GAAE,IAAG,KAAIA,GAAE,IAAG,GAAEA,GAAE,IAAG,SAAQA,GAAE,QAAO,CAAC;AAAA,MAAC;AAAE,UAAIM,KAAE,WAAU;AAAC,iBAASE,GAAET,IAAE;AAAC,eAAK,KAAG,EAAEA,GAAE,QAAO,MAAK,IAAE,GAAE,KAAK,MAAMA,EAAC,GAAE,KAAK,KAAG,KAAK,MAAIA,GAAE,KAAG,CAAC,GAAE,KAAK,CAAC,IAAE;AAAA,QAAE;AAAC,YAAIU,KAAED,GAAE;AAAU,eAAOC,GAAE,QAAM,SAASV,IAAE;AAAC,eAAK,KAAG,SAASA,IAAE;AAAC,gBAAIC,KAAED,GAAE,MAAKE,KAAEF,GAAE;AAAI,gBAAG,SAAOC;AAAE,qBAAO,oBAAI,KAAK,GAAG;AAAE,gBAAG,EAAE,EAAEA,EAAC;AAAE,qBAAO,oBAAI;AAAK,gBAAGA,cAAa;AAAK,qBAAO,IAAI,KAAKA,EAAC;AAAE,gBAAG,YAAU,OAAOA,MAAG,CAAC,MAAM,KAAKA,EAAC,GAAE;AAAC,kBAAIE,KAAEF,GAAE,MAAM,CAAC;AAAE,kBAAGE,IAAE;AAAC,oBAAIC,KAAED,GAAE,CAAC,IAAE,KAAG,GAAEE,MAAGF,GAAE,CAAC,KAAG,KAAK,UAAU,GAAE,CAAC;AAAE,uBAAOD,KAAE,IAAI,KAAK,KAAK,IAAIC,GAAE,CAAC,GAAEC,IAAED,GAAE,CAAC,KAAG,GAAEA,GAAE,CAAC,KAAG,GAAEA,GAAE,CAAC,KAAG,GAAEA,GAAE,CAAC,KAAG,GAAEE,EAAC,CAAC,IAAE,IAAI,KAAKF,GAAE,CAAC,GAAEC,IAAED,GAAE,CAAC,KAAG,GAAEA,GAAE,CAAC,KAAG,GAAEA,GAAE,CAAC,KAAG,GAAEA,GAAE,CAAC,KAAG,GAAEE,EAAC;AAAA,cAAC;AAAA,YAAC;AAAC,mBAAO,IAAI,KAAKJ,EAAC;AAAA,UAAC,EAAED,EAAC,GAAE,KAAK,KAAK;AAAA,QAAC,GAAEU,GAAE,OAAK,WAAU;AAAC,cAAIV,KAAE,KAAK;AAAG,eAAK,KAAGA,GAAE,YAAY,GAAE,KAAK,KAAGA,GAAE,SAAS,GAAE,KAAK,KAAGA,GAAE,QAAQ,GAAE,KAAK,KAAGA,GAAE,OAAO,GAAE,KAAK,KAAGA,GAAE,SAAS,GAAE,KAAK,KAAGA,GAAE,WAAW,GAAE,KAAK,KAAGA,GAAE,WAAW,GAAE,KAAK,MAAIA,GAAE,gBAAgB;AAAA,QAAC,GAAEU,GAAE,SAAO,WAAU;AAAC,iBAAO;AAAA,QAAC,GAAEA,GAAE,UAAQ,WAAU;AAAC,iBAAM,EAAE,KAAK,GAAG,SAAS,MAAI;AAAA,QAAE,GAAEA,GAAE,SAAO,SAASV,IAAEC,IAAE;AAAC,cAAIC,KAAE,EAAEF,EAAC;AAAE,iBAAO,KAAK,QAAQC,EAAC,KAAGC,MAAGA,MAAG,KAAK,MAAMD,EAAC;AAAA,QAAC,GAAES,GAAE,UAAQ,SAASV,IAAEC,IAAE;AAAC,iBAAO,EAAED,EAAC,IAAE,KAAK,QAAQC,EAAC;AAAA,QAAC,GAAES,GAAE,WAAS,SAASV,IAAEC,IAAE;AAAC,iBAAO,KAAK,MAAMA,EAAC,IAAE,EAAED,EAAC;AAAA,QAAC,GAAEU,GAAE,KAAG,SAASV,IAAEC,IAAEC,IAAE;AAAC,iBAAO,EAAE,EAAEF,EAAC,IAAE,KAAKC,EAAC,IAAE,KAAK,IAAIC,IAAEF,EAAC;AAAA,QAAC,GAAEU,GAAE,OAAK,WAAU;AAAC,iBAAO,KAAK,MAAM,KAAK,QAAQ,IAAE,GAAG;AAAA,QAAC,GAAEA,GAAE,UAAQ,WAAU;AAAC,iBAAO,KAAK,GAAG,QAAQ;AAAA,QAAC,GAAEA,GAAE,UAAQ,SAASV,IAAEC,IAAE;AAAC,cAAIC,KAAE,MAAKC,KAAE,CAAC,CAAC,EAAE,EAAEF,EAAC,KAAGA,IAAEU,KAAE,EAAE,EAAEX,EAAC,GAAEY,KAAE,SAASZ,IAAEC,IAAE;AAAC,gBAAIG,KAAE,EAAE,EAAEF,GAAE,KAAG,KAAK,IAAIA,GAAE,IAAGD,IAAED,EAAC,IAAE,IAAI,KAAKE,GAAE,IAAGD,IAAED,EAAC,GAAEE,EAAC;AAAE,mBAAOC,KAAEC,KAAEA,GAAE,MAAM,CAAC;AAAA,UAAC,GAAES,KAAE,SAASb,IAAEC,IAAE;AAAC,mBAAO,EAAE,EAAEC,GAAE,OAAO,EAAEF,EAAC,EAAE,MAAME,GAAE,OAAO,GAAG,IAAGC,KAAE,CAAC,GAAE,GAAE,GAAE,CAAC,IAAE,CAAC,IAAG,IAAG,IAAG,GAAG,GAAG,MAAMF,EAAC,CAAC,GAAEC,EAAC;AAAA,UAAC,GAAEY,KAAE,KAAK,IAAGL,KAAE,KAAK,IAAGC,KAAE,KAAK,IAAGK,KAAE,SAAO,KAAK,KAAG,QAAM;AAAI,kBAAOJ,IAAE;AAAA
,YAAC,KAAK;AAAE,qBAAOR,KAAES,GAAE,GAAE,CAAC,IAAEA,GAAE,IAAG,EAAE;AAAA,YAAE,KAAK;AAAE,qBAAOT,KAAES,GAAE,GAAEH,EAAC,IAAEG,GAAE,GAAEH,KAAE,CAAC;AAAA,YAAE,KAAK;AAAE,kBAAIO,KAAE,KAAK,QAAQ,EAAE,aAAW,GAAEC,MAAGH,KAAEE,KAAEF,KAAE,IAAEA,MAAGE;AAAE,qBAAOJ,GAAET,KAAEO,KAAEO,KAAEP,MAAG,IAAEO,KAAGR,EAAC;AAAA,YAAE,KAAK;AAAA,YAAE,KAAK;AAAE,qBAAOI,GAAEE,KAAE,SAAQ,CAAC;AAAA,YAAE,KAAK;AAAE,qBAAOF,GAAEE,KAAE,WAAU,CAAC;AAAA,YAAE,KAAK;AAAE,qBAAOF,GAAEE,KAAE,WAAU,CAAC;AAAA,YAAE,KAAK;AAAE,qBAAOF,GAAEE,KAAE,gBAAe,CAAC;AAAA,YAAE;AAAQ,qBAAO,KAAK,MAAM;AAAA,UAAC;AAAA,QAAC,GAAEL,GAAE,QAAM,SAASV,IAAE;AAAC,iBAAO,KAAK,QAAQA,IAAE,KAAE;AAAA,QAAC,GAAEU,GAAE,OAAK,SAASV,IAAEC,IAAE;AAAC,cAAIC,IAAEgB,KAAE,EAAE,EAAElB,EAAC,GAAEW,KAAE,SAAO,KAAK,KAAG,QAAM,KAAIC,MAAGV,KAAE,CAAC,GAAEA,GAAE,CAAC,IAAES,KAAE,QAAOT,GAAE,CAAC,IAAES,KAAE,QAAOT,GAAE,CAAC,IAAES,KAAE,SAAQT,GAAE,CAAC,IAAES,KAAE,YAAWT,GAAE,CAAC,IAAES,KAAE,SAAQT,GAAE,CAAC,IAAES,KAAE,WAAUT,GAAE,CAAC,IAAES,KAAE,WAAUT,GAAE,CAAC,IAAES,KAAE,gBAAeT,IAAGgB,EAAC,GAAEL,KAAEK,OAAI,IAAE,KAAK,MAAIjB,KAAE,KAAK,MAAIA;AAAE,cAAGiB,OAAI,KAAGA,OAAI,GAAE;AAAC,gBAAIJ,KAAE,KAAK,MAAM,EAAE,IAAI,GAAE,CAAC;AAAE,YAAAA,GAAE,GAAGF,EAAC,EAAEC,EAAC,GAAEC,GAAE,KAAK,GAAE,KAAK,KAAGA,GAAE,IAAI,GAAE,KAAK,IAAI,KAAK,IAAGA,GAAE,YAAY,CAAC,CAAC,EAAE;AAAA,UAAE;AAAM,YAAAF,MAAG,KAAK,GAAGA,EAAC,EAAEC,EAAC;AAAE,iBAAO,KAAK,KAAK,GAAE;AAAA,QAAI,GAAEH,GAAE,MAAI,SAASV,IAAEC,IAAE;AAAC,iBAAO,KAAK,MAAM,EAAE,KAAKD,IAAEC,EAAC;AAAA,QAAC,GAAES,GAAE,MAAI,SAASV,IAAE;AAAC,iBAAO,KAAK,EAAE,EAAEA,EAAC,CAAC,EAAE;AAAA,QAAC,GAAEU,GAAE,MAAI,SAASP,IAAEQ,IAAE;AAAC,cAAIQ,IAAEP,KAAE;AAAK,UAAAT,KAAE,OAAOA,EAAC;AAAE,cAAIU,KAAE,EAAE,EAAEF,EAAC,GAAEG,KAAE,SAASd,IAAE;AAAC,gBAAIC,KAAE,EAAEW,EAAC;AAAE,mBAAO,EAAE,EAAEX,GAAE,KAAKA,GAAE,KAAK,IAAE,KAAK,MAAMD,KAAEG,EAAC,CAAC,GAAES,EAAC;AAAA,UAAC;AAAE,cAAGC,OAAI;AAAE,mBAAO,KAAK,IAAI,GAAE,KAAK,KAAGV,EAAC;AAAE,cAAGU,OAAI;AAAE,mBAAO,KAAK,IAAI,GAAE,KAAK,KAAGV,EAAC;AAAE,cAAGU,OAAI;AAAE,mBAAOC,GAAE,CAAC;AAAE,cAAGD,OAAI;AAAE,mBAAOC,GAAE,CAAC;AAAE,cAAIL,MAAGU,KAAE,CAAC,GAAEA,GAAE,CAAC,IAAE,GAAEA,GAAE,CAAC,IAAE,GAAEA,GAAE,CAAC,IAAE,GAAEA,IAAGN,EAAC,KAAG,GAAEH,KAAE,KAAK,GAAG,QAAQ,IAAEP,KAAEM;AAAE,iBAAO,EAAE,EAAEC,IAAE,IAAI;AAAA,QAAC,GAAEA,GAAE,WAAS,SAASV,IAAEC,IAAE;AAAC,iBAAO,KAAK,IAAI,KAAGD,IAAEC,EAAC;AAAA,QAAC,GAAES,GAAE,SAAO,SAASV,IAAE;AAAC,cAAIC,KAAE,MAAKC,KAAE,KAAK,QAAQ;AAAE,cAAG,CAAC,KAAK,QAAQ;AAAE,mBAAOA,GAAE,eAAa;AAAE,cAAIC,KAAEH,MAAG,wBAAuBI,KAAE,EAAE,EAAE,IAAI,GAAEC,KAAE,KAAK,IAAGC,KAAE,KAAK,IAAGE,KAAE,KAAK,IAAGU,KAAEhB,GAAE,UAASkB,KAAElB,GAAE,QAAOS,KAAET,GAAE,UAASmB,KAAE,SAASrB,IAAEE,IAAEE,IAAEC,IAAE;AAAC,mBAAOL,OAAIA,GAAEE,EAAC,KAAGF,GAAEC,IAAEE,EAAC,MAAIC,GAAEF,EAAC,EAAE,MAAM,GAAEG,EAAC;AAAA,UAAC,GAAEc,KAAE,SAASnB,IAAE;AAAC,mBAAO,EAAE,EAAEK,KAAE,MAAI,IAAGL,IAAE,GAAG;AAAA,UAAC,GAAEa,KAAEF,MAAG,SAASX,IAAEC,IAAEC,IAAE;AAAC,gBAAIC,KAAEH,KAAE,KAAG,OAAK;AAAK,mBAAOE,KAAEC,GAAE,YAAY,IAAEA;AAAA,UAAC;AAAE,iBAAOA,GAAE,QAAQ,GAAG,SAASH,IAAEG,IAAE;AAAC,mBAAOA,MAAG,SAASH,IAAE;AAAC,sBAAOA,IAAE;AAAA,gBAAC,KAAI;AAAK,yBAAO,OAAOC,GAAE,EAAE,EAAE,MAAM,EAAE;AAAA,gBAAE,KAAI;AAAO,yBAAO,EAAE,EAAEA,GAAE,IAAG,GAAE,GAAG;AAAA,gBAAE,KAAI;AAAI,yBAAOO,KAAE;AAAA,gBAAE,KAAI;AAAK,yBAAO,EAAE,EAAEA,KAAE,GAAE,GAAE,GAAG;AAAA,gBAAE,KAAI;AAAM,yBAAOa,GAAEnB,GAAE,aAAYM,IAAEY,IAAE,CAAC;AAAA,gBAAE,KAAI;AAAO,yBAAOC,GAAED,IAAEZ,EAAC;AAAA,gBAAE,KAAI;AAAI,yBAAOP,GAAE;AAAA,gBAAG,KAAI;AAAK,yBAAO,EAAE,EAAEA,GAAE,IAAG,GAAE,GAAG;AAAA,gBAAE,KAAI;AAAI,yBAAO,OAAOA,GAAE,EAAE;AAAA,gBAAE,KAAI;AAAK,yBAAOoB,GAAEnB,GAAE,aAAYD,GAAE,IAAGiB,IAAE,CAAC;AAAA,gBAAE,KAAI;AAAM,yBAAOG,GAAEnB,GAAE,eAAcD,GAAE,IAAGiB,IAAE,CAAC;AAAA,gBAAE,KAAI;AAAO,yBAAOA,GAAEjB,GAAE,EAAE;AAAA,gBAAE,K
AAI;AAAI,yBAAO,OAAOI,EAAC;AAAA,gBAAE,KAAI;AAAK,yBAAO,EAAE,EAAEA,IAAE,GAAE,GAAG;AAAA,gBAAE,KAAI;AAAI,yBAAOc,GAAE,CAAC;AAAA,gBAAE,KAAI;AAAK,yBAAOA,GAAE,CAAC;AAAA,gBAAE,KAAI;AAAI,yBAAON,GAAER,IAAEC,IAAE,IAAE;AAAA,gBAAE,KAAI;AAAI,yBAAOO,GAAER,IAAEC,IAAE,KAAE;AAAA,gBAAE,KAAI;AAAI,yBAAO,OAAOA,EAAC;AAAA,gBAAE,KAAI;AAAK,yBAAO,EAAE,EAAEA,IAAE,GAAE,GAAG;AAAA,gBAAE,KAAI;AAAI,yBAAO,OAAOL,GAAE,EAAE;AAAA,gBAAE,KAAI;AAAK,yBAAO,EAAE,EAAEA,GAAE,IAAG,GAAE,GAAG;AAAA,gBAAE,KAAI;AAAM,yBAAO,EAAE,EAAEA,GAAE,KAAI,GAAE,GAAG;AAAA,gBAAE,KAAI;AAAI,yBAAOG;AAAA,cAAC;AAAC,qBAAO;AAAA,YAAI,EAAEJ,EAAC,KAAGI,GAAE,QAAQ,KAAI,EAAE;AAAA,UAAC,CAAE;AAAA,QAAC,GAAEM,GAAE,YAAU,WAAU;AAAC,iBAAO,KAAG,CAAC,KAAK,MAAM,KAAK,GAAG,kBAAkB,IAAE,EAAE;AAAA,QAAC,GAAEA,GAAE,OAAK,SAASP,IAAEgB,IAAEP,IAAE;AAAC,cAAIC,IAAEC,KAAE,MAAKL,KAAE,EAAE,EAAEU,EAAC,GAAET,KAAE,EAAEP,EAAC,GAAEY,MAAGL,GAAE,UAAU,IAAE,KAAK,UAAU,KAAG,GAAEM,KAAE,OAAKN,IAAEO,KAAE,WAAU;AAAC,mBAAO,EAAE,EAAEH,IAAEJ,EAAC;AAAA,UAAC;AAAE,kBAAOD,IAAE;AAAA,YAAC,KAAK;AAAE,cAAAI,KAAEI,GAAE,IAAE;AAAG;AAAA,YAAM,KAAK;AAAE,cAAAJ,KAAEI,GAAE;AAAE;AAAA,YAAM,KAAK;AAAE,cAAAJ,KAAEI,GAAE,IAAE;AAAE;AAAA,YAAM,KAAK;AAAE,cAAAJ,MAAGG,KAAED,MAAG;AAAO;AAAA,YAAM,KAAK;AAAE,cAAAF,MAAGG,KAAED,MAAG;AAAM;AAAA,YAAM,KAAK;AAAE,cAAAF,KAAEG,KAAE;AAAE;AAAA,YAAM,KAAK;AAAE,cAAAH,KAAEG,KAAE;AAAE;AAAA,YAAM,KAAK;AAAE,cAAAH,KAAEG,KAAE;AAAE;AAAA,YAAM;AAAQ,cAAAH,KAAEG;AAAA,UAAC;AAAC,iBAAOJ,KAAEC,KAAE,EAAE,EAAEA,EAAC;AAAA,QAAC,GAAEH,GAAE,cAAY,WAAU;AAAC,iBAAO,KAAK,MAAM,CAAC,EAAE;AAAA,QAAE,GAAEA,GAAE,UAAQ,WAAU;AAAC,iBAAO,EAAE,KAAK,EAAE;AAAA,QAAC,GAAEA,GAAE,SAAO,SAASV,IAAEC,IAAE;AAAC,cAAG,CAACD;AAAE,mBAAO,KAAK;AAAG,cAAIE,KAAE,KAAK,MAAM,GAAEC,KAAE,EAAEH,IAAEC,IAAE,IAAE;AAAE,iBAAOE,OAAID,GAAE,KAAGC,KAAGD;AAAA,QAAC,GAAEQ,GAAE,QAAM,WAAU;AAAC,iBAAO,EAAE,EAAE,KAAK,IAAG,IAAI;AAAA,QAAC,GAAEA,GAAE,SAAO,WAAU;AAAC,iBAAO,IAAI,KAAK,KAAK,QAAQ,CAAC;AAAA,QAAC,GAAEA,GAAE,SAAO,WAAU;AAAC,iBAAO,KAAK,QAAQ,IAAE,KAAK,YAAY,IAAE;AAAA,QAAI,GAAEA,GAAE,cAAY,WAAU;AAAC,iBAAO,KAAK,GAAG,YAAY;AAAA,QAAC,GAAEA,GAAE,WAAS,WAAU;AAAC,iBAAO,KAAK,GAAG,YAAY;AAAA,QAAC,GAAED;AAAA,MAAC,EAAE,GAAE,IAAEF,GAAE;AAAU,aAAO,EAAE,YAAU,GAAE,CAAC,CAAC,OAAM,CAAC,GAAE,CAAC,MAAK,CAAC,GAAE,CAAC,MAAK,CAAC,GAAE,CAAC,MAAK,CAAC,GAAE,CAAC,MAAK,CAAC,GAAE,CAAC,MAAK,CAAC,GAAE,CAAC,MAAK,CAAC,GAAE,CAAC,MAAK,CAAC,CAAC,EAAE,QAAS,SAASP,IAAE;AAAC,UAAEA,GAAE,CAAC,CAAC,IAAE,SAASC,IAAE;AAAC,iBAAO,KAAK,GAAGA,IAAED,GAAE,CAAC,GAAEA,GAAE,CAAC,CAAC;AAAA,QAAC;AAAA,MAAC,CAAE,GAAE,EAAE,SAAO,SAASA,IAAEC,IAAE;AAAC,eAAOD,GAAE,OAAKA,GAAEC,IAAEM,IAAE,CAAC,GAAEP,GAAE,KAAG,OAAI;AAAA,MAAC,GAAE,EAAE,SAAO,GAAE,EAAE,UAAQ,GAAE,EAAE,OAAK,SAASA,IAAE;AAAC,eAAO,EAAE,MAAIA,EAAC;AAAA,MAAC,GAAE,EAAE,KAAG,EAAE,CAAC,GAAE,EAAE,KAAG,GAAE,EAAE,IAAE,CAAC,GAAE;AAAA,IAAC,CAAE;AAAA;AAAA;;;ACAt/N;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAsB,gBAAA;AAAA;AAAA;AAAA;AACA;AAAA;AAAA;;;ACDA,IAWA,cAwEa;AAnFb;AAAA;AAAA;AAAA;AAWA,mBAAkB;AAClB,IAAAC;AACA,IAAAC;AAsEO,IAAM,uBAAuB;AAAA,MAClC,gBAAgB,WAAW;AAAA,MAC3B,gBAAgB,KAAK;AAAA,MACrB,gBAAgB,OAAO;AAAA,MACvB,gBAAgB,UAAU;AAAA,MAC1B,gBAAgB,SAAS;AAAA,MACzB,gBAAgB,YAAY;AAAA,IAC9B;AAAA;AAAA;;;AC1FA;AAAA;AAAA;AAAA;AAAA;AAAA;AAEO,SAAS,YACd,OACA,UAAU,sCAAsC,KAAK,IACrD;AACA,QAAM,IAAI,MAAM,OAAO;AACzB;AAEA,eAAsB,gBACpB,OACA,MACA,gBACe;AACf,QAAM,WAA4B,CAAC;AACnC,MAAIC,SAAQ;AAEZ,QAAM,cAAc,OAAO,SAAY;AACrC,QAAI;AACF,YAAM,KAAK,IAAI;AAAA,IACjB,UAAE;AACA,kBAAY;AAAA,IACd;AAAA,EACF;AAEA,QAAM,cAAc,MAAM;AACxB,QAAIA,UAAS,MAAM,QAAQ;AAEzB;AAAA,IACF;AAEA,UAAM,OAAO,MAAMA,MAAK;AACxB,IAAAA;AAEA,UAAM,UAAU,YAAY,IAAI;AAChC,aAAS,KAAK,OAAO;AAErB,QAAI,SAAS,UAAU,gBAAgB;AACrC,cAAQ,KAAK,QAAQ,EAAE,KAAK,WAAW;AAAA,IACzC,OAAO;AACL,kBA
AY;AAAA,IACd;AAAA,EACF;AACA,cAAY;AAEZ,QAAM,QAAQ,IAAI,QAAQ;AAC5B;AAEO,SAAS,qBAAqB;AACnC,SAAO,OAAO,KAAe,eAAe,EAAE;AAAA,IAC5C,CAAC,KAAgC,QAAgB;AAC/C,YAAM,MAAwC;AAC9C,YAAM,KAAgC,IAAI,GAAG;AAC7C,UAAI,GAAG,OAAO,CAAC,IAAI,GAAG;AACtB,aAAO;AAAA,IACT;AAAA,IACA,CAAC;AAAA,EACH;AACF;AA1DA;AAAA;AAAA;AAAA,IAAAC;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;AAKO,SAAS,YAAY,OAAe;AACzC,MAAI,CAAC,OAAO;AACV,UAAM,IAAI,MAAM,oBAAoB;AAAA,EACtC;AACA,MAAI,MAAM,WAAWC,eAAc,GAAG;AACpC,WAAO;AAAA,EACT;AAEA,QAAM,QAAQ,MAAM,MAAMC,WAAU;AACpC,QAAM,MAAM;AACZ,QAAM,OAAO,MAAM,KAAKA,WAAU;AAClC,SAAO,GAAGD,eAAc,GAAG,IAAI;AACjC;AAKO,SAAS,aAAa,OAAe;AAC1C,MAAI,CAAC,OAAO;AACV,UAAM,IAAI,MAAM,oBAAoB;AAAA,EACtC;AACA,MAAI,CAAC,MAAM,WAAWA,eAAc,GAAG;AACrC,WAAO;AAAA,EACT;AAEA,QAAM,QAAQ,MAAM,MAAMA,eAAc;AACxC,QAAM,MAAM;AACZ,QAAM,OAAO,MAAM,KAAKA,eAAc;AACtC,SAAO,GAAGC,WAAU,GAAG,IAAI;AAC7B;AAlCA,IAEMA,aACAD;AAHN;AAAA;AAAA;AAAA;AAEA,IAAMC,cAAa,wBAAyB;AAC5C,IAAMD,kBAAiB,gCAA6B;AAAA;AAAA;;;ACHpD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAWO,SAAS,UAAU,MAA0B,OAAyB;AAC3E,MAAI,CAAC,MAAM;AACT,WAAO;AAAA,EACT;AACA,MAAI,KAAK,SAAS,QAAQ;AACxB,WAAO;AAAA,EACT,WAAW,SAAS,KAAK,SAAS,MAAM,SAAS,aAAa,KAAK,CAAC,GAAG;AACrE,WAAO;AAAA,EACT;AACA,SAAO;AACT;AAEO,SAAS,gBAAgB,MAAmC;AACjE,SAAQ,UAAU,IAAI,KAAK,CAAC,yBAAyB,IAAI,KAAM,QAAQ,IAAI;AAC7E;AAEO,SAAS,cAAc,MAAmC;AAC/D,SAAO,gBAAgB,IAAI,KAAK,sBAAsB,IAAI;AAC5D;AAKO,SAAS,QAAQ,MAAmC;AACzD,MAAI,CAAC,MAAM;AACT,WAAO;AAAA,EACT;AACA,SAAO,oBAAoB,IAAI;AACjC;AAEO,SAAS,iBACd,MACA,OACS;AACT,SAAO,UAAU,MAAM,KAAK,KAAK,QAAQ,IAAI;AAC/C;AAEO,SAAS,uBACd,MACA,OACS;AACT,SAAO,gBAAgB,IAAI,KAAK,QAAQ,IAAI;AAC9C;AAGO,SAAS,yBAAyB,MAAoC;AAC3E,MAAI,CAAC,MAAM;AACT,WAAO;AAAA,EACT;AACA,QAAM,YAAY,KAAK,SAAS,MAAM;AACtC,QAAME,mBAAkB,CAAC,CAAC,KAAK,SAAS;AACxC,SAAO,CAACA,oBAAmB,aAAa,QAAQ,YAAY;AAC9D;AAEO,SAAS,yBAAyB,MAAoC;AAC3E,MAAI,CAAC,MAAM;AACT,WAAO;AAAA,EACT;AACA,SAAS;AAAA,IACL,MAAI,OAAO;AAAA,IACX;AAAA,IACA,OAAK,OAAK,MAAM,SAAS;AAAA,IAC3B,OAAK,CAAC,CAAC;AAAA,EACT,EAAE,IAAI;AACR;AAGO,SAAS,sBAAsB,MAAoC;AACxE,MAAI,CAAC,MAAM;AACT,WAAO;AAAA,EACT;AACA,SACE,KAAK,SAAS,UACd,yBAAyB,IAAI,KAC7B,sBAAsB,IAAI;AAE9B;AAGO,SAAS,oBAAoB,MAAoC;AACtE,MAAI,CAAC,MAAM;AACT,WAAO;AAAA,EACT;AACA,SAAO,CAAC,CAAC,KAAK,OAAO;AACvB;AAEO,SAAS,sBAAsB,MAAoC;AACxE,MAAI,CAAC,MAAM;AACT,WAAO;AAAA,EACT;AACA,SAAO,CAAC,CAAC,KAAK,SAAS;AACzB;AAEO,SAAS,UAAU,MAAoC;AAC5D,MAAI,CAAC,MAAM;AACT,WAAO;AAAA,EACT;AACA,SACE,gBAAgB,IAAK,KACrB,oBAAoB,IAAI,KACxB,sBAAsB,IAAI,KAC1B,yBAAyB,IAAI,KAC7B,yBAAyB,IAAI;AAEjC;AAEO,SAAS,gBAAgB,QAAqC;AACnE,MAAI,OAAO,WAAW,UAAU;AAC9B,WAAO;AAAA,EACT;AACA,QAAM,SAAS,iBAAmB,GAAG,SAAS,iCAA8B,GAAG,SAAS;AACxF,MAAI,CAAC,OAAO,WAAW,MAAM,GAAG;AAC9B,WAAO;AAAA,EACT;AACA,SAAO,OAAO,MAAM,MAAM,EAAE,CAAC;AAC/B;AAEO,SAAS,eAAe,OAAoC;AACjE,MAAI,OAAO,UAAU,UAAU;AAC7B,WAAO;AAAA,EACT;AACA,SAAO,MAAM,SAAS,kBAAoB,GAAG,SAAS,EAAE;AAC1D;AArIA,IAQA;AARA,IAAAC,cAAA;AAAA;AAAA;AAAA;AAOA;AACA,QAAmB;AAAA;AAAA;;;ACRnB,IAAAC,kBAAA;AAAA;AAAA;AAAA;AACA,IAAAC;AAAA;AAAA;;;ACDA;AAAA;AAAA;AAAA;AAAA;AAAA,IAAAC,YAAA;AAAA;AAAA;AAAA,IAAAC;AAAA;AAAA;;;ACAA,IAEM,0BAoBA;AAtBN,IAAAC,cAAA;AAAA;AAAA;AAAA;AAEA,IAAM,2BAAuD;AAAA,MAC3D,sBAAiB,GAAG;AAAA,MACpB,0BAAmB,GAAG;AAAA,MACtB,wBAAkB,GAAG;AAAA,MACrB,sBAAiB,GAAG;AAAA,MACpB,0BAAmB,GAAG;AAAA,MACtB,wBAAkB,GAAG;AAAA,MACrB,kBAAe,GAAG;AAAA,MAClB,0BAAmB,GAAG;AAAA,MACtB,4BAAoB,GAAG;AAAA,MACvB,sBAAiB,GAAG;AAAA,MAEpB,wBAAkB,GAAG;AAAA,MACrB,oBAAgB,GAAG;AAAA,MACnB,8BAAqB,GAAG;AAAA,MACxB,kBAAe,GAAG;AAAA,MAClB,kBAAe,GAAG;AAAA,MAClB,kCAAuB,GAAG;AAAA,IAC5B;AAEA,IAAM,wBAAoD;AAAA,MACxD,sBAAiB,GAAG;AAAA,MACpB,0BAAmB,GAAG;AAAA,MACtB,wBAAkB,GAAG;AAAA,MACrB,sBAAiB,GAAG;AAAA,MACpB,0BAAmB
,GAAG;AAAA,MACtB,kBAAe,GAAG;AAAA,MAClB,0BAAmB,GAAG;AAAA,MACtB,4BAAoB,GAAG;AAAA,MACvB,sBAAiB,GAAG;AAAA,MACpB,wBAAkB,GAAG;AAAA,MACrB,kBAAe,GAAG;AAAA,MAElB,wBAAkB,GAAG;AAAA,MACrB,8BAAqB,GAAG;AAAA,MACxB,oBAAgB,GAAG;AAAA,MACnB,kBAAe,GAAG;AAAA,MAClB,kCAAuB,GAAG;AAAA,IAC5B;AAAA;AAAA;;;ACxCA,IAAAC,YAAA;AAAA;AAAA;AAAA,IAAAC;AACA;AACA,IAAAC;AACA;AACA,IAAAC;AACA,IAAAC;AAAA;AAAA;;;ACLA,IAAY,YAKA,QAUA,YAOA,QAUC,gBACA,gBACA;AAlCb;AAAA;AAAA;AAaA,IAAAC;AAbO,IAAK,aAAL,kBAAKC,gBAAL;AACL,MAAAA,YAAA,YAAS;AACT,MAAAA,YAAA,cAAW;AAFD,aAAAA;AAAA,OAAA;AAKL,IAAK,SAAL,kBAAKC,YAAL;AACL,MAAAA,QAAA,UAAO;AACP,MAAAA,QAAA,UAAO;AACP,MAAAA,QAAA,wBAAqB;AACrB,MAAAA,QAAA,oBAAiB;AACjB,MAAAA,QAAA,iBAAc;AALJ,aAAAA;AAAA,OAAA;AAUL,IAAK,aAAL,kBAAKC,gBAAL;AACL,MAAAA,YAAA,WAAQ;AACR,MAAAA,YAAA,WAAQ;AACR,MAAAA,YAAA,aAAU;AACV,MAAAA,YAAA,uBAAoB;AAJV,aAAAA;AAAA,OAAA;AAOL,IAAK,SAAL,kBAAKC,YAAL;AACL,MAAAA,QAAA,cAAW;AACX,MAAAA,QAAA,aAAU;AACV,MAAAA,QAAA,UAAO;AACP,MAAAA,QAAA,YAAS;AACT,MAAAA,QAAA,UAAO;AACP,MAAAA,QAAA,gBAAa;AACb,MAAAA,QAAA,UAAO;AAPG,aAAAA;AAAA,OAAA;AAUL,IAAM,iBAAiB,oBAAI,KAAK,aAAc;AAC9C,IAAM,iBAAiB,oBAAI,KAAK,YAAa;AAC7C,IAAM,oBAAoB;AAAA;AAAA;;;AClCjC,IAAAC,qBAAA;AAAA,SAAAA,oBAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAAAC,kBAAA;AAAA;AAAA;AAAA,IAAAC;AACA;AAAA;AAAA;;;ACDA;AAAA;AAAA;AAAA,6BAAAC;AAAA,EAAA;AAAA;AAAA,qBAAAC;AAAA;AAYO,SAASA,eAA2C;AACzD,SAAe,YAAY;AAC7B;AAEO,SAASD,qBAAoB,UAA2B,MAAW;AACxE,SAAe,oBAAoB,UAAU,IAAI;AACnD;AAGO,SAAS,gBAAgB,MAAY,KAAU,MAAW;AAC/D,QAAM,cAA2B;AAAA,IAC/B,GAAG;AAAA,IACH,KAAK,KAAK;AAAA,IACV;AAAA,IACA,UAAU;AAAA,MACR,WAAW,IAAI,QAAQ;AAAA;AAAA,MAEvB,WAAW,IAAI,UAAU,OAAO;AAAA,IAClC;AAAA,EACF;AACA,SAAOA,qBAAoB,aAAa,IAAI;AAC9C;AAGO,SAAS,mBAAmB,SAAkB,MAAW;AAC9D,QAAM,MAAM,iBAAiB,OAAO;AACpC,QAAM,WAAW,QAAQ;AACzB,QAAM,iBAAqC;AAAA,IACzC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACA,SAAOA,qBAAoB,gBAAgB,IAAI;AACjD;AAEO,SAAS,iBAAiB,SAAkB;AACjD,MAAI;AACJ,MAAI,eAAe,OAAO,GAAG;AAC3B,aAAS,QAAQ;AAAA,EACnB,OAAO;AAEL,aAAS,QAAQ;AAAA,EACnB;AACA,SAAO;AACT;AAzDA;AAAA;AAAA;AAAA;AAUA,IAAAE;AAAA;AAAA;;;ACPA,SAAS,SAAS;AAChB,SAAO,OAAO;AAChB;AAEA,SAAS,SAAS;AAChB,SACE,QAAQ,IAAI,aAAa,UACxB,QAAQ,IAAI,kBAAkB,QAC7B,QAAQ,IAAI,mBAAmB;AAErC;AAEA,SAAS,QAAQ;AACf,SAAO,QAAQ,IAAI,aAAa;AAClC;AAkBA,SAAS,sBAAsB;AAC7B,SAAO,QAAQ,IAAI,qBACf,QAAQ,IAAI,qBACZ,QAAQ,IAAI;AAClB;AAEA,SAAS,cAAc;AACrB,MAAI,QAAQ,IAAI,iBAAiB,QAAW;AAE1C,WAAO;AAAA,EACT;AAEA,SAAO,QAAQ,IAAI;AACrB;AAEA,SAAS,uBAGP;AACA,WAAS,oBACP,UACA,YACe;AACf,UAAM,WAAW,GAAG,UAAU,IAAI,QAAQ;AAC1C,YAAI,sBAAW,QAAQ,GAAG;AACxB,aAAO;AAAA,IACT;AAEA,UAAM,YAAY,GAAG,UAAU;AAC/B,QAAI,cAAc,YAAY;AAE5B,aAAO;AAAA,IACT;AAEA,WAAO,oBAAoB,UAAU,SAAS;AAAA,EAChD;AAEA,MAAI;AACF,UAAM,kBAAkB,oBAAoB,gBAAgB,QAAQ,IAAI,CAAC;AACzE,UAAM,cAAU,wBAAa,iBAAkB,OAAO;AACtD,UAAM,gBAAgB,KAAK,MAAM,OAAO;AACxC,WAAO;AAAA,MACL,SAAS,QAAQ,IAAI,oBAAoB,cAAc;AAAA,MACvD,cAAc,cAAc;AAAA,IAC9B;AAAA,EACF,QAAQ;AAEN,WAAO,EAAE,SAAS,QAAQ,IAAI,oBAAoB,IAAI,cAAc,GAAG;AAAA,EACzE;AACF;AAEA,SAAS,WAAW;AAClB,SAAO,YAAY;AACrB;AAEA,SAAS,SAAS;AAChB,SAAO,YAAY;AACrB;AA5FA,eAmBI,QAME,mBAQA,YA6DA,aA6GC;AA3MP,IAAAC,oBAAA;AAAA;AAAA;AAAA,gBAAyC;AACzC;AAkBA,IAAI,SAAS;AACb,QAAI,CAAC,UAAU,MAAM,KAAK,CAAC,OAAO,GAAG;AACnC,cAAQ,QAAQ,EAAE,OAAO;AACzB,eAAS;AAAA,IACX;AAEA,IAAM,oBAAoB;AAAA,MACxB,SAAS;AAAA,MACT,MAAM;AAAA,MACN,WAAW;AAAA,MACX,QAAQ;AAAA,MACR,SAAS;AAAA,IACX;AAEA,IAAM,aAAa,CAAC,CAAC,SAAS,QAAQ,IAAI,eAAe,EAAE;AA6D3D,IAAM,cAAc;AAAA,MAClB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,QAAQ,MAAM;AACZ,eAAO,CAAC,MAAM;AAAA,MAChB;AAAA,MACA,WAAW,QAAQ,IAAI;AAAA,MACvB,YAAY,QAAQ,IAAI;AAAA,MACxB,qBAAqB,QAAQ,IAAI;AAAA,MACjC,gBAAg
B,QAAQ,IAAI;AAAA,MAC5B,oBAAoB,oBAAoB;AAAA,MACxC,cAAc,QAAQ,IAAI,gBAAgB;AAAA,MAC1C,kBAAkB,QAAQ,IAAI,oBAAoB;AAAA,MAClD,mBAAmB,QAAQ,IAAI;AAAA,MAC/B,mBAAmB,QAAQ,IAAI;AAAA,MAC/B,kBAAkB,QAAQ,IAAI;AAAA,MAC9B,sBAAsB,QAAQ,IAAI;AAAA,MAClC,aAAa,QAAQ,IAAI;AAAA,MACzB,WAAW,QAAQ,IAAI,aAAa;AAAA,MACpC,gBAAgB,QAAQ,IAAI;AAAA,MAC5B,iBAAiB,QAAQ,IAAI;AAAA,MAC7B,kBAAkB,QAAQ,IAAI;AAAA,MAC9B,kBAAkB,QAAQ,IAAI;AAAA,MAC9B,YAAY,QAAQ,IAAI;AAAA,MACxB,WAAW,QAAQ,IAAI;AAAA,MACvB,eAAe,QAAQ,IAAI,iBAAiB;AAAA,MAC5C,kBAAkB,QAAQ,IAAI;AAAA,MAC9B,2BAA2B,QAAQ,IAAI;AAAA,MACvC,eAAe,QAAQ,IAAI;AAAA,MAC3B,oBACE,QAAQ,IAAI,sBAAsB;AAAA,MACpC,wBAAwB,QAAQ,IAAI,0BAA0B;AAAA,MAC9D,wBAAwB,QAAQ,IAAI;AAAA,MACpC,aAAa;AAAA,MACb,eAAe,QAAQ,IAAI;AAAA,MAC3B,cAAc,QAAQ,IAAI,gBAAgB;AAAA,MAC1C,eAAe,QAAQ,IAAI;AAAA,MAC3B,kBAAkB,QAAQ,IAAI;AAAA,MAC9B,sBAAsB,QAAQ,IAAI;AAAA,MAClC,gBAAgB,QAAQ,IAAI;AAAA,MAC5B,2BAA2B,QAAQ,IAAI;AAAA,MACvC,0BAA0B,QAAQ,IAAI;AAAA,MACtC,qBACE,QAAQ,IAAI,uBAAuB,kBAAkB;AAAA,MACvD,kBAAkB,QAAQ,IAAI,oBAAoB,kBAAkB;AAAA,MACpE,uBACE,QAAQ,IAAI,yBAAyB,kBAAkB;AAAA,MACzD,oBACE,QAAQ,IAAI,sBAAsB,kBAAkB;AAAA,MACtD,oBACE,QAAQ,IAAI,sBAAsB,kBAAkB;AAAA,MACtD,WAAW,QAAQ,IAAI,aAAa;AAAA,MACpC,YAAY,QAAQ,IAAI;AAAA,MACxB,iBAAiB,QAAQ,IAAI;AAAA,MAC7B,SAAS,QAAQ,IAAI,WAAW;AAAA,MAChC,WAAW,QAAQ,IAAI,aAAa;AAAA,MACpC,uBAAuB,QAAQ,IAAI;AAAA,MACnC,wBACE,QAAQ,IAAI,0BAA0B;AAAA,MACxC,cAAc,YAAY;AAAA,MAC1B,0BAA0B,QAAQ,IAAI;AAAA;AAAA,MAEtC,uBAAuB,QAAQ,IAAI;AAAA,MACnC,WAAW,QAAQ,IAAI;AAAA,MACvB,eAAe,QAAQ,IAAI;AAAA,MAC3B,WAAW,QAAQ,IAAI;AAAA,MACvB,WAAW,SAAS,QAAQ,IAAI,aAAa,EAAE;AAAA,MAC/C,mBAAmB,QAAQ,IAAI;AAAA,MAC/B,qBAAqB,QAAQ,IAAI;AAAA,MACjC,eAAe,QAAQ,IAAI;AAAA,MAC3B,cAAc;AAAA,MACd,qBAAqB,QAAQ,IAAI;AAAA,MACjC,qBAAqB,QAAQ,IAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAMjC,6BAA6B,aACzB,QAAQ,IAAI,8BACZ;AAAA,MACJ,GAAG,qBAAqB;AAAA,MACxB,qBAAqB,QAAQ,IAAI;AAAA,MACjC,cAAc,QAAQ,IAAI;AAAA,MAC1B,KAAK,KAAU,OAAY;AACzB,gBAAQ,IAAI,GAAG,IAAI;AAEnB,oBAAY,GAAG,IAAI;AAAA,MACrB;AAAA,MACA,sBAAsB,QAAQ,IAAI,wBAAwB;AAAA,IAC5D;AAGA,aAAS,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,WAAW,GAAG;AAEpD,UAAI,UAAU,KAAK;AAEjB,oBAAY,GAAG,IAAI;AAAA,MACrB;AAEA,UAAI,UAAU,SAAS;AAErB,oBAAY,GAAG,IAAI;AAAA,MACrB;AAAA,IACF;AAEA,IAAO,sBAAQ;AAAA;AAAA;;;AC3Mf,wBAGqB;AAHrB;AAAA;AAAA;AAAA,yBAAkC;AAGlC,IAAqB,UAArB,MAAqB,SAAQ;AAAA,MAC3B;AAAA,aAAO,UAAU,IAAI,qCAA8B;AAAA;AAAA,MAEnD,OAAO,IAAO,SAAqB,MAAe;AAChD,eAAO,SAAQ,QAAQ,IAAI,SAAS,MAAM,KAAK,CAAC;AAAA,MAClD;AAAA,MAEA,OAAO,MAAkB;AACvB,eAAO,SAAQ,QAAQ,SAAS;AAAA,MAClC;AAAA,IACF;AAAA;AAAA;;;ACRO,SAAS,WAAW,OAAgB;AACzC,MAAI,CAAC,OAAO;AACV,UAAM;AAAA,EACR;AACA,SAAO,MAAM,WAAW,cAAc;AACxC;AAEO,SAAS,YAAY,OAAgB;AAC1C,MAAI,CAAC,OAAO;AACV,UAAM;AAAA,EACR;AACA,SAAO,MAAM,WAAW,UAAU,KAAK,CAAC,WAAW,KAAK;AAC1D;AAEO,SAAS,SAAS,KAAU;AACjC,MAAI,CAAC,KAAK;AACR,UAAM;AAAA,EACR;AACA,SAAO,WAAW,IAAI,KAAK;AAC7B;AAMO,SAAS,oBAAoB,OAAe;AACjD,MAAI,CAAC,SAAS,MAAM,WAAW,cAAc,GAAG;AAC9C,WAAO;AAAA,EACT;AAEA,QAAM,QAAQ,MAAM,MAAM,UAAU;AACpC,QAAM,MAAM;AACZ,QAAM,OAAO,MAAM,KAAK,UAAU;AAClC,SAAO,GAAG,cAAc,GAAG,IAAI;AACjC;AAMO,SAASC,cAAa,OAAe;AAC1C,MAAI,CAAC,SAAS,CAAC,MAAM,WAAW,cAAc,GAAG;AAC/C,WAAO;AAAA,EACT;AAEA,QAAM,QAAQ,MAAM,MAAM,cAAc;AACxC,QAAM,MAAM;AACZ,QAAM,OAAO,MAAM,KAAK,cAAc;AACtC,SAAO,GAAG,UAAU,GAAG,IAAI;AAC7B;AAEO,SAAS,eAAe,IAAY;AACzC,QAAM,QAAQ,IAAI,MAAM,GAAG,KAAK,CAAC;AACjC,SAAO,MAAM,SAAS,MAAM,MAAM,SAAS,CAAC,IAAI;AAClD;AA3DA,IAGM,cAqCOC;AAxCb;AAAA;AAAA;AAAA,IAAAC;AAGA,IAAM,eAAe;AAqCd,IAAMD,eAAc;AAAA;AAAA;;;ACxC3B,IAEa,cAiCA;AAnCb;AAAA;AAAA;AAAA,IAAAE;AAEO,IAAM,eAAe,CAAC,eAAwB;AACnD,YAAM,UAAU,WAAW,UAAU;AACrC,UAAI;AACJ,UAAI;AACJ,UAAI,QAAQ,MAAM,UAAU;AAE1B,mBAAW,QAAQ,KAAK;AAAA,MAC1B,WAAW,oBAAI,mBAAmB;AAEhC,mBAAW,oBAAI;AAAA,MACjB,WAAW,CAAC,oBAAI,OAAO,GAAG;
AACxB,cAAM,IAAI,MAAM,0BAA0B;AAAA,MAC5C;AACA,UAAI,QAAQ,MAAM,UAAU;AAE1B,mBAAW,QAAQ,KAAK;AAAA,MAC1B,WAAW,oBAAI,mBAAmB;AAEhC,mBAAW,oBAAI;AAAA,MACjB,WAAW,CAAC,oBAAI,OAAO,GAAG;AACxB,cAAM,IAAI,MAAM,0BAA0B;AAAA,MAC5C;AACA,YAAM,aAAa,OAAO,KAAK,GAAG,QAAQ,IAAI,QAAQ,EAAE,EAAE,SAAS,QAAQ;AAC3E,aAAO;AAAA,QACL,KAAK,QAAQ;AAAA,QACb,MAAM;AAAA,UACJ;AAAA,UACA;AAAA,QACF;AAAA,QACA,QAAQ,SAAS,UAAU;AAAA,MAC7B;AAAA,IACF;AAEO,IAAM,aAAa,CAAC,MAAM,oBAAI,iBAAiB;AACpD,UAAI,UAAU,UAAU,UAAU;AAClC,UAAI,KAAK;AAEP,cAAM,aAAa;AACnB,YAAI,CAAC,WAAW,KAAK,GAAG,GAAG;AACzB,gBAAM,UAAU,GAAG;AAAA,QACrB;AAGA,cAAM,QAAQ,IAAI,MAAM,KAAK;AAC7B,cAAM,WAAW,MAAM,CAAC;AACxB,cAAM,OAAO,MAAM,MAAM,CAAC,EAAE,KAAK,KAAK;AAGtC,YAAI,IAAI,SAAS,GAAG,GAAG;AAErB,cAAI,QAAQ,KAAK,MAAM,GAAG;AAC1B,iBAAO,MAAM,MAAM,SAAS,CAAC;AAC7B,cAAI,OAAO,MAAM,MAAM,GAAG,EAAE,EAAE,KAAK,GAAG;AAGtC,cAAI,KAAK,SAAS,GAAG,GAAG;AACtB,kBAAM,YAAY,KAAK,MAAM,GAAG;AAChC,uBAAW,UAAU,CAAC;AACtB,uBAAW,UAAU,MAAM,CAAC,EAAE,KAAK,GAAG;AAAA,UACxC,OAAO;AACL,uBAAW;AAAA,UACb;AAAA,QACF,OAAO;AACL,iBAAO;AAAA,QACT;AACA,mBAAW,GAAG,QAAQ,MAAM,IAAI;AAAA,MAClC;AACA,aAAO;AAAA,QACL,KAAK;AAAA,QACL,MAAM;AAAA,UACJ;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA;AAAA;;;ACtEO,SAAS,kBAAkB,KAAa;AAC7C,SAAO,IAAI,QAAQ,wBAAwB,MAAM;AACnD;AARA,IAAAC,gBAAA;AAAA;AAAA;AAAA;AAAA;;;ACIA,eAAsB,gBACpBC,OACA,SAAiB,OACjBC,OACA;AACA,MAAI,EAAE,KAAK,OAAO,IAAI,aAAa;AACnC,QAAM,WAAW,GAAG,GAAG,IAAID,KAAI;AAC/B,SAAO,MAAM,mBAAmB,EAAE,KAAK,UAAU,QAAQ,QAAQ,MAAAC,MAAK,CAAC;AACzE;AAEA,eAAsB,mBAAmB;AAAA,EACvC;AAAA,EACA;AAAA,EACA;AAAA,EACA,MAAAA;AACF,GAKG;AACD,QAAMC,UAAc;AAAA,IAClB;AAAA,IACA,SAAS;AAAA,MACP,eAAe;AAAA,IACjB;AAAA,EACF;AACA,MAAID,SAAQ,WAAW,OAAO;AAC5B,IAAAC,QAAO,OAAO,KAAK,UAAUD,KAAI;AACjC,IAAAC,QAAO,QAAQ,cAAc,IAAI;AAAA,EACnC;AACA,SAAO,UAAM,kBAAAC,SAAM,kBAAkB,UAAU,GAAG,CAAC,GAAGD,OAAM;AAC9D;AAEA,eAAsB,iBACpBF,OACA,SAAiB,OACjBC,OACA;AACA,QAAM,WAAW,MAAM,gBAAgBD,OAAM,QAAQC,KAAI;AACzD,MAAI,SAAS,SAAS,KAAK;AACzB,WAAO,MAAM,SAAS,KAAK;AAAA,EAC7B,OAAO;AACL,UAAM;AAAA,EACR;AACF;AAjDA,IACA;AADA,IAAAG,cAAA;AAAA;AAAA;AAAA;AACA,wBAAkB;AAClB,IAAAC;AAAA;AAAA;;;ACqDO,SAAS,KAAK,MAAqB;AACxC,UAAQ,SAAS,IAAI;AACrB,gBAAc;AAChB;AAQO,SAAS,WAAW,QAAgB,MAA8B;AACvE,mBAAiB;AACjB,QAAM,KAAK,IAAI,MAAM,QAAQ,IAAI;AACjC,QAAM,QAAQ,GAAG;AACjB,KAAG,MAAM,OAAO,KAAUC,WAAU,CAAC,MAAM;AACzC,QAAI,CAAC,IAAI,WAAW;AAClB,UAAI,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,IACzC;AACA,QAAI,aAAY,oBAAI,KAAK,GAAE,YAAY;AACvC,WAAO,MAAM,KAAKA,QAAO;AAAA,EAC3B;AACA,KAAG,SAAS,YAAY;AACtB,UAAM,OAAO,MAAM,GAAG,KAAK;AAC3B,WAAO,CAAC,KAAK;AAAA,EACf;AACA,SAAO;AACT;AAIA,eAAsB,aAAa,IAAsB;AACvD,MAAI,CAAC,MAAM,oBAAI,OAAO,GAAG;AACvB;AAAA,EACF;AACA,MAAI;AAEF,WAAO,MAAM,GAAG,MAAM;AAAA,EACxB,SAAS,KAAK;AAAA,EAEd;AACF;AAhGA,oBAKI,OACA,aAOS,UA+CP;AA5DN;AAAA;AAAA;AAAA,qBAAoB;AACpB,IAAAC;AAEA;AAGA,IAAI,cAAc;AAOX,IAAM,WAAW,CAAC,OAAqB,CAAC,MAAM;AACnD,UAAI,EAAE,KAAK,OAAO,IAAI,aAAa;AACnC,UAAI,oBAAoB;AAAA,QACtB,QAAQ;AAAA,QACR,OAAO,CAACC,MAAaC,UAAc;AAEjC,UAAAA,MAAK,QAAQ,IAAI,iBAAiB,MAAM;AACxC,iBAAO,eAAAC,QAAQ,MAAMF,MAAKC,KAAI;AAAA,QAChC;AAAA,MACF;AAEA,UAAI,KAAK,UAAU;AACjB,cAAM,WAAW,QAAQ,wBAAwB;AACjD,uBAAAC,QAAQ,OAAO,QAAQ;AACvB,4BAAoB;AAAA;AAAA,UAElB,SAAS;AAAA,QACX;AAAA,MACF;AAEA,UAAI,KAAK,QAAQ;AACf,4BAAoB;AAAA;AAAA,UAElB,SAAS;AAAA,QACX;AAAA,MACF;AAEA,UAAI,KAAK,aAAa;AACpB,cAAM,oBAAoB,QAAQ,sCAAsC;AACxE,uBAAAA,QAAQ,OAAO,kBAAkB,MAAM;AAEvC,uBAAAA,QAAQ,QAAQ,kBAAkB,kBAAkB,SAAS,cAAc;AAAA,MAC7E;AAEA,UAAI,KAAK,MAAM;AACb,cAAMC,QAAO,QAAQ,cAAc;AACnC,uBAAAD,QAAQ,OAAOC,KAAI;AAAA,MACrB;AAEA,aAAO,eAAAD,QAAQ,SAAS,iBAAiB;AAAA,IAC3C;AAOA,IAAM,mBAAmB,MAAM;AAC7B,UAAI,CAAC,aAAa;AAChB,cAAM,IAAI,MAAM,0BAA0B;AAAA,MAC5C;AAAA,IACF;AAAA;AAAA;;;AC9DO,SAAS,QAAQ;AACtB,aAAO,gBAAG,EAAE,Q
AAQ,MAAM,EAAE;AAC9B;AAJA;AAAA;AAAA;AAAA;AAAA,kBAAmB;AAAA;AAAA;;;ACAnB,IAgBA,iBAGa;AAnBb;AAAA;AAAA;AAgBA,sBAAmB;AAGZ,IAAM,yBAAN,MAAiD;AAAA,MACtD,YAA6B,IAAc;AAAd;AAAA,MAAe;AAAA,MAE5C,IAAI,OAAe;AACjB,eAAO,KAAK,GAAG;AAAA,MACjB;AAAA,MAEA,SAA2B;AACzB,eAAO,gBAAAE,QAAO,MAAM,aAAa,UAAQ;AACvC,gBAAM,QAAQ,EAAE,SAAS,KAAK,KAAK,CAAC;AACpC,iBAAO,KAAK,GAAG,OAAO;AAAA,QACxB,CAAC;AAAA,MACH;AAAA,MAEA,IAAwB,IAAqC;AAC3D,eAAO,gBAAAA,QAAO,MAAM,UAAU,UAAQ;AACpC,gBAAM,QAAQ,EAAE,SAAS,KAAK,MAAM,QAAQ,GAAG,CAAC;AAChD,iBAAO,KAAK,GAAG,IAAI,EAAE;AAAA,QACvB,CAAC;AAAA,MACH;AAAA,MAEA,YACE,KACA,MACc;AACd,eAAO,gBAAAA,QAAO,MAAM,kBAAkB,UAAQ;AAC5C,gBAAM,QAAQ;AAAA,YACZ,SAAS,KAAK;AAAA,YACd,UAAU,IAAI;AAAA,YACd,eAAe,MAAM;AAAA,UACvB,CAAC;AACD,iBAAO,KAAK,GAAG,YAAY,KAAK,IAAI;AAAA,QACtC,CAAC;AAAA,MACH;AAAA,MAEA,OACE,IACA,KACkC;AAClC,eAAO,gBAAAA,QAAO,MAAM,aAAa,UAAQ;AACvC,gBAAM,QAAQ,EAAE,SAAS,KAAK,MAAM,QAAQ,GAAG,CAAC;AAChD,iBAAO,KAAK,GAAG,OAAO,IAAI,GAAG;AAAA,QAC/B,CAAC;AAAA,MACH;AAAA,MAEA,IACE,UACA,MACiC;AACjC,eAAO,gBAAAA,QAAO,MAAM,UAAU,UAAQ;AACpC,gBAAM,QAAQ,EAAE,SAAS,KAAK,MAAM,QAAQ,SAAS,IAAI,CAAC;AAC1D,iBAAO,KAAK,GAAG,IAAI,UAAU,IAAI;AAAA,QACnC,CAAC;AAAA,MACH;AAAA,MAEA,SAAS,WAA2D;AAClE,eAAO,gBAAAA,QAAO,MAAM,eAAe,UAAQ;AACzC,gBAAM,QAAQ,EAAE,SAAS,KAAK,MAAM,UAAU,UAAU,OAAO,CAAC;AAChE,iBAAO,KAAK,GAAG,SAAS,SAAS;AAAA,QACnC,CAAC;AAAA,MACH;AAAA,MAEA,QACEC,SAC6B;AAC7B,eAAO,gBAAAD,QAAO,MAAM,cAAc,UAAQ;AACxC,gBAAM,QAAQ,EAAE,SAAS,KAAK,KAAK,CAAC;AACpC,iBAAO,KAAK,GAAG,QAAQC,OAAM;AAAA,QAC/B,CAAC;AAAA,MACH;AAAA,MAEA,MACE,UACAA,SAC6B;AAC7B,eAAO,gBAAAD,QAAO,MAAM,YAAY,UAAQ;AACtC,gBAAM,QAAQ,EAAE,SAAS,KAAK,MAAM,WAAW,SAAS,CAAC;AACzD,iBAAO,KAAK,GAAG,MAAM,UAAUC,OAAM;AAAA,QACvC,CAAC;AAAA,MACH;AAAA,MAEA,UAAsC;AACpC,eAAO,gBAAAD,QAAO,MAAM,cAAc,UAAQ;AACxC,gBAAM,QAAQ,EAAE,SAAS,KAAK,KAAK,CAAC;AACpC,iBAAO,KAAK,GAAG,QAAQ;AAAA,QACzB,CAAC;AAAA,MACH;AAAA,MAEA,UAAsC;AACpC,eAAO,gBAAAA,QAAO,MAAM,cAAc,UAAQ;AACxC,gBAAM,QAAQ,EAAE,SAAS,KAAK,KAAK,CAAC;AACpC,iBAAO,KAAK,GAAG,QAAQ;AAAA,QACzB,CAAC;AAAA,MACH;AAAA,MAEA,KAAKE,SAAkB,MAAmD;AACxE,eAAO,gBAAAF,QAAO,MAAM,WAAW,UAAQ;AACrC,gBAAM,QAAQ,EAAE,SAAS,KAAK,KAAK,CAAC;AACpC,iBAAO,KAAK,GAAG,KAAKE,SAAQ,IAAI;AAAA,QAClC,CAAC;AAAA,MACH;AAAA,MAEA,QAAQ,MAA2B;AACjC,eAAO,gBAAAF,QAAO,MAAM,WAAW,UAAQ;AACrC,gBAAM,QAAQ,EAAE,SAAS,KAAK,KAAK,CAAC;AACpC,iBAAO,KAAK,GAAG,KAAK,GAAG,IAAI;AAAA,QAC7B,CAAC;AAAA,MACH;AAAA,MAEA,eAAe,MAA2B;AACxC,eAAO,gBAAAA,QAAO,MAAM,kBAAkB,UAAQ;AAC5C,gBAAM,QAAQ,EAAE,SAAS,KAAK,KAAK,CAAC;AACpC,iBAAO,KAAK,GAAG,YAAY,GAAG,IAAI;AAAA,QACpC,CAAC;AAAA,MACH;AAAA,MAEA,eAAe,MAA2B;AACxC,eAAO,gBAAAA,QAAO,MAAM,kBAAkB,UAAQ;AAC5C,gBAAM,QAAQ,EAAE,SAAS,KAAK,KAAK,CAAC;AACpC,iBAAO,KAAK,GAAG,YAAY,GAAG,IAAI;AAAA,QACpC,CAAC;AAAA,MACH;AAAA,MAEA,cAAc,MAA2B;AACvC,eAAO,gBAAAA,QAAO,MAAM,iBAAiB,UAAQ;AAC3C,gBAAM,QAAQ,EAAE,SAAS,KAAK,KAAK,CAAC;AACpC,iBAAO,KAAK,GAAG,WAAW,GAAG,IAAI;AAAA,QACnC,CAAC;AAAA,MACH;AAAA,IACF;AAAA;AAAA;;;AC7HA,SAAS,UAAU,WAA4C;AAC7D,aAAO,YAAAG,SAAK;AAAA,IACV,KAAK,UAAU;AAAA,IACf,iBAAiB;AAAA,MACf,SAAS;AAAA,QACP,eAAe,UAAU;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,UAAU;AAAA,EACZ,CAAC;AACH;AAIO,SAAS,uBACd,QACA,YACA,MACA;AACA,QAAM,KAAK,IAAI,aAAa,QAAQ,MAAM,UAAU;AACpD,SAAO,IAAI,uBAAuB,EAAE;AACtC;AA5CA,iBAqBM,oBAyBO;AA9Cb;AAAA;AAAA;AAAA,kBAAiB;AACjB;AAaA;AACA,IAAAC;AACA;AAEA;AACA;AAEA,IAAM,qBAAqB;AAyBpB,IAAM,eAAN,MAAM,cAAiC;AAAA,MAQ5C,YAAY,QAAgB,MAAqB,YAAqB;AAFtE,aAAiB,YAAY,aAAa;AAGxC,aAAK,OAAO;AACZ,aAAK,YAAY,QAAQ,CAAC;AAC1B,YAAI,YAAY;AACd,eAAK,YAAY,aAAa,UAAU;AACxC,eAAK,eAAe,UAAU,KAAK,SAAS;AAAA,QAC9C;AACA,YAAI,CAAC,cAAa,MAAM;AACtB,wBAAa,KAAK;AAAA,QACpB;AAAA,MACF;AAAA,MAEA,OAAO,OAAO;AACZ,cAAM,YAAY,aAAa;AAC/B,sBAAa,OAAO,UAAU,SAAS;AAAA,MACzC;AAAA,MAEA,MAAM,SAAS;AACb,cAAM,WAA
W,MAAM,mBAAmB;AAAA,UACxC,KAAK,GAAG,KAAK,UAAU,GAAG,IAAI,KAAK,IAAI;AAAA,UACvC,QAAQ;AAAA,UACR,QAAQ,KAAK,UAAU;AAAA,QACzB,CAAC;AACD,eAAO,SAAS,WAAW;AAAA,MAC7B;AAAA,MAEQ,OAAO;AACb,eAAO,KAAK,gBAAgB,cAAa;AAAA,MAC3C;AAAA,MAEQ,QAAQ;AACd,eAAO,KAAK,KAAK,EAAE,GAAG,IAAI,KAAK,IAAI;AAAA,MACrC;AAAA,MAEA,MAAc,mBAAmB;AAC/B,YAAI,eAAe,CAAC,KAAK,WAAW;AAEpC,YAAIC,UAAS,MAAM,KAAK,OAAO;AAC/B,YAAI,CAAC,gBAAgB,CAACA,SAAQ;AAC5B,gBAAM,IAAI,MAAM,mBAAmB;AAAA,QACrC;AACA,YAAI,CAACA,SAAQ;AACX,cAAI;AACF,kBAAM,KAAK,KAAK,EAAE,GAAG,OAAO,KAAK,IAAI;AAAA,UACvC,SAAS,KAAU;AAEjB,gBAAI,IAAI,eAAe,KAAK;AAC1B,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AACA,eAAO,KAAK,MAAM;AAAA,MACpB;AAAA;AAAA,MAGA,MAAc,YACZ,MACc;AACd,cAAM,KAAK,KAAK,MAAM;AACtB,cAAM,MAAM,MAAM,KAAK,EAAE;AACzB,YAAI;AACF,iBAAO,MAAM,IAAI;AAAA,QACnB,SAAS,KAAU;AACjB,cAAI,IAAI,eAAe,OAAO,IAAI,WAAW,oBAAoB;AAC/D,kBAAM,KAAK,iBAAiB;AAC5B,mBAAO,MAAM,KAAK,YAAY,IAAI;AAAA,UACpC,WAAW,IAAI,YAAY;AACzB,gBAAI,SAAS,IAAI;AAAA,UACnB;AACA,gBAAM;AAAA,QACR;AAAA,MACF;AAAA,MAEA,MAAM,IAAwB,IAAyB;AACrD,eAAO,KAAK,YAAY,QAAM;AAC5B,cAAI,CAAC,IAAI;AACP,kBAAM,IAAI,MAAM,wCAAwC;AAAA,UAC1D;AACA,iBAAO,MAAM,GAAG,IAAI,EAAE;AAAA,QACxB,CAAC;AAAA,MACH;AAAA,MAEA,MAAM,YACJ,KACA,MACc;AAEd,cAAM,CAAC,GAAG,IAAI,IAAI,GAAG,CAAC;AACtB,cAAM,WAAW,MAAM,KAAK,QAAW;AAAA,UACrC,MAAM;AAAA,UACN,cAAc;AAAA,QAChB,CAAC;AACD,cAAM,iBAAiB,CAAC,QAAwB;AAE9C,cAAI,IAAI,OAAO,QAAS,aAAa,IAAI,SAAS,IAAI,MAAM,SAAU;AACpE,mBAAO;AAAA,UACT;AACA,iBAAO,IAAI,UAAU;AAAA,QACvB;AAEA,cAAM,OAAO,SAAS,KAAK,OAAO,SAAO,CAAC,eAAe,GAAG,CAAC;AAC7D,cAAM,cAAc,KAAK,WAAW,SAAS,KAAK;AAElD,YAAI,CAAC,MAAM,gBAAgB,aAAa;AACtC,gBAAM,UAAU,SAAS,KAAK,OAAO,SAAO,eAAe,GAAG,CAAC;AAC/D,gBAAM,aAAa,QAAQ,IAAI,SAAO,IAAI,GAAG,EAAE,KAAK,IAAI;AACxD,gBAAM,IAAI,MAAM,4BAA4B,UAAU,EAAE;AAAA,QAC1D;AACA,eAAO,KAAK,IAAI,SAAO,IAAI,GAAI;AAAA,MACjC;AAAA,MAEA,MAAM,OAAO,SAA4B,KAAc;AACrD,eAAO,KAAK,YAAY,QAAM;AAC5B,cAAI;AACJ,cAAI;AAEJ,cAAI,WAAW,OAAO,GAAG;AACvB,kBAAM,QAAQ;AACd,mBAAO,QAAQ;AAAA,UACjB,OAAO;AACL,kBAAM;AACN,mBAAO;AAAA,UACT;AAEA,cAAI,CAAC,OAAO,CAAC,MAAM;AACjB,kBAAM,IAAI,MAAM,oDAAoD;AAAA,UACtE;AACA,iBAAO,MAAM,GAAG,QAAQ,KAAK,IAAI;AAAA,QACnC,CAAC;AAAA,MACH;AAAA,MAEA,MAAM,KAAK,UAAuB,MAAwB;AACxD,YAAI,CAAC,SAAS,KAAK;AACjB,mBAAS,MAAM,MAAM;AAAA,QACvB;AACA,eAAO,KAAK,IAAI,UAAU,IAAI;AAAA,MAChC;AAAA,MAEA,MAAM,IAAI,UAAuB,MAAwB;AACvD,YAAI,CAAC,SAAS,KAAK;AACjB,gBAAM,IAAI,MAAM,0CAA0C;AAAA,QAC5D;AACA,eAAO,KAAK,YAAY,OAAM,OAAM;AAClC,cAAI,CAAC,SAAS,WAAW;AACvB,qBAAS,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,UAC9C;AACA,mBAAS,aAAY,oBAAI,KAAK,GAAE,YAAY;AAC5C,cAAI,MAAM,SAAS,SAAS,KAAK;AAC/B,gBAAI;AACF,oBAAM,WAAW,MAAM,KAAK,IAAI,SAAS,GAAG;AAC5C,kBAAI,UAAU;AACZ,yBAAS,OAAO,SAAS;AAAA,cAC3B;AAAA,YACF,SAAS,KAAU;AACjB,kBAAI,IAAI,WAAW,KAAK;AACtB,sBAAM;AAAA,cACR;AAAA,YACF;AAAA,UACF;AACA,iBAAO,MAAM,GAAG,OAAO,QAAQ;AAAA,QACjC,CAAC;AAAA,MACH;AAAA,MAEA,MAAM,SAAS,WAA0B;AACvC,eAAO,KAAK,YAAY,QAAM;AAC5B,iBAAO,MAAM,GAAG,KAAK,EAAE,MAAM,UAAU,CAAC;AAAA,QAC1C,CAAC;AAAA,MACH;AAAA,MAEA,MAAM,QACJC,SAC6B;AAC7B,eAAO,KAAK,YAAY,QAAM;AAC5B,iBAAO,MAAM,GAAG,KAAKA,OAAM;AAAA,QAC7B,CAAC;AAAA,MACH;AAAA,MAEA,MAAM,MACJ,UACAA,SAC6B;AAC7B,eAAO,KAAK,YAAY,QAAM;AAC5B,gBAAM,CAAC,UAAU,IAAI,IAAI,SAAS,MAAM,GAAG;AAC3C,iBAAO,MAAM,GAAG,KAAK,UAAU,MAAMA,OAAM;AAAA,QAC7C,CAAC;AAAA,MACH;AAAA,MAEA,MAAM,UAAU;AACd,YAAI;AACF,iBAAO,MAAM,KAAK,KAAK,EAAE,GAAG,QAAQ,KAAK,IAAI;AAAA,QAC/C,SAAS,KAAU;AAEjB,cAAI,IAAI,eAAe,KAAK;AAC1B;AAAA,UACF,OAAO;AACL,kBAAM,EAAE,GAAG,KAAK,QAAQ,IAAI,WAAW;AAAA,UACzC;AAAA,QACF;AAAA,MACF;AAAA,MAEA,MAAM,UAAU;AACd,eAAO,KAAK,YAAY,QAAM;AAC5B,iBAAO,MAAM,GAAG,QAAQ;AAAA,QAC1B,CAAC;AAAA,MACH;AAAA;AAAA;AAAA,MAIA,MAAM,KAAKC,SAAqB,MAAyB;AACvD,cAAM,QAAQ,WAAW,KAAK,IAAI;AAElC,eAAO,MAAM,KAAKA,SAAQ,IAAI;AAAA,
MAChC;AAAA,MAEA,MAAM,KAAKA,SAAoB;AAC7B,cAAM,QAAQ,WAAW,KAAK,IAAI;AAElC,eAAO,MAAM,KAAKA,OAAM;AAAA,MAC1B;AAAA,MAEA,MAAM,YAAY,MAA+B;AAC/C,cAAM,QAAQ,WAAW,KAAK,IAAI;AAClC,eAAO,MAAM,YAAY,IAAI;AAAA,MAC/B;AAAA,MAEA,MAAM,YAAY,MAA+B;AAC/C,cAAM,QAAQ,WAAW,KAAK,IAAI;AAClC,eAAO,MAAM,YAAY,IAAI;AAAA,MAC/B;AAAA,MAEA,MAAM,aAAa;AACjB,cAAM,QAAQ,WAAW,KAAK,IAAI;AAClC,eAAO,MAAM,WAAW;AAAA,MAC1B;AAAA,IACF;AAAA;AAAA;;;ACrRO,SAAS,qBAAqB,MAAuB;AAC1D,SAAQ,2BAAiD,SAAS,IAAI;AACxE;AAbA,IAAa,4BASA;AATb,IAAAC,kBAAA;AAAA;AAAA;AAAO,IAAM,6BAA6B;AAAA,MACxC;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEO,IAAM,6BAA6B,CAAC,OAAO,QAAQ,SAAS;AAAA;AAAA;;;ACTnE;AAAA;AAAA;AAAA;AACA;AACA,IAAAC;AACA;AACA,IAAAC;AAAA;AAAA;;;ACAO,SAAS,MAAM,QAAgB,MAA+B;AACnE,SAAO,IAAI,uBAAuB,IAAI,aAAa,QAAQ,IAAI,CAAC;AAClE;AAKA,eAAsB,SACpB,QACA,IACA,MACA;AACA,QAAM,KAAK,MAAM,QAAQ,IAAI;AAG7B,SAAO,MAAM,GAAG,EAAE;AACpB;AAEA,eAAsB,kBAAkB,aAAsB;AAC5D,MAAI,YAAY;AAChB,MAAI,aAAa;AACf,iBAAa,IAAI,WAAW;AAAA,EAC9B;AACA,SAAO,MAAM,iBAAiB,SAAS;AACzC;AAEA,eAAsB,gBAAgB,QAAgB,MAAwB;AAC5E,QAAM,OAAO,MAAM,iBAAiB,GAAG,MAAM,UAAU,QAAQ,IAAI;AACnE,SAAO,EAAE,MAAM,KAAK,MAAM,UAAU,KAAK,SAAS;AACpD;AAjCA,IAAAC,WAAA;AAAA;AAAA;AAAA;AAEA;AAAA;AAAA;;;ACeO,SAAS,gBAAgB,UAAmB;AAGjD,MAAI,CAAC,UAAU;AACb,eAAW,YAAY;AAAA,EACzB;AACA,SAAO,iBAAiB,QAAQ;AAClC;AAEO,SAAS,kBAAkB,UAAmB;AACnD,MAAI,CAAC,UAAU;AACb,eAAW,YAAY;AAAA,EACzB;AACA,MAAI,aAAa,mBAAmB;AAClC,WAAO,gBAAgB,WAAW;AAAA,EACpC,OAAO;AACL,WAAO,GAAG,QAAQ,GAAG,SAAS,GAAG,gBAAgB,WAAW,IAAI;AAAA,EAClE;AACF;AAEO,SAAS,iBAAiB,UAAqC;AACpE,MAAI,CAAC,YAAY,aAAa,mBAAmB;AAC/C,WAAO,gBAAgB,OAAO;AAAA,EAChC,OAAO;AACL,WAAO,GAAG,QAAQ,GAAG,SAAS,GAAG,gBAAgB,OAAO,IAAI;AAAA,EAC9D;AACF;AAEO,SAAS,iBAAiB;AAC/B,SAAO,oBAAI;AACb;AAEO,SAAS,gBAAgB;AAC9B,SAAO,CAAC,CAAC,oBAAI;AACf;AAEO,SAAS,gBAAgB;AAC9B,QAAM,UAAU,QAAQ,IAAI;AAC5B,SAAO,CAAC,CAAC,SAAS;AACpB;AAEO,SAAS,mBAAmB;AACjC,SAAO,oBAAI;AACb;AAMO,SAAS,qBAAqB,OAAe;AAClD,MAAI,CAAC,OAAO;AACV,WAAO;AAAA,EACT;AACA,MAAI,CAAC,cAAc,GAAG;AACpB,WAAO;AAAA,EACT;AACA,QAAM,QAAQ,MAAM,MAAM,SAAS;AACnC,QAAM,SAAS,MAAM,CAAC;AACtB,MAAK,UAAU,MAAM,WAAW,KAAO,CAAC,UAAU,MAAM,WAAW,GAAI;AACrE,WAAO;AAAA,EACT;AACA,MAAI,QAAQ;AACV,WAAO,MAAM,CAAC;AAAA,EAChB,OAAO;AACL,WAAO,MAAM,CAAC;AAAA,EAChB;AACF;AAEA,SAAS,cAAc,SAAiC;AACtD,MAAI;AACJ,MAAI;AACF,cAAU,QAAQ,IAAI;AAAA,EACxB,SAAS,KAAK;AAEZ,cAAU,CAAC;AAAA,EACb;AACA,YAAU;AAAA,IACR,GAAG;AAAA,IACH,GAAG;AAAA,EACL;AACA,SAAO;AACT;AAEA,eAAe,WAAc,SAAqB,MAAe;AAC/D,iBAAe;AAGf,MAAI,UAAsB,cAAc,OAAO;AAC/C,SAAO,QAAQ,IAAI,SAAS,IAAI;AAClC;AAEA,eAAsB,sBAAyBC,SAIhC;AACb,QAAM,WAAW,qBAAqBA,QAAO,KAAK;AAClD,SAAO;AAAA,IACL;AAAA,MACE;AAAA,MACA,OAAOA,QAAO;AAAA,MACd,cAAcA,QAAO;AAAA,IACvB;AAAA,IACAA,QAAO;AAAA,EACT;AACF;AAEA,eAAsB,YAAY,OAAe,MAAyB;AACxE,QAAM,WAAW,qBAAqB,KAAK;AAC3C,SAAO;AAAA,IACL;AAAA,MACE;AAAA,MACA;AAAA,IACF;AAAA,IACA;AAAA,EACF;AACF;AAEA,eAAsB,WACpB,UACA,MACY;AAEZ,MAAI,CAAC,oBAAI,eAAe;AACtB,eAAW,YAAY;AAAA,EACzB;AAEA,QAAM,UAAU,WAAW,EAAE,SAAS,IAAI,CAAC;AAC3C,SAAO,WAAW,SAAS,IAAI;AACjC;AAEA,eAAsB,eACpB,OACA,MACY;AACZ,SAAO,gBAAgB,OAAO,IAAI;AACpC;AAEA,eAAe,gBACb,OACA,MACA,sBACY;AACZ,MAAI,CAAC,OAAO;AACV,UAAM,IAAI,MAAM,mBAAmB;AAAA,EACrC;AAEA,QAAM,WAAW,qBAAqB,KAAK;AAC3C,QAAM,UAAsB,EAAE,OAAO,GAAG,qBAAqB;AAC7D,MAAI,UAAU;AACZ,YAAQ,WAAW;AAAA,EACrB;AAEA,SAAO,WAAW,SAAS,IAAI;AACjC;AAEA,eAAsB,oBACpB,UACA,MACY;AACZ,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,MAAM,sBAAsB;AAAA,EACxC;AAEA,QAAM,UAAsB;AAAA,IAC1B;AAAA,EACF;AACA,MAAI,SAAS,UAAU;AACrB,YAAQ,WAAW,SAAS;AAAA,EAC9B;AACA,SAAO,WAAW,SAAS,IAAI;AACjC;AAEA,SAAS,iBAAiB;AACxB,QAAM,UAAU,QAAQ,IAAI;AAC5B,MAAI,SAAS,aAAa;AACxB,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACF;AAEA,eAAsB,wBACpB,OACA,MACY;AACZ,SAAO,gBAAgB,O
AAO,MAAM;AAAA,IAClC,aAAa;AAAA,EACf,CAAC;AACH;AAEO,SAAS,cAA2C;AACzD,MAAI;AACF,UAAM,UAAU,QAAQ,IAAI;AAC5B,WAAO,SAAS;AAAA,EAClB,SAAS,GAAG;AAAA,EAEZ;AACF;AAEO,SAAS,cAAsB;AACpC,MAAI,CAAC,cAAc,GAAG;AACpB,WAAO;AAAA,EACT;AACA,QAAM,UAAU,QAAQ,IAAI;AAC5B,QAAM,WAAW,SAAS;AAC1B,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,MAAM,qBAAqB;AAAA,EACvC;AACA,SAAO;AACT;AAEO,SAAS,kBAAsC;AACpD,QAAM,UAAU,QAAQ,IAAI;AAC5B,SAAO,SAAS;AAClB;AAEO,SAAS,WAA+B;AAC7C,QAAM,UAAU,QAAQ,IAAI;AAC5B,QAAM,UAAU,SAAS;AACzB,MAAI,CAAC,WAAW,oBAAI,OAAO,KAAK,aAAa;AAC3C,WAAO;AAAA,EACT,OAAO;AACL,WAAO;AAAA,EACT;AACF;AAUO,SAAS,uBACdC,SACA,MACA;AACA,MAAI,CAACA,SAAQ;AACX,UAAM,IAAI,MAAM,oCAAoC;AAAA,EACtD;AACA,QAAM,UAAU;AAAA,IACd,sBAAsBA;AAAA,EACxB;AACA,SAAO,WAAW,SAAS,IAAI;AACjC;AAEO,SAAS,gBAAgB,MAAW;AACzC,QAAM,UAAsB;AAAA,IAC1B,QAAQ;AAAA,EACV;AACA,SAAO,WAAW,SAAS,IAAI;AACjC;AAEO,SAAS,0BAA0B;AACxC,QAAM,UAAU,QAAQ,IAAI;AAC5B,MAAI,CAAC,QAAQ,sBAAsB;AACjC,WAAO;AAAA,EACT,OAAO;AACL,WAAO,QAAQ;AAAA,EACjB;AACF;AAEO,SAAS,cAAwB;AACtC,QAAM,UAAU,QAAQ,IAAI;AAC5B,MAAI,CAAC,WAAY,oBAAI,iBAAiB,CAAC,QAAQ,UAAW;AACxD,UAAM,IAAI,MAAM,qBAAqB;AAAA,EACvC;AACA,SAAO,MAAM,iBAAiB,SAAS,QAAQ,CAAC;AAClD;AAEO,SAAS,iBAA2B;AACzC,MAAI,CAAC,YAAY,GAAG;AAClB,UAAM,IAAI,MAAM,+CAA+C;AAAA,EACjE;AACA,SAAO,MAAM,kBAAkB,CAAC;AAClC;AAMO,SAAS,SAAS,MAAsB;AAC7C,QAAM,QAAQ,SAAS;AACvB,MAAI,CAAC,OAAO;AACV,UAAM,IAAI,MAAM,wCAAwC;AAAA,EAC1D;AACA,SAAO,MAAM,OAAO,IAAI;AAC1B;AAMO,SAAS,aAAa,MAAsB;AACjD,QAAM,QAAQ,SAAS;AACvB,MAAI,CAAC,OAAO;AACV,UAAM,IAAI,MAAM,yCAAyC;AAAA,EAC3D;AACA,SAAO,MAAkBC,cAAa,KAAK,GAAG,IAAI;AACpD;AAMO,SAAS,YAAY,MAAsB;AAChD,QAAM,QAAQ,SAAS;AACvB,MAAI,CAAC,OAAO;AACV,UAAM,IAAI,MAAM,wCAAwC;AAAA,EAC1D;AACA,SAAO,MAAkB,oBAAoB,KAAK,GAAG,IAAI;AAC3D;AAEO,SAAS,SAAkB;AAChC,QAAM,UAAU,QAAQ,IAAI;AAC5B,QAAM,WAAW,SAAS;AAC1B,SAAO,CAAC,CAAC;AACX;AAEO,SAAS,oBAA4C;AAC1D,MAAI;AACF,WAAO,QAAQ,IAAI;AAAA,EACrB,SAAS,GAAG;AACV,WAAO;AAAA,EACT;AACF;AAxVA,IAeI,aAqOS;AApPb;AAAA;AAAA;AAEA,IAAAC;AACA;AACA;AACA,IAAAC;AACA,IAAAC;AASA,IAAI,cAA6B;AAqO1B,IAAM,eAAe,MAAM;AAChC,YAAM,QAAQ,SAAS;AACvB,UAAI,CAAC,OAAO;AACV,cAAM,IAAI,MAAM,qBAAqB;AAAA,MACvC;AACA,aAAmBH,cAAa,KAAK;AAAA,IACvC;AAAA;AAAA;;;AC1PA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAAAI,gBAAA;AAAA;AAAA;AAAA,IAAAC;AACA;AACA;AAAA;AAAA;;;ACFA,IAAAC,iBAAA;AAAA,SAAAA,gBAAA;AAAA;AAAA,mBAAAC;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA6DO,SAAS,4BAA4B;AAC1C,MAAI,WAAW,oBAAI;AACnB,MAAI,MAAyB,oBAAI,UAAU,MAAM,IAAI;AAErD,QAAM,IAAI,SAAS,IAAI,IAAI,CAAC,IAAI,IAAI,CAAC;AAErC,QAAM,IAAI,MAAM,GAAG;AACnB,MAAI,IAAI,SAAS,GAAG;AAElB,eAAW,IAAI,CAAC,EAAE,MAAM,GAAG,EAAE,CAAC;AAC9B,UAAM,IAAI,CAAC;AAAA,EACb,OAAO;AACL,UAAM,IAAI,CAAC;AAAA,EACb;AACA,QAAM,CAAC,MAAM,IAAI,IAAI,IAAI,MAAM,GAAG;AAElC,QAAM,aAAa,SAAS,IAAI;AAChC,SAAO;AAAA,IACL;AAAA,IACA;AAAA;AAAA,IAEA,MAAM,MAAM,UAAU,IAAI,OAAO;AAAA,EACnC;AACF;AAEO,SAAS,kBAAkB;AAChC,QAAM,EAAE,MAAM,UAAU,KAAK,IAAI,0BAA0B;AAC3D,MAAI,YAAgC;AAAA,IAClC,gBAAgB;AAAA,IAChB;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACA,MAAI,OAAkD;AACtD,MAAI,oBAAI,iBAAiB;AACvB,WAAO;AAAA,MACL,gBAAgB;AAAA,MAChB,cAAc;AAAA,QACZ,GAAG;AAAA,QACH,KAAK,CAAC;AAAA,MACR;AAAA,MACA,qBAAqB;AAAA,MACrB,WAAW,CAAC,SAAiB,aAAkB,SAAS,MAAM,OAAO;AAAA,IACvE;AAAA,EACF;AACA,SAAO;AACT;AAEO,SAAS,YAAY,IAAY,KAAa;AACnD,MAAI,IAAI,SAAS,EAAE,GAAG;AACpB,WAAO;AAAA,EACT;AACA,SAAO,GAAG,EAAE,GAAGA,UAAS,GAAG,GAAG;AAChC;AAEO,SAAS,eAAe,KAAa;AAC1C,MAAI,QAAQ,IAAI,MAAMA,UAAS;AAC/B,MAAI,MAAM,UAAU,GAAG;AACrB,UAAM,MAAM;AACZ,WAAO,MAAM,KAAKA,UAAS;AAAA,EAC7B,OAAO;AAEL,WAAO,MAAM,CAAC;AAAA,EAChB;AACF;AA7HA,IAGM,iBACA,oBACOA,YAUD,WA2BA;AA1CZ,IAAAC,cAAA;AAAA
;AAAA;AAAA,IAAAC;AAGA,IAAM,kBAAkB;AACxB,IAAM,qBAAqB;AACpB,IAAMF,aAAY;AAUlB,IAAK,YAAL,kBAAKG,eAAL;AACL,MAAAA,WAAA,eAAY;AACZ,MAAAA,WAAA,mBAAgB;AAChB,MAAAA,WAAA,iBAAc;AACd,MAAAA,WAAA,eAAY;AACZ,MAAAA,WAAA,cAAW;AACX,MAAAA,WAAA,cAAW;AACX,MAAAA,WAAA,gBAAa;AACb,MAAAA,WAAA,WAAQ;AACR,MAAAA,WAAA,kBAAe;AACf,MAAAA,WAAA,gBAAa;AACb,MAAAA,WAAA,cAAW;AACX,MAAAA,WAAA,mBAAgB;AAChB,MAAAA,WAAA,mBAAgB;AAChB,MAAAA,WAAA,WAAQ;AACR,MAAAA,WAAA,eAAY;AAfF,aAAAA;AAAA,OAAA;AA2BL,IAAK,qBAAL,kBAAKC,wBAAL;AACL,MAAAA,wCAAA,aAAU,KAAV;AACA,MAAAA,wCAAA,eAAY,KAAZ;AACA,MAAAA,wCAAA,mBAAgB,KAAhB;AACA,MAAAA,wCAAA,cAAW,KAAX;AACA,MAAAA,wCAAA,cAAW,KAAX;AACA,MAAAA,wCAAA,cAAW,KAAX;AACA,MAAAA,wCAAA,cAAW,KAAX;AACA,MAAAA,wCAAA,cAAW,KAAX;AACA,MAAAA,wCAAA,cAAW,KAAX;AACA,MAAAA,wCAAA,cAAW,KAAX;AACA,MAAAA,wCAAA,cAAW,MAAX;AACA,MAAAA,wCAAA,eAAY,MAAZ;AACA,MAAAA,wCAAA,eAAY,MAAZ;AACA,MAAAA,wCAAA,eAAY,MAAZ;AACA,MAAAA,wCAAA,eAAY,MAAZ;AACA,MAAAA,wCAAA,eAAY,MAAZ;AAhBU,aAAAA;AAAA,OAAA;AAAA;AAAA;;;AC1CZ;AAAA;AAAA;AAAA;AAAA;AAWO,SAAS,QAAQ;AACtB,SAAO,WAAW,MAAM;AAC1B;AAbA,IAEM,YAEO;AAJb;AAAA;AAAA;AAAA,IAAAC;AAEA,IAAM,aAAa,QAAQ,gBAAgB;AAEpC,IAAM,YAAY,CAAC,YAAiB;AACzC,YAAM,gBAAgB,WAAW,MAAM;AACvC,UAAI,eAAe;AACjB,gEAA6B,IAAI;AAAA,MACnC;AAAA,IACF;AAAA;AAAA;;;ACTA,IAAAC,oBAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;AC4BO,SAAS,kBAAkB;AAChC,SAAO;AACT;AA9BA,iBACA,WACAC,YASa,oBAQP;AAnBN,IAAAC,cAAA;AAAA;AAAA;AAAA,kBAAqB;AACrB,gBAAuB;AACvB,IAAAD,aAAe;AACf,IAAAE;AAQO,IAAM,qBAAqB;AAAA,MAChC,SAAS,oBAAI;AAAA,MACb,MAAM,oBAAI;AAAA,MACV,WAAW,oBAAI;AAAA,MACf,QAAQ,oBAAI;AAAA,MACZ,SAAS,oBAAI;AAAA,IACf;AAEA,IAAM,YAAQ,sBAAK,kBAAO,GAAG,WAAW;AACxC,QAAI;AACF,iBAAAC,QAAG,UAAU,KAAK;AAAA,IACpB,SAAS,GAAQ;AACf,UAAI,EAAE,SAAS,UAAU;AACvB,cAAM;AAAA,MACR;AAAA,IACF;AAAA;AAAA;;;AC1BA;AAAA;AAAA;AAAA;AAAA;AAAA;AAiBA,eAAe,eAAe,OAAe;AAC3C,SAAO;AAAA,IACL;AAAA,IACA,CAAC,OAAiB;AAChB,aAAO,GAAG,qCAAkC;AAAA,IAC9C;AAAA,IACA,EAAE,YAAY,KAAK;AAAA,EACrB;AACF;AAEA,SAAS,UAAU,UAA8B;AAC/C,SAAO,CAAC,YAAY,SAAS,UAAU;AACzC;AASA,eAAsB,eAAe,OAA0C;AAC7E,QAAM,SAAS,MAAM,aAAa;AAElC,MAAI,WAAW,MAAM,OAAO,IAAI,KAAK;AACrC,MAAI,CAAC,UAAU;AACb,QAAI,SAA6B;AACjC,QAAI;AACF,iBAAW,MAAM,eAAe,KAAK;AAAA,IACvC,SAAS,KAAU;AAEjB,UAAI,OAAO,IAAI,WAAW,KAAK;AAC7B,mBAAW,EAAE,OAAO,wBAAiB;AAGrC,iBAAS;AAAA,MACX,OAAO;AACL,cAAM;AAAA,MACR;AAAA,IACF;AAIA,QAAI,UAAU,QAAQ,GAAG;AACvB,YAAM,OAAO,MAAM,OAAO,IAAI,KAAK;AACnC,UAAI,MAAM;AACR,mBAAW;AAAA,MACb;AAAA,IACF;AACA,UAAM,OAAO,MAAM,OAAO,UAAU,MAAM;AAAA,EAC5C;AAEA,SAAO;AACT;AAQA,eAAsB,sBAAsB,OAAe,aAAmB;AAC5E,MAAI,CAAC,OAAO;AACV,UAAM;AAAA,EACR;AACA,QAAM,SAAS,MAAM,aAAa;AAClC,QAAM,OAAO,OAAO,KAAK;AACzB,MAAI,aAAa;AACf,UAAM,OAAO,MAAM,OAAO,aAAa,cAAc;AAAA,EACvD;AACF;AAvFA,IAIY,UAQN;AAZN;AAAA;AAAA;AAAA;AACA,IAAAC;AAGO,IAAK,WAAL,kBAAKC,cAAL;AACL,MAAAA,UAAA,aAAU;AADA,aAAAA;AAAA,OAAA;AAQZ,IAAM,iBAAiB;AAAA;AAAA;;;ACchB,SAAS,cAAc,SAAiB,IAAa;AAC1D,OAAK,MAAM,MAAM;AACjB,SAAO,iBAAmB,GAAG,SAAS,GAAG,OAAO,GAAG,SAAS,GAAG,EAAE;AACnE;AAMO,SAAS,sBAAsB;AACpC,SAAO,8BAAyB,GAAG,SAAS,GAAG,MAAM,CAAC;AACxD;AAMO,SAAS,qBAAqB,IAAU;AAC7C,SAAO,kBAAoB,GAAG,SAAS,GAAG,MAAM,MAAM,CAAC;AACzD;AAGO,SAAS,eAAe,IAAY;AACzC,SAAO,oBAAoB,KAAK,EAAE;AACpC;AAOO,SAAS,uBAAuB,UAAkB;AACvD,SAAO,8CAA2C,QAAQ;AAC5D;AAKO,SAAS,8BAA8B,IAAY;AACxD,QAAM,SAAS,iBAAmB,GAAG,SAAS,iCAA8B,GAAG,SAAS;AACxF,MAAI,CAAC,MAAM,CAAC,GAAG,SAAS,MAAM,GAAG;AAC/B,WAAO;AAAA,EACT;AACA,SAAO,GAAG,MAAM,MAAM,EAAE,CAAC;AAC3B;AAMO,SAAS,mBAAmB,SAAc;AAC/C,SAAO,4BAAwB,GAAG,SAAS,GAAG,OAAO,GAAG,SAAS,GAAG,MAAM,CAAC;AAC7E;AAEO,SAAS,kBAAkB,WAAmB,QAAgB;AACnE,SAAO,GAAG,SAAS,GAAG,SAAS,GAAG,MAAM;AAC1C;AAMO,SAAS,eAAe,MAAc;AAC3C,QAAM,SAAS,oBAAoB,GAAG,SAAS;AAC/C,MAAI,KAAK,WAAW,MAAM,GAAG;AAC3B,WAAO;AAAA,EACT;AACA,SAAO,GAAG,MAAM,GAAG,IAAI;A
ACzB;AAKO,SAAS,aAAa,MAAc;AACzC,SAAO,eAAe,IAAI;AAC5B;AArGA,IAYa,eAmCP,qBA4DO,mBAQA;AAnHb;AAAA;AAAA;AAAA,IAAAC;AAMA;AAMO,IAAM,gBAAgB,CAAC,aAA6B;AACzD,UAAI,KAAK;AACT,UAAI,UAAU;AACZ,cAAM,GAAG,QAAQ,GAAG,SAAS;AAAA,MAC/B;AACA,aAAO,GAAG,EAAE,GAAG,MAAM,CAAC;AAAA,IACxB;AA6BA,IAAM,sBAAsB,IAAI,OAAO,mBAAqB,GAAG,SAAS,IAAI;AA4DrE,IAAM,oBAAoB,CAAC,WAAgB;AAChD,aAAO,2BAAwB,GAAG,SAAS,GAAG,MAAM;AAAA,IACtD;AAMO,IAAM,mBAAmB,CAAC,SAAiB;AAChD,aAAO,qBAAsB,GAAG,SAAS,GAAG,IAAI;AAAA,IAClD;AAAA;AAAA;;;AC/FO,SAAS,aACd,SACA,OACA,aAAyC,CAAC,GACvB;AACnB,MAAI,SAAS,MAAM;AACjB,YAAQ;AAAA,EACV;AACA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,UAAU,GAAG,OAAO,GAAG,SAAS,GAAG,KAAK;AAAA,IACxC,QAAQ,GAAG,OAAO,GAAG,SAAS,GAAG,KAAK,GAAG,WAAW;AAAA,EACtD;AACF;AAUO,SAAS,aACd,SACA,OACA,aAAyC,CAAC,GACvB;AACnB,MAAI,WAAW,MAAM;AACnB,WAAO,6BAA+B,MAAM,UAAU;AAAA,EACxD;AAEA,QAAM,WAAW,SAAS,OAAO,GAAG,OAAO,GAAG,SAAS,KAAK;AAE5D,SAAO,6BAA+B,UAAU,UAAU;AAC5D;AAKO,SAAS,cAAc,UAAoB;AAChD,SAAO,YAAY,QAAQ;AAC7B;AA2BO,SAAS,mBACd,KAAK,IACL,aAAyC,CAAC,GACvB;AACnB,SAAO;AAAA,IACL,GAAG;AAAA,IACH,UAAU,8BAAyB,GAAG,SAAS,GAAG,EAAE;AAAA,IACpD,QAAQ,8BAAyB,GAAG,SAAS,GAAG,EAAE,GAAG,WAAW;AAAA,EAClE;AACF;AAKO,SAAS,oBACd,UACA,aAAyC,CAAC,GACvB;AACnB,MAAI,CAAC,UAAU;AACb,eAAW;AAAA,EACb;AACA,QAAM,WAAW,YAAY;AAC7B,SAAO;AAAA,IACL,GAAG;AAAA;AAAA,IAEH,UAAU,WACN,WACA,kBAAoB,GAAG,SAAS,GAAG,QAAQ;AAAA,IAC/C,QAAQ,kBAAoB,GAAG,SAAS,GAAG,QAAQ,GAAG,WAAW;AAAA,EACnE;AACF;AAKO,SAAS,sBACd,QACA,aAAyC,CAAC,GACvB;AACnB,SAAO,6CAA0C,QAAQ,UAAU;AACrE;AAEO,SAAS,oBACd,OACA,aAAyC,CAAC,GACvB;AACnB,QAAM,YAAYC,cAAa,KAAK;AACpC,SAAO;AAAA,IACL,GAAG;AAAA,IACH,UAAU;AAAA,IACV,QAAQ,GAAG,SAAS,GAAG,WAAW;AAAA,EACpC;AACF;AAKO,SAAS,kBACd,SACA,YACA,aAAa,CAAC,GACd;AACA,MAAI,CAAC,YAAY;AACf,iBAAa;AAAA,EACf;AACA,MAAI;AACJ,MAAI,YAAY;AACd,YAAQ;AAAA,EACV,OAAO;AACL,YAAQ,4BAAwB,GAAG,SAAS,GAAG,OAAO,GAAG,SAAS;AAAA,EACpE;AACA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,UAAU;AAAA,IACV,QAAQ,GAAG,KAAK,GAAG,WAAW;AAAA,EAChC;AACF;AAKO,SAAS,cAAc,QAAwB,aAAa,CAAC,GAAG;AACrE,SAAO,gCAAgC,QAAQ,UAAU;AAC3D;AAEO,SAAS,kBAAkB,SAAc,UAAmB;AACjE,QAAMC,WAAU,WAAW,GAAG,SAAS,GAAG,QAAQ,KAAK;AACvD,SAAO,aAAa,OAAO,GAAGA,QAAO,aAAa,OAAO,GAAGA,QAAO,GAAG,WAAW;AACnF;AAnLA,IAsEa,WAaA,gBAqGA;AAxLb;AAAA;AAAA;AAAA,IAAAC;AAOA;AA+DO,IAAM,YAAY,CAAC,OAAe;AAEvC,aACE,OACC,GAAG,WAAW,mBAAqB,GAAG,SAAS,EAAE,KAChD,GAAG,WAAW,0CAA+B,GAAG,SAAS,EAAE;AAAA,IAEjE;AAMO,IAAM,iBAAiB,CAAC,OAAe;AAE5C,aAAO,MAAM,GAAG,WAAW,gCAA0B,GAAG,SAAS,EAAE;AAAA,IACrE;AAkGO,IAAM,kBAAkB,CAAC,UAA0B,aAAa,CAAC,MAAM;AAC5E,aAAO,iCAAkC,UAAU,UAAU;AAAA,IAC/D;AAAA;AAAA;;;AC1LA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AAAA;AAAA;;;ACgBA,eAAsB,UAAU,OAAO,EAAE,WAAW,MAAM,GAAG;AAC3D,QAAM,YAAY,QAAQ,KAAK;AAE/B,MAAI,MAAa,CAAC;AAClB,iBAAe,OAAO,aAAsB;AAC1C,UAAM,OAAO,MAAM,kBAAkB,WAAW;AAChD,UAAM,IAAI,OAAO,IAAI;AAAA,EACvB;AACA,MAAI,WAAW,YAAY;AAC3B,MAAI,CAAC,oBAAI,iBAAkB,CAAC,aAAa,aAAa,mBAAoB;AAMxE,UAAM,OAAO;AAAA,EACf,OAAO;AAEL,UAAM,OAAO,mCAAoC,QAAQ,CAAC;AAE1D,UAAM,OAAO,2CAAwC,QAAQ,CAAC;AAE9D,QAAI,KAAK,gBAAgB,QAAQ,CAAC;AAAA,EACpC;AACA,SAAO;AACT;AAQA,eAAsB,WAAW;AAAA,EAC/B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,IAAS,CAAC,GAA8B;AACtC,MAAI,WAAW,YAAY;AAC3B,MAAI,CAAC,oBAAI,iBAAiB,CAAC,UAAU;AACnC,eAAW;AAAA,EACb;AACA,MAAI,MAAM,MAAM,UAAU,EAAE,UAAU,CAAC;AACvC,QAAM,aAAa,IAAI,OAAO,CAAC,WAAgB;AAC7C,QAAI,oBAAI,OAAO,KAAK,CAAC,QAAQ;AAC3B,aAAO;AAAA,IACT;AAEA,UAAM,QAAQ,OAAO,MAAM,SAAS;AAEpC,QAAI,MAAM,CAAC,uBAAwB;AAEjC,YAAM,mBAAmB,MAAM,MAAM,SAAS,CAAC;AAE/C,YAAM,aACJ,MAAM,WAAW,KAAK;AAExB,aACG,aAAa,qBAAqB,cACnC,qBAAqB;AAAA,IAEzB;AACA,WAAO;AAAA,EACT,CAAC;AACD,MAAI,SAAS;A
ACX,UAAM,YAAY,WAAW,OAAO,WAAS,WAAW,KAAK,CAAC;AAC9D,UAAM,aAAa,WAAW,OAAO,WAAS,CAAC,WAAW,KAAK,CAAC;AAChE,YAAQ,KAAK;AAAA,MACX,KAAK;AACH,eAAO;AAAA,MACT,KAAK;AACH,eAAO;AAAA,MACT;AACE,eAAO;AAAA,IACX;AAAA,EACF;AACA,QAAM,cAAc,WAAW;AAAA,IAAI,CAAC;AAAA;AAAA,MAElC,eAAe,GAAG;AAAA;AAAA,EACpB;AACA,MAAI,YAAY,WAAW,GAAG;AAC5B,WAAO,CAAC;AAAA,EACV,OAAO;AACL,UAAM,WAAW,MAAM,QAAQ,WAAW,WAAW;AACrD,UAAM,OAAO,SACV;AAAA,MACC,CAAC,WACC,OAAO,WAAW,eAClB,OAAO,OAAO;AAAA,IAClB,EACC,IAAI,CAAC,EAAE,MAAM,MAAW,KAAK;AAChC,QAAI,CAAC,KAAK;AACR,aAAO,KAAK,OAAO,CAAC,QAAa;AAC/B,YAAI,KAAK;AACP,iBAAO,SAAS,GAAG;AAAA,QACrB;AACA,eAAO,CAAC,SAAS,GAAG;AAAA,MACtB,CAAC;AAAA,IACH,OAAO;AACL,aAAO,KAAK,IAAI,CAAC,SAAc;AAAA,QAC7B,GAAG;AAAA,QACH,QAAQ,SAAS,GAAG,IAAI,gBAAgB;AAAA,MAC1C,EAAE;AAAA,IACJ;AAAA,EACF;AACF;AAEA,eAAsB,aAAa,QAAkB;AACnD,QAAM,UAAU,MAAM,QAAQ;AAAA,IAC5B,OAAO,IAAI,WAAS,eAAe,KAAK,CAAC;AAAA,EAC3C;AAEA,SAAO,QACJ;AAAA,IACC,aACE,QAAQ,WAAW,eAClB,QAAQ,MAAqB;AAAA,EAClC,EACC,IAAI,aAAY,QAAwC,KAAK;AAClE;AAKA,eAAsB,gBAAgB;AACpC,QAAM,OAAQ,MAAM,WAAW,EAAE,SAAS,KAAK,CAAC;AAChD,SAAO,KAAK,OAAO,CAAC,OAAY,CAAC,WAAW,EAAE,CAAC;AACjD;AAKA,eAAsB,eAAe;AACnC,QAAM,OAAQ,MAAM,WAAW,EAAE,SAAS,KAAK,CAAC;AAChD,SAAO,KAAK,OAAO,CAAC,OAAY,WAAW,EAAE,CAAC;AAChD;AAEO,SAAS,YACd,QACA,QACA;AACA,MAAI,UAAU,UAAa,UAAU,QAAW;AAC9C,WAAO;AAAA,EACT;AACA,SAAOC,cAAa,MAAM,MAAMA,cAAa,MAAM;AACrD;AAEA,eAAsB,SAAS,QAAa;AAC1C,SAAO;AAAA,IACL;AAAA,IACA,OAAO,OAAiB;AACtB,aAAO,MAAM,GAAG,OAAO;AAAA,IACzB;AAAA,IACA,EAAE,YAAY,KAAK;AAAA,EACrB;AACF;AAEO,SAAS,WACd,MACA,UACA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AACF,IAII;AAAA,EACF,UAAU;AAAA,EACV,UAAU;AACZ,GACA;AACA,MAAI,CAAC,UAAU;AACb,WAAO,EAAE,MAAM,aAAa,MAAM;AAAA,EACpC;AACA,QAAM,cAAc,KAAK,SAAS;AAClC,MAAI,WAAW;AACf,MAAI,CAAC,QAAQ;AACX,aAAS,CAAC,QAAc,WAAW,MAAM,QAAQ,IAAI,KAAK;AAAA,EAC5D;AACA,MAAI,aAAa;AACf,eAAW,OAAO,KAAK,QAAQ,CAAC;AAAA,EAClC;AACA,SAAO;AAAA,IACL,MAAM,KAAK,MAAM,GAAG,QAAQ;AAAA,IAC5B;AAAA,IACA;AAAA,EACF;AACF;AA/MA,IAAAC,cAAA;AAAA;AAAA;AAAA,IAAAC;AACA,IAAAC;AACA,IAAAC;AACA,IAAAC;AACA;AACA;AAEA;AAEA;AAAA;AAAA;;;ACYA,SAAS,YAA4B;AACnC,SAAO;AAAA,IACL,KAAK;AAAA;AAAA;AAAA,IAGL,OAAO,CAAC;AAAA,EACV;AACF;AAEA,eAAe,iBAAiB,IAAc,UAAoB;AAChE,MAAI,CAAC,gBAAgB,QAAQ,GAAG;AAC9B;AAAA,EACF;AACA,MAAI;AACF,UAAM,YAAY,MAAM,GAAG,IAAoB,SAAS;AACxD,aAAS,mBAAmB,gBAAgB,QAAQ,GAAG;AACrD,aAAO,UAAU,QAAQ,eAAe;AAAA,IAC1C;AACA,UAAM,GAAG,IAAI,SAAS;AAAA,EACxB,SAAS,KAAK;AAAA,EAEd;AACF;AAEA,eAAsB,WACpB,IACA,QACA,UACe;AACf,MAAI;AACJ,MAAI;AACF,gBAAY,MAAM,GAAG,IAAoB,SAAS;AAAA,EACpD,SAAS,KAAK;AAEZ,gBAAY,UAAU;AAAA,EACxB;AACA,QAAM,OAAe;AAAA,IACnB,KAAK;AAAA,EACP;AACA,YAAU,QAAQ;AAAA,IAChB,GAAG,UAAU;AAAA,IACb,CAAC,QAAQ,GAAG;AAAA,EACd;AACA,MAAI;AACF,UAAM,GAAG,IAAI,SAAS;AAAA,EACxB,SAAS,KAAU;AACjB,QAAI,IAAI,WAAW,KAAK;AACtB,aAAO,MAAM,WAAW,IAAI,QAAQ,QAAQ;AAAA,IAC9C,OAAO;AACL,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAuCA,eAAsB,aACpB,UACAC,SACA,IACA,YACA,MAC6B;AAC7B,MAAI;AACF,UAAM,WAAW,MAAM,GAAG,MAAS,YAAY,QAAQ,IAAIA,OAAM;AAEjE,WAAO;AAAA,EACT,SAAS,KAAU;AACjB,UAAM,gBAAgB,OAAO,IAAI,SAAS;AAC1C,UAAM,gBAAgB,OAAO,IAAI,WAAW;AAC5C,QAAI,iBAAiB,eAAe;AAClC,YAAM,iBAAiB,IAAI,QAAQ;AACnC,YAAM,WAAW;AACjB,aAAO,aAAa,UAAUA,SAAQ,IAAI,YAAY,IAAI;AAAA,IAC5D,WAAW,IAAI,WAAW,KAAK;AAG7B,aAAO,aAAa,UAAUA,SAAQ,IAAI,YAAY,IAAI;AAAA,IAC5D,OAAO;AACL,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAwBA,eAAe,mBAAmB,QAAgB,UAAoB;AACpE,MAAI;AACF,UAAM,SAAS,gBAAgB,cAAc,MAAM,OAAO,OAAiB;AACzE,YAAM,WAAW,IAAI,QAAQ,QAAQ;AAAA,IACvC,CAAC;AAAA,EACH,SAAS,GAAQ;AACf,QAAI,EAAE,WAAW,OAAO,oBAAI,OAAO,GAAG;AAGpC;AAAA,IACF;AACA,UAAM;AAAA,EACR;AACF;AA4DA,eAAsB,mBACpB,UACAA,SACA,MACA;AACA,QAAM,KAAK,YAAY;AACvB,QAAM,WAAW,iBAAiB,QAAQ;AAC1C,SAAO,aAAgB,UAAUA,SAAQ,IAAI,UAAU,IAAI;AAC7D;AAnPA,IAmBM,WAwDO,
wBAUA,mBAaA,kBA0CA,WAqCA,gCASA,wBAaA,mBAgBP,kBAMO;AA7Nb;AAAA;AAAA;AAAA,IAAAC;AAOA,IAAAC;AACA,IAAAC;AASA,IAAAC;AAEA,IAAM,YAAY;AAwDX,IAAM,yBAAyB,YAAY;AAChD,YAAM,KAAK,YAAY;AACvB,YAAM,SAAS;AAAA,6CAC8B,GAAG,SAAS;AAAA;AAAA;AAAA;AAIzD,YAAM,WAAW,IAAI,uCAA8B;AAAA,IACrD;AAEO,IAAM,oBAAoB,YAAY;AAC3C,YAAM,KAAK,YAAY;AACvB,YAAM,SAAS;AAAA,6CAC8B,GAAG,SAAS;AAAA;AAAA,qCAEtB,SAAS;AAAA;AAAA;AAAA;AAAA;AAK5C,YAAM,WAAW,IAAI,kCAA4B;AAAA,IACnD;AAEO,IAAM,mBAAmB,YAAY;AAC1C,YAAM,KAAK,YAAY;AACvB,YAAM,SAAS;AAAA,sDACkC;AAAA;AAAA;AAAA;AAIjD,YAAM,WAAW,IAAI,qCAA2B;AAAA,IAClD;AAkCO,IAAM,YAAY,OACvB,UACAJ,SACA,IACA,YACA,SACqB;AACrB,YAAM,WAAW,MAAM,aAAgB,UAAUA,SAAQ,IAAI,YAAY,IAAI;AAC7E,YAAM,OAAO,SAAS;AACtB,YAAM,OAAO,KAAK,IAAI,SAAQA,QAAO,eAAe,IAAI,MAAO,IAAI,KAAM;AAGzE,UAAI,MAAM,eAAe;AACvB,eAAO;AAAA,MACT,OAAO;AAEL,eAAO,KAAK,UAAU,IAAK,KAAK,CAAC,IAAW;AAAA,MAC9C;AAAA,IACF;AAmBO,IAAM,iCAAiC,YAAY;AACxD,YAAM,SAAS;AAAA,mEAC0C,GAAG,SAAS;AAAA;AAAA;AAAA;AAIrE,YAAM,mBAAmB,iDAAiC;AAAA,IAC5D;AAEO,IAAM,yBAAyB,YAAY;AAChD,YAAM,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AASf,YAAM,mBAAmB,mEAAyC;AAAA,IACpE;AAEO,IAAM,oBAAoB,OAC/B,UACAA,SACA,SACqB;AACrB,YAAMK,oBAAwB;AAAA,QAC5B,0CAA0B,GAAG;AAAA,QAC7B,4DAAkC,GAAG;AAAA,MACvC;AAEA,aAAO,SAAS,gBAAgB,cAAc,MAAM,OAAO,OAAiB;AAC1E,cAAM,WAAWA,kBAAiB,QAAQ;AAC1C,eAAO,UAAU,UAAUL,SAAQ,IAAI,UAAU,IAAI;AAAA,MACvD,CAAC;AAAA,IACH;AAEA,IAAM,mBAAwB;AAAA,MAC5B,gCAAuB,GAAG;AAAA,MAC1B,8BAAoB,GAAG;AAAA,MACvB,2BAAqB,GAAG;AAAA,IAC1B;AAEO,IAAM,kBAAkB,OAC7B,UACAA,SACA,IACA,SACiC;AAEjC,UAAI,CAAC,IAAI;AACP,aAAK,YAAY;AAAA,MACnB;AACA,YAAM,WAAW,iBAAiB,QAAQ;AAC1C,aAAO,UAAa,UAAUA,SAAQ,IAAK,UAAU,IAAI;AAAA,IAC3D;AAAA;AAAA;;;ACzOA,IAGM,aAgFC;AAnFP;AAAA;AAAA;AAAA;AACA,IAAAM;AAEA,IAAM,cAAN,MAAkB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAUhB,YAAY,EAAE,QAAQ,OAAO,GAAQ;AACnC,aAAK,SAAS,WAAW,MAAM;AAC/B,aAAK,SAAS,WAAW,MAAM;AAAA,MACjC;AAAA,MAEA,QAAQ;AACN,eAAO,QAAQ,IAAI,CAAC,aAAa,KAAK,MAAM,GAAG,aAAa,KAAK,MAAM,CAAC,CAAC;AAAA,MAC3E;AAAA,MAEA,UAAU,WAAgB,OAAO,CAAC,GAAG;AACnC,eAAO,IAAI,QAAQ,aAAW;AAC5B,oBAAU,KAAK,QAAQ,IAAI,EACxB,GAAG,UAAU,SAAU,KAAU;AAEhC,kBAAM,IAAI,MAAM,wCAAwC,GAAG,EAAE;AAAA,UAC/D,CAAC,EACA,GAAG,YAAY,SAAU,MAAW;AACnC,mBAAO,QAAQ,IAAI;AAAA,UACrB,CAAC,EACA,GAAG,SAAS,SAAU,KAAU;AAC/B,kBAAM,IAAI,MAAM,sBAAsB,GAAG,EAAE;AAAA,UAC7C,CAAC;AAAA,QACL,CAAC;AAAA,MACH;AAAA;AAAA;AAAA;AAAA;AAAA,MAMA,KAAK,OAAO,CAAC,GAAG;AACd,aAAK,cAAc,KAAK,UAAU,KAAK,OAAO,MAAM,IAAI;AACxD,eAAO,KAAK;AAAA,MACd;AAAA;AAAA;AAAA;AAAA;AAAA,MAMA,UAAU,OAAO,CAAC,GAAG;AACnB,aAAK,cAAc,KAAK,UAAU,KAAK,OAAO,UAAU,IAAI,IAAI;AAChE,eAAO,KAAK;AAAA,MACd;AAAA,MAEA,mBAAmB;AACjB,eAAO;AAAA,UACL,QAAQ,CAAC,QAAa;AACpB,gBAAI,IAAI,OAAO,IAAI,IAAI,wCAAsC,GAAG;AAC9D,qBAAO;AAAA,YACT;AACA,mBAAO,IAAI;AAAA,UACb;AAAA,QACF;AAAA,MACF;AAAA;AAAA;AAAA;AAAA,MAKA,MAAM,WAAW;AACf,cAAM,KAAK,OAAO,QAAQ;AAE1B,aAAK,SAAS,WAAW,KAAK,OAAO,IAAI;AAEzC,cAAM,KAAK,UAAU;AAAA,MACvB;AAAA,MAEA,SAAS;AACP,aAAK,YAAY,OAAO;AAAA,MAC1B;AAAA,IACF;AAEA,IAAO,sBAAQ;AAAA;AAAA;;;ACtDR,SAAS,mBAAmB,KAAkB;AACnD,MAAI,OAAO,QAAQ,YAAY,IAAI,MAAM,iBAAiB,KAAK,MAAM;AACnE,UAAM,QAAQ,IAAI,MAAM,GAAG;AAE3B,UAAM,MAAM;AACZ,WAAO,MAAM,KAAK,GAAG;AAAA,EACvB,OAAO;AACL,WAAO;AAAA,EACT;AACF;AA6gBA,eAAe,SACb,KACAC,OACA,QAC4B;AAC5B,QAAM,WAAW,UAAM,mBAAAC,SAAM,KAAK;AAAA,IAChC,MAAM,KAAK,UAAUD,KAAI;AAAA,IACzB,QAAQ;AAAA,IACR,SAAS;AAAA,MACP,eAAe;AAAA,IACjB;AAAA,EACF,CAAC;AAED,MAAI,SAAS,WAAW,KAAK;AAC3B,UAAM;AAAA,EACR;AACA,QAAM,OAAO,MAAM,SAAS,KAAK;AAEjC,MAAI,SAA4B;AAAA,IAC9B,MAAM,CAAC;AAAA,IACP,WAAW;AAAA,EACb;AACA,MAAI,KAAK,QAAQ,QAAQ,KAAK,KAAK,SAAS,GAAG;AAC7C,WAAO,OAAO,KAAK,KAAK,IAAI,CAAC,QAAa,IAAI,GAAG;AAAA,EACnD;AACA,MAAI,KAAK,UAAU;AACjB,WAAO,WAAW,KAAK;AAAA,EACzB;AACA,MAAI,KAAK,YAAY;AACnB,WAAO,YAAY
,KAAK;AAAA,EAC1B;AACA,SAAO;AACT;AAoBA,eAAe,gBACb,QACAE,QACA,OACAC,SACc;AACd,QAAM,WAAWA,QAAO;AACxB,QAAM,OAAOA,QAAO,QAAQ,CAAC;AAC7B,MAAI,KAAK,UAAUA,QAAO,OAAO;AAC/B,WAAO;AAAA,EACT;AACA,MAAI,WAAW,aAAa;AAC5B,MAAI,KAAK,SAASA,QAAO,QAAQ,aAAa,UAAU;AACtD,eAAWA,QAAO,QAAQ,KAAK;AAAA,EACjC;AACA,QAAM,OAAO,MAAM,IAAI,aAAgB,QAAQD,QAAO,KAAK,EACxD,WAAWC,QAAO,OAAO,EACzB,SAASA,QAAO,OAAO,EACvB,YAAY,QAAQ,EACpB,SAAS,QAAQ,EACjB,QAAQA,QAAO,IAAI,EACnB,aAAaA,QAAO,SAAS,EAC7B,YAAYA,QAAO,QAAQ,EAC3B,IAAI;AACP,MAAI,CAAC,KAAK,KAAK,QAAQ;AACrB,WAAO;AAAA,EACT;AACA,MAAI,KAAK,KAAK,SAAS,aAAa,UAAU;AAC5C,WAAO,CAAC,GAAG,MAAM,GAAG,KAAK,IAAI;AAAA,EAC/B;AACA,QAAM,YAAY;AAAA,IAChB,GAAGA;AAAA,IACH,UAAU,KAAK;AAAA,IACf,MAAM,CAAC,GAAG,MAAM,GAAG,KAAK,IAAI;AAAA,EAC9B;AACA,SAAO,MAAM,gBAAgB,QAAQD,QAAO,OAAO,SAAS;AAC9D;AAmBA,eAAsB,gBACpB,QACAA,QACA,OACAC,SACA;AACA,MAAI,QAAQA,QAAO;AACnB,MAAI,SAAS,QAAQ,MAAM,KAAK,KAAK,QAAQ,GAAG;AAC9C,YAAQ;AAAA,EACV;AACA,UAAQ,KAAK,IAAI,OAAO,aAAa,QAAQ;AAC7C,QAAM,SAAS,IAAI,aAAgB,QAAQD,QAAO,KAAK;AACvD,MAAIC,QAAO,SAAS;AAClB,WAAO,WAAWA,QAAO,OAAO;AAAA,EAClC;AACA,MAAIA,QAAO,SAAS;AAClB,WAAO,SAASA,QAAO,OAAO;AAAA,EAChC;AACA,MAAIA,QAAO,MAAM;AACf,WACG,QAAQA,QAAO,IAAI,EACnB,aAAaA,QAAO,SAAS,EAC7B,YAAYA,QAAO,QAAQ;AAAA,EAChC;AACA,MAAIA,QAAO,SAAS;AAClB,WAAO,gBAAgBA,QAAO,OAAO;AAAA,EACvC;AACA,MAAIA,QAAO,iBAAiB;AAC1B,WAAO,gBAAgB;AAAA,EACzB;AACA,QAAM,gBAAgB,MAAM,OACzB,YAAYA,QAAO,QAAQ,EAC3B,SAAS,KAAK,EACd,IAAI;AAIP,SAAO,YAAY,cAAc,QAAQ,EAAE,SAAS,CAAC;AACrD,MAAIA,QAAO,SAAS;AAClB,WAAO,SAASA,QAAO,OAAO;AAAA,EAChC;AACA,QAAM,cAAc,MAAM,OAAO,IAAI;AAErC,SAAO;AAAA,IACL,GAAG;AAAA,IACH,aAAa,YAAY,QAAQ,YAAY,KAAK,SAAS;AAAA,EAC7D;AACF;AAmBA,eAAsB,WACpB,QACAD,QACA,OACAC,SACA;AACA,MAAI,QAAQA,QAAO;AACnB,MAAI,SAAS,QAAQ,MAAM,KAAK,KAAK,QAAQ,GAAG;AAC9C,YAAQ;AAAA,EACV;AACA,EAAAA,QAAO,QAAQ,KAAK,IAAI,OAAO,GAAI;AACnC,QAAM,OAAO,MAAM,gBAAmB,QAAQD,QAAO,OAAOC,OAAM;AAClE,SAAO,EAAE,KAAK;AAChB;AA7uBA,IAAAC,oBAIM,mBAwCO;AA5Cb;AAAA;AAAA;AAAA,IAAAA,qBAAkB;AAClB;AACA;AAEA,IAAM,oBAAoB;AAwCnB,IAAM,eAAN,MAAM,cAAgB;AAAA,MAC3B;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,cAAc;AAAA,MACd;AAAA,MAEA;AAAA,aAAgB,WAAW;AAAA;AAAA,MAE3B,YAAY,QAAgBF,QAAe,MAAsB;AAC/D,aAAK,UAAU;AACf,aAAK,SAASA;AACd,aAAK,SAAS;AAAA,UACZ,OAAO;AAAA,UACP;AAAA,UACA,QAAQ,CAAC;AAAA,UACT,OAAO,CAAC;AAAA,UACR,OAAO,CAAC;AAAA,UACR,OAAO,CAAC;AAAA,UACR,UAAU,CAAC;AAAA,UACX,OAAO,CAAC;AAAA,UACR,UAAU,CAAC;AAAA,UACX,OAAO,CAAC;AAAA,UACR,UAAU,CAAC;AAAA,UACX,aAAa,CAAC;AAAA,UACd,aAAa,CAAC;AAAA,UACd,GAAG;AAAA,QACL;AACA,aAAK,SAAS;AACd,aAAK,aAAa;AAClB,aAAK,YAAY;AACjB,aAAK,eAAe;AAAA,MACtB;AAAA,MAEA,kBAAkB;AAChB,aAAK,cAAc;AACnB,eAAO;AAAA,MACT;AAAA,MAEA,gBAAgB,WAA+B;AAC7C,aAAK,gBAAgB;AACrB,eAAO;AAAA,MACT;AAAA,MAEA,WAAW,SAAkB;AAC3B,YAAI,WAAW,MAAM;AACnB,eAAK,WAAW;AAAA,QAClB;AACA,eAAO;AAAA,MACT;AAAA,MAEA,SAAS,SAAiB;AACxB,aAAK,OAAO,MAAO,UAAU;AAC7B,eAAO;AAAA,MACT;AAAA,MAEA,SAAS,OAAgB;AACvB,YAAI,SAAS,MAAM;AACjB,eAAK,SAAS;AAAA,QAChB;AACA,eAAO;AAAA,MACT;AAAA,MAEA,QAAQ,MAAe;AACrB,YAAI,QAAQ,MAAM;AAChB,eAAK,QAAQ;AAAA,QACf;AACA,eAAO;AAAA,MACT;AAAA,MAEA,aAAa,WAAoB;AAC/B,YAAI,aAAa,MAAM;AACrB,eAAK,aAAa;AAAA,QACpB;AACA,eAAO;AAAA,MACT;AAAA,MAEA,YAAY,UAAmB;AAC7B,YAAI,YAAY,MAAM;AACpB,eAAK,YAAY;AAAA,QACnB;AACA,eAAO;AAAA,MACT;AAAA,MAEA,YAAY,UAAmB;AAC7B,YAAI,YAAY,MAAM;AACpB,eAAK,YAAY;AAAA,QACnB;AACA,eAAO;AAAA,MACT;AAAA,MAEA,QAAQ,MAA0B;AAChC,aAAK,QAAQ;AACb,eAAO;AAAA,MACT;AAAA,MAEA,cAAc;AACZ,aAAK,eAAe;AACpB,eAAO;AAAA,MACT;AAAA,MAEA,cAAc;AACZ,aAAK,eAAe;AACpB,eAAO;AAAA,MACT;AAAA,MAEA,UAAU,KAAa,SAAiB;AACtC,aAAK,OAAO,OAAQ,GAAG,IAAI;AAC3B,eAAO;AAAA,MACT;AAAA,MAEA,SAAS,KAAa,OAAe;AACnC,aAAK,OAAO,MAAO,GAAG,IAA
I;AAC1B,eAAO;AAAA,MACT;AAAA,MAEA,SAAS,KAAa,KAAsB,MAAuB;AACjE,aAAK,OAAO,MAAO,GAAG,IAAI;AAAA,UACxB;AAAA,UACA;AAAA,QACF;AACA,eAAO;AAAA,MACT;AAAA,MAEA,SAAS,KAAa,OAAY;AAChC,aAAK,OAAO,MAAO,GAAG,IAAI;AAC1B,eAAO;AAAA,MACT;AAAA,MAEA,YAAY,KAAa,OAAY;AACnC,aAAK,OAAO,SAAU,GAAG,IAAI;AAC7B,eAAO;AAAA,MACT;AAAA,MAEA,SAAS,KAAa,OAAY;AAChC,aAAK,OAAO,MAAO,GAAG,IAAI;AAC1B,eAAO;AAAA,MACT;AAAA,MAEA,YAAY,KAAa,OAAY;AACnC,aAAK,OAAO,SAAU,GAAG,IAAI;AAC7B,eAAO;AAAA,MACT;AAAA,MAEA,SAAS,KAAa,OAAY;AAChC,aAAK,OAAO,MAAO,GAAG,IAAI;AAC1B,eAAO;AAAA,MACT;AAAA,MAEA,YAAY,KAAa,OAAY;AACnC,aAAK,OAAO,SAAU,GAAG,IAAI;AAC7B,eAAO;AAAA,MACT;AAAA,MAEA,eAAe,KAAa,OAAY;AACtC,aAAK,OAAO,YAAa,GAAG,IAAI;AAChC,eAAO;AAAA,MACT;AAAA,MAEA,eAAe,KAAa,OAAY;AACtC,aAAK,OAAO,YAAa,GAAG,IAAI;AAChC,eAAO;AAAA,MACT;AAAA,MAEA,WAAW;AACT,aAAK,OAAO,QAAQ;AAAA,MACtB;AAAA,MAEA,iBAAiB,OAA0B;AACzC,aAAK,OAAO,gBAAgB;AAAA,MAC9B;AAAA,MAEA,aAAa,OAAe;AAC1B,YAAI,KAAK,aAAa;AACpB,iBAAO;AAAA,QACT,OAAO;AACL,iBAAO,MAAM,QAAQ,MAAM,GAAG;AAAA,QAChC;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MASA,WAAW,OAAY,EAAE,QAAQ,WAAW,MAAM,KAAK,IAAS,CAAC,GAAG;AAClE,cAAM,aAAa,CAAC,CAAC,KAAK;AAE1B,cAAM,eAAe,OAAO;AAE5B,YAAI,SAAS,WAAW;AACtB,kBAAQ,MAAM,cAAc,MAAM,YAAY,IAAI;AAAA,QACpD;AAEA,YAAI,CAAC,KAAK,eAAe,UAAU,iBAAiB,UAAU;AAC5D,kBAAQ,GAAG,KAAK,GAAG,QAAQ,+BAA+B,MAAM;AAAA,QAClE;AAGA,YAAI,iBAAiB,YAAY,CAAC,MAAM,KAAK,KAAK,CAAC,MAAM;AACvD,kBAAQ,IAAI,KAAK;AAAA,QACnB,WAAW,cAAc,MAAM;AAC7B,kBAAQ,iBAAiB,WAAW,QAAQ,IAAI,KAAK;AAAA,QACvD;AACA,eAAO;AAAA,MACT;AAAA,MAEA,mBAAmB;AACjB,YAAI,QAAQ;AACZ,iBAAS,WAAW,OAAO,OAAO,KAAK,MAAM,GAAG;AAE9C,cAAI,OAAO,YAAY,UAAU;AAC/B,qBAAS,OAAO,KAAK,OAAO,EAAE;AAAA,UAChC;AAAA,QACF;AACA,eAAO,QAAQ;AAAA,MACjB;AAAA,MAEA,gBAAgB,SAAmC;AACjD,cAAM,aAA6B,CAAC;AACpC,iBAAS,OAAO,OAAO,KAAK,OAAO,GAAG;AACpC,gBAAM,WAAW,mBAAmB,GAAG;AACvC,cAAI,WAAW,QAAQ,GAAG;AACxB,uBAAW,QAAQ,IAAI,WAAW,QAAQ,EAAE,OAAO,QAAQ,GAAG,CAAC;AAAA,UACjE,OAAO;AACL,uBAAW,QAAQ,IAAI,QAAQ,GAAG;AAAA,UACpC;AAAA,QACF;AAEA,cAAM,QAAwB,CAAC;AAC/B,YAAI,QAAQ;AACZ,iBAAS,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,UAAU,GAAG;AACnD,gBAAM,GAAG,OAAO,IAAI,GAAG,EAAE,IAAI;AAAA,QAC/B;AACA,eAAO;AAAA,MACT;AAAA,MAEA,mBAAmB;AACjB,cAAM,UAAU;AAChB,YAAI,QAAQ,KAAK,UAAU,KAAK,OAAO;AACvC,YAAI,QAAQ,QAAQ,KAAK;AACzB,YAAI,kBAAkB;AACtB,cAAM,uBAAuB,EAAE,QAAQ,MAAM,WAAW,MAAM,MAAM,KAAK;AACzE,YAAI,UAAkB;AACtB,YAAI,KAAK,OAAO,MAAO,SAAS;AAC9B,oBAAU,KAAK,OAAO,MAAO;AAC7B,iBAAO,KAAK,OAAO,MAAO;AAAA,QAC5B;AAEA,cAAM,QAAQ,CAAC,KAAa,UAAe;AAEzC,cAAI,CAAC,SAAS,UAAU,GAAG;AACzB,mBAAO;AAAA,UACT;AACA,iBAAO,GAAG,GAAG,IAAI,QAAQ,WAAW,OAAO,oBAAoB,CAAC;AAAA,QAClE;AAEA,cAAM,WAAW,CAAC,KAAa,OAAY,OAAO,UAAU;AAC1D,cAAI,CAAC,SAAU,MAAM,QAAQ,KAAK,KAAK,MAAM,WAAW,GAAI;AAC1D,mBAAO;AAAA,UACT;AACA,cAAI,CAAC,MAAM,QAAQ,KAAK,GAAG;AACzB,mBAAO,GAAG,GAAG,IAAI,KAAK;AAAA,UACxB;AACA,cAAI,YAAY,GAAG,QAAQ,WAAW,MAAM,CAAC,GAAG,EAAE,QAAQ,KAAK,CAAC,CAAC;AACjE,mBAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,yBAAa,IAAI,IAAI,IAAI,QAAQ,WAAW,MAAM,CAAC,GAAG;AAAA,cACpD,QAAQ;AAAA,YACV,CAAC,CAAC;AAAA,UACJ;AACA,iBAAO,GAAG,GAAG,KAAK,SAAS;AAAA,QAC7B;AAEA,cAAM,QAAQ,CAAC,KAAa,UAAe;AACzC,cAAI,CAAC,OAAO;AACV,mBAAO;AAAA,UACT;AACA,kBAAQ,QAAQ,WAAW,OAAO;AAAA,YAChC,QAAQ;AAAA,YACR,WAAW;AAAA,YACX,MAAM;AAAA,UACR,CAAC;AACD,iBAAO,GAAG,GAAG,OAAO,KAAK;AAAA,QAC3B;AAEA,cAAM,cAAc,CAAC,KAAa,UAAe;AAC/C,gBAAM,YAAY,QAAQ,aAAa;AACvC,gBAAM,OAAO,QAAQ,QAAQ;AAC7B,iBAAO,YAAY,SAAS,SAAS,KAAK,OAAO,IAAI;AAAA,QACvD;AAEA,cAAM,cAAc,CAAC,KAAa,UAAe;AAC/C,iBAAO,SAAS,KAAK,OAAO,IAAI;AAAA,QAClC;AAEA,cAAM,QAAQ,CAAC,KAAa,UAAe;AACzC,cAAI,CAAC,OAAO;AACV,mBAAO;AAAA,UACT;AACA,cAAI,CAAC,MAAM,QAAQ,KAAK,GAAG;AACzB,gBAAI,OAAO,UAAU,UAAU;AAC7B,sBAAQ,MAAM,MAAM,GAAG;AAAA,YACzB,OAAO;AACL,qBAAO;AAAA,YACT;AAAA,UACF;A
ACA,cAAI,cAAc,GAAG,QAAQ,WAAW,MAAM,CAAC,GAAG,oBAAoB,CAAC;AACvE,mBAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,2BAAe,OAAO,QAAQ;AAAA,cAC5B,MAAM,CAAC;AAAA,cACP;AAAA,YACF,CAAC;AAAA,UACH;AACA,iBAAO,GAAG,GAAG,KAAK,WAAW;AAAA,QAC/B;AAEA,iBAAS,MACP,WACA,SACA,MACA;AACA,cAAI,QAAQ;AACZ,mBAAS,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,SAAS,GAAG;AAElD,kBAAM,mBAAmB,GAAG;AAC5B,kBAAM,QAAQ,WAAW,QAAQ,aAAa,GAAG,GAAG;AAAA,cAClD,QAAQ;AAAA,YACV,CAAC;AACD,gBAAI,aAAa,QAAQ,KAAK,KAAK;AACnC,gBAAI,cAAc,MAAM;AACtB;AAAA,YACF;AACA,gBAAI,MAAM,SAAS,KAAK,MAAM,SAAS,GAAG;AACxC,oBAAM,OAAO,MAAM,OAAO,KAAK,OAAO,QAAQ,OAAO;AACrD,uBAAS,IAAI,IAAI;AAAA,YACnB;AACA,qBAAS;AACT,gBACG,OAAO,UAAU,YAAY,SAAS,QACtC,OAAO,UAAU,YAAY,UAAU,WAAW,UAAU,IAC7D;AACA,gCAAkB;AAAA,YACpB;AAAA,UACF;AACA,cAAI,MAAM,aAAa;AACrB,mBAAO;AAAA,UACT,OAAO;AACL,qBAAS;AAAA,UACX;AAAA,QACF;AAGA,YAAI,KAAK,OAAO,QAAQ;AACtB,gBAAM,KAAK,OAAO,QAAQ,CAAC,KAAa,UAAe;AACrD,gBAAI,CAAC,OAAO;AACV,qBAAO;AAAA,YACT;AACA,oBAAQ,QAAQ,WAAW,OAAO;AAAA,cAChC,QAAQ;AAAA,cACR,WAAW;AAAA,cACX,MAAM;AAAA,YACR,CAAC;AACD,mBAAO,GAAG,GAAG,IAAI,KAAK;AAAA,UACxB,CAAC;AAAA,QACH;AACA,YAAI,KAAK,OAAO,OAAO;AACrB,gBAAM,KAAK,OAAO,OAAO,CAAC,KAAa,UAAe;AACpD,gBAAI,CAAC,OAAO;AACV,qBAAO;AAAA,YACT;AACA,gBAAI,MAAM,OAAO,QAAQ,MAAM,QAAQ,IAAI;AACzC,qBAAO;AAAA,YACT;AACA,gBAAI,MAAM,QAAQ,QAAQ,MAAM,SAAS,IAAI;AAC3C,qBAAO;AAAA,YACT;AACA,kBAAM,MAAM,QAAQ,WAAW,MAAM,KAAK,oBAAoB;AAC9D,kBAAM,OAAO,QAAQ,WAAW,MAAM,MAAM,oBAAoB;AAChE,mBAAO,GAAG,GAAG,KAAK,GAAG,OAAO,IAAI;AAAA,UAClC,CAAC;AAAA,QACH;AACA,YAAI,KAAK,OAAO,OAAO;AACrB,gBAAM,KAAK,OAAO,OAAO,KAAK;AAAA,QAChC;AACA,YAAI,KAAK,OAAO,OAAO;AACrB,gBAAM,KAAK,OAAO,OAAO,KAAK;AAAA,QAChC;AACA,YAAI,KAAK,OAAO,UAAU;AACxB,gBAAM,KAAK,OAAO,UAAU,CAAC,KAAa,UAAe;AACvD,gBAAI,CAAC,OAAO;AACV,qBAAO;AAAA,YACT;AACA,gBAAI,OAAO,UAAU,WAAW;AAC9B,qBAAO,aAAa,GAAG,IAAI,KAAK;AAAA,YAClC;AACA,mBAAO,IAAI,GAAG,IAAI,QAAQ,WAAW,OAAO,oBAAoB,CAAC;AAAA,UACnE,CAAC;AAAA,QACH;AACA,YAAI,KAAK,OAAO,OAAO;AACrB,gBAAM,KAAK,OAAO,OAAO,CAAC,QAAgB,SAAS,GAAG,aAAa;AAAA,QACrE;AACA,YAAI,KAAK,OAAO,UAAU;AACxB,gBAAM,KAAK,OAAO,UAAU,CAAC,QAAgB,GAAG,GAAG,YAAY;AAAA,QACjE;AACA,YAAI,KAAK,OAAO,OAAO;AACrB,gBAAM,KAAK,OAAO,OAAO,KAAK;AAAA,QAChC;AACA,YAAI,KAAK,OAAO,UAAU;AACxB,gBAAM,KAAK,OAAO,UAAU,QAAQ;AAAA,QACtC;AACA,YAAI,KAAK,OAAO,aAAa;AAC3B,gBAAM,KAAK,gBAAgB,KAAK,OAAO,WAAW,GAAG,WAAW;AAAA,QAClE;AACA,YAAI,KAAK,OAAO,aAAa;AAC3B,gBAAM,KAAK,OAAO,aAAa,WAAW;AAAA,QAC5C;AAEA,YAAI,SAAS;AACX,kBAAQ,KAAK,iBAAiB,IAAI,IAAI,KAAK,MAAM;AACjD,kBAAQ;AACR,gBAAM,EAAE,QAAQ,GAAG,KAAK;AAAA,QAC1B;AACA,YAAI,iBAAiB;AACnB,cAAI,KAAK,OAAO,4CAAiD;AAC/D,mBAAO;AAAA,UACT,WAAW,KAAK,QAAQ,OAAO;AAC7B,mBAAO,MAAM,QAAQ,MAAM,OAAO;AAAA,UACpC;AAAA,QACF;AACA,eAAO;AAAA,MACT;AAAA,MAEA,kBAAkB;AAChB,YAAIF,QAAY;AAAA,UACd,GAAG,KAAK,iBAAiB;AAAA,UACzB,OAAO,KAAK,IAAI,KAAK,QAAQ,cAAa,QAAQ;AAAA,UAClD,cAAc,KAAK;AAAA,QACrB;AACA,YAAI,KAAK,WAAW;AAClB,UAAAA,MAAK,WAAW,KAAK;AAAA,QACvB;AACA,YAAI,KAAK,OAAO;AACd,gBAAM,QAAQ,KAAK,eAAe,eAAe,MAAM;AACvD,gBAAM,OAAO,IAAI,KAAK,SAAS;AAC/B,UAAAA,MAAK,OAAO,GAAG,KAAK,GAAG,KAAK,aAAa,KAAK,KAAK,CAAC,GAAG,IAAI;AAAA,QAC7D;AACA,eAAOA;AAAA,MACT;AAAA,MAEA,MAAM,MAAM;AACV,YAAI,KAAK,OAAO;AACd,gBAAM,KAAK,WAAW,KAAK,KAAK;AAAA,QAClC;AACA,eAAO,MAAM,KAAK,SAAS;AAAA,MAC7B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA,MAAM,WAAW,MAAc;AAG7B,cAAM,kBAAkB,KAAK;AAC7B,cAAM,YAAY,KAAK;AAEvB,aAAK,YAAY;AACjB,YAAI,gBAAgB;AACpB,YAAI,mBAAmB;AACvB,WAAG;AACD,gBAAM,SAAS,KAAK,IAAI,cAAa,UAAU,aAAa;AAC5D,eAAK,SAAS,MAAM;AACpB,gBAAM,EAAE,UAAU,KAAK,IAAI,MAAM,KAAK,SAAS;AAC/C,eAAK,YAAY,QAAQ;AACzB,6BAAmB,KAAK;AACxB,2BAAiB,KAAK;AAAA,QACxB,SAAS,gBAAgB,KAAK,mBAAmB;AAEjD,aAAK,eAAe;AACpB,aAAK,SAAS;AAAA,MAChB;AAAA,MAEA,MAAM,WAAW;AACf,cAAM,EAAE,KAAK,OAAO,IAAI,aAAa;AACrC,cAAM,WAAW,G
AAG,GAAG,IAAI,KAAK,OAAO,6BACrC,KAAK,MACP;AACA,cAAMA,QAAO,KAAK,gBAAgB;AAClC,YAAI;AACF,iBAAO,MAAM,SAAY,UAAUA,OAAM,MAAM;AAAA,QACjD,SAAS,KAAU;AACjB,cAAI,IAAI,WAAW,OAAO,KAAK,eAAe;AAC5C,kBAAM,KAAK,cAAc;AACzB,mBAAO,MAAM,SAAY,UAAUA,OAAM,MAAM;AAAA,UACjD,OAAO;AACL,kBAAM;AAAA,UACR;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA;AAAA;;;ACviBA,eAAsB,kBAAkB;AACtC,QAAM,KAAK,YAAY;AACvB,MAAI;AACJ,MAAI;AACF,gBAAY,MAAM,GAAG,IAAS,kBAAkB;AAAA,EAClD,SAAS,KAAU;AACjB,QAAI,IAAI,WAAW,KAAK;AACtB,kBAAY,EAAE,KAAK,mBAAmB;AAAA,IACxC;AAAA,EACF;AAEA,QAAM,KAAK,SAAU,MAAY;AAC/B,QAAI,KAAK,OAAO,CAAC,KAAK,IAAI,WAAW,KAAK,GAAG;AAC3C;AAAA,IACF;AACA,UAAM,gBAAgB;AAAA,MACpB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,aAAS,IAAI,OAA4B,MAAe;AACtD,eAAS,OAAO,OAAO,KAAK,KAAK,GAAG;AAClC,YAAI,cAAc,SAAS,GAAG,GAAG;AAC/B;AAAA,QACF;AACA,YAAI,SAAS,QAAQ,OAAO,GAAG,IAAI,IAAI,GAAG,KAAK;AAC/C,YAAI,OAAO,MAAM,GAAG,MAAM,UAAU;AAGlC,gBAAM,QAAQ,MAAM,GAAG,EAAE,YAAY,GAAG,EAAE,OAAO,KAAK,CAAC;AAAA,QACzD,WAAW,OAAO,MAAM,GAAG,MAAM,UAAU;AAGzC,gBAAM,QAAQ,MAAM,GAAG,GAAG,EAAE,OAAO,KAAK,CAAC;AAAA,QAC3C,OAAO;AACL,cAAI,MAAM,GAAG,GAAG,MAAM;AAAA,QACxB;AAAA,MACF;AAAA,IACF;AACA,QAAI,IAAI;AAAA,EACV;AAEA,YAAU,UAAU;AAAA,IAClB,kBAAiB,GAAG;AAAA,MAClB,OAAO,GAAG,SAAS;AAAA,MACnB,UAAU;AAAA,QACR,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,QAAM,GAAG,IAAI,SAAS;AACxB;AA7DA;AAAA;AAAA;AAAA;AACA,IAAAK;AAAA;AAAA;;;ACDA;AAAA;AAAA;AAAA;AAAA,IAAAC,sBAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAO,SAAS,eAAe,OAAY,MAAc;AACvD,QAAM,aAAa,KAAK,SAAS;AACjC,MAAI,OAAO,UAAU,UAAU;AAC7B,WAAO,MAAM,WAAW,QAAQ,MAAM,SAAS,SAAS,UAAU;AAAA,EACpE,WAAW,OAAO,UAAU,UAAU;AACpC,WAAO,UAAU;AAAA,EACnB,WAAW,OAAO,UAAU,UAAU;AACpC,WAAO,MAAM,SAAS,UAAU;AAAA,EAClC;AACF;AAEO,SAAS,wBAAwB,OAAY;AAClD,SAAO,eAAe,OAAO,GAAG;AAClC;AAbA,IAAAC,eAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAAC;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAAC;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAAAC,WAAA;AAAA;AAAA;AAAA;AACA,IAAAA;AACA,IAAAC;AACA;AACA;AACA;AAEA,IAAAD;AACA,IAAAE;AACA;AACA,IAAAC;AACA,IAAAC;AAAA;AAAA;;;AC0CO,SAAS,YAAY,OAAe;AACzC,SAAO,SAAS,eAAe,KAAK,CAAC,EAAE,QAAQ,OAAO,GAAG;AAC3D;AAGO,SAAS,eAAe,OAAe;AAC5C,SAAO,MAAM,QAAQ,IAAI,OAAO,gBAAgB,GAAG,GAAG,UAAU;AAClE;AASO,SAAS,YACd,QACA,OAAgC,EAAE,YAAY,MAAM,GACpD;AACA,QAAM,SAAc;AAAA,IAClB,kBAAkB;AAAA,IAClB,kBAAkB;AAAA,IAClB,YAAY;AAAA,IACZ,aAAa,oBAAI;AAAA,IACjB,iBAAiB,oBAAI;AAAA,IACrB,QAAQ,oBAAI;AAAA,EACd;AACA,MAAI,QAAQ;AACV,WAAO,SAAS;AAAA,MACd,QAAQ,eAAe,MAAM;AAAA,IAC/B;AAAA,EACF;AAGA,MAAI,oBAAI,WAAW;AACjB,QAAI,KAAK,cAAc,oBAAI,eAAe;AAKxC,aAAO,WAAW;AAAA,IACpB,OAAO;AACL,aAAO,WAAW,oBAAI;AAAA,IACxB;AAAA,EACF;AAEA,SAAO,IAAI,eAAAC,QAAI,GAAG,MAAM;AAC1B;AAMA,eAAsB,qBAAqB,QAAa,YAAoB;AAC1E,eAAa,eAAe,UAAU;AACtC,MAAI;AACF,UAAM,OACH,WAAW;AAAA,MACV,QAAQ;AAAA,IACV,CAAC,EACA,QAAQ;AAAA,EACb,SAAS,KAAU;AACjB,UAAM,WAAgB,MAAM;AAC5B,UAAM,cAAc,IAAI,eAAe,KACrC,WAAW,IAAI,eAAe;AAChC,QAAI,SAAS,UAAU,GAAG;AACxB,YAAM,SAAS,UAAU;AAAA,IAC3B,WAAW,eAAe,UAAU;AAClC,UAAI,aAAa;AAEf,iBAAS,UAAU,IAAI,OACpB,aAAa;AAAA,UACZ,QAAQ;AAAA,QACV,CAAC,EACA,QAAQ;AACX,cAAM,SAAS,UAAU;AACzB,eAAO,SAAS,UAAU;AAAA,MAC5B;AAAA,IACF,OAAO;AACL,YAAM,IAAI,MAAM,yCAAyC;AAAA,IAC3D;AAAA,EACF;AACF;AAMA,eAAsB,OAAO;AAAA,EAC3B,QAAQ;AAAA,EACR;AAAA,EACA,MAAAC;AAAA,EACA;AAAA
,EACA;AACF,GAAiB;AACf,QAAM,YAAY,SAAS,MAAM,GAAG,EAAE,IAAI;AAC1C,QAAM,YAAY,WAAAC,QAAG,aAAaD,KAAI;AAEtC,QAAM,cAAc,YAAY,UAAU;AAC1C,QAAM,qBAAqB,aAAa,UAAU;AAElD,MAAI,cAAc;AAClB,MAAI,CAAC,aAAa;AAChB,kBAAc,YACV,iBAAiB,UAAU,YAAY,CAAC,IACxC,iBAAiB;AAAA,EACvB;AACA,QAAM,SAAc;AAAA;AAAA,IAElB,KAAK,YAAY,QAAQ;AAAA,IACzB,MAAM;AAAA,IACN,aAAa;AAAA,EACf;AACA,MAAI,YAAY,OAAO,aAAa,UAAU;AAE5C,aAAS,OAAO,OAAO,KAAK,QAAQ,GAAG;AACrC,UAAI,CAAC,SAAS,GAAG,KAAK,OAAO,SAAS,GAAG,MAAM,UAAU;AACvD,eAAO,SAAS,GAAG;AAAA,MACrB;AAAA,IACF;AACA,WAAO,WAAW;AAAA,EACpB;AACA,SAAO,YAAY,OAAO,MAAM,EAAE,QAAQ;AAC5C;AAMA,eAAsB,aACpB,YACA,UACAE,SACA,QAAQ,CAAC,GACT;AACA,QAAM,cAAc,YAAY,UAAU;AAC1C,QAAM,qBAAqB,aAAa,UAAU;AAGlD,MAAI,UAAU,SAAS,KAAK,GAAG;AAC7B,YAAQ;AAAA,MACN,GAAG;AAAA,MACH,aAAa;AAAA,IACf;AAAA,EACF,WAAW,UAAU,SAAS,MAAM,GAAG;AACrC,YAAQ;AAAA,MACN,GAAG;AAAA,MACH,aAAa;AAAA,IACf;AAAA,EACF;AAEA,QAAMC,UAAS;AAAA,IACb,QAAQ,eAAe,UAAU;AAAA,IACjC,KAAK,YAAY,QAAQ;AAAA,IACzB,MAAMD;AAAA,IACN,GAAG;AAAA,EACL;AACA,SAAO,YAAY,OAAOC,OAAM,EAAE,QAAQ;AAC5C;AAMA,eAAsB,SAAS,YAAoB,UAAkB;AACnE,QAAM,cAAc,YAAY,UAAU;AAC1C,QAAMA,UAAS;AAAA,IACb,QAAQ,eAAe,UAAU;AAAA,IACjC,KAAK,YAAY,QAAQ;AAAA,EAC3B;AACA,QAAM,WAAgB,MAAM,YAAY,UAAUA,OAAM,EAAE,QAAQ;AAElE,MAAI,qBAAqB,SAAS,SAAS,WAAW,GAAG;AACvD,WAAO,SAAS,KAAK,SAAS,MAAM;AAAA,EACtC,OAAO;AACL,WAAO,SAAS;AAAA,EAClB;AACF;AAEA,eAAsB,eAAe,YAAoBH,OAAc;AACrE,QAAM,cAAc,YAAY,UAAU;AAC1C,QAAM,OAAO,CAACG,UAAqB,CAAC,MAAM;AACxC,WAAO,YACJ,cAAc;AAAA,MACb,GAAGA;AAAA,MACH,QAAQ,eAAe,UAAU;AAAA,MACjC,QAAQ,YAAYH,KAAI;AAAA,IAC1B,CAAC,EACA,QAAQ;AAAA,EACb;AACA,MAAI,cAAc,OAChB,OACA,UAAiC,CAAC;AACpC,KAAG;AACD,QAAIG,UAAqB,CAAC;AAC1B,QAAI,OAAO;AACT,MAAAA,QAAO,oBAAoB;AAAA,IAC7B;AACA,UAAM,WAAW,MAAM,KAAKA,OAAM;AAClC,QAAI,SAAS,UAAU;AACrB,gBAAU,QAAQ,OAAO,SAAS,QAAQ;AAAA,IAC5C;AACA,kBAAc,CAAC,CAAC,SAAS;AAAA,EAC3B,SAAS;AACT,SAAO;AACT;AAKO,SAAS,gBACd,YACA,KACA,kBAA0B,MAC1B;AACA,QAAM,cAAc,YAAY,YAAY,EAAE,YAAY,KAAK,CAAC;AAChE,QAAMA,UAAS;AAAA,IACb,QAAQ,eAAe,UAAU;AAAA,IACjC,KAAK,YAAY,GAAG;AAAA,IACpB,SAAS;AAAA,EACX;AACA,QAAM,MAAM,YAAY,aAAa,aAAaA,OAAM;AAExD,MAAI,CAAC,oBAAI,eAAe;AAEtB,WAAO;AAAA,EACT,OAAO;AAIL,UAAM,YAAY,IAAI,IAAI,GAAG;AAC7B,UAAMH,QAAO,UAAU;AACvB,UAAM,QAAQ,UAAU;AACxB,WAAO,gBAAgBA,KAAI,GAAG,KAAK;AAAA,EACrC;AACF;AAKA,eAAsB,cAAc,YAAoB,UAAkB;AACxE,eAAa,eAAe,UAAU;AACtC,aAAW,YAAY,QAAQ;AAC/B,QAAM,OAAO,MAAM,SAAS,YAAY,QAAQ;AAChD,QAAM,iBAAa,mBAAK,gBAAgB,OAAG,iBAAG,CAAC;AAC/C,aAAAC,QAAG,cAAc,YAAY,IAAI;AACjC,SAAO;AACT;AAEA,eAAsB,kBAAkB,YAAoBD,OAAc;AACxE,MAAI,gBAAY,mBAAK,gBAAgB,OAAG,iBAAG,CAAC;AAC5C,aAAAC,QAAG,UAAU,SAAS;AACtB,QAAM,UAAU,MAAM,eAAe,YAAYD,KAAI;AACrD,MAAI,UAAU,MAAM,QAAQ;AAAA,IAC1B,QAAQ,IAAI,SAAO,cAAc,YAAY,IAAI,GAAI,CAAC;AAAA,EACxD;AACA,MAAI,QAAQ;AACZ,QAAM,gBAAkC,CAAC;AACzC,WAAS,OAAO,SAAS;AACvB,UAAM,WAAW,IAAI;AACrB,UAAME,UAAS,QAAQ,OAAO;AAC9B,UAAM,eAAe,SAAS,MAAM,GAAG;AACvC,UAAM,OAAO,aAAa,MAAM,GAAG,aAAa,SAAS,CAAC;AAC1D,UAAM,kBAAc,mBAAK,WAAW,GAAG,IAAI;AAC3C,QAAI,aAAa,SAAS,KAAK,CAAC,WAAAD,QAAG,WAAW,WAAW,GAAG;AAC1D,iBAAAA,QAAG,UAAU,aAAa,EAAE,WAAW,KAAK,CAAC;AAAA,IAC/C;AACA,UAAM,cAAc,WAAAA,QAAG,sBAAkB,mBAAK,WAAW,GAAG,YAAY,GAAG;AAAA,MACzE,MAAM;AAAA,IACR,CAAC;AACD,IAAAC,QAAO,KAAK,WAAW;AACvB,kBAAc;AAAA,MACZ,IAAI,QAAQ,CAAC,SAAS,WAAW;AAC/B,QAAAA,QAAO,GAAG,UAAU,OAAO;AAC3B,QAAAA,QAAO,GAAG,SAAS,MAAM;AACzB,oBAAY,GAAG,SAAS,MAAM;AAAA,MAChC,CAAC;AAAA,IACH;AAAA,EACF;AACA,QAAM,QAAQ,IAAI,aAAa;AAC/B,SAAO;AACT;AAKA,eAAsB,WAAW,YAAoB,UAAkB;AACrE,QAAM,cAAc,YAAY,UAAU;AAC1C,QAAM,qBAAqB,aAAa,UAAU;AAClD,QAAMC,UAAS;AAAA,IACb,QAAQ;AAAA,IACR,KAAK,YAAY,QAAQ;AAAA,EAC3B;AACA,SAAO,YAAY,aAAaA,OAAM,EAAE,QAAQ;AAClD;AAEA,eAAsB,YAAY,YAAoB,WAAqB;AACzE,QAAM,cAAc,YAAY,UAAU;AAC1C,QAAM,qBAAqB,aAAa,UAAU;AAClD,QAAMA,U
AAS;AAAA,IACb,QAAQ;AAAA,IACR,QAAQ;AAAA,MACN,SAAS,UAAU,IAAI,CAACH,WAAe,EAAE,KAAK,YAAYA,KAAI,EAAE,EAAE;AAAA,IACpE;AAAA,EACF;AACA,SAAO,YAAY,cAAcG,OAAM,EAAE,QAAQ;AACnD;AAKA,eAAsB,aACpB,YACA,QACc;AACd,eAAa,eAAe,UAAU;AACtC,WAAS,YAAY,MAAM;AAC3B,QAAM,SAAS,YAAY,UAAU;AACrC,QAAM,aAAa;AAAA,IACjB,QAAQ;AAAA,IACR,QAAQ;AAAA,EACV;AAEA,QAAM,0BAA0B,MAAM,OAAO,YAAY,UAAU,EAAE,QAAQ;AAC7E,MAAI,wBAAwB,UAAU,WAAW,GAAG;AAClD;AAAA,EACF;AACA,QAAM,eAAoB;AAAA,IACxB,QAAQ;AAAA,IACR,QAAQ;AAAA,MACN,SAAS,CAAC;AAAA,IACZ;AAAA,EACF;AAEA,0BAAwB,UAAU,QAAQ,CAAC,YAAiB;AAC1D,iBAAa,OAAO,QAAQ,KAAK,EAAE,KAAK,QAAQ,IAAI,CAAC;AAAA,EACvD,CAAC;AAED,QAAM,iBAAiB,MAAM,OAAO,cAAc,YAAY,EAAE,QAAQ;AAExE,MAAI,eAAe,SAAS,WAAW,KAAM;AAC3C,WAAO,aAAa,YAAY,MAAM;AAAA,EACxC;AACF;AAEA,eAAsB,gBACpB,YACA,WACA,YACA;AACA,eAAa,eAAe,UAAU;AACtC,MAAI,UAAU,CAAC;AACf,QAAM,QAAQ,WAAAF,QAAG,YAAY,WAAW,EAAE,eAAe,KAAK,CAAC;AAC/D,WAAS,QAAQ,OAAO;AACtB,UAAMD,QAAO,gBAAY,mBAAK,YAAY,KAAK,IAAI,CAAC;AACpD,UAAM,YAAQ,mBAAK,WAAW,KAAK,IAAI;AACvC,QAAI,KAAK,YAAY,GAAG;AACtB,cAAQ,KAAK,gBAAgB,YAAY,OAAOA,KAAI,CAAC;AAAA,IACvD,OAAO;AACL,cAAQ,KAAK,aAAa,YAAYA,OAAM,WAAAC,QAAG,iBAAiB,KAAK,CAAC,CAAC;AAAA,IACzE;AAAA,EACF;AACA,QAAM,QAAQ,IAAI,OAAO;AACzB,SAAO;AACT;AAEA,eAAsB,sBACpB,KACAD,OACA,UAAU,CAAC,GACX;AACA,EAAAA,QAAO,YAAYA,KAAI;AACvB,QAAM,WAAW,UAAM,mBAAAI,SAAM,KAAK,EAAE,QAAQ,CAAC;AAC7C,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,IAAI,MAAM,uBAAuB,SAAS,UAAU,EAAE;AAAA,EAC9D;AAEA,QAAM,eAAe,SAAS,MAAM,YAAAC,QAAK,YAAY,GAAG,cAAAC,QAAI,QAAQN,KAAI,CAAC;AAC3E;AAEA,eAAsB,gBACpB,KACA,YACAA,OACA;AACA,eAAa,eAAe,UAAU;AACtC,EAAAA,QAAO,YAAYA,KAAI;AACvB,QAAM,WAAW,UAAM,mBAAAI,SAAM,GAAG;AAChC,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,IAAI,MAAM,uBAAuB,SAAS,UAAU,EAAE;AAAA,EAC9D;AAEA,QAAM,cAAU,mBAAK,gBAAgB,GAAGJ,KAAI;AAC5C,QAAM,eAAe,SAAS,MAAM,YAAAK,QAAK,YAAY,GAAG,cAAAC,QAAI,QAAQ,OAAO,CAAC;AAC5E,MAAI,CAAC,oBAAI,OAAO,KAAK,oBAAI,aAAa;AACpC,UAAM,gBAAgB,YAAY,SAASN,KAAI;AAAA,EACjD;AAEA,SAAO;AACT;AAEA,eAAsB,cACpB,YACAA,OACmB;AACnB,eAAa,eAAe,UAAU;AACtC,EAAAA,QAAO,YAAYA,KAAI;AACvB,QAAM,SAAS,YAAY,UAAU;AACrC,QAAMG,UAAS;AAAA,IACb,QAAQ;AAAA,IACR,KAAKH;AAAA,EACP;AACA,SAAO,OAAO,UAAUG,OAAM,EAAE,iBAAiB;AACnD;AAndA,IAEA,gBACA,eACAI,oBACA,eACA,aACA,aACAC,cACAC,YAGAC,cAZM,UAeA,gBAEA,OAmBA,kBASA;AA7CN;AAAA;AAAA;AAEA,qBAAgB;AAChB,oBAAiC;AACjC,IAAAH,qBAAkB;AAClB,oBAAgB;AAChB,kBAAiB;AACjB,kBAA0B;AAC1B,IAAAC,eAAqB;AACrB,IAAAC,aAAe;AACf,IAAAE;AACA,IAAAC;AACA,IAAAF,eAAmB;AACnB,IAAAG;AAbA,IAAM,WAAW,QAAQ,uBAAuB;AAehD,IAAM,qBAAiB,uBAAU,cAAAX,QAAO,QAAQ;AAEhD,IAAM,QAAQ;AAAA,MACZ,wBAAwB,CAAC;AAAA,IAC3B;AAiBA,IAAM,mBAAwB;AAAA,MAC5B,KAAK;AAAA,MACL,MAAM;AAAA,MACN,KAAK;AAAA,MACL,IAAI;AAAA,MACJ,MAAM;AAAA,MACN,IAAI;AAAA,IACN;AAEA,IAAM,uBAAuB;AAAA,MAC3B,iBAAiB;AAAA,MACjB,iBAAiB;AAAA,MACjB,iBAAiB;AAAA,MACjB,iBAAiB;AAAA,IACnB;AAAA;AAAA;;;AC7CA,SAAS,gBAAgB;AACvB,MAAI,CAAC,oBAAI,2BAA2B;AAClC,UAAM,IAAI,MAAM,sCAAsC;AAAA,EACxD;AAEA,MAAI,aAAa;AACf,WAAO;AAAA,EACT;AAEA,gBAAc,OAAO,KAAK,oBAAI,2BAA2B,QAAQ,EAAE;AAAA,IACjE;AAAA,EACF;AAEA,SAAO;AACT;AAnBA,IACA,QAEI,aAkBE,yBAQOY,kBAKA;AAlCb;AAAA;AAAA;AAAA,IAAAC;AACA,aAAwB;AAoBxB,IAAM,0BAA0B,MAAM;AACpC,aAAO;AAAA,QACL,WAAW,oBAAI;AAAA,QACf,kBAAkB,cAAc;AAAA,QAChC,aAAY,oBAAI,KAAK,GAAE,QAAQ,IAAI,MAAO,KAAK;AAAA;AAAA,MACjD;AAAA,IACF;AAEO,IAAMD,mBAAkB,CAAC,UAAkB;AAChD,YAAM,MAAM,OAAO,KAAK;AACxB,aAAc,oBAAa,KAAK,wBAAwB,CAAC;AAAA,IAC3D;AAEO,IAAM,SAAS,CAAC,UAAkB;AACvC,UAAI,SAAS;AACb,UAAI,MAAM,WAAW,GAAG,GAAG;AACzB,iBAAS;AAAA,MACX;AACA,aAAO,GAAG,oBAAI,cAAc,GAAG,MAAM,GAAG,KAAK;AAAA,IAC/C;AAAA;AAAA;;;AClCO,SAAS,kBAAkB,OAAe;AAC/C,SAAO,GAAe,YAAY,KAAK,CAAC;AAC1C;AAOO,SAAS,oBAAoB,OAAe,SAAiB;AAClE,MAAI,OAAO,kBAAkB,KAAK;AAClC,MAAI,oBAAI,gBAAgB;AAEtB,QAAI,SAAS;AACX,cAAQ,MAAM,OA
AO;AAAA,IACvB;AAGA,WAAkB,OAAO,IAAI;AAAA,EAC/B,OAAO;AACL,WAAmB,gBAAgB,oBAAI,kBAAkB,IAAI;AAAA,EAC/D;AACF;AAEO,SAAS,iBAAiB,OAAe,SAAiB;AAC/D,MAAI,UAAU;AACd,MAAI;AACF,eAAW,YAAY;AAAA,EACzB,UAAE;AACA,eAAW;AAAA,MACT;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACA,MAAI,YAAY,aAAa,mBAAmB;AAC9C,aAAS,WAAW;AAAA,EACtB;AACA,SAAO,sBAAsB,mBAAAE,QAAG,OAAO,QAAQ,CAAC;AAClD;AAEO,SAAS,cAAc,OAAe;AAC3C,MAAI,oBAAI,gBAAgB;AACtB,WAAkBC,iBAAgB,KAAK;AAAA,EACzC,OAAO;AACL,WAAmB,gBAAgB,oBAAI,kBAAkB,KAAK;AAAA,EAChE;AACF;AApDA,IAGA;AAHA,IAAAC,YAAA;AAAA;AAAA;AAAA,IAAAC;AACA;AACA;AACA,yBAAe;AACf,IAAAC;AAAA;AAAA;;;ACJA,IAOa,kBAcA;AArBb,IAAAC,eAAA;AAAA;AAAA;AAAA,IAAAC;AACA,IAAAC;AACA;AACA;AAIO,IAAM,mBAAmB,CAAC,MAAc,MAAc,SAAkB;AAC7E,UAAI,OAAO,mBAAmB,MAAM,IAAI;AACxC,UAAI,oBAAI,gBAAgB;AACtB,YAAI,MAAM;AACR,iBAAO,GAAG,IAAI,SAAS,IAAI;AAAA,QAC7B;AACA,eAAkBC,iBAAgB,IAAI;AAAA,MACxC,OAAO;AACL,eAAmB,gBAAgB,oBAAI,oBAAoB,IAAI;AAAA,MACjE;AAAA,IACF;AAIO,IAAM,qBAAqB,CAAC,MAAc,SAAiB;AAChE,UAAI,OAAO,GAAG,IAAI,IAAI,IAAI;AAC1B,UAAI,oBAAI,eAAe;AACrB,cAAM,WAAmB,YAAY;AACrC,eAAO,GAAG,QAAQ,IAAI,IAAI;AAAA,MAC5B;AACA,aAAO;AAAA,IACT;AAAA;AAAA;;;ACpBO,SAAS,iBAAiB,SAAmB;AAClD,MAAI,CAAC,WAAW,CAAC,QAAQ,QAAQ;AAC/B,WAAO,CAAC;AAAA,EACV;AACA,SAAO,QAAQ,IAAI,YAAU;AAC3B,UAAM,QAAQ,eAAe,MAAM;AACnC,UAAM,UAAU,iBAAiB,MAAM;AACvC,WAAO,EAAE,GAAG,QAAQ,OAAO,QAAQ;AAAA,EACrC,CAAC;AACH;AAEA,SAAS,eAAe,QAAgB;AACtC,QAAM,QAAQ,eAAe,MAAM;AACnC,SAAO,aAAa,KAAK;AAC3B;AAEA,SAAS,iBAAiB,QAAoC;AAC5D,QAAM,QAAQ,iBAAiB,MAAM;AACrC,MAAI,CAAC,OAAO;AACV;AAAA,EACF;AACA,SAAO,aAAa,KAAK;AAC3B;AAEA,SAAS,aAAa,OAAe;AACnC,MAAI,oBAAI,gBAAgB;AACtB,WAAkBC,iBAAgB,KAAK;AAAA,EACzC,OAAO;AACL,WAAmB,gBAAgB,oBAAI,oBAAoB,KAAK;AAAA,EAClE;AACF;AAIO,SAAS,eAAe,QAAgB;AAC7C,SAAO,eAAe,QAAQ,eAAe;AAC/C;AAEO,SAAS,iBAAiB,QAAgB;AAE/C,QAAM,eAAe,OAAO,UAAU,aAAa,OAAO;AAC1D,MAAI,CAAC,cAAc;AACjB;AAAA,EACF;AACA,SAAO,eAAe,QAAQ,YAAY;AAC5C;AAEA,SAAS,eAAe,QAAgB,UAAkB;AACxD,QAAM,QAAQ,eAAe,OAAO,IAAI;AACxC,SAAO,GAAG,KAAK,IAAI,QAAQ;AAC7B;AAEO,SAAS,eAAe,YAAoB;AACjD,MAAI,QAAQ,GAAG,UAAU;AACzB,MAAI,oBAAI,eAAe;AACrB,UAAM,WAAmB,YAAY;AACrC,YAAQ,GAAG,QAAQ,IAAI,KAAK;AAAA,EAC9B;AACA,MAAI,oBAAI,gBAAgB;AACtB,YAAQ,WAAW,KAAK;AAAA,EAC1B;AACA,SAAO;AACT;AAtEA;AAAA;AAAA;AAAA,IAAAC;AACA;AACA,IAAAC;AACA;AAAA;AAAA;;;ACHA;AAAA;AAAA;AAAA,IAAAC;AACA,IAAAC;AACA;AAAA;AAAA;;;ACFA,IAAAC,uBAAA;AAAA,SAAAA,sBAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAAAC,oBAAA;AAAA;AAAA;AAAA;AACA,IAAAC;AACA;AAAA;AAAA;;;ACFA;AAAA;AAAA;AAAA;AAAA;AAAA;AAYA,SAAS,YAAY,UAAkB;AACrC,SAAO,aAAAC,QAAK,KAAK,UAAU,QAAQ;AACrC;AAEO,SAAS,yBAAyB,cAAsB;AAC7D,QAAM,QAAQ;AACd,QAAM,QAAQ,cAAc,MAAM,KAAK;AACvC,MAAI,CAAC,OAAO;AACV,YAAQ,KAAK,4CAA4C;AAAA,MACvD;AAAA,IACF,CAAC;AACD,WAAO;AAAA,EACT;AAEA,QAAM,OAAO,CAAC,MAAM,CAAC;AACrB,QAAM,OAAO,MAAM,CAAC;AACpB,MAAI,SAAS,GAAG;AACd,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH,eAAO,EAAE,MAAM,GAAG,IAAI,KAAK,mBAAmB,EAAE;AAAA,MAClD,KAAK;AACH,eAAO,EAAE,MAAM,GAAI,OAAO,MAAQ,CAAC,KAAK,mBAAmB,EAAE;AAAA,MAC/D,KAAK;AACH,eAAO,EAAE,MAAM,GAAI,OAAO,MAAQ,CAAC,KAAK,mBAAmB,EAAE;AAAA,MAC/D,KAAK;AACH,eAAO,EAAE,MAAM,GAAI,OAAO,MAAQ,CAAC,KAAK,mBAAmB,EAAE;AAAA,MAC/D;AACE,eAAO;AAAA,IACX;AAAA,EACF;AAEA,MAAI,OAAO,MAAM,GAAG;AAClB,WAAO,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,IAAI,IAAI,mBAAmB,EAAE;AAAA,EAC5D;AAEA,SAAO,EAAE,MAAM,IAAI,IAAI,IAAI,mBAAmB,OAAO,EAAE;AACzD;AAEO,SAAS,uBAAuB;AACrC,QAAM,WAAW,yBAAyB,oBAAI,oBAAoB;AAClE,QAAM,UAAc,iBAAa,cAAc;AAAA;AAAA,IAE7C,MAAM,UAAU;AAAA,IAChB,MAAM;AAAA,IACN,UAAU,UAAU,qBAAqB;AAAA,IACzC,WAAW;AAAA,IACX,SAAS;AAAA,IACT,iBAAiB;AAAA,EACnB,CAAC;AAED,SAAO;AACT;AAEO,SAAS,mBAAmB;AACjC,QAAM,UAAU,C
AAC;AACjB,QAAM,cAAc,YAAY,2BAA2B;AAC3D,MAAI,WAAAC,QAAG,WAAW,WAAW,GAAG;AAC9B,UAAM,cAAc,WAAAA,QAAG,aAAa,aAAa,OAAO;AACxD,UAAM,eAAe,YAAY,MAAM,IAAI;AAC3C,eAAWC,gBAAe,aAAa,OAAO,OAAK,CAAC,GAAG;AACrD,cAAQ,KAAK,WAAAD,QAAG,aAAaC,YAAW,CAAC;AAAA,IAC3C;AAAA,EACF;AAEA,UAAQ,KAAK,WAAAD,QAAG,aAAa,YAAY,YAAY,CAAC,CAAC;AAEvD,QAAM,kBAAkB,OAAO,OAAO,OAAO;AAC7C,SAAO;AACT;AAhFA,IAAAE,YACAC,cACA,KAKM,cACA,6BAEA;AAVN,IAAAC,eAAA;AAAA;AAAA;AAAA,IAAAF,aAAe;AACf,IAAAC,eAAiB;AACjB,UAAqB;AAErB,IAAAE;AACA,IAAAC;AAEA,IAAM,eAAe;AACrB,IAAM,8BAA8B;AAEpC,IAAM,WAAW,aAAAP,QAAK,KAAK,gBAAgB,GAAG,YAAY;AAAA;AAAA;;;ACV1D,iBACA,oBAMAQ,kBACA,YAMI,cA2DOC,gBAIAC,UAIAC,YAQAC,eAqJE;AA9Ob;AAAA;AAAA;AAAA,kBAAoC;AACpC,yBAAuB;AAGvB,IAAAC;AACA,IAAAC;AACA,IAAAC;AACA,IAAAP,mBAAmB;AACnB,iBAAwB;AAExB,IAAAQ;AAKA,QAAI,CAAC,oBAAI,qBAAqB;AA0D5B,UAAS,gBAAT,SAAuB,KAAU;AAC/B,eAAO,OAAO,QAAQ,YAAY,QAAQ,QAAQ,EAAE,eAAe;AAAA,MACrE,GAES,UAAT,SAAiB,KAAU;AACzB,eAAO,eAAe;AAAA,MACxB,GAES,YAAT,SAAmB,KAAU;AAC3B,eAAO,OAAO,QAAQ;AAAA,MACxB,GAMSJ,gBAAT,SAAsB,MAAsC;AAC1D,YAAI,QAAQ;AACZ,YAAI,UAAiB,CAAC;AACtB,YAAI,UAAU;AAEd,aAAK,QAAQ,SAAO;AAClB,cAAI,UAAU,GAAG,GAAG;AAClB,sBAAU,GAAG,OAAO,IAAI,GAAG,GAAG,UAAU;AAAA,UAC1C;AACA,cAAI,cAAc,GAAG,GAAG;AACtB,oBAAQ,KAAK,GAAG;AAAA,UAClB;AACA,cAAI,QAAQ,GAAG,GAAG;AAChB,oBAAQ;AAAA,UACV;AAAA,QACF,CAAC;AAED,cAAM,WAAWK,aAAY;AAE7B,YAAI,gBAAgB,CAAC;AAErB,wBAAgB;AAAA,UACd,UAAUC,aAAY;AAAA,UACtB,OAAOC,UAAS;AAAA,UAChB,cAAcC,iBAAgB;AAAA,UAC9B,YAAY,UAAU;AAAA,UACtB,cAAc,UAAU;AAAA,UACxB,eAA2B,MAAM;AAAA,QACnC;AAEA,cAAM,OAAO,iBAAAC,QAAO,MAAM,EAAE,OAAO;AACnC,YAAI,MAAM;AACR,2BAAAA,QAAO,OAAO,KAAK,QAAQ,GAAG,mBAAQ,KAAK,aAAa;AAAA,QAC1D;AAEA,cAAM,gBAAqB;AAAA,UACzB,KAAK;AAAA,UACL,KAAK,QAAQ;AAAA,UACb,GAAG;AAAA,QACL;AAEA,YAAI,QAAQ,QAAQ;AAIlB,gBAAM,OAAY,CAAC;AACnB,cAAI,YAAY;AAEhB,mBAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,kBAAM,SAAS,QAAQ,CAAC;AAGxB,kBAAM,SAAS,OAAO;AACtB,gBAAI,QAAQ;AACV,qBAAO,OAAO;AACd,4BAAc,MAAM,IAAI;AAAA,YAC1B,OAAO;AACL,mBAAK,SAAS,IAAI;AAClB;AAAA,YACF;AAAA,UACF;AAEA,cAAI,OAAO,KAAK,IAAI,EAAE,QAAQ;AAC5B,0BAAc,OAAO;AAAA,UACvB;AAAA,QACF;AAEA,eAAO,CAAC,eAAe,OAAO;AAAA,MAChC;AApFS,MAAAZ,iBAAA,eAIAC,WAAA,SAIAC,aAAA,WAQAC;AAzET,YAAM,QAAQ,oBAAI;AAClB,YAAM,cAA6B;AAAA,QACjC;AAAA,QACA,YAAY;AAAA,UACV,OAAO,CAAAU,WAAS;AACd,mBAAO,EAAE,OAAOA,OAAM,YAAY,EAAE;AAAA,UACtC;AAAA,UACA,UAAU,MAAM;AACd,gBAAI,oBAAI,aAAa;AAGnB,qBAAO;AAAA,gBACL,SAAS,oBAAI;AAAA,cACf;AAAA,YACF,OAAO;AACL,qBAAO,CAAC;AAAA,YACV;AAAA,UACF;AAAA,QACF;AAAA,QACA,WAAW,MAAM,iBAAiB,IAAI,KAAK,KAAK,IAAI,CAAC,EAAE,YAAY,CAAC;AAAA,MACtE;AAEA,YAAM,eAAmC,CAAC;AAE1C,mBAAa;AAAA,QACX,oBAAI,MAAM,IACN;AAAA,UACE,YAAQ,mBAAAC,SAAW,EAAE,YAAY,KAAK,CAAC;AAAA,UACvC;AAAA,QACF,IACA,EAAE,QAAQ,QAAQ,QAAQ,MAA2B;AAAA,MAC3D;AAEA,UAAI,oBAAI,aAAa;AACnB,qBAAa,KAAK;AAAA,UAChB,QAAQ,qBAAqB;AAAA,UAC7B;AAAA,QACF,CAAC;AAAA,MACH;AAEA,qBAAe,aAAa,aACxB,YAAAC,SAAK,aAAa,YAAAA,QAAK,YAAY,YAAY,CAAC,QAChD,YAAAA,SAAK,WAAW;AAqGpB,cAAQ,MAAM,IAAI,QAAe;AAC/B,cAAM,CAAC,KAAK,GAAG,IAAIZ,cAAa,GAAG;AACnC,sBAAc,KAAK,KAAK,GAAG;AAAA,MAC7B;AACA,cAAQ,OAAO,IAAI,QAAe;AAChC,cAAM,CAAC,KAAK,GAAG,IAAIA,cAAa,GAAG;AACnC,sBAAc,KAAK,KAAK,GAAG;AAAA,MAC7B;AACA,cAAQ,OAAO,IAAI,QAAe;AAChC,cAAM,CAAC,KAAK,GAAG,IAAIA,cAAa,GAAG;AACnC,sBAAc,KAAK,KAAK,GAAG;AAAA,MAC7B;AACA,cAAQ,QAAQ,IAAI,QAAe;AACjC,cAAM,CAAC,KAAK,GAAG,IAAIA,cAAa,GAAG;AACnC,sBAAc,MAAM,KAAK,GAAG;AAAA,MAC9B;AAOA,cAAQ,QAAQ,IAAI,QAAe;AACjC,cAAM,CAAC,KAAK,GAAG,IAAIA,cAAa,GAAG;AACnC,YAAI,CAAC,IAAI,KAAK;AAEZ,cAAI,MAAM,IAAI,MAAM;AAAA,QACtB;AACA,sBAAc,MAAM,KAAK,GAAG;AAAA,MAC9B;AAEA,cAAQ,QAAQ,IAAI,QAAa;AAC/B,cAAM,CAAC,KAAK,GAAG,IAAIA,cAAa,GAAG;AACnC,sBAAc,MAAM,KAAK,GAAG;AAAA,MAC9B;AAIA,YAAMM,eAAc,MAAM;AACxB,YAAI;AACJ,YAAI;AACF,qBAAmB,YAAY;
AAAA,QACjC,SAAS,GAAQ;AAAA,QAEjB;AACA,eAAO;AAAA,MACT;AAEA,YAAMC,YAAW,MAAM;AACrB,YAAI;AACJ,YAAI;AACF,kBAAgB,SAAS;AAAA,QAC3B,SAAS,GAAG;AAAA,QAEZ;AACA,eAAO;AAAA,MACT;AAEA,YAAMC,mBAAkB,MAAM;AAC5B,YAAI;AACJ,YAAI;AACF,kBAAgB,gBAAgB;AAAA,QAClC,SAAS,GAAG;AAAA,QAEZ;AACA,eAAO;AAAA,MACT;AAEA,YAAMH,eAAc,MAAM;AACxB,YAAI;AACJ,YAAI;AACF,qBAAmB,YAAY;AAAA,QACjC,SAAS,GAAG;AAAA,QAEZ;AACA,eAAO;AAAA,MACT;AAAA,IACF;AAEO,IAAM,SAAS;AAAA;AAAA;;;AC5OtB,SAAS,aAAa,GAAS;AAC7B,SAAO,KAAK,EAAE,eAAe;AAC/B;AAEO,SAAS,SAAS,SAAiB,GAAS;AACjD,MAAI,KAAK,UAAU,SAAS,EAAE,IAAI,KAAK,aAAa,CAAC,GAAG;AACtD;AAAA,EACF;AACA,UAAQ,MAAM,aAAa,OAAO,IAAI,CAAC;AACzC;AAEO,SAAS,iBACd,SACA,IACA,IACA,OACA;AACA,YAAU,GAAG,OAAO,UAAU,EAAE,WAAW,EAAE;AAC7C,WAAS,SAAS,KAAK;AACzB;AAEO,SAAS,QAAQ,SAAiB,GAAS;AAChD,UAAQ,KAAK,YAAY,OAAO,IAAI,CAAC;AACvC;AAzBA,IAAM;AAAN;AAAA;AAAA;AAAA,IAAM,YAAY,CAAC,cAAc;AAAA;AAAA;;;ACAjC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA,IAAAQ;AAAA;AAAA;;;ACDO,SAAS,IAAI,UAAqB,QAAgB;AACvD,QAAM,WAAW,YAAY,UAAU,MAAM;AAC7C,YAAU,KAAK,QAAQ;AACvB,SAAO;AACT;AAEO,SAAS,MAAM,UAA0B;AAC9C,QAAM,MAAM,UAAU,QAAQ,QAAQ;AACtC,MAAI,QAAQ,IAAI;AACd,cAAU,OAAO,KAAK,CAAC;AAAA,EACzB;AACA,gBAAc,QAAQ;AACxB;AAEO,SAAS,UAAU;AACxB,WAAS,YAAY,WAAW;AAC9B,kBAAc,QAAQ;AAAA,EACxB;AACA,cAAY,CAAC;AACf;AArBA,IAAI,WAuBS,uBAIA;AA3Bb;AAAA;AAAA;AAAA,IAAI,YAA8B,CAAC;AAuB5B,IAAM,wBAAN,cAAoC,MAAM;AAAA,MAA1C;AAAA;AACL,aAAgB,OAAO;AAAA;AAAA,IACzB;AAEO,IAAM,uBAAN,MAAM,sBAAqB;AAAA,MAKhC,YAAqB,SAAiB;AAAjB;AAErB,aAAQ,cAAc;AAAA,MAFiB;AAAA,MAJvC,OAAO,UAAU,SAAiB;AAChC,eAAO,IAAI,sBAAqB,OAAO;AAAA,MACzC;AAAA,MAMA,MAAS,GAAe;AACtB,aAAK,WAAW;AAChB,cAAMC,SAAQ,QAAQ,OAAO,OAAO;AACpC,YAAI;AACF,iBAAO,EAAE;AAAA,QACX,UAAE;AACA,gBAAMC,OAAM,QAAQ,OAAO,OAAO;AAClC,eAAK,eAAe,OAAOA,OAAMD,MAAK,IAAI;AAC1C,eAAK,WAAW;AAAA,QAClB;AAAA,MACF;AAAA,MAEA,IAAI,YAAY;AACd,eAAO,KAAK;AAAA,MACd;AAAA,MAEA,aAAa;AACX,YAAI,KAAK,cAAc,KAAK,SAAS;AACnC,gBAAM,IAAI;AAAA,YACR,2BAA2B,KAAK,OAAO,gBAAgB,KAAK,WAAW;AAAA,UACzE;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA;AAAA;;;AC3DA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAAAE,eAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACsCA,SAAS,WAAW,UAAuB;AACzC,SAAO,QAAQ,QAAQ;AACzB;AAEA,SAAS,gBAAgBC,UAAyB,KAAqB;AAErE,MAAI,QAAQ;AACV;AAAA,EACF;AACA,WAAS;AAET,eAAaA,QAAO;AACpB,cAAY;AACZ,WAAS,2BAA2B,GAAG;AACvC,aAAW,MAAM;AACf,IAAAC,MAAK;AAAA,EACP,GAAG,eAAe;AACpB;AAMA,SAASA,MAAK,WAAW,mBAAmB;AAC1C,QAAM,YAAY,oBAAI,cAAc,YAAY,YAAY,eAAAC;AAC5D,MAAIF;AACJ,WAAS;AACT,MAAI,SAAS,WAAW,QAAQ;AAEhC,MAAI,UAAU,WAAW;AACvB;AAAA,EACF;AAEA,MAAI,oBAAI,YAAY;AAClB,YAAQ,QAAQ,IAAI,IAAI,UAAU,gBAAgB,CAAC;AAAA,EACrD;AAEA,EAAAA,WAAU,WAAW,MAAM;AACzB,QAAI,CAAC,WAAW;AACd,sBAAgBA,UAAS,yCAAyC;AAAA,IACpE;AAAA,EACF,GAAG,kBAAkB;AAGrB,MAAI,QAAQ;AACV,WAAO,WAAW;AAAA,EACpB;AACA,QAAM,EAAE,MAAM,KAAK,IAAI,0BAA0B;AACjD,QAAM,OAAO,gBAAgB;AAE7B,MAAI,WAAW;AACb,aAAS,IAAI,UAAU,QAAQ,CAAC,EAAE,MAAM,KAAK,CAAC,GAAG,IAAI;AAAA,EACvD,OAAO;AACL,aAAS,IAAI,UAAU,IAAI;AAAA,EAC7B;AAEA,SAAO,GAAG,OAAO,CAAC,QAAe;AAC/B,QAAI,oBAAI,OAAO,GAAG;AAGhB;AAAA,IACF;AACA,oBAAgBA,UAAS,GAAG;AAAA,EAC9B,CAAC;AACD,SAAO,GAAG,SAAS,CAAC,QAAe;AACjC,oBAAgBA,UAAS,GAAG;AAAA,EAC9B,CAAC;AACD,SAAO,GAAG,WAAW,MAAM;AACzB,YAAQ,IAAI,0BAA0B,QAAQ,EAAE;AAChD,iBAAaA,QAAO;AACpB,gBAAY;AAAA,EACd,CAAC;AACD,UAAQ,QAAQ,IAAI;AACtB;AAEA,SAAS,kBAAkB,WAAmB,mBAAmB;AAC/D,SAAO,IAAI,QAAQ,aAAW;AAC5B,QAAI,WAAW,QAAQ,KAAK,MAAM;AAChC,MAAAC,MAAK;AAAA,IACP,WAAW,WAAW;AACpB,cAAQ,EAAE;AACV;AAAA,IACF;AAEA,UAAM,WAAkB,IAAI,MAAM;AAChC,UAAI,WAAW;AACb,QAAO,MAAM,QAAQ;AACrB,gBAAQ,EAAE;AAAA,MACZ;AAAA,IACF,GAAG,GAAG;AAAA,EACR,CAAC;AACH;AASA,SAAS,gBAAgBE,SAAa,QAAsB;AAC1D,SAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,UAAM,aAAa,oBAAI,IAAI;AAC3B,IAAAA,QAAO,GAAG,QAA
Q,CAACC,UAAmB;AACpC,MAAAA,MAAK,QAAQ,SAAO;AAClB,mBAAW,IAAI,GAAG;AAAA,MACpB,CAAC;AAAA,IACH,CAAC;AACD,IAAAD,QAAO,GAAG,SAAS,CAAC,QAAe;AACjC,aAAO,GAAG;AAAA,IACZ,CAAC;AACD,IAAAA,QAAO,GAAG,OAAO,YAAY;AAC3B,YAAM,YAAsB,MAAM,KAAK,UAAU;AACjD,UAAI;AACF,YAAI,cAAc,CAAC;AACnB,iBAAS,OAAO,WAAW;AACzB,sBAAY,KAAK,OAAO,IAAI,GAAG,CAAC;AAAA,QAClC;AACA,cAAM,YAAY,MAAM,QAAQ,IAAI,WAAW;AAC/C;AAAA,UACE,UAAU,IAAI,UAAQ;AAAA,YACpB,KAAK,eAAe,GAAG;AAAA,YACvB,OAAO,KAAK,MAAM,UAAU,MAAM,CAAC;AAAA,UACrC,EAAE;AAAA,QACJ;AAAA,MACF,SAAS,KAAK;AACZ,eAAO,GAAG;AAAA,MACZ;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AACH;AAxKA,IACA,gBAEI,WAoBE,iBACA,oBACA,WACA,mBAGF,QACA,SACA,WA2IE,cAiIC;AA3SP;AAAA;AAAA;AAAA,IAAAE;AACA,qBAAkB;AAWlB,IAAAC;AAQA;AACA,IAAAC;AAjBA,QAAI,oBAAI,YAAY;AAClB,UAAI;AAEF,oBAAY,QAAQ,cAAc;AAAA,MACpC,SAAS,KAAK;AACZ,gBAAQ,IAAI,wBAAwB;AAAA,MACtC;AAAA,IACF;AAYA,IAAM,kBAAkB;AACxB,IAAM,qBAAqB;AAC3B,IAAM,YAAY,oBAAI;AACtB,IAAM;AAGN,IAAI,SAAS;AACb,IAAI,UAAkC,CAAC;AACvC,IAAI,YAAY;AAGhB,QAAI,oBAAI,YAAY;AAClB,kBAAY;AAAA,IACd;AAsIA,IAAM,eAAN,MAAmB;AAAA,MAIjB,YAAY,IAAY,WAA0B,MAAM;AACtD,aAAK,MAAM;AACX,aAAK,UAAU,YAAY;AAAA,MAC7B;AAAA,MAEA,YAAY;AACV,eAAO,WAAW,KAAK,OAAO;AAAA,MAChC;AAAA,MAEA,MAAM,OAAO;AACX,iBAAS;AACT,QAAAN,MAAK,KAAK,OAAO;AACjB,cAAM,kBAAkB,KAAK,OAAO;AACpC,YAAI,KAAK,WAAW,CAAC,oBAAI,OAAO,GAAG;AACjC,eAAK,UAAU,EAAE,OAAO,KAAK,OAAO;AAAA,QACtC;AACA,eAAO;AAAA,MACT;AAAA,MAEA,MAAM,SAAS;AACb,iBAAS;AACT,aAAK,UAAU,EAAE,WAAW;AAAA,MAC9B;AAAA,MAEA,MAAM,KAAK,MAAM,IAAkB;AACjC,cAAM,KAAK,KAAK;AAChB,cAAM,GAAG,EAAE,GAAGO,UAAS,GAAG,GAAG;AAC7B,YAAIL;AACJ,YAAI,WAAW;AACb,cAAI,OAAO,KAAK,UAAU,EAAE,MAAM,QAAQ;AAC1C,UAAAA,UAAS,KAAK,CAAC,EAAE,WAAW,EAAE,OAAO,MAAM,KAAK,OAAO,IAAI,CAAC;AAAA,QAC9D,OAAO;AACL,UAAAA,UAAS,KAAK,UAAU,EAAE,WAAW,EAAE,OAAO,MAAM,KAAK,OAAO,IAAI,CAAC;AAAA,QACvE;AACA,eAAO,gBAAgBA,SAAQ,KAAK,UAAU,CAAC;AAAA,MACjD;AAAA,MAEA,MAAM,KAAK,SAAiB;AAC1B,cAAM,KAAK,KAAK;AAChB,eAAO,KAAK,UAAU,EAAE,KAAK,YAAY,IAAI,OAAO,CAAC;AAAA,MACvD;AAAA,MAEA,MAAM,OAAO,KAAa;AACxB,cAAM,KAAK,KAAK;AAChB,eAAO,MAAM,KAAK,UAAU,EAAE,OAAO,YAAY,IAAI,GAAG,CAAC;AAAA,MAC3D;AAAA,MAEA,MAAM,IAAI,KAAa;AACrB,cAAM,KAAK,KAAK;AAChB,YAAI,WAAW,MAAM,KAAK,UAAU,EAAE,IAAI,YAAY,IAAI,GAAG,CAAC;AAE9D,YAAI,YAAY,QAAQ,SAAS,KAAK;AACpC,mBAAS,MAAM;AAAA,QACjB;AAEA,YAAI;AACF,iBAAO,KAAK,MAAM,QAAQ;AAAA,QAC5B,SAAS,KAAK;AACZ,iBAAO;AAAA,QACT;AAAA,MACF;AAAA,MAEA,MAAM,QAAWC,OAAgB;AAC/B,cAAM,KAAK,KAAK;AAChB,YAAIA,MAAK,WAAW,GAAG;AACrB,iBAAO,CAAC;AAAA,QACV;AACA,cAAM,eAAeA,MAAK,IAAI,SAAO,YAAY,IAAI,GAAG,CAAC;AACzD,YAAI,WAAW,MAAM,KAAK,UAAU,EAAE,KAAK,YAAY;AACvD,YAAI,MAAM,QAAQ,QAAQ,GAAG;AAC3B,cAAI,QAA2B,CAAC;AAChC,cAAI,QAAQ;AACZ,mBAAS,UAAU,UAAU;AAC3B,gBAAI,QAAQ;AACV,kBAAI;AACJ,kBAAI;AACF,yBAAS,KAAK,MAAM,MAAM;AAAA,cAC5B,SAAS,KAAK;AACZ,yBAAS;AAAA,cACX;AACA,oBAAMA,MAAK,KAAK,CAAC,IAAI;AAAA,YACvB;AACA;AAAA,UACF;AACA,iBAAO;AAAA,QACT,OAAO;AACL,gBAAM,IAAI,MAAM,qBAAqB,QAAQ,EAAE;AAAA,QACjD;AAAA,MACF;AAAA,MAEA,MAAM,MAAM,KAAa,OAAY,gBAA+B,MAAM;AACxE,cAAM,KAAK,KAAK;AAChB,YAAI,OAAO,UAAU,UAAU;AAC7B,kBAAQ,KAAK,UAAU,KAAK;AAAA,QAC9B;AACA,cAAM,cAAc,YAAY,IAAI,GAAG;AACvC,cAAM,KAAK,UAAU,EAAE,IAAI,aAAa,KAAK;AAC7C,YAAI,eAAe;AACjB,gBAAM,KAAK,UAAU,EAAE,OAAO,aAAa,aAAa;AAAA,QAC1D;AAAA,MACF;AAAA,MAEA,MAAM,OAAO,KAAa;AACxB,cAAM,KAAK,KAAK;AAChB,cAAM,cAAc,YAAY,IAAI,GAAG;AACvC,eAAO,KAAK,UAAU,EAAE,IAAI,WAAW;AAAA,MACzC;AAAA,MAEA,MAAM,UAAU,KAAa,eAA8B;AACzD,cAAM,KAAK,KAAK;AAChB,cAAM,cAAc,YAAY,IAAI,GAAG;AACvC,cAAM,KAAK,UAAU,EAAE,OAAO,aAAa,aAAa;AAAA,MAC1D;AAAA,MAEA,MAAM,OAAO,KAAa;AACxB,cAAM,KAAK,KAAK;AAChB,cAAM,KAAK,UAAU,EAAE,IAAI,YAAY,IAAI,GAAG,CAAC;AAAA,MACjD;AAAA,MAEA,MAAM,QAAQ;AACZ,YAAI,QAAQ,MAAM,KAAK,KAAK;AAC5B,cAAM,QAAQ,IAAI,MAAM,IAAI,CAAC,QAAa,KAAK,OAAO,IAAI,GAAG,CAAC,CAAC;
AAAA,MACjE;AAAA,IACF;AAEA,IAAO,gBAAQ;AAAA;AAAA;;;AC3Sf;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,cAAAK;AAAA,EAAA;AAAA;AAaA,eAAsBA,QAAO;AAC3B,eAAa,MAAM,IAAI,sCAAiC,EAAE,KAAK;AAC/D,kBAAgB,MAAM,IAAI,sCAA+B,EAAE,KAAK;AAChE,cAAY,MAAM,IAAI,8CAAmC,EAAE,KAAK;AAChE,gBAAc,MAAM,IAAI,8CAAoC,EAAE,KAAK;AACnE,eAAa,MAAM,IAAI,iCAA4B,EAAE,KAAK;AAC1D,uBAAqB,MAAM,IAAI,gDAAoC,EAAE,KAAK;AAC1E,iBAAe,MAAM,IAAI,4CAAkC,EAAE,KAAK;AAClE,wBAAsB,MAAM,IAAI,uCAAgC,EAAE,KAAK;AACvE,iBAAe,MAAM,IAAI;AAAA;AAAA;AAAA,EAGzB,EAAE,KAAK;AACT;AAEA,eAAsB,WAAW;AAC/B,MAAI;AAAY,UAAM,WAAW,OAAO;AACxC,MAAI;AAAe,UAAM,cAAc,OAAO;AAC9C,MAAI;AAAW,UAAM,UAAU,OAAO;AACtC,MAAI;AAAa,UAAM,YAAY,OAAO;AAC1C,MAAI;AAAoB,UAAM,mBAAmB,OAAO;AACxD,MAAI;AAAY,UAAM,WAAW,OAAO;AACxC,MAAI;AAAc,UAAM,aAAa,OAAO;AAC5C,MAAI;AAAqB,UAAM,oBAAoB,OAAO;AAC1D,MAAI;AAAc,UAAM,aAAa,OAAO;AAC9C;AAMA,eAAsB,gBAAgB;AACpC,MAAI,CAAC,YAAY;AACf,UAAMA,MAAK;AAAA,EACb;AACA,SAAO;AACT;AAEA,eAAsB,mBAAmB;AACvC,MAAI,CAAC,eAAe;AAClB,UAAMA,MAAK;AAAA,EACb;AACA,SAAO;AACT;AAEA,eAAsB,eAAe;AACnC,MAAI,CAAC,WAAW;AACd,UAAMA,MAAK;AAAA,EACb;AACA,SAAO;AACT;AAEA,eAAsB,iBAAiB;AACrC,MAAI,CAAC,aAAa;AAChB,UAAMA,MAAK;AAAA,EACb;AACA,SAAO;AACT;AAEA,eAAsB,wBAAwB;AAC5C,MAAI,CAAC,oBAAoB;AACvB,UAAMA,MAAK;AAAA,EACb;AACA,SAAO;AACT;AAEA,eAAsB,gBAAgB;AACpC,MAAI,CAAC,YAAY;AACf,UAAMA,MAAK;AAAA,EACb;AACA,SAAO;AACT;AAEA,eAAsB,kBAAkB;AACtC,MAAI,CAAC,cAAc;AACjB,UAAMA,MAAK;AAAA,EACb;AACA,SAAO;AACT;AAEA,eAAsB,kBAAkB;AACtC,MAAI,CAAC,cAAc;AACjB,UAAMA,MAAK;AAAA,EACb;AACA,SAAO;AACT;AAEA,eAAsB,yBAAyB;AAC7C,MAAI,CAAC,qBAAqB;AACxB,UAAMA,MAAK;AAAA,EACb;AACA,SAAO;AACT;AAzGA,IAGI,YACF,eACA,WACA,aACA,oBACA,YACA,cACA,cACA;AAXF;AAAA;AAAA;AAAA;AACA,IAAAC;AAuCA,YAAQ,GAAG,QAAQ,YAAY;AAC7B,YAAM,SAAS;AAAA,IACjB,CAAC;AAAA;AAAA;;;AC1CD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAAC;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAAC;AAAA,EAAA;AAAA,cAAAC;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAAC;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,eAAAC;AAAA,EAAA,aAAAC;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAeAC;;;ACfA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,aAAAC;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,aAAAC;AAAA,EAAA;AAAA;AAAA;AAAA;;;ACAAC;AACA;AAGA,SAAS,kBAAkB,KAAa;AACtC,QAAM,WAAW,YAAY;AAC7B,SAAO,GAAG,GAAG,IAAI,QAAQ;AAC3B;AAEA,IAAqB,YAArB,MAA+B;AAAA,EAG7B,YAAY,SAA6B,QAAW;AAClD,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,MAAM,YAAY;AAChB,WAAO,CAAC,KAAK,SAAS,MAAY,eAAe,IAAI,KAAK;AAAA,EAC5D;AAAA,EAEA,MAAM,KAAK,SAAiB;AAC1B,UAAM,SAAS,MAAM,KAAK,UAAU;AACpC,WAAO,OAAO,KAAK,OAAO;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,IAAI,KAAa,OAAO,EAAE,YAAY,KAAK,GAAG;AAClD,UAAM,KAAK,aAAa,kBAAkB,GAAG,IAAI;AACjD,UAAM,SAAS,MAAM,KAAK,UAAU;AACpC,WAAO,OAAO,IAAI,GAAG;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,MACJ,KACA,OACA,MAAqB,MACrB,OAAO,EAAE,YAAY,KAAK,GAC1B;AACA,UAAM,KAAK,aAAa,kBAAkB,GAAG,IAAI;AACjD,UAAM,SAAS,MAAM,KAAK,UAAU;AACpC,UAAM,OAAO,MAAM,KAAK,OAAO,GAAG;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAAO,KAAa,OAAO,EAAE,YAAY,KAAK,GAAG;AACrD,UAAM,KAAK,aAAa,kBAAkB,GAAG,IAAI;AACjD,UAAM,SAAS,MAAM,KAAK,UAAU;AACpC,WAAO,OAAO,OAAO,GAAG;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UACJ,KACA,KACA,SACA,OAAO,EAAE,YAAY,KAAK,GAC1B;AACA,UAAM,cAAc,MAAM,KAAK,IAAI,KAAK,IAAI;AAC5C,QAAI,aAAa;AACf,aAAO;AAAA,IACT;AAEA,QAAI;AACF,YAAM,eAAe,MAAM,QAAQ;AAEnC,YAAM,KAAK,MAAM,KAAK,cAAc,KAAK,IAAI;AAC7C,aAAO;AAAA,IACT,SAAS,KAAK;AACZ,cAA
Q,MAAM,kCAAkC,GAAG;AACnD,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,MAAM,UAAU,KAAa,OAAO,EAAE,QAAQ,KAAK,GAAG;AACpD,UAAM,SAAS,MAAM,KAAK,UAAU;AACpC,QAAI;AACF,YAAM,OAAO,OAAO,kBAAkB,GAAG,CAAC;AAAA,IAC5C,SAAS,KAAK;AACZ,cAAQ,MAAM,0BAA0B,GAAG;AAC3C,YAAM;AAAA,IACR;AAAA,EACF;AACF;;;ADzFA,IAAM,UAAU,IAAI,UAAU;AAEvB,IAAK,WAAL,kBAAKC,cAAL;AACL,EAAAA,UAAA,eAAY;AACZ,EAAAA,UAAA,kBAAe;AACf,EAAAA,UAAA,uBAAoB;AACpB,EAAAA,UAAA,sBAAmB;AACnB,EAAAA,UAAA,YAAS;AACT,EAAAA,UAAA,uBAAoB;AACpB,EAAAA,UAAA,uBAAoB;AAPV,SAAAA;AAAA,GAAA;AAUL,IAAK,MAAL,kBAAKC,SAAL;AACL,EAAAA,UAAA,gBAAa,OAAb;AACA,EAAAA,UAAA,cAAW,QAAX;AACA,EAAAA,UAAA,aAAU,SAAV;AAHU,SAAAA;AAAA,GAAA;AAML,IAAM,OAAO,IAAI,SACtB,QAAQ,KAAK,GAAG,IAAI;AACf,IAAMC,OAAM,IAAI,SACrB,QAAQ,IAAI,GAAG,IAAI;AACd,IAAM,QAAQ,IAAI,SACvB,QAAQ,MAAM,GAAG,IAAI;AAChB,IAAM,UAAU,IAAI,SACzB,QAAQ,OAAO,GAAG,IAAI;AACjB,IAAM,YAAY,IAAI,SAC3B,QAAQ,UAAU,GAAG,IAAI;AACpB,IAAM,YAAY,IAAI,SAC3B,QAAQ,UAAU,GAAG,IAAI;;;AE/B3B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAAC;AACAC;AAEO,SAAS,YAAY,UAAkB;AAC5C,SAAO,MAAM,gBAAgB,QAAQ,CAAC;AACxC;;;ACLAC;AAOA;AAKAC;AAEO,SAAS,eAAe,KAAa;AAC1C,QAAM,WAAW,YAAY;AAE7B,MAAI,cAAc,GAAG;AACnB,UAAM,OAAO,IAAI,QAAQ,GAAG,MAAM,KAAK,MAAM;AAC7C,WAAO,GAAG,IAAI,YAAY,QAAQ;AAAA,EACpC;AAEA,SAAO;AACT;AAEO,IAAM,oBAAoB,CAAC,OAAe,SAAe;AAC9D,MAAI;AACJ,MAAI,MAAM;AACR,mBAAe,KAAK,YAAY;AAAA,EAClC,OAAO;AACL,mBAAe,YAAY;AAAA,EAC7B;AACA,QAAM,WAAW,qBAAqB,KAAK,KAAK;AAChD,SAAO,aAAa;AACtB;AAEA,IAAM,iBAAiB,OAAO,OAAO,wBAAwB;AAEtD,IAAM,qBAAqB,CAChC,KACA,SACuB;AAEvB,MAAI,CAAC,cAAc,GAAG;AACpB,WAAO;AAAA,EACT;AAGA,MAAI,KAAK,kBAAkB,QAAW;AACpC,SAAK,gBAAgB;AAAA,EACvB;AACA,MAAI,CAAC,KAAK,mBAAmB;AAC3B,SAAK,oBAAoB;AAAA,EAC3B;AACA,MAAI,CAAC,KAAK,mBAAmB;AAC3B,SAAK,oBAAoB,CAAC;AAAA,EAC5B;AAEA,QAAM,YAAY,CAAC,aAAuC;AAExD,QAAI,KAAK,mBAAmB,SAAS,QAAQ,GAAG;AAC9C,aAAO;AAAA,IACT;AACA,QAAI,KAAK,mBAAmB,SAAS,QAAQ,GAAG;AAC9C,aAAO;AAAA,IACT;AAAA,EACF;AAGA,MAAI,2BAAuC,GAAG;AAC5C,UAAM,eAAe,IAAI,MAAM;AAC/B,QAAI,cAAc;AAChB,aAAO;AAAA,IACT;AAAA,EACF;AAGA,MAAI,+BAAyC,GAAG;AAC9C,UAAM,iBAAiB,IAAI,QAAQ,8CAAwB;AAC3D,QAAI,gBAAgB;AAClB,aAAO;AAAA,IACT;AAAA,EACF;AAGA,MAAI,6BAAwC,GAAG;AAC7C,UAAM,gBAAgB,IAAI,QAAQ,MAAM;AACxC,QAAI,eAAe;AACjB,aAAO;AAAA,IACT;AAAA,EACF;AAGA,MAAI,qCAA4C,GAAG;AAEjD,QAAI;AACJ,QAAI;AACF,qBAAe,IAAI,IAAI,eAAe,CAAC,EAAE,KAAK,MAAM,GAAG,EAAE,CAAC;AAAA,IAC5D,SAAS,KAAU;AAEjB,UAAI,IAAI,SAAS,mBAAmB;AAClC,cAAM;AAAA,MACR;AAAA,IACF;AAEA,UAAM,cAAc,IAAI;AAExB,QAAI,gBAAgB,YAAY,SAAS,YAAY,GAAG;AACtD,YAAM,WAAW,YAAY;AAAA,QAC3B;AAAA,QACA,YAAY,QAAQ,IAAI,YAAY,EAAE;AAAA,MACxC;AACA,UAAI,UAAU;AACZ,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AAGA,MAAI,2BAAuC,GAAG;AAE5C,UAAM,QAAQ,IAAI,QAAQ;AAAA,MACxB,CAAC,MAAW,CAAC,CAAC,EAAE,WAAW,KAAK,CAAC,MAAW,EAAE,SAAS,UAAU;AAAA,IACnE;AAGA,UAAM,SAAS,IAAI;AACnB,QAAI;AACJ,QAAI,OAAO,SAAS,GAAG,GAAG;AACxB,YAAM,OAAO,MAAM,GAAG,EAAE,CAAC;AAAA,IAC3B,OAAO;AACL,YAAM;AAAA,IACR;AAEA,QAAI,OAAO;AACT,YAAMC,UAAS,MAAM,OAAO,KAAK,MAAM,SAAS,GAAG,GAAG,CAAC,CAAC;AACxD,UAAIA,QAAO,UAAU;AACnB,eAAOA,QAAO;AAAA,MAChB;AAAA,IACF;AAAA,EACF;AAEA,MAAI,CAAC,KAAK,eAAe;AACvB,QAAI,MAAM,KAAK,mBAAmB;AAAA,EACpC;AAEA,SAAO;AACT;;;AHjJAC;;;AIFA;AAAA;AAAA;AAAA;AAAA,eAAAC;AAAA;;;ACAA,IAAAC,iBAAA;AAAA,SAAAA,gBAAA;AAAA;AAAA;AAAA;AAAA;;;ACAAC;AACAC;AAEO,SAAS,gBAAgB;AAC9B,SAAO,MAAM,gBAAgB,cAAc,IAAI;AACjD;;;ADJAC;AACAC;AAWA,eAAsB,eAAe,QAAgB;AACnD,MAAI,CAAC,oBAAI,eAAe;AACtB,WAAO;AAAA,EACT;AAEA,QAAM,OAAO,MAAM,WAAW,MAAM;AACpC,SAAO,KAAK;AACd;AAEA,eAAe,WAAW,WAA0C;AAClE,QAAM,KAAK,cAAc;AACzB,SAAO,GAAG,IAAI,SAAS;AACzB;AAIA,SAAS,aAAa,IAAY,UAAoC;AACpE,SAAO;AAAA,IACL,KAAK;AAAA,IACL;AAAA,EACF;AACF;AAEA,SAAS,gBACP,QACA,OACA,UACqB;AACrB,SAAO;AAAA,IACL,KAAK;AAAA,IACL;AAAA
,IACA;AAAA,EACF;AACF;AAEA,SAAS,gBACP,OACA,OACA,QACA,UACqB;AACrB,SAAO;AAAA,IACL,KAAK;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAKA,eAAe,WAAW,WAAmB,UAA8B;AACzE,QAAM,KAAK,cAAc;AACzB,MAAI;AAEJ,MAAI;AACF,UAAM,GAAG,IAAI,SAAS;AAAA,EACxB,SAAS,GAAQ;AACf,QAAI,EAAE,WAAW,KAAK;AACpB,aAAO,SAAS;AAChB,YAAM,GAAG,IAAI,IAAI;AAAA,IACnB,OAAO;AACL,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAEA,eAAsB,QACpB,UACA,QACA,OACA,OACA;AACA,QAAM,WAAW;AAAA,IACf,WAAW,QAAQ,MAAM,aAAa,QAAQ,QAAQ,CAAC;AAAA,IACvD,WAAW,OAAO,MAAM,gBAAgB,QAAQ,OAAO,QAAQ,CAAC;AAAA,EAClE;AAEA,MAAI,OAAO;AACT,aAAS;AAAA,MACP,WAAW,OAAO,MAAM,gBAAgB,OAAO,OAAO,QAAQ,QAAQ,CAAC;AAAA,IACzE;AAAA,EACF;AAEA,QAAM,QAAQ,IAAI,QAAQ;AAC5B;AAIA,eAAsB,WAAW,MAAY;AAC3C,QAAM,KAAK,cAAc;AACzB,QAAMC,QAAO,CAAC,KAAK,KAAM,KAAK,KAAK;AACnC,QAAM,WAAW,MAAM,GAAG,QAAQ;AAAA,IAChC,MAAAA;AAAA,IACA,cAAc;AAAA,EAChB,CAAC;AACD,QAAM,WAAW,SAAS,KAAK,IAAI,CAAC,QAAa;AAC/C,WAAO;AAAA,MACL,GAAG,IAAI;AAAA,MACP,UAAU;AAAA,IACZ;AAAA,EACF,CAAC;AACD,QAAM,GAAG,SAAS,QAAQ;AAC5B;;;AErHA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAAC;AAEA;;;ACFA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAoB;AACpB;AACA;AACAC;AACAC;;;ACJA,IAAAC,iBAAA;AAAA,SAAAA,gBAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAAC;AAEA;AACA,IAAM,SAAS,oBAAI,YAAY,QAAQ,UAAU,IAAI,QAAQ,QAAQ;AAErE,IAAM,cAAc,oBAAI,eAAe;AAEvC,eAAsB,KAAK,MAAc;AACvC,QAAM,OAAO,MAAM,OAAO,QAAQ,WAAW;AAC7C,SAAO,OAAO,KAAK,MAAM,IAAI;AAC/B;AAEA,eAAsB,QAAQ,MAAc,WAAmB;AAC7D,SAAO,OAAO,QAAQ,MAAM,SAAS;AACvC;;;ACdAC;AACAC;AACAC;AAEAC;AACA;AAQA,0BAA4B;AAE5B,IAAMC,gCAAgC;AACtC,IAAM,kBAAkB;AAExB,IAAM,uBAAuB;AAC7B,IAAM,iBAAiB;AACvB,IAAM,qBAAqB,GAAG,cAAc;AAC5C,IAAM,oBAAoB;AAE1B,SAAS,aAAa,eAAmC;AACvD,SAAO,iBAAiB,cAAc,WAAWA,WAAU,IACvD,gBACA;AACN;AAEA,eAAsB,cAAc,KAAU;AAC5C,QAAM,SAAS,IAAI,KAAK,MAAM,GAAG,EAAE,CAAC;AACpC,MAAI,iBAAiB,IAAI,OAAO,YAAY,CAAC;AAE7C,MAAI,WAAuC,YAAY;AACvD,MAAI,CAAC,oBAAI,MAAM,KAAK,oBAAI,eAAe;AAIrC,eAAmB,mBAAmB,KAAK;AAAA,MACzC,mBAAmB,4BAAmC;AAAA,IACxD,CAAC;AAAA,EACH;AAGA,QAAM,OAAc,MAAc;AAAA,IAChC;AAAA,IACA,MAAM,WAAW,EAAE,KAAK,MAAM,CAAC;AAAA,EACjC;AACA,QAAM,MAAM,KAAK;AAAA,IACf,OAAK,EAAE,OAAO,EAAE,IAAI,YAAY,MAAM;AAAA,EACxC,EAAE,CAAC;AAEH,SAAO,OAAO,IAAI,QAAQ,IAAI,QAAQ;AACxC;AAEO,SAAS,aAAa,KAAU;AAErC,MAAI,IAAI,KAAK,WAAW,IAAIA,WAAU,EAAE,GAAG;AACzC,WAAO;AAAA,EACT;AAEA,SAAO,IAAI,KAAK,WAAW,eAAe;AAC5C;AAEO,SAAS,iBAAiB,KAAmB;AAClD,SAAO,IAAI,KAAK,WAAW,kBAAkB;AAC/C;AAEO,SAAS,wBAAwB,KAAmB;AACzD,SAAO,IAAI,KAAK,WAAW,oBAAoB;AACjD;AAEO,SAAS,mBAAmB,KAAmB;AACpD,SAAO,IAAI,KAAK,WAAW,iBAAiB;AAC9C;AAOA,eAAsB,gBAAgB,KAAU;AAE9C,QAAMC,WAAU,CAAC,IAAI,QAAQ,wCAAqB,CAAC;AACnD,MAAI;AACJ,WAAS,UAAUA,UAAS;AAC1B,YAAQ,aAAa,MAAgB;AACrC,QAAI,OAAO;AACT;AAAA,IACF;AAAA,EACF;AAGA,MAAI,CAAC,SAAS,IAAI,QAAQ,QAAQ,IAAI,QAAQ,KAAK,OAAO;AACxD,YAAQ,aAAa,IAAI,QAAQ,KAAK,KAAK;AAAA,EAC7C;AAGA,QAAM,SAAS,sBAAsB,IAAI,IAAI;AAC7C,MAAI,CAAC,SAAS,QAAQ;AACpB,YAAQ,aAAa,MAAM;AAAA,EAC7B;AAKA,QAAM,mBAAmB,IAAI,KAAK,WAAW,oBAAoB;AACjE,QAAM,mBACJ,IAAI,KAAK,WAAW,eAAe,KAAK,CAAC;AAC3C,MAAI,CAAC,SAAS,kBAAkB;AAC9B,YAAQ,aAAa,MAAM,cAAc,GAAG,CAAC;AAAA,EAC/C;AAKA,QAAM,UAAU,IAAI,QAAQ,QAAQ;AACpC,MAAI,CAAC,SAAS,SAAS,SAAS,kBAAkB,GAAG;AACnD,UAAM,YAAY,sBAAsB,IAAI,QAAQ,QAAQ,OAAO;AACnE,YAAQ,aAAa,SAAS;AAAA,EAChC;AAEA,SAAO;AACT;AAEA,SAAS,sBAAsB,KAAc;AAC3C,MAAI,CAAC,KAAK;AACR;AAAA,EACF;AACA,SAAO,IACJ,MAAM,GAAG,EAAE,CAAC,EACZ,MAAM,GAAG,EACT,KAAK,aAAW,QAAQ,WAAWD,WAAU,CAAC;AACnD;AAMO,SAAS,QAAW,OAA+B;AACxD,MAAI,CAAC,OAAO;AACV,WAAO;AAAA,EACT;AACA,MAAI;AACF,WAAO,oBAAAE,QAAI,OAAO,OAAO,oBAAI,UAAoB;AAAA,EACnD,SAAS,GAAG;AACV,QAAI,oBAAI,qBAAqB;AAE3B,aAAO,oBAAAA,QAAI,OAAO,OAAO,oBAAI,mBAAmB;AAAA,IAClD,OAAO;AACL,YAAM;AAAA,IACR;AAAA,EA
CF;AACF;AAEO,SAAS,sBAAsB,QAAgB;AACpD,MAAI,oBAAI,oBAAoB,oBAAI,qBAAqB,QAAQ;AAC3D,WAAO;AAAA,EACT;AAEA,SAAO,CAAC,EACN,oBAAI,6BAA6B,oBAAI,8BAA8B;AAEvE;AAOO,SAAS,UAAa,KAAU,MAAc;AACnD,QAAM,SAAS,IAAI,QAAQ,IAAI,IAAI;AAEnC,MAAI,CAAC,QAAQ;AACX,WAAO;AAAA,EACT;AAEA,SAAO,QAAW,MAAM;AAC1B;AASO,SAAS,UACd,KACA,OACA,OAAO,WACP,OAAO,EAAE,MAAM,KAAK,GACpB;AACA,MAAI,SAAS,QAAQ,KAAK,MAAM;AAC9B,YAAQ,oBAAAA,QAAI,KAAK,OAAO,oBAAI,UAAoB;AAAA,EAClD;AAEA,QAAM,SAAoB;AAAA,IACxB,SAAS;AAAA,IACT,MAAM;AAAA,IACN,UAAU;AAAA,IACV,WAAW;AAAA,EACb;AAEA,MAAI,oBAAI,eAAe;AACrB,WAAO,SAAS,oBAAI;AAAA,EACtB;AAEA,MAAI,QAAQ,IAAI,MAAM,OAAO,MAAM;AACrC;AAKO,SAAS,YAAY,KAAU,MAAc;AAClD,YAAU,KAAK,MAAM,IAAI;AAC3B;AAQO,SAAS,SAAS,KAAU;AACjC,SAAO,IAAI,oCAAmB,MAAM;AACtC;AAEO,SAAS,QAAQ,QAAgB;AACtC,SAAO,IAAI,QAAQ,aAAW,WAAW,SAAS,MAAM,CAAC;AAC3D;AAEO,SAAS,UAAU,OAAc;AACtC,SAAO,CAAC,CAAC,yBAAyB,KAAK;AACzC;AAEO,SAAS,qBAAqB,MAAW;AAC9C,MAAI,OAAO,SAAS,UAAU;AAC5B,WAAO;AAAA,EACT;AACA,MAAI;AACF,SAAK,UAAU,IAAI;AAAA,EACrB,SAAS,KAAK;AACZ,QAAI,eAAe,SAAS,KAAK,QAAQ,SAAS,oBAAoB,GAAG;AACvE,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;;;ACxPO,SAAS,WAAW,OAAe;AACxC,SACE,SACA,CAAC,CAAC,MAAM;AAAA,IACN;AAAA,EACF;AAEJ;;;ACPO,IAAK,eAAL,kBAAKC,kBAAL;AACL,EAAAA,cAAA,kBAAe;AACf,EAAAA,cAAA,aAAU;AACV,EAAAA,cAAA,aAAU;AACV,EAAAA,cAAA,WAAQ;AACR,EAAAA,cAAA,UAAO;AALG,SAAAA;AAAA,GAAA;AAQZ,IAAM,aAA2C;AAAA,EAC/C,cAAc;AAAA,EACd,SAAS;AAAA,EACT,SAAS,KAAK;AAAA,EACd,OAAO,KAAK,KAAK;AAAA,EACjB,MAAM,KAAK,KAAK,KAAK;AACvB;AAEO,IAAM,WAAN,MAAM,UAAS;AAAA,EACpB,OAAO,QAAQ,MAAoB,IAAkB,UAAkB;AACrE,UAAM,eAAe,WAAW,WAAW,IAAI;AAC/C,WAAO,eAAe,WAAW,EAAE;AAAA,EACrC;AAAA,EAEA,OAAO,KAAK,MAAoB,UAAkB;AAChD,WAAO;AAAA,MACL,IAAI,CAAC,OAAqB;AACxB,eAAO,UAAS,QAAQ,MAAM,IAAI,QAAQ;AAAA,MAC5C;AAAA,MACA,MAAM,MAAM;AACV,eAAO,UAAS,QAAQ,MAAM,mCAA2B,QAAQ;AAAA,MACnE;AAAA,MACA,WAAW,MAAM;AACf,eAAO,UAAS,QAAQ,MAAM,yBAAsB,QAAQ;AAAA,MAC9D;AAAA,IACF;AAAA,EACF;AAAA,EAEA,OAAO,YAAY,UAAkB;AACnC,WAAO,UAAS,KAAK,yBAAsB,QAAQ;AAAA,EACrD;AAAA,EAEA,OAAO,YAAY,UAAkB;AACnC,WAAO,UAAS,KAAK,yBAAsB,QAAQ;AAAA,EACrD;AAAA,EAEA,OAAO,UAAU,UAAkB;AACjC,WAAO,UAAS,KAAK,qBAAoB,QAAQ;AAAA,EACnD;AAAA,EAEA,OAAO,SAAS,UAAkB;AAChC,WAAO,UAAS,KAAK,mBAAmB,QAAQ;AAAA,EAClD;AAAA,EAEA,OAAO,iBAAiB,UAAkB;AACxC,WAAO,UAAS,KAAK,mCAA2B,QAAQ;AAAA,EAC1D;AACF;;;ALhDA,eAAe,UACb,MACA,MACkB;AAClB,MAAI,gCAA0B;AAC5B,WAAO,WAAW,IAAI;AAAA,EACxB;AAEA,UAAQ,MAAM;AAAA,IACZ,gCAAwB;AACtB,aAAO,WAAW,QAAQ,QAAQ;AAAA,IACpC;AAAA,IACA,kCAAyB;AACvB,aAAO,WAAW,QAAQ,SAAS;AAAA,IACrC;AAAA,IACA,8BAAuB;AACrB,aAAO,WAAW,QAAQ,OAAO;AAAA,IACnC;AAAA,IACA,kCAAyB;AACvB,aAAO,WAAW,QAAQ,SAAS;AAAA,IACrC;AAAA,IACA,sCAA2B;AACzB,aAAO,WAAW,QAAQ,WAAW;AAAA,IACvC;AAAA,IACA,SAAS;AACP,YAAM,cAAM,YAAY,IAAI;AAAA,IAC9B;AAAA,EACF;AACF;AAEA,IAAM,UAA0D;AAAA,EAC9D,UAAU;AAAA;AAAA,IAER,YAAY;AAAA,EACd;AAAA,EACA,WAAW;AAAA,IACT,YAAY;AAAA,EACd;AAAA,EACA,SAAS;AAAA;AAAA;AAAA,IAGP,aAAa;AAAA;AAAA;AAAA;AAAA,IAIb,YAAY;AAAA;AAAA,IAGZ,YAAY;AAAA;AAAA;AAAA;AAAA;AAAA,IAKZ,aAAa;AAAA;AAAA,EACf;AAAA,EACA,WAAW;AAAA,IACT,YAAY;AAAA,EACd;AAAA,EACA,QAAQ,CAAC;AAAA,EACT,aAAa;AAAA,IACX,YAAY;AAAA,EACd;AACF;AAEA,eAAsB,WAAW,OAAwB,CAAC,GAAG;AAC3D,QAAMC,WAAU,EAAE,GAAG,QAAQ,SAAS,GAAG,KAAK;AAC9C,QAAM,eAAe,MAAM,cAAc;AACzC,QAAM,SAAS,aAAa,UAAU;AACtC,SAAO,IAAI,eAAAC,QAAQ,CAAC,MAAM,GAAGD,QAAO;AACtC;AAcA,SAAS,YAAY,MAAmB;AAGtC,QAAM,SAAS,KAAK,aAAa,WAAmB,YAAY;AAChE,MAAI,OAAe,QAAQ,MAAM,IAAI,KAAK,IAAI;AAE9C,MAAI,KAAK,UAAU;AACjB,WAAO,OAAO,IAAI,KAAK,QAAQ;AAAA,EACjC;AACA,SAAO;AACT;AAEO,IAAM,yBAAyB,SAAS,YAAY,EAAE,EAAE,KAAK;AAEpE,eAAsB,WACpB,MACA,MAC8B;AAC9B,QAAM,UAAU,MAAM,UAAU,KAAK,MAAM,KAAK,aAAa;AAC7D,MAAI;AACJ,MAAIE;AACJ,MAAI;AACF,UAAM,OAAO,YAAY,IAAI;AAE7B,UAAM,MACJ,KAAK,2CAAgC,yBAAyB,KAAK;AAGrE,WAAO,MAAM,Q
AAQ,KAAK,MAAM,GAAG;AAEnC,QAAI,KAAK,0CAA+B;AAEtC,YAAM,oBAAoB,MAAY;AACpC,QAAAA,WAAU,WAAW,YAAY;AAC/B,iBAAO,MAAM,KAAM,OAAO,KAAK,MAAM,KAAK,YAAY,KAAK,SAAS,CAAC;AAErE,4BAAkB;AAAA,QACpB,GAAG,MAAM,CAAC;AAAA,MACZ;AAEA,wBAAkB;AAAA,IACpB;AAIA,UAAM,SAAS,MAAM,KAAK;AAC1B,WAAO,EAAE,UAAU,MAAM,OAAO;AAAA,EAClC,SAAS,GAAQ;AAEf,QAAI,EAAE,SAAS,aAAa;AAC1B,UAAI,KAAK,oCAA4B;AAGnC,eAAO,EAAE,UAAU,MAAM;AAAA,MAC3B,OAAO;AACL,cAAM;AAAA,MACR;AAAA,IACF,OAAO;AACL,YAAM;AAAA,IACR;AAAA,EACF,UAAE;AACA,iBAAaA,QAAO;AACpB,UAAM,MAAM,OAAO;AAAA,EACrB;AACF;;;ADrJA,IAAM,aAAa,gBAAgB,cAAc,KAAK;AAE/C,IAAM,oBAAiC;AAAA,EAC5C;AAAA,EACA;AAAA,EACA,KAAK,KAAK;AAAA;AAAA,EACV,YAAY;AACd;AAIA,eAAsB,eAAkC;AACtD,QAAM,UAAU,MAAM,WAAW;AACjC,SAAO,QAAQ;AACjB;AAEA,eAAe,aAA+B;AAC5C,QAAM,KAAK,cAAc;AACzB,MAAI;AAEJ,MAAI;AACF,cAAU,MAAM,GAAG,IAAI,UAAU;AAAA,EACnC,SAAS,GAAQ;AAEf,QAAI,EAAE,WAAW,KAAK;AACpB,gBAAU,MAAM,iBAAiB;AAAA,IACnC,OAAO;AACL,YAAM;AAAA,IACR;AAAA,EACF;AAEA,SAAO;AACT;AAEA,eAAsB,OAAO,UAAkB;AAC7C,QAAM,UAAU,MAAM,WAAW;AACjC,SAAO,QAAQ,UAAU,QAAQ,QAAQ,MAAM;AACjD;AAIA,SAAS,gBAAyB;AAChC,SAAO;AAAA,IACL,KAAK;AAAA,IACL,WAAW,CAAC;AAAA,EACd;AACF;AAEA,eAAe,mBAAqC;AAClD,QAAM,KAAK,cAAc;AACzB,MAAI,UAAU,cAAc;AAE5B,MAAI;AACF,UAAM,WAAW,MAAM,GAAG,IAAI,OAAO;AACrC,YAAQ,OAAO,SAAS;AAAA,EAC1B,SAAS,GAAQ;AAEf,QAAI,EAAE,WAAW,KAAK;AACpB,aAAO,GAAG,IAAI,UAAU;AAAA,IAC1B;AACA,UAAM;AAAA,EACR;AAEA,SAAO;AACT;AAEA,eAAsB,UAAU,UAAkB;AAChD,QAAM,KAAK,cAAc;AAGzB,QAAY,WAAW,mBAAmB,YAAY;AACpD,UAAM,UAAU,MAAM,WAAW;AAGjC,QAAI,QAAQ,UAAU,QAAQ,QAAQ,MAAM,IAAI;AAC9C,cAAQ,UAAU,KAAK,QAAQ;AAC/B,YAAM,GAAG,IAAI,OAAO;AAAA,IACtB;AAAA,EACF,CAAC;AACH;AAIA,eAAsB,aAAa,UAAkB;AACnD,MAAI;AACF,UAAY,WAAW,mBAAmB,YAAY;AACpD,YAAM,KAAK,cAAc;AACzB,YAAM,UAAU,MAAM,WAAW;AACjC,cAAQ,YAAY,QAAQ,UAAU,OAAO,QAAM,OAAO,QAAQ;AAClE,YAAM,GAAG,IAAI,OAAO;AAAA,IACtB,CAAC;AAAA,EACH,SAAS,KAAK;AACZ,YAAQ,MAAM,yBAAyB,QAAQ,iBAAiB,GAAG;AACnE,UAAM;AAAA,EACR;AACF;;;APhGAC;;;AcJA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAC,qBAAkB;AAClB;AAEA,IAAqB,MAArB,MAAyB;AAAA,EAGvB,YAAY,MAAc;AACxB,SAAK,OAAO;AAAA,EACd;AAAA,EAEA,MAAM,QAAQ,QAAgB,KAAaC,UAAe;AACxD,QAAI,CAACA,SAAQ,SAAS;AACpB,MAAAA,SAAQ,UAAU,CAAC;AAAA,IACrB;AAEA,QAAI,CAACA,SAAQ,QAAQ,cAAc,GAAG;AACpC,MAAAA,SAAQ,UAAU;AAAA,QAChB,gBAAgB;AAAA,QAChB,QAAQ;AAAA,QACR,GAAGA,SAAQ;AAAA,MACb;AAAA,IACF;AAEA,QAAI,OAAOA,SAAQ,QAAQ,cAAc,MAAM;AAG/C,IAAQ,oBAAY,UAAUA,SAAQ,OAAO;AAE7C,UAAM,iBAAiB;AAAA,MACrB;AAAA,MACA,MAAM,OAAO,KAAK,UAAUA,SAAQ,IAAI,IAAIA,SAAQ;AAAA,MACpD,SAASA,SAAQ;AAAA;AAAA,MAEjB,aAAa;AAAA,IACf;AAEA,WAAO,UAAM,mBAAAC,SAAM,GAAG,KAAK,IAAI,GAAG,GAAG,IAAI,cAAc;AAAA,EACzD;AAAA,EAEA,MAAM,KAAK,KAAaD,UAAe;AACrC,WAAO,KAAK,QAAQ,QAAQ,KAAKA,QAAO;AAAA,EAC1C;AAAA,EAEA,MAAM,IAAI,KAAaA,UAAe;AACpC,WAAO,KAAK,QAAQ,OAAO,KAAKA,QAAO;AAAA,EACzC;AAAA,EAEA,MAAM,MAAM,KAAaA,UAAe;AACtC,WAAO,KAAK,QAAQ,SAAS,KAAKA,QAAO;AAAA,EAC3C;AAAA,EAEA,MAAM,IAAI,KAAaA,UAAe;AACpC,WAAO,KAAK,QAAQ,UAAU,KAAKA,QAAO;AAAA,EAC5C;AAAA,EAEA,MAAM,IAAI,KAAaA,UAAe;AACpC,WAAO,KAAK,QAAQ,OAAO,KAAKA,QAAO;AAAA,EACzC;AACF;;;ACzDAE;AACAC;AAGA,IAAM,MAAM,IAAI,IAAI,oBAAI,kBAAkB;AAQ1C,IAAM,aAAa,oBAAI,eAAe,oBAAI;AAEnC,IAAM,aAAa,OACxB,UACsC;AACtC,MAAI,YAAY;AACd;AAAA,EACF;AACA,QAAM,UAAU;AAAA,IACd;AAAA,EACF;AACA,QAAM,WAAW,MAAM,IAAI,KAAK,wBAAwB;AAAA,IACtD,MAAM;AAAA,IACN,SAAS;AAAA,MACP,mCAAe,GAAG,oBAAI;AAAA,IACxB;AAAA,EACF,CAAC;AAED,MAAI,SAAS,WAAW,KAAK;AAC3B,UAAM,IAAI,MAAM,kCAAkC,KAAK,EAAE;AAAA,EAC3D;AAEA,QAAM,OAAuB,MAAM,SAAS,KAAK;AACjD,SAAO,KAAK,CAAC;AACf;AAEO,IAAM,uBAAuB,OAClC,aACsC;AACtC,MAAI,YAAY;AACd;AAAA,EACF;AACA,QAAM,UAAU;AAAA,IACd;AAAA,EACF;AACA,QAAM,WAAW,MAAM,IAAI,KAAK,wBAAwB;AAAA,IACtD,MAAM;AAAA,IACN,SAAS;AAAA,MACP,mCAAe,GAAG,oBAAI;AAAA,IACxB;AAAA,EACF,CAAC;AAED,MAAI,SAAS,WAAW,KAAK;A
AC3B,UAAM,IAAI,MAAM,qCAAqC,QAAQ,EAAE;AAAA,EACjE;AAEA,QAAM,OAAuB,MAAM,SAAS,KAAK;AACjD,SAAO,KAAK,CAAC;AACf;AAEO,IAAM,YAAY,YAEpB;AACH,MAAI,YAAY;AACd;AAAA,EACF;AACA,QAAM,WAAW,MAAM,IAAI,IAAI,eAAe;AAAA,IAC5C,SAAS;AAAA,MACP,mCAAe,GAAG,oBAAI;AAAA,IACxB;AAAA,EACF,CAAC;AACD,QAAM,OAAO,MAAM,SAAS,KAAK;AAEjC,MAAI,SAAS,WAAW,KAAK;AAC3B,UAAM,IAAI,MAAM,sBAAsB;AAAA,EACxC;AAEA,SAAO;AACT;;;ACjFA,IAAAC,iBAAA;AAAA,SAAAA,gBAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,6BAAAC;AAAA,EAAA,gCAAAC;AAAA,EAAA,6BAAAC;AAAA,EAAA,eAAAC;AAAA,EAAA,wBAAAC;AAAA,EAAA,iBAAAC;AAAA,EAAA,iBAAAC;AAAA,EAAA,uBAAAC;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAAC;AAcA;AAUAC;AACAA;;;ACvBAC;;;ACIAC;AACAC;;;ACPA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAEAC;AACAC;AACA;AAEA,IAAM,cAAc,SAAS,SAAS,CAAC,EAAE,UAAU;AAgBnD,eAAsB,WAAW,MAAc,OAAe;AAC5D,QAAM,SAAS,MAAY,gBAAgB;AAC3C,QAAM,OAAO,MAAM,MAAM,OAAO,WAAW;AAC7C;AAQA,eAAsB,WAAW,OAAe,MAA4B;AAC1E,QAAM,OAAa,MAAM;AACzB,QAAM,SAAS,MAAY,gBAAgB;AAC3C,QAAM,OAAO,MAAM,MAAM,EAAE,OAAO,KAAK,GAAG,WAAW;AACrD,SAAO;AACT;AAOA,eAAsB,QAAQ,MAA+B;AAC3D,QAAM,SAAS,MAAY,gBAAgB;AAC3C,QAAM,QAAS,MAAM,OAAO,IAAI,IAAI;AACpC,MAAI,CAAC,OAAO;AACV,UAAM;AAAA,EACR;AACA,SAAO;AACT;AAEA,eAAsB,WAAW,MAAc;AAC7C,QAAM,SAAS,MAAY,gBAAgB;AAC3C,QAAM,OAAO,OAAO,IAAI;AAC1B;AAKA,eAAsB,iBAA4C;AAChE,QAAM,SAAS,MAAY,gBAAgB;AAC3C,QAAM,UAA4C,MAAM,OAAO,KAAK;AAEpE,QAAM,UAA4B,QAAQ,IAAI,YAAU;AACtD,WAAO;AAAA,MACL,GAAG,OAAO;AAAA,MACV,MAAM,OAAO;AAAA,IACf;AAAA,EACF,CAAC;AACD,MAAI,CAAC,oBAAI,eAAe;AACtB,WAAO;AAAA,EACT;AACA,QAAM,WAAW,YAAY;AAC7B,SAAO,QAAQ,OAAO,YAAU,aAAa,OAAO,KAAK,QAAQ;AACnE;AAEA,eAAsB,mBACpB,QAC2B;AAC3B,UAAQ,MAAM,eAAe,GAAG;AAAA,IAAO,YACrC,OAAO,SAAS,OAAO,KAAK;AAAA,EAC9B;AACF;;;ADpEA,eAAsB,qBAAqB,QAAkB;AAC3D,MAAI,gBAA0B,CAAC;AAE/B,QAAM,sBAAsB,MAAM,uBAAuB,MAAM;AAC/D,gBAAc,KAAK,GAAG,oBAAoB,IAAI,UAAQ,KAAK,KAAK,CAAC;AAEjE,QAAM,wBAAwB,MAAM,yBAAyB,MAAM;AACnE,gBAAc,KAAK,GAAG,sBAAsB,IAAI,UAAQ,KAAK,GAAI,CAAC;AAElE,QAAM,mBAAmB,MAAM,oBAAoB,MAAM;AACzD,gBAAc,KAAK,GAAG,iBAAiB,IAAI,aAAW,QAAQ,KAAK,CAAC;AAEpE,QAAM,gBAAgB,MAAM,mBAAmB,MAAM;AACrD,gBAAc,KAAK,GAAG,cAAc,IAAI,YAAU,OAAO,KAAK,CAAC;AAE/D,SAAO,CAAC,GAAG,IAAI,IAAI,cAAc,IAAI,WAAS,MAAM,YAAY,CAAC,CAAC,CAAC;AACrE;AAGA,eAAsB,gBACpB,YAC8B;AAG9B,SAAQ,MAAc,+EAAqD;AAAA,IACzE,MAAM,CAAC,WAAW,YAAY,CAAC;AAAA,IAC/B,cAAc;AAAA,EAChB,CAAC;AACH;AAEA,eAAsB,uBACpB,QACiB;AACjB,QAAM,WAAW,OAAO,IAAI,WAAS,MAAM,YAAY,CAAC;AACxD,QAAMC,UAAS;AAAA,IACb,MAAM;AAAA,IACN,cAAc;AAAA,EAChB;AAEA,QAAM,OAAO;AAAA,IACX,eAAe;AAAA,EACjB;AAEA,SAAQ,MAAc;AAAA;AAAA,IAEpBA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAEA,eAAsB,yBACpB,QACgC;AAChC,QAAM,WAAW,OAAO,IAAI,WAAS,MAAM,YAAY,CAAC;AACxD,QAAMA,UAAS;AAAA,IACb,MAAM;AAAA,IACN,cAAc;AAAA,EAChB;AAEA,QAAM,OAAO;AAAA,IACX,eAAe;AAAA,EACjB;AACA,SAAQ,MAAc;AAAA;AAAA,IAEpBA;AAAA,IACA;AAAA,EACF;AACF;AAEA,eAAsB,oBACpB,QAC4B;AAC5B,QAAM,WAAW,OAAO,IAAI,WAAS,MAAM,YAAY,CAAC;AACxD,QAAMA,UAAS;AAAA,IACb,MAAM;AAAA,IACN,cAAc;AAAA,EAChB;AAEA,QAAM,OAAO;AAAA,IACX,eAAe;AAAA,EACjB;AAEA,SAAQ,MAAc;AAAA;AAAA,IAEpBA;AAAA,IACA;AAAA,EACF;AACF;;;AEzGA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACEO,IAAe,gBAAf,cAAqC,MAAM;AAAA,EAGhD,YAAY,SAAiB,MAAiB;AAC5C,UAAM,OAAO;AACb,SAAK,OAAO;AAAA,EACd;AAGF;AAIO,IAAK,YAAL,kBAAKC,eAAL;AACL,EAAAA,WAAA,0BAAuB;AACvB,EAAAA,WAAA,sBAAmB;AACnB,EAAAA,WAAA,qBAAkB;AAClB,EAAAA,WAAA,UAAO;AAJG,SAAAA;AAAA,GAAA;AAWL,IAAM,iBAAiB,CAAC,QAAa;AAC1C,MAAI;AACJ,MAAI,IAAI,MAAM;AAEZ,YAAQ;AAAA,MACN,MAAM,IAAI;AAAA,IACZ;AAEA,QAAI,IAAI,gBAAgB;AACtB,cAAQ;AAAA,QACN,GAAG;AAAA;AAAA,QAEH,GAAG,IAAI,eAAe;AAAA,MACxB;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAIO,IAAM,YAAN,cAAwB,cAAc;AAAA,E
AG3C,YAAY,SAAiB,YAAoB,OAAO,mBAAgB;AACtE,UAAM,SAAS,IAAI;AACnB,SAAK,SAAS;AAAA,EAChB;AACF;AAEO,IAAM,gBAAN,cAA4B,UAAU;AAAA,EAC3C,YAAY,SAAiB;AAC3B,UAAM,SAAS,GAAG;AAAA,EACpB;AACF;AAEO,IAAM,kBAAN,cAA8B,UAAU;AAAA,EAC7C,YAAY,SAAiB;AAC3B,UAAM,SAAS,GAAG;AAAA,EACpB;AACF;AAIO,IAAM,kBAAN,cAA8B,UAAU;AAAA,EAG7C,YAAY,SAAiB,WAAmB;AAC9C,UAAM,SAAS,KAAK,iDAA8B;AAClD,SAAK,YAAY;AAAA,EACnB;AAAA,EAEA,iBAAiB;AACf,WAAO;AAAA,MACL,WAAW,KAAK;AAAA,IAClB;AAAA,EACF;AACF;AAEO,IAAM,uBAAN,cAAmC,UAAU;AAAA,EAGlD,YAAY,SAAiB,aAAqB;AAChD,UAAM,SAAS,KAAK,yCAA0B;AAC9C,SAAK,cAAc;AAAA,EACrB;AAAA,EAEA,iBAAiB;AACf,WAAO;AAAA,MACL,aAAa,KAAK;AAAA,IACpB;AAAA,EACF;AACF;AAIO,IAAM,qBAAN,cAAiC,cAAc;AAAA,EACpD,cAAc;AACZ;AAAA,MACE;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACF;AAIO,IAAM,wBAAN,cAAoC,MAAM;AAAA,EAC/C,YAAY,OAAe;AACzB,UAAM,0BAA0B,KAAK,GAAG;AAAA,EAC1C;AACF;;;AJjHAC;AACAC;AAIO,IAAMC,aAAY,YAAI,MAAM;AAC5B,IAAMC,WAAU,YAAI,MAAM;AAC1B,IAAMC,aAAY,YAAI,MAAM;AAC5B,IAAMC,mBAAkB,YAAI,MAAM;AAClC,IAAMC,oBAAmB,YAAI,MAAM;AACnC,IAAMC,uBAAsB,YAAI,MAAM;AACtC,IAAMC,yBAAwB,YAAI,MAAM;AACxC,IAAMC,4BAA2B,YAAI,MAAM;AAElD,eAAsB,mBAAmB,OAAe,UAAkB;AAExE,MAAI,oBAAI,eAAe;AACrB,UAAM,aAAa,MAAM,gBAAgB,KAAK;AAC9C,QAAI,cAAc,QAAQ,WAAW,aAAa,UAAU;AAC1D,YAAM,IAAI,sBAAsB,KAAK;AAAA,IACvC;AAAA,EACF;AAGA,MAAI,CAAC,oBAAI,eAAe,CAAC,oBAAI,wBAAwB;AACnD,UAAM,UAAU,MAAiB,WAAW,KAAK;AACjD,QAAI,WAAW,QAAQ,YAAY,QAAQ,aAAa,UAAU;AAChE,YAAM,IAAI,sBAAsB,KAAK;AAAA,IACvC;AAAA,EACF;AACF;AAKA,eAAsB,4BACpB,SACmC;AACnC,MAAI,CAAC,oBAAI,eAAe,CAAC,oBAAI,wBAAwB;AACnD,UAAM,WAAW,YAAY;AAC7B,UAAM,UAAU,MAAM,qBAAqB,QAAQ;AACnD,QAAI,CAAC,SAAS;AACZ,YAAM,IAAI,MAAM,kCAAkC,QAAQ,EAAE;AAAA,IAC9D;AAEA,UAAM,iBAAiB,QAAQ;AAC/B,QAAI,QAAQ,SAAS,cAAc,GAAG;AACpC,aAAO;AAAA,IACT;AAAA,EACF;AACF;;;AKvDAC;;;ACAAC;;;ACAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAAC;AAAA,EAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA,cAAAC;AAAA,EAAA;AAAA;;;ACAA;AAEAC;;;ACFA;AAAA;AAAA;AAAA;AAGO,IAAM,UAAU,YAAY;AACjC,SAAe,iBAAiB;AAClC;;;ACLA,0BAAoB;AACpB;AAEAC;AACAC;;;ACJA;AAGAC;AAOA,IAAM,gBAAgB,CAAC,UAA4C;AACjE,SACE,mDACA,2DACA;AAEJ;AAEA,IAAM,WAAW,CAAC,UAA4B;AAC5C,SAAO,2DAAsC;AAC/C;AAUA,IAAM,cAAc;AAAA,EAClB,8BAAiB,GAAG;AAAA,EACpB,8CAAyB,GAAG;AAAA,EAC5B,sCAAqB,GAAG;AAC1B;AAOO,IAAM,UAAU,OAAO,UAAmC;AAE/D,MAAI,CAAC,cAAc,KAAK,GAAG;AACzB,WAAO;AAAA,EACT;AAEA,QAAM,cAAc,MAAM,UAAU,KAAK;AACzC,MAAI,aAAa;AACf,UAAM,YAAY,IAAI,KAAK,YAAY,SAAS;AAChD,UAAM,QAAQ,YAAY,KAAK;AAC/B,YAAQ,OAAO;AAAA,MACb,KAAK,kCAAwB;AAE3B,kBAAU,QAAQ,UAAU,QAAQ,IAAI,CAAC;AACzC,kBAAU,SAAS,GAAG,GAAG,GAAG,CAAC;AAG7B,YAAI,KAAK,IAAI,IAAI,UAAU,QAAQ,GAAG;AAEpC,gBAAM,YAAY,OAAO,EAAE,WAAW,KAAK,IAAI,EAAE,CAAC;AAClD,iBAAO;AAAA,QACT,OAAO;AAEL,iBAAO;AAAA,QACT;AAAA,MACF;AAAA,IACF;AAAA,EACF,OAAO;AAEL,UAAM,YAAY,OAAO,EAAE,WAAW,KAAK,IAAI,EAAE,CAAC;AAClD,WAAO;AAAA,EACT;AACF;AAEA,IAAM,WAAW,CAAC,UAA4B;AAC5C,MAAI,MAAM,4CAA6B,IAAI,KAAK;AAChD,MAAI,SAAS,KAAK,GAAG;AACnB,UAAM,MAAM,MAAc,SAAS;AAAA,EACrC;AACA,SAAO;AACT;AAEA,IAAM,YAAY,OAChB,UACyC;AACzC,QAAM,MAAM,SAAS,KAAK;AAC1B,QAAM,SAAS,MAAYC,KAAI,GAAG;AAClC,SAAO;AACT;AAEA,IAAM,cAAc,OAClB,OACA,eACG;AACH,QAAM,MAAM,SAAS,KAAK;AAC1B,QAAM,QAAQ,YAAY,KAAK;AAC/B,MAAI;AACJ,UAAQ,OAAO;AAAA,IACb,KAAK,kCAAwB;AAC3B;AAAA,IACF;AAAA,EACF;AAEA,QAAY,MAAM,KAAK,YAAY,GAAG;AACxC;;;ADlGA,IAAM,kBAA2B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAcjC;AAEA,IAAqB,mBAArB,MAAgE;AAAA,EAG9D,YAAY,OAA2B;AACrC,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,MAAM,8BAA8B;AAAA,IAChD;AACA,SAAK,UAAU,IAAI,oBAAAC,QAAQ,KAAK;AAAA,EAClC;AAAA,EAEA,MAAM,aACJ,OACA,UACA,YACA,WACe;AAEf,QAAI,gBAAgB,SAAS,KAAK,GAAG;AACnC;AAAA,IACF;AAEA,QAAI,MAA
mB,QAAQ,KAAK,GAAG;AACrC;AAAA,IACF;AAEA,iBAAa,KAAK,mBAAmB,UAAU;AAE/C,eAAW,UAAU,oBAAI;AACzB,eAAW,UAAU,oBAAI;AACzB,eAAW,cAAc,SAAS;AAClC,eAAW,UAAU,SAAS;AAE9B,UAAM,QAAgB,SAAS;AAC/B,QAAI,OAAO;AACT,iBAAW,QAAQ;AAAA,IACrB;AAEA,UAAM,UAAe,EAAE,YAAY,SAAS,IAAI,OAAO,WAAW;AAElE,QAAI,WAAW;AACb,cAAQ,YAAY,IAAI,KAAK,SAAS;AAAA,IACxC;AAGA,QAAI,SAAS,kBAAkB,SAAS,UAAU;AAChD,cAAQ,SAAS,CAAC;AAClB,UAAI,SAAS,gBAAgB;AAC3B,gBAAQ,OAAO,eAAe,SAAS;AACvC,gBAAQ,WAAW,iBAAiB,SAAS;AAAA,MAC/C;AACA,UAAI,SAAS,UAAU;AACrB,gBAAQ,OAAO,SAAS,SAAS;AACjC,gBAAQ,WAAW,WAAW,SAAS;AAAA,MACzC;AAAA,IACF;AAEA,SAAK,QAAQ,QAAQ,OAAO;AAAA,EAC9B;AAAA,EAEA,mBAAmB,YAAiB;AAClC,QAAI,WAAW,OAAO;AACpB,aAAO,WAAW;AAAA,IACpB;AACA,QAAI,WAAW,SAAS;AACtB,aAAO,WAAW;AAAA,IACpB;AACA,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,SAAS,UAAoB,WAA6B;AAC9D,UAAM,UAAe,EAAE,YAAY,SAAS,IAAI,YAAY,SAAS;AACrE,QAAI,WAAW;AACb,cAAQ,YAAY,IAAI,KAAK,SAAS;AAAA,IACxC;AACA,SAAK,QAAQ,SAAS,OAAO;AAAA,EAC/B;AAAA,EAEA,MAAM,cAAc,OAAc,WAA6B;AAC7D,UAAM,UAAe;AAAA,MACnB,YAAY,MAAM;AAAA,MAClB,WAAW,MAAM;AAAA,MACjB,UAAU,MAAM;AAAA,MAChB,YAAY;AAAA,IACd;AAEA,QAAI,WAAW;AACb,cAAQ,YAAY,IAAI,KAAK,SAAS;AAAA,IACxC;AACA,SAAK,QAAQ,cAAc,OAAO;AAAA,EACpC;AAAA,EAEA,WAAW;AACT,SAAK,QAAQ,SAAS;AAAA,EACxB;AACF;;;AEnHA,IAAO,kBAAQ;;;AJOf,IAAM,kBAAkB;AAAA;AAAA;AAGxB;AACA,IAAM,qBAAqB,yDAA+C;AAE1E,IAAqB,qBAArB,MAAkE;AAAA,EAGhE,cAAc;AACZ,QAAI,oBAAI,iBAAiB,CAAC,oBAAI,OAAO,GAAG;AACtC,WAAK,UAAU,IAAI,gBAAiB,oBAAI,aAAa;AAAA,IACvD;AAAA,EACF;AAAA,EAEA,MAAM,aACJ,OACA,UACA,YACA,WACe;AACf,QAAI,CAAC,gBAAgB,SAAS,KAAK,KAAK,CAAE,MAAgB,QAAQ,GAAI;AACpE;AAAA,IACF;AACA,QAAI,KAAK,SAAS;AAChB,YAAM,KAAK,QAAQ,aAAa,OAAO,UAAU,YAAY,SAAS;AAAA,IACxE;AAAA,EACF;AAAA,EAEA,MAAM,SAAS,UAAoB,WAA6B;AAE9D,QACE,CAAC,mBAAmB,SAAS,SAAS,IAAI,KAC1C,CAAE,MAAgB,QAAQ,GAC1B;AACA;AAAA,IACF;AACA,QAAI,KAAK,SAAS;AAChB,YAAM,KAAK,QAAQ,SAAS,UAAU,SAAS;AAAA,IACjD;AAAA,EACF;AAAA,EAEA,MAAM,cAAc,OAAc,WAA6B;AAE7D,QAAI,KAAK,SAAS;AAChB,YAAM,KAAK,QAAQ,cAAc,OAAO,SAAS;AAAA,IACnD;AAAA,EACF;AAAA,EAEA,WAAW;AACT,QAAI,KAAK,SAAS;AAChB,WAAK,QAAQ,SAAS;AAAA,IACxB;AAAA,EACF;AACF;;;AK7DAC;AAEA,IAAM,cAAc,oBAAI,eAAe,CAAC,oBAAI,MAAM;AAElD,IAAqB,mBAArB,MAAgE;AAAA,EAC9D,MAAM,aACJ,OACA,UACA,YACA,WACe;AACf,QAAI,aAAa;AACf;AAAA,IACF;AACA,YAAQ,IAAI,yBAAyB,SAAS,IAAI,KAAK,KAAK,IAAI,UAAU;AAAA,EAC5E;AAAA,EAEA,MAAM,SAAS,UAAoB,WAA6B;AAC9D,QAAI,aAAa;AACf;AAAA,IACF;AACA,YAAQ,IAAI,sBAAsB,QAAQ;AAAA,EAC5C;AAAA,EAEA,MAAM,cAAc,OAAc,WAA6B;AAC7D,QAAI,aAAa;AACf;AAAA,IACF;AACA,YAAQ,IAAI,4BAA4B,KAAK;AAAA,EAC/C;AAAA,EAEA,WAAiB;AAAA,EAEjB;AACF;;;ACpCA;AAUAC;;;ACVA;AAAA;AAAA;AAAA;AAAA,kBAAAC;AAAA;;;ACAAC;AACAC;;;ACDA,IAAAC,iBAAmB;AAYnB,SAAS,OAAO,OAAe,SAAc;AAC3C,SAAO;AAAA,IACL,WAAW,KAAK,IAAI;AAAA,IACpB;AAAA,IACA,MAAM;AAAA,IACN,MAAM,CAAC;AAAA,EACT;AACF;AAOA,IAAM,gBAAN,MAAoB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAalB,YAAY,MAAc,MAAY;AACpC,SAAK,QAAQ;AACb,SAAK,QAAQ;AACb,SAAK,YAAY,CAAC;AAClB,SAAK,WAAW,IAAI,eAAAC,QAAO,aAAa;AACxC,SAAK,YAAY;AACjB,SAAK,YAAY;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,QAAQ,MAAW;AACjB,SAAK,SAAS,GAAG,WAAW,YAAY;AACtC,UAAI,KAAK,UAAU,UAAU,GAAG;AAC9B;AAAA,MACF;AACA,UAAI,MAAM,KAAK,UAAU,MAAM;AAC/B,UAAI,OAAO,KAAK,GAAG;AACnB,UAAI,KAAK,QAAQ,MAAM;AACrB,cAAM;AAAA,MACR;AACA,WAAK;AAAA,IACP,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,UAAU;AACd,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,IAAI,KAAU,QAAiB;AAC7B,QAAI,OAAO,QAAQ,UAAU;AAC3B,YAAM;AAAA,IACR;AACA,SAAK,UAAU,KAAK,OAAO,KAAK,OAAO,GAAG,CAAC;AAC3C,SAAK;AACL,SAAK,SAAS,KAAK,SAAS;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAQ;AACZ,WAAO,CAAC;AAAA,EACV;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,sBAAsB,WAAmB;AAEvC,YAAQ,IAAI,SAAS;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA,EAKA,oBAAoB;AAClB,WA
AO,CAAC;AAAA,EACV;AAAA;AAAA,EAGA,WAAW,SAAiB;AAAA,EAE5B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAQ;AACZ,WAAO,CAAC;AAAA,EACV;AAAA,EAEA,MAAM,SAAS;AACb,WAAO,CAAC;AAAA,EACV;AAAA,EAEA,KAAK;AAEH,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,oBAAoB;AACxB,OAAG;AACD,YAAM,QAAQ,EAAE;AAAA,IAClB,SAAS,KAAK,YAAY,KAAK;AAAA,EACjC;AACF;AAEA,IAAO,wBAAQ;;;AD7If,kBAAwC;;;AEJjC,IAAK,WAAL,kBAAKC,cAAL;AACL,EAAAA,UAAA,gBAAa;AACb,EAAAA,UAAA,gBAAa;AACb,EAAAA,UAAA,eAAY;AACZ,EAAAA,UAAA,wBAAqB;AACrB,EAAAA,UAAA,mBAAgB;AALN,SAAAA;AAAA,GAAA;;;ACEZC;AAIO,SAAS,aACd,OACA,UACA,iBACA;AACA,UAAQ,OAAO,QAAQ;AACvB,MAAI,iBAAiB;AACnB,kBAAc,OAAO,eAAe;AAAA,EACtC;AACF;AAEA,SAAS,cAAc,OAAc,iBAA6B;AAChE,QAAM,GAAG,WAAW,OAAO,QAAa;AACtC,QAAI,iBAAiB;AACnB,YAAM,gBAAgB,GAAG;AAAA,IAC3B,WAAW,IAAI,KAAK,QAAQ;AAC1B,YAAM,QAAQ,IAAI;AAClB,YAAM,aAAa,MAAM,MAAM,kBAAkB;AACjD,eAAS,aAAa,YAAY;AAChC,YAAI,UAAU,OAAO,OAAO;AAC1B,gBAAM,MAAM,sBAAsB,UAAU,GAAG;AAAA,QACjD;AAAA,MACF;AACA,cAAQ,IAAI,SAAS,KAAK,WAAW;AAAA,IACvC;AAAA,EACF,CAAC;AACH;AAEA,SAAS,aACP,WACA,OACA,OAII,CAAC,GACL,QAAa,CAAC,GACd;AACA,QAAM,UAAU,UAAU,SAAS,IAAI,KAAK;AAC5C,QAAM,MAAM,KAAK;AAEjB,QAAM,UAAU;AAAA,IACd,SAAS;AAAA,IACT;AAAA,IACA;AAAA,IACA,KAAK,KAAK;AAAA,IACV,OAAO,KAAK,SAAS,KAAK,KAAK;AAAA,IAC/B,GAAG;AAAA,EACL;AAEA,MAAI;AACJ,MAAI,KAAK,KAAK,MAAM,YAAY;AAC9B,oBAAgB;AAAA,MACd,SAAS;AAAA,MACT,SAAS,KAAK,MACV,KAAK,IAAI,KAAK,WAAW,WAAW,QAAQ,QAC5C;AAAA,IACN;AAAA,EACF;AAEA,SAAO,CAAC,SAAS,KAAK,SAAS,aAAa;AAC9C;AAyBA,IAAM,eAAsD;AAAA,EAC1D,mCAAoB,GAAG;AAAA,EACvB,kCAAoB,GAAG;AAAA,EACvB,gCAAmB,GAAG;AAAA,EACtB,4CAA4B,GAAG;AAAA,EAC/B,mCAAuB,GAAG;AAC5B;AAEA,SAAS,QAAQ,OAAc,UAAoB;AACjD,QAAM,YAAY,aAAa,QAAQ;AAEvC,WAAS,eAAe,KAAU,MAAW;AAE3C,UAAM,QAAQ,IAAI,KAAK,OAAO;AAC9B,QAAI,OAAO;AACT,aAAe,YAAY,OAAO,IAAI;AAAA,IACxC,OAAO;AACL,WAAK;AAAA,IACP;AAAA,EACF;AAEA,QACG,GAAG,yBAAmB,OAAO,QAAa;AAGzC,UAAM,eAAe,KAAK,MAAM;AAC9B,cAAQ,MAAM,GAAG,aAAa,WAAW,yBAAmB,EAAE,IAAI,CAAC,CAAC;AAAA,IACtE,CAAC;AAAA,EACH,CAAC,EACA,GAAG,qBAAiB,CAAC,UAAe;AAEnC,YAAQ,MAAM,GAAG,aAAa,WAAW,qBAAiB,EAAE,MAAM,CAAC,CAAC;AAAA,EACtE,CAAC;AAEH,MAAI,QAAQ,IAAI,YAAY,SAAS,MAAM,GAAG;AAC5C,UACG,GAAG,yBAAmB,CAAC,UAAiB;AAEvC,cAAQ,KAAK,GAAG,aAAa,WAAW,yBAAmB,EAAE,MAAM,CAAC,CAAC;AAAA,IACvE,CAAC,EACA,GAAG,uBAAkB,OAAO,KAAU,eAAoB;AAEzD,YAAM,eAAe,KAAK,MAAM;AAC9B,gBAAQ,KAAK,GAAG,aAAa,WAAW,uBAAkB,EAAE,IAAI,CAAC,CAAC;AAAA,MACpE,CAAC;AAAA,IACH,CAAC,EACA,GAAG,2BAAoB,OAAO,KAAU,aAAkB;AAEzD,YAAM,eAAe,KAAK,MAAM;AAC9B,gBAAQ;AAAA,UACN,GAAG;AAAA,YACD;AAAA,YACA;AAAA,YACA,EAAE,IAAI;AAAA,YACN,EAAE,SAAS;AAAA,UACb;AAAA,QACF;AAAA,MACF,CAAC;AAAA,IACH,CAAC,EACA,GAAG,6BAAqB,OAAO,KAAU,WAAW;AAEnD,YAAM,eAAe,KAAK,MAAM;AAC9B,gBAAQ;AAAA,UACN,GAAG,aAAa,WAAW,6BAAqB,EAAE,IAAI,GAAG,EAAE,OAAO,CAAC;AAAA,QACrE;AAAA,MACF,CAAC;AAAA,IACH,CAAC,EACA,GAAG,uBAAkB,OAAO,KAAU,UAAe;AAEpD,YAAM,eAAe,KAAK,MAAM;AAC9B,gBAAQ;AAAA,UACN,GAAG,aAAa,WAAW,uBAAkB,EAAE,KAAK,MAAM,CAAC;AAAA,QAC7D;AAAA,MACF,CAAC;AAAA,IACH,CAAC,EACA,GAAG,uBAAkB,MAAM;AAE1B,cAAQ,KAAK,GAAG,aAAa,WAAW,qBAAgB,CAAC;AAAA,IAC3D,CAAC,EACA,GAAG,yBAAmB,MAAM;AAE3B,cAAQ,KAAK,GAAG,aAAa,WAAW,uBAAiB,CAAC;AAAA,IAC5D,CAAC,EACA,GAAG,yBAAmB,CAAC,MAAa,SAAiB;AAGpD,cAAQ;AAAA,QACN,GAAG;AAAA,UACD;AAAA,UACA;AAAA,UACA,CAAC;AAAA,UACD,EAAE,QAAQ,KAAK,QAAQ,KAAK;AAAA,QAC9B;AAAA,MACF;AAAA,IACF,CAAC,EACA,GAAG,yBAAmB,MAAM;AAE3B,cAAQ,KAAK,GAAG,aAAa,WAAW,uBAAiB,CAAC;AAAA,IAC5D,CAAC,EACA,GAAG,yBAAmB,CAAC,QAAa;AAEnC,cAAQ,KAAK,GAAG,aAAa,WAAW,yBAAmB,EAAE,IAAI,CAAC,CAAC;AAAA,IACrE,CAAC;AAAA,EACL;AACF;;;AH7LAC;AAGA,IAAM,gBAAgB,SAAS,YAAY,CAAC,EAAE,KAAK;AAEnD,IAAM,+BAA+B,SAAS,YAAY,EAAE,EAAE,KAAK;AAEnE,IAAM,oBAAoB,SAAS,YAAY,EAAE,EAAE,KAAK;AACxD,IAAI,SAA8C,CAAC;AACnD,IAAI;AAEJ,eAAeC,WAAU;AACvB,WAAS,SAAS,QAAQ;AACxB,UAAM,MAAM,MAAM,mBAAmB,W
AAW;AAAA,EAClD;AACF;AAEO,SAAS,YACd,UACA,OAAwC,CAAC,GACrB;AACpB,QAAM,YAAY,gBAAgB;AAClC,QAAM,cAA4B;AAAA,IAChC,OAAO;AAAA,IACP,UAAU;AAAA,MACR,iBAAiB;AAAA,MACjB,cAAc;AAAA,MACd,eAAe;AAAA,IACjB;AAAA,EACF;AACA,MAAI;AACJ,MAAI,CAAC,oBAAI,OAAO,GAAG;AACjB,YAAQ,IAAI,YAAAC,QAAU,UAAU,WAAW;AAAA,EAC7C,OAAO;AACL,YAAQ,IAAI,sBAAc,UAAU,WAAW;AAAA,EACjD;AACA,eAAa,OAAO,UAAU,MAAM,eAAe;AACnD,SAAO,KAAK,KAAK;AACjB,MAAI,CAAC,mBAAmB,CAAC,oBAAI,OAAO,GAAG;AACrC,sBAAyB,IAAID,UAAS,iBAAiB;AAEvD,IAAAA,SAAQ,EAAE,MAAM,SAAO;AACrB,cAAQ,MAAM,qBAAqB,QAAQ,gBAAgB,GAAG,EAAE;AAAA,IAClE,CAAC;AAAA,EACH;AACA,SAAO;AACT;AAEA,eAAsBE,YAAW;AAC/B,MAAI,iBAAiB;AACnB,IAAO,MAAM,eAAe;AAAA,EAC9B;AACA,MAAI,OAAO,QAAQ;AACjB,aAAS,SAAS,QAAQ;AACxB,YAAM,MAAM,MAAM;AAAA,IACpB;AACA,aAAS,CAAC;AAAA,EACZ;AACA,UAAQ,IAAI,iBAAiB;AAC/B;;;AFpDAC;AAEA,IAAqB,qBAArB,MAAqB,oBAA6C;AAAA,EAChE;AAAA,SAAO,mBAAmB;AAAA;AAAA;AAAA,EAI1B,OAAO,KAAK,IAAgB;AAC1B,wBAAmB,mBAAmB;AACtC,UAAM,iBAAiB;AACvB,wBAAmB,gBAAgB;AAAA;AAAA,IAEnC;AACA,WAAO,oBAAmB,cAAc,QAAQ,OAAM,QAAO;AAC3D,aAAO,WAAW,IAAI,KAAK,UAAU,YAAY;AAC/C,YAAI,aAAa,IAAI,KAAK;AAC1B,YAAI,WAAW,SAAS;AACtB,uBAAa;AAAA,YACX,GAAG;AAAA,YACH,GAAG,WAAW;AAAA,UAChB;AACA,iBAAO,WAAW;AAAA,QACpB;AAKA,YAAI,WAAiC,CAAC;AACtC,YAAI,oBAAI,0BAA0B;AAChC,qBAAW,IAAI,KAAK,KAAK;AAAA,QAC3B;AAEA,cAAM,eAAe,IAAI,KAAK,OAAO,YAAY;AAAA,UAC/C,QAAQ,IAAI,KAAK,KAAK;AAAA,UACtB,WAAW,IAAI,KAAK,KAAK;AAAA,UACzB,OAAO,IAAI,KAAK,KAAK;AAAA,UACrB;AAAA,QACF,CAAC;AAAA,MACH,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,aACJ,OACA,UACA,YACA,WACe;AACf,QAAI,oBAAmB,oBAAoB,UAAU,KAAK,GAAG;AAE3D,YAAM,SACJ,SAAS,6BAA6B,SAAS,KAAK;AAEtD,YAAM,oBAAmB,cAAc,IAAI;AAAA,QACzC;AAAA,QACA;AAAA,QACA,MAAM;AAAA,UACJ;AAAA,UACA;AAAA,UACA,OAAO,SAAS;AAAA,UAChB,UAAU,SAAS;AAAA,QACrB;AAAA,QACA,UAAU,YAAY;AAAA,MACxB,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,MAAM,SAAS,UAAoB,WAA6B;AAAA,EAEhE;AAAA,EAEA,MAAM,cAAc,OAAc,WAA6B;AAAA,EAE/D;AAAA,EAEA,WAAiB;AACf,wBAAmB,eAAe,MAAM;AAAA,EAC1C;AACF;;;AMzFA,IAAqB,YAArB,MAAyD;AAAA,EAIvD,YAAYC,aAA8B;AAH1C,uBAAuB;AACvB,sBAA+B,CAAC;AAG9B,SAAK,aAAaA;AAAA,EACpB;AAAA,EAEA,MAAM,aACJ,OACA,UACA,YACA,WACe;AACf,eAAW,kBAAkB,KAAK,YAAY;AAC5C,YAAM,eAAe,aAAa,OAAO,UAAU,YAAY,SAAS;AAAA,IAC1E;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,UACA,WACe;AACf,eAAW,kBAAkB,KAAK,YAAY;AAC5C,UAAI,eAAe,UAAU;AAC3B,cAAM,eAAe,SAAS,UAAU,SAAS;AAAA,MACnD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,cACJ,UACA,WACe;AACf,eAAW,kBAAkB,KAAK,YAAY;AAC5C,UAAI,eAAe,eAAe;AAChC,cAAM,eAAe,cAAc,UAAU,SAAS;AAAA,MACxD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,WAAW;AACT,eAAW,kBAAkB,KAAK,YAAY;AAC5C,UAAI,eAAe,UAAU;AAC3B,uBAAe,SAAS;AAAA,MAC1B;AAAA,IACF;AAAA,EACF;AACF;;;Ab7CO,IAAM,qBAAqB,IAAI,mBAAmB;AACzD,IAAM,mBAAmB,IAAI,iBAAiB;AAC9C,IAAM,qBAAqB,IAAI,mBAAmB;AAE3C,SAASC,MAAK,YAAwB;AAC3C,SAAO,mBAAmB,KAAK,UAAU;AAC3C;AAEO,IAAM,aAAa,IAAI,UAAW;AAAA,EACvC;AAAA,EACA;AAAA,EACA;AACF,CAAC;;;AclBDC;AACA;AACAC;AACA;;;ACHA;AAAA;AAAA;AAAA;AAAA;AAAA;AAEAC;AACA;AACAC;AACA,oBAAmB;AAEnBC;AAEO,IAAM,aAAa,YAAmC;AAC3D,SAAO,kEAA8C,kBAAkB;AAAA,IACrE,YAAY;AAAA,EACd,CAAC;AACH;AACA,eAAe,iBAAiB,YAAsB;AACpD,QAAM,UAAwB;AAAA,IAC5B,KAAK,gBAAgB,cAAc,KAAK;AAAA,IACxC,WAAW,MAAM;AAAA,IACjB,SAAS,oBAAY;AAAA,EACvB;AACA,MAAI;AACF,UAAM,OAAO,MAAM,WAAW,IAAI,OAAO;AACzC,YAAQ,OAAO,KAAK;AACpB,WAAO;AAAA,EACT,SAAS,KAAU;AACjB,QAAI,IAAI,WAAW,KAAK;AACtB,aAAO,iBAAiB;AAAA,IAC1B,OAAO;AACL,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAEO,IAAM,mBAAmB,YAAmC;AACjE,SAAO;AAAA,IACL,gBAAgB,cAAc;AAAA,IAC9B,OAAO,eAAoB;AACzB,UAAI;AACJ,UAAI;AACF,kBAAU,MAAM,WAAW;AAAA,UACzB,gBAAgB,cAAc,KAAK;AAAA,QACrC;AAAA,MACF,SAAS,GAAQ;AACf,YAAI,EAAE,WAAW,KAAK;AACpB,oBAAU,MAAM,iBAAiB,UAAU;AAAA,QAC7C,OAAO;AACL,gBAAM;AAAA,QACR;AAAA,MACF;AACA,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAEA,IAAM,gBAAgB,OA
AO,YAAsC;AACjE,MAAI;AACF,UAAM;AAAA,MACJ,gBAAgB,cAAc;AAAA,MAC9B,OAAO,eAAoB;AACzB,cAAM,UAAU,MAAM,WAAW;AACjC,gBAAQ,UAAU;AAClB,cAAM,WAAW,IAAI,OAAO;AAC5B,cAAM,2CAA+B;AAAA,MACvC;AAAA,IACF;AAAA,EACF,SAAS,GAAQ;AACf,QAAI,EAAE,WAAW,KAAK;AAGpB,aAAO;AAAA,IACT;AACA,UAAM;AAAA,EACR;AACA,SAAO;AACT;AAEO,IAAM,sBAAsB,YAA2B;AAC5D,QAAM,UAAU,MAAM,WAAW;AAEjC,QAAM,iBAAiB,QAAQ;AAC/B,QAAM,aAAa,oBAAY;AAE/B,MAAI,mBAAmB,YAAY;AACjC,UAAM,YAAY,cAAAC,QAAO,GAAG,YAAY,cAAc;AACtD,UAAM,cAAc,cAAAA,QAAO,GAAG,YAAY,cAAc;AAExD,UAAM,UAAU,MAAM,cAAc,UAAU;AAE9C,QAAI,SAAS;AACX,YAAc;AAAA,QACZ;AAAA,UACE,KAAK,QAAQ;AAAA,UACb;AAAA,QACF;AAAA,QACA,YAAY;AACV,cAAI,WAAW;AACb,kBAAa,qBAAa,SAAS,gBAAgB,UAAU;AAAA,UAC/D,WAAW,aAAa;AACtB,kBAAa,qBAAa,WAAW,gBAAgB,UAAU;AAAA,UACjE;AAAA,QACF;AAAA,MACF;AACA,YAAa,uBAAe,0BAA0B,QAAQ,SAAS;AAAA,IACzE;AAAA,EACF;AACF;;;ADxEA,IAAM,qBAAqB,YAA+B;AACxD,MAAI,kBAA8BC,aAAY;AAC9C,QAAMC,eAAc,yBAAyB;AAE7C,MAAI;AAEJ,MAAI,CAAC,iBAAiB;AACpB;AAAA,EACF,OAAO;AACL,mBAAe,gBAAgB;AAAA,EACjC;AAEA,MAAI,oDAA4C;AAC9C,UAAM,iBAAiB,MAAM,kBAAkB;AAC/C,UAAM,UAAU,kBAAkB;AAClC,WAAO;AAAA,MACL,IAAI,iBAAiB,gBAAgB,YAAY;AAAA,MACjD;AAAA,MACA,MAAM;AAAA,MACN;AAAA,MACA,aAAAA;AAAA,IACF;AAAA,EACF,WAAW,wCAAsC;AAC/C,UAAM,iBAAiB,MAAM,kBAAkB;AAC/C,UAAM,WAAW,MAAM,iBAAyB,YAAY,CAAC;AAC7D,UAAM,UAAU,kBAAkB;AAElC,WAAO;AAAA,MACL,IAAI,iBAAiB,UAAU,YAAY;AAAA,MAC3C,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA;AAAA,MACA,cAAsB,YAAY;AAAA,MAClC,aAAAA;AAAA,IACF;AAAA,EACF,WAAW,oCAAoC;AAC7C,UAAM,cAAc;AACpB,UAAM,WAAW,MAAM,iBAAyB,YAAY,CAAC;AAC7D,UAAM,iBAAiB,MAAM,kBAAkB;AAE/C,UAAM,UAAU,YAAY;AAC5B,QAAI;AACJ,QAAI,SAAS;AACX,gBAAU,QAAQ;AAAA,IACpB,OAAO;AACL,gBAAU,kBAAkB;AAAA,IAC9B;AAEA,WAAO;AAAA,MACL,IAAI,YAAY;AAAA,MAChB,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA;AAAA,MACA,aAAAA;AAAA,MACA,cAAsB,YAAY;AAAA,MAClC,UAAU,YAAY;AAAA,IACxB;AAAA,EACF,OAAO;AACL,UAAM,IAAI,MAAM,uBAAuB;AAAA,EACzC;AACF;AAEA,IAAM,4BAA4B,OAChC,WACA,cACkB;AAClB,QAAM,KAAK;AACX,QAAM;AACN,QAAM,UAAU,kBAAkB;AAClC,QAAM,UAAU,oBAAI;AACpB,QAAMA,eAAc,yBAAyB;AAE7C,QAAM,QAA2B;AAAA,IAC/B;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,aAAAA;AAAA,EACF;AAEA,QAAM,cAAc,OAAO,SAAS;AAGpC,QAAM,SAAS,EAAE,GAAG,OAAO,IAAI,IAAI,IAAI,IAAI,EAAE,GAAG,GAAG,SAAS;AAC9D;AAEA,IAAM,sBAAsB,OAC1B,UACA,SACA,cACkB;AAClB,QAAM,KAAK,MAAM,iBAAiB,QAAQ;AAC1C,QAAM;AACN,QAAM,iBAAiB,MAAM,kBAAkB;AAC/C,QAAMA,eAAc,yBAAyB;AAE7C,MAAI;AACJ,MAAI;AACJ,MAAI;AAEJ,MAAI,SAAS;AACX,iBAAa,QAAQ;AACrB,kBAAc,QAAQ;AACtB,cAAU,QAAQ;AAAA,EACpB,OAAO;AACL,cAAU,kBAAkB;AAAA,EAC9B;AAEA,QAAM,QAAqB;AAAA,IACzB;AAAA,IACA;AAAA,IACA;AAAA,IACA,aAAAA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,QAAM,cAAc,OAAO,SAAS;AAGpC,QAAM,SAAS,EAAE,GAAG,OAAO,IAAI,IAAI,IAAI,IAAI,EAAE,GAAG,GAAG,SAAS;AAC9D;AAEA,IAAM,eAAe,OACnB,MACA,SACA,cACG;AACH,QAAM,KAAK,KAAK;AAChB,QAAM,WAAW,MAAM,iBAAiB,KAAK,QAAQ;AACrD,QAAM;AACN,MAAI,UAAgBC,uBAAsB,IAAI;AAC9C,MAAI,QAAcC,qBAAoB,IAAI;AAC1C,MAAI;AACJ,MAAI,UAAU,IAAI,GAAG;AACnB,mBAAe,KAAK;AAAA,EACtB;AACA,QAAM,gBAAgB,SAAS,mBAAmB,KAAK,OAAO;AAC9D,QAAMC,YACJ,WAAW,SAAS,mBAAmB,KAAK,MAAM,QAAQ,WAAW;AACvE,QAAM,iBAAiB,MAAM,kBAAkB;AAC/C,QAAM,UAAU,UAAU,QAAQ,UAAU,kBAAkB;AAC9D,QAAMH,eAAc,yBAAyB;AAE7C,QAAM,WAAyB;AAAA,IAC7B;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAAG;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,aAAAH;AAAA,EACF;AAEA,QAAM,SAAS,UAAU,SAAS;AACpC;AAEA,IAAM,kBAAkB,OAAO,YAAqB;AAClD,MAAI,KAAK,QAAQ;AACjB,QAAM,WAAW,QAAQ;AACzB,MAAI;AACJ,MAAI,eAAe,aAAa,OAAO,IAAI,QAAQ,eAAe;AAClE,QAAMG,YAAW,QAAQ;AACzB,QAAM,gBAAgB;AACtB,QAAM,UAAU,QAAQ;AACxB,QAAM,iBAAiB,MAAM,kBAAkB;AAC/C,QAAMH,eAAc,yBAAyB;AAE7C,MAAI,eAAe,OAAO,GAAG;AAC3B,QAAI,QAAQ,gBAAgB;AAE1B,WAAK,QAAQ;AAAA,IACf;AAAA,EACF;AAEA,QAAM,WAAyB;AAAA,IAC7B;AAAA,IACA;AAAA,IAC
A;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAAG;AAAA,IACA;AAAA,IACA,aAAAH;AAAA,EACF;AAEA,QAAM,SAAS,QAAQ;AACzB;AAEA,IAAM,WAAW,OAAO,UAAoB,cAAgC;AAC1E,QAAM,WAAW,SAAS,UAAU,SAAS;AAC/C;AAEA,IAAM,gBAAgB,OAAO,OAAc,cAAgC;AACzE,QAAM,WAAW,cAAc,OAAO,SAAS;AACjD;AAEA,IAAM,2BAA2B,MAAM;AACrC,MAAI,oBAAI,MAAM,GAAG;AACf,WAAO;AAAA,EACT,OAAO;AACL,WAAO,oBAAI;AAAA,EACb;AACF;AAEA,IAAM,oBAAoB,MAAM;AAC9B,SAAO,oBAAI;AACb;AAEA,IAAM,oBAAoB,YAAY;AACpC,MAAI,gBAAgB,GAAG;AACrB,WAAO;AAAA,EACT;AACA,QAAM,UAAU,MAAmB,WAAW;AAC9C,SAAO,QAAQ;AACjB;AAEA,IAAM,mBAAmB,OAAO,aAAsC;AACpE,MAAI,oBAAI,aAAa;AACnB,WAAO,kBAAkB,QAAQ;AAAA,EACnC,OAAO;AAEL,WAAO;AAAA,EACT;AACF;AAEO,IAAM,oBAAoB,OAAO,aAAsC;AAE5E,SAAe,WAAW,UAAU,MAAM;AACxC,WAAO,wEAAkD,YAAY;AACnE,YAAM,KAAa,YAAY;AAC/B,YAAM,SAAS,MAAc,qBAAqB;AAElD,UAAI;AACJ,UAAI,OAAO,OAAO,gBAAgB;AAChC,eAAO,OAAO,OAAO;AAAA,MACvB,OAAO;AACL,yBAAiB,GAAG,MAAM,CAAC,IAAI,QAAQ;AACvC,eAAO,OAAO,iBAAiB;AAC/B,cAAM,GAAG,IAAI,MAAM;AACnB,eAAO;AAAA,MACT;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AACH;AAEA,IAAM,kBAAkB,MAAM;AAC5B,SAAO,oBAAI,YAAY;AACzB;AAEA,IAAM,mBAAmB,CAAC,IAAY,SAAuB;AAC3D,MAAI,8CAAsC,gCAA8B;AACtE,WAAO,IAAI,IAAI,IAAI,EAAE;AAAA,EACvB,OAAO;AACL,WAAO;AAAA,EACT;AACF;AAEA,IAAO,yBAAQ;AAAA,EACb;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;AErTA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAAI;AAAA,EAAA;AAAA;AAAA;AAsBAC;AAMO,IAAM,QAAQ,OAAOC,YAAoB;AAC9C,QAAM,WAA6B;AAAA,IACjC,gBAAgBA;AAAA,EAClB;AACA,SAAO,qBAAqB,QAAQ;AACtC;AAEO,IAAMC,eAAc,OAAO,OAAc,eAAoB;AAClE,QAAMC,YAAW,YAAY,OAAO,UAAU;AAG9C,QAAY,MAAMA,WAAU,YAAY,QAAW,EAAE,YAAY,MAAM,CAAC;AAC1E;AAEO,IAAM,MAAM,YAAY;AAC7B,QAAM,uBAAuB;AAC7B,QAAM,YAAY;AACpB;AAIA,IAAM,sBAAsB,YAA8C;AACxE,SAAaC,+CAA8B;AAC7C;AAEA,IAAM,uBAAuB,OAC3B,aACkB;AAElB,SAAa,kDAAkC,QAAQ;AACzD;AAEA,IAAM,yBAAyB,YAA2B;AACxD,QAAY,kDAAkC;AAChD;AAEA,IAAM,cAAc,YAAY;AAE9B,QAAM,UAAU,YAAY;AAC5B,QAAMC,QAAO,MAAY,KAAK,OAAO;AAErC,aAAW,OAAOA,OAAM;AAGtB,UAAY,QAAQ,KAAK,EAAE,YAAY,MAAM,CAAC;AAAA,EAChD;AACF;AAIO,IAAM,qBAAqB,OAAO,UAAiB;AACxD,QAAM,WAAW,MAAM,oBAAoB;AAC3C,QAAMJ,UAAS,UAAU;AACzB,MAAIA,WAAUA,QAAO,SAAS,KAAK,GAAG;AACpC,WAAO;AAAA,EACT,OAAO;AACL,WAAO;AAAA,EACT;AACF;AAEO,IAAM,gBAAgB,OAAO,OAAc,eAAoB;AACpE,QAAME,YAAW,YAAY,OAAO,UAAU;AAC9C,QAAM,cAA2B,MAAYC,KAAID,WAAU;AAAA,IACzD,YAAY;AAAA,EACd,CAAC;AACD,SAAO,CAAC,CAAC;AACX;AAEA,IAAM,yBAA8B;AAAA;AAAA,EAElC,8CAAyB,GAAG,CAAC,eAAuC;AAClE,WAAO,WAAW;AAAA,EACpB;AAAA,EACA,wDAA8B,GAAG,CAAC,eAA2C;AAC3E,WAAO,WAAW;AAAA,EACpB;AAAA,EACA,8CAAyB,GAAG,CAAC,eAAuC;AAClE,WAAO,WAAW;AAAA,EACpB;AAAA,EACA,sCAAqB,GAAG,CAAC,eAAmC;AAC1D,WAAO,WAAW;AAAA,EACpB;AAAA,EACA,oCAAoB,GAAG,CAAC,eAAkC;AACxD,WAAO,WAAW;AAAA,EACpB;AAAA,EACA,kCAAmB,GAAG,CAAC,eAAiC;AACtD,WAAO,WAAW;AAAA,EACpB;AAAA,EACA,sCAAqB,GAAG,CAAC,eAAmC;AAC1D,WAAO,WAAW;AAAA,EACpB;AAAA,EACA,oCAAoB,GAAG,CAAC,eAAkC;AACxD,WAAO,WAAW;AAAA,EACpB;AAAA,EACA,kCAAmB,GAAG,CAAC,eAAiC;AACtD,WAAO,WAAW;AAAA,EACpB;AAAA,EACA,0DAA+B,GAAG,CAChC,eACG;AACH,WAAO,WAAW;AAAA,EACpB;AAAA,EACA,gDAA0B,GAAG,CAAC,eAAuC;AACnE,WAAO,WAAW;AAAA,EACpB;AAAA,EACA,gCAAkB,GAAG,CAAC,eAAgC;AACpD,WAAO,WAAW;AAAA,EACpB;AAAA,EACA,oCAAoB,GAAG,CAAC,eAAkC;AACxD,WAAO,WAAW;AAAA,EACpB;AAAA;AAAA,EAEA,0CAAuB,GAAG,CAAC,eAAgC;AACzD,WAAO,WAAW;AAAA,EACpB;AAAA,EACA,8CAAyB,GAAG,CAAC,eAAgC;AAC3D,WAAO,WAAW;AAAA,EACpB;AAAA,EACA,kCAAmB,GAAG,CAAC,eAAiC;AACtD,WAAO,WAAW;AAAA,EACpB;AAAA,EACA,2DAAqC,GAAG,CACtC,eACG;AACH,WAAO,WAAW;AAAA,EACpB;AAAA,EACA,+DAAuC,GAAG,CACxC,eACG;AACH,WAAO,WAAW;AAAA,EACpB;AAAA,EACA,oCAAoB,GAAG,CAAC,eAAkC;AACxD,WAAO,GAAG,WAAW,MAAM,IAAI,WAAW,MAAM;AAAA,EAClD;AACF;AAEA,IAAM,cAAc,CAAC,OAAe,eAAqB;AACvD,MAAIA;AAEJ,QAAM,WAAmB,YAAY;AACrC,MAAI,OAAO;AACT,IAAAA,YAAW,wBAAkB,IAAI,QAAQ,IAAI,KAAK
;AAGlD,UAAM,SAAS,uBAAuB,KAAK;AAC3C,UAAM,SAAS,SAAS,OAAO,UAAU,IAAI;AAC7C,QAAI,QAAQ;AACV,MAAAA,YAAW,GAAGA,SAAQ,IAAI,MAAM;AAAA,IAClC;AAAA,EACF,OAAO;AACL,IAAAA,YAAW,wBAAkB,IAAI,QAAQ;AAAA,EAC3C;AAEA,SAAOA;AACT;;;AC3KO,IAAI;AAEJ,SAASG,QAAO;AACrB,oBAAkB,uDAAqD;AACzE;AAEA,eAAsBC,YAAW;AAC/B,MAAI,iBAAiB;AACnB,UAAM,gBAAgB,MAAM;AAAA,EAC9B;AACF;;;ACrBA;AAGA,eAAsB,kBAAkB,SAAuB;AAC7D,MAAI,CAAC,iBAAiB;AACpB,IAAAC,MAAK;AAAA,EACP;AACA,QAAM,EAAE,OAAO,SAAS,IAAI;AAC5B,MAAI,YAAY,QAAQ,KAAK,MAAM,MAAM,SAAS,UAAU;AAC1D,UAAM,gBAAgB,IAAI,OAAO;AAAA,EACnC;AACF;;;ACLO,IAAM,eAAe,OAC1B,OACA,YACA,cACG;AAEH,QAAM,WAAW,MAAM,uBAAe,mBAAmB;AAEzD,QAAM,cAAc,MAAe,mBAAmB,KAAK;AAE3D,MAAI,CAAC,aAAa;AAEhB,UAAM,kBAAkB;AAAA,MACtB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAED,UAAM,WAAW,aAAa,OAAO,UAAU,YAAY,SAAS;AACpE;AAAA,EACF;AAGA,QAAM,cAAc,MAAe,cAAc,OAAO,UAAU;AAClE,MAAI,aAAa;AAEf;AAAA,EACF,OAAO;AAEL,UAAM,WAAW,aAAa,OAAO,UAAU,YAAY,SAAS;AACpE,UAAeC,aAAY,OAAO,UAAU;AAAA,EAC9C;AACF;;;ACtCA;AAQA,eAAe,QAAQ,SAAkB;AACvC,QAAM,aAAkC;AAAA,IACtC,UAAU,QAAQ;AAAA,EACpB;AACA,QAAM,sDAAoC,UAAU;AACtD;AAEA,eAAe,QAAQ,SAAkB;AACvC,QAAM,aAAkC;AAAA,IACtC,UAAU,QAAQ;AAAA,EACpB;AACA,QAAM,sDAAoC,UAAU;AACtD;AAEA,eAAe,SAAS,SAAkB;AACxC,QAAM,aAAmC;AAAA,IACvC,UAAU,QAAQ;AAAA,EACpB;AACA,QAAM,wDAAqC,UAAU;AACvD;AAEA,IAAO,kBAAQ;AAAA,EACb;AAAA,EACA;AAAA,EACA;AACF;;;ACjCA;AAgBA,IAAMC,WAAU,OAAO,KAAU,cAAgC;AAC/D,QAAM,aAA8B;AAAA,IAClC,OAAO,IAAI;AAAA,IACX,SAAS,IAAI;AAAA,IACb,SAAS;AAAA,MACP,MAAM,IAAI;AAAA,IACZ;AAAA,EACF;AACA,QAAM,8CAAgC,YAAY,SAAS;AAC7D;AAEA,eAAe,QAAQ,KAAU;AAC/B,QAAM,aAA8B;AAAA,IAClC,OAAO,IAAI;AAAA,IACX,SAAS,IAAI;AAAA,IACb,SAAS;AAAA,MACP,MAAM,IAAI;AAAA,IACZ;AAAA,EACF;AACA,QAAM,8CAAgC,UAAU;AAClD;AAEA,eAAeC,SAAQ,KAAU;AAC/B,QAAM,aAA8B;AAAA,IAClC,OAAO,IAAI;AAAA,IACX,SAAS;AAAA,MACP,MAAM,IAAI;AAAA,IACZ;AAAA,EACF;AACA,QAAM,8CAAgC,UAAU;AAClD;AAEA,eAAe,UAAU,KAAU,WAA6B;AAC9D,QAAM,aAAgC;AAAA,IACpC,OAAO,IAAI;AAAA,IACX,SAAS;AAAA,MACP,MAAM,IAAI;AAAA,IACZ;AAAA,EACF;AACA,QAAM,kDAAkC,YAAY,SAAS;AAC/D;AAEA,eAAe,YAAY,KAAU;AACnC,QAAM,aAAkC;AAAA,IACtC,OAAO,IAAI;AAAA,IACX,SAAS;AAAA,MACP,MAAM,IAAI;AAAA,IACZ;AAAA,EACF;AACA,QAAM,sDAAoC,UAAU;AACtD;AAEA,eAAe,aAAa,KAAU;AACpC,QAAM,aAAmC;AAAA,IACvC,OAAO,IAAI;AAAA,IACX,SAAS;AAAA,MACP,MAAM,IAAI;AAAA,IACZ;AAAA,EACF;AACA,QAAM,0DAAsC,UAAU;AACxD;AAEA,eAAe,iBAAiB,KAAU,aAAqB;AAC7D,QAAM,aAAuC;AAAA,IAC3C,OAAO,IAAI;AAAA,IACX;AAAA,IACA,SAAS;AAAA,MACP,MAAM,IAAI;AAAA,IACZ;AAAA,EACF;AACA,QAAM,kEAA0C,UAAU;AAC5D;AAEA,eAAe,eACb,KACA,gBACA,kBACA;AACA,QAAM,aAAqC;AAAA,IACzC,OAAO,IAAI;AAAA,IACX;AAAA,IACA;AAAA,IACA,SAAS;AAAA,MACP,MAAM,IAAI;AAAA,IACZ;AAAA,EACF;AACA,QAAM,8DAAwC,UAAU;AAC1D;AAEA,eAAe,gBACb,KACA,gBACA,mBACA;AACA,QAAM,aAAsC;AAAA,IAC1C,OAAO,IAAI;AAAA,IACX;AAAA,IACA;AAAA,IACA,SAAS;AAAA,MACP,MAAM,IAAI;AAAA,IACZ;AAAA,EACF;AACA,QAAM,gEAAyC,UAAU;AAC3D;AAEA,eAAe,SAAS,KAAU;AAChC,QAAM,aAA+B;AAAA,IACnC,OAAO,IAAI;AAAA,IACX,SAAS;AAAA,MACP,MAAM,IAAI;AAAA,IACZ;AAAA,EACF;AACA,QAAM,gDAAiC,UAAU;AACnD;AAEA,eAAe,SAAS,KAAU;AAChC,QAAM,aAA+B;AAAA,IACnC,OAAO,IAAI;AAAA,IACX,SAAS;AAAA,MACP,MAAM,IAAI;AAAA,IACZ;AAAA,EACF;AACA,QAAM,gDAAiC,UAAU;AACnD;AAEA,IAAO,cAAQ;AAAA,EACb,SAAAD;AAAA,EACA;AAAA,EACA,SAAAC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ACzJA;AAaA,eAAe,MAAM,QAAqB,OAAe;AACvD,QAAM,WAAW,MAAM,uBAAe,mBAAmB;AACzD,QAAM,aAAyB;AAAA,IAC7B,QAAQ,SAAS;AAAA,IACjB;AAAA,IACA,SAAS;AAAA,MACP;AAAA,IACF;AAAA,EACF;AACA,QAAM,4CAA+B,UAAU;AACjD;AAEA,eAAe,OAAO,OAAgB;AACpC,QAAM,WAAW,MAAM,uBAAe,mBAAmB;AACzD,QAAM,aAA0B;AAAA,IAC9B,QAAQ,SAAS;AAAA,IACjB,SAAS;AAAA,MACP;AAAA,IACF;AAAA,EACF;AACA,QAAM,8CAAgC,UAAU;AAClD;AAEA,eAAe,WAAW,MAAe,WAA6B;AACpE,QAAM,aAA8B;AAAA,I
AClC;AAAA,EACF;AACA,QAAM,wDAAqC,YAAY,SAAS;AAClE;AAEA,eAAe,WAAW,MAAe;AACvC,QAAM,aAA8B;AAAA,IAClC;AAAA,EACF;AACA,QAAM,wDAAqC,UAAU;AACvD;AAEA,eAAe,aAAa,MAAe,WAA6B;AACtE,QAAM,aAAgC;AAAA,IACpC;AAAA,EACF;AACA,QAAM,4DAAuC,YAAY,SAAS;AACpE;AAEA,eAAe,eAAe,MAAe;AAC3C,QAAM,aAAkC;AAAA,IACtC;AAAA,EACF;AACA,QAAM,gEAAyC,UAAU;AAC3D;AAEA,IAAO,eAAQ;AAAA,EACb;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ACvEA;AAaA,eAAeC,SAAQ,YAAwB,WAA6B;AAC1E,QAAM,aAAqC;AAAA,IACzC,OAAO,WAAW;AAAA,IAClB,cAAc,WAAW;AAAA,IACzB,WAAW,WAAW,YAAY,SAAS;AAAA,IAC3C,aAAa,WAAW,YAAY,SAAS;AAAA,IAC7C,SAAS;AAAA,MACP,MAAM,WAAW;AAAA,IACnB;AAAA,EACF;AACA,QAAM,4DAAuC,YAAY,SAAS;AACpE;AAEA,eAAe,eAAe,YAAwB;AACpD,QAAM,aAA4C;AAAA,IAChD,OAAO,WAAW;AAAA,IAClB,cAAc,WAAW;AAAA,IACzB,WAAW,WAAW,YAAY,SAAS;AAAA,IAC3C,aAAa,WAAW,YAAY,SAAS;AAAA,EAC/C;AACA,QAAM,4EAA+C,UAAU;AACjE;AAEA,eAAeC,SAAQ,YAAwB;AAC7C,QAAM,aAAqC;AAAA,IACzC,OAAO,WAAW;AAAA,IAClB,cAAc,WAAW;AAAA,IACzB,WAAW,WAAW,YAAY,SAAS;AAAA,IAC3C,aAAa,WAAW,YAAY,SAAS;AAAA,IAC7C,SAAS;AAAA,MACP,MAAM,WAAW;AAAA,IACnB;AAAA,EACF;AACA,QAAM,4DAAuC,UAAU;AACzD;AAEA,eAAe,OAAO,YAAwB;AAC5C,QAAM,aAAoC;AAAA,IACxC,OAAO,WAAW;AAAA,IAClB,cAAc,WAAW;AAAA,IACzB,WAAW,WAAW,YAAY,SAAS;AAAA,IAC3C,aAAa,WAAW,YAAY,SAAS;AAAA,EAC/C;AACA,QAAM,0DAAsC,UAAU;AACxD;AAEA,IAAM,MAAM,OAAO,OAAe,cAAgC;AAChE,QAAM,aAAkC;AAAA,IACtC;AAAA,EACF;AACA,QAAM,sDAAoC,YAAY,SAAS;AACjE;AAEA,eAAe,YACb,YACA,MACA,WACA;AACA,QAAM,aAAyC;AAAA,IAC7C,OAAO,WAAW;AAAA,IAClB,cAAc,WAAW;AAAA,IACzB,WAAW,WAAW,YAAY,SAAS;AAAA,IAC3C,aAAa,WAAW,YAAY,SAAS;AAAA,IAC7C,QAAQ,KAAK;AAAA,IACb,UAAU,KAAK;AAAA,IACf,SAAS;AAAA,MACP,MAAM,WAAW;AAAA,IACnB;AAAA,EACF;AACA,QAAM,sEAA4C,YAAY,SAAS;AACzE;AAEA,eAAe,YAAY,YAAwB,MAAsB;AACvE,QAAM,aAAyC;AAAA,IAC7C,OAAO,WAAW;AAAA,IAClB,cAAc,WAAW;AAAA,IACzB,WAAW,WAAW,YAAY,SAAS;AAAA,IAC3C,aAAa,WAAW,YAAY,SAAS;AAAA,IAC7C,QAAQ,KAAK;AAAA,IACb,UAAU,KAAK;AAAA,IACf,SAAS;AAAA,MACP,MAAM,WAAW;AAAA,IACnB;AAAA,EACF;AACA,QAAM,sEAA4C,UAAU;AAC9D;AAEA,IAAO,qBAAQ;AAAA,EACb,SAAAD;AAAA,EACA;AAAA,EACA,SAAAC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;AC5GA;AASA,SAAS,SAASC,aAAwB;AACxC,QAAM,UAAU,OAAO,OAAO,UAAU;AAExC,SAAO,CAAC,QAAQ,SAASA,YAAW,MAAM;AAC5C;AAEA,eAAeC,SAAQD,aAAwB,WAA6B;AAC1E,QAAM,aAAqC;AAAA,IACzC,cAAcA,YAAW;AAAA,IACzB,QAAQA,YAAW;AAAA,IACnB,QAAQ,SAASA,WAAU;AAAA,EAC7B;AACA,QAAM,4DAAuC,YAAY,SAAS;AACpE;AAEA,eAAeE,SAAQF,aAAwB;AAC7C,QAAM,aAAqC;AAAA,IACzC,cAAcA,YAAW;AAAA,IACzB,QAAQA,YAAW;AAAA,IACnB,QAAQ,SAASA,WAAU;AAAA,EAC7B;AACA,QAAM,4DAAuC,UAAU;AACzD;AAEA,eAAeG,SAAQH,aAAwB;AAC7C,QAAM,aAAqC;AAAA,IACzC,cAAcA,YAAW;AAAA,IACzB,QAAQA,YAAW;AAAA,IACnB,QAAQ,SAASA,WAAU;AAAA,EAC7B;AACA,QAAM,4DAAuC,UAAU;AACzD;AAEA,IAAO,qBAAQ;AAAA,EACb,SAAAC;AAAA,EACA,SAAAC;AAAA,EACA,SAAAC;AACF;;;AC9CA;AAEA,eAAe,YAAY,WAA6B;AACtD,QAAM,aAA+B,CAAC;AACtC,QAAM,4DAAuC,YAAY,SAAS;AACpE;AAEA,eAAe,cAAc;AAC3B,QAAM,aAA+B,CAAC;AACtC,QAAM,4DAAuC,UAAU;AACzD;AAEA,IAAO,gBAAQ;AAAA,EACb;AAAA,EACA;AACF;;;ACfA;AAcA,eAAe,YACb,SACA,MAQA;AACA,QAAM,aAAsC;AAAA,IAC1C,WAAW,QAAQ;AAAA,IACnB,GAAG;AAAA,EACL;AACA,QAAM,gEAAyC,UAAU;AAC3D;AAEA,eAAe,UAAU,SAAkB;AACzC,QAAM,aAAoC;AAAA,IACxC,WAAW,QAAQ;AAAA,EACrB;AACA,QAAM,0DAAsC,UAAU;AACxD;AAEA,eAAe,eAAe,SAAkB;AAC9C,QAAM,aAAyC;AAAA,IAC7C,WAAW,QAAQ;AAAA,EACrB;AACA,QAAM,sEAA4C,UAAU;AAC9D;AAEA,eAAe,gBAAgB,SAAkB;AAC/C,QAAM,aAA0C;AAAA,IAC9C,WAAW,QAAQ;AAAA,EACrB;AACA,QAAM,wEAA6C,UAAU;AAC/D;AAEA,eAAe,aAAa,SAAkB;AAC5C,QAAM,aAAuC;AAAA,IAC3C,WAAW,QAAQ;AAAA,EACrB;AACA,QAAM,kEAA0C,UAAU;AAC5D;AAEA,eAAe,cAAc,SAAkB;AAC7C,QAAM,aAAwC;AAAA,IAC5C,WAAW,QAAQ;AAAA,EACrB;AACA,QAAM,oEAA2C,UAAU;AAC7D;AAEA,eAAe,iBAAiB,SAAkB;AAChD,QAAM,aAA2C;AAAA,IAC/C,WAAW,QAAQ;AAAA,EACrB;AACA,QAAM,0EAA8C,UAAU;AAChE;AAEA,IAAO,kBAAQ;AAAA,E
ACb;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;AClFA;AAOA,eAAeC,SAAQ,QAAgB,WAA6B;AAClE,QAAM,aAAiC;AAAA,IACrC,UAAU,OAAO;AAAA,EACnB;AACA,QAAM,oDAAmC,YAAY,SAAS;AAChE;AAEA,eAAeC,SAAQ,UAAkB;AACvC,QAAM,aAAiC;AAAA,IACrC;AAAA,EACF;AACA,QAAM,oDAAmC,UAAU;AACrD;AAEA,IAAO,iBAAQ;AAAA,EACb,SAAAD;AAAA,EACA,SAAAC;AACF;;;ACxBA;AAEA,eAAe,YAAY,WAA6B;AACtD,QAAM,aAAa,CAAC;AACpB,QAAM,6DAAqC,YAAY,SAAS;AAClE;AAEA,eAAe,YAAY,WAA6B;AACtD,QAAM,aAAa,CAAC;AACpB,QAAM,6DAAqC,YAAY,SAAS;AAClE;AAEA,eAAe,mBAAmB,WAA6B;AAC7D,QAAM,aAAa,CAAC;AACpB,QAAM,uEAA6C,YAAY,SAAS;AAC1E;AAIA,eAAe,kBAAkB;AAC/B,QAAM,aAAa,CAAC;AACpB,QAAM,0DAAsC,UAAU;AACxD;AAEA,eAAe,iBAAiB;AAC9B,QAAM,aAAa,CAAC;AACpB,QAAM,0DAAsC,UAAU;AACxD;AAEA,IAAO,cAAQ;AAAA,EACb;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ACnCA;AAcA,IAAMC,WAAU,OACdC,aACA,OACA,cACG;AACH,QAAM,aAAgC;AAAA,IACpC,SAAS,MAAM;AAAA,IACf,cAAcA,YAAW;AAAA,IACzB,QAAQA,YAAW;AAAA,IACnB,WAAW,MAAM;AAAA,EACnB;AACA,QAAM,kDAAkC,YAAY,SAAS;AAC/D;AAEA,IAAMC,WAAU,OAAOD,aAAwB,UAAiB;AAC9D,QAAM,aAAgC;AAAA,IACpC,SAAS,MAAM;AAAA,IACf,cAAcA,YAAW;AAAA,IACzB,QAAQA,YAAW;AAAA,IACnB,WAAW,MAAM;AAAA,EACnB;AACA,QAAM,kDAAkC,UAAU;AACpD;AAEA,IAAME,WAAU,OAAOF,aAAwB,UAAiB;AAC9D,QAAM,aAAgC;AAAA,IACpC,SAAS,MAAM;AAAA,IACf,cAAcA,YAAW;AAAA,IACzB,QAAQA,YAAW;AAAA,IACnB,WAAW,MAAM;AAAA,EACnB;AACA,QAAM,kDAAkC,UAAU;AACpD;AAEA,IAAM,WAAW,OACfA,aACA,cACA,UACG;AACH,QAAM,aAAiC;AAAA,IACrC,cAAcA,YAAW;AAAA,IACzB,QAAQA,YAAW;AAAA,IACnB;AAAA,IACA;AAAA,EACF;AACA,QAAM,gDAAiC,UAAU;AACnD;AAEA,IAAMG,OAAM,OAAO,OAAe,cAAgC;AAChE,QAAM,aAA8B;AAAA,IAClC;AAAA,EACF;AACA,QAAM,8CAAgC,YAAY,SAAS;AAC7D;AAEA,IAAM,YAAY,OAAOH,aAAwB,UAAiB;AAChE,QAAM,aAAkC;AAAA,IACtC,SAAS,MAAM;AAAA,IACf,cAAcA,YAAW;AAAA,IACzB,QAAQA,YAAW;AAAA,IACnB,WAAW,MAAM;AAAA,EACnB;AACA,QAAM,sDAAoC,UAAU;AACtD;AAEA,IAAO,gBAAQ;AAAA,EACb,SAAAD;AAAA,EACA,SAAAE;AAAA,EACA,SAAAC;AAAA,EACA;AAAA,EACA,KAAAC;AAAA,EACA;AACF;;;ACtFA;AAWA,eAAeC,SAAQ,MAAY,WAA6B;AAC9D,QAAM,aAA+B;AAAA,IACnC,QAAQ,KAAK;AAAA,IACb,cAAc,KAAK;AAAA,IACnB,UAAU,KAAK;AAAA,EACjB;AACA,QAAM,gDAAiC,YAAY,SAAS;AAC9D;AAEA,eAAeC,SAAQ,MAAY;AACjC,QAAM,aAA+B;AAAA,IACnC,QAAQ,KAAK;AAAA,IACb,cAAc,KAAK;AAAA,IACnB,UAAU,KAAK;AAAA,EACjB;AACA,QAAM,gDAAiC,UAAU;AACnD;AAEA,eAAeC,SAAQ,MAAY;AACjC,QAAM,aAA+B;AAAA,IACnC,QAAQ,KAAK;AAAA,IACb,cAAc,KAAK;AAAA,IACnB,UAAU,KAAK;AAAA,EACjB;AACA,QAAM,gDAAiC,UAAU;AACnD;AAEA,eAAe,SAAS,MAAY,QAAgB,WAAoB;AACtE,QAAM,aAAgC;AAAA,IACpC,QAAQ,KAAK;AAAA,IACb;AAAA,EACF;AACA,QAAM,kDAAkC,YAAY,SAAS;AAC/D;AAEA,eAAe,WAAW,MAAY,QAAgB;AACpD,QAAM,aAAkC;AAAA,IACtC,QAAQ,KAAK;AAAA,IACb;AAAA,EACF;AACA,QAAM,sDAAoC,UAAU;AACtD;AAEA,IAAO,eAAQ;AAAA,EACb,SAAAF;AAAA,EACA,SAAAC;AAAA,EACA,SAAAC;AAAA,EACA;AAAA,EACA;AACF;;;AC5DA;AAOA,eAAeC,SAAQ,QAAgB,WAA6B;AAClE,QAAM,aAAiC;AAAA,IACrC,UAAU,OAAO;AAAA,IACjB,UAAU,OAAO;AAAA,IACjB,QAAQ,OAAO,QAAQ;AAAA,IACvB,SAAS;AAAA,MACP,MAAM,OAAO,SAAS;AAAA,IACxB;AAAA,EACF;AACA,QAAM,oDAAmC,YAAY,SAAS;AAChE;AAEA,eAAeC,SAAQ,QAAgB;AACrC,QAAM,aAAiC;AAAA,IACrC,UAAU,OAAO;AAAA,IACjB,UAAU,OAAO;AAAA,IACjB,QAAQ,OAAO,QAAQ;AAAA,IACvB,SAAS;AAAA,MACP,MAAM,OAAO,SAAS;AAAA,IACxB;AAAA,EACF;AACA,QAAM,oDAAmC,UAAU;AACrD;AAEA,IAAO,iBAAQ;AAAA,EACb,SAAAD;AAAA,EACA,SAAAC;AACF;;;AClCA;AASA,IAAMC,WAAU,OAAO,OAAe,cAAgC;AACpE,QAAM,aAA+B;AAAA,IACnC;AAAA,EACF;AACA,QAAM,gDAAiC,YAAY,SAAS;AAC9D;AAEA,IAAMC,YAAW,OAAO,OAAc,UAAkB;AACtD,QAAM,aAAgC;AAAA,IACpC,SAAS,MAAM;AAAA,IACf;AAAA,EACF;AACA,QAAM,kDAAkC,UAAU;AACpD;AAEA,IAAO,eAAQ;AAAA,EACb,SAAAD;AAAA,EACA,UAAAC;AACF;;;AC3BA;AAWA,eAAeC,UAAQ,OAAc,WAA6B;AAChE,QAAM,aAAgC;AAAA,IACpC,SAAS,MAAM;AAAA,IACf,SAAS;AAAA,MACP,MAAM,MAAM;AAAA,IACd;AAAA,EACF;AACA,QAAM,kDAAkC,YAAY,SAAS;AAC/D;AAEA,eAAeC,SAAQ,OAAc;AACnC,QAAM,aAAgC;AAA
A,IACpC,SAAS,MAAM;AAAA,IACf,SAAS;AAAA,MACP,MAAM,MAAM;AAAA,IACd;AAAA,EACF;AACA,QAAM,kDAAkC,UAAU;AACpD;AAEA,eAAeC,SAAQ,OAAc;AACnC,QAAM,aAAgC;AAAA,IACpC,SAAS,MAAM;AAAA,IACf,SAAS;AAAA,MACP,MAAM,MAAM;AAAA,IACd;AAAA,EACF;AACA,QAAM,kDAAkC,UAAU;AACpD;AAEA,eAAeC,UAAS,OAAc,QAA2B;AAC/D,QAAM,aAAiC;AAAA,IACrC,SAAS,MAAM;AAAA,IACf;AAAA,IACA,SAAS;AAAA,MACP,MAAM,MAAM;AAAA,IACd;AAAA,EACF;AACA,QAAM,oDAAmC,UAAU;AACrD;AAEA,eAAeC,UAAS,OAAc;AACpC,QAAM,aAAiC;AAAA,IACrC,SAAS,MAAM;AAAA,IACf,SAAS;AAAA,MACP,MAAM,MAAM;AAAA,IACd;AAAA,EACF;AACA,QAAM,oDAAmC,UAAU;AACrD;AAEA,IAAO,gBAAQ;AAAA,EACb,SAAAJ;AAAA,EACA,SAAAC;AAAA,EACA,SAAAC;AAAA,EACA,UAAAC;AAAA,EACA,UAAAC;AACF;;;ACpEA;AAQA,eAAe,cAAc,UAAkB;AAC7C,QAAM,aAAiC;AAAA,IACrC;AAAA,EACF;AACA,QAAM,oDAAmC,UAAU;AACrD;AAEA,eAAe,UACb,KACA,UACA,OACA;AACA,QAAM,aAA6B;AAAA,IACjC,YAAY,IAAI;AAAA,IAChB;AAAA,IACA,OAAO,UAAU;AAAA,EACnB;AACA,QAAM,4CAA+B,UAAU;AACjD;AAEA,eAAe,iBAAiB,KAAU,UAAkB;AAC1D,QAAM,aAAoC;AAAA,IACxC,OAAO,IAAI;AAAA,IACX,YAAY,IAAI;AAAA,IAChB;AAAA,EACF;AACA,QAAM,4DAAuC,UAAU;AACzD;AAEA,IAAO,gBAAQ;AAAA,EACb;AAAA,EACA;AAAA,EACA;AACF;;;ACzCA;AAiBAC;AAEA,eAAeC,UAAQ,MAAY,WAAoB;AACrD,QAAM,aAA+B;AAAA,IACnC,QAAQ,KAAK;AAAA,IACb,SAAS,OAAO;AAAA,IAChB,SAAS;AAAA,MACP,OAAO,KAAK;AAAA,IACd;AAAA,EACF;AACA,QAAM,gDAAiC,YAAY,SAAS;AAC9D;AAEA,eAAeC,SAAQ,MAAY;AACjC,QAAM,aAA+B;AAAA,IACnC,QAAQ,KAAK;AAAA,IACb,SAAS,OAAO;AAAA,IAChB,SAAS;AAAA,MACP,OAAO,KAAK;AAAA,IACd;AAAA,EACF;AACA,QAAM,gDAAiC,UAAU;AACnD;AAEA,eAAeC,UAAQ,MAAY;AACjC,QAAM,aAA+B;AAAA,IACnC,QAAQ,KAAK;AAAA,IACb,SAAS,OAAO;AAAA,IAChB,SAAS;AAAA,MACP,OAAO,KAAK;AAAA,IACd;AAAA,EACF;AACA,QAAM,gDAAiC,UAAU;AACnD;AAEA,eAAsB,mBAAmB,MAAY;AACnD,QAAM,aAAkC;AAAA,IACtC,QAAQ,KAAK;AAAA,IACb,SAAS;AAAA,MACP,OAAO,KAAK;AAAA,IACd;AAAA,EACF;AACA,QAAM,wEAA6C,UAAU;AAC/D;AAIA,eAAe,wBAAwB,MAAY,WAAoB;AACrE,QAAM,aAA0C;AAAA,IAC9C,QAAQ,KAAK;AAAA,IACb,SAAS;AAAA,MACP,OAAO,KAAK;AAAA,IACd;AAAA,EACF;AACA,QAAM;AAAA;AAAA,IAEJ;AAAA,IACA;AAAA,EACF;AACF;AAEA,eAAe,uBAAuB,MAAY;AAChD,QAAM,aAAyC;AAAA,IAC7C,QAAQ,KAAK;AAAA,IACb,SAAS;AAAA,MACP,OAAO,KAAK;AAAA,IACd;AAAA,EACF;AACA,QAAM,uEAAkD,UAAU;AACpE;AAEA,eAAe,0BAA0B,MAAY,WAAoB;AACvE,QAAM,aAA0C;AAAA,IAC9C,QAAQ,KAAK;AAAA,IACb,SAAS;AAAA,MACP,OAAO,KAAK;AAAA,IACd;AAAA,EACF;AACA,QAAM;AAAA;AAAA,IAEJ;AAAA,IACA;AAAA,EACF;AACF;AAEA,eAAe,yBAAyB,MAAY;AAClD,QAAM,aAAyC;AAAA,IAC7C,QAAQ,KAAK;AAAA,IACb,SAAS;AAAA,MACP,OAAO,KAAK;AAAA,IACd;AAAA,EACF;AACA,QAAM,2EAAoD,UAAU;AACtE;AAIA,eAAe,QAAQ,OAAe;AACpC,QAAM,aAA+B;AAAA,IACnC,SAAS;AAAA,MACP;AAAA,IACF;AAAA,EACF;AACA,QAAM,gDAAiC,UAAU;AACnD;AAEA,eAAe,eAAe,MAAY;AACxC,QAAM,aAAsC;AAAA,IAC1C,QAAQ,KAAK;AAAA,IACb,SAAS;AAAA,MACP,OAAO,KAAK;AAAA,IACd;AAAA,EACF;AACA,QAAM,iEAA0C,UAAU;AAC5D;AAIA,eAAe,mBAAmB,MAAY;AAC5C,QAAM,aAA0C;AAAA,IAC9C,QAAQ,KAAK;AAAA,IACb,SAAS;AAAA,MACP,OAAO,KAAK;AAAA,IACd;AAAA,EACF;AACA,QAAM,0EAA8C,UAAU;AAChE;AAEA,eAAe,gBAAgB,MAAY;AACzC,QAAM,aAAuC;AAAA,IAC3C,QAAQ,KAAK;AAAA,IACb,SAAS;AAAA,MACP,OAAO,KAAK;AAAA,IACd;AAAA,EACF;AACA,QAAM,kEAA0C,UAAU;AAC5D;AAEA,eAAe,uBAAuB,MAAY;AAChD,QAAM,aAA8C;AAAA,IAClD,QAAQ,KAAK;AAAA,IACb,SAAS;AAAA,MACP,OAAO,KAAK;AAAA,IACd;AAAA,EACF;AACA,QAAM,kFAAkD,UAAU;AACpE;AAEA,eAAe,cAAc,MAAY;AACvC,QAAM,aAAqC;AAAA,IACzC,QAAQ,KAAK;AAAA,IACb,SAAS;AAAA,MACP,OAAO,KAAK;AAAA,IACd;AAAA,EACF;AACA,QAAM,8DAAwC,UAAU;AAC1D;AAIA,eAAe,kBAAkB,OAAe;AAC9C,QAAM,aAAyC;AAAA,IAC7C;AAAA,EACF;AACA,QAAM,sEAA4C,UAAU;AAC9D;AAEA,IAAO,eAAQ;AAAA,EACb,SAAAF;AAAA,EACA,SAAAC;AAAA,EACA,SAAAC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ACxMA;AAoBA,eAAeC,UAAQ,MAAY,WAA6B;AAC9D,QAAM,aAA+B;AAAA,IACnC,SAAS,KAAK;AAAA,EAChB;AACA,QAAM,gDAAiC,YAAY,S
AAS;AAC9D;AAEA,eAAeC,SAAQ,MAAY;AACjC,QAAM,aAA+B;AAAA,IACnC,SAAS,KAAK;AAAA,EAChB;AACA,QAAM,gDAAiC,UAAU;AACnD;AAEA,eAAeC,UAAQ,MAAY;AACjC,QAAM,aAA+B;AAAA,IACnC,SAAS,KAAK;AAAA,EAChB;AACA,QAAM,gDAAiC,UAAU;AACnD;AAEA,eAAeC,UAAS,OAAc,QAA2B;AAC/D,QAAM,aAAgC;AAAA,IACpC,SAAS,MAAM;AAAA,IACf;AAAA,EACF;AACA,QAAM,kDAAkC,UAAU;AACpD;AAEA,eAAe,cAAc,MAAY,WAA6B;AACpE,QAAM,aAAqC;AAAA,IACzC,SAAS,KAAK;AAAA,EAChB;AACA,QAAM,8DAAwC,YAAY,SAAS;AACrE;AAEA,eAAe,cAAc,MAAY;AACvC,QAAM,aAAqC;AAAA,IACzC,SAAS,KAAK;AAAA,EAChB;AACA,QAAM,8DAAwC,UAAU;AAC1D;AAEA,eAAe,cAAc,MAAY;AACvC,QAAM,aAAqC;AAAA,IACzC,SAAS,KAAK;AAAA,EAChB;AACA,QAAM,8DAAwC,UAAU;AAC1D;AAEA,eAAe,mBAAmB,MAAY,WAA6B;AACzE,QAAM,aAA0C;AAAA,IAC9C,SAAS,KAAK;AAAA,IACd,aAAa,KAAK;AAAA,EACpB;AACA,QAAM,wEAA6C,YAAY,SAAS;AAC1E;AAEA,eAAe,mBAAmB,MAAY;AAC5C,QAAM,aAA0C;AAAA,IAC9C,SAAS,KAAK;AAAA,IACd,aAAa,KAAK;AAAA,EACpB;AACA,QAAM,wEAA6C,UAAU;AAC/D;AAEA,eAAe,mBAAmB,cAAoB;AACpD,QAAM,aAA0C;AAAA,IAC9C,SAAS,aAAa;AAAA,IACtB,aAAa,aAAa;AAAA,EAC5B;AACA,QAAM,wEAA6C,UAAU;AAC/D;AAEA,IAAO,eAAQ;AAAA,EACb,SAAAH;AAAA,EACA,SAAAC;AAAA,EACA,SAAAC;AAAA,EACA,UAAAC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ACzGA;AAEA,eAAe,eAAe,SAAiB;AAC7C,QAAM,aAAkC;AAAA,IACtC,gBAAgB;AAAA,EAClB;AACA,QAAM,gFAAiD,UAAU;AACnE;AAEA,eAAe,SAAS,MAAc,IAAY;AAChD,QAAM,aAAiC;AAAA,IACrC;AAAA,IACA;AAAA,EACF;AAEA,QAAM,kFAAkD,UAAU;AACpE;AAEA,eAAe,WAAW,MAAc,IAAY;AAClD,QAAM,aAAiC;AAAA,IACrC;AAAA,IACA;AAAA,EACF;AACA,QAAM,sFAAoD,UAAU;AACtE;AAEA,eAAe,eAAe;AAC5B,QAAM,aAAa,CAAC;AACpB,QAAM,2EAA+C,UAAU;AACjE;AAEA,IAAO,uBAAQ;AAAA,EACb;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ACpCA;AASAC;AAEA,IAAM,aAAa,CAAC,oBAAI,eAAe,CAAC,oBAAI,MAAM;AAElD,eAAe,aAAa,YAAuC;AACjE,MAAI,YAAY;AACd;AAAA,EACF;AACA,QAAM,oEAA2C,UAAU;AAC7D;AAEA,eAAe,UAAU,OAAY;AACnC,MAAI,YAAY;AACd;AAAA,EACF;AACA,QAAM,aAAqC;AAAA,IACzC,OAAO,KAAK,UAAU,OAAO,OAAO,oBAAoB,KAAK,CAAC;AAAA,EAChE;AACA,QAAM,8DAAwC,UAAU;AAC1D;AAEA,eAAe,gBAAgB,YAA0C;AACvE,MAAI,YAAY;AACd;AAAA,EACF;AACA,QAAM,0EAA8C,UAAU;AAChE;AAEA,eAAe,aAAa,OAAY;AACtC,MAAI,YAAY;AACd;AAAA,EACF;AACA,QAAM,aAAwC;AAAA,IAC5C,OAAO,KAAK,UAAU,OAAO,OAAO,oBAAoB,KAAK,CAAC;AAAA,EAChE;AACA,QAAM,oEAA2C,UAAU;AAC7D;AAEA,eAAe,wBAAwB;AACrC,MAAI,YAAY;AACd;AAAA,EACF;AACA,QAAM,aAAiD,CAAC;AACxD,QAAM,sFAAoD,UAAU;AACtE;AAEA,eAAe,mBAAmB,OAAY;AAC5C,MAAI,YAAY;AACd;AAAA,EACF;AACA,QAAM,aAA8C;AAAA,IAClD,OAAO,KAAK,UAAU,OAAO,OAAO,oBAAoB,KAAK,CAAC;AAAA,EAChE;AACA,QAAM,gFAAiD,UAAU;AACnE;AAEA,IAAO,mBAAQ;AAAA,EACb;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ACxEA;AAWAC;AAEA,eAAeC,UAAQ,OAAkB,WAAoB;AAC3D,QAAM,aAAgC;AAAA,IACpC,SAAS,MAAM;AAAA,IACf,SAAS,OAAO;AAAA,IAChB,SAAS;AAAA,MACP,MAAM,MAAM;AAAA,IACd;AAAA,EACF;AACA,QAAM,4DAAuC,YAAY,SAAS;AACpE;AAEA,eAAeC,SAAQ,OAAkB;AACvC,QAAM,aAAgC;AAAA,IACpC,SAAS,MAAM;AAAA,IACf,SAAS,OAAO;AAAA,IAChB,SAAS;AAAA,MACP,MAAM,MAAM;AAAA,IACd;AAAA,EACF;AACA,QAAM,4DAAuC,UAAU;AACzD;AAEA,eAAeC,UAAQ,OAAkB;AACvC,QAAM,aAAgC;AAAA,IACpC,SAAS,MAAM;AAAA,IACf,SAAS,OAAO;AAAA,IAChB,SAAS;AAAA,MACP,MAAM,MAAM;AAAA,IACd;AAAA,EACF;AACA,QAAM,4DAAuC,UAAU;AACzD;AAEA,eAAe,WAAW,OAAe,OAAkB;AACzD,QAAM,aAAmC;AAAA,IACvC;AAAA,IACA,SAAS,MAAM;AAAA,IACf,SAAS,OAAO;AAAA,IAChB,SAAS;AAAA,MACP,MAAM,MAAM;AAAA,IACd;AAAA,EACF;AACA,QAAM,mEAA2C,UAAU;AAC7D;AAEA,eAAe,aAAa,OAAe,OAAkB;AAC3D,QAAM,aAAqC;AAAA,IACzC;AAAA,IACA,SAAS,MAAM;AAAA,IACf,SAAS,OAAO;AAAA,IAChB,SAAS;AAAA,MACP,MAAM,MAAM;AAAA,IACd;AAAA,EACF;AACA,QAAM,wEAA6C,UAAU;AAC/D;AAEA,eAAe,kBAAkB,SAAiB;AAChD,QAAM,aAAwC;AAAA,IAC5C;AAAA,IACA,YAAY;AAAA,EACd;AACA,QAAM,wEAA0C,UAAU;AAC5D;AAEA,eAAe,kBAAkB,OAAkB;AACjD,QAAM,aAA0C;AAAA,IAC9C,aAAa,MAAM;AAAA,IACnB,SAAS,MAAM;AAAA,IACf,SAAS;AAAA,MACP,MAAM,MAAM;AAAA,IACd;AAA
A,EACF;AACA,QAAM,kFAAkD,UAAU;AACpE;AAEA,IAAO,gBAAQ;AAAA,EACb,SAAAF;AAAA,EACA,SAAAC;AAAA,EACA,SAAAC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ACjGA;AAQA,eAAeC,MAAK,QAAgB;AAClC,QAAM,aAA8B;AAAA,IAClC,MAAM,OAAO,OAAO;AAAA,IACpB,MAAM,OAAO;AAAA,IACb,aAAa,OAAO;AAAA,IACpB,SAAS,OAAO;AAAA,EAClB;AACA,QAAM,8CAAgC,UAAU;AAClD;AAEA,eAAeC,UAAS,QAAgB;AACtC,QAAM,aAAkC;AAAA,IACtC,UAAU,OAAO;AAAA,IACjB,MAAM,OAAO,OAAO;AAAA,IACpB,QAAQ,OAAO;AAAA,IACf,MAAM,OAAO;AAAA,IACb,aAAa,OAAO;AAAA,IACpB,SAAS,OAAO;AAAA,EAClB;AACA,QAAM,sDAAoC,UAAU;AACtD;AAEA,eAAeC,UAAQ,QAAgB;AACrC,QAAM,aAAiC;AAAA,IACrC,UAAU,OAAO;AAAA,IACjB,MAAM,OAAO,OAAO;AAAA,IACpB,MAAM,OAAO;AAAA,IACb,aAAa,OAAO;AAAA,IACpB,SAAS,OAAO;AAAA,EAClB;AACA,QAAM,oDAAmC,UAAU;AACrD;AAEA,IAAO,iBAAQ;AAAA,EACb,MAAAF;AAAA,EACA,UAAAC;AAAA,EACA,SAAAC;AACF;;;AC9CA;AAUA,eAAe,kBAAkB,QAAmB;AAClD,QAAM,aAAoC;AAAA,IACxC,OAAO,OAAO;AAAA,IACd,WAAW,OAAO;AAAA,IAClB,iBAAiB,OAAO;AAAA,IACxB,MAAM,OAAO;AAAA,EACf;AAEA,QAAM,8DAAwC,UAAU;AAC1D;AAEA,eAAe,mBACb,OACA,UACA,MACA,SACA,MACA;AACA,QAAM,aAAsC;AAAA,IAC1C;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACA,QAAM,gEAAyC,UAAU;AAC3D;AAEA,IAAO,iBAAQ;AAAA,EACb;AAAA,EACA;AACF;;;ACzCA;AAQA,eAAeC,UAAQ,MAAc,cAAwB;AAC3D,QAAM,aAA8C;AAAA,IAClD;AAAA,IACA;AAAA,EACF;AACA,QAAM,gFAAiD,UAAU;AACnE;AAEA,eAAeC,UAAQ,MAAc;AACnC,QAAM,aAA8C;AAAA,IAClD;AAAA,EACF;AACA,QAAM,gFAAiD,UAAU;AACnE;AAEA,eAAe,mBAAmB,QAAgB;AAChD,QAAM,aAAyD;AAAA,IAC7D;AAAA,EACF;AACA,QAAM;AAAA;AAAA,IAEJ;AAAA,EACF;AACF;AAEA,IAAO,8BAAQ;AAAA,EACb,SAAAD;AAAA,EACA,SAAAC;AAAA,EACA;AACF;;;ACrCA;AAQA,eAAe,SAAS,QAA8B;AACpD,QAAM,aAAoC;AAAA,IACxC,SAAS;AAAA,EACX;AACA,QAAM,6DAAwC,UAAU;AAC1D;AAEA,eAAe,WAAW,QAA8B;AACtD,QAAM,aAAsC;AAAA,IAC1C,SAAS;AAAA,EACX;AACA,QAAM,iEAA0C,UAAU;AAC5D;AAEA,IAAO,mBAAQ;AAAA,EACb;AAAA,EACA;AACF;;;A5CjBO,SAAS,kBAAkB;AAAC;AAE5B,IAAMC,YAAW,MAAM;AAC5B,aAAW,SAAS;AACpB,UAAQ,IAAI,iBAAiB;AAC/B;;;ADVAC;AAIO,IAAM,qBAAqB,OAAO,SAAc;AACrD,QAAa,aAAK,QAAQ,IAAI;AAE9B,MAAIC,uBAAsB,IAAI,GAAG;AAC/B,UAAa,aAAK,yBAAyB,IAAI;AAAA,EACjD;AAEA,MAAIC,qBAAoB,IAAI,GAAG;AAC7B,UAAa,aAAK,uBAAuB,IAAI;AAAA,EAC/C;AACF;AAEA,IAAM,sBAAsB,OAC1B,MACA,OACA,kBACG;AACH,aAAW,CAAC,OAAO,IAAI,KAAK,OAAO,QAAQ,KAAK,GAAG;AAEjD,QAAI,CAAC,iBAAiB,cAAc,KAAK,MAAM,MAAM;AACnD,YAAa,aAAK,SAAS,MAAM,IAAI;AAAA,IACvC;AAAA,EACF;AACF;AAEA,IAAM,wBAAwB,OAC5B,MACA,OACA,kBACG;AACH,MAAI,CAAC,eAAe;AAClB;AAAA,EACF;AACA,aAAW,CAAC,OAAO,IAAI,KAAK,OAAO,QAAQ,aAAa,GAAG;AAEzD,QAAI,CAAC,SAAS,MAAM,KAAK,MAAM,MAAM;AACnC,YAAa,aAAK,WAAW,MAAM,IAAI;AAAA,IACzC;AAAA,EACF;AACF;AAEA,IAAM,sBAAsB,OAAO,MAAW,iBAAsB;AAClE,QAAM,QAAQ,KAAK;AACnB,QAAM,gBAAgB,cAAc;AAEpC,QAAM,oBAAoB,MAAM,OAAO,aAAa;AACpD,QAAM,sBAAsB,MAAM,OAAO,aAAa;AACxD;AAEO,IAAM,mBAAmB,OAC9B,MACA,iBACG;AACH,QAAM,WAAW,YAAY;AAC7B,MAAI;AACJ,MAAI,CAAC,oBAAI,eAAe,CAAC,oBAAI,wBAAwB;AACnD,oBAAgB,MAAe,qBAAqB,QAAQ;AAAA,EAC9D;AACA,QAAa,uBAAe,aAAa,MAAM,aAAa;AAE5D,MAAI,cAAc;AAChB,UAAa,aAAK,QAAQ,IAAI;AAE9B,QAAI,kBAAkB,MAAM,YAAY,GAAG;AACzC,YAAa,aAAK,yBAAyB,IAAI;AAAA,IACjD;AAEA,QAAI,gBAAgB,MAAM,YAAY,GAAG;AACvC,YAAa,aAAK,uBAAuB,IAAI;AAAA,IAC/C;AAEA,QAAI,qBAAqB,MAAM,YAAY,GAAG;AAC5C,YAAa,aAAK,mBAAmB,IAAI;AAAA,IAC3C;AAEA,QACE,CAAC,aAAa,sBACd,KAAK,sBACL,KAAK,UACL;AACA,YAAa,aAAK,mBAAmB,IAAI;AAAA,IAC3C;AAEA,QAAI,KAAK,aAAa,aAAa,UAAU;AAC3C,YAAa,aAAK,gBAAgB,IAAI;AAAA,IACxC;AAAA,EACF,OAAO;AACL,UAAa,aAAK,QAAQ,IAAI;AAAA,EAChC;AAEA,MAAI,gBAAgB,MAAM,YAAY,GAAG;AACvC,UAAa,aAAK,0BAA0B,IAAI;AAAA,EAClD;AAEA,MAAI,cAAc,MAAM,YAAY,GAAG;AACrC,UAAa,aAAK,wBAAwB,IAAI;AAAA,EAChD;AAEA,QAAM,oBAAoB,MAAM,YAAY;AAC9C;AAEO,IAAM,kBAAkB,CAAC,MAAW,iBAAsB;AAC/D,SAAO,mBAAmB,MAAM,cAAcD,sBAAqB;AACrE;AAEO,IAAM,oBAAoB,CAAC,MAAW,iBAAsB;AACjE,SAAO,qBAAqB,MAAM,cAAcA,sBAAqB;AACv
E;AAEA,IAAM,gBAAgB,CAAC,MAAW,iBAAsB;AACtD,SAAO,mBAAmB,MAAM,cAAcC,oBAAmB;AACnE;AAEA,IAAM,kBAAkB,CAAC,MAAW,iBAAsB;AACxD,SAAO,qBAAqB,MAAM,cAAcA,oBAAmB;AACrE;AAEA,IAAM,uBAAuB,CAAC,MAAW,iBAAsB;AAC7D,SAAO,CAAC,cAAc,eAAe,OAAO,KAAK,gBAAgB;AACnE;AAKA,IAAM,qBAAqB,CACzB,MACA,cACA,kBACG;AAEH,MAAI,CAAC,cAAc,IAAI,GAAG;AACxB,WAAO;AAAA,EACT;AAGA,MAAI,gBAAgB,cAAc,YAAY,GAAG;AAC/C,WAAO;AAAA,EACT;AAGA,SAAO;AACT;AAKA,IAAM,uBAAuB,CAC3B,MACA,cACA,kBACG;AAEH,MAAI,cAAc,IAAI,GAAG;AACvB,WAAO;AAAA,EACT;AAGA,MAAI,CAAC,cAAc;AACjB,WAAO;AAAA,EACT;AAGA,MAAI,CAAC,cAAc,YAAY,GAAG;AAChC,WAAO;AAAA,EACT;AAGA,SAAO;AACT;;;AD3KAC;AACAC;;;A+CLA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAIAC;AAJA,IAAM,QAAQ;AACd,IAAM,EAAE,IAAI,OAAO,IAAI,QAAQ,MAAM;AACrC,IAAM,EAAE,SAAAC,SAAQ,IAAI;AAWpB,IAAMC,kBAAiB,QAAQ;AAE/B,SAAS,cAAc,QAAgB,WAAmB;AACxD,SAAO,GAAG,MAAM,IAAI,SAAS;AAC/B;AAEA,eAAsB,mBAAmB,QAAoC;AAC3E,MAAI,CAAC,QAAQ;AACX,YAAQ,MAAM,0CAA0C;AACxD,WAAO,CAAC;AAAA,EACV;AACA,QAAM,SAAS,MAAM,MAAM,iBAAiB;AAC5C,QAAM,WAA6B,MAAM,OAAO,KAAK,MAAM;AAC3D,SAAO,SAAS,IAAI,aAAW,QAAQ,KAAK;AAC9C;AAEA,eAAsB,mBACpB,QACA,OAAmD,CAAC,GACpD;AACA,MAAI;AACF,UAAM,SAAS,MAAM,UAAU;AAC/B,QAAI,aAAuB,KAAK,cAAc,CAAC;AAC/C,QAAI;AAGJ,QAAI,WAAW,WAAW,GAAG;AAC3B,YAAM,WAAW,MAAM,mBAAmB,MAAM;AAChD,oBAAc,SAAS,IAAI,cAAY;AAAA,QACrC,KAAK,cAAc,QAAQ,QAAQ,QAAQ,SAAS;AAAA,MACtD,EAAE;AAAA,IACJ,OAAO;AAEL,mBAAa,MAAM,QAAQ,UAAU,IAAI,aAAa,CAAC,UAAU;AACjE,oBAAc,WAAW,IAAI,gBAAc;AAAA,QACzC,KAAK,cAAc,QAAQ,SAAS;AAAA,MACtC,EAAE;AAAA,IACJ;AAEA,QAAI,eAAe,YAAY,SAAS,GAAG;AACzC,YAAM,SAAS,MAAM,MAAM,iBAAiB;AAC5C,YAAM,WAAW,CAAC;AAClB,eAAS,cAAc,aAAa;AAClC,iBAAS,KAAK,OAAO,OAAO,WAAW,GAAG,CAAC;AAAA,MAC7C;AACA,UAAI,CAAC,oBAAI,OAAO,GAAG;AACjB,QAAAD;AAAA,UACE,6BAA6B,MAAM,aAAa,MAAM,OAAO,YAC1D,IAAI,gBAAc,WAAW,GAAG,EAChC,KAAK,IAAI,CAAC;AAAA,QACf;AAAA,MACF;AACA,YAAM,QAAQ,IAAI,QAAQ;AAAA,IAC5B;AAAA,EACF,SAAS,KAAK;AACZ,YAAQ,MAAM,gCAAgC,GAAG,EAAE;AAAA,EACrD;AACF;AAEA,eAAsB,eACpB,QACA,eACA;AAEA,QAAM,mBAAmB,QAAQ,EAAE,QAAQ,WAAW,CAAC;AAEvD,QAAM,SAAS,MAAM,MAAM,iBAAiB;AAC5C,QAAM,YAAY,cAAc;AAChC,QAAM,YAAY,cAAc,YAAY,cAAc,YAAY,OAAO;AAC7E,QAAM,MAAM,cAAc,QAAQ,SAAS;AAE3C,QAAM,UAAmB;AAAA,IACvB,GAAG;AAAA,IACH;AAAA,IACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IAClC,iBAAgB,oBAAI,KAAK,GAAE,YAAY;AAAA,IACvC;AAAA,EACF;AACA,QAAM,OAAO,MAAM,KAAK,SAASC,eAAc;AAC/C,SAAO;AACT;AAEA,eAAsB,iBAAiB,SAAkB;AACvD,QAAM,SAAS,MAAM,MAAM,iBAAiB;AAC5C,QAAM,MAAM,cAAc,QAAQ,QAAQ,QAAQ,SAAS;AAC3D,UAAQ,kBAAiB,oBAAI,KAAK,GAAE,YAAY;AAChD,QAAM,OAAO,MAAM,KAAK,SAASA,eAAc;AACjD;AAEA,eAAsB,WAAW,QAAgB,WAAmB;AAClE,QAAM,SAAS,MAAM,MAAM,iBAAiB;AAC5C,QAAM,OAAO,OAAO,cAAc,QAAQ,SAAS,CAAC;AACtD;AAEA,eAAsB,WACpB,QACA,WACkB;AAClB,MAAI,CAAC,UAAU,CAAC,WAAW;AACzB,UAAM,IAAI,MAAM,6BAA6B,MAAM,MAAM,SAAS,EAAE;AAAA,EACtE;AACA,QAAM,SAAS,MAAM,MAAM,iBAAiB;AAC5C,QAAM,UAAU,MAAM,OAAO,IAAI,cAAc,QAAQ,SAAS,CAAC;AACjE,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,MAAM,uBAAuB,MAAM,MAAM,SAAS,EAAE;AAAA,EAChE;AACA,SAAO;AACT;;;A/C9GA;;;AgDVA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAAC;AAEO,IAAM,sBAAsB,EAAE,oBAAI,uBAAuB;AACzD,IAAM,sBAAsB,EAAE,oBAAI,uBAAuB;AAEzD,SAAS,iBACd,UACmD;AACnD,MAAI,CAAC,YAAY,SAAS,SAAS,qBAAqB;AACtD,WAAO;AAAA,MACL,OAAO;AAAA,MACP,OAAO,6BAA6B,mBAAmB;AAAA,IACzD;AAAA,EACF;AAEA,MAAI,SAAS,SAAS,qBAAqB;AACzC,WAAO;AAAA,MACL,OAAO;AAAA,MACP,OAAO,6BAA6B,mBAAmB;AAAA,IACzD;AAAA,EACF;AAEA,SAAO,EAAE,OAAO,KAAK;AACvB;;;AjD+BA,IAAM,uBAAuB,OAAO,WAAiB;AACnD,QAAM,SAAS,OAAO;AACtB,QAAeC,eAAM,WAAW,MAAM;AACtC,QAAmB,mBAAmB,MAAM;AAC5C,QAAY,aAAK,eAAe,MAAM;AACtC,QAAe,mBAAmB,QAAQ,EAAE,QAAQ,gBAAgB,CAAC;AACvE;AAEO,IAAM,SAAN,MAAM,QAAO;AAAA,EAKlB,OAAO,KAAK,UAAoB,UAAoB,YAAwB;AAC1E,YAAO,SAAS;AAChB,YAAO,SAAS;AAChB,YAAO,WAAW;AAAA,EACpB;AAAA,EAEA,aAAa,yBAAyB,MAAY,SAAmB;AAGnE
,QAAI,oBAAI,+BAA+BC,SAAQ,IAAI,GAAG;AACpD,aAAO;AAAA,IACT;AAGA,QAAI,MAAM,QAAO,SAAS,cAAc,GAAG;AACzC,aAAO;AAAA,IACT;AAGA,QAAI,UAAU,IAAI,GAAG;AACnB,aAAO;AAAA,IACT;AAGA,QAAI,CAAC,SAAS;AACZ,gBAAU,MAAiB,qBAAqB,YAAY,CAAC;AAAA,IAC/D;AACA,WAAO,CAAC,EAAE,WAAW,QAAQ,UAAU,KAAK,SAAS,aAAa,OAAO;AAAA,EAC3E;AAAA,EAEA,aAAa,UACX,MACA,OAAqB;AAAA,IACnB,cAAc;AAAA,IACd,iBAAiB;AAAA,EACnB,GACA,UACA,QACA,SACe;AACf,QAAI,EAAE,UAAU,IAAI,IAAI;AAGxB,QAAI,UAAU,CAAC,OAAO,UAAU;AAC9B,WAAK,kBAAkB;AAAA,IACzB;AAEA,QAAI;AACJ,QAAI,UAAU;AACZ,UAAI,MAAM,QAAO,yBAAyB,MAAM,OAAO,GAAG;AACxD,cAAM,IAAI,UAAU,6CAA6C,GAAG;AAAA,MACtE;AAEA,UAAI,CAAC,KAAK,wBAAwB;AAChC,cAAM,qBAAqB,iBAAiB,QAAQ;AACpD,YAAI,CAAC,mBAAmB,OAAO;AAC7B,gBAAM,IAAI,UAAU,mBAAmB,OAAO,GAAG;AAAA,QACnD;AAAA,MACF;AAEA,uBAAiB,KAAK,eAAe,MAAM,KAAK,QAAQ,IAAI;AAAA,IAC9D,WAAW,QAAQ;AACjB,uBAAiB,OAAO;AAAA,IAC1B;AAGA,UAAM,mBACJ,KAAK,mBAAmB,CAAE,MAAM,QAAO,SAAS,cAAc;AAChE,QAAI,CAAC,kBAAkB,kBAAkB;AACvC,YAAM;AAAA,IACR;AAEA,UAAM,OAAe,qBAAqB;AAE1C,UAAM,WAAW;AAAA,MACf,WAAW,KAAK,IAAI;AAAA,MACpB,GAAG;AAAA,MACH,GAAG;AAAA,MACH;AAAA,MACA,UAAU;AAAA,MACV;AAAA,IACF;AAEA,QAAI,CAAC,SAAS,OAAO;AACnB,eAAS,QAAQ,CAAC;AAAA,IACpB;AAEA,QAAI,SAAS,UAAU,MAAM;AAC3B,eAAS;AAAA,IACX;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,aAAa,WAAW;AACtB,UAAM,KAAK,YAAY;AACvB,UAAM,WAAW,MAAM,GAAG;AAAA,MAChB,oBAAoB,MAAM;AAAA,QAChC,cAAc;AAAA,MAChB,CAAC;AAAA,IACH;AACA,WAAO,SAAS,KAAK,IAAI,SAAO,IAAI,GAAI;AAAA,EAC1C;AAAA,EAEA,aAAa,gBAAgB,OAAe;AAC1C,QAAI,WAAgB,MAAgB,uBAAuB,OAAO,CAAC,CAAC;AACpE,WAAO;AAAA,MACL,WAAW,SAAS;AAAA,IACtB;AAAA,EACF;AAAA,EAEA,aAAa,oBAAoB,MAA0C;AACzE,QAAI,WAAmB,MAAgB;AAAA,MACrC,KAAK;AAAA,MACL,EAAE,OAAO,KAAK,SAAS,GAAG;AAAA,IAC5B;AACA,WAAO;AAAA,EACT;AAAA,EAEA,aAAa,eAAe,OAAe;AACzC,WAAiB,qBAAqB,KAAK;AAAA,EAC7C;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,QAAQ,QAAgB;AACnC,UAAM,OAAO,MAAgB,QAAQ,MAAM;AAC3C,QAAI,MAAM;AACR,aAAO,KAAK;AAAA,IACd;AACA,WAAO;AAAA,EACT;AAAA,EAEA,aAAa,QAAQ,SAAmB;AACtC,WAAO,MAAgB,uBAAuB,OAAO;AAAA,EACvD;AAAA,EAEA,aAAa,WAAW,OAAe;AACrC,WAAO,MAAgB,sBAAsB,KAAK;AAAA,EACpD;AAAA,EAEA,aAAa,KAAK,MAAY,OAAqB,CAAC,GAAkB;AAEpE,QAAI,KAAK,gBAAgB,MAAM;AAC7B,WAAK,eAAe;AAAA,IACtB;AACA,QAAI,KAAK,mBAAmB,MAAM;AAChC,WAAK,kBAAkB;AAAA,IACzB;AACA,UAAM,WAAW,YAAY;AAC7B,UAAM,KAAK,YAAY;AAEvB,QAAI,EAAE,OAAO,KAAK,aAAa,CAAC,GAAG,MAAM,IAAI;AAE7C,QAAI,CAAC,SAAS,CAAC,KAAK;AAClB,YAAM,IAAI,MAAM,0BAA0B;AAAA,IAC5C;AAEA,QAAI;AACJ,QAAI,KAAK;AAEP,UAAI;AACF,iBAAU,MAAM,GAAG,IAAI,GAAG;AAC1B,YAAI,SAAS,OAAO,UAAU,OAAO;AACnC,gBAAM;AAAA,QACR;AACA,gBAAQ,OAAO;AAAA,MACjB,SAAS,GAAQ;AACf,YAAI,EAAE,WAAW,KAAK;AAAA,QAEtB,OAAO;AACL,gBAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,QAAI,CAAC,UAAU,OAAO;AAEpB,eAAS,MAAgB,qBAAqB,KAAK;AACnD,UAAI,UAAU,OAAO,QAAQ,KAAK;AAChC,cAAM,IAAI,sBAAsB,KAAK;AAAA,MACvC;AAAA,IACF;AAEA,UAAM,SAAS,SAAS,IAAI;AAC5B,UAAM,iBAAiBC,WAAU,MAAM,MAAMA,WAAU,IAAI,IAAI,IAAI;AACnE,WAAO,QAAO,OAAO,SAAS,QAAQ,gBAAgB,YAAY;AAChE,YAAM,mBAAmB,OAAO,QAAQ;AAExC,UAAI,YAAY,MAAM,QAAO,UAAU,MAAM,MAAM,UAAU,MAAM;AAEnE,UAAI,KAAK,iBAAiB,KAAK,kBAAkB,QAAQ,KAAK;AAC5D,oBAAsB,kBAAkB,WAAW,MAAM;AAAA,MAC3D;AAEA,UAAI,CAAC,UAAU,OAAO,QAAQ;AAC5B,kBAAU,QAAQ,EAAE,GAAG,MAAM;AAAA,MAC/B;AAIA,UAAI,gBAAgB,CAAC;AACrB,UAAI,CAAC,KAAK;AACR,cAAM,UAAU;AAEhB,YAAI,WAAW,SAAS,GAAG;AACzB,mBAAS,WAAW,YAAY;AAC9B,0BAAc,KAAK,QAAO,OAAO,SAAS,SAAS,CAAC,GAAI,CAAC,CAAC;AAAA,UAC5D;AAAA,QACF;AAAA,MACF;AAEA,UAAI;AAEF,YAAI,WAAW,MAAM,GAAG,IAAI,SAAS;AACrC,kBAAU,OAAO,SAAS;AAE1B,cAAmB,iBAAiB,WAAW,MAAM;AACrD,cAAeF,eAAM;AAAA,UACnB;AAAA,UACA,UAAU;AAAA,UACV,UAAU;AAAA,UACV,UAAU;AAAA,QACZ;AACA,cAAY,aAAK,eAAe,SAAS,EAAE;AAE3C,cAAM,QAAQ,IAAI,aAAa;AAG/B,eAAO,GAAG,IAAI,UAAU,GAAI;AAAA,MAC9B,SAAS,KAAU;AACjB,YAAI,IAAI,WAAW,KAAK;AACtB,gBAAM;AAAA,QACR,OAAO;AACL,gBAAM;AAAA,QACR
;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEA,aAAa,WACX,mBACA,QAC0B;AAC1B,UAAM,WAAW,YAAY;AAE7B,QAAI,cAAqB,CAAC;AAC1B,QAAI,WAAkB,CAAC;AACvB,QAAI,cAAqB,CAAC;AAE1B,UAAM,SAAS,kBAAkB,IAAI,CAAC,SAAe,KAAK,KAAK;AAC/D,UAAM,iBAAiB,MAAM,qBAAqB,MAAM;AACxD,UAAM,eAAoD,CAAC;AAE3D,eAAW,WAAW,mBAAmB;AACvC,UACE,SAAS;AAAA,QACP,CAAC,MAAY,EAAE,MAAM,YAAY,MAAM,QAAQ,MAAM,YAAY;AAAA,MACnE,KACA,eAAe,SAAS,QAAQ,MAAM,YAAY,CAAC,GACnD;AACA,qBAAa,KAAK;AAAA,UAChB,OAAO,QAAQ;AAAA,UACf,QAAQ;AAAA,QACV,CAAC;AACD;AAAA,MACF;AACA,cAAQ,aAAa,UAAU,CAAC;AAChC,eAAS,KAAK,OAAO;AACrB,UAAIE,WAAU,OAAO,GAAG;AACtB,oBAAY,KAAK,OAAO;AAAA,MAC1B;AAAA,IACF;AAEA,UAAM,UAAU,MAAiB,qBAAqB,QAAQ;AAC9D,WAAO,QAAO,OAAO;AAAA,MACnB,SAAS;AAAA,MACT,YAAY;AAAA,MACZ,YAAY;AAEV,iBAAS,QAAQ,CAAC,SAAc;AAC9B,sBAAY;AAAA,YACV,QAAO;AAAA,cACL;AAAA,cACA;AAAA,gBACE,cAAc;AAAA,gBACd,iBAAiB,KAAK;AAAA,cACxB;AAAA,cACA;AAAA,cACA;AAAA;AAAA,cACA;AAAA,YACF;AAAA,UACF;AAAA,QACF,CAAC;AAED,cAAM,kBAAkB,MAAM,QAAQ,IAAI,WAAW;AACrD,cAAgB,sBAAsB,eAAe;AAGrD,mBAAW,QAAQ,iBAAiB;AAGlC,gBAAeF,eAAM,QAAQ,UAAU,KAAK,KAAK,KAAK,KAAK;AAC3D,gBAAmB,iBAAiB,MAAM,MAAS;AAAA,QACrD;AAEA,cAAM,QAAQ,gBAAgB,IAAI,UAAQ;AACxC,iBAAO;AAAA,YACL,KAAK,KAAK;AAAA,YACV,OAAO,KAAK;AAAA,UACd;AAAA,QACF,CAAC;AAGD,YAAI,MAAM,QAAQ,KAAK,KAAK,QAAQ;AAClC,gBAAM,gBAAgB,CAAC;AACvB,gBAAM,iBAAiB,MAAM,IAAI,UAAQ,KAAK,GAAG;AACjD,mBAAS,WAAW,QAAQ;AAC1B,0BAAc,KAAK,QAAO,OAAO,SAAS,SAAS,cAAc,CAAC;AAAA,UACpE;AACA,gBAAM,QAAQ,IAAI,aAAa;AAAA,QACjC;AAEA,eAAO;AAAA,UACL,YAAY;AAAA,UACZ;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,aAAa,WAAW,SAA6C;AACnE,UAAM,KAAK,YAAY;AAEvB,UAAM,WAA4B;AAAA,MAChC,YAAY,CAAC;AAAA,MACb,cAAc,CAAC;AAAA,IACjB;AAGA,UAAM,UAAU,MAAM,4BAA4B,OAAO;AACzD,QAAI,SAAS;AACX,gBAAU,QAAQ,OAAO,OAAK,MAAM,QAAQ,cAAc;AAE1D,eAAS,aAAa,KAAK;AAAA,QACzB,KAAK,QAAQ;AAAA,QACb,OAAO,QAAQ;AAAA,QACf,QAAQ;AAAA,MACV,CAAC;AAAA,IACH;AAGA,UAAM,kBAAkB,MAAM,GAAG,QAAc;AAAA,MAC7C,cAAc;AAAA,MACd,MAAM;AAAA,IACR,CAAC;AACD,UAAM,gBAAgB,gBAAgB,KAAK,IAAI,UAAQ;AACrD,aAAO,KAAK;AAAA,IACd,CAAC;AAGD,UAAM,WAAW,cAAc,IAAI,WAAS;AAAA,MAC1C,GAAG;AAAA,MACH,UAAU;AAAA,IACZ,EAAE;AACF,UAAM,aAAa,MAAgB,sBAAsB,QAAQ;AACjE,UAAM,mBAAmB,cAAc,OAAOE,UAAS;AAEvD,aAAS,QAAQ,eAAe;AAC9B,YAAM,qBAAqB,IAAI;AAAA,IACjC;AACA,UAAM,QAAO,OAAO,YAAY,SAAS,QAAQ,iBAAiB,MAAM;AAIxE,UAAM,YAAqC,CAAC;AAC5C,kBAAc,OAAO,CAAC,MAAM,YAAY;AACtC,WAAK,QAAQ,GAAI,IAAI;AACrB,aAAO;AAAA,IACT,GAAG,SAAS;AAGZ,eAAW,QAAQ,UAAQ;AACzB,YAAM,QAAQ,UAAU,KAAK,EAAE,EAAE;AACjC,UAAI,KAAK,IAAI;AACX,iBAAS,WAAW,KAAK,EAAE,KAAK,KAAK,IAAI,MAAM,CAAC;AAAA,MAClD,OAAO;AACL,iBAAS,aAAa,KAAK;AAAA,UACzB,KAAK,KAAK;AAAA,UACV;AAAA,UACA,QAAQ;AAAA,QACV,CAAC;AAAA,MACH;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AAAA,EAEA,aAAa,QAAQ,IAAY;AAC/B,UAAM,KAAK,YAAY;AACvB,UAAM,SAAU,MAAM,GAAG,IAAI,EAAE;AAC/B,UAAM,SAAS,OAAO;AAEtB,QAAI,CAAC,oBAAI,eAAe,CAAC,oBAAI,wBAAwB;AAEnD,YAAM,QAAQ,OAAO;AACrB,YAAM,UAAU,MAAiB,WAAW,KAAK;AACjD,UAAI,SAAS;AACX,YAAI,OAAO,WAAW,YAAY,EAAG,KAAK;AACxC,gBAAM,IAAI,UAAU,8CAA8C,GAAG;AAAA,QACvE,OAAO;AACL,gBAAM,IAAI,UAAU,oCAAoC,GAAG;AAAA,QAC7D;AAAA,MACF;AAAA,IACF;AAEA,UAAeF,eAAM,WAAW,MAAM;AAEtC,UAAM,GAAG,OAAO,QAAQ,OAAO,IAAI;AAEnC,UAAM,mBAAmBE,WAAU,MAAM,IAAI,IAAI;AACjD,UAAM,QAAO,OAAO,YAAY,GAAG,gBAAgB;AACnD,UAAmB,mBAAmB,MAAM;AAC5C,UAAY,aAAK,eAAe,MAAM;AACtC,UAAe,mBAAmB,QAAQ,EAAE,QAAQ,WAAW,CAAC;AAAA,EAClE;AAAA,EAEA,aAAa,gBACX,OACA,UACA,UACA,MACA;AACA,UAAM,OAAa;AAAA,MACjB;AAAA,MACA;AAAA,MACA,WAAW,KAAK,IAAI;AAAA,MACpB,OAAO,CAAC;AAAA,MACR,SAAS;AAAA,QACP,QAAQ;AAAA,MACV;AAAA,MACA,OAAO;AAAA,QACL,QAAQ;AAAA,MACV;AAAA,MACA;AAAA,IACF;AACA,QAAI,MAAM,OAAO;AACf,WAAK,QAAQ,KAAK;AAAA,IACpB;AAGA,UAAY,qCAAkC;AAC9C,WAAO,MAAM,QAAO,KAAK,MAAM;AAAA,MAC7B,cAAc,MAAM;AAAA,MACpB,iBAAiB,MAAM;AAAA,MACvB,wBAAwB,MAAM;
AAAA,IAChC,CAAC;AAAA,EACH;AAAA,EAEA,aAAa,UAAU,UAAoB;AACzC,WAAO,MAAM,KAAK,OAAO,QAAQ,QAAQ;AAAA,EAC3C;AAAA,EAEA,aAAa,sBAAsB,MAAY;AAC7C,WAAO,MAAM,KAAK,OAAO,sBAAsB,IAAI;AAAA,EACrD;AACF;;;ANvfA,SAAS,mBAAmB,OAAsB;AAChD,MAAI,MAAM,QAAQ,KAAK,GAAG;AACxB,WAAO,MAAM,IAAI,UAAQ;AACvB,UAAI,MAAM;AACR,eAAO,KAAK;AACZ,eAAO;AAAA,MACT;AAAA,IACF,CAAC;AAAA,EACH,WAAW,OAAO;AAChB,WAAO,MAAM;AACb,WAAO;AAAA,EACT;AACA,SAAO;AACT;AAEO,SAAS,sBAAsB,OAAoB;AACxD,QAAM,UAAU;AAAA,IACd,EAAE,2BAAiC,KAAK,QAAQ;AAAA,IAChD,EAAE,yBAAgC,KAAK,MAAM;AAAA,EAC/C;AACA,WAAS,CAAC,KAAK,SAAS,KAAK,OAAO,QAAQ,KAAK,GAAG;AAClD,QAAI,OAAO,cAAc,UAAU;AACjC,aAAO;AAAA,IACT;AACA,UAAM,SAAS,OAAO,KAAK,aAAa,CAAC,CAAC;AAE1C,QAAI,OAAO,WAAW,GAAG;AACvB;AAAA,IACF;AACA,UAAM,mBAAmB,QAAQ;AAAA,MAC/B,WACE,MAAM,OAAO,OAAO,OAAO,WAAW,KAAK,OAAO,CAAC,MAAM,MAAM;AAAA,IACnE;AACA,QAAI,CAAC,kBAAkB;AACrB,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;AAEA,eAAsB,uBACpB,SACA,MACA;AACA,QAAM,KAAK,YAAY;AACvB,MAAI,SACF,MAAM,GAAG,QAAQ;AAAA,IACf,MAAM;AAAA,IACN,cAAc;AAAA,EAChB,CAAC,GACD,KAAK,IAAI,SAAO,IAAI,GAAG;AACzB,MAAI,MAAM,SAAS;AACjB,YAAQ,mBAAmB,KAAK;AAAA,EAClC;AACA,SAAO;AACT;AAEA,eAAsB,gBAAgB;AACpC,QAAM,KAAK,YAAY;AACvB,QAAM,WAAW,kBAAoB,GAAG,SAAS;AACjD,QAAM,WAAW,MAAM,GAAG,QAAQ;AAAA,IAChC,UAAU;AAAA,IACV,QAAQ,GAAG,QAAQ,GAAG,WAAW;AAAA,EACnC,CAAC;AACD,SAAO,SAAS,KAAK,IAAI,SAAO,IAAI,EAAE;AACxC;AAEA,eAAsB,sBAAsB,OAAe;AACzD,QAAM,KAAK,YAAY;AACvB,SAAQ,MAAM,GAAG,SAAS,KAAK;AACjC;AAEA,eAAsB,QAAQ,IAAY,MAA+B;AACvE,QAAM,KAAa,YAAY;AAC/B,MAAI,OAAO,MAAM,GAAG,IAAU,EAAE;AAChC,MAAI,MAAM,SAAS;AACjB,WAAO,mBAAmB,IAAI;AAAA,EAChC;AACA,SAAO;AACT;AAMA,eAAsB,qBACpB,OACA,MAC2B;AAC3B,MAAI,SAAS,MAAM;AACjB,UAAM;AAAA,EACR;AAEA,QAAM,WAAW,MAAM,iDAA8C;AAAA,IACnE,KAAK,MAAM,YAAY;AAAA,IACvB,cAAc;AAAA,EAChB,CAAC;AAED,MAAI,MAAM,QAAQ,QAAQ,GAAG;AAE3B,UAAM,IAAI,MAAM,4CAA4C,KAAK,EAAE;AAAA,EACrE;AAEA,MAAI,OAAO;AACX,MAAI,MAAM,SAAS;AACjB,WAAO,mBAAmB,IAAI;AAAA,EAChC;AAEA,SAAO;AACT;AAEA,eAAsB,cAAc,OAAe;AACjD,MAAI;AACF,UAAM,OAAO,MAAM,qBAAqB,KAAK;AAC7C,QAAI,MAAM,QAAQ,IAAI,KAAK,QAAQ,MAAM;AACvC,aAAO;AAAA,IACT;AAAA,EACF,SAAS,KAAK;AACZ,WAAO;AAAA,EACT;AACA,SAAO;AACT;AAEA,eAAsB,uBACpB,OACA,MACA,SACA;AACA,MAAI,OAAO,UAAU,UAAU;AAC7B,UAAM,IAAI,MAAM,oCAAoC;AAAA,EACtD;AACA,QAAMC,UAAS,oBAAoB,OAAO;AAAA,IACxC,cAAc;AAAA,EAChB,CAAC;AACD,EAAAA,QAAO,WAAW,QAAQ,KAAK,WAAW,KAAK,WAAWA,QAAO;AACjE,MAAI,WAAW,MAAM,4CAA4CA,OAAM;AAEvE,MAAI,CAAC,UAAU;AACb,eAAW,CAAC;AAAA,EACd;AACA,MAAI,QAAgB,MAAM,QAAQ,QAAQ,IAAI,WAAW,CAAC,QAAQ;AAClE,MAAI,SAAS,SAAS;AACpB,YAAQ,mBAAmB,KAAK;AAAA,EAClC;AACA,SAAO;AACT;AAMA,eAAsB,6BACpB,OACA,MACA;AACA,QAAM,eAAe,SAAS,KAAK;AAEnC,MAAI,UAAiB;AAAA,IACnB;AAAA,MACE,kBAAkB;AAAA,IACpB;AAAA,IACA;AAAA,MACE,gBAAgB;AAAA,IAClB;AAAA,EACF;AAEA,MAAI,OAAO;AACT,UAAM,YAAY;AAAA,MAChB,CAAC,YAAY,GAAG;AAAA,QACd,SAAS;AAAA,MACX;AAAA,IACF;AACA,YAAQ,KAAK,SAAS;AAAA,EACxB;AAEA,MAAI,gBAAkC;AAAA,IACpC,UAAU;AAAA,MACR,KAAK;AAAA,MACL,KAAK;AAAA,QACH,QAAQ;AAAA,MACV;AAAA,IACF;AAAA,IACA,OAAO,MAAM,SAAS;AAAA,EACxB;AAEA,QAAM,OAAO,MAAM,gBAAwB,gBAAgB,GAAG,aAAa;AAC3E,SAAO,KAAK;AACd;AAEO,SAAS,uBAAuB,OAAe,MAAY;AAChE,MAAI,CAAC,MAAM;AACT;AAAA,EACF;AACA,SAAO,kBAAkBC,cAAa,KAAK,GAAI,KAAK,GAAI;AAC1D;AAKA,eAAsB,yBACpB,OACA,MACA,SACA;AACA,MAAI,OAAO,UAAU,UAAU;AAC7B,UAAM,IAAI,MAAM,oCAAoC;AAAA,EACtD;AACA,QAAM,UAAU,MAAM,YAAY;AAElC,QAAM,WAAW,QAAQ,KAAK,WAAW,KAAK,WAAW;AACzD,MAAI,WAAW,MAAM,iDAA8C;AAAA,IACjE,GAAG;AAAA,IACH;AAAA,IACA,QAAQ,GAAG,OAAO,GAAG,WAAW;AAAA,EAClC,CAAC;AACD,MAAI,CAAC,UAAU;AACb,eAAW,CAAC;AAAA,EACd;AACA,MAAI,QAAgB,MAAM,QAAQ,QAAQ,IAAI,WAAW,CAAC,QAAQ;AAClE,MAAI,SAAS,SAAS;AACpB,YAAQ,mBAAmB,KAAK;AAAA,EAClC;AACA,SAAO;AACT;AAEA,IAAM,aAAa;AACnB,eAAsB,eAAe;AAAA,EACnC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,IAAwB,
CAAC,GAAG;AAC1B,QAAM,KAAK,YAAY;AACvB,QAAM,WAAW,SAAS;AAC1B,QAAM,YAAY,WAAW;AAE7B,QAAM,OAA0B;AAAA,IAC9B,cAAc;AAAA,IACd,OAAO;AAAA,EACT;AAEA,MAAI,UAAU;AACZ,SAAK,WAAW;AAAA,EAClB;AAEA,MAAI,UACF,WAAW,OACX;AACF,MAAI,OAAO,OAAO,KAAK;AACrB,eAAW,CAAC,MAAM,QAAQ,MAAM,MAAM,GAAG,CAAC;AAAA,EAC5C,WAAW,OAAO;AAChB,eAAW,MAAM,uBAAuB,OAAO,IAAI;AACnD,aAAS,CAAC,QAAa,uBAAuB,OAAO,GAAG;AAAA,EAC1D,WAAW,OAAO,QAAQ,OAAO;AAC/B,eAAW,MAAM,yBAAyB,OAAO,QAAQ,OAAO,IAAI;AACpE,eAAW;AAAA,EACb,OAAO;AAEL,UAAM,WAAW,MAAM,GAAG,QAAQ,oBAAoB,MAAM,IAAI,CAAC;AACjE,eAAW,SAAS,KAAK,IAAI,CAAC,QAAa,IAAI,GAAG;AAAA,EACpD;AACA,SAAO,WAAW,UAAU,UAAU;AAAA,IACpC,UAAU;AAAA,IACV;AAAA,IACA;AAAA,EACF,CAAC;AACH;AAEA,eAAsB,eAAe;AACnC,QAAM,WAAW,MAAM,oDAA2C;AAAA,IAChE,OAAO;AAAA;AAAA,IACP,cAAc;AAAA,EAChB,CAAC;AACD,SAAO,SAAS;AAClB;AAEA,eAAsB,kBAAkB;AACtC,MAAI,WAAW;AACf,iBAAe,QAAQ,WAAoB;AACzC,UAAM,OAAO,MAAM,eAAe,EAAE,UAAU,UAAU,CAAC;AACzD,gBAAY,KAAK,KAAK,OAAOC,UAAS,EAAE;AACxC,QAAI,KAAK,aAAa;AACpB,YAAM,QAAQ,KAAK,QAAQ;AAAA,IAC7B;AAAA,EACF;AACA,QAAM,QAAQ;AACd,SAAO;AACT;AAIO,SAAS,4BAA4B,MAA0B;AACpE,SAAO,KAAK;AACZ,SAAO,KAAK;AACZ,SAAO;AACT;AAEO,SAAS,kBAAkB,MAA0B,MAAa;AACvE,SAAO,KAAK;AACZ,SAAO,KAAK;AACZ,SAAO,KAAK;AACZ,MAAI,MAAM;AACR,SAAK,QAAQ,KAAK;AAClB,SAAK,UAAU,KAAK;AACpB,SAAK,QAAQ,KAAK;AAAA,EACpB;AACA,SAAO;AACT;AAEA,eAAsB,cAAc,MAAY,OAAe;AAC7D,QAAM,YAAYD,cAAa,KAAK;AACpC,OAAK,YAAY,CAAC;AAClB,OAAK,QAAQ,UAAU;AACvB,OAAK,QAAQ,SAAS,CAAC;AACvB,OAAK,QAAQ,KAAK,KAAK,SAAS;AAChC,QAAM,OAAO,KAAK,MAAM,EAAE,cAAc,MAAM,CAAC;AACjD;AAEA,eAAsB,iBAAiB,MAAY,OAAe;AAChE,QAAM,YAAYA,cAAa,KAAK;AACpC,MAAI,KAAK,WAAW,KAAK,QAAQ,MAAM,SAAS,SAAS,GAAG;AAC1D,SAAK,QAAQ,OAAO,KAAK,QAAQ,KAAK,OAAO,QAAM,OAAO,SAAS;AAAA,EACrE;AACA,QAAM,OAAO,KAAK,MAAM,EAAE,cAAc,MAAM,CAAC;AACjD;;;AlB5VAE;AAGA,IAAMC,kBAAiB;AAKvB,eAAeC,gBAAe,QAAgB,UAAkB;AAC9D,QAAM,KAAa,YAAY,QAAQ;AACvC,QAAM,OAAO,MAAM,GAAG,IAAS,MAAM;AACrC,OAAK,iBAAiB;AACtB,MAAI,CAAC,oBAAI,eAAe,CAAC,oBAAI,wBAAwB;AACnD,UAAM,UAAU,MAAe,WAAW,KAAK,KAAK;AACpD,QAAI,SAAS;AACX,WAAK,UAAU;AACf,WAAK,sBAAsB;AAAA,IAC7B;AAAA,EACF;AAEA,SAAO;AACT;AAEA,eAAe,oBACb,SACoD;AACpD,QAAM,mBAAmB,MAAM,OAAO,QAAQ,OAAO;AAGrD,QAAM,cAAc,QAAQ,OAAO,CAAC,KAAK,MAAM,CAAC,iBAAiB,CAAC,CAAC;AAEnE,QAAM,QAAQ,iBAAiB,OAAO,OAAK,CAAC;AAE5C,QAAM,QAAQ;AAAA,IACZ,MAAM,IAAI,OAAO,SAAc;AAC7B,WAAK,iBAAiB;AACtB,UAAI,CAAC,oBAAI,eAAe,CAAC,oBAAI,wBAAwB;AACnD,cAAM,UAAU,MAAe,WAAW,KAAK,KAAK;AACpD,YAAI,SAAS;AACX,eAAK,UAAU;AACf,eAAK,sBAAsB;AAAA,QAC7B;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AAEA,MAAI,YAAY,QAAQ;AACtB,WAAO,EAAE,OAAO,YAAY;AAAA,EAC9B;AACA,SAAO,EAAE,MAAM;AACjB;AAWA,eAAsB,QACpB,QACA,UACA,cACA;AACA,MAAI,CAAC,cAAc;AACjB,mBAAeA;AAAA,EACjB;AACA,MAAI,CAAC,UAAU;AACb,QAAI;AACF,iBAAmB,YAAY;AAAA,IACjC,SAAS,KAAK;AACZ,iBAAW,MAAeC,eAAM,eAAe,MAAM;AAAA,IACvD;AAAA,EACF;AACA,QAAM,SAAS,MAAY,cAAc;AAEzC,MAAI,OAAO,MAAM,OAAO,IAAI,MAAM;AAClC,MAAI,CAAC,MAAM;AACT,WAAO,MAAM,aAAa,QAAQ,QAAQ;AAC1C,UAAM,OAAO,MAAM,QAAQ,MAAMF,eAAc;AAAA,EACjD;AACA,MAAI,QAAQ,CAAC,KAAK,YAAY,UAAU;AAEtC,SAAK,WAAW;AAAA,EAClB;AAEA,MAAI,KAAK,cAAc,CAAC,YAAI,MAAM,gBAAgB,IAAI,GAAG;AACvD,UAAc,WAAW,UAAU,YAAY;AAC7C,YAAM,SAAS,MAAM,OAAO,sBAAsB,IAAI;AACtD,UAAI,OAAO,QAAQ;AACjB,cAAM,WAAW,KAAK,SAAS,QAAQ,CAAC;AACxC,aAAK,UAAU;AAAA,UACb,MAAM,CAAC,GAAG,IAAI,IAAI,SAAS,OAAO,MAAM,CAAC,CAAC;AAAA,QAC5C;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AACA,SAAO;AACT;AAUA,eAAsB,SACpB,SACoD;AACpD,QAAM,SAAS,MAAY,cAAc;AAEzC,MAAI,iBAAiB,MAAM,OAAO,QAAc,OAAO;AACvD,QAAM,wBAAwB,QAAQ,OAAO,SAAO,CAAC,eAAe,GAAG,CAAC;AACxE,QAAM,QAAQ,OAAO,OAAO,cAAc;AAC1C,MAAI;AAEJ,MAAI,sBAAsB,QAAQ;AAChC,UAAM,cAAc,MAAM,oBAAoB,qBAAqB;AAEnE,kBAAc,YAAY;AAC1B,eAAW,eAAe,YAAY,OAAO;AAC3C,YAAM,OAAO,MAAM,YAAY,KAAM,aAAaA,eAAc;AAAA,IAClE;AACA,UAAM,KAAK,GAAG,YAAY,K
AAK;AAAA,EACjC;AACA,SAAO,EAAE,OAAO,YAAyB;AAC3C;AAEA,eAAsB,eAAe,QAAgB;AACnD,QAAM,SAAS,MAAY,cAAc;AACzC,QAAM,OAAO,OAAO,MAAM;AAC5B;;;AH5IA;;;A6EFA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AAGA,IAAM,wBAAwB;AAC9B,IAAI,QAA0B;AAO9B,eAAe,WAAW;AACxB,MAAI,CAAC,OAAO;AACV,UAAM,SAAS,MAAM,sBAAsB;AAC3C,YAAQ,IAAI,UAAU,MAAM;AAAA,EAC9B;AACA,SAAO;AACT;AAEA,SAAS,aAAa,IAAc,KAAa;AAC/C,SAAO,GAAG,OAAO;AACnB;AAEA,SAAS,cACP,KACA,YAA2B,MACb;AACd,SAAO,EAAE,KAAK,WAAW,aAAa,KAAK,IAAI,EAAE;AACnD;AAEA,eAAe,IACb,IACA,KACA,cAAsB,uBACtB;AACA,QAAM,QAAQ,MAAM,SAAS;AAC7B,QAAM,MAAM,IAAI;AAChB,MAAI;AACJ,MAAI,KAAK;AACP,gBAAY,MAAM,MAAM,IAAI,aAAa,IAAI,GAAG,CAAC;AAAA,EACnD;AACA,QAAM,WAAW,CAAC,aAAa,UAAU,YAAY,KAAK,IAAI,IAAI;AAClE,MAAI,SAAS;AACb,MAAI,UAAU;AACZ,UAAM,eAAe,MAAY;AAAA,MAC/B;AAAA,QACE;AAAA,QACA;AAAA,QACA,UAAU;AAAA,QACV,KAAK;AAAA,MACP;AAAA,MACA,YAAY;AACV,cAAM,UAAU,OAAO,YAAiB;AAEtC,gBAAM,WAAW,MAAM,GAAG,IAAI,SAAS,EAAE,OAAO,KAAK,CAAC;AACtD,iBAAO,MAAM,SAAS;AACtB,iBAAO,OAAO,SAAS;AAAA,QACzB;AACA,YAAI;AACF,gBAAM,QAAQ,GAAG;AAAA,QACnB,SAAS,KAAU;AACjB,cAAI,IAAI,WAAW,KAAK;AACtB,kBAAM;AAAA,UACR,OAAO;AAEL,oBAAQ,0CAA0C;AAAA,UACpD;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,QAAI,CAAC,aAAa,UAAU;AAC1B,cAAQ,kDAAkD;AAAA,IAC5D;AAAA,EACF;AAEA,cAAY,cAAc,QAAQ,WAAW,OAAO,WAAW,SAAS;AACxE,MAAI,OAAO,KAAK;AACd,UAAM,MAAM,MAAM,aAAa,IAAI,OAAO,GAAG,GAAG,SAAS;AAAA,EAC3D;AACA,SAAO,EAAE,IAAI,MAAM,IAAI,OAAO,KAAK,KAAK,OAAO,KAAK;AACtD;AAEA,eAAeG,KAAwB,IAAc,IAAwB;AAC3E,QAAM,QAAQ,MAAM,SAAS;AAC7B,QAAM,WAAW,aAAa,IAAI,EAAE;AACpC,MAAI,YAA0B,MAAM,MAAM,IAAI,QAAQ;AACtD,MAAI,CAAC,WAAW;AACd,UAAM,MAAM,MAAM,GAAG,IAAO,EAAE;AAC9B,gBAAY,cAAc,GAAG;AAC7B,UAAM,MAAM,MAAM,UAAU,SAAS;AAAA,EACvC;AACA,SAAO,UAAU;AACnB;AAEA,eAAe,OAAO,IAAc,SAAc,KAA0B;AAC1E,QAAM,QAAQ,MAAM,SAAS;AAC7B,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,MAAM,qBAAqB;AAAA,EACvC;AACA,QAAM,KAAK,OAAO,YAAY,WAAW,UAAU,QAAQ;AAC3D,QAAM,OAAO,YAAY,WAAW,MAAM,QAAQ;AAClD,MAAI;AACF,UAAM,MAAM,OAAO,aAAa,IAAI,EAAE,CAAC;AAAA,EACzC,UAAE;AACA,UAAM,GAAG,OAAO,IAAI,GAAG;AAAA,EACzB;AACF;AAEO,IAAM,eAAN,MAAmB;AAAA,EAIxB,YAAY,IAAc,cAAsB,uBAAuB;AACrE,SAAK,KAAK;AACV,SAAK,cAAc;AAAA,EACrB;AAAA,EAEA,MAAM,IAAI,KAAU,cAAsB,KAAK,aAAa;AAC1D,WAAO,IAAI,KAAK,IAAI,KAAK,WAAW;AAAA,EACtC;AAAA,EAEA,MAAM,IAAwB,IAAY;AACxC,WAAOA,KAAO,KAAK,IAAI,EAAE;AAAA,EAC3B;AAAA,EAEA,MAAM,OAAO,SAAc,KAAW;AACpC,WAAO,OAAO,KAAK,IAAI,SAAS,GAAG;AAAA,EACrC;AACF;;;ACpIA;AAAA;AAAA,oBAAAC;AAAA,EAAA,eAAAC;AAAA,EAAA;AAAA;AAAA;AAIA,IAAMC,eAAc,SAAS,UAAU,CAAC,EAAE,UAAU;AAcpD,eAAsBC,YAAW,QAAgB,MAA4B;AAC3E,QAAM,OAAa,MAAM;AACzB,QAAM,SAAS,MAAY,uBAAuB;AAClD,QAAM,OAAO,MAAM,MAAM,EAAE,QAAQ,KAAK,GAAGD,YAAW;AACtD,SAAO;AACT;AAOA,eAAsBE,SAAQ,MAAsC;AAClE,QAAM,SAAS,MAAY,uBAAuB;AAClD,QAAM,QAAS,MAAM,OAAO,IAAI,IAAI;AACpC,MAAI,CAAC,OAAO;AACV,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;AAMA,eAAsB,eAAe,MAA6B;AAChE,QAAM,SAAS,MAAY,uBAAuB;AAClD,QAAM,OAAO,OAAO,IAAI;AAC1B;;;A/E/BAC;AACAC;AAQO,SAAS,iBAAiB,MAAkB;AACjD,SAAO,wBAAsB,GAAG,SAAS,GAAG,IAAI;AAClD;AAEA,eAAsB,UACpB,MACwB;AACxB,QAAM,KAAa,YAAY;AAC/B,MAAI;AAEF,WAAQ,MAAM,GAAG,IAAI,iBAAiB,IAAI,CAAC;AAAA,EAC7C,SAAS,GAAQ;AACf,QAAI,EAAE,WAAW,KAAK;AACpB;AAAA,IACF;AACA,UAAM;AAAA,EACR;AACF;AAEA,eAAsB,KACpB,QACsC;AACtC,QAAM,KAAa,YAAY;AAC/B,SAAO,GAAG,IAAI,MAAM;AACtB;AAIA,eAAsB,uBAAgD;AACpE,MAAI,SAAS,MAAM,mCAA6C;AAEhE,MAAI,CAAC,QAAQ;AACX,aAAS;AAAA,MACP,KAAK,0CAAoC;AAAA,MACzC;AAAA,MACA,QAAQ,CAAC;AAAA,IACX;AAAA,EACF;AAGA,SAAO,OAAO,cAAc,MAAM,eAAe;AAAA,IAC/C,aAAa;AAAA,IACb,QAAQ,OAAO;AAAA,EACjB,CAAC;AACD,SAAO,OAAO,mBAAmB,MAAM,iBAAiB;AAAA,IACtD,QAAQ,OAAO;AAAA,EACjB,CAAC;AAED,SAAO;AACT;AAEA,eAAsB,oBAAkD;AACtE,UAAQ,MAAM,qBAAqB,GAAG;AACxC;AAEA,eAAsB,eACpB,OAA+D;AAAA,EAC7D,aAAa;AACf,GACA;AACA,MAAI,cAAc,oBAAI,gBAAgB;AA
EtC,MAAI,CAAC,oBAAI,eAAe,oBAAI,iBAAiB,KAAK,aAAa;AAE7D,UAAM,WAAmB,YAAY;AACrC,QAAI,CAAC,YAAY,SAAS,YAAY,GAAG;AACvC,oBAAc,YAAY,QAAQ,OAAO,MAAM,QAAQ,GAAG;AAAA,IAC5D;AAAA,EACF,WAAW,oBAAI,aAAa;AAC1B,UAAM,SAAS,MAAM,SACjB,KAAK;AAAA;AAAA,OAEJ,MAAM,mCAA6C,IAAI;AAAA;AAC5D,QAAI,QAAQ,aAAa;AACvB,oBAAc,OAAO;AAAA,IACvB;AAAA,EACF;AAEA,SAAO;AACT;AAEO,IAAM,mBAAmB,OAAO,SAEjC;AAEJ,MAAI,CAAC,oBAAI,aAAa;AACpB,WAAO,CAAC,CAAC,oBAAI;AAAA,EACf;AAIA,QAAM,cAAc,MAAM;AAAA;AAAA;AAAA,IAGxB,YAAY;AACV,YAAM,SAAS,MAAM,SACjB,KAAK;AAAA;AAAA,SAEJ,MAAM,mCAA6C,IAAI;AAAA;AAG5D,UAAI,QAAQ,qBAAqB,OAAO;AACtC,eAAO;AAAA,MACT,WAAW,QAAQ,qBAAqB,MAAM;AAC5C,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AAEA,MAAI,gBAAgB,QAAW;AAC7B,WAAO;AAAA,EACT;AAIA,QAAM,aAAkB,oBAAI;AAC5B,MAAI,eAAe,KAAK,eAAe,OAAO;AAC5C,WAAO;AAAA,EACT,OAAO;AACL,WAAO;AAAA,EACT;AACF;AAIA,eAAe,qBAAwD;AACrE,SAAO,MAAM,+BAAyC;AACxD;AAEA,eAAsB,kBAEpB;AACA,QAAM,SAAS,MAAM,mBAAmB;AACxC,SAAO,QAAQ;AACjB;AAEA,eAAsB,4BAEpB;AACA,MAAI,CAAC,oBAAI,aAAa;AAEpB,WAAO,uBAAuB;AAAA,EAChC;AAGA,MAAI,SAAS,MAAM,gBAAgB;AAGnC,MAAI,CAAC,UAAU,CAAC,OAAO,WAAW;AAChC,aAAS,uBAAuB;AAAA,EAClC;AAEA,SAAO;AACT;AAEO,SAAS,yBAAwD;AACtE,MAAI,oBAAI,oBAAoB,oBAAI,sBAAsB;AACpD,WAAO;AAAA,MACL,UAAU,oBAAI;AAAA,MACd,cAAc,oBAAI;AAAA,MAClB,WAAW;AAAA,IACb;AAAA,EACF;AACF;AAIA,eAAsB,kBAAwD;AAC5E,SAAO,uCAAgD;AACzD;AAEA,eAAe,mBAAoD;AACjE,SAAO,2BAAqC;AAC9C;AAEA,eAAsB,gBAAsD;AAC1E,QAAM,UAAU,MAAM,iBAAiB,IAAI;AAE3C,SAAO,QAAQ,WAAW,OAAO,QAAQ,CAAC;AAC5C;AAKA,eAAsB,kBACpB,UACsC;AACtC,QAAM,UAAU,MAAM,2BAAqC,IAAI;AAC/D,SAAO,UAAU,OAAO,QAAQ,OAAO,CAAC,MAAW,EAAE,SAAS,QAAQ,EAAE,CAAC;AAC3E;AAIA,eAAsB,mBAAoD;AACxE,SAAO,2BAAqC;AAC9C;AAEA,eAAsB,cACpB,cACsC;AACtC,QAAM,SAAS,MAAM,iBAAiB;AACtC,MAAI,QAAQ;AACV,WAAO,OAAO;AAAA,EAChB;AAIA,QAAM,gBAAgB,oBAAI,eAAe,CAAC;AAG1C,MAAI,oBAAI,yBAAyB,eAAe;AAC9C,WAAO;AAAA,MACL,MAAM,oBAAI;AAAA,MACV,MAAM,oBAAI;AAAA,MACV,QAAQ;AAAA,MACR,MAAM,oBAAI;AAAA,MACV,MAAM;AAAA,QACJ,MAAM,oBAAI;AAAA,QACV,MAAM,oBAAI;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AACF;AAIA,eAAsB,gBAAsD;AAC1E,QAAM,SAAS,MAAM,2BAAqC;AAC1D,SAAO,QAAQ;AACjB;;;AgF/PA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAAC;AACAC;AAOAC;AAEAC;AAEA;AAQO,IAAM,mBAAmB,OAAO,OAAY;AAEjD,MAAI;AACF,WAAO,MAAM,GAAG,iCAA2B;AAAA,EAC7C,SAAS,KAAU;AACjB,QAAI,IAAI,UAAU,IAAI,WAAW,KAAK;AACpC,aAAO,EAAE,mCAA6B;AAAA,IACxC,OAAO;AACL,cAAQ,MAAM,GAAG;AACjB,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAEO,IAAM,yBAAyB,OAAO,SAA+B;AAE1E,QAAM,aAA0B,YAAY;AAAA,IAC1C,SAAO,IAAI,SAAS,KAAK;AAAA,EAC3B,EAAE,IAAI,QAAM,EAAE,GAAG,GAAG,IAAI,MAAM;AAAA,EAAC,EAAE,EAAE;AACnC,QAAM,cAAc,YAAY,EAAE,MAAM,KAAK,CAAC;AAChD;AAEO,IAAM,eAAe,OAC1B,WACAC,WAA4B,CAAC,MAC1B;AACH,QAAM,gBAAgB,UAAU;AAChC,MAAI;AACJ,MAAI,qDAA8C;AAChD,eAAmB,YAAY;AAAA,EACjC;AACA,QAAM,gBAAgB,UAAU;AAChC,QAAM,SAAS,UAAU;AAEzB,QAAM,MAAM,CAAC,YAAoB;AAC/B,QAAI,CAAC,QAAQ;AACX,cAAQ,IAAI,OAAO;AAAA,IACrB;AAAA,EACF;AAGA,MAAI;AACJ,MAAI,yCAAwC;AAC1C,cAAU,CAAC,gBAAgB,CAAC;AAAA,EAC9B,WAAW,mCAAqC;AAC9C,QAAIA,SAAQ,MAAM;AAChB,UAAI,CAACA,SAAQ,KAAK,OAAO;AACvB,cAAM,IAAI,MAAM,0CAA0C;AAAA,MAC5D;AACA,gBAAU,CAACA,SAAQ,KAAK,KAAK;AAAA,IAC/B,OAAO;AACL,YAAM,OAAQ,MAAM,WAAW,UAAU,OAAO;AAChD,gBAAU,KAAK,IAAI,SAAO,IAAI,KAAK;AAAA,IACrC;AAAA,EACF,WAAW,qDAA8C;AACvD,cAAU,CAAC,gBAAgB,cAAc,IAAI;AAAA,EAC/C,OAAO;AACL,UAAM,IAAI,MAAM,gCAAgC,aAAa,GAAG;AAAA,EAClE;AAEA,QAAM,SAAS,QAAQ;AACvB,MAAI,QAAQ;AAGZ,aAAW,UAAU,SAAS;AAC5B;AACA,UAAM,kBAAkB,SAAS,IAAI,IAAI,KAAK,IAAI,MAAM,MAAM;AAE9D,UAAM,KAAK,MAAM,MAAM;AAEvB,QAAI;AACF,YAAM,MAAM,MAAM,iBAAiB,EAAE;AAGrC,UAAI,IAAI,aAAa,GAAG;AAEtB,YACEA,SAAQ,SACRA,SAAQ,MAAM,aAAa,KAC3BA,SAAQ,MAAM,aAAa,EAAE,SAAS,aAAa,GACnD;AACA,cAAI,eAAe,aAAa,UAAU,MAAM,WAAW;AAAA,QAC7D,OAAO;AAEL;AAAA,QACF;AAAA,MACF;AAGA,UAAI,CAACA,SAAQ,MAAM;AACjB;AAAA,UA
CE,eAAe,aAAa,UAAU,MAAM,aAAa,eAAe;AAAA,QAC1E;AAEA,YAAI,UAAU,cAAc;AAG1B,cAAI,aAAa,IAAI,KAAK,IAAI;AAC9B,gBAAM,WAAW,MAAM,GAAG,IAAI,GAAG;AACjC,cAAI,OAAO,SAAS;AAAA,QACtB;AAGA,YAAI,mCAAqC;AACvC,gBAAc,eAAe,GAAG,MAAM,YAAY;AAChD,kBAAM,UAAU,GAAG,EAAE;AAAA,UACvB,CAAC;AAAA,QACH,OAAO;AACL,gBAAM,UAAU,GAAG,EAAE;AAAA,QACvB;AAEA,YAAI,eAAe,aAAa,UAAU,MAAM,YAAY;AAAA,MAC9D;AAGA,UAAI,aAAa,IAAI,KAAK,IAAI;AAC9B,YAAM,GAAG,IAAI,GAAG;AAAA,IAClB,SAAS,KAAK;AACZ,cAAQ;AAAA,QACN,eAAe,aAAa,UAAU,MAAM;AAAA,QAC5C;AAAA,MACF;AACA,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAEO,IAAM,gBAAgB,OAC3B,YACAA,WAA4B,CAAC,MAC1B;AACH,MAAI;AAEJ,MAAI,oBAAY,eAAe;AAC7B,QAAIA,SAAQ,MAAM;AAChB,kBAAY,CAACA,SAAQ,KAAK,QAAQ;AAAA,IACpC,WAAW,CAACA,SAAQ,aAAa,CAACA,SAAQ,UAAU,QAAQ;AAE1D,kBAAY,MAAe,gBAAQ,aAAa;AAAA,IAClD,OAAO;AACL,kBAAYA,SAAQ;AAAA,IACtB;AAAA,EACF,OAAO;AAEL,gBAAY,CAAC,iBAAiB;AAAA,EAChC;AAEA,MAAI,UAAU,SAAS,GAAG;AACxB,YAAQ,IAAI,2BAA2B,UAAU,MAAM,UAAU;AAAA,EACnE,OAAO;AACL,YAAQ,IAAI,qBAAqB;AAAA,EACnC;AAEA,MAAI,QAAQ;AAEZ,aAAW,YAAY,WAAW;AAChC;AACA,QAAI,UAAU,SAAS,GAAG;AACxB,cAAQ,IAAI,aAAa,KAAK,IAAI,UAAU,MAAM,GAAG;AAAA,IACvD;AAEA,eAAW,aAAa,YAAY;AAElC,YAAc;AAAA,QACZ;AAAA,QACA,YAAY,MAAM,aAAa,WAAWA,QAAO;AAAA,MACnD;AAAA,IACF;AAAA,EACF;AACA,UAAQ,IAAI,qBAAqB;AACnC;;;AC9LA;AAMO,IAAM,cAAqC;AAAA,EAChD;AAAA,IACE;AAAA,IACA;AAAA,EACF;AAAA,EACA;AAAA,IACE;AAAA,IACA;AAAA,EACF;AAAA,EACA;AAAA,IACE;AAAA,IACA;AAAA,EACF;AAAA,EACA;AAAA,IACE;AAAA,IACA;AAAA,EACF;AAAA,EACA;AAAA,IACE;AAAA,IACA;AAAA,EACF;AAAA,EACA;AAAA,IACE;AAAA,IACA;AAAA,EACF;AAAA,EACA;AAAA,IACE;AAAA,IACA;AAAA,EACF;AAAA,EACA;AAAA,IACE;AAAA,IACA;AAAA,EACF;AACF;;;ACvCA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,cAAAC;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,qBAAoB;AACpB,uBAAsB;AAEtB;AAMO,IAAM,aAAN,MAAiB;AAAA,EAItB,YAAY,MAAsB,OAAwB;AACxD,SAAK,OAAO;AACZ,SAAK,QAAQ;AAAA,EACf;AACF;AAEO,SAAS,cAAc,MAAuB;AACnD,UAAQ,MAAM;AAAA,IAEZ;AACE,aAAO;AAAA,IACT;AACE,aAAO;AAAA,IACT;AACE,aAAO;AAAA,IACT;AACE,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;AAOO,SAAS,iBAAiB,eAA0C;AACzE,UAAQ,eAAe;AAAA,IACrB;AACE,aAAO,wBAAwB;AAAA,IACjC;AACE,aAAO,2CAA8C;AAAA,IACvD;AAAA,IACA;AACE,aAAO;AAAA;AAAA;AAAA;AAAA,MAIP;AAAA,IACF;AACE,aAAO,CAAC;AAAA,EACZ;AACF;AAEO,IAAK,sBAAL,kBAAKC,yBAAL;AACL,EAAAA,qBAAA,YAAS;AACT,EAAAA,qBAAA,eAAY;AACZ,EAAAA,qBAAA,WAAQ;AACR,EAAAA,qBAAA,WAAQ;AACR,EAAAA,qBAAA,WAAQ;AALE,SAAAA;AAAA,GAAA;AAQL,IAAM,sBAAsB;AAAA,EACjC,QAAQ;AAAA,IACN,KAAK;AAAA,IACL,MAAM;AAAA,IACN,aAAa;AAAA,MACX,IAAI,2DAA0D;AAAA,IAChE;AAAA,EACF;AAAA,EACA,WAAW;AAAA,IACT,KAAK;AAAA,IACL,MAAM;AAAA,IACN,aAAa;AAAA,MACX,IAAI,iDAAqD;AAAA,MACzD,IAAI,iDAAqD;AAAA,MACzD,IAAI,6CAAmD;AAAA,IACzD;AAAA,EACF;AAAA,EACA,OAAO;AAAA,IACL,KAAK;AAAA,IACL,MAAM;AAAA,IACN,aAAa;AAAA,MACX,IAAI,mDAAsD;AAAA,MAC1D,IAAI,mDAAsD;AAAA,MAC1D,IAAI,iEAA6D;AAAA,MACjE,IAAI,6DAA2D;AAAA,MAC/D,IAAI,6CAAmD;AAAA,IACzD;AAAA,EACF;AAAA,EACA,OAAO;AAAA,IACL,KAAK;AAAA,IACL,MAAM;AAAA,IACN,aAAa;AAAA,MACX,IAAI,mDAAsD;AAAA,MAC1D,IAAI,+CAAoD;AAAA,MACxD,IAAI,iEAA6D;AAAA,MACjE,IAAI,qDAAuD;AAAA,MAC3D,IAAI,6DAA2D;AAAA,MAC/D,IAAI,6CAAmD;AAAA,IACzD;AAAA,EACF;AAAA,EACA,OAAO;AAAA,IACL,KAAK;AAAA,IACL,MAAM;AAAA,IACN,aAAa;AAAA,MACX,IAAI,mDAAsD;AAAA,MAC1D,IAAI,iDAAqD;AAAA,MACzD,IAAI,6DAA2D;AAAA,MAC/D,IAAI,qDAAuD;AAAA,MAC3D,IAAI,mDAAsD;AAAA,MAC1D,IAAI,6DAA2D;AAAA,MAC/D,IAAI,6CAAmD;AAAA,IACzD;AAAA,EACF;AACF;AAEO,SAAS,wBAAwB;AACtC,aAAO,iBAAAC,SAAU,mBAAmB;AACtC;AAEO,SAAS,yBAAyB,IAAY;AACnD,QAAM,QAAQ,OAAO,OAAO,mBAAmB;AAC/C,SAAO,MAAM,KAAK,UAAQ,KAAK,QAAQ,EAAE;AAC3C;AAEO,SAAS,uBACd,UACA,WACA,gBACA;AACA,QAAM,kBAAkB;AAAA,IACtB,GAAG,IAAI,IA
AI,eAAe,IAAI,UAAQ,KAAK,YAAY,CAAC;AAAA,EAC1D;AACA,QAAM,WAAW,OAAO,OAAO,mBAAmB;AAClD,MAAI,kBAAc,eAAAC;AAAA,IAChB,SACG,OAAO,aAAW,gBAAgB,QAAQ,QAAQ,GAAG,MAAM,EAAE,EAC7D,IAAI,aAAW,QAAQ,WAAW;AAAA,EACvC;AACA,WAAS,cAAc,aAAa;AAClC,QACE,WAAW,SAAS,YACpB,iBAAiB,WAAW,KAAK,EAAE,QAAQ,SAAS,MAAM,IAC1D;AACA,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;AAEO,SAAS,gCAAgC,OAAwB;AACtE,SAAO,cAAc,KAAK,IAAI;AAChC;AAGO,IAAM;AACN,IAAM;AACN,IAAM;;;ADlKbC;AAOAC;AAEA,IAAAC,oBAAsB;AAEf,IAAM,mBAAmB;AAAA,EAC9B,OAAO;AAAA,EACP,OAAO;AAAA,EACP,OAAO;AAAA,EACP,QAAQ;AACV;AAEA,IAAM,cAAc;AAAA,EAClB,GAAG;AAAA,EACH,SAAS;AACX;AAGA,IAAM,4BAA4B;AAAA,EAChC,YAAY;AAAA,EACZ,YAAY;AAAA,EACZ,YAAY;AAAA,EACZ,YAAY;AACd;AAEO,IAAM,gBAAgB;AAAA;AAAA,EAE3B,MAAM;AAAA;AAAA,EAEN,MAAM;AACR;AAEO,IAAMC,QAAN,MAA8B;AAAA,EASnC,YAAY,IAAY,MAAc,cAAsB;AAF5D,uBAAc,CAAC;AAGb,SAAK,MAAM;AACX,SAAK,OAAO;AACZ,SAAK,eAAe;AAEpB,SAAK,UAAU,cAAc;AAAA,EAC/B;AAAA,EAEA,eAAe,UAAkB;AAC/B,SAAK,WAAW;AAChB,WAAO;AAAA,EACT;AACF;AAEA,IAAM,gBAAgB;AAAA,EACpB,OAAO,IAAIA;AAAA,IACT,YAAY;AAAA,IACZ;AAAA;AAAA,EAEF,EAAE,eAAe,YAAY,KAAK;AAAA,EAClC,OAAO,IAAIA;AAAA,IACT,YAAY;AAAA,IACZ;AAAA;AAAA,EAEF,EAAE,eAAe,YAAY,KAAK;AAAA,EAClC,OAAO,IAAIA;AAAA,IACT,YAAY;AAAA,IACZ;AAAA;AAAA,EAEF,EAAE,eAAe,YAAY,MAAM;AAAA,EACnC,QAAQ,IAAIA,MAAK,YAAY,QAAQ,+BAAoC;AAAA,EACzE,SAAS,IAAIA,MAAK,YAAY,SAAS,8BAAoC;AAC7E;AAEO,SAAS,kBAA8C;AAC5D,aAAO,kBAAAC,SAAU,aAAa;AAChC;AAEO,IAAM,wBAAwB,OAAO,OAAO,aAAa,EAAE;AAAA,EAChE,UAAQ,KAAK;AACf;AAEO,IAAM,0BAA0B,OAAO,OAAO,aAAa,EAAE;AAAA,EAClE,UAAQ,KAAK;AACf;AAEO,SAAS,UAAU,MAAe;AACvC,SAAO,sBAAsB,KAAK,aAAW,MAAM,SAAS,OAAO,CAAC;AACtE;AAKO,SAAS,oBAAoB,IAAa;AAC/C,MAAI,CAAC,IAAI;AACP,WAAO;AAAA,EACT;AACA,QAAM,WAAW,gBAAgB;AACjC,QAAM,MAAM,OAAO,OAAO,QAAQ,EAAE,SAAS;AAC7C,MAAI,OAAO,YAAY,SAAS,OAAO,YAAY,SAAS;AAC1D,WAAO;AAAA,EACT;AACA,MAAI,OAAO,SAAS,EAAE,GACpB,QAAQ;AACV,KAAG;AACD,QAAI,CAAC,MAAM;AACT;AAAA,IACF;AACA,WAAO,SAAS,KAAK,QAAS;AAC9B;AAAA,EACF,SAAS,SAAS;AAClB,SAAO;AACT;AAKA,eAAsB,aAAa,IAAa;AAC9C,MAAI,UAAU,EAAE,GAAG;AACjB,WAAO,oBAAoB,EAAE;AAAA,EAC/B;AACA,QAAM,YAAa,MAAM,qBAAqB,IAAI;AAAA,IAChD,eAAe;AAAA,EACjB,CAAC;AACD,WAAS,QAAQ,WAAW;AAC1B,QAAI,UAAU,MAAM,QAAQ,GAAG;AAC7B,aAAO,oBAAoB,KAAK,QAAQ,IAAI;AAAA,IAC9C;AAAA,EACF;AACA,SAAO;AACT;AAKO,SAAS,mBAAmB,SAAkB,SAA0B;AAC7E,MAAI,CAAC,SAAS;AACZ,WAAO;AAAA,EACT;AACA,MAAI,CAAC,SAAS;AACZ,WAAO;AAAA,EACT;AACA,SAAO,oBAAoB,OAAO,IAAI,oBAAoB,OAAO,IAC7D,UACA;AACN;AASA,eAAsB,QACpB,QACA,MAC8B;AAC9B,MAAI,CAAC,QAAQ;AACX,WAAO;AAAA,EACT;AACA,MAAI,OAAY,CAAC;AAGjB,MAAI,UAAU,MAAM,GAAG;AACrB,eAAO,kBAAAA;AAAA,MACL,OAAO,OAAO,aAAa,EAAE,KAAK,CAAAC,UAAQA,MAAK,QAAQ,MAAM;AAAA,IAC/D;AAAA,EACF,OAAO;AAEL,aAAS,aAAa,MAAM;AAAA,EAC9B;AACA,MAAI;AACF,UAAM,KAAK,SAAS;AACpB,UAAM,SAAS,MAAM,GAAG,IAAI,YAAY,MAAM,CAAC;AAC/C,WAAO,OAAO,OAAO,MAAM,MAAM;AAEjC,SAAK,MAAM,kBAAkB,KAAK,KAAK,KAAK,OAAO;AAAA,EACrD,SAAS,KAAK;AACZ,QAAI,CAAC,UAAU,MAAM,KAAK,MAAM,eAAe;AAC7C,iBAAO,kBAAAD,SAAU,cAAc,MAAM;AAAA,IACvC;AAEA,QAAI,OAAO,KAAK,IAAI,EAAE,WAAW,GAAG;AAClC,YAAM;AAAA,IACR;AAAA,EACF;AACA,SAAO;AACT;AAKA,eAAe,gBACb,YACA,MACoB;AAEpB,MAAI,eAAe,YAAY,OAAO;AACpC,WAAO,YAAY;AAAA,EACrB;AACA,MAAI,cAAc,MAAM,QAAQ,YAAY,IAAI;AAChD,MAAI,QAAQ,cAAc,CAAC,WAAW,IAAI,CAAC;AAC3C,MAAI,UAAU,CAAC,UAAU;AAEzB,SACE,eACA,YAAY,YACZ,QAAQ,QAAQ,YAAY,QAAQ,MAAM,IAC1C;AACA,YAAQ,KAAK,YAAY,QAAQ;AACjC,kBAAc,MAAM,QAAQ,YAAY,QAAQ;AAChD,QAAI,aAAa;AACf,YAAM,KAAK,WAAW;AAAA,IACxB;AAAA,EACF;AACA,SAAO;AACT;AAEA,eAAsB,uBACpB,YACmB;AACnB,QAAM,QAAQ,MAAM,qBAAqB,UAAU;AACnD,SAAO,MAAM,IAAI,UAAQ,KAAK,GAAI;AACpC;AAUA,eAAsB,qBACpB,YACA,MACA;AAEA,SAAO,gBAAgB,YAAY,IAAI;AACzC;AAKO,SAAS,0BACd,WACA,YACA;AACA,MAAI,aAAa,CAAC,MAAM,QAAQ,UAAU,UAAU,CAAC,GAAG;AACtD,UAAM,YAAY,UAAU,UAAU;AACtC,cAAU,UAAU,IAA
I,CAAC,SAAS;AAClC,QAAI,mCAAqC;AACvC,gBAAU,UAAU,EAAE,sBAAyB;AAAA,IACjD;AAAA,EACF;AACA,SAAO;AACT;AAEA,eAAsB,cAAc,OAAgB;AAClD,QAAM,QAAQ,MAAM,YAAY,KAAK;AACrC,SAAO,MAAM,IAAI,UAAQ,KAAK,GAAG;AACnC;AAMA,eAAsB,YAAY,OAAoC;AACpE,MAAI,OAAO;AACT,WAAO,SAAS,OAAO,QAAQ;AAAA,EACjC,OAAO;AACL,QAAI;AACJ,QAAI;AACF,cAAQ,SAAS;AAAA,IACnB,SAAS,OAAO;AAAA,IAEhB;AACA,WAAO,SAAS,KAAK;AAAA,EACvB;AACA,iBAAe,SAAS,IAAS;AAC/B,QAAI,QAAmB,CAAC;AACxB,QAAI,IAAI;AACN,YAAME,QAAO,MAAM,GAAG;AAAA,QACpB,cAAc,MAAM;AAAA,UAClB,cAAc;AAAA,QAChB,CAAC;AAAA,MACH;AACA,cAAQA,MAAK,KAAK,IAAI,CAAC,QAAa,IAAI,GAAG;AAC3C,YAAM;AAAA,QACJ,UAAS,KAAK,MAAM,kBAAkB,KAAK,KAAM,KAAK,OAAO;AAAA,MAC/D;AAAA,IACF;AACA,UAAM,eAAe,gBAAgB;AAGrC,aAAS,iBAAiB,2BAA2B;AACnD,YAAM,cAAc,aAAa,aAAa;AAC9C,YAAM,YAAY,MAAM;AAAA,QACtB,YACE,kBAAkB,OAAO,KAAM,OAAO,OAAO,MAAM;AAAA,MACvD,EAAE,CAAC;AACH,UAAI,aAAa,MAAM;AACrB,cAAM,KAAK,eAAe,aAAa,KAAK;AAAA,MAC9C,OAAO;AAEL,gBAAQ,MAAM,OAAO,UAAQ,KAAK,QAAQ,UAAU,GAAG;AACvD,kBAAU,MAAM,kBAAkB,UAAU,KAAM,UAAU,OAAO;AACnE,cAAM,KAAK,OAAO,OAAO,aAAa,SAAS,CAAC;AAAA,MAClD;AAAA,IACF;AAEA,aAAS,QAAQ,OAAO;AACtB,UAAI,CAAC,KAAK,aAAa;AACrB;AAAA,MACF;AACA,eAAS,cAAc,OAAO,KAAK,KAAK,WAAW,GAAG;AACpD,aAAK,cAAc;AAAA,UACjB,KAAK;AAAA,UACL;AAAA,QACF;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AACF;AAEO,IAAM,mBAAN,MAAuB;AAAA,EAE5B,cAAc;AACZ,SAAK,kBAAkB,CAAC;AAAA,EAC1B;AAAA,EAEA,MAAM,UAAU,cAAuB,YAAqB;AAG1D,QACE,gBAAgB,QAChB,iBAAiB,MACjB,iBAAiB,cACjB,iBAAiB,YAAY,WAC7B,eAAe,YAAY,SAC3B;AACA,aAAO;AAAA,IACT;AACA,QAAI,UAAU,aAAa,KAAK,gBAAgB,UAAU,IAAI;AAC9D,QAAI,CAAC,WAAW,YAAY;AAC1B,gBAAU,MAAM,uBAAuB,UAAU;AACjD,WAAK,gBAAgB,UAAU,IAAI;AAAA,IACrC;AAEA,WAAO,SAAS,QAAQ,YAAY,MAAM;AAAA,EAC5C;AAAA,EAEA,MAAM,mBAAmB,SAAmB,YAAoB;AAC9D,QAAI,oBAAoB,CAAC;AAIzB,aAAS,UAAU,SAAS;AAC1B,YAAM,aAAa,MAAM,KAAK,kBAAkB,QAAQ,UAAU;AAClE,UAAI,YAAY;AACd,0BAAkB,KAAK,UAAU;AAAA,MACnC;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,kBAAkB,QAAgB,YAAoB;AAC1D,UAAM,SAAS,UAAU,OAAO,UAAU,OAAO,QAAQ,SAAS;AAClE,QAAI,MAAM,KAAK,UAAU,QAAQ,UAAU,GAAG;AAC5C,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT;AACF;AAKO,SAAS,YAAY,UAAkB;AAC5C,MAAI,UAAU,4BAA4B,GAAG;AAC3C,WAAO;AAAA,EACT;AACA,SAAO,aAAa,QAAQ;AAC9B;AAKO,SAAS,kBAAkB,QAAgB,SAAkB;AAElE,MACE,OAAO,4BAA4B,MAClC,UAAU,MAAM,KAAK,YAAY,cAAc,OAChD;AACA,WAAO,OAAO,MAAM,oBAAoB,GAAG,SAAS,EAAE,EAAE,CAAC;AAAA,EAC3D;AACA,SAAO;AACT;;;AEvZA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAAC;AACAC;;;ACDA,IAAAC,wBAAA;AAAA,SAAAA,uBAAA;AAAA;AAAA;AAAO,SAAS,qBACd,UACA,aACA;AACA,MAAI;AACJ,MAAI,CAAC,aAAa;AAChB,WAAO;AAAA,EACT,OAAO;AACL,WAAO,YAAY,MAAM,GAAG;AAAA,EAC9B;AACA,WAAS,WAAW,MAAM;AACxB,QAAI,CAAC,SAAS,SAAS,OAAO,GAAG;AAC/B,YAAM,IAAI,MAAM,YAAY,OAAO,2BAA2B;AAAA,IAChE;AAAA,EACF;AACA,SAAO;AACT;;;ADNO,SAAS,oBAAoB;AAClC,MAAI,CAAC,oBAAI,sBAAsB;AAC7B;AAAA,EACF;AAEA,QAAM,qBAA+C,CAAC;AAEtD,sBAAI,qBAAqB,MAAM,GAAG,EAAE,QAAQ,sBAAoB;AAC9D,UAAM,CAAC,UAAU,GAAG,QAAQ,IAAI,iBAAiB,MAAM,GAAG;AAE1D,aAAS,QAAQ,aAAW;AAC1B,UAAI,CAAC,mBAAmB,QAAQ,GAAG;AACjC,2BAAmB,QAAQ,IAAI,CAAC;AAAA,MAClC;AACA,yBAAmB,QAAQ,EAAE,KAAK,OAAO;AAAA,IAC3C,CAAC;AAAA,EACH,CAAC;AAED,SAAO;AACT;AAEO,SAAS,UAAU,aAAqB;AAC7C,QAAM,WAAmB,YAAY;AACrC,QAAM,QAAQ,sBAAsB,QAAQ;AAC5C,SAAO,MAAM,SAAS,WAAW;AACnC;AAEO,SAAS,sBAAsB,UAAkB;AACtD,MAAI,QAAkB,CAAC;AACvB,QAAM,WAAW,kBAAkB;AACnC,MAAI,UAAU;AACZ,UAAM,cAAc,SAAS,GAAG;AAChC,UAAM,cAAc,SAAS,QAAQ,KAAK,CAAC;AAI3C,UAAM,kBAAkB,YAAY;AAAA,MAClC,CAAC,KAAe,SAAiB;AAC/B,YAAI,KAAK,WAAW,GAAG,GAAG;AACxB,cAAI,WAAW,KAAK,UAAU,CAAC;AAC/B,cAAI,KAAK,QAAQ;AAAA,QACnB;AACA,eAAO;AAAA,MACT;AAAA,MACA,CAAC;AAAA,IACH;AAEA,QAAI,aAAa;AACf,YAAM,KAAK,GAAG,WAAW;AAAA,IAC3B;AACA,QAAI,YAAY,QAAQ;AACtB,YAAM,KAAK,GAAG,WAAW;AAAA,IAC3B;AAGA,YAAQ,MAAM,OAAO,UAAQ;AAC3B,aAAO,gBAAgB,QAAQ,IAAI,KAAK,MAAM,CA
AC,KAAK,WAAW,GAAG;AAAA,IACpE,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAEO,IAAK,oBAAL,kBAAKC,uBAAL;AACL,EAAAA,mBAAA,eAAY;AACZ,EAAAA,mBAAA,mBAAgB;AAChB,EAAAA,mBAAA,iBAAc;AACd,EAAAA,mBAAA,qBAAkB;AAJR,SAAAA;AAAA,GAAA;;;AEzEZ;AAAA;AAAA;AAAA,kBAAAC;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACGAC;AACAC;;;ACJA;AAAA;AAAA;AAAA,kBAAAC;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA,qBAAAC;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;AAAAC;;;ACAAC;AAYO,SAAS,UAAU,MAAgB,SAAiB,KAAW;AACpE,SAAO;AAAA,IACL;AAAA,IACA;AAAA;AAAA,IACA,EAAE,QAAiB;AAAA,EACrB;AACF;AAEA,eAAsB,eACpB,MACA,QACA;AAEA,MAAI,UAAW,OAA6B,aAAa;AACvD,WAAQ,OAA6B;AAAA,EACvC;AACA,QAAM,iBAAiB,MAAc,kBAAkB;AAEvD,MAAI,cAAc;AAClB,MAAI,cAAc,GAAG;AACnB,mBAAe,IAAI,YAAY,CAAC;AAAA,EAClC;AACA,iBAAe,IAAI,IAAI;AAEvB,SAAO,GAAG,eAAe,WAAW,GAAG,WAAW;AACpD;;;AD/BA,IAAM,cAAc;AACpB,IAAM,UAAU;AAET,IAAM,UAAU;AAAA,EACrB,mBAAmB;AACrB;AAUA,eAAsB,aACpB,KACA,OACA,UACA,MACA;AACA,MAAI,CAAC;AAAO,WAAO,UAAU,MAAM,gBAAgB;AACnD,MAAI,CAAC;AAAU,WAAO,UAAU,MAAM,mBAAmB;AAEzD,QAAM,SAAS,MAAY,qBAAqB,KAAK;AACrD,MAAI,UAAU,MAAM;AAClB,YAAQ,KAAK,QAAQ,KAAK,qBAAqB;AAC/C,WAAO,UAAU,MAAM,WAAW;AAAA,EACpC;AAEA,MAAI,OAAO,sCAAgC;AACzC,YAAQ,KAAK,QAAQ,KAAK,gBAAgB,MAAM;AAChD,WAAO,UAAU,MAAM,WAAW;AAAA,EACpC;AAEA,MAAI,CAAC,OAAO,UAAU;AACpB,YAAQ,KAAK,QAAQ,KAAK,wBAAwB,MAAM;AACxD,WAAO,UAAU,MAAM,OAAO;AAAA,EAChC;AAEA,MAAI,CAAE,MAAM,QAAQ,UAAU,OAAO,QAAQ,GAAI;AAC/C,WAAO,UAAU,MAAM,WAAW;AAAA,EACpC;AAGA,SAAO,OAAO;AACd,SAAO,KAAK,MAAM,MAAM;AAC1B;;;AErDA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAAC;AAGAC;AACA,IAAAC,qBAAkB;AAYX,IAAM,kBAAuC,CAClD,MACA,SACG,QAAQ,QAAQ,IAAI;AAKzB,eAAsBC,cACpB,SACA,sBAA+B,MAC/B,MACA,YACA;AACA,MAAI,CAAC,YAAY;AACf,UAAM,IAAI,MAAM,qCAAqC;AAAA,EACvD;AACA,MAAI,CAAC,QAAQ,QAAQ;AACnB,WAAO,UAAU,MAAM,sBAAsB;AAAA,EAC/C;AACA,MAAI,CAAC,QAAQ,OAAO;AAClB,WAAO,UAAU,MAAM,yBAAyB;AAAA,EAClD;AAGA,QAAM,SAAS,qBAAqB,QAAQ,MAAM;AAElD,MAAI;AAGJ,MAAI;AACF,aAAS,MAAY,QAAQ,MAAM;AAAA,EACrC,SAAS,KAAU;AAEjB,QAAI,CAAC,IAAI,UAAU,IAAI,WAAW,KAAK;AACrC,aAAO;AAAA,QACL;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,MAAI,CAAC,QAAQ;AACX,aAAS,MAAY,qBAAqB,QAAQ,KAAK;AAAA,EACzD;AAGA,MAAI,CAAC,UAAU,qBAAqB;AAClC,WAAO;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAGA,MAAI,CAAC,QAAQ;AAEX,aAAS;AAAA,MACP,KAAK;AAAA,MACL,OAAO,QAAQ;AAAA,MACf,OAAO,CAAC;AAAA,MACR,UAAkB,YAAY;AAAA,IAChC;AAAA,EACF;AAEA,MAAI,UAAU,MAAM,SAAS,QAAQ,OAAO;AAE5C,UAAQ,qBAAqB;AAE7B,MAAI;AAEF,WAAO,QAAQ;AAEf,cAAW,MAAM,WAAW,SAAS;AAAA,MACnC,cAAc;AAAA,MACd,iBAAiB;AAAA,IACnB,CAAC;AAAA,EACH,SAAS,KAAU;AACjB,WAAO,UAAU,MAAM,qBAAqB,GAAG;AAAA,EACjD;AAEA,SAAO,KAAK,MAAM,OAAO;AAC3B;AAEA,eAAe,qBAAqB,MAAY,SAAyB;AACvE,QAAM,aAAa,QAAQ,SAAS,MAAM;AAC1C,MAAI,YAAY;AACd,UAAM,WAAW,UAAM,mBAAAC,SAAM,UAAU;AACvC,QAAI,SAAS,WAAW,KAAK;AAC3B,YAAM,OAAO,SAAS,QAAQ,IAAI,cAAc;AAChD,UAAI,KAAK,WAAW,QAAQ,GAAG;AAC7B,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AACF;AAKA,eAAe,SAAS,MAAY,SAA2C;AAC7E,MAAI;AACJ,MAAI;AACJ,MAAI;AACJ,MAAI;AACJ,MAAI;AAEJ,MAAI,QAAQ,SAAS;AACnB,UAAM,UAAU,QAAQ;AAExB,QAAI,QAAQ,MAAM;AAChB,YAAM,OAAO,QAAQ;AAErB,UAAI,KAAK,WAAW;AAClB,oBAAY,KAAK;AAAA,MACnB;AAEA,UAAI,KAAK,YAAY;AACnB,mBAAW,KAAK;AAAA,MAClB;AAAA,IACF;AAEA,iBAAa,MAAM,qBAAqB,MAAM,OAAO;AAErD,wBAAoB;AAAA,MAClB,GAAG,QAAQ;AAAA,IACb;AAAA,EACF;AAGA,MAAI,QAAQ,QAAQ;AAClB,aAAS;AAAA,MACP,GAAG,QAAQ;AAAA,IACb;AAAA,EACF;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,UAAU,QAAQ;AAAA,IAClB,cAAc,QAAQ;AAAA,IACtB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;ADlKA;AASA,IAAM,iBAAiB,QAAQ,uBAAuB,EAAE;AAEjD,SAAS,cAAc,YAAiC;AAC7D,SAAO,CACL,aACA,cACA,SACA,SACG;AACH,UAAM,UAA0B;AAAA,MAC9B,UAAU;AAAA,MACV;AAAA,MACA,QAAQ,QAAQ;AAAA,M
AChB;AAAA,MACA,OAAO,QAAQ,MAAM;AAAA,MACrB,QAAQ;AAAA,QACN;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAEA,WAAWC;AAAA,MACT;AAAA,MACA;AAAA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACF;AAOA,eAAsB,gBACpB,QACA,aACA,YACA;AACA,MAAI;AACF,UAAM,EAAE,UAAU,aAAa,IAAI;AAEnC,QAAI,CAAC,YAAY,CAAC,cAAc;AAC9B,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,UAAM,SAAS,cAAc,UAAU;AACvC,WAAO,IAAI;AAAA,MACT;AAAA,QACE,UAAU,OAAO;AAAA,QACjB,cAAc,OAAO;AAAA,QACrB,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF;AAAA,EACF,SAAS,KAAU;AACjB,YAAQ,MAAM,GAAG;AACjB,UAAM,IAAI,MAAM,sDAAsD,GAAG,EAAE;AAAA,EAC7E;AACF;AAEA,eAAsB,eAAe,QAA2B;AAC9D,SAAO,sCAAkC,MAAM;AACjD;;;AE7EA;AAAA;AAAA,uBAAAC;AAAA,EAAA;AAAA,wBAAAC;AAAA,EAAA,uBAAAC;AAAA;AAAA,IAAAC,qBAAkB;AAIlB;AAWA,IAAM,eAAe,QAAQ,kCAAkC,EAAE;AAE1D,SAASC,eAAc,YAAiC;AAY7D,SAAO,OACL,QACA,KACA,SACA,WACA,aACA,cACA,SACAC,SACA,SACG;AACH,UAAM,UAA0B;AAAA;AAAA,MAE9B,UAAU;AAAA,MACV;AAAA,MACA,QAAQ,QAAQ;AAAA,MAChB;AAAA,MACA,OAAO,SAAS,SAAS,SAAS;AAAA,MAClC,QAAQ;AAAA,QACN;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAEA,WAAWC;AAAA,MACT;AAAA,MACA;AAAA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACF;AAMA,SAAS,SAAS,SAAqB,WAAsB;AAE3D,MAAI,QAAQ,MAAM,OAAO;AACvB,WAAO,QAAQ,MAAM;AAAA,EACvB;AAGA,MAAI,UAAU,OAAO;AACnB,WAAO,UAAU;AAAA,EACnB;AAGA,QAAM,WAAW,UAAU;AAC3B,MAAI,YAAY,WAAW,QAAQ,GAAG;AACpC,WAAO;AAAA,EACT;AAEA,QAAM,IAAI;AAAA,IACR,+CAA+C,KAAK;AAAA,MAClD;AAAA,IACF,CAAC,eAAe,KAAK,UAAU,SAAS,CAAC;AAAA,EAC3C;AACF;AAOA,eAAsBC,iBACpB,QACA,YACA;AACA,MAAI;AACF,UAAM,SAASH,eAAc,UAAU;AACvC,UAAM,WAAW,IAAI,aAAa,QAAQ,MAAM;AAChD,aAAS,OAAO;AAChB,WAAO;AAAA,EACT,SAAS,KAAU;AACjB,YAAQ,MAAM,GAAG;AACjB,UAAM,IAAI,MAAM,qDAAqD,GAAG,EAAE;AAAA,EAC5E;AACF;AAEA,eAAsB,oBACpB,YACA,aACoC;AACpC,MAAI;AACF,UAAM,EAAE,UAAU,cAAc,UAAU,IAAI;AAE9C,QAAI,CAAC,YAAY,CAAC,gBAAgB,CAAC,eAAe,CAAC,WAAW;AAE5D,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,UAAM,WAAW,UAAM,mBAAAI,SAAM,SAAS;AAEtC,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,IAAI;AAAA,QACR,2DAA2D,SAAS,UAAU;AAAA,MAChF;AAAA,IACF;AAEA,UAAMC,QAAO,MAAM,SAAS,KAAK;AAEjC,WAAO;AAAA,MACL,QAAQA,MAAK;AAAA,MACb,kBAAkBA,MAAK;AAAA,MACvB,UAAUA,MAAK;AAAA,MACf,aAAaA,MAAK;AAAA,MAClB;AAAA,MACA;AAAA,MACA,aAAa;AAAA,IACf;AAAA,EACF,SAAS,KAAK;AACZ,YAAQ,MAAM,GAAG;AACjB,UAAM,IAAI;AAAA,MACR,0DAA0D,GAAG;AAAA,IAC/D;AAAA,EACF;AACF;AAEA,eAAsBC,kBAAiB;AACrC,SAAO,gCAA8B;AACvC;;;ACzJA,IAAAC,kBAAA;AAAA,SAAAA,iBAAA;AAAA;AAAA;AAAA;AACAC;AAOA,IAAMC,kBAAiB,QAAQ,uBAAuB,EAAE;AAMxD,eAAe,mBAAmB;AAChC,MAAI,SAAS,MAAc,0BAA0B;AAErD,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,MAAM,+BAA+B;AAAA,EACjD;AACA,SAAO;AACT;AAEA,eAAsB,QACpBC,WACA,KACA,MACA;AAEA,QAAM,eAAe,MAAM,iBAAiB;AAC5C,QAAM,cAAc,MAAc,eAAe,EAAE,aAAa,MAAM,CAAC;AAEvE,MAAI,cAAc,GAAG,WAAW;AAChC,QAAM,WAAW,MAAa;AAAA,IAC5B;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,MAAI,CAAC,IAAI,MAAM,OAAO;AACpB,QAAI,MAAM,KAAK,gCAAgC;AAAA,EACjD;AAEA,SAAOA,UAAS,aAAa,UAAU;AAAA,IACrC,OAAO,CAAC,WAAW,SAAS,8CAA8C;AAAA,IAC1E,YAAY;AAAA,IACZ,QAAQ;AAAA,EACV,CAAC,EAAE,KAAK,IAAI;AACd;AAEA,eAAsB,SACpBA,WACA,KACA,MACA;AAEA,QAAM,SAAS,MAAM,iBAAiB;AACtC,QAAM,cAAc,MAAc,eAAe,EAAE,aAAa,MAAM,CAAC;AAEvE,MAAI,cAAc,GAAG,WAAW;AAChC,QAAM,kBAAwB;AAAA,IAC5B;AAAA;AAAA,EAEF;AAEA,MAAI,CAAC,iBAAiB;AACpB,UAAM,IAAI,MAAM,wCAAwC;AAAA,EAC1D;AAEA,SAAOA,UAAS;AAAA,IACd,IAAID;AAAA,MACF;AAAA,QACE,UAAU,OAAO;AAAA,QACjB,cAAc,OAAO;AAAA,QACrB,aAAa;AAAA,MACf;AAAA,MACA,CACE,aACA,cACA,UACA,SACG;AACH,QAAM,YAAY,mDAA0B;AAC5C,aAAK,MAAM,EAAE,aAAa,aAAa,CAAC;AAAA,MAC1C;AAAA,IACF;AAAA,IACA,EAAE,iBAAiB,KAAK,iBAAiB,SAAS;AAAA,IAClD,OAAO,KAAU,WAAqB;AACpC,YAAM,UAAU,gBAAgB,gBAAgB,KAAK;AAErD,YAAM,KAAW,MAAM;AACvB,YAAY;AAAA,QACV,uBAAuB,gBAAgB,KAAK,WAAW,EAAE;AAAA,QACzD;AAAA,UACE;AAAA,QACF;AAAA,MACF;AAEA,UAAI,SAAS,GAAG,OAAO,8BAA8B,EAAE,EAAE;AAAA,IAC3D;AAAA,EACF,EAA
E,KAAK,IAAI;AACb;;;ACrGAE;;;ACEA,IAAM,cAAc;AAEb,IAAM,oBAAoB,CAC/B,aACmB;AACnB,MAAI,CAAC,UAAU;AACb,WAAO,CAAC;AAAA,EACV;AACA,SAAO,SAAS,IAAI,aAAW;AAC7B,QAAI,QAAQ,QAAQ;AACpB,UAAM,SAAS,QAAQ;AACvB,UAAM,SAAS,QAAQ,SAAS,QAAQ,SAAS;AAIjD,UAAMC,WAAU,MAAM,MAAM,WAAW;AACvC,QAAIA,UAAS;AACX,eAAS,SAASA,UAAS;AACzB,cAAM,SAAS,MAAM,SAAS,GAAG,IAAI,MAAM;AAC3C,cAAMC,WAAU,QAAQ;AACxB,gBAAQ,MAAM,QAAQ,OAAOA,QAAO;AAAA,MACtC;AAAA,IACF;AAEA,WAAO,EAAE,OAAO,IAAI,OAAO,KAAK,GAAG,QAAQ,QAAQ,MAAM;AAAA,EAC3D,CAAC;AACH;AAEO,IAAM,UAAU,CAAC,KAAgBC,aAA4B;AAClE,SAAOA,SAAQ,KAAK,CAAC,EAAE,OAAO,QAAQ,QAAQ,MAAM,MAAM;AACxD,QAAI;AACJ,QAAI,QAAQ;AACV,iBAAW,IAAI,QAAQ,QAAQ;AAAA,IACjC,OAAO;AACL,iBAAW,MAAM,KAAK,IAAI,QAAQ,GAAG;AAAA,IACvC;AAEA,UAAM,cACJ,WAAW,QACP,OACA,IAAI,QAAQ,OAAO,YAAY,MAAM,OAAO,YAAY;AAE9D,WAAO,YAAY;AAAA,EACrB,CAAC;AACH;;;ADpCAC;AACAC;;;AEXA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAmB;AACnB,IAAAC,aAAe;AACf,IAAAC,eAAiB;AACjBC;AACA,IAAAC,eAAqB;AAErB,IAAM,OAAO;AACb,IAAMC,aAAY;AAClB,IAAM,aAAa;AACnB,IAAM,iBAAiB;AAEvB,IAAM,cAAc;AACpB,IAAM,YAAY;AAEX,IAAK,eAAL,kBAAKC,kBAAL;AACL,EAAAA,cAAA,SAAM;AACN,EAAAA,cAAA,gBAAa;AAFH,SAAAA;AAAA,GAAA;AAKL,SAAS,UAAU,cAAoC;AAC5D,MAAI,QAAQ;AACZ,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,eAAS,oBAAI;AACb,mBAAa;AACb;AAAA,IACF,KAAK;AAAA,IACL;AACE,eAAS,oBAAI;AACb,mBAAa;AACb;AAAA,EACJ;AACA,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,MAAM,WAAW,UAAU,oCAAoC;AAAA,EAC3E;AACA,SAAO;AACT;AAEA,SAAS,cAAc,QAAgB,MAAc;AACnD,SAAO,cAAAC,QAAO,WAAW,QAAQ,MAAM,YAAY,gBAAgB,QAAQ;AAC7E;AAEO,SAAS,QACd,OACA,eAA6B,iBAC7B;AACA,QAAM,OAAO,cAAAA,QAAO,YAAY,WAAW;AAC3C,QAAM,YAAY,cAAc,UAAU,YAAY,GAAG,IAAI;AAC7D,QAAM,SAAS,cAAAA,QAAO,eAAe,MAAM,WAAW,IAAI;AAC1D,QAAM,OAAO,OAAO,OAAO,KAAK;AAChC,QAAM,QAAQ,OAAO,MAAM;AAC3B,QAAM,YAAY,OAAO,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE,SAAS,KAAK;AAC7D,SAAO,GAAG,KAAK,SAAS,KAAK,CAAC,GAAGF,UAAS,GAAG,SAAS;AACxD;AAEO,SAAS,QACd,OACA,eAA6B,iBAC7B;AACA,QAAM,CAAC,MAAM,SAAS,IAAI,MAAM,MAAMA,UAAS;AAC/C,QAAM,aAAa,OAAO,KAAK,MAAM,KAAK;AAC1C,QAAM,YAAY,cAAc,UAAU,YAAY,GAAG,UAAU;AACnE,QAAM,WAAW,cAAAE,QAAO,iBAAiB,MAAM,WAAW,UAAU;AACpE,QAAM,OAAO,SAAS,OAAO,OAAO,KAAK,WAAW,KAAK,CAAC;AAC1D,QAAM,QAAQ,SAAS,MAAM;AAC7B,SAAO,OAAO,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE,SAAS;AAC/C;AAEA,eAAsB,YACpB,EAAE,KAAK,SAAS,GAChB,QACA;AACA,QAAM,iBAAiB,GAAG,QAAQ;AAElC,QAAM,eAAW,mBAAK,KAAK,QAAQ;AACnC,MAAI,WAAAC,QAAG,UAAU,QAAQ,EAAE,YAAY,GAAG;AACxC,UAAM,IAAI,MAAM,6BAA6B;AAAA,EAC/C;AACA,QAAM,YAAY,WAAAA,QAAG,iBAAiB,QAAQ;AAC9C,QAAM,aAAa,WAAAA,QAAG,sBAAkB,mBAAK,KAAK,cAAc,CAAC;AAEjE,QAAM,OAAO,cAAAD,QAAO,YAAY,WAAW;AAC3C,QAAM,KAAK,cAAAA,QAAO,YAAY,SAAS;AACvC,QAAM,YAAY,cAAc,QAAQ,IAAI;AAC5C,QAAM,SAAS,cAAAA,QAAO,eAAe,MAAM,WAAW,EAAE;AAExD,aAAW,MAAM,IAAI;AACrB,aAAW,MAAM,EAAE;AAEnB,YAAU,KAAK,aAAAE,QAAK,WAAW,CAAC,EAAE,KAAK,MAAM,EAAE,KAAK,UAAU;AAE9D,SAAO,IAAI,QAA2C,OAAK;AACzD,eAAW,GAAG,UAAU,MAAM;AAC5B,QAAE;AAAA,QACA,UAAU;AAAA,QACV;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH,CAAC;AACH;AAEA,eAAe,aAAaC,OAAc;AACxC,QAAM,aAAa,WAAAF,QAAG,iBAAiBE,KAAI;AAE3C,QAAM,OAAO,MAAM,UAAU,YAAY,WAAW;AACpD,QAAM,KAAK,MAAM,UAAU,YAAY,SAAS;AAChD,aAAW,MAAM;AACjB,SAAO,EAAE,MAAM,GAAG;AACpB;AAEA,eAAsB,YACpB,WACA,YACA,QACA;AACA,MAAI,WAAAF,QAAG,UAAU,SAAS,EAAE,YAAY,GAAG;AACzC,UAAM,IAAI,MAAM,6BAA6B;AAAA,EAC/C;AACA,QAAM,EAAE,MAAM,GAAG,IAAI,MAAM,aAAa,SAAS;AACjD,QAAM,YAAY,WAAAA,QAAG,iBAAiB,WAAW;AAAA,IAC/C,OAAO,cAAc;AAAA,EACvB,CAAC;AAED,QAAM,aAAa,WAAAA,QAAG,kBAAkB,UAAU;AAElD,QAAM,YAAY,cAAc,QAAQ,IAAI;AAC5C,QAAM,WAAW,cAAAD,QAAO,iBAAiB,MAAM,WAAW,EAAE;AAE5D,QAAM,QAAQ,aAAAE,QAAK,aAAa;AAEhC,YAAU,KAAK,QAAQ,EAAE,KAAK,KAAK,EAAE,KAAK,UAAU;AAEpD,SAAO,IAAI,QAAc,CAAC,KAAK,QAAQ;AACrC,eAAW,GAAG,UAAU,MAAM;AAC5B,iBAAW,MAAM;AACjB,UAAI;AAAA,IACN,CAAC;AAED,cAAU,GAAG,SAAS,OAA
K;AACzB,iBAAW,MAAM;AACjB,UAAI,CAAC;AAAA,IACP,CAAC;AAED,aAAS,GAAG,SAAS,OAAK;AACxB,iBAAW,MAAM;AACjB,UAAI,CAAC;AAAA,IACP,CAAC;AAED,UAAM,GAAG,SAAS,OAAK;AACrB,iBAAW,MAAM;AACjB,UAAI,CAAC;AAAA,IACP,CAAC;AAED,eAAW,GAAG,SAAS,OAAK;AAC1B,iBAAW,MAAM;AACjB,UAAI,CAAC;AAAA,IACP,CAAC;AAAA,EACH,CAAC;AACH;AAEA,SAAS,UAAUE,SAAuB,QAAgB;AACxD,SAAO,IAAI,QAAgB,CAAC,SAAS,WAAW;AAC9C,QAAI,YAAY;AAChB,UAAM,OAAiB,CAAC;AAExB,IAAAA,QAAO,GAAG,YAAY,MAAM;AAC1B,UAAI;AAEJ,cAAQ,QAAQA,QAAO,KAAK,SAAS,SAAS,OAAO,MAAM;AACzD,aAAK,KAAK,KAAK;AACf,qBAAa,MAAM;AAAA,MACrB;AAEA,cAAQ,OAAO,OAAO,IAAI,CAAC;AAAA,IAC7B,CAAC;AAED,IAAAA,QAAO,GAAG,OAAO,MAAM;AACrB,aAAO,IAAI,MAAM,kCAAkC,CAAC;AAAA,IACtD,CAAC;AAED,IAAAA,QAAO,GAAG,SAAS,WAAS;AAC1B,aAAO,KAAK;AAAA,IACd,CAAC;AAAA,EACH,CAAC;AACH;;;AF3KA;AACAC;AAGA,IAAAC,mBAAmB;AAEnB,IAAM,aAAa,oBAAI,wBACnB,SAAS,oBAAI,qBAAqB,IAClC,KAAK;AAUT,SAAS,qBAAqB;AAC5B,SAAO,IAAI,KAAK,KAAK,IAAI,IAAI,UAAU,EAAE,YAAY;AACvD;AAEA,SAAS,SAAS,KAAU,OAAqB,CAAC,GAAG;AACnD,MAAI,iBAAiB,KAAK,kBAAkB;AAC5C,MAAI,kBAAkB,KAAK,iBAAiB;AAC5C,MAAI,OAAO,KAAK;AAChB,MAAI,WAAW,KAAK,YAAY;AAChC,MAAI,UAAU,KAAK;AACrB;AAEA,eAAe,YAAY,QAAgB,cAAyB;AAGlE,MAAI,sBAAsB,MAAM,GAAG;AACjC,WAAO,EAAE,OAAO,MAAM,MAAM,OAAU;AAAA,EACxC;AACA,QAAM,YAAY,QAAQ,MAAM;AAChC,QAAM,WAAW,UAAU,MAAM,SAAS,EAAE,CAAC;AAC7C,SAAO,WAAW,UAAU,YAAY;AACtC,QAAI;AACJ,QAAI;AACF,YAAM,KAAK,YAAY;AAEvB,eAAU,MAAM;AAAA;AAAA,QAEd;AAAA,UACE,KAAK;AAAA,QACP;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS,KAAK;AACZ,eAAS;AAAA,IACX;AACA,QAAI,QAAQ;AACV,aAAO;AAAA,QACL,OAAO;AAAA,QACP,MAAM,MAAM,QAAQ,QAAQ,UAAU,YAAY;AAAA,MACpD;AAAA,IACF,OAAO;AACL,YAAM,IAAI,mBAAmB;AAAA,IAC/B;AAAA,EACF,CAAC;AACH;AAOe,SAAR,sBACL,iBAAoC,CAAC,GACrC,OAA6D;AAAA,EAC3D,eAAe;AACjB,GACA;AACA,QAAM,gBAAgB,iBAAiB,kBAAkB,cAAc,IAAI,CAAC;AAC5E,SAAO,OAAO,KAAgB,SAAc;AAC1C,QAAI,iBAAiB;AACrB,UAAM,UAAU,IAAI,QAAQ,8CAAsB;AAElD,UAAM,QAAQ,QAAQ,KAAK,aAAa;AACxC,QAAI,OAAO;AACT,uBAAiB;AAAA,IACnB;AACA,QAAI;AAEF,UAAI,cAAc,IAAI,QAAQ,sCAAoB;AAElD,YAAM,aACJ,UAAyB,+BAAgB,KACzC,QAAuB,WAAW;AACpC,UAAI,SAAS,IAAI,QAAQ,0CAAsB;AAE/C,UAAI,CAAC,UAAU,IAAI,QAAQ,2CAA4B,GAAG;AACxD,iBAAS,IAAI,QAAQ,2CAA4B,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,MACjE;AAEA,YAAM,WAAW,IAAI,QAAQ,8CAAwB;AACrD,UAAI,gBAAgB,OAClB,OAAO,MACP,WAAW;AACb,UAAI,cAAc,CAAC,QAAQ;AACzB,cAAM,YAAY,WAAW;AAC7B,cAAM,SAAS,WAAW;AAC1B,YAAI;AACJ,YAAI;AAEF,oBAAU,MAAM,WAAW,QAAQ,SAAS;AAC5C,cAAI,QAAQ,KAAK,cAAc;AAC7B,mBAAO,MAAM;AAAA,cACX;AAAA,cACA,QAAQ;AAAA,cACR,KAAK,aAAa,GAAG;AAAA,YACvB;AAAA,UACF,OAAO;AACL,mBAAO,MAAM,QAAQ,QAAQ,QAAQ,QAAQ;AAAA,UAC/C;AACA,eAAK,YAAY,QAAQ;AAEzB,cAAI,SAAS,iBAAiB,mBAAmB,GAAG;AAElD,kBAAM,iBAAiB,OAAO;AAAA,UAChC;AACA,0BAAgB;AAAA,QAClB,SAAS,KAAU;AACjB,0BAAgB;AAChB,kBAAQ,MAAM,eAAe,IAAI,OAAO,EAAE;AAC1C,kBAAQ,MAAM,GAAG;AAEjB,sBAAY,+BAAgB;AAAA,QAC9B;AAAA,MACF;AAEA,UAAI,CAAC,iBAAiB,QAAQ;AAC5B,cAAM,eAAe,KAAK,eAAe,KAAK,aAAa,GAAG,IAAI;AAClE,cAAM,EAAE,OAAO,MAAM,UAAU,IAAI,MAAM;AAAA,UACvC;AAAA,UACA;AAAA,QACF;AACA,YAAI,SAAS,WAAW;AACtB,0BAAgB;AAChB,iBAAO;AAAA,QACT,WAAW,OAAO;AAChB,0BAAgB;AAChB,qBAAW;AAAA,QACb;AAAA,MACF;AACA,UAAI,CAAC,QAAQ,UAAU;AACrB,eAAO,EAAE,SAAS;AAAA,MACpB,WAAW,MAAM;AACf,eAAO,KAAK;AAAA,MACd;AAEA,UAAI,CAAC,eAAe;AAClB,wBAAgB;AAAA,MAClB;AAEA,UAAI,MAAM;AACR,yBAAAC,QAAO,QAAQ;AAAA,UACb,IAAI,MAAM;AAAA,UACV,UAAU,MAAM;AAAA,UAChB,gBAAgB,MAAM;AAAA,UACtB,QAAQ,MAAM;AAAA,QAChB,CAAC;AAAA,MACH;AAGA,eAAS,KAAK,EAAE,eAAe,MAAM,UAAU,SAAS,eAAe,CAAC;AAExE,UAAI,QAAQ,KAAK,OAAO;AACtB,eAAgB,gBAAgB,MAAM,KAAK,IAAI;AAAA,MACjD,OAAO;AACL,eAAO,KAAK;AAAA,MACd;AAAA,IACF,SAAS,KAAU;AACjB,cAAQ,MAAM,eAAe,IAAI,OAAO,EAAE;AAC1C,cAAQ,MAAM,GAAG;AAEjB,UAAI,KAAK,SAAS,qBAAqB;AACrC,oBAAY,+BAAgB;AAAA,MAC9B,WAAW,KAAK,kDAAoC;AAClD,YAAI,MAAM,KAAK,IAAI,OAAO;AAAA,MAC5B;AAEA,UAAK,
QAAQ,KAAK,iBAAkB,gBAAgB;AAClD,iBAAS,KAAK,EAAE,eAAe,OAAO,SAAS,eAAe,CAAC;AAC/D,eAAO,KAAK;AAAA,MACd,OAAO;AACL,YAAI,MAAM,IAAI,UAAU,KAAK,GAAG;AAAA,MAClC;AAAA,IACF;AAAA,EACF;AACF;;;AG3MA,IAAOC,oBAAQ,OAAO,KAAsB,SAAc;AAExD,SAAO,KAAK;AACd;;;ACLAC;AAGAC;AACA;AAOe,SAAR,gBACL,0BACA,mBACA,OAAwC,EAAE,mBAAmB,MAAM,GACnE;AACA,QAAM,iBAAiB,kBAAkB,wBAAwB;AACjE,QAAM,mBAAmB,kBAAkB,iBAAiB;AAE5D,SAAO,eAAgB,KAAsB,MAAW;AACtD,UAAM,gBACJ,KAAK,qBAAqB,CAAC,CAAC,QAAQ,KAAK,gBAAgB;AAC3D,UAAM,aAAiC;AAAA,MACrC;AAAA,IACF;AAEA,UAAM,UAAU,CAAC,CAAC,QAAQ,KAAK,cAAc;AAC7C,QAAI,CAAC,SAAS;AACZ,iBAAW,oBAAoB,oBAA+B;AAAA,IAChE;AAEA,UAAM,WAAW,mBAAmB,KAAK,UAAU;AACnD,QAAI,4CAAsB,QAAkB;AAC5C,WAAO,WAAW,UAAU,IAAI;AAAA,EAClC;AACF;;;ACnCAC;AAOA,IAAO,sBAAQ,OAAO,KAAgB,SAAc;AAClD,QAAM,SAAS,IAAI,QAAQ,0CAAsB;AACjD,MAAI,CAAC,QAAQ;AACX,QAAI,MAAM,KAAK,cAAc;AAAA,EAC/B;AAEA,MAAI,MAAM,QAAQ,MAAM,GAAG;AACzB,QAAI,MAAM,KAAK,cAAc;AAAA,EAC/B;AAEA,MAAI,CAAC,sBAAsB,MAAM,GAAG;AAClC,QAAI,MAAM,KAAK,cAAc;AAAA,EAC/B;AAEA,SAAO,KAAK;AACd;;;ACtBAC;AAUA,IAAM,mBAAmB,CAAC,OAAO,QAAQ,SAAS;AAOlD,IAAM,yBAAyB;AAAA,EAC7B;AAAA,EACA;AAAA,EACA;AACF;AAae,SAAR,aACL,OAA8C,EAAE,gBAAgB,CAAC,EAAE,GACnE;AACA,QAAM,gBAAgB,kBAAkB,KAAK,cAAc;AAC3D,SAAO,OAAO,KAAsB,SAAc;AAEhD,UAAM,QAAQ,QAAQ,KAAK,aAAa;AACxC,QAAI,OAAO;AACT,aAAO,KAAK;AAAA,IACd;AAGA,QAAI,iBAAiB,QAAQ,IAAI,MAAM,MAAM,IAAI;AAC/C,aAAO,KAAK;AAAA,IACd;AAGA,QAAI,cAAc,IAAI,IAAI,cAAc,IACpC,IAAI,IAAI,cAAc,EAAE,YAAY,IACpC;AACJ,QACE,CAAC,uBAAuB,OAAO,UAAQ,YAAY,SAAS,IAAI,CAAC,EAAE,QACnE;AACA,aAAO,KAAK;AAAA,IACd;AAGA,QAAI,IAAI,UAAU;AAChB,aAAO,KAAK;AAAA,IACd;AAIA,UAAM,YAAY,IAAI,MAAM;AAC5B,QAAI,CAAC,WAAW;AACd,aAAO,KAAK;AAAA,IACd;AAGA,UAAM,eAAe,IAAI,mCAAqB;AAC9C,QAAI,CAAC,gBAAgB,iBAAiB,WAAW;AAC/C,UAAI,MAAM,KAAK,oBAAoB;AAAA,IACrC;AAEA,WAAO,KAAK;AAAA,EACd;AACF;;;AC7EA,IAAO,oBAAQ,OAAO,KAAc,SAAc;AAChD,MAAI,CAAC,IAAI,YAAY,CAACC,SAAQ,IAAI,IAAI,GAAG;AACvC,QAAI,MAAM,KAAK,2BAA2B;AAAA,EAC5C;AACA,SAAO,KAAK;AACd;;;ACNAC;AACAC;AAEA,IAAO,yBAAQ,OAAO,KAAc,SAAc;AAChD,QAAM,QAAQ,SAAS;AACvB,QAAM,YACJ,oBAAI,SAAS,KAAK,CAAC,QACfC,yBACA,oBAAI,OAAO,IACXC,aACA;AACN,MAAI,CAAC,WAAW;AACd,UAAM,IAAI,MAAM,6CAA6C;AAAA,EAC/D;AACA,MAAI,CAAC,IAAI,YAAY,CAAC,UAAU,IAAI,MAAM,KAAK,KAAK,CAACC,SAAQ,IAAI,IAAI,GAAG;AACtE,QAAI,MAAM,KAAK,mCAAmC;AAAA,EACpD;AACA,SAAO,KAAK;AACd;;;AClBAC;AACAC;AAEA,IAAO,sBAAQ,OAAO,KAAc,SAAc;AAChD,QAAM,QAAQ,SAAS;AACvB,QAAM,YACJ,oBAAI,SAAS,KAAK,CAAC,QACfC,yBACA,oBAAI,OAAO,IACXC,aACA;AACN,MAAI,CAAC,WAAW;AACd,UAAM,IAAI,MAAM,6CAA6C;AAAA,EAC/D;AACA,MAAI,CAAC,IAAI,YAAY,CAAC,UAAU,IAAI,MAAM,KAAK,GAAG;AAChD,QAAI,MAAM,KAAK,6BAA6B;AAAA,EAC9C;AACA,SAAO,KAAK;AACd;;;ACpBAC;AACA;AAGA,IAAMC,QAAO,QAAQ,iBAAiB;AAKtC,IAAMC,cAAa,QAAQ,gBAAgB;AAEpC,SAAS,eAAwB;AACtC,SAAO;AAAA,IACL;AAAA,IACA,UAAUA,YAAW;AAAA,IACrB,aAAa;AAAA,MACX,QAAQ,CAAC,QAAyB,CAAC,CAAC,IAAI,KAAK,SAAS,SAAS;AAAA,IACjE;AAAA,IACA,aAAa;AAAA,MACX,KAAK,SAAO;AACV,eAAO;AAAA,UACL,QAAQ,IAAI;AAAA,UACZ,KAAK,IAAI;AAAA,UACT,eAAe,IAAI;AAAA,QACrB;AAAA,MACF;AAAA,MACA,KAAK,SAAO;AACV,eAAO;AAAA,UACL,QAAQ,IAAI;AAAA,QACd;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAEA,SAAS,gBAAgB;AACvB,MAAI,oBAAI,cAAc;AACpB,WAAOD,MAAK,aAAa,CAAC;AAAA,EAC5B,OAAO;AACL,WAAO,CAAC,KAAU,SAAc;AAC9B,aAAO,KAAK;AAAA,IACd;AAAA,EACF;AACF;AAEA,IAAM,iBAAiB,cAAc;AAErC,IAAO,qBAAQ;;;AC/CfE;AACA,IAAAC,eAA2B;AAE3B,IAAMC,cAAa,QAAQ,gBAAgB;AAE3C,IAAM,cAAc,CAAC,KAAU,SAAc;AAE3C,MAAI,gBAAgB,IAAI,wDAA6B;AACrD,MAAI,CAAC,eAAe;AAClB,wBAAgB,aAAAC,IAAK;AAAA,EACvB;AAEA,SAAOD,YAAW,OAAO,eAAe,MAAM;AAC5C,WAAO,KAAK;AAAA,EACd,CAAC;AACH;AAEA,IAAOE,sBAAQ;;;ACdf,eAAsB,cAAc,KAAU,MAAW;AACvD,MAAI;AACF,UAAM,KAAK;AAAA,EACb,SAAS,KAAU;AACjB,UAAM,SAAS,IAAI,UAAU,IAAI,cAAc;AAC/C,QAAI,SAAS;AAEb,QAAI,UAAU,OAAO,SAAS,KAAK;AACjC,
cAAQ,KAAK,GAAG;AAAA,IAClB,OAAO;AACL,cAAQ,MAAM,GAAG;AAAA,IACnB;AAEA,UAAM,QAAe,eAAe,GAAG;AACvC,UAAMC,QAAiB;AAAA,MACrB,SAAS,IAAI;AAAA,MACb;AAAA,MACA,kBAAkB,IAAI;AAAA,MACtB;AAAA,IACF;AAEA,QAAI,OAAOA;AAAA,EACb;AACF;AAEA,IAAO,wBAAQ;;;ACrBA,SAAR,0BAAkB,KAAU,MAAW;AAC5C,QAAM,cAAc,IAAI,QAAQ,OAAO;AACvC,MAAI,IAAI,QAAQ,OAAO,YAAY,MAAM,OAAO;AAC9C,QAAI;AAAA,MACF;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACA,MAAI,CAAC,aAAa;AAChB,WAAO,KAAK;AAAA,EACd;AACA,QAAM,UAAU,mBAAmB,WAAW;AAC9C,MAAI;AACJ,MAAI;AACF,WAAO,KAAK,MAAM,OAAO;AAAA,EAC3B,SAAS,KAAK;AACZ,WAAO,KAAK;AAAA,EACd;AACA,MAAI,QAAQ,OAAO;AACnB,SAAO,KAAK;AACd;;;AC3BA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAAkC;AAGlC,SAAS,SACP,QACA,UACA;AAEA,SAAO,CAAC,KAAgB,SAAc;AACpC,QAAI,CAAC,QAAQ;AACX,aAAO,KAAK;AAAA,IACd;AACA,QAAIC,UAAS;AAEb,QAAI,UAAU,IAAI,UAAU,QAAQ;AACpC,QAAI,IAAI,QAAQ,KAAK,MAAM;AACzB,MAAAA,UAAS,IAAI,QAAQ;AAAA,IACvB,WAAW,WAAW,MAAM;AAC1B,MAAAA,UAAS;AAAA,IACX;AAGA,QAAK,OAA4B,QAAQ;AACvC,eAAU,OAA4B,OAAO;AAAA,QAC3C,WAAW,WAAAC,QAAI,IAAI,EAAE,SAAS;AAAA,QAC9B,WAAW,WAAAA,QAAI,IAAI,EAAE,SAAS;AAAA,MAChC,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,MAAM,IAAI,OAAO,SAASD,OAAM;AACxC,QAAI,OAAO;AACT,UAAI,MAAM,KAAK,WAAW,QAAQ,MAAM,MAAM,OAAO,EAAE;AACvD;AAAA,IACF;AACA,WAAO,KAAK;AAAA,EACd;AACF;AAEO,SAAS,KAAK,QAA4C;AAC/D,SAAO,SAAS,QAAQ,MAAM;AAChC;AAEO,SAAS,OAAO,QAA4C;AACjE,SAAO,SAAS,QAAQ,QAAQ;AAClC;;;ArBvCO,IAAM,aAAa;AAAA,EACxB,QAAQE;AACV;;;ADSA;AAhBA,IAAM,YAAY,QAAQ,cAAc;AACxC,IAAM,gBAAgB,QAAQ,gBAAgB,EAAE;AA4BhD,IAAM,UAAU,QAAQ,yBAAyB;AAc1C,IAAM,sBAAsB;AAC5B,IAAM,yBAAyB;AAC/B,IAAM,sBAAsB;AAC5B,IAAM,WAAW;AAGxB,UAAU,IAAI,IAAI,cAAc,cAAM,SAAS,cAAM,YAAY,CAAC;AAElE,eAAe,uBACb,cACA,cAC0B;AAC1B,QAAM,cAAc,MAAM,aAAK,eAAe;AAC9C,MAAI;AACJ,MAAI;AAEJ,MAAI;AACF,qBAAiB,MAAM,aAAK,oBAAoB,cAAc,WAAW;AACzE,QAAI,CAAC,gBAAgB;AACnB,YAAM,IAAI,MAAM,8BAA8B;AAAA,IAChD;AACA,eAAW,MAAM,aAAK,gBAAgB,gBAAgB,eAAe;AAAA,EACvE,SAAS,KAAK;AACZ,YAAQ,MAAM,GAAG;AACjB,UAAM,IAAI,MAAM,+BAA+B;AAAA,EACjD;AAEA,UAAQ,IAAI,UAAU;AAAA,IACpB,mBAAmB;AACjB,aAAO,SAAS,iBAAiB,cAAc;AAAA,IACjD;AAAA,EACF,CAAC;AAED,SAAO,IAAI,QAAQ,aAAW;AAC5B,YAAQ;AAAA;AAAA,MAEN;AAAA,MACA,CAAC,KAAU,aAAqBC,eAAmBC,YAAgB;AACjE,gBAAQ,EAAE,KAAK,aAAa,cAAAD,eAAc,QAAAC,QAAO,CAAC;AAAA,MACpD;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAEA,eAAe,yBACb,QACA,cAC0B;AAC1B,MAAI,cAAc,MAAM,eAAO,eAAe,MAAM;AAEpD,MAAI;AACJ,MAAI;AACF,eAAW,MAAM,eAAO;AAAA,MACtB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF,SAAS,KAAU;AACjB,YAAQ,MAAM,GAAG;AACjB,UAAM,IAAI;AAAA,MACR,qDAAqD,IAAI,OAAO;AAAA,IAClE;AAAA,EACF;AAEA,UAAQ,IAAI,QAAQ;AAEpB,SAAO,IAAI,QAAQ,aAAW;AAC5B,YAAQ;AAAA;AAAA,MAEN;AAAA,MACA,CAAC,KAAU,aAAqBD,eAAsBC,YAAgB;AACpE,gBAAQ,EAAE,KAAK,aAAa,cAAAD,eAAc,QAAAC,QAAO,CAAC;AAAA,MACpD;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAWA,eAAsB,kBACpB,cACA,cACA,UAC0B;AAC1B,UAAQ,cAAc;AAAA,IACpB;AACE,UAAI,CAAC,UAAU;AACb,eAAO,EAAE,KAAK,EAAE,MAAM,8BAA8B,EAAE;AAAA,MACxD;AACA,YAAM,aAAa,MAAc,kBAAkB,QAAQ;AAC3D,UAAI,CAAC,YAAY;AACf,eAAO,EAAE,KAAK,EAAE,MAAM,+BAA+B,EAAE;AAAA,MACzD;AACA,aAAO,uBAAuB,YAAY,YAAY;AAAA,IACxD;AACE,UAAI,eAAe,MAAc,gBAAgB;AACjD,UAAI,CAAC,cAAc;AACjB,eAAO,EAAE,KAAK,EAAE,MAAM,iCAAiC,EAAE;AAAA,MAC3D;AACA,aAAO,yBAAyB,cAAc,YAAY;AAAA,EAC9D;AACF;AAIA,eAAsB,gBAAgB,QAAgB,aAAkB;AACtE,QAAM,UAAU;AAAA,IACd,aAAa,YAAY;AAAA,IACzB,cAAc,YAAY;AAAA,EAC5B;AAEA,MAAI;AACF,UAAM,KAAK,YAAY;AACvB,UAAM,SAAS,MAAM,GAAG,IAAS,MAAM;AAGvC,QAAI,OAAO,QAAQ,iBAAiB,UAAU;AAC5C,aAAO,QAAQ;AAAA,IACjB;AAEA,WAAO,SAAS;AAAA,MACd,GAAG,OAAO;AAAA,MACV,GAAG;AAAA,IACL;AAEA,UAAM,GAAG,IAAI,MAAM;AAEnB,UAAM,eAAe,MAAM;AAAA,EAC7B,SAAS,GAAG;AACV,YAAQ,MAAM,mDAAmD,CAAC;AAAA,EACpE;AACF;AAKA,eAAsB,eAAe,MAA0B;AAC7D,QAAM,MAAM,KAAK;AACjB,QAAM,SAAS,KAAK;AACpB,QAAM,oBAAoB,KAAK;AAE/B,MAAI,CAAC;AAAK,UAAM,IAAI,MAAM,yCAAyC;AAEnE,QAAM,i
BAAiB,UAAyB,+BAAgB;AAChE,MAAI,WAAW,MAAM,mBAAmB,MAAM;AAE9C,MAAI,kBAAkB,mBAAmB;AACvC,eAAW,SAAS;AAAA,MAClB,aAAW,QAAQ,cAAc,eAAe;AAAA,IAClD;AAAA,EACF,OAAO;AAEL,gBAAY,+BAAgB;AAAA,EAC9B;AAEA,QAAM,aAAa,SAAS,IAAI,CAAC,EAAE,UAAU,MAAM,SAAS;AAC5D,QAAM,mBAAmB,QAAQ,EAAE,YAAY,QAAQ,SAAS,CAAC;AACjE,QAAa,aAAK,OAAO,IAAI,MAAM,KAAK;AACxC,QAAgB,eAAe,MAAM;AACvC;;;A1FpMAC;AACA;;;AiHdA;AAAA;AAAA,kBAAAC;AAAA;;;ACAA;AAUA,IAAAC,cAAgB;AAEhB,IAAM,mBAAmB;AAAA,EACvB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,SAAS,OAAO,WAAuB,QAAa;AAClD,QAAM,EAAE,MAAM,IAAI,UAAU,SAAS,MAAM;AAC3C,MAAI,OAAO;AACT,UAAM;AAAA,EACR;AACF;AAEA,SAAS,kBAAkB,QAAa;AACtC,QAAM,YAAY,YAAAC,QAAI,OAAO;AAAA,IAC3B,MAAM,YAAAA,QAAI,OAAO,EAAE,iCAA0B,EAAE,SAAS;AAAA,IACxD,UAAU,YAAAA,QAAI,OAAO,EAAE,QAAQ,IAAI,EAAE,SAAS;AAAA,IAC9C,MAAM,YAAAA,QAAI,OAAO,EAAE,SAAS;AAAA,IAC5B,SAAS,YAAAA,QAAI,OAAO,EAAE,SAAS;AAAA,IAC/B,QAAQ,YAAAA,QACL,OAAO;AAAA,MACN,MAAM,YAAAA,QAAI,OAAO,EAAE,SAAS;AAAA,MAC5B,UAAU,YAAAA,QAAI,MAAM,EAAE,MAAM,YAAAA,QAAI,OAAO,EAAE,QAAQ,IAAI,CAAC,EAAE,SAAS;AAAA,IACnE,CAAC,EACA,QAAQ,IAAI;AAAA,EACjB,CAAC;AACD,SAAO,WAAW,MAAM;AAC1B;AAEA,SAAS,mBAAmB,QAAa;AACvC,QAAM,iBAAiB,YAAAA,QAAI,OAAO;AAAA,IAChC,MAAM,YAAAA,QACH,OAAO,EACP,MAAM,GAAG,OAAO,OAAO,mBAAmB,CAAC,EAC3C,SAAS;AAAA,IACZ,UAAU,YAAAA,QAAI,QAAQ,EAAE,SAAS;AAAA,IACjC,SAAS,YAAAA,QAAI,IAAI;AAAA,IACjB,SAAS,YAAAA,QAAI,OAAO;AAAA,EACtB,CAAC;AAED,QAAM,iBAAiB,YAAAA,QACpB,OAAO;AAAA,IACN,MAAM,YAAAA,QAAI,OAAO,EAAE,MAAM,GAAG,OAAO,OAAO,SAAS,CAAC;AAAA,IACpD,UAAU,YAAAA,QAAI,QAAQ;AAAA,IACtB,QAAQ,YAAAA,QAAI,OAAO,EAAE,QAAQ,YAAAA,QAAI,OAAO,GAAG,cAAc;AAAA,EAC3D,CAAC,EACA,SAAS;AAEZ,QAAM,YAAY,YAAAA,QAAI,OAAO;AAAA,IAC3B,MAAM,YAAAA,QAAI,OAAO,EAAE,mCAA2B,EAAE,SAAS;AAAA,IACzD,UAAU,YAAAA,QAAI,OAAO,EAAE,QAAQ,IAAI,EAAE,SAAS;AAAA,IAC9C,MAAM,YAAAA,QAAI,OAAO,EAAE,SAAS;AAAA,IAC5B,SAAS,YAAAA,QAAI,OAAO,EAAE,SAAS;AAAA,IAC/B,QAAQ,YAAAA,QAAI,OAAO;AAAA,MACjB,MAAM,YAAAA,QAAI,OAAO;AAAA,MACjB,MAAM,YAAAA,QAAI,QAAQ,EAAE,SAAS;AAAA,MAC7B,OAAO,YAAAA,QAAI,QAAQ,EAAE,SAAS;AAAA,MAC9B,MAAM,YAAAA,QACH,OAAO;AAAA,QACN,MAAM,YAAAA,QAAI,OAAO,EAAE,SAAS;AAAA,MAC9B,CAAC,EACA,SAAS;AAAA,MACZ,UAAU,YAAAA,QACP;AAAA,QACC,OAAO;AAAA,UACL,OAAO,OAAO,iBAAiB,EAAE,IAAI,SAAO;AAAA,YAC1C;AAAA,YACA,YAAAA,QAAI,QAAQ,EAAE,SAAS;AAAA,UACzB,CAAC;AAAA,QACH;AAAA,MACF,EACC,SAAS;AAAA,MACZ,eAAe,YAAAA,QAAI,QAAQ,EAAE,SAAS;AAAA,MACtC,aAAa,YAAAA,QAAI,OAAO,EAAE,SAAS;AAAA,MACnC,cAAc,YAAAA,QAAI,OAAO,EAAE,SAAS;AAAA,MACpC,MAAM,YAAAA,QAAI,OAAO,EAAE,MAAM,GAAG,gBAAgB;AAAA,MAC5C,YAAY,YAAAA,QAAI,OAAO,EAAE,QAAQ,YAAAA,QAAI,OAAO,GAAG,cAAc,EAAE,SAAS;AAAA,MACxE,OAAO,YAAAA,QACJ,OAAO,EACP,QAAQ,YAAAA,QAAI,OAAO,GAAG,cAAc,EACpC,QAAQ,IAAI,EACZ,SAAS;AAAA,MACZ,OAAO,YAAAA,QAAI,OAAO,EAAE;AAAA,QAClB,YAAAA,QAAI,OAAO;AAAA,QACX,YAAAA,QAAI,OAAO;AAAA,UACT,MAAM,YAAAA,QAAI,OAAO,EAAE,SAAS;AAAA,UAC5B,aAAa,YAAAA,QAAI,OAAO,EAAE,SAAS;AAAA,UACnC,UAAU,YAAAA,QAAI,QAAQ;AAAA,UACtB,MAAM,YAAAA,QAAI,OAAO;AAAA,QACnB,CAAC;AAAA,MACH;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AACD,SAAO,WAAW,MAAM;AAC1B;AAEA,SAAS,mBAAmB,QAAa;AACvC,QAAM,qBAAqB,YAAAA,QAAI,OAAO,EAAE,QAAQ,YAAAA,QAAI,OAAO,GAAG;AAAA,IAC5D,MAAM,YAAAA,QACH,OAAO,EACP,MAAM,GAAG,OAAO,OAAO,gBAAgB,CAAC,EACxC,SAAS;AAAA,IACZ,YAAY,YAAAA,QAAI,OAAO,EAAE,MAAM,GAAG,OAAO,OAAO,sBAAsB,CAAC;AAAA,IACvE,OAAO,YAAAA,QAAI,OAAO;AAAA,IAClB,aAAa,YAAAA,QAAI,OAAO;AAAA,IACxB,MAAM,YAAAA,QAAI,MAAM,EAAE,MAAM,YAAAA,QAAI,OAAO,CAAC;AAAA,IACpC,QAAQ,YAAAA,QAAI,MAAM,EAAE,MAAM,YAAAA,QAAI,OAAO,CAAC;AAAA,EACxC,CAAC;AACD,QAAM,sBAAsB,YAAAA,QACzB,OAAO;AAAA,IACN,YAAY;AAAA,IACZ,UAAU,YAAAA,QAAI,MAAM,EAAE,MAAM,YAAAA,QAAI,OAAO,CAAC;AAAA,EAC1C,CAAC,EACA,OAAO,kBAAkB,EACzB,SAAS;AACZ,QAAM,YAAY,YAAAA,QAAI,OAAO;AAAA,IAC3B,MAAM,YAAAA,QAAI,OAAO,EAAE,mCAA2
B,EAAE,SAAS;AAAA,IACzD,UAAU,YAAAA,QAAI,OAAO,EAAE,QAAQ,IAAI,EAAE,SAAS;AAAA,IAC9C,MAAM,YAAAA,QAAI,OAAO,EAAE,SAAS;AAAA,IAC5B,SAAS,YAAAA,QAAI,OAAO,EAAE,SAAS;AAAA,IAC/B,QAAQ,YAAAA,QAAI,OAAO;AAAA,MACjB,MAAM,YAAAA,QAAI,OAAO,EAAE,SAAS;AAAA,MAC5B,SAAS,YAAAA,QAAI,OAAO,EAAE,SAAS;AAAA,MAC/B,MAAM,YAAAA,QAAI,OAAO,EAAE,SAAS;AAAA,MAC5B,aAAa,YAAAA,QAAI,OAAO,EAAE,SAAS;AAAA,MACnC,MAAM,YAAAA,QACH,OAAO,EACP,gDAAyD,EACzD,SAAS;AAAA,MACZ,QAAQ,YAAAA,QACL,OAAO,EACP,SAAS,GAAG,qBAAqB,EACjC,SAAS;AAAA,MACZ,QAAQ,YAAAA,QAAI,OAAO,EAAE,SAAS;AAAA,MAC9B,QAAQ,YAAAA,QACL,OAAO;AAAA,QACN,QAAQ;AAAA,QACR,SAAS;AAAA,MACX,CAAC,EACA,SAAS;AAAA,IACd,CAAC;AAAA,EACH,CAAC;AACD,SAAO,WAAW,MAAM;AAC1B;AAEO,SAASC,UAAS,QAAa;AACpC,UAAQ,QAAQ,MAAM;AAAA,IACpB;AACE,wBAAkB,MAAM;AACxB;AAAA,IACF;AACE,yBAAmB,MAAM;AACzB;AAAA,IACF;AACE,yBAAmB,MAAM;AACzB;AAAA,IACF;AACE,YAAM,IAAI,MAAM,4CAA4C,OAAO,IAAI,EAAE;AAAA,EAC7E;AACF;;;AlH3JAC;AACAC;AAEAC;;;AmHtBA;AAAA;AAAA;AAAA;AAAA;AAAA,eAAAC;AAAA;AAEA;AACAC;AACA;;;AnHwBAC;AACAC;;;AoH7BA;AAAA;AAAA;AAAA;AAAA;;;ACAA,iBAAgB;AAChB,iBAAgB;AAChBC;AACA,IAAAC,eAA0B;AAE1B,IAAI;AACJ,IAAM,oBAAgB,wBAAU,WAAAC,QAAI,MAAM;AAE1C,eAAe,OAAO,SAAoC;AACxD,MAAI,CAAC,WAAAC,QAAI,KAAK,OAAO,GAAG;AAEtB,QAAI,CAAC,QAAQ,WAAW,MAAM,GAAG;AAC/B,gBAAU,WAAW,OAAO;AAAA,IAC9B;AACA,cAAU,IAAI,IAAI,OAAO,EAAE;AAAA,EAC7B;AACA,QAAM,YAAY,MAAM,cAAc,SAAS;AAAA,IAC7C,KAAK;AAAA,EACP,CAAC;AACD,SAAO,UAAU,IAAI,UAAQ,KAAK,OAAO;AAC3C;AAEA,eAAsB,mBAAmB;AACvC,QAAM,YAAY,oBAAI;AACtB,QAAM,OAAO,WAAW,MAAM,GAAG,KAAK,CAAC;AACvC,MAAI,QAAkB,CAAC;AACvB,WAAS,QAAQ,MAAM;AACrB,UAAM,UAAU,KAAK,KAAK;AAC1B,QAAI,CAAC,WAAAA,QAAI,KAAK,OAAO,GAAG;AACtB,YAAM,YAAY,MAAM,OAAO,OAAO;AACtC,cAAQ,MAAM,OAAO,SAAS;AAAA,IAChC,OAAO;AACL,YAAM,KAAK,OAAO;AAAA,IACpB;AAAA,EACF;AACA,mBAAiB;AACnB;AAEA,eAAsB,cAAc,SAAmC;AACrE,MAAI,CAAC,gBAAgB;AACnB,UAAM,iBAAiB;AAAA,EACzB;AACA,MAAI,gBAAgB,WAAW,GAAG;AAChC,WAAO;AAAA,EACT;AAEA,MAAI;AACJ,MAAI,CAAC,WAAAA,QAAI,KAAK,OAAO,GAAG;AACtB,UAAM,MAAM,OAAO,OAAO;AAAA,EAC5B,OAAO;AACL,UAAM,CAAC,OAAO;AAAA,EAChB;AACA,SAAO,CAAC,CAAC,gBAAgB,KAAK,UAAQ,IAAI,SAAS,IAAI,CAAC;AAC1D;;;ACrDA;AAAA;AAAA,cAAAC;AAAA;;;ACEAC;;;ACFA;AAeA,IAAM,qBAGF;AAAA,EACF,kCAAmB,GAAG,CAAC,eAAiC,WAAW;AAAA,EACnE,kCAAmB,GAAG,CAAC,eAAiC,WAAW;AAAA,EACnE,kCAAmB,GAAG,CAAC,eAAiC,WAAW;AAAA,EACnE,2DAAqC,GAAG,CACtC,eACG,WAAW;AAAA,EAChB,yDAAoC,GAAG,CACrC,eACG,WAAW;AAAA,EAChB,+DAAuC,GAAG,CACxC,eACG,WAAW;AAAA,EAChB,6DAAsC,GAAG,CACvC,eACG,WAAW;AAAA,EAChB,8CAAyB,GAAG,CAAC,eAC3B,WAAW;AAAA,EACb,8CAAyB,GAAG,CAAC,eAC3B,WAAW;AAAA,EACb,8CAAyB,GAAG,CAAC,eAC3B,WAAW;AAAA,EACb,qDAA6B,GAAG,CAAC,eAC/B,WAAW;AAAA,EACb,0DAA+B,GAAG,CAAC,eACjC,WAAW;AAAA,EACb,oEAAoC,GAAG,CACrC,eACG,WAAW;AAClB;AAEO,SAAS,cAAc,OAAc,YAAiB;AAC3D,QAAM,YAAY,mBAAmB,KAAK;AAC1C,MAAI,CAAC,WAAW;AACd,UAAM,IAAI,MAAM,wDAAwD;AAAA,EAC1E;AACA,SAAO,UAAU,UAAU;AAC7B;;;AD9CA,IAAqB,0BAArB,MAAuE;AAAA,EAGrE,YAAYC,aAA0B;AAFtC,sBAA2B,CAAC;AAG1B,SAAK,aAAaA;AAAA,EACpB;AAAA,EAEA,MAAM,aACJ,OACA,UACA,YACA,WACA;AACA,UAAM,WAAW,SAAS;AAC1B,UAAM,QAAQ,cAAc,OAAO,UAAU;AAC7C,QAAI,CAAC,YAAY,CAAC,OAAO;AACvB;AAAA,IACF;AACA,aAAS,EAAE,QAAAC,SAAQ,UAAU,KAAK,KAAK,YAAY;AACjD,UAAIA,QAAO,SAAS,KAAK,GAAG;AAC1B,cAAM,WAAW,UAAU,YAAY;AACrC,gBAAM,UAAU;AAAA,YACd,IAAI;AAAA,YACJ;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAAA,EAEA,WAAW;AACT,WAAOC,UAAS;AAAA,EAClB;AACF;;;ADpCA,IAAI;AACJ,IAAI;AAEG,SAASC,MAAKC,aAA0B;AAC7C,MAAI,CAAC,iBAAiB;AACpB,IAAAD,MAAU;AAAA,EACZ;AACA,MAAI,CAAC,mBAAmB;AACtB,wBAAoB,IAAI,wBAAwBC,WAAU;AAAA,EAC5D;AAEA,MAAI,CAAC,mBAAmB;AACtB,wBAAoB,gBAAgB,QAAQ,OAAM,QAAO;AACvD,YAAM,EAAE,OAAO,UAAU,YAAY,UAAU,IAAI,IAAI;AACvD,YAAM,kBAAkB;AAAA,QACtB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;A
ACF;;;AtHKAC;AACA;AAKAC;AAYAC;AAGAC;AAZO,IAAM,UAAU;AAAA,EACrB,GAAG;AAAA,EACH,GAAG;AACL;AAWO,IAAMC,QAAO,CAAC,OAAY,CAAC,MAAM;AACtC,EAAG,KAAK,KAAK,EAAE;AACjB;",
6
6
  "names": ["init_license", "init_license", "QueryType", "DatasourceFieldType", "SourceName", "DatasourceFeature", "init_auth", "TenantResolutionStrategy", "init_user", "PermissionLevel", "PermissionType", "init_auth", "init_user", "init_account", "init_user", "init_account", "init_user", "init_app", "init_automation", "AutomationIOType", "AutomationCustomIOType", "AutomationTriggerStepId", "AutomationActionStepId", "init_datasource", "init_layout", "init_query", "init_role", "init_table", "init_row", "init_constants", "init_row", "init_constants", "init_table", "init_constants", "init_screen", "init_view", "DocumentType", "InternalTable", "init_user", "init_backup", "init_app", "init_automation", "init_datasource", "init_layout", "init_query", "init_role", "init_table", "init_screen", "init_view", "init_row", "init_user", "init_backup", "init_user", "init_userGroup", "init_plugin", "PluginType", "init_environmentVariables", "init_auditLogs", "init_user", "init_userGroup", "init_plugin", "init_environmentVariables", "init_auditLogs", "init_account", "init_app", "init_accounts", "init_user", "init_license", "init_account", "init_accounts", "init_user", "init_license", "init_auth", "init_user", "init_schedule", "init_backup", "init_datasource", "init_row", "init_view", "init_rows", "init_table", "init_user", "init_app", "init_backup", "init_datasource", "init_row", "init_view", "init_rows", "init_table", "init_user", "init_environmentVariables", "init_auditLogs", "init_events", "init_users", "init_users", "init_license", "init_global", "init_environmentVariables", "init_auditLogs", "init_events", "init_license", "init_auth", "init_user", "init_schedule", "init_app", "init_global", "init_account", "init_shared", "init_shared", "init_db", "AutomationViewMode", "ViewName", "init_api", "Header", "init_constants", "init_api", "module", "t", "e", "n", "r", "i", "s", "u", "_", "a", "M", "m", "f", "l", "$", "y", "v", "g", "D", "o", "d", "c", "h", "init_helpers", "init_constants", "init_helpers", "index", "init_constants", "APP_DEV_PREFIX", "APP_PREFIX", "isGlobalBuilder", "init_users", "init_documents", "init_users", "init_sdk", "init_documents", "init_table", "init_src", "init_constants", "init_helpers", "init_sdk", "init_table", "init_src", "UserStatus", "Cookie", "GlobalRole", "Config", "constants_exports", "init_constants", "init_db", "doInIdentityContext", "getIdentity", "init_context", "init_environment", "getProdAppID", "getDevAppID", "init_constants", "init_environment", "init_helpers", "path", "body", "params", "fetch", "init_utils", "init_helpers", "options", "init_environment", "url", "opts", "PouchDB", "find", "tracer", "params", "stream", "Nano", "init_utils", "exists", "params", "stream", "init_constants", "init_utils", "init_constants", "init_db", "params", "values", "getProdAppID", "init_environment", "init_db", "init_constants", "init_context", "init_constants", "utils_exports", "SEPARATOR", "init_utils", "init_environment", "Databases", "SelectableDatabase", "init_constants", "init_correlation", "import_fs", "init_utils", "init_environment", "fs", "init_db", "AppState", "init_constants", "getProdAppID", "tenancy", "init_constants", "getProdAppID", "init_utils", "init_environment", "init_constants", "init_context", "init_db", "params", "init_constants", "init_context", "init_db", "init_environment", "CreateFuncByName", "init_constants", "body", "fetch", "index", "params", "import_node_fetch", "init_context", "init_searchIndexes", "init_errors", "getDevAppID", "getProdAppID", 
"init_db", "init_utils", "init_context", "init_searchIndexes", "init_errors", "AWS", "path", "fs", "stream", "params", "fetch", "zlib", "tar", "import_node_fetch", "import_path", "import_fs", "import_uuid", "init_environment", "init_utils", "init_db", "getPresignedUrl", "init_environment", "qs", "getPresignedUrl", "init_app", "init_environment", "init_context", "init_global", "init_environment", "init_context", "getPresignedUrl", "getPresignedUrl", "init_environment", "init_context", "init_app", "init_global", "objectStore_exports", "init_objectStore", "init_utils", "path", "fs", "historyFile", "import_fs", "import_path", "init_system", "init_environment", "init_objectStore", "import_dd_trace", "isPlainObject", "isError", "isMessage", "getLogParams", "init_environment", "init_context", "init_correlation", "init_system", "getIdentity", "getTenantId", "getAppId", "getAutomationId", "tracer", "level", "pinoPretty", "pino", "init_system", "start", "end", "init_timers", "timeout", "init", "Redis", "stream", "keys", "init_environment", "init_utils", "init_timers", "SEPARATOR", "init", "init_utils", "constants_exports", "installation_exports", "init", "objectStore_exports", "users_exports", "utils_exports", "init_constants", "get", "get", "init_context", "CacheKey", "TTL", "get", "init_db", "init_context", "init_context", "init_constants", "params", "init_context", "users_exports", "users_exports", "init_constants", "init_db", "init_constants", "init_environment", "keys", "init_constants", "init_context", "init_src", "utils_exports", "init_environment", "init_db", "init_constants", "init_environment", "init_context", "APP_PREFIX", "options", "jwt", "DurationType", "options", "Redlock", "timeout", "init_environment", "import_node_fetch", "options", "fetch", "init_environment", "init_constants", "users_exports", "hasAdminPermissions", "hasAppBuilderPermissions", "hasBuilderPermissions", "isAdmin", "isAdminOrBuilder", "isBuilder", "isCreator", "isGlobalBuilder", "init_db", "init_context", "init_environment", "init_db", "init_constants", "init_environment", "init_context", "params", "ErrorCode", "init_context", "init_src", "isBuilder", "isAdmin", "isCreator", "isGlobalBuilder", "isAdminOrBuilder", "hasAdminPermissions", "hasBuilderPermissions", "hasAppBuilderPermissions", "init_environment", "init_environment", "shutdown", "init", "init_environment", "init_environment", "init_context", "init_context", "get", "PostHog", "init_environment", "init_context", "shutdown", "init_environment", "init_utils", "import_events", "events", "JobQueue", "init_context", "init_timers", "cleanup", "BullQueue", "shutdown", "init_environment", "processors", "init", "init_context", "init_environment", "init_db", "init_context", "init_environment", "semver", "getIdentity", "environment", "hasBuilderPermissions", "hasAdminPermissions", "verified", "recordEvent", "init_context", "events", "recordEvent", "eventKey", "get", "keys", "init", "shutdown", "init", "recordEvent", "created", "deleted", "created", "deleted", "datasource", "created", "updated", "deleted", "created", "deleted", "created", "datasource", "updated", "deleted", "run", "created", "updated", "deleted", "created", "deleted", "created", "imported", "created", "updated", "deleted", "exported", "imported", "init_context", "created", "updated", "deleted", "created", "updated", "deleted", "exported", "init_environment", "init_context", "created", "updated", "deleted", "init", "imported", "deleted", "created", "deleted", "shutdown", "init_context", 
"hasBuilderPermissions", "hasAdminPermissions", "init_context", "init_db", "init_environment", "logWarn", "EXPIRY_SECONDS", "init_environment", "users_exports", "isAdmin", "isCreator", "params", "getProdAppID", "isCreator", "init_src", "EXPIRY_SECONDS", "populateFromDB", "users_exports", "get", "createCode", "getCode", "TTL_SECONDS", "createCode", "getCode", "init_context", "init_environment", "init_constants", "init_db", "init_environment", "init_context", "options", "Role", "BuiltinPermissionID", "cloneDeep", "flatten", "init_db", "init_context", "import_cloneDeep", "Role", "cloneDeep", "role", "body", "init_environment", "init_context", "installation_exports", "TenantFeatureFlag", "auditLog_default", "init_context", "init_constants", "auditLog_default", "middleware_default", "init_constants", "init_context", "init_db", "init_context", "import_node_fetch", "authenticate", "fetch", "authenticate", "buildVerifyFn", "getCallbackUrl", "strategyFactory", "import_node_fetch", "buildVerifyFn", "params", "authenticate", "strategyFactory", "fetch", "body", "getCallbackUrl", "google_exports", "init_constants", "GoogleStrategy", "passport", "init_constants", "matches", "pattern", "options", "init_db", "init_context", "import_fs", "import_zlib", "init_environment", "import_path", "SEPARATOR", "SecretOption", "crypto", "fs", "zlib", "path", "stream", "init_environment", "import_dd_trace", "tracer", "auditLog_default", "init_context", "init_constants", "init_constants", "init_constants", "isAdmin", "init_context", "init_environment", "hasBuilderPermissions", "isBuilder", "isAdmin", "init_context", "init_environment", "hasBuilderPermissions", "isBuilder", "init_environment", "pino", "correlator", "init_constants", "import_uuid", "correlator", "uuid", "middleware_default", "body", "params", "Joi", "google_exports", "refreshToken", "params", "init_constants", "validate", "import_joi", "joi", "validate", "init_db", "init_context", "init_objectStore", "utils_exports", "init_utils", "init_timers", "init_environment", "init_environment", "import_util", "dns", "net", "init", "init_context", "processors", "events", "shutdown", "init", "processors", "init_db", "init_context", "init_constants", "init_db", "init"]
7
7
  }