flowtask 5.8.4 (cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl)
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- flowtask/__init__.py +93 -0
- flowtask/__main__.py +38 -0
- flowtask/bots/__init__.py +6 -0
- flowtask/bots/check.py +93 -0
- flowtask/bots/codebot.py +51 -0
- flowtask/components/ASPX.py +148 -0
- flowtask/components/AddDataset.py +352 -0
- flowtask/components/Amazon.py +523 -0
- flowtask/components/AutoTask.py +314 -0
- flowtask/components/Azure.py +80 -0
- flowtask/components/AzureUsers.py +106 -0
- flowtask/components/BaseAction.py +91 -0
- flowtask/components/BaseLoop.py +198 -0
- flowtask/components/BestBuy.py +800 -0
- flowtask/components/CSVToGCS.py +120 -0
- flowtask/components/CompanyScraper/__init__.py +1 -0
- flowtask/components/CompanyScraper/parsers/__init__.py +6 -0
- flowtask/components/CompanyScraper/parsers/base.py +102 -0
- flowtask/components/CompanyScraper/parsers/explorium.py +192 -0
- flowtask/components/CompanyScraper/parsers/leadiq.py +206 -0
- flowtask/components/CompanyScraper/parsers/rocket.py +133 -0
- flowtask/components/CompanyScraper/parsers/siccode.py +109 -0
- flowtask/components/CompanyScraper/parsers/visualvisitor.py +130 -0
- flowtask/components/CompanyScraper/parsers/zoominfo.py +118 -0
- flowtask/components/CompanyScraper/scrapper.py +1054 -0
- flowtask/components/CopyTo.py +177 -0
- flowtask/components/CopyToBigQuery.py +243 -0
- flowtask/components/CopyToMongoDB.py +291 -0
- flowtask/components/CopyToPg.py +609 -0
- flowtask/components/CopyToRethink.py +207 -0
- flowtask/components/CreateGCSBucket.py +102 -0
- flowtask/components/CreateReport/CreateReport.py +228 -0
- flowtask/components/CreateReport/__init__.py +9 -0
- flowtask/components/CreateReport/charts/__init__.py +15 -0
- flowtask/components/CreateReport/charts/bar.py +51 -0
- flowtask/components/CreateReport/charts/base.py +66 -0
- flowtask/components/CreateReport/charts/pie.py +64 -0
- flowtask/components/CreateReport/utils.py +9 -0
- flowtask/components/CustomerSatisfaction.py +196 -0
- flowtask/components/DataInput.py +200 -0
- flowtask/components/DateList.py +255 -0
- flowtask/components/DbClient.py +163 -0
- flowtask/components/DialPad.py +146 -0
- flowtask/components/DocumentDBQuery.py +200 -0
- flowtask/components/DownloadFrom.py +371 -0
- flowtask/components/DownloadFromD2L.py +113 -0
- flowtask/components/DownloadFromFTP.py +181 -0
- flowtask/components/DownloadFromIMAP.py +315 -0
- flowtask/components/DownloadFromS3.py +198 -0
- flowtask/components/DownloadFromSFTP.py +265 -0
- flowtask/components/DownloadFromSharepoint.py +110 -0
- flowtask/components/DownloadFromSmartSheet.py +114 -0
- flowtask/components/DownloadS3File.py +229 -0
- flowtask/components/Dummy.py +59 -0
- flowtask/components/DuplicatePhoto.py +411 -0
- flowtask/components/EmployeeEvaluation.py +237 -0
- flowtask/components/ExecuteSQL.py +323 -0
- flowtask/components/ExtractHTML.py +178 -0
- flowtask/components/FileBase.py +178 -0
- flowtask/components/FileCopy.py +181 -0
- flowtask/components/FileDelete.py +82 -0
- flowtask/components/FileExists.py +146 -0
- flowtask/components/FileIteratorDelete.py +112 -0
- flowtask/components/FileList.py +194 -0
- flowtask/components/FileOpen.py +75 -0
- flowtask/components/FileRead.py +120 -0
- flowtask/components/FileRename.py +106 -0
- flowtask/components/FilterIf.py +284 -0
- flowtask/components/FilterRows/FilterRows.py +200 -0
- flowtask/components/FilterRows/__init__.py +10 -0
- flowtask/components/FilterRows/functions.py +4 -0
- flowtask/components/GCSToBigQuery.py +103 -0
- flowtask/components/GoogleA4.py +150 -0
- flowtask/components/GoogleGeoCoding.py +344 -0
- flowtask/components/GooglePlaces.py +315 -0
- flowtask/components/GoogleSearch.py +539 -0
- flowtask/components/HTTPClient.py +268 -0
- flowtask/components/ICIMS.py +146 -0
- flowtask/components/IF.py +179 -0
- flowtask/components/IcimsFolderCopy.py +173 -0
- flowtask/components/ImageFeatures/__init__.py +5 -0
- flowtask/components/ImageFeatures/process.py +233 -0
- flowtask/components/IteratorBase.py +251 -0
- flowtask/components/LangchainLoader/__init__.py +5 -0
- flowtask/components/LangchainLoader/loader.py +194 -0
- flowtask/components/LangchainLoader/loaders/__init__.py +22 -0
- flowtask/components/LangchainLoader/loaders/abstract.py +362 -0
- flowtask/components/LangchainLoader/loaders/basepdf.py +50 -0
- flowtask/components/LangchainLoader/loaders/docx.py +91 -0
- flowtask/components/LangchainLoader/loaders/html.py +119 -0
- flowtask/components/LangchainLoader/loaders/pdfblocks.py +146 -0
- flowtask/components/LangchainLoader/loaders/pdfmark.py +79 -0
- flowtask/components/LangchainLoader/loaders/pdftables.py +135 -0
- flowtask/components/LangchainLoader/loaders/qa.py +67 -0
- flowtask/components/LangchainLoader/loaders/txt.py +55 -0
- flowtask/components/LeadIQ.py +650 -0
- flowtask/components/Loop.py +253 -0
- flowtask/components/Lowes.py +334 -0
- flowtask/components/MS365Usage.py +156 -0
- flowtask/components/MSTeamsMessages.py +320 -0
- flowtask/components/MarketClustering.py +1051 -0
- flowtask/components/MergeFiles.py +362 -0
- flowtask/components/MilvusOutput.py +87 -0
- flowtask/components/NearByStores.py +175 -0
- flowtask/components/NetworkNinja/__init__.py +6 -0
- flowtask/components/NetworkNinja/models/__init__.py +52 -0
- flowtask/components/NetworkNinja/models/abstract.py +177 -0
- flowtask/components/NetworkNinja/models/account.py +39 -0
- flowtask/components/NetworkNinja/models/client.py +19 -0
- flowtask/components/NetworkNinja/models/district.py +14 -0
- flowtask/components/NetworkNinja/models/events.py +101 -0
- flowtask/components/NetworkNinja/models/forms.py +499 -0
- flowtask/components/NetworkNinja/models/market.py +16 -0
- flowtask/components/NetworkNinja/models/organization.py +34 -0
- flowtask/components/NetworkNinja/models/photos.py +125 -0
- flowtask/components/NetworkNinja/models/project.py +44 -0
- flowtask/components/NetworkNinja/models/region.py +28 -0
- flowtask/components/NetworkNinja/models/store.py +203 -0
- flowtask/components/NetworkNinja/models/user.py +151 -0
- flowtask/components/NetworkNinja/router.py +854 -0
- flowtask/components/Odoo.py +175 -0
- flowtask/components/OdooInjector.py +192 -0
- flowtask/components/OpenFromXML.py +126 -0
- flowtask/components/OpenWeather.py +41 -0
- flowtask/components/OpenWithBase.py +616 -0
- flowtask/components/OpenWithPandas.py +715 -0
- flowtask/components/PGPDecrypt.py +199 -0
- flowtask/components/PandasIterator.py +187 -0
- flowtask/components/PandasToFile.py +189 -0
- flowtask/components/Paradox.py +339 -0
- flowtask/components/ParamIterator.py +117 -0
- flowtask/components/ParseHTML.py +84 -0
- flowtask/components/PlacerStores.py +249 -0
- flowtask/components/Pokemon.py +507 -0
- flowtask/components/PositiveBot.py +62 -0
- flowtask/components/PowerPointSlide.py +400 -0
- flowtask/components/PrintMessage.py +127 -0
- flowtask/components/ProductCompetitors/__init__.py +5 -0
- flowtask/components/ProductCompetitors/parsers/__init__.py +7 -0
- flowtask/components/ProductCompetitors/parsers/base.py +72 -0
- flowtask/components/ProductCompetitors/parsers/bestbuy.py +86 -0
- flowtask/components/ProductCompetitors/parsers/lowes.py +103 -0
- flowtask/components/ProductCompetitors/scrapper.py +155 -0
- flowtask/components/ProductCompliant.py +169 -0
- flowtask/components/ProductInfo/__init__.py +1 -0
- flowtask/components/ProductInfo/parsers/__init__.py +5 -0
- flowtask/components/ProductInfo/parsers/base.py +83 -0
- flowtask/components/ProductInfo/parsers/brother.py +97 -0
- flowtask/components/ProductInfo/parsers/canon.py +167 -0
- flowtask/components/ProductInfo/parsers/epson.py +118 -0
- flowtask/components/ProductInfo/parsers/hp.py +131 -0
- flowtask/components/ProductInfo/parsers/samsung.py +97 -0
- flowtask/components/ProductInfo/scraper.py +319 -0
- flowtask/components/ProductPricing.py +118 -0
- flowtask/components/QS.py +261 -0
- flowtask/components/QSBase.py +201 -0
- flowtask/components/QueryIterator.py +273 -0
- flowtask/components/QueryToInsert.py +327 -0
- flowtask/components/QueryToPandas.py +432 -0
- flowtask/components/RESTClient.py +195 -0
- flowtask/components/RethinkDBQuery.py +189 -0
- flowtask/components/Rsync.py +74 -0
- flowtask/components/RunSSH.py +59 -0
- flowtask/components/RunShell.py +71 -0
- flowtask/components/SalesForce.py +20 -0
- flowtask/components/SaveImageBank/__init__.py +257 -0
- flowtask/components/SchedulingVisits.py +592 -0
- flowtask/components/ScrapPage.py +216 -0
- flowtask/components/ScrapSearch.py +79 -0
- flowtask/components/SendNotify.py +257 -0
- flowtask/components/SentimentAnalysis.py +694 -0
- flowtask/components/ServiceScrapper/__init__.py +5 -0
- flowtask/components/ServiceScrapper/parsers/__init__.py +1 -0
- flowtask/components/ServiceScrapper/parsers/base.py +94 -0
- flowtask/components/ServiceScrapper/parsers/costco.py +93 -0
- flowtask/components/ServiceScrapper/scrapper.py +199 -0
- flowtask/components/SetVariables.py +156 -0
- flowtask/components/SubTask.py +182 -0
- flowtask/components/SuiteCRM.py +48 -0
- flowtask/components/Switch.py +175 -0
- flowtask/components/TableBase.py +148 -0
- flowtask/components/TableDelete.py +312 -0
- flowtask/components/TableInput.py +143 -0
- flowtask/components/TableOutput/TableOutput.py +384 -0
- flowtask/components/TableOutput/__init__.py +3 -0
- flowtask/components/TableSchema.py +534 -0
- flowtask/components/Target.py +223 -0
- flowtask/components/ThumbnailGenerator.py +156 -0
- flowtask/components/ToPandas.py +67 -0
- flowtask/components/TransformRows/TransformRows.py +507 -0
- flowtask/components/TransformRows/__init__.py +9 -0
- flowtask/components/TransformRows/functions.py +559 -0
- flowtask/components/TransposeRows.py +176 -0
- flowtask/components/UPCDatabase.py +86 -0
- flowtask/components/UnGzip.py +171 -0
- flowtask/components/Uncompress.py +172 -0
- flowtask/components/UniqueRows.py +126 -0
- flowtask/components/Unzip.py +107 -0
- flowtask/components/UpdateOperationalVars.py +147 -0
- flowtask/components/UploadTo.py +299 -0
- flowtask/components/UploadToS3.py +136 -0
- flowtask/components/UploadToSFTP.py +160 -0
- flowtask/components/UploadToSharepoint.py +205 -0
- flowtask/components/UserFunc.py +122 -0
- flowtask/components/VivaTracker.py +140 -0
- flowtask/components/WSDLClient.py +123 -0
- flowtask/components/Wait.py +18 -0
- flowtask/components/Walmart.py +199 -0
- flowtask/components/Workplace.py +134 -0
- flowtask/components/XMLToPandas.py +267 -0
- flowtask/components/Zammad/__init__.py +41 -0
- flowtask/components/Zammad/models.py +0 -0
- flowtask/components/ZoomInfoScraper.py +409 -0
- flowtask/components/__init__.py +104 -0
- flowtask/components/abstract.py +18 -0
- flowtask/components/flow.py +530 -0
- flowtask/components/google.py +335 -0
- flowtask/components/group.py +221 -0
- flowtask/components/py.typed +0 -0
- flowtask/components/reviewscrap.py +132 -0
- flowtask/components/tAutoincrement.py +117 -0
- flowtask/components/tConcat.py +109 -0
- flowtask/components/tExplode.py +119 -0
- flowtask/components/tFilter.py +184 -0
- flowtask/components/tGroup.py +236 -0
- flowtask/components/tJoin.py +270 -0
- flowtask/components/tMap/__init__.py +9 -0
- flowtask/components/tMap/functions.py +54 -0
- flowtask/components/tMap/tMap.py +450 -0
- flowtask/components/tMelt.py +112 -0
- flowtask/components/tMerge.py +114 -0
- flowtask/components/tOrder.py +93 -0
- flowtask/components/tPandas.py +94 -0
- flowtask/components/tPivot.py +71 -0
- flowtask/components/tPluckCols.py +76 -0
- flowtask/components/tUnnest.py +82 -0
- flowtask/components/user.py +401 -0
- flowtask/conf.py +457 -0
- flowtask/download.py +102 -0
- flowtask/events/__init__.py +11 -0
- flowtask/events/events/__init__.py +20 -0
- flowtask/events/events/abstract.py +95 -0
- flowtask/events/events/alerts/__init__.py +362 -0
- flowtask/events/events/alerts/colfunctions.py +131 -0
- flowtask/events/events/alerts/functions.py +158 -0
- flowtask/events/events/dummy.py +12 -0
- flowtask/events/events/exec.py +124 -0
- flowtask/events/events/file/__init__.py +7 -0
- flowtask/events/events/file/base.py +51 -0
- flowtask/events/events/file/copy.py +23 -0
- flowtask/events/events/file/delete.py +16 -0
- flowtask/events/events/interfaces/__init__.py +9 -0
- flowtask/events/events/interfaces/client.py +67 -0
- flowtask/events/events/interfaces/credentials.py +28 -0
- flowtask/events/events/interfaces/notifications.py +58 -0
- flowtask/events/events/jira.py +122 -0
- flowtask/events/events/log.py +26 -0
- flowtask/events/events/logerr.py +52 -0
- flowtask/events/events/notify.py +59 -0
- flowtask/events/events/notify_event.py +160 -0
- flowtask/events/events/publish.py +54 -0
- flowtask/events/events/sendfile.py +104 -0
- flowtask/events/events/task.py +97 -0
- flowtask/events/events/teams.py +98 -0
- flowtask/events/events/webhook.py +58 -0
- flowtask/events/manager.py +287 -0
- flowtask/exceptions.c +39393 -0
- flowtask/exceptions.cpython-310-x86_64-linux-gnu.so +0 -0
- flowtask/extensions/__init__.py +3 -0
- flowtask/extensions/abstract.py +82 -0
- flowtask/extensions/logging/__init__.py +65 -0
- flowtask/hooks/__init__.py +9 -0
- flowtask/hooks/actions/__init__.py +22 -0
- flowtask/hooks/actions/abstract.py +66 -0
- flowtask/hooks/actions/dummy.py +23 -0
- flowtask/hooks/actions/jira.py +74 -0
- flowtask/hooks/actions/rest.py +320 -0
- flowtask/hooks/actions/sampledata.py +37 -0
- flowtask/hooks/actions/sensor.py +23 -0
- flowtask/hooks/actions/task.py +9 -0
- flowtask/hooks/actions/ticket.py +37 -0
- flowtask/hooks/actions/zammad.py +55 -0
- flowtask/hooks/hook.py +62 -0
- flowtask/hooks/models.py +17 -0
- flowtask/hooks/service.py +187 -0
- flowtask/hooks/step.py +91 -0
- flowtask/hooks/types/__init__.py +23 -0
- flowtask/hooks/types/base.py +129 -0
- flowtask/hooks/types/brokers/__init__.py +11 -0
- flowtask/hooks/types/brokers/base.py +54 -0
- flowtask/hooks/types/brokers/mqtt.py +35 -0
- flowtask/hooks/types/brokers/rabbitmq.py +82 -0
- flowtask/hooks/types/brokers/redis.py +83 -0
- flowtask/hooks/types/brokers/sqs.py +44 -0
- flowtask/hooks/types/fs.py +232 -0
- flowtask/hooks/types/http.py +49 -0
- flowtask/hooks/types/imap.py +200 -0
- flowtask/hooks/types/jira.py +279 -0
- flowtask/hooks/types/mail.py +205 -0
- flowtask/hooks/types/postgres.py +98 -0
- flowtask/hooks/types/responses/__init__.py +8 -0
- flowtask/hooks/types/responses/base.py +5 -0
- flowtask/hooks/types/sharepoint.py +288 -0
- flowtask/hooks/types/ssh.py +141 -0
- flowtask/hooks/types/tagged.py +59 -0
- flowtask/hooks/types/upload.py +85 -0
- flowtask/hooks/types/watch.py +71 -0
- flowtask/hooks/types/web.py +36 -0
- flowtask/interfaces/AzureClient.py +137 -0
- flowtask/interfaces/AzureGraph.py +839 -0
- flowtask/interfaces/Boto3Client.py +326 -0
- flowtask/interfaces/DropboxClient.py +173 -0
- flowtask/interfaces/ExcelHandler.py +94 -0
- flowtask/interfaces/FTPClient.py +131 -0
- flowtask/interfaces/GoogleCalendar.py +201 -0
- flowtask/interfaces/GoogleClient.py +133 -0
- flowtask/interfaces/GoogleDrive.py +127 -0
- flowtask/interfaces/GoogleGCS.py +89 -0
- flowtask/interfaces/GoogleGeocoding.py +93 -0
- flowtask/interfaces/GoogleLang.py +114 -0
- flowtask/interfaces/GooglePub.py +61 -0
- flowtask/interfaces/GoogleSheet.py +68 -0
- flowtask/interfaces/IMAPClient.py +137 -0
- flowtask/interfaces/O365Calendar.py +113 -0
- flowtask/interfaces/O365Client.py +220 -0
- flowtask/interfaces/OneDrive.py +284 -0
- flowtask/interfaces/Outlook.py +155 -0
- flowtask/interfaces/ParrotBot.py +130 -0
- flowtask/interfaces/SSHClient.py +378 -0
- flowtask/interfaces/Sharepoint.py +496 -0
- flowtask/interfaces/__init__.py +36 -0
- flowtask/interfaces/azureauth.py +119 -0
- flowtask/interfaces/cache.py +201 -0
- flowtask/interfaces/client.py +82 -0
- flowtask/interfaces/compress.py +525 -0
- flowtask/interfaces/credentials.py +124 -0
- flowtask/interfaces/d2l.py +239 -0
- flowtask/interfaces/databases/__init__.py +5 -0
- flowtask/interfaces/databases/db.py +223 -0
- flowtask/interfaces/databases/documentdb.py +55 -0
- flowtask/interfaces/databases/rethink.py +39 -0
- flowtask/interfaces/dataframes/__init__.py +11 -0
- flowtask/interfaces/dataframes/abstract.py +21 -0
- flowtask/interfaces/dataframes/arrow.py +71 -0
- flowtask/interfaces/dataframes/dt.py +69 -0
- flowtask/interfaces/dataframes/pandas.py +167 -0
- flowtask/interfaces/dataframes/polars.py +60 -0
- flowtask/interfaces/db.py +263 -0
- flowtask/interfaces/env.py +46 -0
- flowtask/interfaces/func.py +137 -0
- flowtask/interfaces/http.py +1780 -0
- flowtask/interfaces/locale.py +40 -0
- flowtask/interfaces/log.py +75 -0
- flowtask/interfaces/mask.py +143 -0
- flowtask/interfaces/notification.py +154 -0
- flowtask/interfaces/playwright.py +339 -0
- flowtask/interfaces/powerpoint.py +368 -0
- flowtask/interfaces/py.typed +0 -0
- flowtask/interfaces/qs.py +376 -0
- flowtask/interfaces/result.py +87 -0
- flowtask/interfaces/selenium_service.py +779 -0
- flowtask/interfaces/smartsheet.py +154 -0
- flowtask/interfaces/stat.py +39 -0
- flowtask/interfaces/task.py +96 -0
- flowtask/interfaces/template.py +118 -0
- flowtask/interfaces/vectorstores/__init__.py +1 -0
- flowtask/interfaces/vectorstores/abstract.py +133 -0
- flowtask/interfaces/vectorstores/milvus.py +669 -0
- flowtask/interfaces/zammad.py +107 -0
- flowtask/models.py +193 -0
- flowtask/parsers/__init__.py +15 -0
- flowtask/parsers/_yaml.c +11978 -0
- flowtask/parsers/_yaml.cpython-310-x86_64-linux-gnu.so +0 -0
- flowtask/parsers/argparser.py +235 -0
- flowtask/parsers/base.c +15155 -0
- flowtask/parsers/base.cpython-310-x86_64-linux-gnu.so +0 -0
- flowtask/parsers/json.c +11968 -0
- flowtask/parsers/json.cpython-310-x86_64-linux-gnu.so +0 -0
- flowtask/parsers/maps.py +49 -0
- flowtask/parsers/toml.c +11968 -0
- flowtask/parsers/toml.cpython-310-x86_64-linux-gnu.so +0 -0
- flowtask/plugins/__init__.py +16 -0
- flowtask/plugins/components/__init__.py +0 -0
- flowtask/plugins/handler/__init__.py +45 -0
- flowtask/plugins/importer.py +31 -0
- flowtask/plugins/sources/__init__.py +0 -0
- flowtask/runner.py +283 -0
- flowtask/scheduler/__init__.py +9 -0
- flowtask/scheduler/functions.py +493 -0
- flowtask/scheduler/handlers/__init__.py +8 -0
- flowtask/scheduler/handlers/manager.py +504 -0
- flowtask/scheduler/handlers/models.py +58 -0
- flowtask/scheduler/handlers/service.py +72 -0
- flowtask/scheduler/notifications.py +65 -0
- flowtask/scheduler/scheduler.py +993 -0
- flowtask/services/__init__.py +0 -0
- flowtask/services/bots/__init__.py +0 -0
- flowtask/services/bots/telegram.py +264 -0
- flowtask/services/files/__init__.py +11 -0
- flowtask/services/files/manager.py +522 -0
- flowtask/services/files/model.py +37 -0
- flowtask/services/files/service.py +767 -0
- flowtask/services/jira/__init__.py +3 -0
- flowtask/services/jira/jira_actions.py +191 -0
- flowtask/services/tasks/__init__.py +13 -0
- flowtask/services/tasks/launcher.py +213 -0
- flowtask/services/tasks/manager.py +323 -0
- flowtask/services/tasks/service.py +275 -0
- flowtask/services/tasks/task_manager.py +376 -0
- flowtask/services/tasks/tasks.py +155 -0
- flowtask/storages/__init__.py +16 -0
- flowtask/storages/exceptions.py +12 -0
- flowtask/storages/files/__init__.py +8 -0
- flowtask/storages/files/abstract.py +29 -0
- flowtask/storages/files/filesystem.py +66 -0
- flowtask/storages/tasks/__init__.py +19 -0
- flowtask/storages/tasks/abstract.py +26 -0
- flowtask/storages/tasks/database.py +33 -0
- flowtask/storages/tasks/filesystem.py +108 -0
- flowtask/storages/tasks/github.py +119 -0
- flowtask/storages/tasks/memory.py +45 -0
- flowtask/storages/tasks/row.py +25 -0
- flowtask/tasks/__init__.py +0 -0
- flowtask/tasks/abstract.py +526 -0
- flowtask/tasks/command.py +118 -0
- flowtask/tasks/pile.py +486 -0
- flowtask/tasks/py.typed +0 -0
- flowtask/tasks/task.py +778 -0
- flowtask/template/__init__.py +161 -0
- flowtask/tests.py +257 -0
- flowtask/types/__init__.py +8 -0
- flowtask/types/typedefs.c +11347 -0
- flowtask/types/typedefs.cpython-310-x86_64-linux-gnu.so +0 -0
- flowtask/utils/__init__.py +24 -0
- flowtask/utils/constants.py +117 -0
- flowtask/utils/encoders.py +21 -0
- flowtask/utils/executor.py +112 -0
- flowtask/utils/functions.cpp +14280 -0
- flowtask/utils/functions.cpython-310-x86_64-linux-gnu.so +0 -0
- flowtask/utils/json.cpp +13349 -0
- flowtask/utils/json.cpython-310-x86_64-linux-gnu.so +0 -0
- flowtask/utils/mail.py +63 -0
- flowtask/utils/parseqs.c +13324 -0
- flowtask/utils/parserqs.cpython-310-x86_64-linux-gnu.so +0 -0
- flowtask/utils/stats.py +308 -0
- flowtask/utils/transformations.py +74 -0
- flowtask/utils/uv.py +12 -0
- flowtask/utils/validators.py +97 -0
- flowtask/version.py +11 -0
- flowtask-5.8.4.dist-info/LICENSE +201 -0
- flowtask-5.8.4.dist-info/METADATA +209 -0
- flowtask-5.8.4.dist-info/RECORD +470 -0
- flowtask-5.8.4.dist-info/WHEEL +6 -0
- flowtask-5.8.4.dist-info/entry_points.txt +3 -0
- flowtask-5.8.4.dist-info/top_level.txt +2 -0
- plugins/components/CreateQR.py +39 -0
- plugins/components/TestComponent.py +28 -0
- plugins/components/Use1.py +13 -0
- plugins/components/Workplace.py +117 -0
- plugins/components/__init__.py +3 -0
- plugins/sources/__init__.py +0 -0
- plugins/sources/get_populartimes.py +78 -0
- plugins/sources/google.py +150 -0
- plugins/sources/hubspot.py +679 -0
- plugins/sources/icims.py +679 -0
- plugins/sources/mobileinsight.py +501 -0
- plugins/sources/newrelic.py +262 -0
- plugins/sources/uap.py +268 -0
- plugins/sources/venu.py +244 -0
- plugins/sources/vocinity.py +314 -0
flowtask/scheduler/scheduler.py
@@ -0,0 +1,993 @@
"""
NavScheduler.

Job for attaching tasks to the Scheduler.
"""
import asyncio
import locale
import os
import socket
import sys
import traceback
import zoneinfo
from collections.abc import Callable
from concurrent.futures import ThreadPoolExecutor as ThreadExecutor
from datetime import datetime, timedelta
from functools import partial
from redis import asyncio as aioredis
from aiohttp import web
from apscheduler.events import (
    EVENT_JOB_ADDED,
    EVENT_JOB_ERROR,
    EVENT_JOB_EXECUTED,
    EVENT_JOB_MAX_INSTANCES,
    EVENT_JOB_MISSED,
    EVENT_JOB_SUBMITTED,
    EVENT_SCHEDULER_SHUTDOWN,
    EVENT_SCHEDULER_STARTED,
    JobExecutionEvent,
)
from apscheduler.executors.asyncio import AsyncIOExecutor
from apscheduler.executors.debug import DebugExecutor
from apscheduler.executors.pool import ProcessPoolExecutor, ThreadPoolExecutor
from apscheduler.jobstores.base import ConflictingIdError, JobLookupError

# Jobstores
from apscheduler.jobstores.memory import MemoryJobStore
from apscheduler.jobstores.redis import RedisJobStore
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
from apscheduler.jobstores.rethinkdb import RethinkDBJobStore

# apscheduler library #
# Default Scheduler:
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.triggers.combining import AndTrigger, OrTrigger
from apscheduler.triggers.cron import CronTrigger
from apscheduler.triggers.date import DateTrigger
# from apscheduler.util import utc
from pytz import utc

# Triggers
from apscheduler.triggers.interval import IntervalTrigger

# navconfig
from navconfig import config as navConfig
from navconfig.logging import logging

# asyncdb:
from asyncdb import AsyncDB
from navigator.connections import PostgresPool
from querysource.types.validators import Entity

# Queue Worker Client:
from qw.client import QClient

# Configuration
from ..conf import (
    CACHE_HOST,
    CACHE_PORT,
    ENABLE_JOBS,
    ENVIRONMENT,
    SCHEDULER_GRACE_TIME,
    SCHEDULER_MAX_INSTANCES,
    SYSTEM_LOCALE,
    TIMEZONE,
    WORKER_HIGH_LIST,
    WORKER_LIST,
    default_dsn,
    USE_TIMEZONE,
    PUBSUB_REDIS,
    ERROR_CHANNEL,
    ALLOW_RESCHEDULE,
    SCHEDULER_STARTUP_JOB
)
from ..utils.json import json_decoder

# Handler
from .handlers import SchedulerManager, JobManager
from .notifications import send_notification
from .functions import TaskScheduler, get_function


# disable logging of APScheduler
logging.getLogger("apscheduler").setLevel(logging.WARNING)


jobstores = {
    "default": MemoryJobStore(),
    "db": RedisJobStore(
        db=3,
        jobs_key="apscheduler.jobs",
        run_times_key="apscheduler.run_times",
        host=CACHE_HOST,
        port=CACHE_PORT,
    ),
    # "rethink": RethinkDBJobStore(
    #     database="scheduler",
    #     table="jobs",
    # )
}

job_defaults = {
    "coalesce": True,
    "max_instances": SCHEDULER_MAX_INSTANCES,
    "misfire_grace_time": SCHEDULER_GRACE_TIME,
}


SCHEDULER_LOCK_KEY = "navigator_singleton_lock"


class NavScheduler:
    """NavScheduler.

    Demonstrates how to use the asyncio compatible scheduler to schedule jobs.
    """

    def __init__(self, event_loop=None):
        self.db = None
        self._pool = None
        self._connection = None
        self._redis = None
        self._jobs: dict = {}
        self._loop = None
        self.scheduler = None
        self._args = None
        self._event = asyncio.Event()
        if event_loop:
            self._loop = event_loop
        else:
            try:
                self._loop = asyncio.get_running_loop()
            except RuntimeError:
                self._loop = asyncio.new_event_loop()
                asyncio.set_event_loop(self._loop)
        # logging
        self.logger = logging.getLogger(name="Flowtask.Scheduler")
        # asyncio scheduler
        if TIMEZONE == 'UTC':
            self._timezone = utc
        else:
            self._timezone = zoneinfo.ZoneInfo(key=TIMEZONE)
        self.jobstores = jobstores
        # defining Locale
        try:
            locale.setlocale(locale.LC_ALL, SYSTEM_LOCALE)
        except locale.Error as e:
            self.logger.exception(e, exc_info=True)
        # Interval to renew the lock in seconds
        self._renewal_interval: int = 60
        # Time to live for the lock in seconds
        self._scheduler_ttl: int = 180
        self._lock_check_interval: int = 120
        self.set_workers()

    def setup(self, app: web.Application):
        self.db = PostgresPool(
            dsn=default_dsn,
            name="FlowTask.Scheduler",
            startup=self.startup
        )
        self.db.configure(app, register="database")  # pylint: disable=E1123
        # add the scheduler to the current app
        app["scheduler"] = self
        # add the routes:
        app.router.add_view("/api/v2/scheduler", SchedulerManager)
        app.router.add_view("/api/v2/scheduler/{job}", SchedulerManager)
        # Job Manager:
        JobManager.configure(app)
        # define subscribers
        app.on_startup.append(self.start_subscriber)
        app.on_cleanup.append(self.stop_subscriber)

    async def _start_redis(self):
        self._redis = aioredis.from_url(
            PUBSUB_REDIS, encoding="utf-8", decode_responses=True
        )

    async def _stop_redis(self):
        try:
            await self._redis.close()
            await self._redis.connection_pool.disconnect()
        except Exception:
            pass

    async def listen_task_events(self):
        try:
            async with self._redis.pubsub() as pubsub:
                await pubsub.subscribe(ERROR_CHANNEL)
                future = asyncio.create_task(self.event_reader(pubsub))
                await future
        except Exception as exc:
            print(exc)

    async def start_subscriber(self, app):
        ## listen subscriber:
        self.logger.notice(':: Starting Task Subscriber :: ')
        app["redis_listener"] = asyncio.get_event_loop().create_task(
            self.listen_task_events()
        )

    async def stop_subscriber(self, app):
        self.logger.notice("Stopping subscriber")
        self._event.set()
        app["redis_listener"].cancel()

    async def get_job_id(self, task_id: str) -> str:
        try:
            if job := await self._redis.get(task_id):
                # delete this task id from redis:
                await self._redis.delete(task_id)
                return job
        except Exception as err:
            self.logger.warning(f"Error Getting Task ID from Redis: {err}")
        return None

    async def event_reader(self, channel: aioredis.client.PubSub):
        try:
            while not self._event.is_set():
                msg = await channel.get_message(ignore_subscribe_messages=True)
                if msg is not None:
                    message = json_decoder(msg["data"])
                    status = message.get("status", "error")
                    task = message.get("task", None)
                    task_id = message.get("task_id", None)
                    if status != "task not found":
                        # check if the task was dispatched by me:
                        if job_id := await self.get_job_id(task_id):
                            self.logger.error(f"Received Failed Task: {msg}")
                            self.process_failed_task(task, job_id)
        except Exception as err:
            self.logger.warning(
                f"Stopping Task Event Subscription {err}"
            )
        finally:
            await channel.unsubscribe(ERROR_CHANNEL)

    def process_failed_task(self, task_name: str, job_id: str = None):
        program, task = task_name.split(".")
        if not job_id:
            job_id = f"{program}_{task}"
        try:
            info = self._jobs[job_id]
            job_info = info.get("data", None)
            if not job_info:
                return
            _max = job_info.get("rescheduled_max", None)
            if ALLOW_RESCHEDULE is True and job_info["reschedule"] is True:
                if info["status"] not in ("dispatched", "success", "retry"):
                    # This task was not dispatched by me, returning
                    return
                job = self.scheduler.get_job(job_id)
                if info["retry"] > 0 and _max is None:
                    # we need to stop the rescheduling:
                    return
                if _max is not None and info["retry"] >= _max:
                    # We cannot reschedule this task again, returning
                    return
                # Jitter for re-scheduling in minutes
                jitter = job_info.get("reschedule_jitter", 5)
                old_trigger = job.trigger
                if not info["trigger"]:
                    # only set if not already set:
                    self._jobs[job_id]["trigger"] = old_trigger
                job.reschedule(trigger=IntervalTrigger(minutes=jitter))
                self._jobs[job_id]["retry"] += 1
                self._jobs[job_id]["status"] = "retry"
                self.logger.warning(
                    f"Job {job_id} was re-scheduled to {jitter} minutes"
                )
        except KeyError:
            return

    async def acquire_scheduler_lock(self):
        if await self._redis.set(
            SCHEDULER_LOCK_KEY,
            "locked",
            ex=self._scheduler_ttl,
            nx=True
        ):
            return True
        return False

    async def renew_scheduler_lock(self):
        await self._redis.expire(SCHEDULER_LOCK_KEY, self._scheduler_ttl)
        print("Scheduler lock key renewed.")

    def _create_scheduler(self) -> AsyncIOScheduler:
        """_create_scheduler.

        Create a new APScheduler AsyncIOScheduler instance.
        """
        return AsyncIOScheduler(
            jobstores=self.jobstores,
            executors={
                "default": AsyncIOExecutor(),
                "process": ProcessPoolExecutor(max_workers=12),
                "asyncio": AsyncIOExecutor(),
                "thread": ThreadPoolExecutor(max_workers=16),
                "debug": DebugExecutor(),
            },
            job_defaults=job_defaults,
            timezone=self._timezone,
        )

    async def start_scheduler(self):
        self.scheduler = self._create_scheduler()
        # getting Jobs
        await self.create_jobs()
        # Create the Leadership Lock
        job_id = 'renewal_lock'
        # Add the renewal job to run every `RENEWAL_INTERVAL` seconds
        job = self.scheduler.add_job(
            self.renew_scheduler_lock,
            id=job_id,
            name="renewal_lock",
            logger=self.logger,
            jobstore_retry_interval=30,
            jobstore="default",
            executor="default",
            trigger=IntervalTrigger(
                seconds=self._renewal_interval
            ),
            replace_existing=True,
            remove_job_on_completion=True,
        )
        self._jobs[job_id] = {
            "data": None,
            "job": job,
            "status": "idle",
            "trigger": None,
            "retry": 0,
        }
        # adding listeners
        self.add_listeners()
        self.logger.info(
            f"Scheduled Started at {datetime.now()}"
        )
        try:
            # starting scheduler
            self.scheduler.start()
        except Exception as err:
            st = self.get_stacktrace()
            message = f"Error Starting Scheduler: {err!s} {st!s}"
            self.send_message(message)
            raise RuntimeError(
                f"Error Starting Scheduler {err!r}"
            ) from err

    def send_message(self, message: str) -> None:
        try:
            # send notification:
            event_loop = asyncio.new_event_loop()
            fn = partial(
                send_notification,
                event_loop=event_loop,
                message=message,
                provider="telegram",
            )
            # sending function coroutine to a thread
            with ThreadExecutor(max_workers=1) as pool:
                event_loop.run_in_executor(pool, fn)
        finally:
            event_loop.close()

    def set_workers(self) -> None:
        """
        Create the instance of Queue Workers.
        """
        if WORKER_LIST:
            self.qworker = QClient(worker_list=WORKER_LIST)
            self.qworker_high = QClient(worker_list=WORKER_HIGH_LIST)
        else:
            self.qworker = QClient()  # auto-discovering of workers
            self.qworker_high = self.qworker

    async def startup(self, app: web.Application, conn: Callable):
        """
        Scheduler Startup.
        """
        await self._start_redis()
        try:
            self._pool = conn
        except Exception as err:
            self.logger.exception(err)
            raise RuntimeError(
                f"{err!s}"
            ) from err
        # auxiliary connection
        if self._pool:
            self._connection = await self._pool.acquire()
        # set Zoneinfo:
        if USE_TIMEZONE is True:
            tz = f"SET timezone TO '{TIMEZONE}'"
            await self._connection.execute(tz)
        if await self.acquire_scheduler_lock() is True:
            # we have the lock, let's start the scheduler
            await self.start_scheduler()
            ## Add Scheduler to Application:
            app["_scheduler_"] = self.scheduler
        else:
            self.logger.warning(
                ":: Another worker is running the scheduler. Checking again shortly ::"
            )

        # Start periodic lock check to handle the failover
        asyncio.create_task(self.periodic_lock_check(app))

    async def periodic_lock_check(self, app: web.Application):
        """Periodically checks if the lock is available, attempting to acquire if not held."""
        while True:
            # check if scheduler lock is still alive
            if not await self._redis.exists(SCHEDULER_LOCK_KEY):
                self.logger.info(
                    "Scheduler lock key was lost, trying to acquire again."
                )
                try:
                    # try to shutting down the scheduler:
                    self.scheduler.shutdown(wait=True)
                except Exception:
                    pass
                if await self.acquire_scheduler_lock() is True:
                    print("Scheduler lock acquired. Starting scheduler.")
                    await self.start_scheduler()
                    # if start, add to the aiohttp application:
                    app["_scheduler_"] = self.scheduler
            await asyncio.sleep(self._lock_check_interval)

    @property
    def event_loop(self):
        return self._loop

    def set_test_job(self):
        self.logger.debug("Scheduler: Adding a Test job")
        run_date = datetime.now(self._timezone) + timedelta(minutes=1)
        # define Task:
        program = "navigator"
        task = "startup_job"
        job_id = "on_startup_job"
        sched = TaskScheduler(program, task, job_id, worker=self.qworker)
        sched.__class__.__name__ = f"Task({program}.{task})"
        self.scheduler.add_job(
            sched,
            id=job_id,
            name="startup_test_job",
            logger=self.logger,
            jobstore_retry_interval=30,
            jobstore="default",
            executor="default",
            trigger=DateTrigger(run_date=run_date, timezone=self._timezone),
            replace_existing=True,
            remove_job_on_completion=True,
        )

    async def create_jobs(self):
        self._jobs = {}
        jobs = []
        error = None
        if ENABLE_JOBS is True:
            # Job for self-service discovering
            async with await self._pool.acquire() as conn:
                sql_jobs = "SELECT * FROM troc.jobs WHERE enabled = true"
                try:
                    jobs, error = await conn.query(sql_jobs)
                except Exception as err:
                    # try to reconnect to DB:
                    error = f"Error getting Jobs, Unable to Connect to DB: {err!s}"
            if error:
                raise RuntimeError(
                    f"[{ENVIRONMENT} - Scheduler] Error getting Jobs: {error!s}"
                )
        # Add a Job for testing purposes.
        if SCHEDULER_STARTUP_JOB is True:
            self.set_test_job()
        for job in jobs:
            try:
                await self.add_job(job)
            except ValueError as err:
                self.logger.error(
                    f"Scheduler: Work Job info {job!r}: {err!s}"
                )
                continue
            except Exception as err:
                self.logger.error(
                    f"Scheduler: Error Adding Job {job['job_id']}: {err!s}"
                )
                continue
        return self._jobs

    async def add_job(self, job: dict):
        """add_job.

        Add a Job to the Scheduler.
        """
        jitter = None
        job_id = job["job_id"]
        if job["jitter"]:
            jitter = job["jitter"]
        # function or other call
        priority = job.get("priority", "low")
        attributes = []
        if priority == "high":
            worker = self.qworker_high
        else:
            worker = self.qworker
        try:
            func = get_function(job, priority=priority, worker=worker)
        except ValueError:
            raise
        if job["executor"] == "process":
            task, program = job["job"]["task"].values()
            attributes = [program, task]
        schedule_type = job["schedule_type"]
        if schedule_type is None:
            raise ValueError(
                f"Schedule Type for job {job_id} is None"
            )
        try:
            if job["schedule"] is None:
                raise ValueError(
                    f"Schedule for job {job_id} is None"
                )
            if schedule_type == "interval":
                t = job["schedule"]
                if job["start_date"]:
                    t = {**t, **{"start_date": job["start_date"]}}
                if job["end_date"]:
                    t = {**t, **{"end_date": job["end_date"]}}
                trigger = IntervalTrigger(**t)
            elif schedule_type == "crontab":
                t = job["schedule"]["crontab"]
                tz = job["schedule"].get("timezone", TIMEZONE)
                trigger = CronTrigger.from_crontab(t, timezone=tz)
            elif schedule_type == "cron":
                # trigger = self.get_cron_params(job['schedule'])
                trigger = job["schedule"]
                if job["start_date"]:
                    trigger = {**trigger, **{"start_date": job["start_date"]}}
                if job["end_date"]:
                    trigger = {**trigger, **{"end_date": job["end_date"]}}
                if jitter:
                    trigger = {**trigger, **{"jitter": jitter}}
                trigger = CronTrigger(**trigger)
            elif schedule_type == "date":
                trigger = DateTrigger(run_date=job["run_date"], timezone=self._timezone)
            elif schedule_type == "combined":
                # syntax:
                # { type="and", "schedule": [{"cron": "cron"}, {"cron": "cron"} ] }
                t = job["schedule"]
                try:
                    jointype = t["type"]
                except KeyError:
                    jointype = "and"
                steps = []
                for trigger in t["schedule"]:
                    # the expression need to be equal to Trigger Requirements
                    for step, value in trigger.items():
                        obj = self.get_trigger(step)
                        tg = obj(**value)
                        steps.append(tg)
                if jointype == "and":
                    trigger = AndTrigger(steps)
                else:
                    trigger = OrTrigger(steps)
            else:
                raise ValueError(f"Unknown schedule type: {schedule_type}")
        except TypeError as err:
            message = f'Error Adding Job with Id {job_id}: {err}'
            self.logger.error(message)
            self.send_message(message)
            return
        ## Building Job for Scheduler:
        job_struct = {
            "id": f"{job_id}",
            "name": f"{job_id}",
            "replace_existing": True,
            "jobstore": job["jobstore"],
            "executor": job["executor"],
        }
        arguments = {}
        if job["params"]:
            arguments = {**job["params"]}
        # agregar al args que recibe la tarea:
        if job["executor"] != "process":
            # we cannot pass an event loop to ProcessPoolExecutor
            arguments["loop"] = self._loop
            arguments["ENV"] = navConfig
        if job["attributes"]:
            attributes = job["attributes"]
        ## add this job
        if job_struct:
            try:
                j = self.scheduler.add_job(
                    func,
                    logger=self.logger,
                    jobstore_retry_interval=30,
                    trigger=trigger,
                    kwargs=arguments,
                    args=attributes,
                    **job_struct,
                )
                info = {
                    "data": job,
                    "job": j,
                    "status": "idle",
                    "trigger": None,
                    "retry": 0,
                }
                self._jobs[job_id] = info
            except TypeError as exc:
                self.logger.error(
                    f"Scheduler: Error Adding Job {job_id}: {exc}"
                )
                return
            except ConflictingIdError as err:
                self.logger.error(
                    f"Scheduler: Conflicting Error with ID {job_id}: {err}"
                )
                return
            except Exception as err:
                self.logger.exception(
                    f"Scheduler: Error Adding Job {job_id}: {err}",
                    stack_info=True
                )
                return
        else:
            self.logger.error(
                "Scheduler: Missing Scheduled Job Structure"
            )

    def add_listeners(self):
        # Asyncio Scheduler
        self.scheduler.add_listener(self.scheduler_status, EVENT_SCHEDULER_STARTED)
        self.scheduler.add_listener(self.scheduler_shutdown, EVENT_SCHEDULER_SHUTDOWN)
        self.scheduler.add_listener(self.job_success, EVENT_JOB_EXECUTED)
        self.scheduler.add_listener(self.job_status, EVENT_JOB_ERROR | EVENT_JOB_MISSED)
        # job was submitted:
        self.scheduler.add_listener(self.job_submitted, EVENT_JOB_SUBMITTED)
        # a new job was added:
        self.scheduler.add_listener(self.job_added, EVENT_JOB_ADDED)

    def job_added(self, event: JobExecutionEvent, *args, **kwargs):
        try:
            job = self.scheduler.get_job(event.job_id)
            job_name = job.name
            # TODO: using to check if tasks were added
            self.logger.info(f"Job Added: {job_name} with args: {args!s}/{kwargs!r}")
            # self.logger.notice(
            #     f'Job {job_name!s} was added with args: {args!s}/{kwargs!r}'
            # )
        except Exception:
            pass

    def get_jobs(self):
        return [job.id for job in self.scheduler.get_jobs()]

    def get_all_jobs(self):
        if self.scheduler is None:
            raise RuntimeError(
                "Scheduler not started. Please start the scheduler first."
            )
            return None
        try:
            return self.scheduler.get_jobs()
        except AttributeError:
            raise RuntimeError(
                "Scheduler not started. Please start the scheduler first."
            )

    def get_job(self, job_id):
        try:
            return self._jobs[job_id]
        except JobLookupError as err:
            self.logger.warning(f"Error found a Job with ID: {err}")
            return None
        except KeyError:
            return None

    def scheduler_status(self, event):
        print(event)
        self.logger.debug(f"[{ENVIRONMENT} - NAV Scheduler] :: Started.")
        self.logger.info(
            f"[{ENVIRONMENT} - NAV Scheduler] START time is: {datetime.now()}"
        )

    def scheduler_shutdown(self, event):
        self.logger.info(
            f"[{ENVIRONMENT}] Scheduler {event} Stopped at: {datetime.now()}"
        )

    def fix_job_schedule(self, job, job_id):
        """fix_job_schedule.

        Return a re-scheduled Job to cuirrent schedule.

        Args:
            job (APscheduler.job.Job): instance of APScheduler Job.
            job_id (str): ID of the job
        """
        if job_id in self._jobs and "trigger" in self._jobs[job_id]:
            try:
                trigger = self._jobs[job_id]["trigger"]
                if trigger and job.trigger != trigger:
                    # This task was rescheduled:
                    job.reschedule(trigger=trigger)
                    self._jobs[job_id]["trigger"] = None
                    self._jobs[job_id]["status"] = "reverted"
                    self.logger.info(f"Job {job_id} reverted to its original schedule.")
            except Exception as err:
                self.logger.warning(
                    f"Error while reverted job {job_id} to original schedule: {err}"
                )

    def job_success(self, event: JobExecutionEvent):
        """Job Success.

        Event when a Job was executed successfully.

        :param apscheduler.events.JobExecutionEvent event: job execution event
        """
        job_id = event.job_id
        try:
            job = self.scheduler.get_job(job_id)
        except JobLookupError as err:
            self.logger.warning(f"Error found a Job with ID: {err}")
            return False
        try:
            self.fix_job_schedule(job, job_id)
            self._jobs[job_id]["status"] = "success"
        except KeyError:
            # Job is missing from the Job Store.
            return False
        job_name = job.name
        self.logger.info(
            f"[Scheduler - {ENVIRONMENT}]: {job_name} with id {event.job_id!s} \
            was queued/executed successfully @ {event.scheduled_run_time!s}"
        )
        # saving into Database
        event_loop = asyncio.new_event_loop()
        fn = partial(self.save_db_event, event_loop=event_loop, event=event, job=job)
        try:
            with ThreadExecutor(max_workers=1) as pool:
                event_loop.run_in_executor(pool, fn)
        finally:
            event_loop.close()

    def job_status(self, event: JobExecutionEvent):
        """React on Error events from scheduler.

        :param apscheduler.events.JobExecutionEvent event: job execution event.

        TODO: add the reschedule_job
        scheduler = sched.scheduler #it returns the native apscheduler instance
        scheduler.reschedule_job('my_job_id', trigger='cron', minute='*/5')

        """
        job_id = event.job_id
        job = self.scheduler.get_job(job_id)
        self.fix_job_schedule(job, job_id)
        try:
            saved_job = self._jobs[job_id]
        except KeyError as exc:
            self.logger.warning(f"Error found a Job with ID: {exc}")
            return
        job_name = job.name
        scheduled = event.scheduled_run_time
        stack = event.traceback
        if event.code == EVENT_JOB_MISSED:
            self._jobs[job_id]["status"] = "missed"
            self.logger.warning(
                f"[{ENVIRONMENT} - NAV Scheduler] Job {job_name} \
                was missed for scheduled run at {scheduled}"
            )
            message = f"⚠️ :: [{ENVIRONMENT} - NAV Scheduler] Job {job_name} was missed \
            for scheduled run at {scheduled}"
        elif event.code == EVENT_JOB_ERROR:
            saved_job["status"] = "error"
            self.logger.error(
                f"[{ENVIRONMENT} - NAV Scheduler] Job {job_name} scheduled at \
                {scheduled!s} failed with Exception: {event.exception!s}"
            )
            message = f"🛑 :: [{ENVIRONMENT} - NAV Scheduler] Job **{job_name}** \
            scheduled at {scheduled!s} failed with Error {event.exception!s}"
            if stack:
                self.logger.exception(
                    f"[{ENVIRONMENT} - NAV Scheduler] Job {job_name} id: {job_id!s} \
                    StackTrace: {stack!s}"
                )
                message = f"🛑 :: [{ENVIRONMENT} - NAV Scheduler] Job \
                **{job_name}**:**{job_id!s}** failed with Exception {event.exception!s}"
            # send a Notification error from Scheduler
        elif event.code == EVENT_JOB_MAX_INSTANCES:
            saved_job["status"] = "Not Submitted"
            self.logger.exception(
                f"[{ENVIRONMENT} - Scheduler] Job {job_name} could not be submitted \
                Maximum number of running instances was reached."
            )
            message = f"⚠️ :: [{ENVIRONMENT} - NAV Scheduler] Job **{job_name}** was \
            missed for scheduled run at {scheduled}"
        else:
            saved_job["status"] = "exception"
            # will be an exception
            message = f"🛑 :: [{ENVIRONMENT} - NAV Scheduler] Job \
            {job_name}:{job_id!s} failed with Exception {stack!s}"
            # send a Notification Exception from Scheduler
        # send notification:
        event_loop = asyncio.new_event_loop()
        fn = partial(
            send_notification,
            event_loop=event_loop,
            message=message,
            provider="telegram",
        )
        saved = partial(self.save_db_event, event_loop=event_loop, event=event, job=job)
        # sending function coroutine to a thread
        try:
            with ThreadExecutor(max_workers=1) as pool:
                event_loop.run_in_executor(pool, saved)
                event_loop.run_in_executor(pool, fn)
        finally:
            event_loop.close()

    def save_db_event(self, event_loop, event, job):
        asyncio.set_event_loop(event_loop)
        state = Entity.escapeString(event.exception)
        trace = Entity.escapeString(event.traceback)
        if event.code == EVENT_JOB_MISSED:
            status = 3
        elif event.code == EVENT_JOB_ERROR:
            status = 2
        elif event.code == EVENT_JOB_MAX_INSTANCES:
            status = 4
        else:
            state = "null"
            trace = "null"
            status = 1
        status = {
            "last_exec_time": event.scheduled_run_time,
            "next_run_time": job.next_run_time,
            "job_state": state,
            "job_status": status,
            "traceback": trace,
            "job_id": event.job_id,
        }
        try:
            result = event_loop.run_until_complete(
                self.update_task_status(event_loop, status)
            )
            if isinstance(result, Exception):
                self.logger.exception(result)
        except Exception as err:
            print(err)
            self.logger.exception(err)

    async def update_task_status(self, event_loop, status):
        # TODO: migrate to Prepared statements
        asyncio.set_event_loop(event_loop)
        sql = """UPDATE troc.jobs
        SET last_exec_time='{last_exec_time}', next_run_time='{next_run_time}',
        job_status='{job_status}', job_state='{job_state}', traceback='{traceback}'
        WHERE job_id = '{job_id}';"""
        sentence = sql.format(**status)
        result = None
        options = {
            "server_settings": {
                "application_name": "Flowtask.Scheduler",
                "client_min_messages": "notice",
                "jit": "on",
            },
            "timeout": 360,
        }
        conn = AsyncDB("pg", dsn=default_dsn, loop=event_loop, **options)
        try:
            async with await conn.connection() as conn:
                result, error = await conn.execute(sentence)
                if error:
                    self.logger.error(error)
                return result
        except Exception as err:
            self.logger.exception(err, stack_info=True)

    def job_submitted(self, event):
        try:
            job_id = event.job_id
            job = self.scheduler.get_job(job_id)
        except JobLookupError as exc:
            raise RuntimeError(
                f"Scheduler: There is no such Job {job_id}: {exc}"
            ) from exc
        except Exception as err:
            raise RuntimeError(f"Scheduler: Error on {job_id} {err}") from err
        try:
            job_name = job.name
            now = datetime.now()
            self.logger.info(
                f"Sched: Job {job_name} with id {job_id!s} was submitted @ {now}"
            )
            self._jobs[job_id]["status"] = "dispatched"
        except AttributeError as exc:
            # we don't need to worry about startup_job.
            if event.job_id == "on_startup_job":
                return
            raise RuntimeError(f"Scheduler: Error {exc}") from exc

    def get_stacktrace(self):
        """Returns the full stack trace."""

        type_, value_, traceback_ = sys.exc_info()
        return "".join(traceback.format_exception(type_, value_, traceback_))

    def get_hostname(self):
        """Returns the host name."""
        return socket.gethostname()

    def get_pid(self):
        """Returns the process ID"""
        return os.getpid()

    async def start(self):
        try:
            # asyncio scheduler
            self.scheduler.start()
        except Exception as err:
            raise RuntimeError(f"Error Starting Scheduler {err!r}") from err

    async def shutdown(self, app: web.Application):
        try:
            await self._stop_redis()
        except Exception:
            pass
        try:
            self.scheduler.shutdown(wait=True)
        except AttributeError:
            pass
        except Exception as err:
            self.logger.exception(f"Error Shutting Down The Scheduler {err!r}")
        try:
            if self._connection:
                await self._pool.release(self._connection)
            await self.db.shutdown(app)
        except Exception as err:
            self.logger.exception(
                f"Error on Scheduler Shutdown {err!r}"
            )

    def get_cron_params(self, expression):
        trigger = {
            "year": "*",
            "month": "*",
            "day": "*",
            "week": "*",
            "day_of_week": "*",
            "hour": "*",
            "minute": "*",
            "second": "0",
        }
        return {**trigger, **expression}

    def get_cron_strings(self, expression):
        """Returns cron strings.
        :param dict expression: an array of cron structures.
        :return: cron strings
        :rtype: dict
        """
        trigger = expression["cron"]
        return {
            "month": str(trigger[1]),
            "day": str(trigger[2]),
            "week": str(trigger[3]),
            "day_of_week": str(trigger[4]),
            "hour": str(trigger[5]),
            "minute": str(trigger[6]),
        }

    def get_trigger(self, expression):
        if expression == "cron" or expression == "crontab":
            return CronTrigger
        elif expression == "date":
            return DateTrigger
        elif expression == "interval":
            return IntervalTrigger
        else:
            self.logger.exception(f"Wrong Trigger type: {expression}")
            return None