@kalisio/kdk 2.2.2 → 2.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintignore +1 -0
- package/.github/workflows/main.yaml +67 -0
- package/.gitmodules +3 -0
- package/README.md +1 -2
- package/core/api/hooks/hooks.query.js +15 -2
- package/core/api/marshall.js +35 -4
- package/core/client/api.js +1 -1
- package/core/client/components/KActivity.vue +73 -0
- package/core/client/components/KContent.vue +1 -1
- package/core/client/components/KSponsor.vue +2 -13
- package/core/client/components/account/KDeleteAccountManager.vue +1 -1
- package/core/client/components/account/KPasswordManager.vue +1 -1
- package/core/client/components/action/KAction.vue +294 -0
- package/core/client/components/action/KBugReportAction.vue +37 -0
- package/core/client/components/action/index.js +7 -0
- package/core/client/components/app/KAbout.vue +16 -63
- package/core/client/components/app/KPlatform.vue +1 -1
- package/core/client/components/app/KSettings.vue +14 -14
- package/core/client/components/app/KTour.vue +6 -8
- package/core/client/components/app/KWelcome.vue +1 -1
- package/core/client/components/app/index.js +4 -0
- package/core/client/components/chart/KDataTable.vue +40 -25
- package/core/client/components/chart/KTimeSeriesChart.vue +20 -12
- package/core/client/components/collection/KCard.vue +1 -1
- package/core/client/components/collection/KCardSection.vue +3 -2
- package/core/client/components/collection/KColumn.vue +1 -1
- package/core/client/components/collection/KFilter.vue +6 -1
- package/core/client/components/document/KDocument.vue +83 -0
- package/core/client/components/document/KHtml.vue +23 -0
- package/core/client/components/document/KMarkdown.vue +37 -0
- package/core/client/components/document/index.js +9 -0
- package/core/client/components/form/KForm.vue +6 -2
- package/core/client/components/form/KSelectField.vue +1 -1
- package/core/client/components/index.js +1 -4
- package/core/client/components/input/KOptionsChooser.vue +1 -1
- package/core/client/components/layout/KFab.vue +1 -1
- package/core/client/components/layout/KPage.vue +3 -2
- package/core/client/components/layout/KWindow.vue +1 -1
- package/core/client/components/media/KColorScale.vue +16 -6
- package/core/client/components/screen/KLoginScreen.vue +1 -1
- package/core/client/components/screen/KRegisterScreen.vue +1 -1
- package/core/client/components/team/KAddMember.vue +7 -7
- package/core/client/components/team/KGroupCard.vue +1 -1
- package/core/client/components/team/KMemberFilter.vue +1 -1
- package/core/client/components/team/KTagCard.vue +1 -1
- package/core/client/components/time/KRelativeTimeRanges.vue +16 -1
- package/core/client/components/time/KTimeControl.vue +1 -0
- package/core/client/components/tool/KExportTool.vue +1 -1
- package/core/client/composables/collection.js +1 -1
- package/core/client/composables/index.js +1 -0
- package/core/client/composables/layout.js +50 -0
- package/core/client/composables/session.js +6 -0
- package/core/client/filter.js +9 -6
- package/core/client/guards.js +29 -6
- package/core/client/i18n/core_en.json +4 -1
- package/core/client/i18n/core_fr.json +8 -5
- package/core/client/i18n.js +14 -0
- package/core/client/layout.js +25 -14
- package/core/client/mixins/mixin.base-activity.js +16 -0
- package/core/client/services/index.js +27 -26
- package/core/client/services/local-settings.service.js +2 -3
- package/core/client/units.js +6 -1
- package/core/client/utils/index.js +3 -0
- package/core/client/utils/utils.actions.js +93 -0
- package/core/client/utils/utils.colors.js +1 -1
- package/core/client/utils/utils.data.js +22 -0
- package/core/client/utils/utils.shapes.js +16 -6
- package/coverage/base.css +224 -0
- package/coverage/block-navigation.js +87 -0
- package/coverage/core/api/application.js.html +1870 -0
- package/coverage/core/api/authentication.js.html +742 -0
- package/coverage/core/api/db.js.html +778 -0
- package/coverage/core/api/hooks/hooks.authentication.js.html +313 -0
- package/coverage/core/api/hooks/hooks.authorisations.js.html +1243 -0
- package/coverage/core/api/hooks/hooks.groups.js.html +229 -0
- package/coverage/core/api/hooks/hooks.logger.js.html +163 -0
- package/coverage/core/api/hooks/hooks.model.js.html +955 -0
- package/coverage/core/api/hooks/hooks.organisations.js.html +541 -0
- package/coverage/core/api/hooks/hooks.push.js.html +253 -0
- package/coverage/core/api/hooks/hooks.query.js.html +862 -0
- package/coverage/core/api/hooks/hooks.schemas.js.html +304 -0
- package/coverage/core/api/hooks/hooks.service.js.html +319 -0
- package/coverage/core/api/hooks/hooks.storage.js.html +193 -0
- package/coverage/core/api/hooks/hooks.users.js.html +868 -0
- package/coverage/core/api/hooks/index.html +296 -0
- package/coverage/core/api/hooks/index.js.html +121 -0
- package/coverage/core/api/index.html +191 -0
- package/coverage/core/api/index.js.html +148 -0
- package/coverage/core/api/marshall.js.html +448 -0
- package/coverage/core/api/models/groups.model.mongodb.js.html +109 -0
- package/coverage/core/api/models/index.html +161 -0
- package/coverage/core/api/models/organisations.model.mongodb.js.html +94 -0
- package/coverage/core/api/models/tags.model.mongodb.js.html +115 -0
- package/coverage/core/api/models/users.model.mongodb.js.html +115 -0
- package/coverage/core/api/services/account/account.hooks.js.html +208 -0
- package/coverage/core/api/services/account/account.service.js.html +436 -0
- package/coverage/core/api/services/account/index.html +131 -0
- package/coverage/core/api/services/authorisations/authorisations.hooks.js.html +184 -0
- package/coverage/core/api/services/authorisations/authorisations.service.js.html +502 -0
- package/coverage/core/api/services/authorisations/index.html +131 -0
- package/coverage/core/api/services/databases/databases.hooks.js.html +193 -0
- package/coverage/core/api/services/databases/databases.service.js.html +100 -0
- package/coverage/core/api/services/databases/index.html +131 -0
- package/coverage/core/api/services/groups/groups.hooks.js.html +178 -0
- package/coverage/core/api/services/groups/index.html +116 -0
- package/coverage/core/api/services/import-export/import-export.hooks.js.html +184 -0
- package/coverage/core/api/services/import-export/import-export.service.js.html +118 -0
- package/coverage/core/api/services/import-export/index.html +131 -0
- package/coverage/core/api/services/index.html +116 -0
- package/coverage/core/api/services/index.js.html +499 -0
- package/coverage/core/api/services/mailer/index.html +131 -0
- package/coverage/core/api/services/mailer/mailer.hooks.js.html +190 -0
- package/coverage/core/api/services/mailer/mailer.service.js.html +118 -0
- package/coverage/core/api/services/organisations/index.html +131 -0
- package/coverage/core/api/services/organisations/organisations.hooks.js.html +178 -0
- package/coverage/core/api/services/organisations/organisations.service.js.html +343 -0
- package/coverage/core/api/services/push/index.html +131 -0
- package/coverage/core/api/services/push/push.hooks.js.html +190 -0
- package/coverage/core/api/services/push/push.service.js.html +121 -0
- package/coverage/core/api/services/storage/index.html +131 -0
- package/coverage/core/api/services/storage/storage.hooks.js.html +190 -0
- package/coverage/core/api/services/storage/storage.service.js.html +172 -0
- package/coverage/core/api/services/tags/index.html +116 -0
- package/coverage/core/api/services/tags/tags.hooks.js.html +178 -0
- package/coverage/core/api/services/users/index.html +116 -0
- package/coverage/core/api/services/users/users.hooks.js.html +307 -0
- package/coverage/core/api/utils.js.html +118 -0
- package/coverage/core/common/errors.js.html +88 -0
- package/coverage/core/common/index.html +176 -0
- package/coverage/core/common/index.js.html +115 -0
- package/coverage/core/common/permissions.js.html +1048 -0
- package/coverage/core/common/schema.js.html +190 -0
- package/coverage/core/common/utils.js.html +220 -0
- package/coverage/favicon.png +0 -0
- package/coverage/index.html +491 -0
- package/coverage/lcov-report/base.css +224 -0
- package/coverage/lcov-report/block-navigation.js +87 -0
- package/coverage/lcov-report/core/api/application.js.html +1870 -0
- package/coverage/lcov-report/core/api/authentication.js.html +742 -0
- package/coverage/lcov-report/core/api/db.js.html +778 -0
- package/coverage/lcov-report/core/api/hooks/hooks.authentication.js.html +313 -0
- package/coverage/lcov-report/core/api/hooks/hooks.authorisations.js.html +1243 -0
- package/coverage/lcov-report/core/api/hooks/hooks.groups.js.html +229 -0
- package/coverage/lcov-report/core/api/hooks/hooks.logger.js.html +163 -0
- package/coverage/lcov-report/core/api/hooks/hooks.model.js.html +955 -0
- package/coverage/lcov-report/core/api/hooks/hooks.organisations.js.html +541 -0
- package/coverage/lcov-report/core/api/hooks/hooks.push.js.html +253 -0
- package/coverage/lcov-report/core/api/hooks/hooks.query.js.html +862 -0
- package/coverage/lcov-report/core/api/hooks/hooks.schemas.js.html +304 -0
- package/coverage/lcov-report/core/api/hooks/hooks.service.js.html +319 -0
- package/coverage/lcov-report/core/api/hooks/hooks.storage.js.html +193 -0
- package/coverage/lcov-report/core/api/hooks/hooks.users.js.html +868 -0
- package/coverage/lcov-report/core/api/hooks/index.html +296 -0
- package/coverage/lcov-report/core/api/hooks/index.js.html +121 -0
- package/coverage/lcov-report/core/api/index.html +191 -0
- package/coverage/lcov-report/core/api/index.js.html +148 -0
- package/coverage/lcov-report/core/api/marshall.js.html +448 -0
- package/coverage/lcov-report/core/api/models/groups.model.mongodb.js.html +109 -0
- package/coverage/lcov-report/core/api/models/index.html +161 -0
- package/coverage/lcov-report/core/api/models/organisations.model.mongodb.js.html +94 -0
- package/coverage/lcov-report/core/api/models/tags.model.mongodb.js.html +115 -0
- package/coverage/lcov-report/core/api/models/users.model.mongodb.js.html +115 -0
- package/coverage/lcov-report/core/api/services/account/account.hooks.js.html +208 -0
- package/coverage/lcov-report/core/api/services/account/account.service.js.html +436 -0
- package/coverage/lcov-report/core/api/services/account/index.html +131 -0
- package/coverage/lcov-report/core/api/services/authorisations/authorisations.hooks.js.html +184 -0
- package/coverage/lcov-report/core/api/services/authorisations/authorisations.service.js.html +502 -0
- package/coverage/lcov-report/core/api/services/authorisations/index.html +131 -0
- package/coverage/lcov-report/core/api/services/databases/databases.hooks.js.html +193 -0
- package/coverage/lcov-report/core/api/services/databases/databases.service.js.html +100 -0
- package/coverage/lcov-report/core/api/services/databases/index.html +131 -0
- package/coverage/lcov-report/core/api/services/groups/groups.hooks.js.html +178 -0
- package/coverage/lcov-report/core/api/services/groups/index.html +116 -0
- package/coverage/lcov-report/core/api/services/import-export/import-export.hooks.js.html +184 -0
- package/coverage/lcov-report/core/api/services/import-export/import-export.service.js.html +118 -0
- package/coverage/lcov-report/core/api/services/import-export/index.html +131 -0
- package/coverage/lcov-report/core/api/services/index.html +116 -0
- package/coverage/lcov-report/core/api/services/index.js.html +499 -0
- package/coverage/lcov-report/core/api/services/mailer/index.html +131 -0
- package/coverage/lcov-report/core/api/services/mailer/mailer.hooks.js.html +190 -0
- package/coverage/lcov-report/core/api/services/mailer/mailer.service.js.html +118 -0
- package/coverage/lcov-report/core/api/services/organisations/index.html +131 -0
- package/coverage/lcov-report/core/api/services/organisations/organisations.hooks.js.html +178 -0
- package/coverage/lcov-report/core/api/services/organisations/organisations.service.js.html +343 -0
- package/coverage/lcov-report/core/api/services/push/index.html +131 -0
- package/coverage/lcov-report/core/api/services/push/push.hooks.js.html +190 -0
- package/coverage/lcov-report/core/api/services/push/push.service.js.html +121 -0
- package/coverage/lcov-report/core/api/services/storage/index.html +131 -0
- package/coverage/lcov-report/core/api/services/storage/storage.hooks.js.html +190 -0
- package/coverage/lcov-report/core/api/services/storage/storage.service.js.html +172 -0
- package/coverage/lcov-report/core/api/services/tags/index.html +116 -0
- package/coverage/lcov-report/core/api/services/tags/tags.hooks.js.html +178 -0
- package/coverage/lcov-report/core/api/services/users/index.html +116 -0
- package/coverage/lcov-report/core/api/services/users/users.hooks.js.html +307 -0
- package/coverage/lcov-report/core/api/utils.js.html +118 -0
- package/coverage/lcov-report/core/common/errors.js.html +88 -0
- package/coverage/lcov-report/core/common/index.html +176 -0
- package/coverage/lcov-report/core/common/index.js.html +115 -0
- package/coverage/lcov-report/core/common/permissions.js.html +1048 -0
- package/coverage/lcov-report/core/common/schema.js.html +190 -0
- package/coverage/lcov-report/core/common/utils.js.html +220 -0
- package/coverage/lcov-report/favicon.png +0 -0
- package/coverage/lcov-report/index.html +491 -0
- package/coverage/lcov-report/map/api/hooks/hooks.catalog.js.html +457 -0
- package/coverage/lcov-report/map/api/hooks/hooks.features.js.html +397 -0
- package/coverage/lcov-report/map/api/hooks/hooks.query.js.html +1309 -0
- package/coverage/lcov-report/map/api/hooks/index.html +161 -0
- package/coverage/lcov-report/map/api/hooks/index.js.html +94 -0
- package/coverage/lcov-report/map/api/index.html +131 -0
- package/coverage/lcov-report/map/api/index.js.html +139 -0
- package/coverage/lcov-report/map/api/marshall.js.html +178 -0
- package/coverage/lcov-report/map/api/models/alerts.model.mongodb.js.html +106 -0
- package/coverage/lcov-report/map/api/models/catalog.model.mongodb.js.html +127 -0
- package/coverage/lcov-report/map/api/models/features.model.mongodb.js.html +196 -0
- package/coverage/lcov-report/map/api/models/index.html +161 -0
- package/coverage/lcov-report/map/api/models/projects.model.mongodb.js.html +109 -0
- package/coverage/lcov-report/map/api/services/alerts/alerts.hooks.js.html +274 -0
- package/coverage/lcov-report/map/api/services/alerts/alerts.service.js.html +610 -0
- package/coverage/lcov-report/map/api/services/alerts/index.html +131 -0
- package/coverage/lcov-report/map/api/services/catalog/catalog.hooks.js.html +316 -0
- package/coverage/lcov-report/map/api/services/catalog/index.html +116 -0
- package/coverage/lcov-report/map/api/services/daptiles/daptiles.service.js.html +1510 -0
- package/coverage/lcov-report/map/api/services/daptiles/index.html +116 -0
- package/coverage/lcov-report/map/api/services/features/features.hooks.js.html +241 -0
- package/coverage/lcov-report/map/api/services/features/features.service.js.html +241 -0
- package/coverage/lcov-report/map/api/services/features/index.html +131 -0
- package/coverage/lcov-report/map/api/services/index.html +116 -0
- package/coverage/lcov-report/map/api/services/index.js.html +817 -0
- package/coverage/lcov-report/map/api/services/projects/index.html +116 -0
- package/coverage/lcov-report/map/api/services/projects/projects.hooks.js.html +439 -0
- package/coverage/lcov-report/map/common/dynamic-grid-source.js.html +466 -0
- package/coverage/lcov-report/map/common/errors.js.html +94 -0
- package/coverage/lcov-report/map/common/geotiff-grid-source.js.html +541 -0
- package/coverage/lcov-report/map/common/grid.js.html +1612 -0
- package/coverage/lcov-report/map/common/index.html +371 -0
- package/coverage/lcov-report/map/common/index.js.html +172 -0
- package/coverage/lcov-report/map/common/meteo-model-grid-source.js.html +556 -0
- package/coverage/lcov-report/map/common/moment-utils.js.html +157 -0
- package/coverage/lcov-report/map/common/opendap-grid-source.js.html +868 -0
- package/coverage/lcov-report/map/common/opendap-utils.js.html +826 -0
- package/coverage/lcov-report/map/common/permissions.js.html +124 -0
- package/coverage/lcov-report/map/common/time-based-grid-source.js.html +418 -0
- package/coverage/lcov-report/map/common/tms-utils.js.html +274 -0
- package/coverage/lcov-report/map/common/wcs-grid-source.js.html +364 -0
- package/coverage/lcov-report/map/common/wcs-utils.js.html +586 -0
- package/coverage/lcov-report/map/common/weacast-grid-source.js.html +1033 -0
- package/coverage/lcov-report/map/common/wfs-utils.js.html +574 -0
- package/coverage/lcov-report/map/common/wms-utils.js.html +451 -0
- package/coverage/lcov-report/map/common/wmts-utils.js.html +547 -0
- package/coverage/lcov-report/prettify.css +1 -0
- package/coverage/lcov-report/prettify.js +2 -0
- package/coverage/lcov-report/sort-arrow-sprite.png +0 -0
- package/coverage/lcov-report/sorter.js +196 -0
- package/coverage/lcov.info +11128 -0
- package/coverage/map/api/hooks/hooks.catalog.js.html +457 -0
- package/coverage/map/api/hooks/hooks.features.js.html +397 -0
- package/coverage/map/api/hooks/hooks.query.js.html +1309 -0
- package/coverage/map/api/hooks/index.html +161 -0
- package/coverage/map/api/hooks/index.js.html +94 -0
- package/coverage/map/api/index.html +131 -0
- package/coverage/map/api/index.js.html +139 -0
- package/coverage/map/api/marshall.js.html +178 -0
- package/coverage/map/api/models/alerts.model.mongodb.js.html +106 -0
- package/coverage/map/api/models/catalog.model.mongodb.js.html +127 -0
- package/coverage/map/api/models/features.model.mongodb.js.html +196 -0
- package/coverage/map/api/models/index.html +161 -0
- package/coverage/map/api/models/projects.model.mongodb.js.html +109 -0
- package/coverage/map/api/services/alerts/alerts.hooks.js.html +274 -0
- package/coverage/map/api/services/alerts/alerts.service.js.html +610 -0
- package/coverage/map/api/services/alerts/index.html +131 -0
- package/coverage/map/api/services/catalog/catalog.hooks.js.html +316 -0
- package/coverage/map/api/services/catalog/index.html +116 -0
- package/coverage/map/api/services/daptiles/daptiles.service.js.html +1510 -0
- package/coverage/map/api/services/daptiles/index.html +116 -0
- package/coverage/map/api/services/features/features.hooks.js.html +241 -0
- package/coverage/map/api/services/features/features.service.js.html +241 -0
- package/coverage/map/api/services/features/index.html +131 -0
- package/coverage/map/api/services/index.html +116 -0
- package/coverage/map/api/services/index.js.html +817 -0
- package/coverage/map/api/services/projects/index.html +116 -0
- package/coverage/map/api/services/projects/projects.hooks.js.html +439 -0
- package/coverage/map/common/dynamic-grid-source.js.html +466 -0
- package/coverage/map/common/errors.js.html +94 -0
- package/coverage/map/common/geotiff-grid-source.js.html +541 -0
- package/coverage/map/common/grid.js.html +1612 -0
- package/coverage/map/common/index.html +371 -0
- package/coverage/map/common/index.js.html +172 -0
- package/coverage/map/common/meteo-model-grid-source.js.html +556 -0
- package/coverage/map/common/moment-utils.js.html +157 -0
- package/coverage/map/common/opendap-grid-source.js.html +868 -0
- package/coverage/map/common/opendap-utils.js.html +826 -0
- package/coverage/map/common/permissions.js.html +124 -0
- package/coverage/map/common/time-based-grid-source.js.html +418 -0
- package/coverage/map/common/tms-utils.js.html +274 -0
- package/coverage/map/common/wcs-grid-source.js.html +364 -0
- package/coverage/map/common/wcs-utils.js.html +586 -0
- package/coverage/map/common/weacast-grid-source.js.html +1033 -0
- package/coverage/map/common/wfs-utils.js.html +574 -0
- package/coverage/map/common/wms-utils.js.html +451 -0
- package/coverage/map/common/wmts-utils.js.html +547 -0
- package/coverage/prettify.css +1 -0
- package/coverage/prettify.js +2 -0
- package/coverage/sort-arrow-sprite.png +0 -0
- package/coverage/sorter.js +196 -0
- package/coverage/tmp/coverage-137435-1719398750767-0.json +1 -0
- package/coverage/tmp/coverage-137447-1719398750752-0.json +1 -0
- package/coverage/tmp/coverage-137458-1719398750740-0.json +1 -0
- package/coverage/tmp/coverage-137470-1719398750728-0.json +1 -0
- package/coverage/tmp/coverage-137477-1719398750691-0.json +1 -0
- package/map/api/hooks/hooks.query.js +5 -2
- package/map/api/services/catalog/catalog.hooks.js +4 -5
- package/map/client/cesium/utils/index.js +2 -1
- package/map/client/cesium/utils/utils.cesium.js +8 -0
- package/map/client/cesium/utils/utils.features.js +2 -2
- package/map/client/cesium/utils/utils.style.js +19 -17
- package/map/client/components/KCompass.vue +25 -3
- package/map/client/components/KEditLayerData.vue +1 -1
- package/map/client/components/KPositionIndicator.vue +1 -1
- package/map/client/components/catalog/KConnectLayer.vue +2 -2
- package/map/client/components/catalog/KCreateView.vue +2 -2
- package/map/client/components/form/KDirectionField.vue +4 -0
- package/map/client/components/form/KOwsLayerField.vue +4 -4
- package/map/client/components/form/KOwsServiceField.vue +3 -4
- package/map/client/components/legend/KLegend.vue +13 -15
- package/map/client/components/tools/KGeolocateTool.vue +1 -1
- package/map/client/components/widget/KStackableTimeSeries.vue +3 -0
- package/map/client/composables/highlight.js +4 -1
- package/map/client/elevation-utils.js +2 -2
- package/map/client/i18n/map_en.json +3 -1
- package/map/client/i18n/map_fr.json +3 -1
- package/map/client/mixins/globe/mixin.base-globe.js +121 -80
- package/map/client/mixins/globe/mixin.file-layers.js +2 -2
- package/map/client/mixins/globe/mixin.geojson-layers.js +24 -19
- package/map/client/mixins/globe/mixin.globe-activity.js +3 -3
- package/map/client/mixins/globe/mixin.opendap-layers.js +3 -3
- package/map/client/mixins/globe/mixin.style.js +5 -5
- package/map/client/mixins/globe/mixin.tooltip.js +5 -3
- package/map/client/mixins/map/mixin.base-map.js +42 -4
- package/map/client/mixins/map/mixin.canvas-layers.js +0 -1
- package/map/client/mixins/map/mixin.geojson-layers.js +10 -5
- package/map/client/mixins/mixin.activity.js +2 -2
- package/map/client/mixins/mixin.feature-selection.js +7 -5
- package/map/client/mixins/mixin.levels.js +1 -1
- package/map/client/utils/utils.catalog.js +15 -0
- package/map/client/utils/utils.location.js +2 -1
- package/map/client/utils/utils.style.js +1 -1
- package/map/common/geotiff-grid-source.js +5 -3
- package/map/common/grid.js +2 -2
- package/map/common/meteo-model-grid-source.js +1 -1
- package/map/common/time-based-grid-source.js +1 -1
- package/map/common/wmts-utils.js +11 -11
- package/package.json +12 -8
- package/scripts/build_docs.sh +37 -0
- package/scripts/init_runner.sh +30 -0
- package/scripts/kash/.github/workflows/run_tests.yaml +33 -0
- package/scripts/kash/README.md +2 -0
- package/scripts/kash/kash.sh +1657 -0
- package/scripts/kash/scripts/run_tests.sh +151 -0
- package/scripts/run_tests.sh +48 -0
- package/scripts/setup_workspace.sh +42 -0
- package/test/api/core/hooks.test.js +31 -0
- package/test/api/core/test-log-2023-12-19.log +7 -0
- package/test/api/core/test-log-2024-01-04.log +14 -0
- package/test/api/core/test-log-2024-05-14.log +6 -0
- package/test/api/core/test-log-2024-06-06.log +23 -0
- package/test/api/core/test-log-2024-06-26.log +25 -0
- package/test/api/core/test-log-2024-06-28.log +2 -0
- package/test/api/map/grid-sources.test.js +3 -1
- package/test/api/map/hooks.test.js +58 -12
- package/test/api/map/test-log-2023-11-24.log +121 -0
- package/test/api/map/test-log-2023-12-12.log +29 -0
- package/test/api/map/test-log-2023-12-13.log +5 -0
- package/test/api/map/test-log-2024-01-04.log +2 -0
- package/test/api/map/test-log-2024-01-11.log +1 -0
- package/test/api/map/test-log-2024-01-25.log +19 -0
- package/test/api/map/test-log-2024-06-06.log +39 -0
- package/test/client/core/collection.js +2 -2
- package/test/client/core/dialogs.js +13 -0
- package/test/client/core/index.js +6 -5
- package/test/client/core/layout.js +1 -13
- package/test/client/core/runner.js +41 -20
- package/test/client/core/screens.js +6 -0
- package/test/client/core/utils.js +23 -19
- package/.travis.doc.sh +0 -8
- package/.travis.test.sh +0 -72
- package/core/client/components/KAction.vue +0 -393
- package/core/client/components/KBlock.vue +0 -67
- package/core/client/components/app/KTerms.vue +0 -41
|
@@ -0,0 +1,1657 @@
|
|
|
1
|
+
#!/usr/bin/env bash
|
|
2
|
+
|
|
3
|
+
### Variables provided by this script
|
|
4
|
+
### - TMP_DIR: a path where to write temp files
|
|
5
|
+
### - OS_ID: debian or ubuntu or alpine ...
|
|
6
|
+
### - OS_VERSION:
|
|
7
|
+
### - CI: true or false
|
|
8
|
+
### - CI_ID: github or gitlab or travis or empty (CI = false)
|
|
9
|
+
|
|
10
|
+
### Docs for CI systems:
|
|
11
|
+
### - github actions: https://docs.github.com/en/actions/learn-github-actions/variables#default-environment-variables
|
|
12
|
+
### - gitlab ci: https://docs.gitlab.com/ee/ci/variables/predefined_variables.html
|
|
13
|
+
### - travis ci: https://docs.travis-ci.com/user/environment-variables/#default-environment-variables
|
|
14
|
+
|
|
15
|
+
### Host detection
|
|
16
|
+
###
|
|
17
|
+
|
|
18
|
+
. /etc/os-release
|
|
19
|
+
|
|
20
|
+
OS_ID=$ID
|
|
21
|
+
OS_VERSION=$VERSION_ID
|
|
22
|
+
|
|
23
|
+
echo "Running on ${OS_ID}-${OS_VERSION}"
|
|
24
|
+
|
|
25
|
+
CI=false
|
|
26
|
+
CI_ID=
|
|
27
|
+
|
|
28
|
+
if [ "${GITHUB_ACTIONS:-}" = true ]; then
|
|
29
|
+
CI_ID="github"
|
|
30
|
+
|
|
31
|
+
# Add ~/.local/bin to PATH
|
|
32
|
+
mkdir -p "$HOME/.local/bin"
|
|
33
|
+
export PATH=$PATH:$HOME/.local/bin
|
|
34
|
+
elif [ "${GITLAB_CI:-}" = true ]; then
|
|
35
|
+
CI_ID="gitlab"
|
|
36
|
+
|
|
37
|
+
# Add ~/.local/bin to PATH
|
|
38
|
+
mkdir -p "$HOME/.local/bin"
|
|
39
|
+
export PATH=$PATH:$HOME/.local/bin
|
|
40
|
+
elif [ "${TRAVIS:-}" = true ]; then
|
|
41
|
+
CI_ID="travis"
|
|
42
|
+
fi
|
|
43
|
+
|
|
44
|
+
if [ -n "$CI_ID" ]; then
  CI=true
  echo "Running in CI mode ($CI_ID)..."

  # Make sure we have the requirements to run kash functions (curl, git, sha256sum)
  set +e
  command -v curl >/dev/null 2>&1 && command -v git >/dev/null 2>&1 && command -v sha256sum >/dev/null 2>&1
  RC=$?
  set -e

  if [ "$RC" -ne 0 ]; then
    # Something is missing: install the base requirements with the distro package manager
    case "$OS_ID" in
      debian | ubuntu)
        if [ "$(id -u)" -eq 0 ]; then
          apt-get update && apt-get --no-install-recommends --yes install sudo curl ca-certificates coreutils git
        else
          sudo apt-get update && sudo apt-get --no-install-recommends --yes install curl ca-certificates coreutils git
        fi
        ;;
      alpine)
        apk update && apk add curl ca-certificates coreutils git
        ;;
      *)
        ;;
    esac
  fi

  # Emulate development k-mongo when running on CI
  cat <<EOF > ~/.local/bin/k-mongo
#!/usr/bin/env bash
mongod --dbpath /var/lib/mongo --logpath /var/log/mongodb/mongod.log --fork --port 27017
EOF
  chmod a+x ~/.local/bin/k-mongo

  # Most CI services understand ANSI colors
  export TERM=xterm-color

  # Allow nvm to work on alpine distro (downloads an unofficial build targeting musl libc)
  # See:
  # - https://github.com/nvm-sh/nvm/issues/1102#issuecomment-1112898778
  # - https://github.com/nvm-sh/nvm/pull/3212
  if [ "$OS_ID" = "alpine" ]; then
    export NVM_NODEJS_ORG_MIRROR="https://unofficial-builds.nodejs.org/download/release" # Set up unofficial builds
  fi
fi
|
|
89
|
+
|
|
90
|
+
# If nvm has been installed for this user, bring its functions into this script
if [ -d "$HOME/.nvm" ]; then
  . "$HOME/.nvm/nvm.sh"
fi
|
|
94
|
+
|
|
95
|
+
# Define a TMP_DIR to operate with temp files
# RUNNER_TEMP is Github Action specific; otherwise create a private temp folder
TMP_DIR="${RUNNER_TEMP:-}"
if [ -z "$TMP_DIR" ]; then
  TMP_DIR="$(mktemp -d -p "${XDG_RUNTIME_DIR:-}" kalisio.XXXXXX)"
fi
|
|
101
|
+
|
|
102
|
+
### Requirements
###

# Pinned tool versions — update from the upstream release pages listed above each entry

# https://github.com/mikefarah/yq/releases
YQ_VERSION=4.40.5
# https://github.com/FiloSottile/age/releases
AGE_VERSION=1.1.1
# https://github.com/getsops/sops/releases
SOPS_VERSION=3.8.1

# https://github.com/kubernetes/kubernetes/tree/master/CHANGELOG
KUBECTL_VERSION=1.25.16
# https://github.com/helm/helm/releases
HELM_VERSION=3.11.3
# https://github.com/helmfile/helmfile/releases
HELMFILE_VERSION=0.153.0
# https://github.com/derailed/k9s/releases
K9S_VERSION=0.32.4

# https://github.com/nvm-sh/nvm/releases
NVM_VERSION=0.39.7
# https://nodejs.org/en/about/previous-releases#looking-for-latest-release-of-a-version-branch
NODE16_VERSION=16.20.2
NODE18_VERSION=18.19.1
NODE20_VERSION=20.11.1

# https://www.mongodb.com/try/download/community
MONGODB4_VERSION=4.4.28
MONGODB5_VERSION=5.0.24
MONGODB6_VERSION=6.0.13
MONGODB7_VERSION=7.0.5
|
|
133
|
+
|
|
134
|
+
# Install yq in ~/.local/bin
# Arg1: a writable folder where to write downloaded files
install_yq() {
  local DL_PATH="$1/yq"
  if [ ! -d "$DL_PATH" ]; then
    mkdir -p "$DL_PATH" && cd "$DL_PATH"
    curl -OLsS https://github.com/mikefarah/yq/releases/download/v${YQ_VERSION}/yq_linux_amd64.tar.gz
    # checksum has to be extracted from custom file ...
    curl -OLsS https://github.com/mikefarah/yq/releases/download/v${YQ_VERSION}/checksums
    curl -OLsS https://github.com/mikefarah/yq/releases/download/v${YQ_VERSION}/checksums_hashes_order
    curl -OLsS https://github.com/mikefarah/yq/releases/download/v${YQ_VERSION}/extract-checksum.sh
    chmod u+x extract-checksum.sh
    ./extract-checksum.sh "SHA-256" "yq_linux_amd64.tar.gz" | awk '{ print $2 " " $1}' | sha256sum --check
    cd ~-
  fi
  # Unpack the (verified) archive and install the binary
  cd "$DL_PATH"
  tar xf yq_linux_amd64.tar.gz
  mv yq_linux_amd64 ~/.local/bin/yq
  chmod u+x ~/.local/bin/yq
  cd ~-
}
|
|
156
|
+
|
|
157
|
+
# Call this to ensure yq is available, installing it on demand
ensure_yq() {
  set +e
  command -v yq >/dev/null 2>&1
  local STATUS=$?
  set -e

  if [ "$STATUS" -ne 0 ]; then
    mkdir -p "$TMP_DIR/dl"
    install_yq "$TMP_DIR/dl"
  fi
}
|
|
169
|
+
|
|
170
|
+
# Install age (and age-keygen) in ~/.local/bin
# Arg1: a writable folder where to write downloaded files
install_age() {
  local DL_PATH="$1/age"
  if [ ! -d "$DL_PATH" ]; then
    mkdir -p "$DL_PATH" && cd "$DL_PATH"
    curl -OLsS https://github.com/FiloSottile/age/releases/download/v${AGE_VERSION}/age-v${AGE_VERSION}-linux-amd64.tar.gz
    # no checksum ...
    cd ~-
  fi
  cd "$DL_PATH"
  tar xf age-v${AGE_VERSION}-linux-amd64.tar.gz
  cp age/age ~/.local/bin
  cp age/age-keygen ~/.local/bin
  cd ~-
}
|
|
187
|
+
|
|
188
|
+
# Call this to ensure age is available, installing it on demand
ensure_age() {
  set +e
  command -v age >/dev/null 2>&1
  local STATUS=$?
  set -e

  if [ "$STATUS" -ne 0 ]; then
    mkdir -p "$TMP_DIR/dl"
    install_age "$TMP_DIR/dl"
  fi
}
|
|
200
|
+
|
|
201
|
+
# Install sops in ~/.local/bin
# Arg1: a writable folder where to write downloaded files
install_sops() {
  local DL_PATH="$1/sops"
  if [ ! -d "$DL_PATH" ]; then
    mkdir -p "$DL_PATH" && cd "$DL_PATH"
    curl -OLsS https://github.com/getsops/sops/releases/download/v${SOPS_VERSION}/sops-v${SOPS_VERSION}.linux.amd64
    curl -OLsS https://github.com/getsops/sops/releases/download/v${SOPS_VERSION}/sops-v${SOPS_VERSION}.checksums.txt
    sha256sum --ignore-missing --quiet -c sops-v${SOPS_VERSION}.checksums.txt
    cd ~-
  fi
  cd "$DL_PATH"
  cp sops-v${SOPS_VERSION}.linux.amd64 ~/.local/bin/sops
  chmod u+x ~/.local/bin/sops
  cd ~-
}
|
|
218
|
+
|
|
219
|
+
# Call this to ensure sops is available, installing it on demand
ensure_sops() {
  set +e
  command -v sops >/dev/null 2>&1
  local STATUS=$?
  set -e

  if [ "$STATUS" -ne 0 ]; then
    mkdir -p "$TMP_DIR/dl"
    install_sops "$TMP_DIR/dl"
  fi
}
|
|
231
|
+
|
|
232
|
+
# Install code climate test reporter in ~/.local/bin
# Arg1: a writable folder where to write downloaded files
install_cc_test_reporter() {
  local DL_PATH="$1/cc"
  if [ ! -d "$DL_PATH" ]; then
    mkdir -p "$DL_PATH" && cd "$DL_PATH"
    curl -OLsS https://codeclimate.com/downloads/test-reporter/test-reporter-latest-linux-amd64
    curl -OLsS https://codeclimate.com/downloads/test-reporter/test-reporter-latest-linux-amd64.sha256
    sha256sum --ignore-missing --quiet -c test-reporter-latest-linux-amd64.sha256
    cd ~-
  fi
  cd "$DL_PATH"
  cp test-reporter-latest-linux-amd64 ~/.local/bin/cc-test-reporter
  chmod +x ~/.local/bin/cc-test-reporter
  cd ~-
}
|
|
249
|
+
|
|
250
|
+
# Sends test coverage (coverage/lcov.info) to code climate
# Arg1: code climate identifier for authentication
send_coverage_to_cc() {
  local REPORTER_ID="$1"
  ~/.local/bin/cc-test-reporter format-coverage -t lcov coverage/lcov.info
  ~/.local/bin/cc-test-reporter upload-coverage -r "$REPORTER_ID"
}
|
|
257
|
+
|
|
258
|
+
# Make sure nvm is installed
# Arg1: a writable folder where to write downloaded files
# NOTE: also define 'yarn' as a default package, ie. it'll be automatically
# installed with each node version
install_nvm() {
  local DL_PATH="$1/nvm"

  # Node builds for alpine x64/musl required libstdc++
  # See. https://github.com/nvm-sh/nvm/issues/1102#issuecomment-550572252
  if [ "$OS_ID" = "alpine" ] && [ "$CI" = true ]; then
    apk add libstdc++
  fi

  mkdir -p "$DL_PATH" && cd "$DL_PATH"
  curl -OLsS https://raw.githubusercontent.com/nvm-sh/nvm/v${NVM_VERSION}/install.sh
  # Make sure current user has a .bashrc where nvm installer will setup things since we mandate bash as execution shell
  if [ ! -f "$HOME/.bashrc" ]; then touch "$HOME/.bashrc"; fi
  bash ./install.sh
  # We always use yarn as package manager, so tell nvm to install it with every node installation
  # cf. https://github.com/nvm-sh/nvm?tab=readme-ov-file#default-global-packages-from-file-while-installing
  bash -i -c 'echo yarn >> $NVM_DIR/default-packages'
  cd ~-
}
|
|
282
|
+
|
|
283
|
+
# Install the pinned node 16 release, requires nvm to be installed
install_node16() {
  bash -i -c "nvm install $NODE16_VERSION"
}
|
|
287
|
+
|
|
288
|
+
# Install the pinned node 18 release, requires nvm to be installed
install_node18() {
  bash -i -c "nvm install $NODE18_VERSION"
}
|
|
292
|
+
|
|
293
|
+
# Install the pinned node 20 release, requires nvm to be installed
install_node20() {
  bash -i -c "nvm install $NODE20_VERSION"
}
|
|
297
|
+
|
|
298
|
+
# Install mongo4 in ~/.local/bin/mongo4
# Arg1: a writable folder where to write downloaded files
# NOTE: each mongo version is installed in a separate folder to support multiple versions
install_mongo4() {
  local DL_ROOT=$1
  local DL_PATH="$DL_ROOT/mongo4"
  mkdir -p "$DL_PATH" && cd "$DL_PATH"
  # Mongo 4 links against openssl 1.1 which recent distros no longer ship
  case "$OS_ID" in
    debian)
      curl -OLsS http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl1.1_1.1.1w-0+deb11u1_amd64.deb
      # Fix: the env assignment must prefix the command to be visible to dpkg
      # (was 'DEBIAN_FRONTEND=noninteractive && dpkg -i ...' which only set an unexported shell var)
      DEBIAN_FRONTEND=noninteractive dpkg -i libssl1.1_1.1.1w-0+deb11u1_amd64.deb
      local MONGODB_SUFFIX=debian10-${MONGODB4_VERSION}
      ;;
    ubuntu)
      # NOTE: this assumes ubuntu 22.04
      # Fix: '-OLss' -> '-OLsS' so curl stays silent but still reports errors
      curl -OLsS http://launchpadlibrarian.net/715615335/libssl1.1_1.1.1f-1ubuntu2.22_amd64.deb
      sudo DEBIAN_FRONTEND=noninteractive dpkg -i libssl1.1_1.1.1f-1ubuntu2.22_amd64.deb
      local MONGODB_SUFFIX=ubuntu2004-${MONGODB4_VERSION}
      ;;
    *)
  esac

  curl -OLsS "https://fastdl.mongodb.org/linux/mongodb-linux-x86_64-${MONGODB_SUFFIX}.tgz"
  tar xf "mongodb-linux-x86_64-${MONGODB_SUFFIX}.tgz"
  mkdir -p ~/.local/bin/mongo4
  cp -fR "mongodb-linux-x86_64-${MONGODB_SUFFIX}/bin/mongod" ~/.local/bin/mongo4
  # Data and log dirs matching the k-mongo launcher expectations
  sudo mkdir -p /var/lib/mongo4 && sudo mkdir -p /var/log/mongodb4
  sudo chmod a+rwx /var/lib/mongo4 && sudo chmod a+rwx /var/log/mongodb4
  cd ~-
}
|
|
328
|
+
|
|
329
|
+
# Install mongo5 in ~/.local/bin/mongo5
# Arg1: a writable folder where to write downloaded files
# NOTE: each mongo version is installed in a separate folder to support multiple versions
install_mongo5() {
  local DL_ROOT=$1
  local DL_PATH="$DL_ROOT/mongo5"
  mkdir -p "$DL_PATH" && cd "$DL_PATH"
  # Mongo 5 links against openssl 1.1 which recent distros no longer ship
  case "$OS_ID" in
    debian)
      curl -OLsS http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl1.1_1.1.1w-0+deb11u1_amd64.deb
      # Fix: the env assignment must prefix the command to be visible to dpkg
      # (was 'DEBIAN_FRONTEND=noninteractive && dpkg -i ...' which only set an unexported shell var)
      DEBIAN_FRONTEND=noninteractive dpkg -i libssl1.1_1.1.1w-0+deb11u1_amd64.deb
      local MONGODB_SUFFIX=debian11-${MONGODB5_VERSION}
      ;;
    ubuntu)
      # NOTE: this assumes ubuntu 22.04
      # Fix: '-OLss' -> '-OLsS' so curl stays silent but still reports errors
      curl -OLsS http://launchpadlibrarian.net/715615335/libssl1.1_1.1.1f-1ubuntu2.22_amd64.deb
      sudo DEBIAN_FRONTEND=noninteractive dpkg -i libssl1.1_1.1.1f-1ubuntu2.22_amd64.deb
      local MONGODB_SUFFIX=ubuntu2004-${MONGODB5_VERSION}
      ;;
    *)
  esac

  curl -OLsS "https://fastdl.mongodb.org/linux/mongodb-linux-x86_64-${MONGODB_SUFFIX}.tgz"
  tar xf "mongodb-linux-x86_64-${MONGODB_SUFFIX}.tgz"
  mkdir -p ~/.local/bin/mongo5
  cp -fR "mongodb-linux-x86_64-${MONGODB_SUFFIX}/bin/mongod" ~/.local/bin/mongo5
  # Data and log dirs matching the k-mongo launcher expectations
  sudo mkdir -p /var/lib/mongo5 && sudo mkdir -p /var/log/mongodb5
  sudo chmod a+rwx /var/lib/mongo5 && sudo chmod a+rwx /var/log/mongodb5
  cd ~-
}
|
|
359
|
+
|
|
360
|
+
# Install mongo6 in ~/.local/bin/mongo6
# Arg1: a writable folder where to write downloaded files
# NOTE: each mongo version is installed in a separate folder to support multiple versions
install_mongo6() {
  local DL_ROOT=$1
  local DL_PATH="$DL_ROOT/mongo6"
  mkdir -p "$DL_PATH" && cd "$DL_PATH"
  case "$OS_ID" in
    debian)
      # Debian builds still need openssl 1.1
      curl -OLsS http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl1.1_1.1.1w-0+deb11u1_amd64.deb
      # Fix: the env assignment must prefix the command to be visible to dpkg
      # (was 'DEBIAN_FRONTEND=noninteractive && dpkg -i ...' which only set an unexported shell var)
      DEBIAN_FRONTEND=noninteractive dpkg -i libssl1.1_1.1.1w-0+deb11u1_amd64.deb
      local MONGODB_SUFFIX=debian11-${MONGODB6_VERSION}
      ;;
    ubuntu)
      local MONGODB_SUFFIX=ubuntu2204-${MONGODB6_VERSION}
      ;;
    *)
  esac

  curl -OLsS "https://fastdl.mongodb.org/linux/mongodb-linux-x86_64-${MONGODB_SUFFIX}.tgz"
  tar xf "mongodb-linux-x86_64-${MONGODB_SUFFIX}.tgz"
  mkdir -p ~/.local/bin/mongo6
  cp -fR "mongodb-linux-x86_64-${MONGODB_SUFFIX}/bin/mongod" ~/.local/bin/mongo6
  # Data and log dirs matching the k-mongo launcher expectations
  sudo mkdir -p /var/lib/mongo6 && sudo mkdir -p /var/log/mongodb6
  sudo chmod a+rwx /var/lib/mongo6 && sudo chmod a+rwx /var/log/mongodb6
  cd ~-
}
|
|
387
|
+
|
|
388
|
+
# Install mongo7 in ~/.local/bin/mongo7
# Arg1: a writable folder where to write downloaded files
# NOTE: each mongo version is installed in a separate folder to support multiple versions
install_mongo7() {
  local DL_ROOT=$1
  local DL_PATH="$DL_ROOT/mongo7"
  mkdir -p "$DL_PATH" && cd "$DL_PATH"
  case "$OS_ID" in
    debian)
      # Debian builds still need openssl 1.1
      curl -OLsS http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl1.1_1.1.1w-0+deb11u1_amd64.deb
      # Fix: the env assignment must prefix the command to be visible to dpkg
      # (was 'DEBIAN_FRONTEND=noninteractive && dpkg -i ...' which only set an unexported shell var)
      DEBIAN_FRONTEND=noninteractive dpkg -i libssl1.1_1.1.1w-0+deb11u1_amd64.deb
      local MONGODB_SUFFIX=debian11-${MONGODB7_VERSION}
      ;;
    ubuntu)
      local MONGODB_SUFFIX=ubuntu2204-${MONGODB7_VERSION}
      ;;
    *)
  esac

  curl -OLsS "https://fastdl.mongodb.org/linux/mongodb-linux-x86_64-${MONGODB_SUFFIX}.tgz"
  tar xf "mongodb-linux-x86_64-${MONGODB_SUFFIX}.tgz"
  mkdir -p ~/.local/bin/mongo7
  cp -fR "mongodb-linux-x86_64-${MONGODB_SUFFIX}/bin/mongod" ~/.local/bin/mongo7
  # Data and log dirs matching the k-mongo launcher expectations
  sudo mkdir -p /var/lib/mongo7 && sudo mkdir -p /var/log/mongodb7
  sudo chmod a+rwx /var/lib/mongo7 && sudo chmod a+rwx /var/log/mongodb7
  cd ~-
}
|
|
415
|
+
|
|
416
|
+
# Install kubectl in ~/.local/bin
# Expected args:
# 1. a writable folder where to write downloaded files
install_kubectl() {
  local DL_ROOT=$1
  local DL_PATH="$DL_ROOT/kubectl"
  if [ ! -d "$DL_PATH" ]; then
    mkdir -p "$DL_PATH" && cd "$DL_PATH"
    curl -OLsS https://dl.k8s.io/release/v${KUBECTL_VERSION}/bin/linux/amd64/kubectl && \
    curl -OLsS https://dl.k8s.io/release/v${KUBECTL_VERSION}/bin/linux/amd64/kubectl.sha256 && \
    echo "$(cat kubectl.sha256) kubectl" | sha256sum --check
    cd ~-
  fi
  cd "$DL_PATH"
  mv kubectl ~/.local/bin/kubectl
  # Fix: was 'chmod u+x ~/.local/bin/yq' (copy/paste from install_yq) — make the
  # freshly installed kubectl binary executable, not yq
  chmod u+x ~/.local/bin/kubectl
  cd ~-
}
|
|
434
|
+
|
|
435
|
+
# Install helm in ~/.local/bin
# Expected args:
# 1. a writable folder where to write downloaded files
install_helm() {
  local DL_PATH="$1/helm"
  if [ ! -d "$DL_PATH" ]; then
    mkdir -p "$DL_PATH" && cd "$DL_PATH"
    curl -OLsS https://get.helm.sh/helm-v${HELM_VERSION}-linux-amd64.tar.gz
    curl -OLsS https://get.helm.sh/helm-v${HELM_VERSION}-linux-amd64.tar.gz.sha256sum
    sha256sum --ignore-missing --quiet -c helm-v${HELM_VERSION}-linux-amd64.tar.gz.sha256sum
    cd ~-
  fi
  cd "$DL_PATH"
  tar xf helm-v${HELM_VERSION}-linux-amd64.tar.gz
  cp linux-amd64/helm ~/.local/bin
  cd ~-
}
|
|
453
|
+
|
|
454
|
+
# Install helmfile in ~/.local/bin
# Expected args:
# 1. a writable folder where to write downloaded files
install_helmfile() {
  local DL_PATH="$1/helmfile"
  if [ ! -d "$DL_PATH" ]; then
    mkdir -p "$DL_PATH" && cd "$DL_PATH"
    curl -OLsS https://github.com/helmfile/helmfile/releases/download/v${HELMFILE_VERSION}/helmfile_${HELMFILE_VERSION}_linux_amd64.tar.gz
    curl -OLsS https://github.com/helmfile/helmfile/releases/download/v${HELMFILE_VERSION}/helmfile_${HELMFILE_VERSION}_checksums.txt
    sha256sum --ignore-missing --quiet -c helmfile_${HELMFILE_VERSION}_checksums.txt
    cd ~-
  fi
  cd "$DL_PATH"
  tar xf helmfile_${HELMFILE_VERSION}_linux_amd64.tar.gz
  cp helmfile ~/.local/bin
  cd ~-
}
|
|
472
|
+
|
|
473
|
+
# Install k9s in ~/.local/bin
# Expected args:
# 1. a writable folder where to write downloaded files
install_k9s() {
  local DL_PATH="$1/k9s"
  if [ ! -d "$DL_PATH" ]; then
    mkdir -p "$DL_PATH" && cd "$DL_PATH"
    curl -OLsS https://github.com/derailed/k9s/releases/download/v${K9S_VERSION}/k9s_Linux_amd64.tar.gz
    curl -OLsS https://github.com/derailed/k9s/releases/download/v${K9S_VERSION}/checksums.sha256
    sha256sum --ignore-missing --quiet -c checksums.sha256
    cd ~-
  fi
  cd "$DL_PATH"
  tar xf k9s_Linux_amd64.tar.gz
  cp k9s ~/.local/bin
  cd ~-
}
|
|
491
|
+
|
|
492
|
+
# Install listed requirements, dispatching each one to its install_<req> function
# Usage: install_reqs mongo7 nvm node16 yq
install_reqs() {
  mkdir -p "$TMP_DIR/dl"

  for REQ in "$@"; do
    echo "Installing $REQ ..."
    install_"$REQ" "$TMP_DIR/dl"
  done
}
|
|
502
|
+
|
|
503
|
+
# Select which node version is active (ie. which one is started when calling node)
use_node() {
  local WANTED="$1"

  nvm use "$WANTED"
}
|
|
509
|
+
|
|
510
|
+
# Select which mongo version is active (ie. which one is started when calling mongod)
use_mongo() {
  local VERSION="$1"

  if [ "$CI" = true ]; then
    # CI k-mongo will use whatever binary is in $HOME/.local/bin:
    # point the binary ...
    ln -sf "$HOME/.local/bin/mongo$VERSION/mongod" ~/.local/bin
    # ... and the working dirs at the selected version
    sudo ln -sf "/var/lib/mongo$VERSION" /var/lib/mongo
    sudo ln -sf "/var/log/mongodb$VERSION" /var/log/mongodb

    echo "Now using mongo $VERSION:"
    "$HOME/.local/bin/mongod" --version
  else
    # Developer's k-mongo will use MONGO_VERSION
    export MONGO_VERSION="$VERSION"
  fi
}
|
|
530
|
+
|
|
531
|
+
### Utils
|
|
532
|
+
###
|
|
533
|
+
|
|
534
|
+
# Extract a field from a json file using yq (installed on demand)
# Arg1: the json file
# Arg2: the field path (without leading dot)
get_json_value() {
  local SRC="$1"
  local FIELD="$2"

  ensure_yq
  yq --output-format=yaml ".$FIELD" "$SRC"
}
|
|
541
|
+
|
|
542
|
+
### Git
|
|
543
|
+
###
|
|
544
|
+
|
|
545
|
+
# Returns the current git tag (or empty string if not on a tag)
# Arg1: the repository root
# NOTE: CI env vars (GITHUB_REF_NAME / CI_COMMIT_TAG / TRAVIS_TAG) could be used
# instead, but asking git directly behaves the same on every CI
get_git_tag() {
  local REPO_ROOT="$1"
  cd "$REPO_ROOT"
  git tag --points-at
  cd ~-
}
|
|
570
|
+
|
|
571
|
+
# Returns the current git branch (might be empty string if on a tag and repo was checked out with --depth 1)
# Arg1: the repository root
# NOTE: CI env vars (GITHUB_REF_NAME / CI_COMMIT_REF_NAME / TRAVIS_BRANCH) could be
# used instead, but asking git directly behaves the same on every CI
get_git_branch() {
  local REPO_ROOT="$1"
  cd "$REPO_ROOT"
  git branch --show-current
  cd ~-
}
|
|
600
|
+
|
|
601
|
+
# Returns the current git commit sha, always defined
# Arg1: the repository root
# NOTE: CI env vars (GITHUB_SHA / CI_COMMIT_SHA / TRAVIS_COMMIT) could be used
# instead, but asking git directly behaves the same on every CI
get_git_commit_sha() {
  local REPO_ROOT="$1"
  cd "$REPO_ROOT"
  git rev-parse HEAD
  cd ~-
}
|
|
624
|
+
|
|
625
|
+
# Returns the current git commit _short_ sha, always defined
# Arg1: the repository root
get_git_commit_short_sha() {
  local REPO_ROOT="$1"
  cd "$REPO_ROOT"
  git rev-parse --short HEAD
  cd ~-
}
|
|
633
|
+
|
|
634
|
+
# Returns the list of changed files between two commits
|
|
635
|
+
# Arg1: commit to (default to latest known)
|
|
636
|
+
# Arg2: commit from (defaults to the one before arg1)
|
|
637
|
+
# NOTE: requires git history to work (ie probably not with shallow clone)
|
|
638
|
+
# NOTE: needs to be called from inside a git repo
|
|
639
|
+
# get_git_changed_files() {
|
|
640
|
+
# local COMMIT0=${1:-HEAD}
|
|
641
|
+
# local COMMIT1=${2:-"$COMMIT0"^}
|
|
642
|
+
|
|
643
|
+
# cd "$REPO_ROOT"
|
|
644
|
+
# if [ -z "$CI_ID" ]; then
|
|
645
|
+
# git diff --name-only "$COMMIT0" "$COMMIT1"
|
|
646
|
+
# fi
|
|
647
|
+
# cd ~-
|
|
648
|
+
# }
|
|
649
|
+
|
|
650
|
+
# Returns the message (full body) of the current commit
# Arg1: the repository root
get_git_commit_message() {
  local REPO_ROOT="$1"
  cd "$REPO_ROOT"
  git show -s --pretty=%B
  cd ~-
}
|
|
658
|
+
|
|
659
|
+
# Returns the author name of the current commit
# Arg1: the repository root
get_git_commit_author_name() {
  local REPO_ROOT="$1"
  cd "$REPO_ROOT"
  git show -s --pretty=%an
  cd ~-
}
|
|
667
|
+
|
|
668
|
+
# Returns the author email of the current commit
# Arg1: the repository root
get_git_commit_author_email() {
  local REPO_ROOT="$1"
  cd "$REPO_ROOT"
  git show -s --pretty=%ae
  cd ~-
}
|
|
676
|
+
|
|
677
|
+
# Shallow clone a repo (with no history), will also (shallow) clone submodules.
# Expected args:
# 1. the url of the repo to clone
# 2. the directory where to clone the repo
# 3. the ref to clone (branch or tag)
git_shallow_clone() {
  local REPO_URL=$1
  local REPO_DIR=$2
  local REPO_REF=${3:-}

  # Build the option list as an array instead of a word-split string: this keeps
  # each option a distinct argument without relying on unquoted expansion
  local GIT_OPTS=(--depth 1 --recurse-submodules --shallow-submodules)
  if [ -n "$REPO_REF" ]; then
    GIT_OPTS+=(--branch "$REPO_REF")
  fi

  git clone "${GIT_OPTS[@]}" "$REPO_URL" "$REPO_DIR"
}
|
|
694
|
+
|
|
695
|
+
### Github
|
|
696
|
+
###
|
|
697
|
+
|
|
698
|
+
# Deploys generated documentation using github pages system.
# Arg1: the repository url
# Arg2: the folder where documentation has been generated
# Arg3: the author name to use when commiting the updated documentation.
# Arg4: the author email to use when commiting the updated documentation.
# Arg5: the commit message.
# Arg6: the branch where to commit the documentation (defaults to gh-pages)
deploy_gh_pages() {
  local REPO_URL="$1"
  local DOCS_DIR="$2"
  local AUTHOR_NAME="$3"
  local AUTHOR_EMAIL="$4"
  local COMMIT_MESSAGE="$5"
  local DOCS_BRANCH="${6:-gh-pages}"

  local CLONE_DIR
  CLONE_DIR="$(mktemp -d -p "$TMP_DIR" gh_pages.XXXXXX)"

  # Fetch only the documentation branch into a temp location
  git clone --depth 1 --branch "$DOCS_BRANCH" "$REPO_URL" "$CLONE_DIR"
  # Overlay the freshly built doc
  cp -fR "$DOCS_DIR"/* "$CLONE_DIR"
  # Commit everything (the .nojekyll file tells Github to skip jekyll processing)
  cd "$CLONE_DIR" && touch .nojekyll && git add --all && git -c user.name="$AUTHOR_NAME" -c user.email="$AUTHOR_EMAIL" commit --message "$COMMIT_MESSAGE"
  # Publish
  git push origin "$DOCS_BRANCH"
}
|
|
725
|
+
|
|
726
|
+
### Log
###

# ANSI escape sequence building blocks and a couple of ready-made styles
KASH_TXT_B="\e["
KASH_TXT_E="m"
KASH_TXT_BOLD="${KASH_TXT_B}1${KASH_TXT_E}"
KASH_TXT_RESET="${KASH_TXT_B}0${KASH_TXT_E}"
|
|
733
|
+
|
|
734
|
+
# Creates a foldable log section in CI systems
# Arg1: the section title
# NOTE: foldable section must be terminated using end_group and the same $TITLE
begin_group() {
  local TITLE="$1"

  if [ "$CI" = true ]; then
    case "$CI_ID" in
      github)
        # see https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#grouping-log-lines
        echo "::group::$TITLE"
        ;;
      gitlab)
        # see https://docs.gitlab.com/ee/ci/jobs/#custom-collapsible-sections
        local SECTION
        SECTION=$(echo "$TITLE" | tr ' .' '_')
        echo -e "section_start:$(date +%s):$SECTION\r\e[0K\e[95m$TITLE\e[0m"
        ;;
      travis)
        echo "travis_fold:start:$TITLE"
        ;;
    esac
  else
    echo "%< --- $TITLE ------"
  fi
}
|
|
757
|
+
|
|
758
|
+
# Terminates a foldable log section in CI systems
# Arg1: the section title (must match the one given to begin_group)
end_group() {
  local TITLE="$1"

  if [ "$CI" = true ]; then
    case "$CI_ID" in
      github)
        echo "::endgroup::"
        ;;
      gitlab)
        local SECTION
        SECTION=$(echo "$TITLE" | tr ' .' '_')
        echo -e "section_end:$(date +%s):$SECTION\r\e[0K"
        ;;
      travis)
        echo "travis_fold:end:$TITLE"
        ;;
    esac
  else
    echo "------ $TITLE --- >%"
  fi
}
|
|
777
|
+
|
|
778
|
+
### Slack
|
|
779
|
+
###
|
|
780
|
+
|
|
781
|
+
# POST a raw json payload to a slack webhook
# Arg1: the json payload
# Arg2: the webhook url
slack_send() {
  local PAYLOAD="$1"
  local URL="$2"

  curl -X POST -H "Content-type: application/json" --data "$PAYLOAD" "$URL"
}
|
|
787
|
+
|
|
788
|
+
# Push a simple message to a slack channel
# Arg1: the Slack webhook where to push the message
# Arg2: the message (can be markdown formatted)
slack_log() {
  local URL="$1"
  local MSG="$2"

  # Wrap the message in a single mrkdwn section block
  local PAYLOAD="{ blocks: [ { \"type\": \"section\", \"text\": { \"type\": \"mrkdwn\", \"text\": \"$MSG\" } } ] }"
  slack_send "$PAYLOAD" "$URL"
}
|
|
798
|
+
|
|
799
|
+
# Push a colored message to a slack channel
# Arg1: the Slack webhook where to push the message
# Arg2: the message (can be markdown formatted)
# Arg3: the color to use (as and hex value)
slack_color_log() {
  local URL="$1"
  local MSG="$2"
  local COLOR="$3"

  # Same mrkdwn section as slack_log, but wrapped in a colored attachment
  local PAYLOAD="{ attachments: [ { \"color\": \"$COLOR\", blocks: [ { \"type\": \"section\", \"text\": { \"type\": \"mrkdwn\", \"text\": \"$MSG\" } } ] } ] }"
  slack_send "$PAYLOAD" "$URL"
}
|
|
811
|
+
|
|
812
|
+
# Report ci job result to slack channel
# Expected usage is to do the following:
# trap 'slack_ci_report "$ROOT_DIR" "CI step name" "$?" "$SLACK_WEBHOOK_APPS"' EXIT
# Exit code 0 = success, anything else is failure
# Arg1: the repository root
# Arg2: the name of the CI step being reported
# Arg3: the exit code of the ci job
# Arg4: the slack webhook where to push report
slack_ci_report() {
  local REPO_DIR="$1"
  local CI_STEP_NAME="$2"
  local RET_CODE="$3"
  local SLACK_WEBHOOK="$4"

  local STATUS COLOR
  if [ "$RET_CODE" != "0" ]; then
    STATUS="failed"
    COLOR="#a30200"
  else
    STATUS="success"
    COLOR="#2eb886"
  fi

  local MESSAGE
  case "$CI_ID" in
    github)
      # Build a message with repo / commit / run links from the Github Actions env
      MESSAGE=$(printf "*%s*: %s %s *@%s* (%s, <%s|repo>, <%s|commit>, <%s|run>)" \
        "$GITHUB_REPOSITORY" \
        "$CI_STEP_NAME" \
        "$STATUS" \
        "$GITHUB_REF_NAME" \
        "$GITHUB_ACTOR" \
        "$GITHUB_SERVER_URL/$GITHUB_REPOSITORY" \
        "$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/commit/$GITHUB_SHA" \
        "$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID")
      ;;
    *)
      ;;
  esac

  slack_color_log "$SLACK_WEBHOOK" "$MESSAGE" "$COLOR"
}
|
|
848
|
+
|
|
849
|
+
# Report e2e test result to slack channel
# Arg1: the app name
# Arg2: the exit code of the tests
# Arg3: the slack webhook where to push report
# Arg4: link to chrome logs
# Arg5: link to screenshots
slack_e2e_report() {
  local APP="$1"
  local RET_CODE="$2"
  local SLACK_WEBHOOK="$3"
  local CHROME_LOGS_LINK="$4"
  local SCREEN_LINK="$5"

  local STATUS="success"
  local COLOR="#2eb886"
  if [ "$RET_CODE" != "0" ]; then STATUS="failed"; COLOR="#a30200"; fi

  local MESSAGE
  MESSAGE=$(printf "*%s*: run_e2e_tests %s" \
    "$APP" \
    "$STATUS")
  if [ -n "$CHROME_LOGS_LINK" ] && [ -n "$SCREEN_LINK" ]; then
    MESSAGE+=" (<${CHROME_LOGS_LINK}|chrome logs> | <${SCREEN_LINK}|screenshots>)"
  elif [ -n "$CHROME_LOGS_LINK" ]; then
    # Fix: closing parenthesis was missing in this branch, producing an
    # unbalanced "(<...|chrome logs>" in the slack message
    MESSAGE+=" (<${CHROME_LOGS_LINK}|chrome logs>)"
  elif [ -n "$SCREEN_LINK" ]; then
    MESSAGE+=" (<${SCREEN_LINK}|screenshots>)"
  fi

  slack_color_log "$SLACK_WEBHOOK" "$MESSAGE" "$COLOR"
}
|
|
880
|
+
|
|
881
|
+
### SOPS
|
|
882
|
+
###
|
|
883
|
+
|
|
884
|
+
# Generates the decrypted filename for the given encrypted file
# Arg1: the encrypted filename
# NOTE: decrypted file will be XXXXX.dec.ext
enc2dec() {
  local ENC="$1"
  local BASENAME
  BASENAME="$(basename "$ENC")"
  # Part before the first dot / part after the last dot
  echo "$(dirname "$ENC")/${BASENAME%%.*}.dec.${BASENAME##*.}"
}
|
|
897
|
+
|
|
898
|
+
# Generates the encrypted filename for the given decrypted file
# Arg1: the decrypted filename
# NOTE: encrypted file will be XXXXX.enc.ext
dec2enc() {
  local DEC="$1"
  local BASENAME
  BASENAME="$(basename "$DEC")"
  # Part before the first dot / part after the last dot
  echo "$(dirname "$DEC")/${BASENAME%%.*}.enc.${BASENAME##*.}"
}
|
|
911
|
+
|
|
912
|
+
# Loads environment variables from encrypted env files
# Usage: load_env_files /path/to/file1.enc.env /path/to/file2.enc.env /path/to/file3.enc.env
# NOTE: requires SOPS_AGE_KEY or SOPS_AGE_KEY_FILE to be defined. If not, will defaults to $DEVELOPMENT_DIR/age/keys.txt
load_env_files() {
  # Use developer key unless one of SOPS_AGE_KEY or SOPS_AGE_KEY_FILE is already defined (eg. CI)
  if [ -z "${SOPS_AGE_KEY-}" ] && [ -z "${SOPS_AGE_KEY_FILE-}" ]; then
    export SOPS_AGE_KEY_FILE="$DEVELOPMENT_DIR/age/keys.txt"
  fi

  for ENC in "$@"; do
    if [ -f "$ENC" ]; then
      local DEC
      DEC="$(enc2dec "$ENC")"
      sops --decrypt --output "$DEC" "$ENC"
      # Export every variable the decrypted file defines
      set -a && . "$DEC" && set +a
    fi
  done
}
|
|
930
|
+
|
|
931
|
+
# Decrypt files containing secrets and define, for each one, an environment
# variable pointing at the decrypted filename.
# Usage: load_value_files /path/to/FOO_PASSWORD.enc.value /path/to/BLAH_SECRET.enc.value
# Defines FOO_PASSWORD (etc.) as the decrypted filename, which can then be used
# to feed the decrypted value from stdin. Missing files are silently skipped.
load_value_files() {
  # Use the developer key unless one of SOPS_AGE_KEY / SOPS_AGE_KEY_FILE is
  # already defined (eg. in CI)
  if [ -z "${SOPS_AGE_KEY-}" ] && [ -z "${SOPS_AGE_KEY_FILE-}" ]; then
    export SOPS_AGE_KEY_FILE="$DEVELOPMENT_DIR/age/keys.txt"
  fi

  local ENC_FILE
  for ENC_FILE in "$@"; do
    [ -f "$ENC_FILE" ] || continue
    local DEC_FILE
    DEC_FILE="$(enc2dec "$ENC_FILE")"
    sops --decrypt --output "$DEC_FILE" "$ENC_FILE"

    # The basename up to the first dot names the variable to define
    local KEY
    KEY="$(basename "$DEC_FILE")"
    KEY="${KEY%%.*}"
    # Define (and export) it with the decrypted file path as value
    declare -gx "$KEY"="$DEC_FILE"
  done
}
|
|
954
|
+
|
|
955
|
+
### Kalisio
|
|
956
|
+
###
|
|
957
|
+
|
|
958
|
+
# Print the kalisio flavor (prod, test or dev) according to the current
# branch/tag name.
# Arg1: the repository root folder
get_flavor_from_git() {
  local REPO="$1"
  # Matches 'test' but also 'test-v1.2'
  local TEST_RE="^test(-v[0-9]+\.[0-9]+)?$"
  # Only matches 'prod-v1.2.3'
  local PROD_RE="^prod-v[0-9]+\.[0-9]+\.[0-9]+$"

  local TAG BRANCH
  TAG=$(get_git_tag "$REPO")
  BRANCH=$(get_git_branch "$REPO")

  # A prod tag wins over a test branch; everything else is dev
  if [[ "$TAG" =~ $PROD_RE ]]; then
    printf "prod"
  elif [[ "$BRANCH" =~ $TEST_RE ]]; then
    printf "test"
  else
    printf "dev"
  fi
}
|
|
981
|
+
|
|
982
|
+
# Print the git ref that produced the given container tag.
# Eg.
#  - container tag 2.2.0-test => git ref test-v2.2
#  - container tag 2.2.2-prod => git ref prod-v2.2.2
#  - container tag 2.2.2-dev  => git ref master
# Plain 'test' / 'dev' container tags map to 'test' / 'master'.
# Arg1: the container tag
get_git_ref_from_container_tag() {
  local TAG="$1"
  # Versioned tags look like <semver>-<flavor>, eg. 2.2.2-prod
  if [[ "$TAG" =~ ^([0-9]+\.[0-9]+\.[0-9]+)-(.*)$ ]]; then
    local VERSION="${BASH_REMATCH[1]}"
    local FLAVOR="${BASH_REMATCH[2]}"
    case "$FLAVOR" in
      # Prod container => tag is prod-v1.2.3
      prod) printf "%s-v%s" "$FLAVOR" "$VERSION" ;;
      # Test container => branch is test-v1.2 (patch level dropped)
      test) printf "%s-v%s" "$FLAVOR" "${VERSION%.*}" ;;
      # Dev container => branch is master
      *) printf "master" ;;
    esac
  fi

  # Bare 'test' or 'dev' container tags are also possible
  case "$TAG" in
    test) printf "test" ;;
    dev) printf "master" ;;
  esac
}
|
|
1012
|
+
|
|
1013
|
+
# Run kli in a separate folder.
# Arg1: the folder where to install everything
# Arg2: the node version to use (16, 18 ...)
# Arg3: the kli file to use
# Arg4: 'klifull' (default) to install and link using kli, anything else only clones
# NOTE: you should probably 'undef' environment variables such as
# KALISIO_DEVELOPMENT_DIR before calling this
run_kli() {
  local TARGET_DIR="$1"
  local NODE="$2"
  local KLI_CONF="$3"
  local MODE="${4:-klifull}"

  # Fetch kli itself into the target dir if it is not there yet
  if [ ! -d "$TARGET_DIR/kli" ]; then
    git_shallow_clone "https://github.com/kalisio/kli.git" "$TARGET_DIR/kli"
    cd "$TARGET_DIR/kli" && nvm exec "$NODE" yarn install && cd ~-
  fi

  cd "$TARGET_DIR"
  # Always clone; install & link only in 'klifull' mode
  nvm exec "$NODE" node "$TARGET_DIR/kli/index.js" "$KLI_CONF" --clone --shallow-clone
  if [ "$MODE" = klifull ]; then
    nvm exec "$NODE" node "$TARGET_DIR/kli/index.js" "$KLI_CONF" --install
    nvm exec "$NODE" node "$TARGET_DIR/kli/index.js" "$KLI_CONF" --link --link-folder "$TARGET_DIR/yarn-links"
  fi
  cd ~-
}
|
|
1041
|
+
|
|
1042
|
+
# Setup the workspace for a 'simple' project.
# A 'simple' project has no kli file, but requires a 'development' repo.
# It can also depend on other repos, given as additional args.
# Arg1: the workspace directory
# Arg2: the url of the 'development' repository
# ...: additional repo urls to pull; each is cloned into the workspace
#      directory, using the basename of its url as the repo directory.
setup_workspace() {
  local WS_DIR="$1"
  local DEV_REPO_URL="$2"

  # The 'development' repo is always required
  git_shallow_clone "$DEV_REPO_URL" "$WS_DIR/development"

  # Then clone every additional dependency alongside
  shift 2
  local REPO_URL
  for REPO_URL in "$@"; do
    git_shallow_clone "$REPO_URL" "$WS_DIR/$(basename "$REPO_URL" .git)"
  done
}
|
|
1063
|
+
|
|
1064
|
+
# Setup a suitable workspace for the given app.
# Arg1: the app repository dir
# Arg2: the workspace dir
# Arg3: the url used to clone the corresponding 'development' repository
# Arg4: the node version used to setup the workspace (matters because kli runs with it)
# Arg5: the directory containing kli files, relative to the 'development' repository root
# Arg6: the kind of kli run wanted (nokli, kli or klifull => cf. run_kli())
# Arg7: (only in dev mode) the ref (ie. tag or branch) to checkout in the workspace
setup_app_workspace() {
  ensure_yq

  local REPO_DIR="$1"
  local WORKSPACE_DIR="$2"
  local DEVELOPMENT_REPO_URL="$3"
  local NODE_VER="$4"
  local KLI_BASE="$5"
  local KLI_RUN="${6:-klifull}"

  # Clone 'development' inside the workspace
  local DEVELOPMENT_DIR="$WORKSPACE_DIR/development"
  git_shallow_clone "$DEVELOPMENT_REPO_URL" "$DEVELOPMENT_DIR"

  # Fetch the app name and the ref (tag or branch) to work with
  local APP_NAME
  APP_NAME=$(yq --output-format=yaml '.name' "$REPO_DIR/package.json")
  local GIT_REF=""
  # Default-expand CI so 'set -u' does not abort when it is undefined locally
  if [ "${CI:-}" = true ]; then
    # In CI, derive the ref from the local repo: tag first, then branch
    GIT_REF=$(get_git_tag "$REPO_DIR")
    if [ -z "$GIT_REF" ]; then
      GIT_REF=$(get_git_branch "$REPO_DIR")
    fi
  else
    # In dev mode the ref comes from the 7th argument (may be omitted,
    # hence the default expansion to stay 'set -u' safe)
    GIT_REF="${7-}"
  fi

  if [ -z "$KLI_BASE" ]; then
    KLI_BASE="$DEVELOPMENT_DIR/$APP_NAME"
  else
    KLI_BASE="$DEVELOPMENT_DIR/$KLI_BASE/$APP_NAME"
  fi

  # Determine the matching kli file: prod tags and test branches use
  # versioned files, anything else falls back to the dev files
  local KLI_FILE
  local PROD_REGEX="^prod-v([0-9]+\.[0-9]+\.[0-9]+)$"
  local TEST_REGEX="^test-v([0-9]+\.[0-9]+)$"
  if [[ "$GIT_REF" =~ $PROD_REGEX ]]; then
    KLI_FILE="$KLI_BASE/prod/$APP_NAME-${BASH_REMATCH[1]}.js"
  elif [[ "$GIT_REF" =~ $TEST_REGEX ]]; then
    KLI_FILE="$KLI_BASE/test/$APP_NAME-${BASH_REMATCH[1]}.js"
  else
    KLI_FILE="$KLI_BASE/dev/$APP_NAME-$GIT_REF.js"
    if [ ! -f "$KLI_FILE" ]; then
      KLI_FILE="$KLI_BASE/dev/$APP_NAME.js"
    fi
  fi

  # run kli !
  if [ "$KLI_RUN" = kli ] || [ "$KLI_RUN" = klifull ]; then
    echo "About to populate workspace using $KLI_FILE ..."
    run_kli "$WORKSPACE_DIR" "$NODE_VER" "$KLI_FILE" "$KLI_RUN"
  fi
}
|
|
1132
|
+
|
|
1133
|
+
# Gather information about an app.
# Arg1: the repository root
# Arg2: the folder where to search for kli files
# Fills the APP_INFOS array with: name, version, flavor, git tag, git branch
# and kli file. Read the results with the get_app_xxx helpers below.
init_app_infos() {
  ensure_yq

  local ROOT="$1"
  local KLI_DIR="$2"

  local NAME
  NAME=$(yq --output-format=yaml '.name' "$ROOT/package.json")
  local VERSION
  VERSION=$(yq --output-format=yaml '.version' "$ROOT/package.json")

  # kli files live in a per-app subfolder
  KLI_DIR="$KLI_DIR/$NAME"

  local TAG
  TAG=$(get_git_tag "$ROOT")
  local BRANCH
  BRANCH=$(get_git_branch "$ROOT")

  # Prefer the tag over the branch when both exist
  local REF="${TAG:-$BRANCH}"

  # prod tags and test branches map to versioned kli files; anything else is
  # 'dev' and falls back to the unversioned kli file when no ref-specific
  # file exists
  local FLAVOR
  local KLI_FILE
  local PROD_RE="^prod-v([0-9]+\.[0-9]+\.[0-9]+)$"
  local TEST_RE="^test-v([0-9]+\.[0-9]+)$"
  if [[ "$REF" =~ $PROD_RE ]]; then
    FLAVOR="prod"
    KLI_FILE="$KLI_DIR/$FLAVOR/$NAME-${BASH_REMATCH[1]}.js"
  elif [[ "$REF" =~ $TEST_RE ]]; then
    FLAVOR="test"
    KLI_FILE="$KLI_DIR/$FLAVOR/$NAME-${BASH_REMATCH[1]}.js"
  else
    FLAVOR="dev"
    KLI_FILE="$KLI_DIR/$FLAVOR/$NAME-$REF.js"
    if [ ! -f "$KLI_FILE" ]; then
      KLI_FILE="$KLI_DIR/$FLAVOR/$NAME.js"
    fi
  fi

  APP_INFOS=("$NAME" "$VERSION" "$FLAVOR" "$TAG" "$BRANCH" "$KLI_FILE")
}
|
|
1179
|
+
|
|
1180
|
+
# Print the app name gathered by init_app_infos (must be called first).
get_app_name() {
  printf '%s\n' "${APP_INFOS[0]}"
}
|
|
1185
|
+
|
|
1186
|
+
# Print the app version gathered by init_app_infos (must be called first).
get_app_version() {
  printf '%s\n' "${APP_INFOS[1]}"
}
|
|
1191
|
+
|
|
1192
|
+
# Print the app flavor gathered by init_app_infos (must be called first).
get_app_flavor() {
  printf '%s\n' "${APP_INFOS[2]}"
}
|
|
1197
|
+
|
|
1198
|
+
# Print the app git tag gathered by init_app_infos (must be called first).
get_app_tag() {
  printf '%s\n' "${APP_INFOS[3]}"
}
|
|
1203
|
+
|
|
1204
|
+
# Print the app git branch gathered by init_app_infos (must be called first).
get_app_branch() {
  printf '%s\n' "${APP_INFOS[4]}"
}
|
|
1209
|
+
|
|
1210
|
+
# Print the app kli file gathered by init_app_infos (must be called first).
get_app_kli_file() {
  printf '%s\n' "${APP_INFOS[5]}"
}
|
|
1215
|
+
|
|
1216
|
+
# Run backend tests for the given app.
# Arg1: the app repository directory
# Arg2: the kli files directory, relative to the 'development' repository root
# Arg3: whether to publish code coverage results (boolean)
# Arg4: the node version to use (16, 18, ...)
# Arg5: the mongo version to use (5, 6, ...); mongo is not started when empty
run_app_tests() {
  local APP_DIR="$1"
  local KLI_ROOT="$2"
  local WITH_COVERAGE="$3"
  local NODE="$4"
  local MONGO="$5"
  local WS_DIR
  WS_DIR="$(dirname "$APP_DIR")"

  init_app_infos "$APP_DIR" "$WS_DIR/development/$KLI_ROOT"

  local APP
  APP=$(get_app_name)
  local VERSION
  VERSION=$(get_app_version)
  local FLAVOR
  FLAVOR=$(get_app_flavor)

  echo "About to run tests for $APP v$VERSION-$FLAVOR ..."

  ## Start mongo when a version was requested
  ##
  if [ -n "$MONGO" ]; then
    begin_group "Starting mongo $MONGO ..."
    use_mongo "$MONGO"
    k-mongo
    end_group "Starting mongo $MONGO ..."
  fi

  ## Run the server test suite
  ##
  use_node "$NODE"
  yarn test:server

  ## Publish code coverage if asked to
  ##
  if [ "$WITH_COVERAGE" = true ]; then
    send_coverage_to_cc "$CC_TEST_REPORTER_ID"
  fi
}
|
|
1268
|
+
|
|
1269
|
+
# Setup the workspace for a lib project.
# A lib project has no kli file, but requires a 'development' repo.
# It can also depend on other repos, given as additional args.
# Expected args: cf. setup_workspace
setup_lib_workspace() {
  # Quote "$@" so arguments (urls/paths) containing spaces are forwarded intact
  setup_workspace "$@"
}
|
|
1280
|
+
|
|
1281
|
+
# Gather information about a library.
# Arg1: the repository root
# Fills the LIB_INFOS array with: name, version, git tag and git branch.
# Read the results with the get_lib_xxx helpers below.
init_lib_infos() {
  ensure_yq

  local ROOT="$1"
  local NAME
  NAME=$(yq --output-format=yaml '.name' "$ROOT/package.json")
  local VERSION
  VERSION=$(yq --output-format=yaml '.version' "$ROOT/package.json")

  local TAG
  TAG=$(get_git_tag "$ROOT")
  local BRANCH
  BRANCH=$(get_git_branch "$ROOT")

  LIB_INFOS=("$NAME" "$VERSION" "$TAG" "$BRANCH")
}
|
|
1301
|
+
|
|
1302
|
+
# Print the lib name gathered by init_lib_infos (must be called first).
get_lib_name() {
  printf '%s\n' "${LIB_INFOS[0]}"
}
|
|
1307
|
+
|
|
1308
|
+
# Print the lib version gathered by init_lib_infos (must be called first).
get_lib_version() {
  printf '%s\n' "${LIB_INFOS[1]}"
}
|
|
1313
|
+
|
|
1314
|
+
# Print the lib git tag gathered by init_lib_infos (must be called first).
get_lib_tag() {
  printf '%s\n' "${LIB_INFOS[2]}"
}
|
|
1319
|
+
|
|
1320
|
+
# Print the lib git branch gathered by init_lib_infos (must be called first).
get_lib_branch() {
  printf '%s\n' "${LIB_INFOS[3]}"
}
|
|
1325
|
+
|
|
1326
|
+
# Run tests for a library module.
# Arg1: root directory
# Arg2: true to publish code coverage to code climate (CC_TEST_REPORTER_ID
#       env var must be defined in that case)
# Arg3: node version to be used
# Arg4: mongo version to be used if required by tests (mongo is not started
#       when empty)
run_lib_tests () {
  local ROOT_DIR="$1"
  local CODE_COVERAGE="$2"
  local NODE_VER="$3"
  local MONGO_VER="$4"
  # NOTE: unused WORKSPACE_DIR and GIT_TAG locals removed (dead code)

  init_lib_infos "$ROOT_DIR"

  local LIB
  LIB=$(get_lib_name)
  local VERSION
  VERSION=$(get_lib_version)

  echo "About to run tests for $LIB v$VERSION..."

  ## Start mongo
  ##
  if [ -n "$MONGO_VER" ]; then
    begin_group "Starting mongo $MONGO_VER ..."
    use_mongo "$MONGO_VER"
    k-mongo
    end_group "Starting mongo $MONGO_VER ..."
  fi

  ## Run tests
  ##
  use_node "$NODE_VER"
  yarn && yarn test

  ## Publish code coverage
  ##
  if [ "$CODE_COVERAGE" = true ]; then
    send_coverage_to_cc "$CC_TEST_REPORTER_ID"
  fi
}
|
|
1376
|
+
|
|
1377
|
+
# Setup the workspace for a krawler job project.
# Cf. setup_lib_workspace
setup_job_workspace() {
  # Quote "$@" so arguments (urls/paths) containing spaces are forwarded intact
  setup_workspace "$@"
}
|
|
1382
|
+
|
|
1383
|
+
# Gather information about a krawler job.
# Arg1: the repository root
# Fills the JOB_INFOS array with: name, version, git tag, git branch and the
# required krawler version. Read the results with the get_job_xxx helpers below.
init_job_infos() {
  ensure_yq

  local ROOT="$1"
  local NAME
  NAME=$(yq --output-format=yaml '.name' "$ROOT/package.json")
  local VERSION
  VERSION=$(yq --output-format=yaml '.version' "$ROOT/package.json")
  # The krawler version is declared as a peer dependency of the job
  local KRAWLER
  KRAWLER=$(yq --output-format=yaml '.peerDependencies["@kalisio/krawler"]' "$ROOT/package.json")

  local TAG
  TAG=$(get_git_tag "$ROOT")
  local BRANCH
  BRANCH=$(get_git_branch "$ROOT")

  JOB_INFOS=("$NAME" "$VERSION" "$TAG" "$BRANCH" "$KRAWLER")
}
|
|
1405
|
+
|
|
1406
|
+
# Print the job name gathered by init_job_infos (must be called first).
get_job_name() {
  printf '%s\n' "${JOB_INFOS[0]}"
}
|
|
1411
|
+
|
|
1412
|
+
# Print the job version gathered by init_job_infos (must be called first).
get_job_version() {
  printf '%s\n' "${JOB_INFOS[1]}"
}
|
|
1417
|
+
|
|
1418
|
+
# Print the job git tag gathered by init_job_infos (must be called first).
get_job_tag() {
  printf '%s\n' "${JOB_INFOS[2]}"
}
|
|
1423
|
+
|
|
1424
|
+
# Print the job git branch gathered by init_job_infos (must be called first).
get_job_branch() {
  printf '%s\n' "${JOB_INFOS[3]}"
}
|
|
1429
|
+
|
|
1430
|
+
# Print the required krawler version gathered by init_job_infos (must be
# called first).
get_job_krawler_version() {
  printf '%s\n' "${JOB_INFOS[4]}"
}
|
|
1435
|
+
|
|
1436
|
+
# Build a krawler job container.
# Arg1: the job repository directory
# Arg2: the prefix to use before the image name (ie. kalisio, some_other_namespace, ...)
# Arg3: the job variant to build (empty if no variant)
# Arg4: the registry url where to push the built container
# Arg5: the registry username to use
# Arg6: the file containing the registry password
# Arg7: true to push the built container to the registry
build_job() {
  local JOB_DIR="$1"
  local PREFIX="$2"
  local VARIANT="$3"
  local REGISTRY="$4"
  local REGISTRY_USER="$5"
  local REGISTRY_PASSWORD_FILE="$6"
  local PUSH="$7"

  ## Init workspace
  ##
  init_job_infos "$JOB_DIR"

  local JOB
  JOB=$(get_job_name)
  local VERSION
  VERSION=$(get_job_version)
  local KRAWLER_VERSION
  KRAWLER_VERSION=$(get_job_krawler_version)
  local TAG
  TAG=$(get_job_tag)

  if [ -z "$TAG" ]; then
    echo "About to build $JOB development version based on krawler development version..."
  else
    echo "About to build $JOB v$VERSION based on krawler $KRAWLER_VERSION..."
  fi

  ## Build container
  ##
  local DOCKERFILE="dockerfile"
  local IMAGE="$REGISTRY/$PREFIX/$JOB"
  local IMAGE_TAG="latest"
  local KRAWLER_TAG="latest"

  # A tagged build produces a versioned image against a pinned krawler
  if [ -n "$TAG" ]; then
    IMAGE_TAG="$VERSION"
    KRAWLER_TAG="$KRAWLER_VERSION"
  fi

  # Variants get their own image tag and their own dockerfile
  if [ -n "$VARIANT" ]; then
    IMAGE_TAG="$VARIANT-$IMAGE_TAG"
    DOCKERFILE="$DOCKERFILE.$VARIANT"
  fi

  begin_group "Building $IMAGE:$IMAGE_TAG ..."

  docker login --username "$REGISTRY_USER" --password-stdin "$REGISTRY" < "$REGISTRY_PASSWORD_FILE"
  # DOCKER_BUILDKIT enables Dockerfile-specific dockerignore files (job.Dockerfile.dockerignore)
  DOCKER_BUILDKIT=1 docker build \
    --build-arg KRAWLER_TAG="$KRAWLER_TAG" \
    -f "$DOCKERFILE" \
    -t "$IMAGE:$IMAGE_TAG" \
    "$JOB_DIR"

  if [ "$PUSH" = true ]; then
    docker push "$IMAGE:$IMAGE_TAG"
  fi

  docker logout

  end_group "Building $IMAGE:$IMAGE_TAG ..."
}
|
|
1512
|
+
|
|
1513
|
+
# Build vitepress docs and possibly publish them on github pages.
# Arg1: root directory
# Arg2: repository name, like kalisio/krawler
# Arg3: true to publish the result on github pages
build_docs () {
  local ROOT_DIR="$1"
  local REPOSITORY="$2"
  local PUBLISH="$3"
  local WORKSPACE_DIR
  WORKSPACE_DIR="$(dirname "$ROOT_DIR")"
  # Organisation is the leading part of the repository name (eg. 'kalisio').
  # Quote $REPOSITORY and split declaration from assignment so a failure of
  # the command substitution is not masked by 'local'.
  local ORGANISATION
  ORGANISATION="$(dirname "$REPOSITORY")"

  begin_group "Building docs for $REPOSITORY ..."

  # Build process requires node 18
  use_node 18

  rm -f .postcssrc.js && cd docs && yarn install && yarn build

  if [ "$PUBLISH" = true ]; then
    # Load the push token matching the organisation (filename is uppercase)
    load_env_files "$WORKSPACE_DIR/development/common/${ORGANISATION^^}_GH_PAGES_PUSH_TOKEN.enc.env"

    local COMMIT_SHA
    COMMIT_SHA=$(get_git_commit_sha "$ROOT_DIR")
    local COMMIT_AUTHOR_NAME
    COMMIT_AUTHOR_NAME=$(get_git_commit_author_name "$ROOT_DIR")
    local COMMIT_AUTHOR_EMAIL
    COMMIT_AUTHOR_EMAIL=$(get_git_commit_author_email "$ROOT_DIR")
    deploy_gh_pages \
      "https://oauth2:$GH_PAGES_PUSH_TOKEN@github.com/$REPOSITORY.git" \
      "$ROOT_DIR/docs/.vitepress/dist" \
      "$COMMIT_AUTHOR_NAME" \
      "$COMMIT_AUTHOR_EMAIL" \
      "Docs built from $COMMIT_SHA"
  fi

  end_group "Building docs for $REPOSITORY ..."
}
|
|
1553
|
+
|
|
1554
|
+
# Build the e2e tests container for an app.
# Arg1: root directory
# Arg2: true to publish the result on harbor
build_e2e_tests () {
  local APP_DIR="$1"
  local PUSH="$2"

  ## Init workspace
  ##
  local WS_DIR
  WS_DIR="$(dirname "$APP_DIR")"
  init_app_infos "$APP_DIR" "$WS_DIR/development/workspaces/apps"

  local APP
  APP=$(get_app_name)
  local VERSION
  VERSION=$(get_app_version)
  local FLAVOR
  FLAVOR=$(get_app_flavor)

  echo "About to build ${APP} v${VERSION}-$FLAVOR ..."

  # Harbor credentials: url/username from env, password as a decrypted file
  load_env_files "$WS_DIR/development/common/kalisio_harbor.enc.env"
  load_value_files "$WS_DIR/development/common/KALISIO_HARBOR_PASSWORD.enc.value"

  ## Build container
  ##
  local IMAGE_NAME="$KALISIO_HARBOR_URL/kalisio/$APP-e2e-tests"
  local IMAGE_TAG="$VERSION-$FLAVOR"

  begin_group "Building container ..."

  docker login --username "$KALISIO_HARBOR_USERNAME" --password-stdin "$KALISIO_HARBOR_URL" < "$KALISIO_HARBOR_PASSWORD"
  # DOCKER_BUILDKIT enables Dockerfile-specific dockerignore files (e2e-tests.Dockerfile.dockerignore)
  DOCKER_BUILDKIT=1 docker build \
    --build-arg APP="$APP" \
    --build-arg NODE_APP_INSTANCE="$FLAVOR" \
    --build-arg SUBDOMAIN="$FLAVOR.kalisio.xyz" \
    --build-arg HEADLESS=true \
    -f "$APP_DIR/e2e-tests.Dockerfile" \
    -t "$IMAGE_NAME:$IMAGE_TAG" \
    "$WS_DIR"
  # Also maintain a flavor-only alias tag
  docker tag "$IMAGE_NAME:$IMAGE_TAG" "$IMAGE_NAME:$FLAVOR"

  if [ "$PUSH" = true ]; then
    docker push "$IMAGE_NAME:$IMAGE_TAG"
    docker push "$IMAGE_NAME:$FLAVOR"
  fi

  docker logout "$KALISIO_HARBOR_URL"

  end_group "Building container ..."
}
|
|
1610
|
+
|
|
1611
|
+
# Run e2e tests.
# Specific error handling: run under 'set -uo pipefail' to bypass errors.
# Arg1: root directory
# Arg2: the app name
# Arg3: the slack webhook for apps
run_e2e_tests () {
  local ROOT_DIR="$1"
  local APP="$2"
  local SLACK_WEBHOOK_APPS="$3"

  ## Run tests & redirect output to a log file
  ##

  # Chrome
  mkdir -p "$ROOT_DIR/test/run/chrome"
  yarn test:client > "$ROOT_DIR/test/run/chrome/chrome_logs.txt" 2>&1
  # Capture the test suite outcome right away
  local RET_CODE=$?

  # Firefox (not enabled yet)
  # PUPPETEER_PRODUCT=firefox yarn add puppeteer
  # yarn link "@kalisio/kdk" --link-folder /opt/kalisio/yarn-links
  # export BROWSER="firefox"
  # mkdir -p "$ROOT_DIR/test/run/firefox"
  # yarn test:client > "$ROOT_DIR/test/run/chrome/firefox_logs.txt" 2>&1

  ## Upload logs & screenshots to S3
  ##
  local RUN_DATE
  RUN_DATE=$(date +"%d-%m-%Y")
  local CHROME_LOGS_LINK=""
  local SCREEN_LINK=""

  zip -r "$TMP_DIR/screenshots.zip" "$ROOT_DIR/test/run"

  rclone copy "$ROOT_DIR/test/run/chrome/chrome_logs.txt" "ovh-s3:/dev/e2e-tests/$APP/$RUN_DATE"
  CHROME_LOGS_LINK=$(rclone link "ovh-s3:/dev/e2e-tests/$APP/$RUN_DATE/chrome_logs.txt")

  rclone copy "$TMP_DIR/screenshots.zip" "ovh-s3:/dev/e2e-tests/$APP/$RUN_DATE"
  SCREEN_LINK=$(rclone link "ovh-s3:/dev/e2e-tests/$APP/$RUN_DATE/screenshots.zip")

  ## Report outcome to slack
  ##
  slack_e2e_report "$APP" "$RET_CODE" "$SLACK_WEBHOOK_APPS" "$CHROME_LOGS_LINK" "$SCREEN_LINK"
}
|