qontract-reconcile 0.10.2.dev345__py3-none-any.whl → 0.10.2.dev408__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {qontract_reconcile-0.10.2.dev345.dist-info → qontract_reconcile-0.10.2.dev408.dist-info}/METADATA +11 -10
- {qontract_reconcile-0.10.2.dev345.dist-info → qontract_reconcile-0.10.2.dev408.dist-info}/RECORD +126 -120
- reconcile/aus/base.py +17 -14
- reconcile/automated_actions/config/integration.py +12 -0
- reconcile/aws_account_manager/integration.py +2 -2
- reconcile/aws_ami_cleanup/integration.py +6 -7
- reconcile/aws_ami_share.py +69 -62
- reconcile/aws_cloudwatch_log_retention/integration.py +155 -126
- reconcile/aws_ecr_image_pull_secrets.py +2 -2
- reconcile/aws_iam_keys.py +1 -0
- reconcile/aws_saml_idp/integration.py +7 -1
- reconcile/aws_saml_roles/integration.py +9 -3
- reconcile/change_owners/change_owners.py +1 -1
- reconcile/change_owners/diff.py +2 -4
- reconcile/checkpoint.py +11 -3
- reconcile/cli.py +33 -8
- reconcile/dashdotdb_dora.py +4 -11
- reconcile/database_access_manager.py +118 -111
- reconcile/endpoints_discovery/integration.py +4 -1
- reconcile/endpoints_discovery/merge_request_manager.py +9 -11
- reconcile/external_resources/factories.py +5 -12
- reconcile/external_resources/integration.py +1 -1
- reconcile/external_resources/manager.py +5 -3
- reconcile/external_resources/meta.py +0 -1
- reconcile/external_resources/model.py +10 -10
- reconcile/external_resources/reconciler.py +5 -2
- reconcile/external_resources/secrets_sync.py +4 -6
- reconcile/external_resources/state.py +5 -4
- reconcile/gabi_authorized_users.py +8 -5
- reconcile/gitlab_housekeeping.py +13 -15
- reconcile/gitlab_mr_sqs_consumer.py +2 -2
- reconcile/gitlab_owners.py +15 -11
- reconcile/gql_definitions/automated_actions/instance.py +41 -2
- reconcile/gql_definitions/aws_ami_cleanup/aws_accounts.py +10 -0
- reconcile/gql_definitions/aws_cloudwatch_log_retention/aws_accounts.py +22 -61
- reconcile/gql_definitions/aws_saml_idp/aws_accounts.py +10 -0
- reconcile/gql_definitions/aws_saml_roles/aws_accounts.py +10 -0
- reconcile/gql_definitions/common/aws_vpc_requests.py +10 -0
- reconcile/gql_definitions/common/clusters.py +2 -0
- reconcile/gql_definitions/external_resources/external_resources_namespaces.py +84 -1
- reconcile/gql_definitions/external_resources/external_resources_settings.py +2 -0
- reconcile/gql_definitions/fragments/aws_account_common.py +2 -0
- reconcile/gql_definitions/fragments/aws_organization.py +33 -0
- reconcile/gql_definitions/fragments/aws_vpc_request.py +2 -0
- reconcile/gql_definitions/introspection.json +3474 -1986
- reconcile/gql_definitions/jira_permissions_validator/jira_boards_for_permissions_validator.py +4 -0
- reconcile/gql_definitions/terraform_init/aws_accounts.py +14 -0
- reconcile/gql_definitions/terraform_resources/terraform_resources_namespaces.py +33 -1
- reconcile/gql_definitions/terraform_tgw_attachments/aws_accounts.py +10 -0
- reconcile/jenkins_worker_fleets.py +1 -0
- reconcile/jira_permissions_validator.py +236 -121
- reconcile/ocm/types.py +6 -0
- reconcile/openshift_base.py +47 -1
- reconcile/openshift_cluster_bots.py +2 -1
- reconcile/openshift_resources_base.py +6 -2
- reconcile/openshift_saas_deploy.py +2 -2
- reconcile/openshift_saas_deploy_trigger_cleaner.py +3 -5
- reconcile/openshift_upgrade_watcher.py +3 -3
- reconcile/queries.py +131 -0
- reconcile/saas_auto_promotions_manager/subscriber.py +4 -3
- reconcile/slack_usergroups.py +4 -3
- reconcile/sql_query.py +1 -0
- reconcile/statuspage/integrations/maintenances.py +4 -3
- reconcile/statuspage/status.py +5 -8
- reconcile/templates/rosa-classic-cluster-creation.sh.j2 +4 -0
- reconcile/templates/rosa-hcp-cluster-creation.sh.j2 +3 -0
- reconcile/templating/renderer.py +2 -1
- reconcile/terraform_aws_route53.py +7 -1
- reconcile/terraform_init/integration.py +185 -21
- reconcile/terraform_resources.py +11 -1
- reconcile/terraform_tgw_attachments.py +7 -1
- reconcile/terraform_users.py +7 -0
- reconcile/terraform_vpc_peerings.py +14 -3
- reconcile/terraform_vpc_resources/integration.py +7 -0
- reconcile/typed_queries/aws_account_tags.py +41 -0
- reconcile/typed_queries/saas_files.py +2 -2
- reconcile/utils/aggregated_list.py +4 -3
- reconcile/utils/aws_api.py +51 -20
- reconcile/utils/aws_api_typed/api.py +38 -9
- reconcile/utils/aws_api_typed/cloudformation.py +149 -0
- reconcile/utils/aws_api_typed/logs.py +73 -0
- reconcile/utils/datetime_util.py +67 -0
- reconcile/utils/differ.py +2 -3
- reconcile/utils/early_exit_cache.py +3 -2
- reconcile/utils/expiration.py +7 -3
- reconcile/utils/external_resource_spec.py +24 -1
- reconcile/utils/filtering.py +1 -1
- reconcile/utils/helm.py +2 -1
- reconcile/utils/helpers.py +1 -1
- reconcile/utils/jinja2/utils.py +4 -96
- reconcile/utils/jira_client.py +82 -63
- reconcile/utils/jjb_client.py +9 -12
- reconcile/utils/jobcontroller/controller.py +1 -1
- reconcile/utils/jobcontroller/models.py +17 -1
- reconcile/utils/json.py +32 -0
- reconcile/utils/merge_request_manager/merge_request_manager.py +3 -3
- reconcile/utils/merge_request_manager/parser.py +2 -2
- reconcile/utils/mr/app_interface_reporter.py +2 -2
- reconcile/utils/mr/base.py +2 -2
- reconcile/utils/mr/notificator.py +2 -2
- reconcile/utils/mr/update_access_report_base.py +3 -4
- reconcile/utils/oc.py +113 -95
- reconcile/utils/oc_filters.py +3 -3
- reconcile/utils/ocm/products.py +6 -0
- reconcile/utils/ocm/search_filters.py +3 -6
- reconcile/utils/ocm/service_log.py +3 -5
- reconcile/utils/openshift_resource.py +10 -5
- reconcile/utils/output.py +3 -2
- reconcile/utils/pagerduty_api.py +5 -5
- reconcile/utils/runtime/integration.py +1 -2
- reconcile/utils/runtime/runner.py +2 -2
- reconcile/utils/saasherder/models.py +2 -1
- reconcile/utils/saasherder/saasherder.py +9 -7
- reconcile/utils/slack_api.py +24 -2
- reconcile/utils/sloth.py +171 -2
- reconcile/utils/sqs_gateway.py +2 -1
- reconcile/utils/state.py +2 -1
- reconcile/utils/terraform_client.py +4 -3
- reconcile/utils/terrascript_aws_client.py +165 -111
- reconcile/utils/vault.py +1 -1
- reconcile/vault_replication.py +107 -42
- tools/app_interface_reporter.py +4 -4
- tools/cli_commands/systems_and_tools.py +5 -1
- tools/qontract_cli.py +25 -13
- {qontract_reconcile-0.10.2.dev345.dist-info → qontract_reconcile-0.10.2.dev408.dist-info}/WHEEL +0 -0
- {qontract_reconcile-0.10.2.dev345.dist-info → qontract_reconcile-0.10.2.dev408.dist-info}/entry_points.txt +0 -0
reconcile/vault_replication.py
CHANGED

@@ -84,6 +84,54 @@ def deep_copy_versions(
         dest_vault.write(secret=write_dict, decode_base64=False, force=True)
 
 
+def _handle_missing_destination_secret(
+    dry_run: bool,
+    source_vault: VaultClient,
+    dest_vault: VaultClient,
+    source_data: dict,
+    source_version: int | None,
+    path: str,
+) -> None:
+    """Handles replication when destination secret is missing or has no accessible versions.
+
+    This covers two scenarios:
+    1. Secret doesn't exist at all in destination vault (SecretNotFoundError)
+    2. Secret exists but all versions are deleted in KV v2 (SecretVersionNotFoundError)
+
+    For both cases, we replicate from source starting from version 0 (or copy directly for v1).
+
+    Args:
+        dry_run: Whether this is a dry run
+        source_vault: Source vault client (needed for v2 deep copy)
+        dest_vault: Destination vault client
+        source_data: Already retrieved source secret data
+        source_version: Source secret version (None for v1 secrets)
+        path: Secret path
+    """
+    if source_version is None:
+        # v1 secret - just copy it over using the already-retrieved source data
+        logging.info(["replicate_vault_secret", "Copying v1 secret", path])
+        if not dry_run:
+            write_dict = {"path": path, "data": source_data}
+            dest_vault.write(secret=write_dict, decode_base64=False, force=True)
+    else:
+        # v2 secret - deep copy all versions starting from 0
+        # Note: deep_copy_versions will read individual versions from source as needed
+        logging.info([
+            "replicate_vault_secret",
+            "Deep copying v2 secret versions",
+            path,
+        ])
+        deep_copy_versions(
+            dry_run=dry_run,
+            source_vault=source_vault,
+            dest_vault=dest_vault,
+            current_dest_version=0,
+            current_source_version=source_version,
+            path=path,
+        )
+
+
 def write_dummy_versions(
     dry_run: bool,
     dest_vault: VaultClient,
@@ -133,48 +181,65 @@ def copy_vault_secret(
 
     try:
         dest_data, dest_version = dest_vault.read_all_with_version(secret_dict)
-    [42 removed lines (old 136-177) are not shown in this diff view]
+    except SecretVersionNotFoundError:
+        # Handle KV v2 case where secret metadata exists but latest version is deleted
+        # This occurs when someone manually deletes the latest version but the secret
+        # metadata still exists in Vault. This should only happen for v2 secrets.
+        logging.info([
+            "replicate_vault_secret",
+            "KV v2 latest version deleted, replicating all versions",
+            path,
+        ])
+        _handle_missing_destination_secret(
+            dry_run=dry_run,
+            source_vault=source_vault,
+            dest_vault=dest_vault,
+            source_data=source_data,
+            source_version=version,
+            path=path,
+        )
+        return
+    except SecretNotFoundError:
+        # Handle case where secret doesn't exist at all in destination vault
+        logging.info([
+            "replicate_vault_secret",
+            "Secret not found in destination",
+            path,
+        ])
+        _handle_missing_destination_secret(
+            dry_run=dry_run,
+            source_vault=source_vault,
+            dest_vault=dest_vault,
+            source_data=source_data,
+            source_version=version,
+            path=path,
+        )
+        return
+
+    # If we reach here, we successfully read the destination secret
+    if dest_version is None and version is None:
+        # v1 secrets don't have version
+        if source_data == dest_data:
+            # If the secret is the same in both vaults, we don't need
+            # to copy it again
+            return
+
+        write_dict = {"path": path, "data": source_data}
+        logging.info(["replicate_vault_secret", path])
+        if not dry_run:
+            # Using force=True to write the secret to force the vault client even
+            # if the data is the same as the previous version. This happens in
+            # some secrets even tho the library does not create it
+            dest_vault.write(secret=write_dict, decode_base64=False, force=True)
+    elif dest_version < version:
+        deep_copy_versions(
+            dry_run=dry_run,
+            source_vault=source_vault,
+            dest_vault=dest_vault,
+            current_dest_version=dest_version,
+            current_source_version=version,
+            path=path,
+        )
 
 
 def check_invalid_paths(
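
The new helper's v1 branch can be exercised in isolation. Below is a minimal test sketch (not part of the package): both vault clients are mocked, and the call shape and write arguments mirror the diff above; the secret path is a made-up example.

```python
# Hypothetical test sketch; assumes mocked vault clients and the helper shown above.
from unittest.mock import MagicMock

from reconcile.vault_replication import _handle_missing_destination_secret


def test_v1_secret_is_copied_when_destination_is_missing() -> None:
    source_vault = MagicMock()
    dest_vault = MagicMock()

    # source_version=None marks a KV v1 secret, so the helper writes the
    # already-retrieved source data directly instead of deep-copying versions.
    _handle_missing_destination_secret(
        dry_run=False,
        source_vault=source_vault,
        dest_vault=dest_vault,
        source_data={"key": "value"},
        source_version=None,
        path="app-sre/example",  # example path, not taken from the diff
    )

    dest_vault.write.assert_called_once_with(
        secret={"path": "app-sre/example", "data": {"key": "value"}},
        decode_base64=False,
        force=True,
    )
```

With `dry_run=True` the same call should produce no write at all, which matches the `if not dry_run:` guard in the helper.
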
tools/app_interface_reporter.py
CHANGED

@@ -4,7 +4,6 @@ import os
 import textwrap
 from collections.abc import Mapping, MutableMapping
 from datetime import (
-    UTC,
     datetime,
 )
 
@@ -29,6 +28,7 @@ from reconcile.cli import (
 )
 from reconcile.jenkins_job_builder import init_jjb
 from reconcile.utils.constants import DEFAULT_THREAD_POOL_SIZE
+from reconcile.utils.datetime_util import ensure_utc, utc_now
 from reconcile.utils.mr import CreateAppInterfaceReporter
 from reconcile.utils.runtime.environment import init_env
 from reconcile.utils.secret_reader import SecretReader
@@ -189,8 +189,8 @@ def get_apps_data(
     apps = queries.get_apps()
     jjb = init_jjb(secret_reader)
     jenkins_map = jenkins_base.get_jenkins_map()
-    time_limit = date - relativedelta(months=month_delta)
-    timestamp_limit = int(time_limit.
+    time_limit = ensure_utc(date) - relativedelta(months=month_delta)
+    timestamp_limit = int(time_limit.timestamp())
 
     secret_content = secret_reader.read_all({"path": DASHDOTDB_SECRET})
     dashdotdb_url = secret_content["url"]
@@ -411,7 +411,7 @@ def main(
 ) -> None:
     init_env(log_level=log_level, config_file=configfile)
 
-    now =
+    now = utc_now()
     apps = get_apps_data(now, thread_pool_size=thread_pool_size)
 
     reports = [Report(app, now).to_message() for app in apps]
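
These changes (and the tools/qontract_cli.py changes further down) lean on the new reconcile/utils/datetime_util.py module (+67 lines in this diff). Its contents are not shown here; judging only from the call sites, a minimal sketch of the three helpers used could look like the following. This is an assumption, not the packaged implementation.

```python
# Sketch only: plausible shapes for the helpers referenced by the call sites above.
from datetime import UTC, datetime


def utc_now() -> datetime:
    # Timezone-aware "now", replacing the scattered datetime.now(UTC) calls.
    return datetime.now(UTC)


def ensure_utc(dt: datetime) -> datetime:
    # Attach UTC to naive datetimes; convert aware ones to UTC.
    return dt.replace(tzinfo=UTC) if dt.tzinfo is None else dt.astimezone(UTC)


def from_utc_iso_format(value: str) -> datetime:
    # Parse an ISO-8601 string and normalize it to an aware UTC datetime.
    return ensure_utc(datetime.fromisoformat(value))
```

Under these semantics, `ensure_utc(date) - relativedelta(months=month_delta)` stays timezone-aware, so the subsequent `.timestamp()` call is unambiguous.
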
tools/cli_commands/systems_and_tools.py
CHANGED

@@ -132,6 +132,7 @@ from reconcile.typed_queries.vault import get_vault_instances
 from reconcile.utils import (
     gql,
 )
+from reconcile.utils.slack_api import is_gov_slack_workspace
 
 
 class SystemTool(BaseModel):
@@ -322,11 +323,14 @@ class SystemTool(BaseModel):
 
     @classmethod
     def init_from_slack_workspace(cls, s: SlackWorkspaceV1, enumeration: Any) -> Self:
+        # Automatically determine the correct Slack domain based on GOV_SLACK environment variable
+        domain = "slack-gov.com" if is_gov_slack_workspace() else "slack.com"
+
         return cls(
            system_type="slack",
            system_id=s.name,
            name=s.name,
-           url=f"https://{s.name}.
+           url=f"https://{s.name}.{domain}",
            description=s.description,
            enumeration=enumeration,
        )
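
The helper itself lives in reconcile/utils/slack_api.py (+24 -2 in this release) and its body is not part of this diff. Based on the comment above, a plausible sketch is shown below; the exact environment-variable handling is an assumption.

```python
# Sketch only: the real helper in reconcile.utils.slack_api may differ.
import os


def is_gov_slack_workspace() -> bool:
    # The diff comment ties the domain choice to a GOV_SLACK environment
    # variable; treat common truthy spellings as "use slack-gov.com".
    return os.environ.get("GOV_SLACK", "").strip().lower() in {"1", "true", "yes"}
```

With that in place, `init_from_slack_workspace` builds either `https://<name>.slack.com` or `https://<name>.slack-gov.com` without callers having to pass the domain explicitly.
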
tools/qontract_cli.py
CHANGED

@@ -13,7 +13,6 @@ import tempfile
 import textwrap
 from collections import defaultdict
 from datetime import (
-    UTC,
     datetime,
     timedelta,
 )
@@ -122,6 +121,7 @@ from reconcile.utils.binary import (
     binary,
     binary_version,
 )
+from reconcile.utils.datetime_util import from_utc_iso_format, utc_now
 from reconcile.utils.early_exit_cache import (
     CacheKey,
     CacheKeyWithDigest,
@@ -417,8 +417,8 @@ def get_upgrade_policies_data(
         upgrade_next_run = None
         upgrade_emoji = "💫"
         if upgrade_next_run:
-            dt =
-            now =
+            dt = from_utc_iso_format(upgrade_next_run)
+            now = utc_now()
             if dt > now:
                 upgrade_emoji = "⏰"
                 hours_ago = (now - dt).total_seconds() / 3600
@@ -841,7 +841,7 @@ alert_report(
         )
         sys.exit(1)
 
-    now =
+    now = utc_now()
     from_timestamp = int((now - timedelta(days=days)).timestamp())
     to_timestamp = int(now.timestamp())
 
@@ -887,7 +887,9 @@ alert_report(
             "Triggered": str(data.triggered_alerts),
             "Resolved": str(data.resolved_alerts),
             "Median time to resolve (h:mm:ss)": median_elapsed,
-            "Response Rate": f"{data.responsed_alerts / data.triggered_alerts * 100:.2f}%"
+            "Response Rate": f"{data.responsed_alerts / data.triggered_alerts * 100:.2f}%"
+            if data.triggered_alerts != 0
+            else "0.00%",
         })
 
     # TODO(mafriedm, rporres): Fix this
@@ -2272,7 +2274,7 @@ def app_interface_merge_queue(ctx: click.Context) -> None:
         "labels",
     ]
     merge_queue_data = []
-    now =
+    now = utc_now()
     for mr in merge_requests:
         item = {
             "id": f"[{mr['mr'].iid}]({mr['mr'].web_url})",
@@ -2281,7 +2283,7 @@ def app_interface_merge_queue(ctx: click.Context) -> None:
             + 1,  # adding 1 for human readability
             "approved_at": mr["approved_at"],
             "approved_span_minutes": (
-                now -
+                now - from_utc_iso_format(mr["approved_at"])
             ).total_seconds()
             / 60,
             "approved_by": mr["approved_by"],
@@ -2695,7 +2697,7 @@ def ec2_jenkins_workers(
     client = boto3.client("autoscaling")
     ec2 = boto3.resource("ec2")
     results = []
-    now =
+    now = utc_now()
     columns = [
         "type",
         "id",
@@ -2955,7 +2957,7 @@ def osd_component_versions(ctx: click.Context) -> None:
 @get.command()
 @click.pass_context
 def maintenances(ctx: click.Context) -> None:
-    now =
+    now = utc_now()
     maintenances = maintenances_gql.query(gql.get_api().query).maintenances or []
     data = [
         {
@@ -4097,7 +4099,9 @@ def sre_checkpoint_metadata(
 ) -> None:
     """Check an app path for checkpoint-related metadata."""
     data = queries.get_app_metadata(app_path)
-
+    vault_settings = get_app_interface_vault_settings()
+    secret_reader = create_secret_reader(use_vault=vault_settings.vault)
+
     app = data[0]
 
     if jiradef:
@@ -4110,7 +4114,14 @@ def sre_checkpoint_metadata(
     # Overrides for easier testing
     if jiraboard:
         board["name"] = jiraboard
-    report_invalid_metadata(
+    report_invalid_metadata(
+        app=app,
+        path=app_path,
+        board=board,
+        secret_reader=secret_reader,
+        parent=parent_ticket,
+        dry_run=dry_run,
+    )
 
 
 @root.command()
@@ -4839,11 +4850,12 @@ def top_talkers(ctx: click.Context, top: int) -> None:
         assert project.organization  # make mypy happy
         assert project.pk  # make mypy happy
 
+        now = utc_now()
         stat = client.project_statistics(
             organization_slug=project.organization.slug,
             project_pk=project.pk,
-            start=
-            end=
+            start=now - timedelta(hours=24),
+            end=now,
         )
         stats.append((project, stat))
 
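
The alert_report change above guards the response-rate division so that a period with no triggered alerts no longer raises ZeroDivisionError. A standalone illustration of the guarded expression (names mirror the diff; the helper function is only for the example):

```python
# Standalone illustration of the guarded f-string from alert_report above.
def response_rate(responsed_alerts: int, triggered_alerts: int) -> str:
    return (
        f"{responsed_alerts / triggered_alerts * 100:.2f}%"
        if triggered_alerts != 0
        else "0.00%"
    )


assert response_rate(3, 4) == "75.00%"
assert response_rate(0, 0) == "0.00%"  # the unguarded old line would have raised ZeroDivisionError
```
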
{qontract_reconcile-0.10.2.dev345.dist-info → qontract_reconcile-0.10.2.dev408.dist-info}/WHEEL
RENAMED

File without changes