threatstack-agent-ruby 0.2.1 → 0.2.2

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: d56612a7470c1f394b66c6473479883be0ddc44810bebd77206acd42da1b31a6
4
- data.tar.gz: 0d1bbad69a0ef6bb2ab53b662a8f004d06d353b2b02746ffb5616731eaef1c90
3
+ metadata.gz: fa1398b7851406159b1b63a72d7c7f2b14b40e5929eee4328fb0177f50c623fe
4
+ data.tar.gz: 625e1357834a009d1584337854da8bc03b3af29d6f0d8acaa50d3863309faf9b
5
5
  SHA512:
6
- metadata.gz: 0c98e82db4c39fbfd7d930049b9ffe84c6efeda17e5f6b97ea971e240ffaec39c45683630af92ba95626a6571e0d9ecc02d01431c1c70284959b240576bb3e03
7
- data.tar.gz: 767fb9d9a56bb8b4efdd9d7d9a28a373e86851550ba8c9e4401d4b2803d6bda1964ba0f47b566c25c12b54f8db5df49c48249bc12734f29f6dcb57a7e706879c
6
+ metadata.gz: f8695fbc63b6e46234cdebd7374f0d4260a72921a370f1fab383c143257bf1a63ae38a601e5c77b53c13f26e72ed4ed61bbc4287ea4fab783242994e489baf83
7
+ data.tar.gz: aa23290287624820d3412bf543d1a118a9b5e298abc8259a88c1f24699da09b687eccb410ce16caf5fa3fccb84208bae01f61b084660c75c03cecea53079dc0a
@@ -0,0 +1,4 @@
1
+ python-gitlab==1.11.0
2
+ requests_mock==1.7.0
3
+ coverage==4.5.4
4
+ pytest==4.6.11
data/ci/trigger.py ADDED
@@ -0,0 +1,408 @@
1
+ #!/usr/bin/env python
2
+ # %%
3
+
4
+ import argparse
5
+ import sys
6
+ import urllib.parse
7
+ from functools import lru_cache
8
+ from time import sleep
9
+ from typing import Dict, List, Optional
10
+
11
+ import gitlab
12
+ import requests
13
+
14
# Pipeline/job status strings as reported by the GitLab API.
STATUS_FAILED = 'failed'
STATUS_MANUAL = 'manual'
STATUS_CANCELED = 'canceled'
STATUS_SUCCESS = 'success'
STATUS_SKIPPED = 'skipped'

# Valid values for the --on-manual command line option.
ACTION_FAIL = 'fail'
ACTION_PASS = 'pass'
ACTION_PLAY = 'play'

# Statuses in which polling stops: the pipeline will not progress further
# on its own.
# see https://docs.gitlab.com/ee/ci/pipelines.html for states
finished_states = [
    STATUS_FAILED,
    STATUS_MANUAL,
    STATUS_CANCELED,
    STATUS_SUCCESS,
    STATUS_SKIPPED,
]
32
+
33
+
34
class PipelineFailure(Exception):
    """Raised when the triggered pipeline fails or its status cannot be polled.

    Attributes:
        return_code: suggested process exit code (1 = pipeline failed,
            2 = polling failed; see callers).
        pipeline_id: id of the affected pipeline, if known.
    """

    def __init__(self, return_code=None, pipeline_id=None):
        # Populate the base Exception message so str(e) and tracebacks
        # are informative (the original left it empty).
        super().__init__(
            f'pipeline {pipeline_id} failed with return code {return_code}')
        self.return_code = return_code
        self.pipeline_id = pipeline_id
38
+
39
+
40
@lru_cache(maxsize=None)
def get_gitlab(url, api_token, verifyssl):
    """Return a python-gitlab client for *url* authenticated with *api_token*.

    Cached so repeated calls with the same arguments reuse one client.
    """
    return gitlab.Gitlab(url, private_token=api_token, ssl_verify=verifyssl)
43
+
44
+
45
@lru_cache(maxsize=None)
def get_project(url, api_token, proj_id, verifyssl):
    """Return the python-gitlab project object for *proj_id* (cached)."""
    return get_gitlab(url, api_token, verifyssl).projects.get(proj_id)
48
+
49
+
50
def parse_args(args: List[str]):
    """Parse the command line arguments for the trigger tool.

    ``-h`` is repurposed as ``--host``, so argparse's automatic help flag
    is disabled (``add_help=False``) and re-added explicitly as ``--help``.

    Returns the populated argparse.Namespace.
    """
    parser = argparse.ArgumentParser(
        description='Tool to trigger and monitor a remote GitLab pipeline',
        add_help=False)
    parser.add_argument(
        '-a', '--api-token', help='personal access token (not required when running detached)')
    parser.add_argument('-d', '--detached', action='store_true', default=False)
    parser.add_argument('-e', '--env', action='append')
    parser.add_argument('-h', '--host', default='gitlab.com')
    parser.add_argument(
        '--help', action='help', help='show this help message and exit')
    parser.add_argument('--jobs', help='comma-separated list of manual jobs to run on `--on-manual play`')
    # fixed typo in user-facing help text: 'pipline' -> 'pipeline'
    parser.add_argument('-o', '--output', action='store_true', default=False, help='Show triggered pipeline job output upon completion')
    parser.add_argument('--on-manual', default=ACTION_FAIL, choices=[ACTION_FAIL, ACTION_PASS, ACTION_PLAY], help='action if "manual" status occurs')
    parser.add_argument('-p', '--pipeline-token', required=True, help='pipeline token')
    parser.add_argument('--pid', type=int, default=None, help='optional pipeline id of remote pipeline to be retried (implies -r)')
    parser.add_argument('-r', '--retry', action='store_true', default=False, help='retry latest pipeline for given TARGET_REF')
    parser.add_argument('-s', '--sleep', type=int, default=5)
    parser.add_argument('-t', '--target-ref', required=True, help='target ref (branch, tag, commit)')
    parser.add_argument('-u', '--url-path', default='/api/v4/projects')
    parser.add_argument('-v', '--verifyssl', type=str2bool, default=True, help='Activate the ssl verification, set false for Self-signed certificate')
    parser.add_argument('--verbose', action='store_true', default=False, help='verbose logging of responses')
    parser.add_argument('project_id')
    parsed_args = parser.parse_args(args)
    return parsed_args
75
+
76
+
77
def str2bool(v):
    """Interpret a command line value as a boolean.

    Accepts actual bools unchanged; otherwise matches (case-insensitively)
    the usual yes/no spellings.  Raises argparse.ArgumentTypeError for
    anything unrecognised, so argparse reports a clean usage error.
    """
    if isinstance(v, bool):
        return v
    lowered = v.lower()
    if lowered in ('yes', 'true', 't', 'y', '1'):
        return True
    if lowered in ('no', 'false', 'f', 'n', '0'):
        return False
    raise argparse.ArgumentTypeError('Boolean value expected.')
86
+
87
+
88
def parse_env(envs: List[str]) -> Dict[str, str]:
    """Convert ``KEY=VALUE`` strings into GitLab trigger API form fields.

    Each entry becomes ``'variables[KEY]' -> 'VALUE'``.  Only the first
    ``=`` splits, so values may themselves contain ``=``.

    Note: the original annotated the return type as List[Dict], but the
    function has always returned a single dict; the annotation is fixed.

    Raises ValueError if an entry contains no ``=``.
    """
    res: Dict[str, str] = {}
    for e in envs:
        k, v = e.split('=', 1)
        res[f'variables[{k}]'] = v
    return res
94
+
95
+
96
def create_pipeline(project_url, pipeline_token, ref, verifyssl, variables=None, verbose=False) -> Optional[int]:
    """Trigger a new pipeline for *ref* via the trigger-token endpoint.

    POSTs to {project_url}/trigger/pipeline with the trigger token, ref and
    any extra form variables.  Fails (AssertionError) unless the API
    answers 201.  Returns the new pipeline id, or None if the response
    JSON carries no 'id'.

    Note: the default for *variables* was the mutable-default-argument
    anti-pattern ({}); it is now None and copied per call.
    """
    data = dict(variables) if variables else {}
    data.update(token=pipeline_token, ref=ref)
    api_url = f'{project_url}/trigger/pipeline'
    print(f'Request create_pipeline: url={api_url} data={data}')
    r = requests.post(
        api_url,
        data=data,
        verify=verifyssl
    )
    if verbose:
        print(f'Response create_pipeline: {r.text}')
    assert r.status_code == 201, f'Failed to create pipeline, api returned status code {r.status_code}'
    pid = r.json().get('id', None)
    print(f'Pipeline created (id: {pid})')
    return pid
112
+
113
+
114
def get_pipeline(project_url, api_token, pid, verifyssl, verbose=False):
    """Fetch a single pipeline by id and return the decoded JSON dict.

    GETs {project_url}/pipelines/{pid} authenticated via the
    PRIVATE-TOKEN header.  Fails (AssertionError) on any non-200 response.
    """
    api_url = f'{project_url}/pipelines/{pid}'
    print(f'Request get_pipeline: url={api_url}')
    r = requests.get(
        api_url,
        headers={
            'PRIVATE-TOKEN': api_token
        },
        verify=verifyssl
    )
    if verbose:
        print(f'Response get_pipeline: {r.text}')
    assert r.status_code == 200, f'expected status code 200, was {r.status_code}'
    return r.json()
128
+
129
+
130
def get_last_pipeline(project_url, api_token, ref, verifyssl, verbose=False):
    """Return the most recent pipeline (JSON dict) for *ref*.

    Pipelines are requested sorted by id descending, so the first list
    element is the newest.  Fails (AssertionError) on a non-200 response
    or when no pipeline exists for the ref.
    """
    api_url = f'{project_url}/pipelines'
    print(f'Request get_last_pipeline: url={api_url}')
    r = requests.get(
        api_url,
        headers={
            'PRIVATE-TOKEN': api_token
        },
        params=dict(
            ref=ref,
            order_by='id',
            sort='desc'
        ),
        verify=verifyssl
    )
    if verbose:
        print(f'Response get_last_pipeline: {r.text}')
    assert r.status_code == 200, f'expected status code 200, was {r.status_code}'
    res = r.json()
    assert len(res) > 0, f'expected to find at least one pipeline for ref {ref}'
    return res[0]
151
+
152
+
153
def get_pipeline_jobs(project_url, api_token, pipeline, verifyssl, verbose=False):
    """Return the decoded JSON list of jobs belonging to *pipeline*.

    Fails (AssertionError) on any non-200 response.
    """
    api_url = f'{project_url}/pipelines/{pipeline}/jobs'
    print(f'Request get_pipeline_jobs: url={api_url}')
    auth_headers = {'PRIVATE-TOKEN': api_token}
    r = requests.get(api_url, headers=auth_headers, verify=verifyssl)
    if verbose:
        print(f'Response get_pipeline_jobs: {r.text}')
    assert r.status_code == 200, f'expected status code 200, was {r.status_code}'
    return r.json()
168
+
169
+
170
def get_job_trace(project_url, api_token, job, verifyssl, verbose=False):
    """Return the log (trace) of a single job as text.

    GETs {project_url}/jobs/{job}/trace; fails (AssertionError) on a
    non-200 response.
    """
    api_url = f'{project_url}/jobs/{job}/trace'
    print(f'Request get_job_trace: url={api_url}')
    r = requests.get(
        api_url,
        headers={
            'PRIVATE-TOKEN': api_token
        },
        verify=verifyssl
    )
    if verbose:
        print(f'Response get_job_trace: {r.text}')
    assert r.status_code == 200, f'expected status code 200, was {r.status_code}'
    # force utf-8 so r.text decodes the trace consistently regardless of
    # the charset the server advertises
    r.encoding = 'utf-8'
    return r.text
185
+
186
+
187
def get_sha(project_url, api_token, ref, verifyssl, verbose=False) -> Optional[str]:
    """Return the commit sha at the tip of *ref*.

    Fails (AssertionError) on a non-200 response; returns None when the
    response JSON carries no 'id'.
    """
    auth_headers = {'PRIVATE-TOKEN': api_token}
    r = requests.get(
        f'{project_url}/repository/commits/{ref}',
        headers=auth_headers,
        verify=verifyssl,
    )
    if verbose:
        print(f'Response get_sha: {r.text}')
    assert r.status_code == 200, f'expected status code 200, was {r.status_code}'
    body = r.json()
    return body.get('id')
201
+
202
+
203
def get_project_id(project_url, api_token, project_name, verifyssl, verbose=False):
    """Resolve a project path (e.g. 'group/project') to its numeric id.

    The name is URL-quoted with safe='' because project paths contain '/'.
    Fails (AssertionError) on a non-200 response.  Returns the id as str.
    """
    assert project_name is not None, 'expected TRIGGER_PROJECT_NAME defined'
    api_url = f"{project_url}/{urllib.parse.quote(project_name, safe='')}"
    print(f'Request get_project_id: url={api_url}')
    r = requests.get(
        api_url,
        headers={
            'PRIVATE-TOKEN': api_token
        },
        verify=verifyssl
    )
    if verbose:
        print(f'Response get_project_id: {r.text}')
    assert r.status_code == 200, f'expected status code 200, was {r.status_code}'
    res = r.json()
    return str(res['id'])
219
+
220
+
221
def isint(x):
    """Return True if ``int(x)`` succeeds, False otherwise.

    Used to decide whether a project id is numeric or a name that still
    needs resolving.  Narrowed from a bare ``except Exception`` to the two
    exceptions int() actually raises for bad input.
    """
    try:
        int(x)
    except (TypeError, ValueError):
        return False
    return True
228
+
229
+
230
def handle_manual_pipeline(args, pipeline, proj, status):
    """Play manual jobs of a pipeline that is waiting in 'manual' state.

    If --jobs was given, every listed manual job is played in the order
    listed; otherwise only the first manual job found is played.  Returns
    the pipeline status, cleared to None when jobs were played (the
    pipeline will continue, so polling must resume).

    Only the first page (100) of jobs is inspected — TODO confirm that is
    sufficient for the pipelines this runs against.
    """
    # fixed: `[item for item in split(...)]` was an identity comprehension
    defined_jobs = args.jobs.split(',') if args.jobs else []
    manual_jobs = []
    for job in pipeline.jobs.list(per_page=100):
        if job.status == STATUS_MANUAL:
            # pick the first manual job and exit the loop
            if len(defined_jobs) == 0:
                manual_jobs.append(job)
                break
            elif job.name in defined_jobs:
                manual_jobs.append(job)
    if len(manual_jobs) == 0:
        print('\nNo manual jobs found!')
    else:
        # wipe status, because the pipeline will continue after playing the manual job
        status = None
        if len(defined_jobs) > 0:
            # sort by name of --jobs argument to preserve the order of execution
            manual_jobs.sort(key=lambda j: defined_jobs.index(j.name))
        for manual_job in manual_jobs:
            print(f'\nPlaying manual job "{manual_job.name}" from stage "{manual_job.stage}"...')
            proj.jobs.get(manual_job.id, lazy=True).play()
    return status
253
+
254
+
255
def check_pipeline_status(args, pid, proj, project_url):
    """Poll pipeline *pid* once, tolerating transient API failures.

    Fetches the pipeline via python-gitlab and, when the status is
    'manual'/'skipped' and --on-manual=play, delegates to
    handle_manual_pipeline (which may clear the status so polling
    continues).  Retries the fetch on exceptions; after the retries are
    exhausted it prints diagnostics and raises
    PipelineFailure(return_code=2).

    Returns (pipeline, status); both are None if every attempt failed
    before reaching the API (not expected in practice).
    """
    pipeline = None
    status = None
    max_retries = 5
    retries_left = max_retries
    # NOTE(review): with `while retries_left >= 0` this actually attempts
    # up to max_retries + 1 times before raising, although the error
    # message says max_retries — confirm intended count.
    while retries_left >= 0:
        try:
            pipeline = proj.pipelines.get(pid)
            status = pipeline.status
            if status in [STATUS_MANUAL, STATUS_SKIPPED] and args.on_manual == ACTION_PLAY:
                status = handle_manual_pipeline(args, pipeline, proj, status)

            # reset retries_left if the status call succeeded (fail only on consecutive failures)
            retries_left = max_retries
            break
        except Exception as e:
            print(f'\nPolling for status failed: {e}')
            if retries_left == 0:
                print(f'Polling failed {max_retries} consecutive times. Please verify the pipeline url:')
                print(f' curl -s -X GET -H "PRIVATE-TOKEN: <private token>" {project_url}/pipelines/{pid}')
                print('check your api token, or check if there are connection issues.')
                print()
                raise PipelineFailure(return_code=2, pipeline_id=pid)
            retries_left -= 1
    return pipeline, status
280
+
281
+
282
def trigger(args: List[str]) -> int:
    """Trigger (or retry) a remote GitLab pipeline and optionally wait for it.

    Workflow: parse/validate arguments, resolve a non-numeric project id
    to its numeric id, then either retry an existing pipeline (-r/--pid)
    or trigger a fresh one.  Unless --detached is set, poll until the
    pipeline reaches a finished state and optionally (-o) dump the job
    logs.

    Returns the pipeline id on success (or 'manual' + --on-manual=pass).
    Raises PipelineFailure(return_code=1) when the pipeline ends in any
    other state, or PipelineFailure(return_code=2) if polling fails
    repeatedly (see check_pipeline_status).
    """
    args = parse_args(args)

    # NOTE: asserts are stripped under `python -O`; kept as-is since the
    # whole script validates this way.
    assert args.pipeline_token, 'pipeline token must be set'
    assert args.project_id, 'project id must be set'
    assert args.host, 'host must be set'
    assert args.url_path, 'url path must be set'
    assert args.target_ref, 'must provide target ref'
    assert args.sleep > 0, 'sleep parameter must be > 0'

    ref = args.target_ref
    proj_id = args.project_id
    pipeline_token = args.pipeline_token
    verifyssl = args.verifyssl
    verbose = args.verbose

    # Allow -h to be a bare hostname or a full http(s) URL.
    if args.host.startswith('http://') or args.host.startswith('https://'):
        base_url = args.host
    else:
        base_url = f'https://{args.host}'

    # A non-numeric project id is a project path and must be resolved first.
    if not isint(proj_id):
        assert args.api_token is not None, 'finding project id by name requires an api token (-a parameter missing)'
        proj_id = get_project_id(f"{base_url}{args.url_path}", args.api_token, proj_id, verifyssl, verbose)

    project_url = f"{base_url}{args.url_path}/{proj_id}"
    variables = {}
    if args.env is not None:
        variables = parse_env(args.env)

    if args.retry or args.pid is not None:
        # Retry path: reuse an existing pipeline when possible.
        assert args.api_token is not None, 'retry checks require an api token (-a parameter missing)'

        if args.pid is None:
            print(f"Looking for pipeline '{ref}' for project id {proj_id} ...")
            pipeline = get_last_pipeline(project_url, args.api_token, ref, verifyssl, verbose)
            pid = pipeline.get('id')
        else:
            pid = args.pid
            print(f"Fetching for pipeline '{pid}' for project id {proj_id} ...")
            pipeline = get_pipeline(project_url, args.api_token, pid, verifyssl, verbose)

        status = pipeline.get('status')
        assert pid, 'refresh pipeline id must not be none'
        assert status, 'refresh pipeline status must not be none'

        # A pipeline is 'outdated' when the ref has moved past the commit
        # the pipeline ran against — in that case re-run, don't retry.
        pipeline_sha = pipeline.get('sha')
        ref_tip_sha = get_sha(project_url, args.api_token, ref, verifyssl, verbose)
        outdated = pipeline_sha != ref_tip_sha

        outdated_str = 'outdated' if outdated else 'up to date'
        print(f"Found {outdated_str} pipeline {pid} with status '{status}'")

        if outdated:
            print(f"Pipeline {pid} for {ref} outdated (sha: {pipeline_sha[:6]}, tip is {ref_tip_sha[:6]}) - re-running ...")
            pid = create_pipeline(project_url, pipeline_token, ref, verifyssl, variables, verbose)
        elif status == STATUS_SUCCESS:
            print(f"Pipeline {pid} already in state 'success' - re-running ...")
            pid = create_pipeline(project_url, pipeline_token, ref, verifyssl, variables, verbose)
        else:
            print(f"Retrying pipeline {pid} ...")
            proj = get_project(base_url, args.api_token, proj_id, verifyssl)
            proj.pipelines.get(pid).retry()

    else:
        print(f"Triggering pipeline for ref '{ref}' for project id {proj_id}")
        pid = create_pipeline(project_url, pipeline_token, ref, verifyssl, variables, verbose)
    try:
        proj = get_project(base_url, args.api_token, proj_id, verifyssl)
        print(f"See pipeline at {proj.web_url}/pipelines/{pid}")
    except Exception:
        # get_projects can fail if no api_token has been provided
        # since we're only logging here we simply ignore this
        pass

    assert pid is not None, 'must have a valid pipeline id'

    if args.detached:
        if args.on_manual == ACTION_PLAY:  # detached for manual pipelines
            proj = get_project(base_url, args.api_token, proj_id, verifyssl)
            check_pipeline_status(args, pid, proj, project_url)
        print('Detached mode: not monitoring pipeline status - exiting now.')
        return pid

    # after this point (i.e. not running detached) we require api_token to be set
    api_token = args.api_token
    assert api_token is not None, 'pipeline status checks require an api token (-a parameter missing)'

    print(f"Waiting for pipeline {pid} to finish ...")

    status = None
    pipeline = None
    proj = get_project(base_url, api_token, proj_id, verifyssl)

    # Poll until the pipeline reaches a finished state, sleeping between
    # polls; transient API failures are retried inside check_pipeline_status.
    while status not in finished_states:
        pipeline, status = check_pipeline_status(args, pid, proj, project_url)

        print('.', end='', flush=True)
        sleep(args.sleep)

    print()
    if args.output:
        # Dump every job's trace after completion (-o/--output).
        jobs = get_pipeline_jobs(project_url, api_token, pid, verifyssl, verbose)
        print(f'Pipeline {pid} job output:')
        for job in jobs:
            name = job['name']
            print(f'Job: {name}')
            print(get_job_trace(project_url, api_token, job['id'], verifyssl, verbose))
            print()

    if status == STATUS_SUCCESS:
        print('Pipeline succeeded')
        return pid
    elif status == STATUS_MANUAL and args.on_manual == ACTION_PASS:
        print('Pipeline status is "manual", action "pass"')
        return pid
    else:
        print(f"Pipeline failed! Check details at '{pipeline.web_url}'")
        raise PipelineFailure(return_code=1, pipeline_id=pid)
401
+
402
+
403
if __name__ == "__main__":  # pragma: nocover
    # CLI entry point: exit 0 on success, otherwise propagate the return
    # code carried by PipelineFailure (1 = pipeline failed, 2 = polling
    # failed).
    try:
        trigger(sys.argv[1:])
        sys.exit(0)
    except PipelineFailure as e:
        sys.exit(e.return_code)
@@ -60,6 +60,15 @@ int libinjection_sqli(const char* s, size_t slen, char fingerprint[]);
60
60
  */
61
61
  int libinjection_xss(const char* s, size_t slen);
62
62
 
63
+ /** ALPHA version of path traversal detector.
64
+ *
65
+ * \param[in] s input string, may contain nulls, does not need to be null-terminated
66
+ * \param[in] slen input string length
67
+ * \return 1 if path traversal found, 0 if benign
68
+ *
69
+ */
70
+ int libinjection_pathtraversal(const char *s, size_t slen);
71
+
63
72
  LIBINJECTION_END_DECLS
64
73
 
65
74
  #endif /* LIBINJECTION_H */
@@ -0,0 +1,43 @@
1
+ /* Description: A program to check whether the input
2
+ * string contains any path traveral attacks
3
+ */
4
+ #include <stdlib.h>
5
+ #include <string.h>
6
+ #include <ctype.h>
7
+
8
+ #include "libinjection.h"
9
+ #include "libinjection_pathtraversal_data.h"
10
+
11
/* Return a newly-allocated, NUL-terminated lower-case copy of the first
 * slen bytes of s.  The caller owns the buffer and must free() it.
 * Returns NULL on allocation failure.
 */
char* to_lower_string(const char *s, size_t slen)
{
    /* +1 for the NUL terminator: the caller passes this buffer to
     * strstr(), which requires a terminated C string; the original
     * allocated only slen bytes and never terminated, causing a heap
     * over-read. */
    char *lower_s = malloc(slen + 1);
    if (lower_s == NULL) {
        return NULL;
    }
    for (size_t i = 0; i < slen; i++) {
        /* cast to unsigned char: tolower() on a negative char value is
         * undefined behavior */
        lower_s[i] = (char)tolower((unsigned char)s[i]);
    }
    lower_s[slen] = '\0';
    return lower_s;
}
20
+
21
+ /**
22
+ * API that checks strings for path traversal payloads
23
+ * Returns 1 when a payload is found, 0 otherwise.
24
+ */
25
+ int libinjection_pathtraversal(const char *s, size_t slen)
26
+ {
27
+ if (slen == 0) {
28
+ return 0;
29
+ }
30
+ // to lower string
31
+ char *lower_s = to_lower_string(s, slen);
32
+ for (unsigned long i = 0; i < path_traversal_payloads_size; i++) {
33
+ // check if input string contains one of the dangerous paths
34
+ if (strstr(lower_s, path_traversal_payloads[i])) {
35
+ // deallocate
36
+ free(lower_s);
37
+ return 1;
38
+ }
39
+ }
40
+ // deallocate
41
+ free(lower_s);
42
+ return 0;
43
+ }