datadog-sdk-testing 0.3.1 → 0.3.2

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 6380de3cb3b428ea882d15bcd718fc2961eb2725
- data.tar.gz: 7cce0ae89c7a1f548b50e0d026025b2c41565000
+ metadata.gz: ce8e9b3c1c4509524ebe8fe41224a2e94ceae3b6
+ data.tar.gz: 0a5e5afe0e0acb3e9a4d9d3923035d986e707e1c
  SHA512:
- metadata.gz: 1ed49b6cd0016bedbf95b57c31f855bc6246fc0be7f8b00b3df418cbe7241d136a9943909d26201d0d93cd0fb28569e7d3de556ba8084081846d1a975d39aae8
- data.tar.gz: 0ba4e311804e7050bf4f9de75c4cd5148fa438c472a29670312cc59c2cefdb663c8689271e8f9fbac108dd9fa10181a3f2f3cf3928a757f3671116ec365eed65
+ metadata.gz: e74a589991b317cfb61a40fd0ec70ecb8109c7e1d10f028d8dc2c781dae58ad100fb687d17a1e9dc76fdf4cd071daaeafd17c8a6e04782907a87d5898d95c011
+ data.tar.gz: 20d91d341ea1b33957b53f3f6f94075d25c9cfc320bd29504327c5b9538652b6db9ade51ff5ead41f4d2ea14592bd9525c8897bd243815bd001c69f0c701dd0d
data/lib/config/datadog.conf ADDED
@@ -0,0 +1,247 @@
+ [Main]
+
+ # The host of the Datadog intake server to send Agent data to
+ dd_url: https://app.datadoghq.com
+
+ # If you need a proxy to connect to the Internet, provide the settings here (default: disabled)
+ # proxy_host: my-proxy.com
+ # proxy_port: 3128
+ # proxy_user: user
+ # proxy_password: password
+ # To be used with some proxys that return a 302 which make curl switch from POST to GET
+ # See http://stackoverflow.com/questions/8156073/curl-violate-rfc-2616-10-3-2-and-switch-from-post-to-get
+ # proxy_forbid_method_switch: no
+
+ # If you run the agent behind haproxy, you might want to enable this
+ # skip_ssl_validation: no
+
+ # The Datadog api key to associate your Agent's data with your organization.
+ # Can be found here:
+ # https://app.datadoghq.com/account/settings
+ # This can be a comma-separated list of api keys.
+ # (default: None, the agent doesn't start without it)
+ api_key:
+
+ # Force the hostname to whatever you want. (default: auto-detected)
+ # hostname: mymachine.mydomain
+
+ # Set the host's tags (optional)
+ # tags: mytag, env:prod, role:database
+
+ # Set timeout in seconds for outgoing requests to Datadog. (default: 20)
+ # When a request timeout, it will be retried after some time.
+ # It will only be deleted if the forwarder queue becomes too big. (30 MB by default)
+ # forwarder_timeout: 20
+
+ # Set timeout in seconds for integrations that use HTTP to fetch metrics, since
+ # unbounded timeouts can potentially block the collector indefinitely and cause
+ # problems!
+ # default_integration_http_timeout: 9
+
+ # Add one "dd_check:checkname" tag per running check. It makes it possible to slice
+ # and dice per monitored app (= running Agent Check) on Datadog's backend.
+ # create_dd_check_tags: no
+
+ # Collect AWS EC2 custom tags as agent tags (requires an IAM role associated with the instance)
+ # collect_ec2_tags: no
+ # Incorporate security-groups into tags collected from AWS EC2
+ # collect_security_groups: no
+
+ # Enable Agent Developer Mode
+ # Agent Developer Mode collects and sends more fine-grained metrics about agent and check performance
+ # developer_mode: no
+ # In developer mode, the number of runs to be included in a single collector profile
+ # collector_profile_interval: 20
+
+ # use unique hostname for GCE hosts, see http://dtdg.co/1eAynZk
+ # when not specified, default: no
+ gce_updated_hostname: yes
+
+ # Set the threshold for accepting points to allow anything
+ # within recent_point_threshold seconds (default: 30)
+ # recent_point_threshold: 30
+
+ # Use mount points instead of volumes to track disk and fs metrics
+ # DEPRECATED: use conf.d/disk.yaml instead to configure it
+ # use_mount: no
+
+ # Forwarder listening port
+ # listen_port: 17123
+
+ # Graphite listener port
+ # graphite_listen_port: 17124
+
+ # Additional directory to look for Datadog checks (optional)
+ # additional_checksd: /etc/dd-agent/checks.d/
+
+ # Allow non-local traffic to this Agent
+ # This is required when using this Agent as a proxy for other Agents
+ # that might not have an internet connection
+ # For more information, please see
+ # https://github.com/DataDog/dd-agent/wiki/Network-Traffic-and-Proxy-Configuration
+ # non_local_traffic: no
+
+ # Select the Tornado HTTP Client to be used in the Forwarder,
+ # between curl client and simple http client (default: simple http client)
+ # use_curl_http_client: no
+
+ # The loopback address the Forwarder and Dogstatsd will bind.
+ # Optional, it is mainly used when running the agent on Openshift
+ # bind_host: localhost
+
+ # If enabled the collector will capture a metric for check run times.
+ # check_timings: no
+
+ # If you want to remove the 'ww' flag from ps catching the arguments of processes
+ # for instance for security reasons
+ # exclude_process_args: no
+
+ # histogram_aggregates: max, median, avg, count
+ # histogram_percentiles: 0.95
+
+ # ========================================================================== #
+ # Service Discovery                                                          #
+ # See https://github.com/DataDog/dd-agent/wiki/Service-Discovery for details #
+ # ========================================================================== #
+ #
+ # Service discovery allows the agent to look for running services
+ # and load a configuration object for the one it recognizes.
+ # This feature is disabled by default.
+ # Uncomment this line to enable it (works for docker containers only for now).
+ # service_discovery_backend: docker
+ #
+ # Define which key/value store must be used to look for configuration templates.
+ # Default is etcd. Consul is also supported.
+ # sd_config_backend: etcd
+ #
+ # Settings for connecting to the service discovery backend.
+ # sd_backend_host: 127.0.0.1
+ # sd_backend_port: 4001
+ #
+ # By default, the agent will look for the configuration templates under the
+ # `/datadog/check_configs` key in the back-end. If you wish otherwise, uncomment this option
+ # and modify its value.
+ # sd_template_dir: /datadog/check_configs
+ #
+ # ========================================================================== #
+ # Other                                                                      #
+ # ========================================================================== #
+ #
+ # In some environments we may have the procfs file system mounted in a
+ # miscellaneous location. The procfs_path configuration paramenter allows
+ # us to override the standard default location '/proc'
+ # procfs_path: /proc
+
+ # ========================================================================== #
+ # DogStatsd configuration                                                    #
+ # DogStatsd is a small server that aggregates your custom app metrics. For   #
+ # usage information, check out http://docs.datadoghq.com/guides/dogstatsd/   #
+ # ========================================================================== #
+
+ # If you don't want to enable the DogStatsd server, set this option to no
+ # use_dogstatsd: yes
+
+ # Make sure your client is sending to the same port.
+ # dogstatsd_port: 8125
+
+ # By default dogstatsd will post aggregate metrics to the Agent (which handles
+ # errors/timeouts/retries/etc). To send directly to the datadog api, set this
+ # to https://app.datadoghq.com.
+ # dogstatsd_target: http://localhost:17123
+
+ # If you want to forward every packet received by the dogstatsd server
+ # to another statsd server, uncomment these lines.
+ # WARNING: Make sure that forwarded packets are regular statsd packets and not "dogstatsd" packets,
+ # as your other statsd server might not be able to handle them.
+ # statsd_forward_host: address_of_own_statsd_server
+ # statsd_forward_port: 8125
+
+ # you may want all statsd metrics coming from this host to be namespaced
+ # in some way; if so, configure your namespace here. a metric that looks
+ # like `metric.name` will instead become `namespace.metric.name`
+ # statsd_metric_namespace:
+
+ # By default, dogstatsd supports only plain ASCII packets. However, most
+ # (dog)statsd client support UTF8 by encoding packets before sending them
+ # this option enables UTF8 decoding in case you need it.
+ # However, it comes with a performance overhead of ~10% in the dogstatsd
+ # server. This will be taken care of properly in the new gen agent core.
+ # utf8_decoding: false
+
+ # ========================================================================== #
+ # Service-specific configuration                                             #
+ # ========================================================================== #
+
+ # -------------------------------------------------------------------------- #
+ # Ganglia                                                                    #
+ # -------------------------------------------------------------------------- #
+
+ # Ganglia host where gmetad is running
+ # ganglia_host: localhost
+
+ # Ganglia port where gmetad is running
+ # ganglia_port: 8651
+
+ # -------------------------------------------------------------------------- #
+ # Dogstream (log file parser)                                                #
+ # -------------------------------------------------------------------------- #
+
+ # Comma-separated list of logs to parse and optionally custom parsers to use.
+ # The form should look like this:
+ #
+ # dogstreams: /path/to/log1:parsers_module:custom_parser, /path/to/log2, /path/to/log3, ...
+ #
+ # Or this:
+ #
+ # dogstreams: /path/to/log1:/path/to/my/parsers_module.py:custom_parser, /path/to/log2, /path/to/log3, ...
+ #
+ # Each entry is a path to a log file and optionally a Python module/function pair
+ # separated by colons.
+ #
+ # Custom parsers should take a 2 parameters, a logger object and
+ # a string parameter of the current line to parse. It should return a tuple of
+ # the form:
+ # (metric (str), timestamp (unix timestamp), value (float), attributes (dict))
+ # where attributes should at least contain the key 'metric_type', specifying
+ # whether the given metric is a 'counter' or 'gauge'.
+ #
+ # Unless parsers are specified with an absolute path, the modules must exist in
+ # the Agent's PYTHONPATH. You can set this as an environment variable when
+ # starting the Agent. If the name of the custom parser function is not passed,
+ # 'parser' is assumed.
+ #
+ # If this value isn't specified, the default parser assumes this log format:
+ # metric timestamp value key0=val0 key1=val1 ...
+ #
+
+ # ========================================================================== #
+ # Custom Emitters                                                            #
+ # ========================================================================== #
+
+ # Comma-separated list of emitters to be used in addition to the standard one
+ #
+ # Expected to be passed as a comma-separated list of colon-delimited
+ # name/object pairs.
+ #
+ # custom_emitters: /usr/local/my-code/emitters/rabbitmq.py:RabbitMQEmitter
+ #
+ # If the name of the emitter function is not specified, 'emitter' is assumed.
+
+
+ # ========================================================================== #
+ # Logging
+ # ========================================================================== #
+
+ # log_level: INFO
+
+ collector_log_file: /tmp/collector.log
+ forwarder_log_file: /tmp/forwarder.log
+ dogstatsd_log_file: /tmp/dogstatsd.log
+ jmxfetch_log_file: /tmp/jmxfetch.log
+
+ # if syslog is enabled but a host and port are not set, a local domain socket
+ # connection will be attempted
+ #
+ # log_to_syslog: yes
+ # syslog_host:
+ # syslog_port:
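
The Dogstream comments in the bundled datadog.conf above spell out the custom parser contract (a logger object and the current line in, a metric tuple out) but do not include an example. Below is a minimal sketch of such a parser, assuming the default "metric timestamp value key0=val0 key1=val1 ..." line format described above; the module name, function name, and the choice to return None for unparseable lines are assumptions for illustration, not part of this diff.

# parsers_module.py -- hypothetical dogstream parser following the contract
# documented in datadog.conf: (logger, line) -> (metric, timestamp, value, attributes)
def custom_parser(logger, line):
    # Assume the default log format: "metric timestamp value key0=val0 key1=val1 ..."
    fields = line.split()
    if len(fields) < 3:
        logger.debug("dogstream: could not parse line: %r", line)
        return None  # assumption: skip lines that do not match the expected format

    metric = fields[0]
    timestamp = float(fields[1])
    value = float(fields[2])

    # Remaining fields become attributes; 'metric_type' must be present and
    # be either 'counter' or 'gauge'.
    attributes = {'metric_type': 'gauge'}
    for pair in fields[3:]:
        if '=' in pair:
            key, val = pair.split('=', 1)
            attributes[key] = val

    return (metric, timestamp, value, attributes)

Such a module would then be referenced from the dogstreams option, e.g. "dogstreams: /var/log/myapp.log:/path/to/parsers_module.py:custom_parser" (log path and module path here are illustrative).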
@@ -39,10 +39,9 @@ def install_req(requirement, pip_options = nil, output = nil, use_venv = nil)
  pip_command = use_venv ? "#{ENV['SDK_HOME']}/venv/bin/pip" : 'pip'
  redirect_output = output ? "2>&1 >> #{output}" : ''
  pip_options = '' if pip_options.nil?
- unless requirement.empty? || requirement.start_with?('#')
- sh %(#{pip_command} install #{requirement} #{pip_options} #{redirect_output}\
- || echo 'Unable to install #{requirement}' #{redirect_output})
- end
+ return true if requirement.empty? || requirement.start_with?('#')
+ sh %(#{pip_command} install #{requirement} #{pip_options} #{redirect_output}\
+ || echo 'Unable to install #{requirement}' #{redirect_output})
  end
 
  def install_requirements(req_file, pip_options = nil, output = nil, use_venv = nil)
@@ -174,7 +173,7 @@ class Wait
  s = TCPSocket.new('localhost', port)
  s.close
  return true
- rescue Errno::ECONNREFUSED, Errno::EHOSTUNREACH
+ rescue Errno::ECONNREFUSED, Errno::EHOSTUNREACH, EOFError
  return false
  end
  end
@@ -187,7 +186,7 @@ class Wait
  begin
  r = HTTParty.get(url)
  return (200...300).cover? r.code
- rescue Errno::ECONNREFUSED, Errno::EHOSTUNREACH
+ rescue Errno::ECONNREFUSED, Errno::EHOSTUNREACH, EOFError
  return false
  end
  end
data/lib/tasks/sdk.rake CHANGED
@@ -33,6 +33,7 @@ task 'setup_env' do
  # install agent core dependencies
  `#{ENV['SDK_HOME']}/venv/bin/pip install -r #{ENV['SDK_HOME']}/embedded/dd-agent/requirements.txt`
  `echo "#{ENV['SDK_HOME']}/embedded/dd-agent/" > #{ENV['SDK_HOME']}/venv/lib/python2.7/site-packages/datadog-agent.pth`
+ `cp #{gem_home}/lib/config/datadog.conf #{ENV['SDK_HOME']}/embedded/dd-agent/datadog.conf`
  end
 
  desc 'Clean development environment for the SDK (remove!)'
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: datadog-sdk-testing
  version: !ruby/object:Gem::Version
- version: 0.3.1
+ version: 0.3.2
  platform: ruby
  authors:
  - Jaime Fullaondo
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2016-09-15 00:00:00.000000000 Z
+ date: 2016-11-14 00:00:00.000000000 Z
  dependencies: []
  description: Datadog Integration SDK testing/scaffolding gem
  email: jaime.fullaondo@datadoghq.com
@@ -22,6 +22,7 @@ files:
  - lib/config/check.py
  - lib/config/ci/skeleton.rake
  - lib/config/conf.yaml.example
+ - lib/config/datadog.conf
  - lib/config/manifest.json
  - lib/config/metadata.csv
  - lib/config/requirements.txt