vayacondios-server 0.1.2 → 0.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. data/Gemfile +1 -0
  2. data/app/http_shim.rb +1 -5
  3. data/lib/vayacondios-client.rb +2 -0
  4. data/lib/vayacondios-server.rb +1 -0
  5. data/lib/vayacondios/client/cube_client.rb +39 -0
  6. data/lib/vayacondios/client/itemset.rb +43 -28
  7. data/lib/vayacondios/client/notifier.rb +24 -1
  8. data/lib/vayacondios/client/zabbix_client.rb +148 -0
  9. data/lib/vayacondios/server/handlers/itemset_handler.rb +0 -1
  10. data/lib/vayacondios/server/model/itemset_document.rb +8 -4
  11. data/lib/vayacondios/server/rack/assume_json.rb +13 -0
  12. data/lib/vayacondios/server/rack/extract_methods.rb +11 -1
  13. data/lib/vayacondios/version.rb +1 -1
  14. data/pom.xml +97 -0
  15. data/scripts/hadoop_monitor/configurable.rb +1 -1
  16. data/scripts/hadoop_monitor/hadoop_attempt_scraper.rb +6 -3
  17. data/scripts/hadoop_monitor/hadoop_client.rb +20 -19
  18. data/scripts/hadoop_monitor/hadoop_monitor.rb +3 -3
  19. data/scripts/hadoop_monitor/hadoopable.rb +3 -3
  20. data/scripts/hadoop_monitor/machine_monitor.rb +2 -2
  21. data/spec/client/itemset_spec.rb +8 -8
  22. data/spec/server/itemset_spec.rb +4 -4
  23. data/src/main/java/com/infochimps/util/CurrentClass.java +26 -0
  24. data/src/main/java/com/infochimps/util/DebugUtil.java +38 -0
  25. data/src/main/java/com/infochimps/util/HttpHelper.java +112 -0
  26. data/src/main/java/com/infochimps/vayacondios/ItemSets.java +456 -0
  27. data/src/main/java/com/infochimps/vayacondios/Organization.java +49 -0
  28. data/src/main/java/com/infochimps/vayacondios/PathBuilder.java +13 -0
  29. data/src/main/java/com/infochimps/vayacondios/VCDIntegrationTest.java +68 -0
  30. data/src/main/java/com/infochimps/vayacondios/VayacondiosClient.java +88 -0
  31. data/vayacondios-client.gemspec +2 -2
  32. data/vayacondios-server.gemspec +4 -2
  33. metadata +37 -9
@@ -1,3 +1,3 @@
1
1
  class Vayacondios
2
- VERSION = '0.1.2'
2
+ VERSION = '0.1.6'
3
3
  end
data/pom.xml ADDED
@@ -0,0 +1,97 @@
1
+ <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
2
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
3
+ <modelVersion>4.0.0</modelVersion>
4
+ <groupId>com.infochimps</groupId>
5
+ <artifactId>vayacondios</artifactId>
6
+ <packaging>jar</packaging>
7
+ <version>1.0-SNAPSHOT</version>
8
+ <name>java-common</name>
9
+ <url>http://maven.apache.org</url>
10
+
11
+ <parent>
12
+ <groupId>com.infochimps</groupId>
13
+ <artifactId>parent-pom</artifactId>
14
+ <version>1.0.0-SNAPSHOT</version>
15
+ </parent>
16
+
17
+ <build>
18
+ <plugins>
19
+ <plugin>
20
+ <groupId>org.apache.maven.plugins</groupId>
21
+ <artifactId>maven-shade-plugin</artifactId>
22
+ <version>2.0</version>
23
+ <executions>
24
+ <execution>
25
+ <phase>package</phase>
26
+ <goals>
27
+ <goal>shade</goal>
28
+ </goals>
29
+ <configuration>
30
+ <transformers>
31
+ <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
32
+ <mainClass>com.infochimps.vayacondios.VCDIntegrationTest</mainClass>
33
+ </transformer>
34
+ </transformers>
35
+ </configuration>
36
+ </execution>
37
+ </executions>
38
+ </plugin>
39
+ <plugin>
40
+ <groupId>org.codehaus.mojo</groupId>
41
+ <artifactId>exec-maven-plugin</artifactId>
42
+ <version>1.2.1</version>
43
+ <configuration>
44
+ <executable>java</executable>
45
+ <arguments>
46
+ <argument>-cp</argument>
47
+ <classpath/>
48
+ <argument>-jar</argument>
49
+ <argument>target/${project.artifactId}-${project.version}.jar</argument>
50
+ </arguments>
51
+ </configuration>
52
+ </plugin>
53
+ </plugins>
54
+ </build>
55
+
56
+ <repositories>
57
+ <!-- Infochimps Repositories -->
58
+ <repository>
59
+ <id>infochimps.releases</id>
60
+ <name>Infochimps Internal Repository</name>
61
+ <url>https://s3.amazonaws.com/artifacts.chimpy.us/maven-s3p/releases</url>
62
+ </repository>
63
+ <repository>
64
+ <id>infochimps.snapshots</id>
65
+ <name>Infochimps Internal Repository</name>
66
+ <url>https://s3.amazonaws.com/artifacts.chimpy.us/maven-s3p/snapshots</url>
67
+ <snapshots>
68
+ <enabled>true</enabled>
69
+ <updatePolicy>always</updatePolicy>
70
+ </snapshots>
71
+ </repository>
72
+ </repositories>
73
+
74
+ <dependencies>
75
+ <dependency>
76
+ <groupId>com.google.code.gson</groupId>
77
+ <artifactId>gson</artifactId>
78
+ <version>2.2.2</version>
79
+ </dependency>
80
+ <dependency>
81
+ <groupId>commons-codec</groupId>
82
+ <artifactId>commons-codec</artifactId>
83
+ <version>1.2</version>
84
+ </dependency>
85
+ <dependency>
86
+ <groupId>org.slf4j</groupId>
87
+ <artifactId>slf4j-api</artifactId>
88
+ <version>1.7.2</version>
89
+ </dependency>
90
+ <dependency>
91
+ <groupId>junit</groupId>
92
+ <artifactId>junit</artifactId>
93
+ <version>3.8.1</version>
94
+ <scope>test</scope>
95
+ </dependency>
96
+ </dependencies>
97
+ </project>
@@ -1,7 +1,7 @@
1
1
  require 'configliere'
2
2
  require 'logger'
3
3
 
4
- module Vayacondios
4
+ class Vayacondios
5
5
 
6
6
  module Configurable
7
7
 
@@ -27,16 +27,19 @@ class HadoopAttemptScraper < Nibbler
27
27
 
28
28
  def to_attempts
29
29
  attempts.map do |attempt|
30
+ start_time = Time.parse(attempt.start_time) rescue nil
31
+ finish_time = attempt.finish_time.length > 0 ? Time.parse(attempt.finish_time) : nil
30
32
  {
31
33
  _id: attempt.attempt_id.to_s,
32
34
  task_id: task_id,
33
35
  host: attempt.machine.to_s.gsub(/^http:\/\//, '').gsub(/:[0-9]+$/, ''),
34
36
  status: attempt.status,
35
37
  progress: attempt.progress.to_f / 100.0,
36
- start_time: Time.parse(attempt.start_time),
37
- finish_time: attempt.finish_time.length > 0 ? Time.parse(attempt.finish_time) : nil,
38
+ start_time: start_time,
39
+ finish_time: finish_time,
40
+ duration: start_time ? (finish_time || Time.now) - start_time : nil,
38
41
  errors: attempt.errors
39
42
  }
40
43
  end
41
44
  end
42
- end
45
+ end
@@ -10,7 +10,7 @@ require 'pp'
10
10
  require 'gorillib/string/inflections'
11
11
  require 'swineherd-fs'
12
12
 
13
- module Vayacondios
13
+ class Vayacondios
14
14
 
15
15
  class HadoopClient
16
16
 
@@ -87,6 +87,11 @@ module Vayacondios
87
87
  finished_status = [:FAILED, :KILLED, :COMPLETE]
88
88
  failed_status = [:FAILED]
89
89
 
90
+
91
+ # not sure what is what. I'm guessing
92
+ # JobStatus.getStartTime corresponds to the
93
+ # launch time in the logs
94
+
90
95
  start_time = Time.at(job_status.get_start_time / 1000)
91
96
  reduce_progress = job.reduce_progress
92
97
  map_progress = job.map_progress
@@ -100,22 +105,16 @@ module Vayacondios
100
105
  _id: job_id.to_s,
101
106
  name: job.get_job_name.to_s,
102
107
 
103
- # not sure what is what. I'm guessing
104
- # JobStatus.getStartTime corresponds to the
105
- # launch time in the logs, but I'm going to
106
- # go ahead and use it twice here.
107
-
108
- launch_time: start_time,
109
- submit_time: start_time,
108
+ start_time: start_time,
110
109
  finish_time: finish_time,
111
110
 
112
- run_duration: run_duration,
111
+ duration: run_duration,
113
112
 
114
113
  map_eta: map_eta,
115
114
  reduce_eta: reduce_eta,
116
115
  eta: reduce_eta,
117
116
 
118
- job_status: case job_status.get_run_state
117
+ status: case job_status.get_run_state
119
118
  when JobStatus::FAILED then :FAILED
120
119
  when JobStatus::KILLED then :KILLED
121
120
  when JobStatus::PREP then :PREP
@@ -128,9 +127,7 @@ module Vayacondios
128
127
  failed_maps: num_tasks(job_id, :map, failed_status),
129
128
  failed_reduces: num_tasks(job_id, :reduce, failed_status),
130
129
 
131
- counters: parse_counters(job.get_counters),
132
- type: :job,
133
-
130
+ counters: parse_counters(job.get_counters)
134
131
  }
135
132
 
136
133
  job_event = {
@@ -195,13 +192,17 @@ module Vayacondios
195
192
  # object that represents it.
196
193
  #
197
194
  def parse_task task_report, task_type, parent_job_id
195
+ start_time = task_report.get_start_time > 0 ? Time.at(task_report.get_start_time / 1000) : nil
196
+ finish_time = task_report.get_finish_time > 0 ? Time.at(task_report.get_finish_time / 1000) : nil
197
+
198
198
  {
199
199
  _id: task_report.get_task_id.to_s,
200
- job_id: parent_job_id,
201
- task_type: task_type,
202
- task_status: task_report.get_current_status.to_s,
203
- start_time: Time.at(task_report.get_start_time / 1000),
204
- finish_time: task_report.get_finish_time > 0 ? Time.at(task_report.get_finish_time / 1000) : nil,
200
+ job_id: parent_job_id.to_s,
201
+ type: task_type,
202
+ status: task_report.get_current_status.to_s,
203
+ start_time: start_time,
204
+ finish_time: finish_time,
205
+ duration: start_time ? (finish_time || Time.now) - start_time : nil,
205
206
  counters: parse_counters(task_report.get_counters),
206
207
  diagnostics: task_report.get_diagnostics.map(&:to_s),
207
208
  successful_attempt_id: task_report.get_successful_task_attempt.to_s
@@ -212,7 +213,7 @@ module Vayacondios
212
213
  {
213
214
  t: Time.now,
214
215
  d: {
215
- job_id: task_report.get_task_id.to_s,
216
+ task_id: task_report.get_task_id.to_s,
216
217
  progress: task_report.get_progress,
217
218
  running_attempt_ids: task_report.get_running_task_attempts.map(&:to_s)
218
219
  }
@@ -10,7 +10,7 @@ require 'thread'
10
10
  require 'open-uri'
11
11
  require 'json'
12
12
 
13
- module Vayacondios
13
+ class Vayacondios
14
14
 
15
15
  class HadoopMonitor
16
16
  def initialize
@@ -34,8 +34,8 @@ module Vayacondios
34
34
  tasks: @db.create_collection('job_tasks'),
35
35
  attempts: @db.create_collection('job_task_attempts'),
36
36
 
37
- job_events: @db.create_collection('job_events', capped_collection_opts),
38
- task_events: @db.create_collection('job_tasks_events', capped_collection_opts),
37
+ job_events: @db.create_collection('job_events', capped_collection_opts),
38
+ task_events: @db.create_collection('job_task_events', capped_collection_opts),
39
39
  }
40
40
  end
41
41
 
@@ -1,12 +1,12 @@
1
1
  require 'stringio'
2
2
 
3
- module Vayacondios
3
+ class Vayacondios
4
4
 
5
5
  module Hadoopable
6
6
 
7
7
  include Configurable
8
8
 
9
- #--------------------------------------------------------------------------------
9
+ #--------------------------------------------------------------------------------
10
10
  # Initialize jruby and tell it about hadoop.
11
11
  #--------------------------------------------------------------------------------
12
12
 
@@ -22,7 +22,7 @@ module Vayacondios
22
22
 
23
23
  $CLASSPATH << File.join(File.join(hadoop_home, 'conf') || ENV['HADOOP_CONF_DIR'],
24
24
  '') # add trailing slash
25
-
25
+
26
26
  Dir["#{hadoop_home}/{hadoop*.jar,lib/*.jar}"].each{|jar| require jar}
27
27
 
28
28
  include_class org.apache.hadoop.mapred.JobConf
@@ -7,7 +7,7 @@ require 'scanf'
7
7
  require 'json'
8
8
  require 'mongo'
9
9
 
10
- module Vayacondios
10
+ class Vayacondios
11
11
 
12
12
  class StatServer
13
13
 
@@ -60,7 +60,7 @@ module Vayacondios
60
60
 
61
61
  # main loop
62
62
  loop do
63
-
63
+
64
64
  logger.debug "In main event loop. Waiting to see if the cluster is busy."
65
65
 
66
66
  # Get up-to-date on the state of the cluster.
@@ -16,28 +16,28 @@ describe Vayacondios::Client::ItemSet do
16
16
  itemset = Vayacondios::Client::ItemSet.new("foohost", 9999, "fooorg", "footopic", "fooid")
17
17
  ary = ["foo", "bar", "baz"]
18
18
 
19
- # Actually testing internals here to avoid
19
+ # testing internals here to avoid shimming up HTTP libraries.
20
20
 
21
21
  it "generates a put request without a patch header when asked to create" do
22
- req = itemset._req :create, ary
22
+ req = itemset.instance_eval{_req(:create, ary)}
23
23
 
24
24
  req.method.should eql('PUT')
25
- req.body.should eql(ary.to_json)
25
+ req.body.should eql(MultiJson.encode(contents: ary))
26
26
  req.path.should eql('/v1/fooorg/itemset/footopic/fooid')
27
27
  req.each_header.to_a.should_not include(["x_method", "PATCH"])
28
28
  end
29
29
 
30
30
  it "generates a put request with a patch header when asked to update" do
31
- req = itemset._req :update, ary
31
+ req = itemset.instance_eval{_req(:update, ary)}
32
32
 
33
33
  req.method.should eql('PUT')
34
- req.body.should eql(ary.to_json)
34
+ req.body.should eql(MultiJson.encode(contents: ary))
35
35
  req.path.should eql('/v1/fooorg/itemset/footopic/fooid')
36
36
  req.each_header.to_a.should include(["x-method", "PATCH"])
37
37
  end
38
38
 
39
39
  it "generates a get request when asked to fetch" do
40
- req = itemset._req :fetch
40
+ req = itemset.instance_eval{_req(:fetch)}
41
41
 
42
42
  req.method.should eql('GET')
43
43
  req.body.should be_nil
@@ -45,10 +45,10 @@ describe Vayacondios::Client::ItemSet do
45
45
  end
46
46
 
47
47
  it "generates a delete request when asked to remove" do
48
- req = itemset._req :remove, ary
48
+ req = itemset.instance_eval{_req(:remove, ary)}
49
49
 
50
50
  req.method.should eql('DELETE')
51
- req.body.should eql(ary.to_json)
51
+ req.body.should eql(MultiJson.encode(contents: ary))
52
52
  req.path.should eql('/v1/fooorg/itemset/footopic/fooid')
53
53
  end
54
54
  end
@@ -157,7 +157,7 @@ describe HttpShim do
157
157
  with_api(HttpShim) do |api|
158
158
  get_request({:path => '/v1/infochimps/itemset/power/level'}, err) do |c|
159
159
  c.response_header.status.should == 200
160
- MultiJson.load(c.response).should eql(["foo", "bar"])
160
+ MultiJson.load(c.response).should eql({"contents" => ["foo", "bar"]})
161
161
  end
162
162
  end
163
163
  end
@@ -215,7 +215,7 @@ describe HttpShim do
215
215
  with_api(HttpShim) do |api|
216
216
  get_request({:path => '/v1/infochimps/itemset/merge/test'}, err) do |c|
217
217
  c.response_header.status.should == 200
218
- MultiJson.load(c.response).should eql(["foo", "bar"])
218
+ MultiJson.load(c.response).should eql({"contents" => ["foo", "bar"]})
219
219
  end
220
220
  end
221
221
  end
@@ -277,7 +277,7 @@ describe HttpShim do
277
277
  with_api(HttpShim) do |api|
278
278
  get_request({:path => '/v1/infochimps/itemset/power/level'}, err) do |c|
279
279
  c.response_header.status.should == 200
280
- MultiJson.load(c.response).should eql ["foo"]
280
+ MultiJson.load(c.response).should eql({"contents" => ["foo"]})
281
281
  end
282
282
  end
283
283
  end
@@ -304,7 +304,7 @@ describe HttpShim do
304
304
  with_api(HttpShim) do |api|
305
305
  get_request({:path => '/v1/infochimps/itemset/power/level'}, err) do |c|
306
306
  c.response_header.status.should == 200
307
- MultiJson.load(c.response).should eql []
307
+ MultiJson.load(c.response).should eql({"contents" => []})
308
308
  end
309
309
  end
310
310
  end
@@ -0,0 +1,26 @@
1
+ package com.infochimps.util;
2
+
3
+ import org.slf4j.Logger;
4
+ import org.slf4j.LoggerFactory;
5
+
6
+ public class CurrentClass extends SecurityManager {
7
+ private static CurrentClass SINGLETON = new CurrentClass();
8
+
9
+ // must call this directly. behavior is dependent on call stack
10
+ public static Class get() {
11
+ return SINGLETON.getCurrentClass();
12
+ }
13
+
14
+ // must call this directly. behavior is dependent on call stack
15
+ public static Logger getLogger() {
16
+ return LoggerFactory.getLogger(SINGLETON.getCurrentClass(2));
17
+ }
18
+
19
+ private Class getCurrentClass(int i) {
20
+ return getClassContext()[i];
21
+ }
22
+
23
+ private Class getCurrentClass() {
24
+ return getCurrentClass(3);
25
+ }
26
+ }
@@ -0,0 +1,38 @@
1
+ package com.infochimps.util;
2
+
3
+ import java.net.InetSocketAddress;
4
+
5
+ import javax.net.ssl.HostnameVerifier;
6
+ import javax.net.ssl.HttpsURLConnection;
7
+ import javax.net.ssl.SSLContext;
8
+ import javax.net.ssl.SSLSession;
9
+ import javax.net.ssl.TrustManager;
10
+ import javax.net.ssl.X509TrustManager;
11
+
12
+ import java.security.cert.X509Certificate;
13
+ import java.security.SecureRandom;
14
+ import java.security.GeneralSecurityException;
15
+
16
+ import java.net.Proxy;
17
+
18
+ public class DebugUtil {
19
+ public static Proxy useCharles() {
20
+ trustAllCerts();
21
+ return new Proxy(Proxy.Type.HTTP, new InetSocketAddress("127.0.0.1", 8888));
22
+ }
23
+
24
+ public static void trustAllCerts() {
25
+ try {
26
+ SSLContext sc = SSLContext.getInstance("SSL");
27
+ sc.init(null,
28
+ new TrustManager[] {
29
+ new X509TrustManager() {
30
+ public X509Certificate[] getAcceptedIssuers() { return null;}
31
+ public void checkClientTrusted(X509Certificate[] certs, String authType) {}
32
+ public void checkServerTrusted(X509Certificate[] certs, String authType) {}
33
+ }
34
+ }, new SecureRandom());
35
+ HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory());
36
+ } catch (GeneralSecurityException e) {}
37
+ }
38
+ }
@@ -0,0 +1,112 @@
1
+ package com.infochimps.util;
2
+
3
+ import java.io.BufferedReader;
4
+ import java.io.InputStream;
5
+ import java.io.InputStreamReader;
6
+ import java.io.IOException;
7
+ import java.net.HttpURLConnection;
8
+ import java.net.MalformedURLException;
9
+ import java.net.URL;
10
+ import java.net.URLDecoder;
11
+ import java.nio.charset.Charset;
12
+ import java.util.HashMap;
13
+ import java.util.zip.GZIPInputStream;
14
+
15
+ import org.apache.commons.codec.binary.Base64;
16
+
17
+ import org.slf4j.Logger;
18
+ import org.slf4j.LoggerFactory;
19
+
20
+ import static java.util.Map.Entry;
21
+
22
+ public class HttpHelper {
23
+ private static final Base64 BASE64 = new Base64();
24
+ private static final boolean USE_CHARLES = false;
25
+
26
+ // opens or returns a null reader
27
+ public static BufferedReader openOrNull(Logger log, String urlString, Charset inputCharset) {
28
+ try { return open(log, urlString, inputCharset); }
29
+ catch (IOException e) {
30
+ log.warn("Got an exception trying to open {}: {}", urlString, e);
31
+ return null;
32
+ }
33
+ }
34
+
35
+ public static BufferedReader open(Logger log,
36
+ String urlString,
37
+ Charset inputCharset) throws IOException {
38
+ HttpURLConnection con = getConnection(urlString, log);
39
+ return getReader(con, log, inputCharset);
40
+ }
41
+
42
+ public static BufferedReader open(Logger log,
43
+ String urlString,
44
+ HashMap<String,String> extraHeaders,
45
+ Charset inputCharset) throws IOException {
46
+
47
+ HttpURLConnection con = getConnection(urlString, log);
48
+ for (Entry<String,String> header : extraHeaders.entrySet())
49
+ con.setRequestProperty(header.getKey(), header.getValue());
50
+ return getReader(con, log, inputCharset);
51
+ }
52
+
53
+ private static HttpURLConnection getConnection(String urlString, Logger log) throws IOException {
54
+ URL url = null;
55
+ try { url = new URL(urlString); }
56
+ catch (MalformedURLException e) {
57
+ log.warn("malformed URL: {}", url);
58
+ throw new IOException(e);
59
+ }
60
+
61
+ HttpURLConnection con = (HttpURLConnection)(USE_CHARLES ?
62
+ url.openConnection(DebugUtil.useCharles()) :
63
+ url.openConnection());
64
+
65
+ String userInfo = url.getUserInfo();
66
+ if (userInfo != null) {
67
+ userInfo = URLDecoder.decode(userInfo, "US-ASCII");
68
+ con.setRequestProperty("Authorization", "Basic " + new String(BASE64.encodeBase64(userInfo.getBytes())));
69
+ }
70
+ con.setRequestProperty("Accept-Encoding", "gzip,deflate");
71
+ return con;
72
+ }
73
+
74
+ private static BufferedReader getReader(HttpURLConnection con,
75
+ Logger log,
76
+ Charset inputCharset) throws IOException {
77
+ InputStream in = null;
78
+
79
+ try { in = con.getInputStream(); }
80
+ catch (IOException e) {
81
+ // Some HTTP responses will raise an exception, but the
82
+ // useful information is in the error stream.
83
+
84
+ log.warn("Exception opening {}", con.getURL().toString());
85
+
86
+ InputStream errorStream = con.getErrorStream();
87
+ if (errorStream != null) {
88
+ BufferedReader r = new BufferedReader(new InputStreamReader(errorStream));
89
+ try { for (String line; (line = r.readLine()) != null; log.error(line)); }
90
+ catch (IOException nested_exc) {
91
+ log.error("Got an exception in the exception handler: {}", nested_exc);
92
+ throw e;
93
+ }
94
+ }
95
+ throw e;
96
+ }
97
+
98
+ String encoding = con.getContentEncoding();
99
+ log.debug("Got HTTP stream with content encoding type '" + encoding + "'");
100
+
101
+ if (encoding != null && encoding.equals("gzip")) in = new GZIPInputStream(in);
102
+
103
+ InputStreamReader istream_reader = new InputStreamReader(in, inputCharset);
104
+ BufferedReader reader = new BufferedReader(istream_reader);
105
+
106
+ log.debug("successfully opened connection to {} with character encoding {}",
107
+ con.getURL().toString(),
108
+ istream_reader.getEncoding());
109
+
110
+ return reader;
111
+ }
112
+ }