google_plus_archiver 0.0.1 → 0.0.2

checksums.yaml.gz CHANGED
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+   metadata.gz: 7144b6b00ee7bdc554de12881d10de74c6630938
+   data.tar.gz: 10294ebca4f4512e0eaf10f49e08dd78dfd43f55
+ SHA512:
+   metadata.gz: 39de1a8368fe5e24026de17919b0d54befea6834248549a50e5436beafaa681be9b0a6a9ff8555e809ab27d1ab8724c27eab7487f44b9f988e349a407cc696b4
+   data.tar.gz: 0137ff5c4d3a6fdf96b0116160d0f0978b84acf3e06631ae7bc8662324c130edf7713728d5408b7817554b55b9f98457f756530cbdc184efd57e8ffdfa7b558b
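
The new checksums file records SHA1 and SHA512 digests of the gem's metadata.gz and data.tar.gz members. As an aside (not part of the gem itself), the recorded values should be reproducible with Ruby's standard Digest library, assuming both members have been extracted from the downloaded .gem archive into the current directory:

    require 'digest'

    # Recompute the digests recorded above; the file names assume the members
    # of google_plus_archiver-0.0.2.gem were extracted into the working directory.
    %w[metadata.gz data.tar.gz].each do |name|
      puts "#{name} SHA1:   #{Digest::SHA1.file(name).hexdigest}"
      puts "#{name} SHA512: #{Digest::SHA512.file(name).hexdigest}"
    end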
bin/gplus-get CHANGED
@@ -15,22 +15,34 @@ OptionParser.new do |opts|
  options[:api_key] = api_key
  end
 
- opts.on("--user-id [USER_ID]" ,"Specify the ID of the user to be archived") do |user_id|
+ opts.on("--user-id [USER_ID]", "Specify the ID of the user to be archived") do |user_id|
  options[:user_id] = user_id
  end
 
- opts.on("--delay [SECONDS]" ,"Delay (in seconds) between two requests (0.2 by default, since Google set a 5 requests/second/user limit)") do |delay|
+ opts.on("--compress", "Do compression") do
+ options[:compress] = true
+ end
+
+ opts.on("--delay [SECONDS]", "Delay (in seconds) between two requests (0.2 by default, since Google set a 5 requests/second/user limit)") do |delay|
  options[:delay] = delay
  end
 
- opts.on("--output-path [OUTPUT_PATH]" ,"Output path (the current directory by default)") do |output_path|
+ opts.on("--output-path [OUTPUT_PATH]", "Output path (the current directory by default)") do |output_path|
  options[:output_path] = output_path
  end
 
+ opts.on("--post-limit [POST_LIMIT]", "Maximum number of posts to archive (in time descending order)") do |post_limit|
+ options[:post_limit] = post_limit
+ end
+
  opts.on("--quiet", "Silent mode") do
  options[:quiet] = true
  end
 
+ opts.on("--video-downloader [VIDEO_DOWNLOADER]", "Command used to download Google+ videos (`you-get` by default)") do |video_downloader|
+ options[:video_downloader] = video_downloader
+ end
+
  opts.on("--exclude-posts", "Don't archive posts") do
  options[:exclude_posts] = true
  end
@@ -51,19 +63,24 @@ OptionParser.new do |opts|
  options[:exclude_resharers] = true
  end
 
- opts.on("--version", "Display current version") do
+ opts.on("-V", "--version", "Display current version") do
  puts "google_plus_archiver #{GooglePlusArchiver::VERSION}"
  exit 0
  end
 
  end.parse!
 
+ options[:api_key] = ENV['GOOGLE_API_KEY'] if not options[:api_key]
+
  if not options[:api_key] or not options[:user_id]
  puts "You must specify both the user ID (-u) and your Google API key (-a)."
  exit 0
  end
 
  GooglePlusArchiver::register_client(options[:api_key])
- if GooglePlusArchiver::client_registered?
- GooglePlusArchiver::archive_user(options)
+ if not GooglePlusArchiver::client_registered?
+ puts "Client registration failed."
+ exit 0
  end
+
+ GooglePlusArchiver::archive_user(options)
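
Taken together, the new options make a hypothetical archive run look like the line below (the key, user ID and limit are placeholders, not real values); with the new ENV fallback, exporting GOOGLE_API_KEY works in place of --api-key:

    gplus-get --api-key=YOUR_API_KEY --user-id=1234567890 --post-limit=200 --compress --video-downloader=you-get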
bin/gplus-image-get CHANGED
@@ -0,0 +1,64 @@
+ #!/usr/bin/env ruby
+
+ $LOAD_PATH.unshift File.dirname(__FILE__) + "/../lib"
+
+ require 'google_plus_archiver'
+ require 'google_plus_archiver/version.rb'
+
+ require 'optparse'
+ require 'net/http'
+
+ options = {}
+ OptionParser.new do |opts|
+ opts.banner = "Usage: gplus-image-get -a [API_KEY] [POST_URL]"
+
+ opts.on("--api-key [API_KEY]", "Specify the Google API key") do |api_key|
+ options[:api_key] = api_key
+ end
+
+ opts.on("--output-path [OUTPUT_PATH]", "Output path (the current directory by default)") do |output_path|
+ options[:output_path] = output_path
+ end
+
+ opts.on("-x", "--proxy [PROXY]", "Use proxy on given port") do |proxy|
+ options[:proxy] = proxy
+ end
+
+ opts.on("-V", "--version", "Display current version") do
+ puts "google_plus_archiver #{GooglePlusArchiver::VERSION}"
+ exit 0
+ end
+
+ end.parse!
+
+ options[:api_key] = ENV['GOOGLE_API_KEY'] if not options[:api_key]
+
+ if not options[:api_key] or ARGV.empty?
+ puts "You must specify both the post URL and your Google API key (-a)."
+ exit 0
+ end
+
+ proxy_addr, proxy_port = options[:proxy].split(':') if options[:proxy]
+
+ GooglePlusArchiver::register_client(options[:api_key])
+ if not GooglePlusArchiver::client_registered?
+ puts "Client registration failed."
+ exit 0
+ end
+
+ ARGV.each do |url|
+ uri = URI.parse(URI.escape("#{url}"))
+ if options[:proxy]
+ http = Net::HTTP.new(uri.host, uri.port, proxy_addr, proxy_port)
+ else
+ http = Net::HTTP.new(uri.host, uri.port)
+ end
+ if http.port == 443
+ http.use_ssl = true
+ http.verify_mode = OpenSSL::SSL::VERIFY_NONE
+ end
+ data = http.get(uri.request_uri)
+
+ activity_id = data.body.match(/<div id="update-([^"]+)"/)[1]
+ GooglePlusArchiver::fetch_post_image(:activity_id => activity_id, :output_path => options[:output_path])
+ end
lib/google_plus_archiver.rb CHANGED
@@ -24,7 +24,7 @@ module GooglePlusArchiver
  end
 
  def self.register_client(api_key)
- @@client = Google::APIClient.new
+ @@client = Google::APIClient.new(:application_name => 'GooglePlusArchiver', :application_version => VERSION)
  @@api_key = @@client.key = api_key
  @@request_num = 0
  begin
@@ -38,6 +38,20 @@ module GooglePlusArchiver
  defined? @@plus
  end
 
+ def self.get_full_image_url(url)
+ if url =~ /https:\/\/\w+\.googleusercontent\.com/
+ if url =~ /\/s\d+\/[^\/]+$/ or url =~ /\/w\d+-h\d+\/[^\/]+$/ or url =~ /\/w\d+-h\d+-\w+\/[^\/]+$/
+ url[0..url[0..(url.rindex('/') - 1)].rindex('/')] + 's0-d' + url[url.rindex('/')..-1]
+ elsif url =~ /\/photo.jpg$/ and not url =~ /\/s0-d\/[^\/]+$/
+ url[0..url.rindex('/')] + 's0-d' + url[url.rindex('/')..-1]
+ else
+ url
+ end
+ else
+ url
+ end
+ end
+
  def self.archive_user(params)
  begin
  raise "Unregistered client." unless client_registered?
@@ -46,29 +60,41 @@ module GooglePlusArchiver
  return
  end
 
- user_id, delay, output_path, quiet =
+ user_id, compress, delay, output_path, post_limit, quiet, video_downloader =
  (params[:user_id]),
+ (params[:compress]),
  (params[:delay] or 0.2),
  (params[:output_path] or FileUtils.pwd),
- (params[:quiet])
+ (params[:post_limit]),
+ (params[:quiet]),
+ (params[:video_downloader] or 'you-get')
 
  Dir.mktmpdir do |tmp_dir|
-
  begin
+ response = nil
 
  #>> profile
  puts "##{@@request_num+=1} Fetching people.get ..." unless quiet
- response = @@client.execute(
- :api_method => @@plus.people.get,
- :parameters => {
- 'collection' => 'public',
- 'userId' => user_id
- },
- :authenticated => false
- )
+ loop do
+ begin
+ response = @@client.execute(
+ :api_method => @@plus.people.get,
+ :parameters => {
+ 'collection' => 'public',
+ 'userId' => user_id
+ },
+ :authenticated => false
+ )
+ rescue
+ puts "##{@@request_num} Retrying people.get ..." unless quiet
+ next
+ else
+ break
+ end
+ end
 
  #<< profile
- File.open("#{tmp_dir}/profile.json", "w") do |f|
+ File.open("#{File.join(tmp_dir, 'profile.json')}", "w") do |f|
  f.puts response.body
  end
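
Every call to @@client.execute in this version is wrapped in the same retry idiom: an inner loop that re-issues the request until it returns without raising, printing a "Retrying ..." line on each failure. A stripped-down, self-contained sketch of that pattern (do_request is a stand-in defined here for illustration, not a method of the gem):

    # Stand-in for @@client.execute(...): fails twice, then succeeds.
    attempts = 0
    do_request = lambda do
      attempts += 1
      raise "transient error" if attempts < 3
      "ok"
    end

    response = nil
    loop do
      begin
        response = do_request.call
      rescue        # a bare rescue catches StandardError, as in the gem
        next        # retry the request
      else
        break       # leave the loop once the call succeeds
      end
    end
    puts response   # => "ok" after two retried failures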
 
@@ -78,23 +104,41 @@ module GooglePlusArchiver
  if not params[:exclude_posts]
  next_page_token = nil
  page_num = 0
+ posts_left = post_limit.to_i
+
  loop do
  puts "##{@@request_num+=1} Fetching activities.list: page[#{page_num}] ..." unless quiet
- response = @@client.execute(
- :api_method => @@plus.activities.list,
- :parameters => {
- 'collection' => 'public',
- 'userId' => user_id,
- 'maxResults' => '100',
- 'pageToken' => next_page_token
- },
- :authenticated => false
- )
+ if post_limit
+ maxResults = (posts_left > 100) ? 100 : posts_left
+ posts_left -= maxResults
+ else
+ maxResults = 100
+ end
+ loop do
+ begin
+ response = @@client.execute(
+ :api_method => @@plus.activities.list,
+ :parameters => {
+ 'collection' => 'public',
+ 'userId' => user_id,
+ 'maxResults' => maxResults.to_s,
+ 'pageToken' => next_page_token
+ },
+ :authenticated => false
+ )
+ rescue
+ puts "##{@@request_num} Retrying activities.list: page[#{page_num}] ..." unless quiet
+ next
+ else
+ break
+ end
+ end
+
  activities = JSON.parse(response.body)
  next_page_token = activities['nextPageToken']
 
  #<< posts
- File.open("#{tmp_dir}/posts[#{page_num}].json", "w") do |f|
+ File.open("#{File.join(tmp_dir, 'posts')}[#{page_num}].json", "w") do |f|
  f.puts response.body
  end
 
@@ -104,37 +148,20 @@ module GooglePlusArchiver
  puts "##{@@request_num} Fetching activities.get: #{activity_id}" unless quiet
 
  #<< post
- File.open("#{tmp_dir}/#{activity_id}.json", "w") do |f|
+ File.open("#{File.join(tmp_dir, activity_id)}.json", "w") do |f|
  f.puts item.to_json
  end
 
  #>> attachments
  if not params[:exclude_attachments] and item['object']['attachments']
  item['object']['attachments'].each do |attachment|
- image = (attachment['fullImage'] or attachment['image'])
- if image
- puts "##{@@request_num} Fetching attachment: #{image['url']} ..." unless quiet
- uri = URI.parse(URI.escape("#{image['url']}"))
- http = Net::HTTP.new(uri.host, uri.port)
- if http.port == 443
- http.use_ssl = true
- http.verify_mode = OpenSSL::SSL::VERIFY_NONE
- end
- data = http.get(uri.request_uri)
- image_ext = uri.request_uri.split("/")[-1].split(".")[-1]
- image_ext = nil if image_ext.length > 4
-
- #<< attachment
- File.open("#{tmp_dir}/#{activity_id}_#{attachment['id']}#{image_ext ? ".#{image_ext}" : ""}", "w").puts data.body
- end
-
- thumbnails = attachment['thumbnails']
- if thumbnails
- thumbnails.each_index do |index|
- thumbnail = thumbnails[index]
- image = thumbnail['image']
- puts "##{@@request_num} Fetching attachment(thumbnail): #{image['url']} ..." unless quiet
- uri = URI.parse(URI.escape("#{image['url']}"))
+ if attachment['objectType'] == 'photo'
+ # Download full-size image
+ begin
+ image = attachment['fullImage']
+ image_url = get_full_image_url(image['url'])
+ puts "##{@@request_num} Fetching attachment: #{image_url} ..." unless quiet
+ uri = URI.parse(URI.escape("#{image_url}"))
  http = Net::HTTP.new(uri.host, uri.port)
  if http.port == 443
  http.use_ssl = true
@@ -145,8 +172,81 @@ module GooglePlusArchiver
  image_ext = nil if image_ext.length > 4
 
  #<< attachment
- File.open("#{tmp_dir}/#{activity_id}_#{attachment['id']}_#{index.to_s}#{image_ext ? ".#{image_ext}" : ""}", "w").puts data.body
+ File.open("#{File.join(tmp_dir, activity_id)}_#{attachment['id']}#{image_ext ? ".#{image_ext}" : ""}", "w").puts data.body
+ rescue
+ image = attachment['image']
+ image_url = get_full_image_url(image['url'])
+ puts "##{@@request_num} Fetching attachment: #{image_url} ..." unless quiet
+ uri = URI.parse(URI.escape("#{image_url}"))
+ http = Net::HTTP.new(uri.host, uri.port)
+ if http.port == 443
+ http.use_ssl = true
+ http.verify_mode = OpenSSL::SSL::VERIFY_NONE
+ end
+ data = http.get(uri.request_uri)
+ image_ext = uri.request_uri.split("/")[-1].split(".")[-1]
+ image_ext = nil if image_ext.length > 4
+
+ #<< attachment
+ File.open("#{File.join(tmp_dir, activity_id)}_#{attachment['id']}#{image_ext ? ".#{image_ext}" : ""}", "w").puts data.body
+ end
+
+ elsif attachment['objectType'] == 'album'
+ # Download full-size thumbnails
+ thumbnails = attachment['thumbnails']
+ if thumbnails
+ thumbnails.each_index do |index|
+ thumbnail = thumbnails[index]
+ image = thumbnail['image']
+ image_url = get_full_image_url(image['url'])
+ puts "##{@@request_num} Fetching attachment: #{image_url} ..." unless quiet
+ uri = URI.parse(URI.escape("#{image_url}"))
+ http = Net::HTTP.new(uri.host, uri.port)
+ if http.port == 443
+ http.use_ssl = true
+ http.verify_mode = OpenSSL::SSL::VERIFY_NONE
+ end
+ data = http.get(uri.request_uri)
+ image_ext = uri.request_uri.split("/")[-1].split(".")[-1]
+ image_ext = nil if image_ext.length > 4
+
+ #<< attachment
+ File.open("#{File.join(tmp_dir, activity_id)}_#{attachment['id']}[#{index}]#{image_ext ? ".#{image_ext}" : ""}", "w").puts data.body
+ end
+ end
+
+ elsif attachment['objectType'] == 'video'
+ # Download preview image
+ image = attachment['image']
+ image_url = get_full_image_url(image['url'])
+ puts "##{@@request_num} Fetching attachment: #{image_url} ..." unless quiet
+ uri = URI.parse(URI.escape("#{image_url}"))
+ http = Net::HTTP.new(uri.host, uri.port)
+ if http.port == 443
+ http.use_ssl = true
+ http.verify_mode = OpenSSL::SSL::VERIFY_NONE
  end
+ data = http.get(uri.request_uri)
+ image_ext = 'gif'
+
+ extname = data.header['Content-Type'].split('/')[-1]
+
+ #<< attachment
+ File.open("#{File.join(tmp_dir, activity_id)}_#{attachment['id']}.#{extname}", "w").puts data.body
+
+ # Download video
+ puts "##{@@request_num} Downloading video: #{attachment['url']} ..." unless quiet
+ FileUtils.mkdir("#{File.join(tmp_dir, 'video')}")
+ Dir.chdir("#{File.join(tmp_dir, 'video')}") do
+ if system("#{video_downloader} #{attachment['url']}")
+ Dir.glob("*").each do |video|
+ FileUtils.mv(video, "#{File.join(tmp_dir, activity_id)}_#{attachment['id']}_#{attachment['displayName'].split('/').join}.#{video.split('.')[-1]}")
+ end
+ else
+ puts "##{@@request_num} Video downloader failed. Download aborted."
+ end
+ end
+ FileUtils.rm_r("#{File.join(tmp_dir, 'video')}")
  end
  end
  end
@@ -157,19 +257,29 @@ module GooglePlusArchiver
  replies_page_num = 0
  loop do
  puts "##{@@request_num+=1} Fetching comments.list: page[#{replies_page_num}] ..." unless quiet
- response = @@client.execute(
- :api_method => @@plus.comments.list,
- :parameters => {
- 'activityId' => activity_id,
- 'maxResults' => '500',
- 'pageToken' => replies_next_page_token
- },
- :authenticated => false
- )
+ loop do
+ begin
+ response = @@client.execute(
+ :api_method => @@plus.comments.list,
+ :parameters => {
+ 'activityId' => activity_id,
+ 'maxResults' => '500',
+ 'pageToken' => replies_next_page_token
+ },
+ :authenticated => false
+ )
+ rescue
+ puts "##{@@request_num} Retrying comments.list: page[#{replies_page_num}] ..." unless quiet
+ next
+ else
+ break
+ end
+ end
+
  replies_next_page_token = JSON.parse(response.body)['nextPageToken']
 
  #<< replies
- File.open("#{tmp_dir}/#{activity_id}_replies#{replies_page_num == 0 && !replies_next_page_token ? "" : "[#{replies_page_num}]"}.json", "w") do |f|
+ File.open("#{File.join(tmp_dir, activity_id)}_replies#{replies_page_num == 0 && !replies_next_page_token ? "" : "[#{replies_page_num}]"}.json", "w") do |f|
  f.puts response.body
  end
 
@@ -185,20 +295,30 @@ module GooglePlusArchiver
  plusoners_page_num = 0
  loop do
  puts "##{@@request_num+=1} Fetching people.listByActivity(plusoners): page[#{plusoners_page_num}] ..." unless quiet
- response = @@client.execute(
- :api_method => @@plus.people.list_by_activity,
- :parameters => {
- 'activityId' => activity_id,
- 'collection' => 'plusoners',
- 'maxResults' => '100',
- 'pageToken' => plusoners_next_page_token
- },
- :authenticated => false
- )
+ loop do
+ begin
+ response = @@client.execute(
+ :api_method => @@plus.people.list_by_activity,
+ :parameters => {
+ 'activityId' => activity_id,
+ 'collection' => 'plusoners',
+ 'maxResults' => '100',
+ 'pageToken' => plusoners_next_page_token
+ },
+ :authenticated => false
+ )
+ rescue
+ puts "##{@@request_num} Retrying people.listByActivity(plusoners): page[#{plusoners_page_num}] ..." unless quiet
+ next
+ else
+ break
+ end
+ end
+
  plusoners_next_page_token = JSON.parse(response.body)['nextPageToken']
 
  #<< plusoners
- File.open("#{tmp_dir}/#{activity_id}_plusoners#{plusoners_page_num == 0 && !plusoners_next_page_token ? "" : "[#{plusoners_page_num}]"}.json", "w") do |f|
+ File.open("#{File.join(tmp_dir, activity_id)}_plusoners#{plusoners_page_num == 0 && !plusoners_next_page_token ? "" : "[#{plusoners_page_num}]"}.json", "w") do |f|
  f.puts response.body
  end
 
@@ -214,20 +334,30 @@ module GooglePlusArchiver
  resharers_page_num = 0
  loop do
  puts "##{@@request_num+=1} Fetching people.listByActivity(resharers): page[#{resharers_page_num}] ..." unless quiet
- response = @@client.execute(
- :api_method => @@plus.people.list_by_activity,
- :parameters => {
- 'activityId' => activity_id,
- 'collection' => 'resharers',
- 'maxResults' => '100',
- 'pageToken' => resharers_next_page_token
- },
- :authenticated => false
- )
+ loop do
+ begin
+ response = @@client.execute(
+ :api_method => @@plus.people.list_by_activity,
+ :parameters => {
+ 'activityId' => activity_id,
+ 'collection' => 'resharers',
+ 'maxResults' => '100',
+ 'pageToken' => resharers_next_page_token
+ },
+ :authenticated => false
+ )
+ rescue
+ puts "##{@@request_num} Retrying people.listByActivity(resharers): page[#{resharers_page_num}] ..." unless quiet
+ next
+ else
+ break
+ end
+ end
+
  resharers_next_page_token = JSON.parse(response.body)['nextPageToken']
 
  #<< resharers
- File.open("#{tmp_dir}/#{activity_id}_resharers#{replies_page_num == 0 && !resharers_next_page_token ? "" : "[#{resharers_page_num}]"}.json", "w") do |f|
+ File.open("#{File.join(tmp_dir, activity_id)}_resharers#{replies_page_num == 0 && !resharers_next_page_token ? "" : "[#{resharers_page_num}]"}.json", "w") do |f|
  f.puts response.body
  end
 
@@ -239,6 +369,8 @@ module GooglePlusArchiver
 
  end
 
+ break if post_limit and posts_left <= 0
+
  break unless next_page_token
  page_num += 1
  sleep delay
@@ -251,32 +383,113 @@ module GooglePlusArchiver
  puts "Archiving interrupted due to unexpected errors."
 
  ensure
- # Archive all the files
  archive_time = "#{Time.now.to_s[0..9]}-#{Time.now.to_s[11..-7]}#{Time.now.to_s[-5..-1]}"
- archive_filename = "#{output_path}/#{user_display_name}_#{archive_time}.tar.gz"
- FileUtils.cd(tmp_dir) do
-
- Tempfile.open("#{user_id}") do |tar|
- files = []
- Find.find("./") do |path|
- files << File.basename(path) unless File.basename(path) == '.'
+ archive_dest = "#{File.join(output_path, user_display_name)}_#{archive_time}"
+
+ FileUtils.mkdir_p(archive_dest)
+ FileUtils.cp_r("#{File.join(tmp_dir, '.')}", archive_dest)
+
+ if compress
+ begin
+ archive_filename = "#{File.join(output_path, user_display_name)}_#{archive_time}.tar.gz"
+ FileUtils.cd(archive_dest) do
+ Tempfile.open("#{user_id}") do |tar|
+ files = []
+ Find.find("./") do |path|
+ files << File.basename(path) unless File.basename(path) == '.'
+ end
+ Minitar.pack(files, tar)
+
+ Zlib::GzipWriter.open(archive_filename) do |gz|
+ gz.mtime = File.mtime(tar.path)
+ gz.orig_name = tar.path
+ gz.write IO.binread(tar.path)
+ end
+ end
  end
- Minitar.pack(files, tar)
 
- Zlib::GzipWriter.open(archive_filename) do |gz|
- gz.mtime = File.mtime(tar.path)
- gz.orig_name = tar.path
- gz.write IO.binread(tar.path)
+ FileUtils.rm_r(archive_dest)
+ rescue Exception => e
+ puts e.message
+ puts "Compression failed."
+ end
+ end
+ end
+ end
+ end
+
+ def self.fetch_post_image(params)
+ begin
+ raise "Unregistered client." unless client_registered?
+ rescue => e
+ puts e.message
+ return
+ end
+
+ activity_id, output_path =
+ (params[:activity_id]),
+ (params[:output_path] or FileUtils.pwd)
+
+ response = @@client.execute(
+ :api_method => @@plus.activities.get,
+ :parameters => {
+ 'activityId' => activity_id,
+ 'fields' => 'object/attachments'
+ },
+ :authenticated => false
+ )
+
+ attachments = JSON.parse(response.body)['object']['attachments']
+ attachments.each do |attachment|
+ if attachment['objectType'] == 'photo'
+ image = attachment['fullImage']
+ image_url = get_full_image_url(image['url'])
+ puts "Downloading image: #{image_url} ..."
+ uri = URI.parse(URI.escape("#{image_url}"))
+ http = Net::HTTP.new(uri.host, uri.port)
+ if http.port == 443
+ http.use_ssl = true
+ http.verify_mode = OpenSSL::SSL::VERIFY_NONE
+ end
+ data = http.get(uri.request_uri)
+
+ m = data.header['Content-Disposition'].match(/filename="([^"]+)"/)
+ if m
+ extname = m[1]
+ else
+ extname = data.header['Content-Type'].split('/')[-1]
+ end
+
+ File.open("#{File.join(output_path, activity_id)}.#{extname}", "w").puts data.body
+
+ elsif attachment['objectType'] == 'album'
+ thumbnails = attachment['thumbnails']
+ if thumbnails
+ thumbnails.each_index do |index|
+ thumbnail = thumbnails[index]
+ image = thumbnail['image']
+ image_url = get_full_image_url(image['url'])
+ puts "Downloading image: #{image_url} ..."
+ uri = URI.parse(URI.escape("#{image_url}"))
+ http = Net::HTTP.new(uri.host, uri.port)
+ if http.port == 443
+ http.use_ssl = true
+ http.verify_mode = OpenSSL::SSL::VERIFY_NONE
+ end
+ data = http.get(uri.request_uri)
+
+ m = data.header['Content-Disposition'].match(/filename="([^"]+)"/)
+ if m
+ extname = m[1]
+ else
+ extname = data.header['Content-Type'].split('/')[-1]
  end
 
+ File.open("#{File.join(output_path, activity_id)}[#{index}].#{extname}", "w").puts data.body
  end
-
  end
-
  end
-
  end
-
  end
 
  end
lib/google_plus_archiver/version.rb CHANGED
@@ -1,4 +1,4 @@
  module GooglePlusArchiver
- VERSION = "0.0.1"
- DATE = "2012-12-18"
+ VERSION = "0.0.2"
+ DATE = "2013-07-10"
  end
metadata CHANGED
@@ -1,20 +1,18 @@
  --- !ruby/object:Gem::Specification
  name: google_plus_archiver
  version: !ruby/object:Gem::Version
- version: 0.0.1
- prerelease:
+ version: 0.0.2
  platform: ruby
  authors:
  - Mort Yao
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2012-12-18 00:00:00.000000000 Z
+ date: 2013-07-10 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: google-api-client
  requirement: !ruby/object:Gem::Requirement
- none: false
  requirements:
  - - ~>
  - !ruby/object:Gem::Version
@@ -22,7 +20,6 @@ dependencies:
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
- none: false
  requirements:
  - - ~>
  - !ruby/object:Gem::Version
@@ -30,7 +27,6 @@ dependencies:
  - !ruby/object:Gem::Dependency
  name: archive-tar-minitar
  requirement: !ruby/object:Gem::Requirement
- none: false
  requirements:
  - - ~>
  - !ruby/object:Gem::Version
@@ -38,7 +34,6 @@ dependencies:
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
- none: false
  requirements:
  - - ~>
  - !ruby/object:Gem::Version
@@ -48,35 +43,36 @@ description: google_plus_archiver is a simple command-line tool to archive Googl
  email: mort.yao@gmail.com
  executables:
  - gplus-get
+ - gplus-image-get
  extensions: []
  extra_rdoc_files: []
  files:
  - bin/gplus-get
  - lib/google_plus_archiver.rb
  - lib/google_plus_archiver/version.rb
+ - bin/gplus-image-get
  homepage: https://github.com/soimort/google_plus_archiver
  licenses:
  - MIT
+ metadata: {}
  post_install_message:
  rdoc_options: []
  require_paths:
  - lib
  required_ruby_version: !ruby/object:Gem::Requirement
- none: false
  requirements:
- - - ! '>='
+ - - '>='
  - !ruby/object:Gem::Version
  version: '0'
  required_rubygems_version: !ruby/object:Gem::Requirement
- none: false
  requirements:
- - - ! '>='
+ - - '>='
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 1.8.24
+ rubygems_version: 2.0.2
  signing_key:
- specification_version: 3
+ specification_version: 4
  summary: A simple command-line tool to archive Google+ profiles.
  test_files: []