viddl-rb 0.68 → 0.70

@@ -11,7 +11,7 @@ class Metacafe < PluginBase
  url.include?("metacafe.com")
  end

- def self.get_urls_and_filenames(url)
+ def self.get_urls_and_filenames(url, options = {})
  video_id = get_video_id(url)
  info_url = API_BASE + "item/#{video_id}" #use the API to get the full video url
  info_doc = Nokogiri::XML(open(info_url))
@@ -25,16 +25,14 @@ class Metacafe < PluginBase
  file_info = get_file_info(redirect_url, video_id)
  key_string = get_file_key(redirect_url)
  file_url_with_key = file_info[:file_url] + "?__gda__=#{key_string}"
- escaped_url = CGI::escape(file_url_with_key)
-
- [{:url => escaped_url, :name => get_video_name(video_swf_url) + file_info[:extension]}]
+
+ [{:url => file_url_with_key, :name => get_video_name(video_swf_url) + file_info[:extension]}]
  end

  def self.get_video_id(url)
  id = url[/watch\/(\d+)/, 1]
  unless id
- puts "ERROR: Can only download videos that has the ID in the URL."
- exit
+ raise CouldNotDownloadVideoError, "Can only download videos that has the ID in the URL."
  end
  id
  end
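
The Metacafe change above replaces the old `puts` + `exit` failure path with `raise CouldNotDownloadVideoError`, leaving the decision about what to do on failure to the caller. A minimal, illustrative sketch of how a caller might handle that (the plugin method, its return shape, and the error class come from the diff; the watch URL and the surrounding driver code are made up):

begin
  # raises CouldNotDownloadVideoError instead of exiting when the URL carries no video ID
  downloads = Metacafe.get_urls_and_filenames("http://www.metacafe.com/watch/123456/example/")
  downloads.each { |d| puts "queueing #{d[:name]} from #{d[:url]}" }
rescue CouldNotDownloadVideoError => e
  warn "Skipping URL: #{e.message}" # the process stays alive and can move on to the next URL
end
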
@@ -1,3 +1,4 @@
+ require 'rest_client'
  class Soundcloud < PluginBase
  # this will be called by the main app to check whether this plugin is responsible for the url passed
  def self.matches_provider?(url)
@@ -5,8 +6,8 @@ class Soundcloud < PluginBase
  end

  # return the url for original video file and title
- def self.get_urls_and_filenames(url)
- doc = Nokogiri::XML(open(url))
+ def self.get_urls_and_filenames(url, options = {})
+ doc = Nokogiri::HTML(RestClient.get(url).body)
  download_filename = doc.at("#main-content-inner img[class=waveform]").attributes["src"].value.to_s.match(/\.com\/(.+)\_/)[1]
  download_url = "http://media.soundcloud.com/stream/#{download_filename}"
  file_name = transliterate("#{doc.at('//h1/em').text.chomp}") + ".mp3"
@@ -1,45 +1,45 @@
  class Veoh < PluginBase
- VEOH_API_BASE = "http://www.veoh.com/api/"
- PREFERRED_FORMATS = [:mp4, :flash] # mp4 is preferred because it enables downloading full videos and not just previews
-
- #this will be called by the main app to check whether this plugin is responsible for the url passed
- def self.matches_provider?(url)
- url.include?("veoh.com")
- end
+ VEOH_API_BASE = "http://www.veoh.com/api/"
+ PREFERRED_FORMATS = [:mp4, :flash] # mp4 is preferred because it enables downloading full videos and not just previews
+
+ #this will be called by the main app to check whether this plugin is responsible for the url passed
+ def self.matches_provider?(url)
+ url.include?("veoh.com")
+ end

- def self.get_urls_and_filenames(url)
- veoh_id = url[/\/watch\/([\w\d]+)/, 1]
- info_url = "#{VEOH_API_BASE}findByPermalink?permalink=#{veoh_id}"
- info_doc = Nokogiri::XML(open(info_url))
+ def self.get_urls_and_filenames(url, options = {})
+ veoh_id = url[/\/watch\/([\w\d]+)/, 1]
+ info_url = "#{VEOH_API_BASE}findByPermalink?permalink=#{veoh_id}"
+ info_doc = Nokogiri::XML(open(info_url))

- download_url = get_download_url(info_doc)
- file_name = get_file_name(info_doc, download_url)
+ download_url = get_download_url(info_doc)
+ file_name = get_file_name(info_doc, download_url)

- [{:url => download_url, :name => file_name}]
- end
-
- #returns the first valid download url string, in order of the prefered formats, that is found for the video
- def self.get_download_url(info_doc)
- PREFERRED_FORMATS.each do |format|
- a = get_attribute(format)
- download_attr = info_doc.xpath('//rsp/videoList/video').first.attributes[a]
- return(download_attr.content) unless download_attr.nil? || download_attr.content.empty?
- end
- end
-
- #the file name string is a combination of the video name and the extension
- def self.get_file_name(info_doc, download_url)
- name = info_doc.xpath('//rsp/videoList/video').first.attributes['title'].content
- extension = download_url[/\/[\w\d]+(\.[\w\d]+)\?ct/, 1]
- PluginBase.make_filename_safe(name) + extension
- end
+ [{:url => download_url, :name => file_name}]
+ end
+
+ #returns the first valid download url string, in order of the prefered formats, that is found for the video
+ def self.get_download_url(info_doc)
+ PREFERRED_FORMATS.each do |format|
+ a = get_attribute(format)
+ download_attr = info_doc.xpath('//rsp/videoList/video').first.attributes[a]
+ return(download_attr.content) unless download_attr.nil? || download_attr.content.empty?
+ end
+ end
+
+ #the file name string is a combination of the video name and the extension
+ def self.get_file_name(info_doc, download_url)
+ name = info_doc.xpath('//rsp/videoList/video').first.attributes['title'].content
+ extension = download_url[/\/[\w\d]+(\.[\w\d]+)\?ct/, 1]
+ PluginBase.make_filename_safe(name) + extension
+ end

- def self.get_attribute(format)
- case format
- when :mp4
- "ipodUrl"
- when :flash
- "previewUrl"
- end
- end
+ def self.get_attribute(format)
+ case format
+ when :mp4
+ "ipodUrl"
+ when :flash
+ "previewUrl"
+ end
+ end
  end
@@ -1,12 +1,10 @@
-
-
  class Vimeo < PluginBase
  #this will be called by the main app to check whether this plugin is responsible for the url passed
  def self.matches_provider?(url)
  url.include?("vimeo.com")
  end

- def self.get_urls_and_filenames(url)
+ def self.get_urls_and_filenames(url, options = {})
  #the vimeo ID consists of 7 decimal numbers in the URL
  vimeo_id = url[/\d{7,8}/]

@@ -15,9 +13,9 @@ class Vimeo < PluginBase

  video_page = agent.get("http://vimeo.com/#{vimeo_id}")
  page_html = video_page.root.inner_html
-
- title = page_html[/<meta\s+property="og:title"\s+content="(.+?)"/, 1]
- puts "[VIMEO] Title: #{title}"
+ doc = Nokogiri::HTML(page_html)
+ title = doc.at('meta[property="og:title"]').attributes['content'].value
+ puts "[VIMEO] Title: #{title.inspect}"

  #the timestamp and sig info is in the embedded player javascript in the video page
  timestamp = page_html[/"timestamp":(\d+),/, 1]
@@ -31,4 +29,4 @@ class Vimeo < PluginBase

  [{:url => download_url, :name => file_name}]
  end
- end
+ end
@@ -1,197 +1,196 @@
-
- class Youtube < PluginBase
- #this will be called by the main app to check whether this plugin is responsible for the url passed
- def self.matches_provider?(url)
- url.include?("youtube.com") || url.include?("youtu.be")
- end
-
- #get all videos and return their urls in an array
- def self.get_video_urls(feed_url)
- puts "[YOUTUBE] Retrieving videos..."
- urls_titles = Hash.new
- result_feed = Nokogiri::HTML(open(feed_url))
- urls_titles.merge!(grab_ut(result_feed))
-
- #as long as the feed has a next link we follow it and add the resulting video urls
- loop do
- next_link = result_feed.search("//feed/link[@rel='next']").first
- break if next_link.nil?
- result_feed = Nokogiri::HTML(open(next_link["href"]))
- urls_titles.merge!(grab_ut(result_feed))
- end
-
- self.filter_urls(urls_titles)
- end
-
- #returns only the urls that match the --filter argument regex (if present)
- def self.filter_urls(url_hash)
- #get the --filter arg or "" if it is not present (because nil would break the next line)
- filter = ARGV.find( proc {""} ) { |arg| arg =~ /--filter=/ }
- regex = filter[/--filter=(.+?)(?:\/|$)/, 1]
- if regex
- puts "[YOUTUBE] Using filter: #{regex}"
- ignore_case = filter.include?("/i")
- filtered = url_hash.select { |url, title| title =~ Regexp.new(regex, ignore_case) }
- filtered.keys
- else
- url_hash.keys
- end
- end
-
- #extract all video urls and their titles from a feed and return in a hash
- def self.grab_ut(feed)
- feed.remove_namespaces! #so that we can get to the titles easily
- urls = feed.search("//entry/link[@rel='alternate']").map { |link| link["href"] }
- titles = feed.search("//entry/group/title").map { |title| title.text }
- Hash[urls.zip(titles)] #hash like this: url => title
- end
-
- def self.parse_playlist(url)
- #http://www.youtube.com/view_play_list?p=F96B063007B44E1E&search_query=welt+auf+schwäbisch
- #http://www.youtube.com/watch?v=9WEP5nCxkEY&videos=jKY836_WMhE&playnext_from=TL&playnext=1
- #http://www.youtube.com/watch?v=Tk78sr5JMIU&videos=jKY836_WMhE
-
- playlist_ID = url[/(?:list=PL|p=)(\w{16})&?/,1]
- puts "[YOUTUBE] Playlist ID: #{playlist_ID}"
- feed_url = "http://gdata.youtube.com/feeds/api/playlists/#{playlist_ID}?&max-results=50&v=2"
- url_array = self.get_video_urls(feed_url)
- puts "[YOUTUBE] #{url_array.size} links found!"
- url_array
- end
-
- def self.parse_user(username)
- puts "[YOUTUBE] User: #{username}"
- feed_url = "http://gdata.youtube.com/feeds/api/users/#{username}/uploads?&max-results=50&v=2"
- url_array = get_video_urls(feed_url)
- puts "[YOUTUBE] #{url_array.size} links found!"
- url_array
- end
-
- def self.get_urls_and_filenames(url)
- return_values = []
- if url.include?("view_play_list") || url.include?("playlist?list=") #if playlist
- puts "[YOUTUBE] playlist found! analyzing..."
- files = self.parse_playlist(url)
- puts "[YOUTUBE] Starting playlist download"
- files.each do |file|
- puts "[YOUTUBE] Downloading next movie on the playlist (#{file})"
- return_values << self.grab_single_url_filename(file)
- end
- elsif match = url.match(/\/user\/([\w\d]+)$/) #if user url, e.g. youtube.com/user/woot
- username = match[1]
- video_urls = self.parse_user(username)
- puts "[YOUTUBE] Starting user videos download"
- video_urls.each do |url|
- puts "[YOUTUBE] Downloading next user video (#{url})"
- return_values << self.grab_single_url_filename(url)
- end
- else #if single video
- return_values << self.grab_single_url_filename(url)
- end
-
- return_values.reject! { |value| value == :no_embed } #remove results that can not be downloaded
- return_values.empty? ? exit : return_values #if no videos could be downloaded exit
- end
-
- def self.grab_single_url_filename(url)
- #the youtube video ID looks like this: [...]v=abc5a5_afe5agae6g&[...], we only want the ID (the \w in the brackets)
- #addition: might also look like this /v/abc5-a5afe5agae6g
- # alternative: video_id = url[/v[\/=]([\w-]*)&?/, 1]
- # First get the redirect
- if url.include?("youtu.be")
- url = open(url).base_uri.to_s
- end
- video_id = url[/(v|embed)[\/=]([^\/\?\&]*)/,2]
- if video_id.nil?
- puts "no video id found."
- exit
- else
- puts "[YOUTUBE] ID FOUND: #{video_id}"
- end
- #let's get some infos about the video. data is urlencoded
- yt_url = "http://www.youtube.com/get_video_info?video_id=#{video_id}"
- video_info = open(yt_url).read
- #converting the huge infostring into a hash. simply by splitting it at the & and then splitting it into key and value arround the =
- #[...]blabla=blubb&narf=poit&marc=awesome[...]
- video_info_hash = Hash[*video_info.split("&").collect { |v|
- key, encoded_value = v.split("=")
- if encoded_value.to_s.empty?
- value = ""
- else
- #decode until everything is "normal"
- while (encoded_value != CGI::unescape(encoded_value)) do
- #"decoding"
- encoded_value = CGI::unescape(encoded_value)
- end
- value = encoded_value
- end
-
- if key =~ /_map/
- orig_value = value
- value = value.split(",")
- if key == "url_encoded_fmt_stream_map"
- url_array = orig_value.split("url=").map{|url_string| url_string.chomp(",")}
- result_hash = {}
- url_array.each do |url|
- next if url.to_s.empty?
- format_id = url.match(/\&itag=(\d+)/)[1]
- result_hash[format_id] = url
- end
- value = result_hash
- elsif key == "fmt_map"
- value = Hash[*value.collect { |v|
- k2, *v2 = v.split("/")
- [k2, v2]
- }.flatten(1)]
- elsif key == "fmt_url_map" || key == "fmt_stream_map"
- Hash[*value.collect { |v| v.split("|")}.flatten]
- end
- end
- [key, value]
- }.flatten]
-
- if video_info_hash["status"] == "fail"
- puts "Error: embedding disabled, no video info found"
- return :no_embed
- end
-
- title = video_info_hash["title"]
- length_s = video_info_hash["length_seconds"]
- token = video_info_hash["token"]
-
- #for the formats, see: http://en.wikipedia.org/wiki/YouTube#Quality_and_codecs
- fmt_list = video_info_hash["fmt_list"].split(",")
- available_formats = fmt_list.map{|format| format.split("/").first}
-
- format_ext = {}
- format_ext["38"] = {:extension => "mp4", :name => "MP4 Highest Quality 4096x3027 (H.264, AAC)"}
- format_ext["37"] = {:extension => "mp4", :name => "MP4 Highest Quality 1920x1080 (H.264, AAC)"}
- format_ext["22"] = {:extension => "mp4", :name => "MP4 1280x720 (H.264, AAC)"}
- format_ext["45"] = {:extension => "webm", :name => "WebM 1280x720 (VP8, Vorbis)"}
- format_ext["44"] = {:extension => "webm", :name => "WebM 854x480 (VP8, Vorbis)"}
- format_ext["18"] = {:extension => "mp4", :name => "MP4 640x360 (H.264, AAC)"}
- format_ext["35"] = {:extension => "flv", :name => "FLV 854x480 (H.264, AAC)"}
- format_ext["34"] = {:extension => "flv", :name => "FLV 640x360 (H.264, AAC)"}
- format_ext["5"] = {:extension => "flv", :name => "FLV 400x240 (Soerenson H.263)"}
- format_ext["17"] = {:extension => "3gp", :name => "3gp"}
-
- #since 1.8 doesn't do ordered hashes
- prefered_order = ["38","37","22","45","44","18","35","34","5","17"]
-
- selected_format = prefered_order.select{|possible_format| available_formats.include?(possible_format)}.first
-
- puts "[YOUTUBE] Title: #{title}"
- puts "[YOUTUBE] Length: #{length_s} s"
- puts "[YOUTUBE] t-parameter: #{token}"
- #best quality seems always to be firsts
- puts "[YOUTUBE] formats available: #{available_formats.inspect} (downloading format #{selected_format} -> #{format_ext[selected_format][:name]})"
-
- #video_info_hash.keys.sort.each{|key| puts "#{key} : #{video_info_hash[key]}" }
- download_url = video_info_hash["url_encoded_fmt_stream_map"][selected_format]
- #if download url ends with a ';' followed by a codec string remove that part because it stops URI.parse from working
- download_url = $1 if download_url =~ /(.*?);\scodecs=/
- file_name = PluginBase.make_filename_safe(title) + "." + format_ext[selected_format][:extension]
- puts "downloading to " + file_name
- {:url => download_url, :name => file_name}
- end
- end
+
+ class Youtube < PluginBase
+ #this will be called by the main app to check whether this plugin is responsible for the url passed
+ def self.matches_provider?(url)
+ url.include?("youtube.com") || url.include?("youtu.be")
+ end
+
+ #get all videos and return their urls in an array
+ def self.get_video_urls(feed_url)
+ puts "[YOUTUBE] Retrieving videos..."
+ urls_titles = Hash.new
+ result_feed = Nokogiri::XML(open(feed_url))
+ urls_titles.merge!(grab_ut(result_feed))
+
+ #as long as the feed has a next link we follow it and add the resulting video urls
+ loop do
+ next_link = result_feed.search("//feed/link[@rel='next']").first
+ break if next_link.nil?
+ result_feed = Nokogiri::HTML(open(next_link["href"]))
+ urls_titles.merge!(grab_ut(result_feed))
+ end
+
+ self.filter_urls(urls_titles)
+ end
+
+ #returns only the urls that match the --filter argument regex (if present)
+ def self.filter_urls(url_hash)
+ if @filter
+ puts "[YOUTUBE] Using filter: #{@filter}"
+ filtered = url_hash.select { |url, title| title =~ @filter }
+ filtered.keys
+ else
+ url_hash.keys
+ end
+ end
+
+ #extract all video urls and their titles from a feed and return in a hash
+ def self.grab_ut(feed)
+ feed.remove_namespaces! #so that we can get to the titles easily
+ urls = feed.search("//entry/link[@rel='alternate']").map { |link| link["href"] }
+ titles = feed.search("//entry/group/title").map { |title| title.text }
+ Hash[urls.zip(titles)] #hash like this: url => title
+ end
+
+ def self.parse_playlist(url)
+ #http://www.youtube.com/view_play_list?p=F96B063007B44E1E&search_query=welt+auf+schwäbisch
+ #http://www.youtube.com/watch?v=9WEP5nCxkEY&videos=jKY836_WMhE&playnext_from=TL&playnext=1
+ #http://www.youtube.com/watch?v=Tk78sr5JMIU&videos=jKY836_WMhE
+
+ playlist_ID = url[/(?:list=PL|p=)(\w{16})&?/,1]
+ puts "[YOUTUBE] Playlist ID: #{playlist_ID}"
+ feed_url = "http://gdata.youtube.com/feeds/api/playlists/#{playlist_ID}?&max-results=50&v=2"
+ url_array = self.get_video_urls(feed_url)
+ puts "[YOUTUBE] #{url_array.size} links found!"
+ url_array
+ end
+
+ def self.parse_user(username)
+ puts "[YOUTUBE] User: #{username}"
+ feed_url = "http://gdata.youtube.com/feeds/api/users/#{username}/uploads?&max-results=50&v=2"
+ url_array = get_video_urls(feed_url)
+ puts "[YOUTUBE] #{url_array.size} links found!"
+ url_array
+ end
+
+ def self.get_urls_and_filenames(url, options = {})
+ @filter = options[:playlist_filter] #used to filter a playlist in self.filter_urls
+ return_values = []
+ if url.include?("view_play_list") || url.include?("playlist?list=") #if playlist
+ puts "[YOUTUBE] playlist found! analyzing..."
+ files = self.parse_playlist(url)
+ puts "[YOUTUBE] Starting playlist download"
+ files.each do |file|
+ puts "[YOUTUBE] Downloading next movie on the playlist (#{file})"
+ return_values << self.grab_single_url_filename(file)
+ end
+ elsif match = url.match(/\/user\/([\w\d]+)$/) #if user url, e.g. youtube.com/user/woot
+ username = match[1]
+ video_urls = self.parse_user(username)
+ puts "[YOUTUBE] Starting user videos download"
+ video_urls.each do |url|
+ puts "[YOUTUBE] Downloading next user video (#{url})"
+ return_values << self.grab_single_url_filename(url)
+ end
+ else #if single video
+ return_values << self.grab_single_url_filename(url)
+ end
+ return_values.reject! { |value| value == :no_embed } #remove results that can not be downloaded
+
+ if return_values.empty?
+ raise CouldNotDownloadVideoError, "No videos could be downloaded - embedding disabled."
+ else
+ return_values
+ end
+ end
+
+ def self.grab_single_url_filename(url)
+ #the youtube video ID looks like this: [...]v=abc5a5_afe5agae6g&[...], we only want the ID (the \w in the brackets)
+ #addition: might also look like this /v/abc5-a5afe5agae6g
+ # alternative: video_id = url[/v[\/=]([\w-]*)&?/, 1]
+ # First get the redirect
+ if url.include?("youtu.be")
+ url = open(url).base_uri.to_s
+ end
+ video_id = url[/(v|embed)[=\/]([^\/\?\&]*)/,2]
+ if video_id.nil?
+ raise CouldNotDownloadVideoError, "No video id found."
+ else
+ puts "[YOUTUBE] ID FOUND: #{video_id}"
+ end
+ #let's get some infos about the video. data is urlencoded
+ yt_url = "http://www.youtube.com/get_video_info?video_id=#{video_id}"
+ video_info = RestClient.get(yt_url).body
+ #converting the huge infostring into a hash. simply by splitting it at the & and then splitting it into key and value arround the =
+ #[...]blabla=blubb&narf=poit&marc=awesome[...]
+ video_info_hash = Hash[*video_info.split("&").collect { |v|
+ key, encoded_value = v.split("=")
+ if encoded_value.to_s.empty?
+ value = ""
+ else
+ #decode until everything is "normal"
+ while (encoded_value != CGI::unescape(encoded_value)) do
+ #"decoding"
+ encoded_value = CGI::unescape(encoded_value)
+ end
+ value = encoded_value
+ end
+
+ if key =~ /_map/
+ orig_value = value
+ value = value.split(",")
+ if key == "url_encoded_fmt_stream_map"
+ url_array = orig_value.split("url=").map{|url_string| url_string.chomp(",")}
+ result_hash = {}
+ url_array.each do |url|
+ next if url.to_s.empty? || url.to_s.match(/^itag/)
+ format_id = url[/\&itag=(\d+)/, 1]
+ result_hash[format_id] = url
+ end
+ value = result_hash
+ elsif key == "fmt_map"
+ value = Hash[*value.collect { |v|
+ k2, *v2 = v.split("/")
+ [k2, v2]
+ }.flatten(1)]
+ elsif key == "fmt_url_map" || key == "fmt_stream_map"
+ Hash[*value.collect { |v| v.split("|")}.flatten]
+ end
+ end
+ [key, value]
+ }.flatten]
+
+ if video_info_hash["status"] == "fail"
+ return :no_embed
+ end
+
+ title = video_info_hash["title"]
+ length_s = video_info_hash["length_seconds"]
+ token = video_info_hash["token"]
+
+ #for the formats, see: http://en.wikipedia.org/wiki/YouTube#Quality_and_codecs
+ fmt_list = video_info_hash["fmt_list"].split(",")
+ available_formats = fmt_list.map{|format| format.split("/").first}
+
+ format_ext = {}
+ format_ext["38"] = {:extension => "mp4", :name => "MP4 Highest Quality 4096x3027 (H.264, AAC)"}
+ format_ext["37"] = {:extension => "mp4", :name => "MP4 Highest Quality 1920x1080 (H.264, AAC)"}
+ format_ext["22"] = {:extension => "mp4", :name => "MP4 1280x720 (H.264, AAC)"}
+ format_ext["45"] = {:extension => "webm", :name => "WebM 1280x720 (VP8, Vorbis)"}
+ format_ext["44"] = {:extension => "webm", :name => "WebM 854x480 (VP8, Vorbis)"}
+ format_ext["18"] = {:extension => "mp4", :name => "MP4 640x360 (H.264, AAC)"}
+ format_ext["35"] = {:extension => "flv", :name => "FLV 854x480 (H.264, AAC)"}
+ format_ext["34"] = {:extension => "flv", :name => "FLV 640x360 (H.264, AAC)"}
+ format_ext["5"] = {:extension => "flv", :name => "FLV 400x240 (Soerenson H.263)"}
+ format_ext["17"] = {:extension => "3gp", :name => "3gp"}
+
+ #since 1.8 doesn't do ordered hashes
+ prefered_order = ["38","37","22","45","44","18","35","34","5","17"]
+
+ selected_format = prefered_order.select{|possible_format| available_formats.include?(possible_format)}.first
+
+ puts "[YOUTUBE] Title: #{title}"
+ puts "[YOUTUBE] Length: #{length_s} s"
+ puts "[YOUTUBE] t-parameter: #{token}"
+ #best quality seems always to be firsts
+ puts "[YOUTUBE] formats available: #{available_formats.inspect} (downloading format #{selected_format} -> #{format_ext[selected_format][:name]})"
+
+ #video_info_hash.keys.sort.each{|key| puts "#{key} : #{video_info_hash[key]}" }
+ download_url = video_info_hash["url_encoded_fmt_stream_map"][selected_format]
+ #if download url ends with a ';' followed by a codec string remove that part because it stops URI.parse from working
+ download_url = $1 if download_url =~ /(.*?);\scodecs=/
+ file_name = PluginBase.make_filename_safe(title) + "." + format_ext[selected_format][:extension]
+ puts "downloading to " + file_name
+ {:url => download_url, :name => file_name}
+ end
+ end
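
Taken together, every plugin's `get_urls_and_filenames` now accepts an options hash, and the YouTube plugin reads `options[:playlist_filter]` and matches it against video titles with `=~`, so a Regexp is a natural value to pass. A rough usage sketch under those assumptions (the plugin classes and the `:playlist_filter` key appear in the diff; the driver loop and the example URL are hypothetical):

# hypothetical driver: pick whichever plugin claims the URL
url = "http://www.youtube.com/playlist?list=PLF96B063007B44E1E"
plugin = [Youtube, Vimeo, Veoh, Soundcloud, Metacafe].find { |p| p.matches_provider?(url) }

# keep only playlist entries whose titles match /rails/i (a YouTube-only option; other plugins ignore it)
downloads = plugin.get_urls_and_filenames(url, :playlist_filter => /rails/i)
downloads.each { |d| puts "#{d[:name]} <- #{d[:url]}" }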