gotascii-crags 1.2.7 → 1.4.2

data/Manifest.txt CHANGED
@@ -9,7 +9,6 @@ lib/crags/fetch.rb
  lib/crags/proxy.rb
  lib/crags/runner.rb
  lib/crags/searcher.rb
- lib/js/client.html
  test/crags/fetch_test.rb
  test/crags/proxy_test.rb
  test/crags/runner_test.rb
data/crags.gemspec CHANGED
@@ -1,20 +1,22 @@
+ # -*- encoding: utf-8 -*-
+
  Gem::Specification.new do |s|
    s.name = %q{crags}
-   s.version = "1.2.7"
+   s.version = "1.4.2"

    s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
    s.authors = ["Justin Marney"]
-   s.date = %q{2009-02-05}
+   s.date = %q{2009-06-16}
    s.description = %q{A library to help search across multiple craigslist locations.}
-   s.email = %q{gotascii@gmail.com}
-   s.extra_rdoc_files = ["History.txt", "README.txt", "lib/js/client.html"]
-   s.files = [".gitignore", "History.txt", "Manifest.txt", "README.txt", "Rakefile", "crags.gemspec", "lib/crags.rb", "lib/crags/fetch.rb", "lib/crags/proxy.rb", "lib/crags/runner.rb", "lib/crags/searcher.rb", "lib/js/client.html", "tasks/ann.rake", "tasks/bones.rake", "tasks/gem.rake", "tasks/git.rake", "tasks/manifest.rake", "tasks/notes.rake", "tasks/post_load.rake", "tasks/rdoc.rake", "tasks/rubyforge.rake", "tasks/setup.rb", "tasks/spec.rake", "tasks/svn.rake", "tasks/test.rake", "test/crags/fetch_test.rb", "test/crags/proxy_test.rb", "test/crags/runner_test.rb", "test/crags/searcher_test.rb", "test/test_helper.rb"]
+   s.email = %q{justin.marney@viget.com}
+   s.extra_rdoc_files = ["History.txt", "README.txt"]
+   s.files = [".gitignore", "History.txt", "Manifest.txt", "README.txt", "Rakefile", "crags.gemspec", "lib/crags.rb", "lib/crags/fetch.rb", "lib/crags/proxy.rb", "lib/crags/runner.rb", "lib/crags/searcher.rb", "test/crags/fetch_test.rb", "test/crags/proxy_test.rb", "test/crags/runner_test.rb", "test/crags/searcher_test.rb", "test/test_helper.rb"]
    s.has_rdoc = true
-   s.homepage = %q{http://github.com/gotascii/crags}
+   s.homepage = %q{http://github.com/vigetlabs/crags}
    s.rdoc_options = ["--main", "README.txt"]
    s.require_paths = ["lib"]
    s.rubyforge_project = %q{crags}
    s.rubygems_version = %q{1.3.1}
    s.summary = %q{A library to help search across multiple craigslist locations}
    s.test_files = ["test/crags/fetch_test.rb", "test/crags/proxy_test.rb", "test/crags/runner_test.rb", "test/crags/searcher_test.rb"]
- end
+ end
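With the gemspec bumped, the updated gem can be rebuilt and installed locally to verify the new metadata; this is RubyGems' standard build flow, nothing gem-specific assumed:

    gem build crags.gemspec
    gem install crags-1.4.2.gem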
data/lib/crags/searcher.rb CHANGED
@@ -7,16 +7,16 @@ module Crags
    url.gsub(/http\:\/\/(.*)(\/|(.html))/,'\1\3')
  end

- def location_doc
-   fetch_doc("http://geo.craigslist.org/iso/us")
+ def location_doc(country)
+   fetch_doc("http://geo.craigslist.org/iso/#{country}")
  end

- def location_links
-   location_doc.search("#list a")
+ def location_links(country)
+   location_doc(country).search("#list a")
  end

- def locations
-   location_links.collect{|link| strip_http(link["href"]) }
+ def locations(country)
+   location_links(country).collect{|link| strip_http(link["href"]) }
  end

  def categories
@@ -29,8 +29,8 @@ module Crags
    categories
  end

- def search(keyword, category = 'sss', &block)
-   locations.collect do |loc|
+ def search(keyword, country = 'us', category = 'sss', &block)
+   locations(country).collect do |loc|
      sleep(1 + rand(3))
      search_location(keyword, loc, category, &block)
    end.flatten
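The change above threads a country code through the whole location-lookup chain, defaulting to 'us' so existing callers keep working. A minimal usage sketch against the new 1.4.x signatures (hypothetical session; search performs live HTTP requests via curb/hpricot and sleeps between locations, so it is slow by design):

    require 'rubygems'
    require 'crags'

    include Crags::Searcher

    # Old behavior, unchanged: defaults to country 'us', category 'sss'.
    search('fixed gear')

    # New: search Canadian craigslist sites, e.g. in the 'bik' category.
    search('fixed gear', 'ca', 'bik')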
data/lib/crags.rb CHANGED
@@ -1,10 +1,10 @@
  require 'rubygems'
  require 'curb'
  require 'hpricot'
- require "erb"
+ require 'erb'

  module Crags
-   VERSION = '1.2.7'
+   VERSION = '1.4.2'
    LIBPATH = ::File.expand_path(::File.dirname(__FILE__)) + ::File::SEPARATOR
    PATH = ::File.dirname(LIBPATH) + ::File::SEPARATOR

@@ -19,6 +19,36 @@ module Crags
    def self.path( *args )
      args.empty? ? PATH : ::File.join(PATH, *args)
    end
+
+   COUNTRIES = [
+     'jp',
+     'ar',
+     'bd',
+     'br',
+     'ca',
+     'cl',
+     'co',
+     'cr',
+     'cz',
+     'eg',
+     'hu',
+     'id',
+     'ie',
+     'il',
+     'lb',
+     'my',
+     'nl',
+     'nz',
+     'no',
+     'pk',
+     'pa',
+     'ru',
+     'th',
+     'ae',
+     'us',
+     've',
+     'vn'
+   ]
  end

  require 'crags/fetch'
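The new Crags::COUNTRIES constant enumerates the craigslist country codes the country-aware searcher understands. A sketch of fanning one query out across all of them (assumption: each code is a valid geo.craigslist.org ISO path; every iteration makes live requests):

    require 'rubygems'
    require 'crags'

    include Crags::Searcher

    Crags::COUNTRIES.each do |country|
      hits = search('vintage lens', country)
      puts "#{country}: #{hits.size} results"
    end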
data/test/crags/fetch_test.rb CHANGED
@@ -1,20 +1,21 @@
- require '../test_helper'
+ require File.dirname(__FILE__) + '/../test_helper'

- context "Fetch" do
-   setup do
-     extend Crags::Fetch
-   end
+ class Crags::FetchTest < Test::Unit::TestCase
+   context "Fetch" do
+     setup do
+       extend Crags::Fetch
+     end

-   specify "fetch doc should hpricot fetched html" do
-     stubs(:fetch_html).with("url").returns("html")
-     Hpricot.expects(:parse).with("html").returns("doc")
-     fetch_doc("url").should == "doc"
-   end
+     should "fetch doc should hpricot fetched html" do
+       stubs(:fetch_html).with("url").returns("html")
+       Hpricot.expects(:parse).with("html").returns("doc")
+       fetch_doc("url").should == "doc"
+     end

-   specify "fetch html should curl a url" do
-     curb = stub(:body_str => "uhh")
-     Curl::Easy.expects(:perform).with("url").returns(curb)
-     fetch_html("url").should == "uhh"
+     should "fetch html should curl a url" do
+       curb = stub(:body_str => "uhh")
+       Curl::Easy.expects(:perform).with("url").returns(curb)
+       fetch_html("url").should == "uhh"
+     end
    end
- end
-
+ end
data/test/crags/proxy_test.rb CHANGED
@@ -1,24 +1,25 @@
- require '../test_helper'
+ require File.dirname(__FILE__) + '/../test_helper'

- context "Proxy" do
-   setup do
-     extend Crags::Proxy
-   end
+ class Crags::ProxyTest < Test::Unit::TestCase
+   context "Proxy" do
+     setup do
+       extend Crags::Proxy
+     end

-   specify "lists should return a list of proxy list websites" do
-     lists.should == ["http://www.proxy4free.com/page1.html"]
-   end
+     should "lists should return a list of proxy list websites" do
+       lists.should == ["http://www.proxy4free.com/page1.html", "http://www.proxy4free.com/page3.html"]
+     end

-   specify "fetch lists should fetch html for each site in lists" do
-     stubs(:lists).returns(["1", "2"])
-     expects(:fetch_html).with("1").returns("html_1")
-     expects(:fetch_html).with("2").returns("html_2")
-     fetch_lists.should == ["html_1", "html_2"]
-   end
-
-   specify "scan should return all ips in a text blizoc" do
-     text = "192.168.1.2 omg dude!! wtf.f.f.asdasd9.8.9 78.900.42.32"
-     scan(text).should == ["192.168.1.2", "78.900.42.32"]
-   end
- end
+     should "fetch lists should fetch html for each site in lists" do
+       stubs(:lists).returns(["1", "2"])
+       expects(:fetch_html).with("1").returns("html_1")
+       expects(:fetch_html).with("2").returns("html_2")
+       fetch_lists.should == ["html_1", "html_2"]
+     end

+     should "scan should return all ips in a text blizoc" do
+       text = "192.168.1.2 omg dude!! wtf.f.f.asdasd9.8.9 78.900.42.32"
+       scan(text).should == ["192.168.1.2", "78.900.42.32"]
+     end
+   end
+ end
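The scan assertions above pin down the method's contract, including the deliberately out-of-range "78.900.42.32": the matcher is syntactic, not range-checking. One regex-based implementation that satisfies both expectations (a sketch consistent with the tests, not necessarily the gem's actual code):

    # Sketch: extract anything shaped like a dotted quad; no octet validation.
    def scan(text)
      text.scan(/\d+\.\d+\.\d+\.\d+/)
    end

    scan("192.168.1.2 omg dude!! wtf.f.f.asdasd9.8.9 78.900.42.32")
    # => ["192.168.1.2", "78.900.42.32"]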
data/test/crags/runner_test.rb CHANGED
@@ -1,26 +1,28 @@
- require '../test_helper'
+ require File.dirname(__FILE__) + '/../test_helper'

- context "Runner" do
-   setup do
-     @runner = Crags::Runner.new
-     @runner.stubs(:fetch_doc)
-     @runner.stubs(:items).returns([])
-   end
+ class Crags::RunnerTest < Test::Unit::TestCase
+   context "instance of Runner" do
+     setup do
+       @runner = Crags::Runner.new
+       @runner.stubs(:fetch_doc)
+       @runner.stubs(:items).returns([])
+     end

-   specify "runner should include searcher" do
-     Crags::Runner.ancestors.should.include Crags::Searcher
-   end
-
-   specify "search location should puts message with loc" do
-     @runner.expects(:puts).with { |val| val =~ /location/ }
-     @runner.search_location("", "location", "category")
-   end
-
-   specify "search location should take a category" do
-     @runner.search_location("", "location", "category")
-   end
+     should "runner should include searcher" do
+       Crags::Runner.ancestors.include?(Crags::Searcher).should == true
+     end
+
+     should "search location should puts message with loc" do
+       @runner.expects(:puts).with { |val| val =~ /location/ }
+       @runner.search_location("", "location", "category")
+     end
+
+     should "search location should take a category" do
+       @runner.search_location("", "location", "category")
+     end

-   specify "search location should have default category sss" do
-     @runner.search_location("", "location")
+     should "search location should have default category sss" do
+       @runner.search_location("", "location")
+     end
    end
  end
data/test/crags/searcher_test.rb CHANGED
@@ -1,124 +1,127 @@
- require '../test_helper'
+ require File.dirname(__FILE__) + '/../test_helper'

- context "Searcher with stubbed fetch doc" do
-   setup do
-     extend Crags::Searcher
-     stubs(:sleep)
-     stubs(:fetch_doc)
-   end
+ class Crags::SearcherTest < Test::Unit::TestCase

-   specify "strip_http should remove http:// and trailing /" do
-     url = "http://omg/"
-     strip_http(url).should == "omg"
-   end
+   context "Searcher with stubbed fetch doc" do
+     setup do
+       extend Crags::Searcher
+       stubs(:sleep)
+       stubs(:fetch_doc)
+     end

-   specify "location doc should fetch doc at location url" do
-     expects(:fetch_doc).with("http://geo.craigslist.org/iso/us").returns("doc")
-     location_doc.should == "doc"
-   end
+     should "strip_http should remove http:// and trailing /" do
+       url = "http://omg/"
+       strip_http(url).should == "omg"
+     end

-   specify "location links should get all a tags from div with id list" do
-     doc = mock { expects(:search).with("#list a").returns("links") }
-     stubs(:location_doc).returns(doc)
-     location_links.should == "links"
-   end
+     should "location doc should fetch doc at location url" do
+       expects(:fetch_doc).with("http://geo.craigslist.org/iso/us").returns("doc")
+       location_doc('us').should == "doc"
+     end

-   specify "locations should return array of urls using a location link's href" do
-     links = []
-     2.times do |i|
-       links << mock {|m| m.expects(:[]).with("href").returns("http://url#{i}/") }
+     should "location links should get all a tags from div with id list" do
+       doc = mock { expects(:search).with("#list a").returns("links") }
+       stubs(:location_doc).returns(doc)
+       location_links('us').should == "links"
      end
-     stubs(:location_links).returns(links)
-     locations.should == ["url0", "url1"]
-   end

-   specify "search should search location for each location with keyword and return list" do
-     locations = ["url0", "url1"]
+     should "locations should return array of urls using a location link's href" do
+       links = []
+       2.times do |i|
+         links << mock {|m| m.expects(:[]).with("href").returns("http://url#{i}/") }
+       end
+       stubs(:location_links).returns(links)
+       locations('us').should == ["url0", "url1"]
+     end

-     locations.each do |loc|
-       expects(:search_location).with("omg", loc, 'sss').returns(["1#{loc}", "2#{loc}"])
+     should "search should search location for each location with keyword and return list" do
+       locations = ["url0", "url1"]
+
+       locations.each do |loc|
+         expects(:search_location).with("omg", loc, 'sss').returns(["1#{loc}", "2#{loc}"])
+       end
+
+       stubs(:locations).returns(locations)
+       search("omg").should == ["1url0", "2url0", "1url1", "2url1"]
      end

-     stubs(:locations).returns(locations)
-     search("omg").should == ["1url0", "2url0", "1url1", "2url1"]
-   end
+     should "search should call sleep for each location" do
+       expects(:sleep).times(2)
+       stubs(:locations).returns([1,2])
+       stubs(:search_location)
+       search("")
+     end

-   specify "search should call sleep for each location" do
-     expects(:sleep).times(2)
-     stubs(:locations).returns([1,2])
-     stubs(:search_location)
-     search("")
-   end
+     should "search location should fetch doc for search url" do
+       expects(:fetch_doc).with("http://url/search/sss?query=keyword&format=rss")
+       stubs(:items).returns([])
+       search_location("keyword", "url")
+     end

-   specify "search location should fetch doc for search url" do
-     expects(:fetch_doc).with("http://url/search/sss?query=keyword&format=rss")
-     stubs(:items).returns([])
-     search_location("keyword", "url")
-   end
+     should "search location should create return items" do
+       items = [1,2,3]
+       expects(:items).returns(items)
+       search_location("keyword", "url").should == items
+     end

-   specify "search location should create return items" do
-     items = [1,2,3]
-     expects(:items).returns(items)
-     search_location("keyword", "url").should == items
-   end
+     should "items should get all item elements from doc" do
+       item = stub
+       stubs(:hashify).with(item).returns(1)
+       doc = mock { expects(:search).with("item").returns([item]) }
+       items(doc).should == [1]
+     end

-   specify "items should get all item elements from doc" do
-     item = stub
-     stubs(:hashify).with(item).returns(1)
-     doc = mock { expects(:search).with("item").returns([item]) }
-     items(doc).should == [1]
-   end
+     should "items should hashify all item elements from doc" do
+       item = stub
+       expects(:hashify).with(item).returns(1)
+       doc = stub { stubs(:search).returns([item]) }
+       items(doc).should == [1]
+     end

-   specify "items should hashify all item elements from doc" do
-     item = stub
-     expects(:hashify).with(item).returns(1)
-     doc = stub { stubs(:search).returns([item]) }
-     items(doc).should == [1]
-   end
+     should "categories should fetch doc the main sfbay page" do
+       doc = stub(:search => [])
+       expects(:fetch_doc).with("http://sfbay.craigslist.org/").returns(doc)
+       categories
+     end

-   specify "categories should fetch doc the main sfbay page" do
-     doc = stub(:search => [])
-     expects(:fetch_doc).with("http://sfbay.craigslist.org/").returns(doc)
-     categories
-   end
+     should "categories should search for all links in the table with property summary equal to for sale" do
+       doc = mock { expects(:search).with("table[@summary=\"for sale\"] a").returns([]) }
+       stubs(:fetch_doc).returns(doc)
+       categories
+     end

-   specify "categories should search for all links in the table with property summary equal to for sale" do
-     doc = mock { expects(:search).with("table[@summary=\"for sale\"] a").returns([]) }
-     stubs(:fetch_doc).returns(doc)
-     categories
-   end
+     should "categories should return a hash with link inner html keys and link href values" do
+       link = stub(:inner_html => "inner_html") do
+         stubs(:[]).with("href").returns("href")
+       end

-   specify "categories should return a hash with link inner html keys and link href values" do
-     link = stub(:inner_html => "inner_html") do
-       stubs(:[]).with("href").returns("href")
+       doc = stub(:search => [link, link])
+       stubs(:fetch_doc).returns(doc)
+       categories.should == {'inner_html' => 'href', 'inner_html' => 'href'}
      end

-     doc = stub(:search => [link, link])
-     stubs(:fetch_doc).returns(doc)
-     categories.should == {'inner_html' => 'href', 'inner_html' => 'href'}
-   end
+     should "search location should accept a category parameter" do
+       expects(:fetch_doc).with("http://loc/search/scram?query=keyword&format=rss")
+       stubs(:items).returns([])
+       search_location('keyword', 'loc', 'scram')
+     end

-   specify "search location should accept a category parameter" do
-     expects(:fetch_doc).with("http://loc/search/scram?query=keyword&format=rss")
-     stubs(:items).returns([])
-     search_location('keyword', 'loc', 'scram')
-   end
+     should "search location default category is sss" do
+       expects(:fetch_doc).with("http://loc/search/sss?query=keyword&format=rss")
+       stubs(:items).returns([])
+       search_location('keyword', 'loc')
+     end

-   specify "search location default category is sss" do
-     expects(:fetch_doc).with("http://loc/search/sss?query=keyword&format=rss")
-     stubs(:items).returns([])
-     search_location('keyword', 'loc')
-   end
+     should "search should pass parameter to search location" do
+       stubs(:locations).returns([0])
+       expects(:search_location).with('keyword', 0, 'chum')
+       search('keyword', 'us', 'chum')
+     end

-   specify "search should pass parameter to search location" do
-     stubs(:locations).returns([0])
-     expects(:search_location).with('keyword', 0, 'chum')
-     search('keyword', 'chum')
-   end
-
-   specify "search should have default category of sss" do
-     stubs(:locations).returns([0])
-     expects(:search_location).with('keyword', 0, 'sss')
-     search('keyword')
+     should "search should have default category of sss" do
+       stubs(:locations).returns([0])
+       expects(:search_location).with('keyword', 0, 'sss')
+       search('keyword')
+     end
    end
- end
+ end
data/test/test_helper.rb CHANGED
@@ -1,4 +1,5 @@
  require 'rubygems'
- require 'test/spec'
+ require 'shoulda'
+ require 'matchy'
  require 'mocha'
  require 'crags'
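The helper swap replaces test/spec with the shoulda + matchy pair: shoulda supplies the context/should blocks the suites now use, and matchy keeps the .should == expectation syntax the old specs relied on, which is why the tests moved over with so little rewriting. A minimal self-contained example of the new style (hypothetical file, not part of the gem):

    require 'rubygems'
    require 'test/unit'
    require 'shoulda'
    require 'matchy'

    class StyleExampleTest < Test::Unit::TestCase
      context "shoulda with matchy" do
        should "support matchy's == expectation" do
          (1 + 1).should == 2
        end
      end
    end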
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: gotascii-crags
  version: !ruby/object:Gem::Version
-   version: 1.2.7
+   version: 1.4.2
  platform: ruby
  authors:
  - Justin Marney
@@ -9,12 +9,12 @@ autorequire:
  bindir: bin
  cert_chain: []

- date: 2009-02-05 00:00:00 -08:00
+ date: 2009-06-16 00:00:00 -07:00
  default_executable:
  dependencies: []

  description: A library to help search across multiple craigslist locations.
- email: gotascii@gmail.com
+ email: justin.marney@viget.com
  executables: []

  extensions: []
@@ -22,7 +22,6 @@ extensions: []
  extra_rdoc_files:
  - History.txt
  - README.txt
- - lib/js/client.html
  files:
  - .gitignore
  - History.txt
@@ -35,27 +34,13 @@ files:
  - lib/crags/proxy.rb
  - lib/crags/runner.rb
  - lib/crags/searcher.rb
- - lib/js/client.html
- - tasks/ann.rake
- - tasks/bones.rake
- - tasks/gem.rake
- - tasks/git.rake
- - tasks/manifest.rake
- - tasks/notes.rake
- - tasks/post_load.rake
- - tasks/rdoc.rake
- - tasks/rubyforge.rake
- - tasks/setup.rb
- - tasks/spec.rake
- - tasks/svn.rake
- - tasks/test.rake
  - test/crags/fetch_test.rb
  - test/crags/proxy_test.rb
  - test/crags/runner_test.rb
  - test/crags/searcher_test.rb
  - test/test_helper.rb
  has_rdoc: true
- homepage: http://github.com/gotascii/crags
+ homepage: http://github.com/vigetlabs/crags
  post_install_message:
  rdoc_options:
  - --main
data/lib/js/client.html DELETED
@@ -1,81 +0,0 @@
- <html>
-   <head>
-     <script language="javascript">
-       var IFrameObj; // our IFrame object
-       var IFrameDoc;
-       function callToServer() {
-         if (!document.createElement) {return true};
-         var URL = 'http://washingtondc.craigslist.org/bik/index.rss';
-         if (!IFrameObj && document.createElement) {
-           // create the IFrame and assign a reference to the
-           // object to our global variable IFrameObj.
-           // this will only happen the first time
-           // callToServer() is called
-           try {
-             var tempIFrame=document.createElement('iframe');
-             tempIFrame.setAttribute('id','RSIFrame');
-             tempIFrame.style.border='0px';
-             tempIFrame.style.width='0px';
-             tempIFrame.style.height='0px';
-             IFrameObj = document.body.appendChild(tempIFrame);
-
-             if (document.frames) {
-               // this is for IE5 Mac, because it will only
-               // allow access to the document object
-               // of the IFrame if we access it through
-               // the document.frames array
-               IFrameObj = document.frames['RSIFrame'];
-             }
-           } catch(exception) {
-             // This is for IE5 PC, which does not allow dynamic creation
-             // and manipulation of an iframe object. Instead, we'll fake
-             // it up by creating our own objects.
-             iframeHTML='\<iframe id="RSIFrame" style="';
-             iframeHTML+='border:0px;';
-             iframeHTML+='width:0px;';
-             iframeHTML+='height:0px;';
-             iframeHTML+='"><\/iframe>';
-             document.body.innerHTML+=iframeHTML;
-             IFrameObj = new Object();
-             IFrameObj.document = new Object();
-             IFrameObj.document.location = new Object();
-             IFrameObj.document.location.iframe = document.getElementById('RSIFrame');
-             IFrameObj.document.location.replace = function(location) {
-               this.iframe.src = location;
-             }
-           }
-         }
-
-         if (navigator.userAgent.indexOf('Gecko') !=-1 && !IFrameObj.contentDocument) {
-           // we have to give NS6 a fraction of a second
-           // to recognize the new IFrame
-           setTimeout('callToServer()',10);
-           return false;
-         }
-
-         if (IFrameObj.contentDocument) {
-           // For NS6
-           IFrameDoc = IFrameObj.contentDocument;
-         } else if (IFrameObj.contentWindow) {
-           // For IE5.5 and IE6
-           IFrameDoc = IFrameObj.contentWindow.document;
-         } else if (IFrameObj.document) {
-           // For IE5
-           IFrameDoc = IFrameObj.document;
-         } else {
-           return true;
-         }
-
-         IFrameDoc.location.replace(URL);
-         return false;
-       }
-     </script>
-   </head>
-   <body>
-     <script>
-       callToServer();
-       alert(IFrameDoc.innerHTML);
-     </script>
-   </body>
- </html>