acts_as_tokenizable 0.3.3 → 0.4.0

data/VERSION CHANGED
@@ -1 +1 @@
- 0.3.0
+ 0.4.0
data/acts_as_tokenizable.gemspec CHANGED
@@ -1,54 +1,52 @@
  # Generated by jeweler
  # DO NOT EDIT THIS FILE DIRECTLY
- # Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
+ # Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
  # -*- encoding: utf-8 -*-
 
  Gem::Specification.new do |s|
  s.name = %q{acts_as_tokenizable}
- s.version = "0.3.3"
+ s.version = "0.4.0"
 
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
- s.authors = ["Enrique Garcia Cota", "Francisco de Juan"]
- s.date = %q{2010-01-12}
+ s.authors = [%q{Enrique Garcia Cota}, %q{Francisco de Juan}]
+ s.date = %q{2012-03-20}
  s.description = %q{Make ActiveRecord models easily searchable via tokens.}
  s.email = %q{github@splendeo.es}
  s.extra_rdoc_files = [
  "LICENSE",
- "README",
- "README.rdoc"
+ "README",
+ "README.rdoc"
  ]
  s.files = [
  "LICENSE",
- "README",
- "README.rdoc",
- "Rakefile",
- "VERSION",
- "acts_as_tokenizable.gemspec",
- "init.rb",
- "lib/acts_as_tokenizable.rb",
- "lib/acts_as_tokenizable/acts_as_tokenizable.rb",
- "lib/acts_as_tokenizable/string_extensions.rb",
- "lib/tasks/acts_as_tokenizable.rake",
- "pkg/acts_as_tokenizable-0.1.0.gem",
- "pkg/acts_as_tokenizable-0.2.0.gem",
- "test/helper.rb",
- "test/test_acts_as_tokenizable.rb"
+ "README",
+ "README.rdoc",
+ "Rakefile",
+ "VERSION",
+ "acts_as_tokenizable.gemspec",
+ "init.rb",
+ "lib/acts_as_tokenizable.rb",
+ "lib/acts_as_tokenizable/acts_as_tokenizable.rb",
+ "lib/acts_as_tokenizable/string_extensions.rb",
+ "lib/tasks/acts_as_tokenizable.rake",
+ "pkg/acts_as_tokenizable-0.1.0.gem",
+ "pkg/acts_as_tokenizable-0.2.0.gem",
+ "test/helper.rb",
+ "test/test_acts_as_tokenizable.rb"
  ]
  s.homepage = %q{http://github.com/splendeo/acts_as_tokenizable}
- s.rdoc_options = ["--charset=UTF-8"]
- s.require_paths = ["lib"]
- s.rubygems_version = %q{1.3.5}
+ s.require_paths = [%q{lib}]
+ s.rubygems_version = %q{1.8.6}
  s.summary = %q{Acts as tokenizable}
  s.test_files = [
  "test/helper.rb",
- "test/test_acts_as_tokenizable.rb"
+ "test/test_acts_as_tokenizable.rb"
  ]
 
  if s.respond_to? :specification_version then
- current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
  s.specification_version = 3
 
- if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
+ if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
  s.add_development_dependency(%q<thoughtbot-shoulda>, [">= 0"])
  else
  s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"])
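
Note: this gemspec (and the metadata section at the end of the diff) was regenerated by jeweler under RubyGems 1.8.6. The version guard now reads Gem::VERSION because Gem::RubyGemsVersion is deprecated as of RubyGems 1.8.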
data/lib/acts_as_tokenizable/acts_as_tokenizable.rb CHANGED
@@ -1,37 +1,38 @@
+ require 'acts_as_tokenizable/string_extensions'
+
  module ActsAsTokenizable
- require 'acts_as_tokenizable/string_extensions'
-
+
  # default to_token method. needs to have a "name" property on the object.
  # override for more complex token generation
  def to_token
  raise NoMethodError.new("You must redefine to_token in your model. Example: self.name.to_token()")
  end
-
- #makes self.token=self.to_token, in a convoluted way
+
+ #makes self.<token_field_name>=self.to_token
  def tokenize
  self.send("#{self.class.token_field_name}=", self.to_token)
  end
-
+
  module ClassMethods
  attr_accessor :token_field_name
-
+
  # search_token parameter is used by tokenized_by. This function allows for preparation
  # before tokenized_by function is invoked. Usually this means removing
  # stop words, replacing words.
  # By default it tokenizes each word and removes duplicates.
  def prepare_search_token(search_token)
- search_token.words_to_token
+ StringExtensions::words_to_token(search_token)
  end
  end
-
+
  def self.included(base)
  base.class_eval do
  extend ClassMethods
-
+
  named_scope :tokenized_by, lambda {|search_token|
  search_strings = []
  search_values = []
- prepare_search_token(search_token).words.each do |w|
+ StringExtensions::words(prepare_search_token(search_token)).each do |w|
  if w[0,1] == '-'
  search_strings.push("#{table_name}.#{token_field_name} NOT LIKE ?")
  search_values.push("%#{w[1,w.length]}%")
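
For orientation, a minimal usage sketch of the reworked mixin (not part of the diff). The Place model, its token column, and the acts_as_tokenizable class macro are assumptions here; the macro lives in lib/acts_as_tokenizable.rb, which this diff does not show:

  # Minimal sketch, under the assumptions stated above.
  class Place < ActiveRecord::Base
    acts_as_tokenizable              # assumed macro; sets token_field_name (here: token)

    # The default to_token raises NoMethodError, so each model overrides it.
    def to_token
      ActsAsTokenizable::StringExtensions.to_token(name)
    end
  end

  place = Place.new(:name => "Café Martínez")
  place.tokenize                     # sets place.token to "cafemartinez"

  # Words prefixed with '-' are excluded (NOT LIKE); the rest must match:
  Place.tokenized_by("cafe -bar")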
data/lib/acts_as_tokenizable/string_extensions.rb CHANGED
@@ -1,66 +1,67 @@
- String.class_eval do
-
- #converts accented letters into ascii equivalents (i.e. ñ becomes n)
- def normalize
- #this version is in the forums but didnt work for me
- #string = string.chars.normalize(:kd).gsub!(/[^\x00-\x7F]/n,'').to_s
- mb_chars.normalize(:d).gsub(/[^\x00-\x7F]/n,'').to_s
- end
-
- #returns true if numeric, false, otherwise
- def numeric?
- true if Float(self) rescue
- return false
- end
-
- #returns an array of strings containing the words on this string. removes spaces, strange chars, etc
- def words
- gsub(/[^\w|-]/, ' ').split
- end
-
- #removes certain words from a string.
- # As a side-effect, all word-separators are converted to the separator char
- def remove_words(words_array, separator = ' ')
- (words - words_array).join separator
- end
-
- # replaces certain words on a string.
- # As a side-effect, all word-separators are converted to the separator char
- def replace_words(replacements, separator = ' ')
- replaced_words = words
- replacements.each do |candidates,replacement|
- candidates.each do |candidate|
- replaced_words=replaced_words.collect {|w| w==candidate ? replacement : w}
+ module ActsAsTokenizable
+
+ module StringExtensions
+
+ #converts accented letters into ascii equivalents (i.e. ñ becomes n)
+ def self.normalize(str)
+ str.mb_chars.normalize(:d).gsub(/[^\x00-\x7F]/n,'').to_s
+ end
+
+ #returns true if numeric, false, otherwise
+ def self.numeric?(str)
+ true if Float(str) rescue
+ false
+ end
+
+ #returns an array of strings containing the words on this string. removes spaces, strange chars, etc
+ def self.words(str)
+ str.gsub(/[^\w|-]/, ' ').split
+ end
+
+ #removes certain words from a string.
+ # As a side-effect, all word-separators are converted to the separator char
+ def self.remove_words(str, words_array, separator = ' ')
+ (words(str) - words_array).join separator
+ end
+
+ # replaces certain words on a string.
+ # As a side-effect, all word-separators are converted to the separator char
+ def self.replace_words(str, replacements, separator = ' ')
+ replaced_words = words(str)
+ replacements.each do |candidates,replacement|
+ candidates.each do |candidate|
+ replaced_words=replaced_words.collect {|w| w==candidate ? replacement : w}
+ end
  end
+ replaced_words.join separator
+ end
+
+ # returns an array that contains, in order:
+ # * the numeric parts, converted to numbers
+ # * the non-numeric parts, as text
+ # this is useful for sorting alphanumerically. For example:
+ # ["A1", "A12", "A2"].sort_by{|x| x.alphanumerics} => ["A1", "A2", "A12"]
+ #
+ # inspired by : http://blog.labnotes.org/2007/12/13/rounded-corners-173-beautiful-code/
+ def self.alphanumerics(str)
+ str.split(/(\d+)/).map { |v| v =~ /\d/ ? v.to_i : v }
+ end
+
+ #convert into something that can be used as an indexation key
+ def self.to_token(str, max_length=255)
+ str = normalize(str).strip.downcase.gsub(/[^\w|-]/, '') #remove all non-alphanumeric but hyphen (-)
+ str = str.squeeze unless numeric?(str) #remove duplicates, except on pure numbers
+ return str[0..(max_length-1)]
+ end
+
+ #convert into something that can be used on links
+ def self.to_slug(str, separator='-')
+ words(normalize(str.strip.downcase)).join(separator)
+ end
+
+ #tokenizes each word individually, and joins the word with the separator char.
+ def self.words_to_token(str, max_length=255, separator = ' ')
+ words(str).collect{|w| to_token(w)}.uniq.join(separator)[0..(max_length-1)]
  end
- replaced_words.join separator
- end
-
- # returns an array that contains, in order:
- # * the numeric parts, converted to numbers
- # * the non-numeric parts, as text
- # this is useful for sorting alphanumerically. For example:
- # ["A1", "A12", "A2"].sort_by{|x| x.alphanumerics} => ["A1", "A2", "A12"]
- #
- # inspired by : http://blog.labnotes.org/2007/12/13/rounded-corners-173-beautiful-code/
- def alphanumerics
- split(/(\d+)/).map { |v| v =~ /\d/ ? v.to_i : v }
- end
-
- #convert into something that can be used as an indexation key
- def to_token(max_length=255)
- string = self.normalize.strip.downcase.gsub(/[^\w|-]/, '') #remove all non-alphanumeric but hyphen (-)
- string = string.squeeze unless string.numeric? #remove duplicates, except on pure numbers
- return string[0..(max_length-1)]
- end
-
- #convert into something that can be used on links
- def to_slug(separator='-')
- self.strip.downcase.normalize.words.join(separator)
- end
-
- #tokenizes each word individually, and joins the word with the separator char.
- def words_to_token(max_length=255, separator = ' ')
- words.collect{|w| w.to_token}.uniq.join(separator)[0..(max_length-1)]
  end
  end
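
The String core extensions are gone; the same helpers are now module functions that take the string as their first argument. An illustrative before/after at a call site (not from the diff):

  "Café con Leche".to_token                                           # 0.3.x
  ActsAsTokenizable::StringExtensions.to_token("Café con Leche")      # 0.4.0 => "cafeconleche"
  ActsAsTokenizable::StringExtensions.to_slug("Café con Leche")       #       => "cafe-con-leche"
  ActsAsTokenizable::StringExtensions.words_to_token("Cafe con  Leche!!") #   => "cafe con leche"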
data/lib/tasks/acts_as_tokenizable.rake CHANGED
@@ -1,5 +1,3 @@
- require 'config/environment'
-
  namespace :tokens do
  desc "Generates the token for objects without tokens."
  task :generate => :environment do
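
With require 'config/environment' removed, the task now loads the application solely through its :environment prerequisite, as Rails rake tasks normally do; invocation is unchanged:

  rake tokens:generate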
metadata CHANGED
@@ -1,46 +1,37 @@
- --- !ruby/object:Gem::Specification
+ --- !ruby/object:Gem::Specification
  name: acts_as_tokenizable
- version: !ruby/object:Gem::Version
- prerelease: false
- segments:
- - 0
- - 3
- - 3
- version: 0.3.3
+ version: !ruby/object:Gem::Version
+ version: 0.4.0
+ prerelease:
  platform: ruby
- authors:
+ authors:
  - Enrique Garcia Cota
  - Francisco de Juan
  autorequire:
  bindir: bin
  cert_chain: []
-
- date: 2010-01-12 00:00:00 +01:00
- default_executable:
- dependencies:
- - !ruby/object:Gem::Dependency
+ date: 2012-03-20 00:00:00.000000000Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
  name: thoughtbot-shoulda
- prerelease: false
- requirement: &id001 !ruby/object:Gem::Requirement
- requirements:
- - - ">="
- - !ruby/object:Gem::Version
- segments:
- - 0
- version: "0"
+ requirement: &9777180 !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ! '>='
+ - !ruby/object:Gem::Version
+ version: '0'
  type: :development
- version_requirements: *id001
+ prerelease: false
+ version_requirements: *9777180
  description: Make ActiveRecord models easily searchable via tokens.
  email: github@splendeo.es
  executables: []
-
  extensions: []
-
- extra_rdoc_files:
+ extra_rdoc_files:
  - LICENSE
  - README
  - README.rdoc
- files:
+ files:
  - LICENSE
  - README
  - README.rdoc
@@ -56,36 +47,30 @@ files:
  - pkg/acts_as_tokenizable-0.2.0.gem
  - test/helper.rb
  - test/test_acts_as_tokenizable.rb
- has_rdoc: true
  homepage: http://github.com/splendeo/acts_as_tokenizable
  licenses: []
-
  post_install_message:
- rdoc_options:
- - --charset=UTF-8
- require_paths:
+ rdoc_options: []
+ require_paths:
  - lib
- required_ruby_version: !ruby/object:Gem::Requirement
- requirements:
- - - ">="
- - !ruby/object:Gem::Version
- segments:
- - 0
- version: "0"
- required_rubygems_version: !ruby/object:Gem::Requirement
- requirements:
- - - ">="
- - !ruby/object:Gem::Version
- segments:
- - 0
- version: "0"
+ required_ruby_version: !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ! '>='
+ - !ruby/object:Gem::Version
+ version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ! '>='
+ - !ruby/object:Gem::Version
+ version: '0'
  requirements: []
-
  rubyforge_project:
- rubygems_version: 1.3.6
+ rubygems_version: 1.8.6
  signing_key:
  specification_version: 3
  summary: Acts as tokenizable
- test_files:
+ test_files:
  - test/helper.rb
  - test/test_acts_as_tokenizable.rb