magick_columns 0.0.1 → 0.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/README.md +93 -0
- data/lib/magick_columns.rb +56 -4
- data/lib/magick_columns/active_record.rb +56 -0
- data/lib/magick_columns/defaults.rb +9 -1
- data/lib/magick_columns/locale/en.yml +15 -0
- data/lib/magick_columns/locale/es.yml +13 -0
- data/lib/magick_columns/railtie.rb +9 -0
- data/lib/magick_columns/rules.rb +20 -0
- data/lib/magick_columns/tokenizer.rb +56 -0
- data/lib/magick_columns/version.rb +1 -1
- data/test/dummy/log/development.log +93 -0
- data/test/dummy/log/test.log +2143 -0
- data/test/{query_builder_test.rb → magick_columns_test.rb} +0 -14
- data/test/tokenizer_test.rb +47 -0
- metadata +16 -9
- data/README.rdoc +0 -45
- data/lib/magick_columns/magick_columns.rb +0 -80
data/README.md
ADDED
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
# MagickColumns
|
|
2
|
+
|
|
3
|
+
This gem extends ActiveRecord to provide queries built from *simple* strings
|
|
4
|
+
|
|
5
|
+
## Installation
|
|
6
|
+
|
|
7
|
+
Add to your Gemfile:
|
|
8
|
+
|
|
9
|
+
```ruby
|
|
10
|
+
gem 'magick_columns'
|
|
11
|
+
```
|
|
12
|
+
|
|
13
|
+
## Usage
|
|
14
|
+
|
|
15
|
+
You must declare `has_magick_columns` in your model:
|
|
16
|
+
|
|
17
|
+
```ruby
|
|
18
|
+
class Person < ActiveRecord::Base
|
|
19
|
+
has_magick_columns name: :string, email: :email, birth: :date
|
|
20
|
+
end
|
|
21
|
+
```
|
|
22
|
+
|
|
23
|
+
And now you can do something like this:
|
|
24
|
+
|
|
25
|
+
```ruby
|
|
26
|
+
people = Person.magick_search('anakin or luke')
|
|
27
|
+
```
|
|
28
|
+
|
|
29
|
+
The method returns a Relation, so you can apply any additional method you want:
|
|
30
|
+
|
|
31
|
+
```ruby
|
|
32
|
+
people.order('name').limit(5)
|
|
33
|
+
```
|
|
34
|
+
|
|
35
|
+
And of course you can "spy" the query with:
|
|
36
|
+
|
|
37
|
+
```ruby
|
|
38
|
+
people.to_sql
|
|
39
|
+
```
|
|
40
|
+
|
|
41
|
+
## Rules
|
|
42
|
+
|
|
43
|
+
There is also a set of rules for tokenizing and replacing some types of "patterns".
|
|
44
|
+
For example, you can write:
|
|
45
|
+
|
|
46
|
+
```ruby
|
|
47
|
+
people = Person.magick_search('from 01/01/2000')
|
|
48
|
+
```
|
|
49
|
+
|
|
50
|
+
And you get the people born in the XXI century =)
|
|
51
|
+
|
|
52
|
+
## Custom configuration
|
|
53
|
+
|
|
54
|
+
If you want to define your own rules, or replace some existing configuration add
|
|
55
|
+
in config/initializers one ruby file, for example magick_columns_config.rb
|
|
56
|
+
|
|
57
|
+
```ruby
|
|
58
|
+
MagickColumns.setup do |config|
|
|
59
|
+
config.and_operators = ['and']
|
|
60
|
+
config.or_operators = ['or']
|
|
61
|
+
config.from_operators = ['from', 'since']
|
|
62
|
+
config.until_operators = ['to', 'until']
|
|
63
|
+
config.today_operators = ['today', 'now']
|
|
64
|
+
# Each replacement rule consists in a pattern and a replacement proc or lambda
|
|
65
|
+
config.replacement_rules[:new_replacement_rule] = {
|
|
66
|
+
pattern: /today/,
|
|
67
|
+
replacement: ->(match) { Date.today.to_s(:db) }
|
|
68
|
+
}
|
|
69
|
+
# Each tokenizer rule consists in a pattern and a tokenizer proc or lambda.
|
|
70
|
+
# The proc must return a hash with a valid SQL operator and a term for the
|
|
71
|
+
# condition.
|
|
72
|
+
config.tokenize_rules[:new_tokenize_rule] = {
|
|
73
|
+
pattern: /(\A\s*|\s+)(from|since)\s+(\S+)/,
|
|
74
|
+
tokenizer: ->(match) { { operator: '>=', term: match[3] } }
|
|
75
|
+
}
|
|
76
|
+
end
|
|
77
|
+
```
|
|
78
|
+
|
|
79
|
+
## How to contribute
|
|
80
|
+
|
|
81
|
+
If you find what you might think is a bug:
|
|
82
|
+
|
|
83
|
+
1. Check the GitHub issue tracker to see if anyone else has had the same issue.
|
|
84
|
+
https://github.com/francocatena/magick_columns/issues/
|
|
85
|
+
2. If you do not see anything, create an issue with information on how to reproduce it.
|
|
86
|
+
|
|
87
|
+
If you want to contribute an enhancement or a fix:
|
|
88
|
+
|
|
89
|
+
1. Fork the project on GitHub.
|
|
90
|
+
https://github.com/francocatena/magick_columns/
|
|
91
|
+
2. Make your changes with tests.
|
|
92
|
+
3. Commit the changes without making changes to the Rakefile, VERSION, or any other files that are not related to your enhancement or fix
|
|
93
|
+
4. Send a pull request.
|
data/lib/magick_columns.rb
CHANGED
|
@@ -1,6 +1,58 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
1
|
+
module MagickColumns
|
|
2
|
+
autoload :DEFAULTS, 'magick_columns/defaults'
|
|
3
|
+
autoload :I18N_DEFAULTS, 'magick_columns/defaults'
|
|
4
|
+
autoload :TOKENIZE_RULES, 'magick_columns/rules'
|
|
5
|
+
autoload :REPLACEMENT_RULES, 'magick_columns/rules'
|
|
6
|
+
autoload :Tokenizer, 'magick_columns/tokenizer'
|
|
7
|
+
|
|
8
|
+
class << self
|
|
9
|
+
private
|
|
3
10
|
|
|
4
|
-
|
|
11
|
+
def _default_setup_for(config)
|
|
12
|
+
translation = I18n.t("magick_columns.#{config}", raise: true) rescue MagickColumns::I18N_DEFAULTS[config]
|
|
5
13
|
|
|
6
|
-
|
|
14
|
+
if translation.respond_to?(:map)
|
|
15
|
+
translation.map { |c| Regexp.quote(c) }.join('|')
|
|
16
|
+
else
|
|
17
|
+
translation
|
|
18
|
+
end
|
|
19
|
+
end
|
|
20
|
+
end
|
|
21
|
+
|
|
22
|
+
# Strings considered "and" spliters
|
|
23
|
+
mattr_accessor :and_operators
|
|
24
|
+
@@and_operators = _default_setup_for :and
|
|
25
|
+
|
|
26
|
+
# Strings considered "or" spliters
|
|
27
|
+
mattr_accessor :or_operators
|
|
28
|
+
@@or_operators = _default_setup_for :or
|
|
29
|
+
|
|
30
|
+
# Strings considered "from" terms (like "from 01/01/2012")
|
|
31
|
+
mattr_accessor :from_operators
|
|
32
|
+
@@from_operators = _default_setup_for :from
|
|
33
|
+
|
|
34
|
+
# Strings considered "until" terms (like "until 01/01/2012")
|
|
35
|
+
mattr_accessor :until_operators
|
|
36
|
+
@@until_operators = _default_setup_for :until
|
|
37
|
+
|
|
38
|
+
# Strings considered "today" strings (like "from today")
|
|
39
|
+
mattr_accessor :today_operators
|
|
40
|
+
@@today_operators = _default_setup_for :today
|
|
41
|
+
|
|
42
|
+
# Rules to replace text in the natural string
|
|
43
|
+
mattr_accessor :replacement_rules
|
|
44
|
+
@@replacement_rules = REPLACEMENT_RULES.dup
|
|
45
|
+
|
|
46
|
+
# Rules for tokenize the natural string
|
|
47
|
+
mattr_accessor :tokenize_rules
|
|
48
|
+
@@tokenize_rules = TOKENIZE_RULES.dup
|
|
49
|
+
|
|
50
|
+
# Setup method for plugin configuration
|
|
51
|
+
def self.setup
|
|
52
|
+
yield self
|
|
53
|
+
end
|
|
54
|
+
end
|
|
55
|
+
|
|
56
|
+
autoload :Timeliness, 'timeliness'
|
|
57
|
+
|
|
58
|
+
require 'magick_columns/railtie' if defined?(Rails)
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
module MagickColumns
|
|
2
|
+
module ActiveRecord
|
|
3
|
+
def has_magick_columns(options = {})
|
|
4
|
+
@@_magick_columns ||= {}
|
|
5
|
+
@@_magick_columns[name] ||= []
|
|
6
|
+
|
|
7
|
+
options.each do |field, type|
|
|
8
|
+
column_options = _magick_column_options(type)
|
|
9
|
+
|
|
10
|
+
@@_magick_columns[name] << { field: field }.merge(column_options)
|
|
11
|
+
end
|
|
12
|
+
end
|
|
13
|
+
|
|
14
|
+
def magick_search(query)
|
|
15
|
+
or_queries = []
|
|
16
|
+
terms = {}
|
|
17
|
+
|
|
18
|
+
MagickColumns::Tokenizer.new(query).extract_terms.each_with_index do |or_term, i|
|
|
19
|
+
and_queries = []
|
|
20
|
+
|
|
21
|
+
or_term.each_with_index do |and_term, j|
|
|
22
|
+
mini_query = []
|
|
23
|
+
|
|
24
|
+
@@_magick_columns[name].each_with_index do |column, k|
|
|
25
|
+
if column[:condition].call(and_term[:term])
|
|
26
|
+
operator = and_term[:operator] || _map_magick_column_operator(column[:operator])
|
|
27
|
+
terms[:"t_#{i}_#{j}_#{k}"] = column[:mask] % {t: and_term[:term]}
|
|
28
|
+
|
|
29
|
+
mini_query << "#{column[:field]} #{operator} :t_#{i}_#{j}_#{k}"
|
|
30
|
+
end
|
|
31
|
+
end
|
|
32
|
+
|
|
33
|
+
and_queries << mini_query.join(' OR ')
|
|
34
|
+
end
|
|
35
|
+
|
|
36
|
+
or_queries << and_queries.map { |a_q| "(#{a_q})" }.join(' AND ')
|
|
37
|
+
end
|
|
38
|
+
|
|
39
|
+
where(or_queries.map { |o_q| "(#{o_q})" }.join(' OR '), terms)
|
|
40
|
+
end
|
|
41
|
+
|
|
42
|
+
private
|
|
43
|
+
|
|
44
|
+
def _magick_column_options(type)
|
|
45
|
+
type.kind_of?(Hash) ? type : MagickColumns::DEFAULTS[type.to_sym]
|
|
46
|
+
end
|
|
47
|
+
|
|
48
|
+
def _map_magick_column_operator(operator, db = nil)
|
|
49
|
+
db ||= ::ActiveRecord::Base.connection.adapter_name
|
|
50
|
+
|
|
51
|
+
operator == :like ? (db == 'PostgreSQL' ? 'ILIKE' : 'LIKE') : operator
|
|
52
|
+
end
|
|
53
|
+
end
|
|
54
|
+
end
|
|
55
|
+
|
|
56
|
+
ActiveRecord::Base.extend MagickColumns::ActiveRecord
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
module
|
|
1
|
+
module MagickColumns
|
|
2
2
|
DEFAULTS = {
|
|
3
3
|
string: {
|
|
4
4
|
operator: :like,
|
|
@@ -28,4 +28,12 @@ module MagicColumns
|
|
|
28
28
|
convert: ->(t) { ::Timeliness.parse(t.to_s) }
|
|
29
29
|
}
|
|
30
30
|
}
|
|
31
|
+
|
|
32
|
+
I18N_DEFAULTS = {
|
|
33
|
+
from: ['from', 'since'],
|
|
34
|
+
until: ['to', 'until'],
|
|
35
|
+
and: ['and'],
|
|
36
|
+
or: ['or'],
|
|
37
|
+
today: ['today', 'now']
|
|
38
|
+
}
|
|
31
39
|
end
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
# TODO: These rules should be added to a live array, not a constant one
|
|
2
|
+
module MagickColumns
|
|
3
|
+
TOKENIZE_RULES = {
|
|
4
|
+
from: {
|
|
5
|
+
pattern: /(\A\s*|\s+)(#{MagickColumns.from_operators})\s+(\S+)/,
|
|
6
|
+
tokenizer: ->(match) { { operator: '>=', term: match[3] } }
|
|
7
|
+
},
|
|
8
|
+
until:{
|
|
9
|
+
pattern: /(\A\s*|\s+)(#{MagickColumns.until_operators})\s+(\S+)/,
|
|
10
|
+
tokenizer: ->(match) { { operator: '<=', term: match[3] } }
|
|
11
|
+
}
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
REPLACEMENT_RULES = {
|
|
15
|
+
today: {
|
|
16
|
+
pattern: /#{MagickColumns.today_operators}/,
|
|
17
|
+
replacement: ->(match) { Date.today.to_s(:db) }
|
|
18
|
+
}
|
|
19
|
+
}
|
|
20
|
+
end
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
module MagickColumns
|
|
2
|
+
class Tokenizer
|
|
3
|
+
def initialize(query = '')
|
|
4
|
+
@query = query
|
|
5
|
+
end
|
|
6
|
+
|
|
7
|
+
def extract_terms
|
|
8
|
+
terms = []
|
|
9
|
+
|
|
10
|
+
clean_query.split(%r{\s+(#{MagickColumns.or_operators})\s+}).each do |o_t|
|
|
11
|
+
unless o_t =~ %r{\A(#{MagickColumns.or_operators})\z}
|
|
12
|
+
and_terms = []
|
|
13
|
+
|
|
14
|
+
o_t.split(%r{\s+(#{MagickColumns.and_operators})\s+}).each do |t|
|
|
15
|
+
unless t =~ %r{\A(#{MagickColumns.and_operators})\z}
|
|
16
|
+
and_terms.concat split_term_in_terms(t)
|
|
17
|
+
end
|
|
18
|
+
end
|
|
19
|
+
|
|
20
|
+
terms << and_terms unless and_terms.empty?
|
|
21
|
+
end
|
|
22
|
+
end
|
|
23
|
+
|
|
24
|
+
terms.reject(&:empty?)
|
|
25
|
+
end
|
|
26
|
+
|
|
27
|
+
def clean_query
|
|
28
|
+
@query.strip
|
|
29
|
+
.gsub(%r{\A(\s*(#{MagickColumns.and_operators})\s+)+}, '')
|
|
30
|
+
.gsub(%r{(\s+(#{MagickColumns.and_operators})\s*)+\z}, '')
|
|
31
|
+
.gsub(%r{\A(\s*(#{MagickColumns.or_operators})\s+)+}, '')
|
|
32
|
+
.gsub(%r{(\s+(#{MagickColumns.or_operators})\s*)+\z}, '')
|
|
33
|
+
end
|
|
34
|
+
|
|
35
|
+
def split_term_in_terms(term)
|
|
36
|
+
term_copy = term.dup
|
|
37
|
+
terms = []
|
|
38
|
+
|
|
39
|
+
MagickColumns.replacement_rules.each do |rule, options|
|
|
40
|
+
while(match = term_copy.match(options[:pattern]))
|
|
41
|
+
term_copy.sub!(options[:pattern], options[:replacement].call(match))
|
|
42
|
+
end
|
|
43
|
+
end
|
|
44
|
+
|
|
45
|
+
MagickColumns.tokenize_rules.each do |rule, options|
|
|
46
|
+
while(match = term_copy.match(options[:pattern]))
|
|
47
|
+
terms << options[:tokenizer].call(match)
|
|
48
|
+
|
|
49
|
+
term_copy.sub!(options[:pattern], '')
|
|
50
|
+
end
|
|
51
|
+
end
|
|
52
|
+
|
|
53
|
+
terms + term_copy.strip.split(/\s+/).map { |t| { term: t } }
|
|
54
|
+
end
|
|
55
|
+
end
|
|
56
|
+
end
|
|
@@ -116,3 +116,96 @@ Migrating to CreateArticles (20120312175442)
|
|
|
116
116
|
[1m[36m (0.5ms)[0m [1mSELECT version FROM "schema_migrations"[0m
|
|
117
117
|
[1m[35m (3.2ms)[0m INSERT INTO "schema_migrations" (version) VALUES ('20120312175442')
|
|
118
118
|
[1m[36m (10.9ms)[0m [1mINSERT INTO "schema_migrations" (version) VALUES ('20120312175303')[0m
|
|
119
|
+
[1m[36m (0.6ms)[0m [1mSELECT "schema_migrations"."version" FROM "schema_migrations" [0m
|
|
120
|
+
[1m[35m (0.1ms)[0m SET search_path TO public
|
|
121
|
+
[1m[36m (222.3ms)[0m [1mDROP DATABASE IF EXISTS "dummy_test"[0m
|
|
122
|
+
[1m[35m (0.2ms)[0m SET search_path TO public
|
|
123
|
+
[1m[36m (884.3ms)[0m [1mCREATE DATABASE "dummy_test" ENCODING = 'unicode'[0m
|
|
124
|
+
[1m[35m (145.9ms)[0m CREATE TABLE "articles" ("id" serial primary key, "name" character varying(255), "code" integer, "created_at" timestamp NOT NULL, "updated_at" timestamp NOT NULL)
|
|
125
|
+
[1m[36m (276.6ms)[0m [1mCREATE TABLE "people" ("id" serial primary key, "name" character varying(255), "email" character varying(255), "birth" date, "created_at" timestamp NOT NULL, "updated_at" timestamp NOT NULL) [0m
|
|
126
|
+
[1m[35m (100.6ms)[0m CREATE TABLE "schema_migrations" ("version" character varying(255) NOT NULL)
|
|
127
|
+
[1m[36m (145.3ms)[0m [1m SELECT distinct i.relname, d.indisunique, d.indkey, pg_get_indexdef(d.indexrelid), t.oid
|
|
128
|
+
FROM pg_class t
|
|
129
|
+
INNER JOIN pg_index d ON t.oid = d.indrelid
|
|
130
|
+
INNER JOIN pg_class i ON d.indexrelid = i.oid
|
|
131
|
+
WHERE i.relkind = 'i'
|
|
132
|
+
AND d.indisprimary = 'f'
|
|
133
|
+
AND t.relname = 'schema_migrations'
|
|
134
|
+
AND i.relnamespace IN (SELECT oid FROM pg_namespace WHERE nspname = ANY (current_schemas(false)) )
|
|
135
|
+
ORDER BY i.relname
|
|
136
|
+
[0m
|
|
137
|
+
[1m[35m (255.5ms)[0m CREATE UNIQUE INDEX "unique_schema_migrations" ON "schema_migrations" ("version")
|
|
138
|
+
[1m[36m (0.7ms)[0m [1mSELECT version FROM "schema_migrations"[0m
|
|
139
|
+
[1m[35m (74.6ms)[0m INSERT INTO "schema_migrations" (version) VALUES ('20120312175442')
|
|
140
|
+
[1m[36m (52.4ms)[0m [1mINSERT INTO "schema_migrations" (version) VALUES ('20120312175303')[0m
|
|
141
|
+
[1m[36m (42.1ms)[0m [1mALTER TABLE "schema_migrations" DISABLE TRIGGER ALL;ALTER TABLE "people" DISABLE TRIGGER ALL;ALTER TABLE "articles" DISABLE TRIGGER ALL[0m
|
|
142
|
+
[1m[35m (0.2ms)[0m BEGIN
|
|
143
|
+
[1m[36mFixture Delete (0.6ms)[0m [1mDELETE FROM "articles"[0m
|
|
144
|
+
[1m[35mFixture Insert (26.6ms)[0m INSERT INTO "articles" ("name", "code", "created_at", "updated_at", "id") VALUES ('Lightsaber', 1, '2012-03-15 17:41:34', '2012-03-15 17:41:34', 980403528)
|
|
145
|
+
[1m[36mFixture Insert (0.3ms)[0m [1mINSERT INTO "articles" ("name", "code", "created_at", "updated_at", "id") VALUES ('Ship', 2, '2012-03-15 17:41:34', '2012-03-15 17:41:34', 976284455)[0m
|
|
146
|
+
[1m[35mPK and serial sequence (2.2ms)[0m SELECT attr.attname, seq.relname
|
|
147
|
+
FROM pg_class seq,
|
|
148
|
+
pg_attribute attr,
|
|
149
|
+
pg_depend dep,
|
|
150
|
+
pg_namespace name,
|
|
151
|
+
pg_constraint cons
|
|
152
|
+
WHERE seq.oid = dep.objid
|
|
153
|
+
AND seq.relkind = 'S'
|
|
154
|
+
AND attr.attrelid = dep.refobjid
|
|
155
|
+
AND attr.attnum = dep.refobjsubid
|
|
156
|
+
AND attr.attrelid = cons.conrelid
|
|
157
|
+
AND attr.attnum = cons.conkey[1]
|
|
158
|
+
AND cons.contype = 'p'
|
|
159
|
+
AND dep.refobjid = '"articles"'::regclass
|
|
160
|
+
|
|
161
|
+
[1m[36mReset sequence (181.0ms)[0m [1m SELECT setval('"articles_id_seq"', (SELECT COALESCE(MAX("id")+(SELECT increment_by FROM "articles_id_seq"), (SELECT min_value FROM "articles_id_seq")) FROM "articles"), false)
|
|
162
|
+
[0m
|
|
163
|
+
[1m[35m (37.0ms)[0m COMMIT
|
|
164
|
+
[1m[36m (0.3ms)[0m [1mALTER TABLE "schema_migrations" ENABLE TRIGGER ALL;ALTER TABLE "people" ENABLE TRIGGER ALL;ALTER TABLE "articles" ENABLE TRIGGER ALL[0m
|
|
165
|
+
[1m[35m (0.3ms)[0m ALTER TABLE "schema_migrations" DISABLE TRIGGER ALL;ALTER TABLE "people" DISABLE TRIGGER ALL;ALTER TABLE "articles" DISABLE TRIGGER ALL
|
|
166
|
+
[1m[36m (0.2ms)[0m [1mBEGIN[0m
|
|
167
|
+
[1m[35mFixture Delete (0.4ms)[0m DELETE FROM "people"
|
|
168
|
+
[1m[36mFixture Insert (13.9ms)[0m [1mINSERT INTO "people" ("name", "email", "birth", "created_at", "updated_at", "id") VALUES ('Obi-Wan Kenobi', 'obi@sw.com', '3012-03-15', '2012-03-15 17:41:35', '2012-03-15 17:41:35', 632303495)[0m
|
|
169
|
+
[1m[35mFixture Insert (0.3ms)[0m INSERT INTO "people" ("name", "email", "birth", "created_at", "updated_at", "id") VALUES ('Luke Skywalker', 'luke@sw.com', '3052-03-15', '2012-03-15 17:41:35', '2012-03-15 17:41:35', 962534057)
|
|
170
|
+
[1m[36mFixture Insert (0.3ms)[0m [1mINSERT INTO "people" ("name", "email", "birth", "created_at", "updated_at", "id") VALUES ('Anakin Skywalker', 'anakin@sw.com', '3032-03-15', '2012-03-15 17:41:35', '2012-03-15 17:41:35', 222665832)[0m
|
|
171
|
+
[1m[35mPK and serial sequence (2.1ms)[0m SELECT attr.attname, seq.relname
|
|
172
|
+
FROM pg_class seq,
|
|
173
|
+
pg_attribute attr,
|
|
174
|
+
pg_depend dep,
|
|
175
|
+
pg_namespace name,
|
|
176
|
+
pg_constraint cons
|
|
177
|
+
WHERE seq.oid = dep.objid
|
|
178
|
+
AND seq.relkind = 'S'
|
|
179
|
+
AND attr.attrelid = dep.refobjid
|
|
180
|
+
AND attr.attnum = dep.refobjsubid
|
|
181
|
+
AND attr.attrelid = cons.conrelid
|
|
182
|
+
AND attr.attnum = cons.conkey[1]
|
|
183
|
+
AND cons.contype = 'p'
|
|
184
|
+
AND dep.refobjid = '"people"'::regclass
|
|
185
|
+
|
|
186
|
+
[1m[36mReset sequence (12.5ms)[0m [1m SELECT setval('"people_id_seq"', (SELECT COALESCE(MAX("id")+(SELECT increment_by FROM "people_id_seq"), (SELECT min_value FROM "people_id_seq")) FROM "people"), false)
|
|
187
|
+
[0m
|
|
188
|
+
[1m[35m (15.0ms)[0m COMMIT
|
|
189
|
+
[1m[36m (0.2ms)[0m [1mALTER TABLE "schema_migrations" ENABLE TRIGGER ALL;ALTER TABLE "people" ENABLE TRIGGER ALL;ALTER TABLE "articles" ENABLE TRIGGER ALL[0m
|
|
190
|
+
[1m[36m (0.9ms)[0m [1mSELECT "schema_migrations"."version" FROM "schema_migrations" [0m
|
|
191
|
+
[1m[35m (0.2ms)[0m SET search_path TO public
|
|
192
|
+
[1m[36m (537.3ms)[0m [1mDROP DATABASE IF EXISTS "dummy_test"[0m
|
|
193
|
+
[1m[35m (0.2ms)[0m SET search_path TO public
|
|
194
|
+
[1m[36m (2217.0ms)[0m [1mCREATE DATABASE "dummy_test" ENCODING = 'unicode'[0m
|
|
195
|
+
[1m[35m (176.4ms)[0m CREATE TABLE "articles" ("id" serial primary key, "name" character varying(255), "code" integer, "created_at" timestamp NOT NULL, "updated_at" timestamp NOT NULL)
|
|
196
|
+
[1m[36m (153.9ms)[0m [1mCREATE TABLE "people" ("id" serial primary key, "name" character varying(255), "email" character varying(255), "birth" date, "created_at" timestamp NOT NULL, "updated_at" timestamp NOT NULL) [0m
|
|
197
|
+
[1m[35m (10.3ms)[0m CREATE TABLE "schema_migrations" ("version" character varying(255) NOT NULL)
|
|
198
|
+
[1m[36m (3.1ms)[0m [1m SELECT distinct i.relname, d.indisunique, d.indkey, pg_get_indexdef(d.indexrelid), t.oid
|
|
199
|
+
FROM pg_class t
|
|
200
|
+
INNER JOIN pg_index d ON t.oid = d.indrelid
|
|
201
|
+
INNER JOIN pg_class i ON d.indexrelid = i.oid
|
|
202
|
+
WHERE i.relkind = 'i'
|
|
203
|
+
AND d.indisprimary = 'f'
|
|
204
|
+
AND t.relname = 'schema_migrations'
|
|
205
|
+
AND i.relnamespace IN (SELECT oid FROM pg_namespace WHERE nspname = ANY (current_schemas(false)) )
|
|
206
|
+
ORDER BY i.relname
|
|
207
|
+
[0m
|
|
208
|
+
[1m[35m (85.1ms)[0m CREATE UNIQUE INDEX "unique_schema_migrations" ON "schema_migrations" ("version")
|
|
209
|
+
[1m[36m (0.5ms)[0m [1mSELECT version FROM "schema_migrations"[0m
|
|
210
|
+
[1m[35m (9.3ms)[0m INSERT INTO "schema_migrations" (version) VALUES ('20120312175442')
|
|
211
|
+
[1m[36m (10.8ms)[0m [1mINSERT INTO "schema_migrations" (version) VALUES ('20120312175303')[0m
|