rbbt-dm 1.1.55 → 1.1.58
- checksums.yaml +4 -4
- data/LICENSE +1 -1
- data/lib/rbbt/vector/model/random_forest.rb +11 -1
- data/lib/rbbt/vector/model/spaCy.rb +13 -13
- data/lib/rbbt/vector/model/svm.rb +3 -3
- data/lib/rbbt/vector/model.rb +33 -8
- data/share/spaCy/cpu/textcat_multilabel_accuracy.conf +86 -0
- data/share/spaCy/cpu/textcat_multilabel_efficiency.conf +78 -0
- data/share/spaCy/gpu/textcat_multilabel_accuracy.conf +84 -0
- data/share/spaCy/gpu/textcat_multilabel_efficiency.conf +73 -0
- data/test/rbbt/vector/model/test_spaCy.rb +10 -5
- metadata +6 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 286385d90b276d30cd5e1b21ae38c5e6a203e2ce3ac10673c434c19a2f45cfb1
+  data.tar.gz: 7879d74a364886ea8cb507be51c4979cfb598bdb273f948c3c3930a5dce199e6
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: b82c77bd736c8422e49c4dc83b63d6a91da6e76857af4b5cf5aff0a9a58b4147bc50b49b1b1534e8b07ca1bce5f6a5a673c5d688fb8cd7856623370d19fd1bda
+  data.tar.gz: 1b267a85ab600b878e99f414f725255cf086165a27f8cdec42ed83349b4f36bdb9e29615e0aaada9b30f098df8382e4778cebaf2b8649e17b8985e79d9b8bd23
data/lib/rbbt/vector/model/random_forest.rb
CHANGED
@@ -16,11 +16,21 @@ model = randomForest(as.factor(label) ~ ., data = features);
 rbbt.require("randomForest");
 pred = names(model$forest$xlevels)
 for (p in pred) {
-  if (
+  if (is.factor(features[[p]])) {
     features[[p]] = factor(features[[p]], levels=model$forest$xlevels[[p]])
   }
 }
 label = predict(model, features);
 EOF
   end
+
+  def importance
+    TmpFile.with_file do |tmp|
+      tsv = R.run <<-EOF
+load(file="#{model_file}");
+rbbt.tsv.write('#{tmp}', model$importance)
+      EOF
+      TSV.open(tmp)
+    end
+  end
 end
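
The new importance accessor reads the fitted forest's variable importance back from R as a TSV. A minimal usage sketch, assuming the class defined in random_forest.rb is RandomForestModel and that the model directory, features and labels below are purely illustrative:

    require 'rbbt/vector/model/random_forest'

    # Illustrative model directory; RandomForestModel is assumed to be the class in random_forest.rb
    model = RandomForestModel.new "/tmp/rf_model"
    model.add [1.2, 0.4], "positive"   # one call per training example: features, label
    model.add [0.1, 3.1], "negative"
    model.train

    # Returns a TSV built from R's model$importance (one row per feature)
    puts model.importance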
data/lib/rbbt/vector/model/spaCy.rb
CHANGED
@@ -4,13 +4,13 @@ require 'rbbt/nlp/spaCy'
 class SpaCyModel < VectorModel
   attr_accessor :config
 
-  def spacy(&block)
+  def self.spacy(&block)
     RbbtPython.run "spacy" do
       RbbtPython.module_eval(&block)
     end
   end
 
-  def initialize(dir, config, lang = 'en_core_web_md')
+  def initialize(dir, config, categories = %w(positive negative), lang = 'en_core_web_md')
     @config = case
               when Path === config
                 config.read
@@ -30,20 +30,19 @@ class SpaCyModel < VectorModel
     @train_model = Proc.new do |file, features, labels|
       texts = features
       docs = []
+      unique_labels = labels.uniq
       tmpconfig = File.join(file, 'config')
       tmptrain = File.join(file, 'train.spacy')
       SpaCy.config(@config, tmpconfig)
-      spacy do
+      SpaCyModel.spacy do
         nlp = SpaCy.nlp(lang)
         docs = []
         RbbtPython.iterate nlp.pipe(texts.zip(labels), as_tuples: true), :bar => "Training documents into spacy format" do |doc,label|
-
-
-          doc.cats[
-          else
-            doc.cats["positive"] = 0
-            doc.cats["negative"] = 1
+          unique_labels.each do |other_label|
+            next if other_label == label
+            doc.cats[other_label] = false
           end
+          doc.cats[label] = true
           docs << doc
         end
 
@@ -51,22 +50,23 @@ class SpaCyModel < VectorModel
       doc_bin.to_disk(tmptrain)
     end
 
-    gpu = Rbbt::Config.get('gpu_id', :spacy, :spacy_train)
+    gpu = Rbbt::Config.get('gpu_id', :spacy, :spacy_train, :default => 0)
     CMD.cmd_log(:spacy, "train #{tmpconfig} --output #{file} --paths.train #{tmptrain} --paths.dev #{tmptrain}", "--gpu-id" => gpu)
   end
 
-  @eval_model = Proc.new do |file, features|
+  @eval_model = Proc.new do |file, features,list|
     texts = features
+    texts = [texts] unless list
 
     docs = []
-    spacy do
+    SpaCyModel.spacy do
       nlp = spacy.load("#{file}/model-best")
 
       Log::ProgressBar.with_bar texts.length, :desc => "Evaluating documents" do |bar|
         texts.collect do |text|
           cats = nlp.(text).cats
           bar.tick
-          cats
+          cats.sort_by{|l,v| v.to_f }.last.first
         end
       end
     end
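
The constructor now takes the category list explicitly instead of hard-coding positive/negative, and evaluation returns the highest-scoring category per text. A rough usage sketch, assuming the config argument is given as an Rbbt Path to one of the bundled .conf files; the directory, config choice and texts are illustrative:

    require 'rbbt/vector/model/spaCy'

    config = Path.setup("share/spaCy/cpu/textcat_multilabel_accuracy.conf")  # assumed lookup path
    model  = SpaCyModel.new "/tmp/spacy_model", config, %w(good bad)

    model.add "Worked well and was easy to install", 'good'
    model.add "Results could not be reproduced", 'bad'
    model.train

    model.eval_list ["Worked as advertised"]   # expected to return the best label per text, e.g. ["good"]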
data/lib/rbbt/vector/model/svm.rb
CHANGED
@@ -3,16 +3,16 @@ class SVMModel < VectorModel
   def initialize(dir)
     super(dir)
 
-    @extract_features
+    @extract_features ||= Proc.new{|element|
       element
     }
 
-    @train_model
+    @train_model ||=<<-EOF
 rbbt.require('e1071');
 model = svm(as.factor(label) ~ ., data = features);
     EOF
 
-    @eval_model
+    @eval_model ||=<<-EOF
 rbbt.require('e1071');
 label = predict(model, features);
     EOF
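
Because the defaults are now assigned with ||=, a subclass or caller can supply its own feature extraction or R snippets before SVMModel's initializer fills in the e1071 defaults. A sketch of plain usage with those defaults; the directory, feature vectors and the eval call are illustrative assumptions:

    require 'rbbt/vector/model/svm'

    model = SVMModel.new "/tmp/svm_model"      # default @extract_features/@train_model/@eval_model apply

    model.add [5.1, 3.5, 1.4], "setosa"
    model.add [6.7, 3.0, 5.2], "virginica"
    model.train

    model.eval [5.0, 3.4, 1.5]                 # label predicted by the fitted e1071 SVM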
data/lib/rbbt/vector/model.rb
CHANGED
@@ -53,6 +53,13 @@ features = cbind(features, label = labels);
 "features[['#{name}']] = factor(features[['#{name}']], levels=#{R.ruby2R levels})"
 end * "\n" if factor_levels }
 #{code}
+# Save used factor levels
+factor_levels = c()
+for (c in names(features)){
+  if (is.factor(features[[c]]))
+    factor_levels[c] = paste(levels(features[[c]]), collapse="\t")
+}
+rbbt.tsv.write("#{model_file}.factor_levels", factor_levels, names=c('Levels'), type='flat')
 save(model, file='#{model_file}')
 EOF
 end
@@ -150,6 +157,9 @@ cat(paste(label, sep="\\n", collapse="\\n"));
       if File.exists?(@levels_file)
         @factor_levels = YAML.load(Open.read(@levels_file))
       end
+      if File.exists?(@model_file + '.factor_levels')
+        @factor_levels = TSV.open(@model_file + '.factor_levels')
+      end
     else
       @factor_levels = factor_levels
     end
@@ -294,15 +304,16 @@ cat(paste(label, sep="\\n", collapse="\\n"));
     good_label = labels.uniq.select{|l| l.to_s == "true"}.first if good_label.nil?
     good_label = labels.uniq.select{|l| l.to_s == "1"}.first if good_label.nil?
     good_label = labels.uniq.sort.first if good_label.nil?
+    good_label = good_label.to_s
 
     test.zip(predicted).each do |gs,pred|
       gs = gs.to_s
       pred = pred.to_s
 
-      tp += 1 if
-
-
-      fn += 1 if
+      tp += 1 if pred == good_label && gs == good_label
+      fp += 1 if pred == good_label && gs != good_label
+      tn += 1 if pred != good_label && gs != good_label
+      fn += 1 if pred != good_label && gs == good_label
     end
 
     p = tp + fn
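
The rewritten block tallies the four confusion-matrix cells with good_label as the positive class. A small worked example of the class method as it is called elsewhere in this file; the labels below are made up:

    test      = %w(good good bad bad good)
    predicted = %w(good bad  bad bad good)

    # Return order as used in this file: tp, tn, fp, fn, precision, recall, F1
    tp, tn, fp, fn, pr, re, f1 = VectorModel.f1_metrics(test, predicted, "good")
    # tp=2 tn=2 fp=0 fn=1  =>  precision 1.0, recall ~0.67, F1 0.8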
@@ -319,6 +330,8 @@ cat(paste(label, sep="\\n", collapse="\\n"));
     acc = []
     labels.each do |good_label|
       values = VectorModel.f1_metrics(test, predicted, good_label)
+      tp, tn, fp, fn, pr, re, f1 = values
+      Log.debug "Partial CV #{good_label} - P:#{"%.3f" % pr} R:#{"%.3f" % re} F1:#{"%.3f" % f1} - #{[tp.to_s, tn.to_s, fp.to_s, fn.to_s] * " "}"
       acc << values
     end
     Misc.zip_fields(acc).collect{|s| Misc.mean(s)}
@@ -339,12 +352,21 @@ cat(paste(label, sep="\\n", collapse="\\n"));
     end
 
     begin
-
-
+      if folds == 1
+        feature_folds = [@features]
+        labels_folds = [@labels]
+      else
+        feature_folds = Misc.divide(@features, folds)
+        labels_folds = Misc.divide(@labels, folds)
+      end
 
       folds.times do |fix|
 
-
+        if folds == 1
+          rest = [fix]
+        else
+          rest = (0..(folds-1)).to_a - [fix]
+        end
 
         test_set = feature_folds[fix]
         train_set = feature_folds.values_at(*rest).inject([]){|acc,e| acc += e; acc}
@@ -354,6 +376,7 @@ cat(paste(label, sep="\\n", collapse="\\n"));
 
         @features = train_set
         @labels = train_labels
+
         self.train
         predictions = self.eval_list test_set, false
 
@@ -361,6 +384,8 @@ cat(paste(label, sep="\\n", collapse="\\n"));
 
         different_labels = test_labels.uniq
 
+        Log.debug do "Accuracy Fold #{fix}: #{(100 * test_labels.zip(predictions).select{|t,p| t == p }.length.to_f / test_labels.length).round(2)}%" end
+
         tp, tn, fp, fn, pr, re, f1 = VectorModel.f1_metrics(test_labels, predictions, good_label)
 
         if multiclass
@@ -376,7 +401,7 @@ cat(paste(label, sep="\\n", collapse="\\n"));
       @features = orig_features
       @labels = orig_labels
     end
-    self.train
+    self.train unless folds == 1
     res
   end
 end
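
With these changes, folds == 1 keeps the whole dataset as a single partition that is used for both training and evaluation, and the final re-training pass is skipped; larger fold counts behave as before. A brief sketch mirroring the updated test:

    model.cross_validation 1    # quick sanity check: train and evaluate on the same single fold
    model.cross_validation 5    # regular 5-fold CV; the model is re-trained on all data at the end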
data/share/spaCy/cpu/textcat_multilabel_accuracy.conf
ADDED
@@ -0,0 +1,86 @@
+# This is an auto-generated partial config. To use it with 'spacy train'
+# you can run spacy init fill-config to auto-fill all default settings:
+# python -m spacy init fill-config ./base_config.cfg ./config.cfg
+[paths]
+train = null
+dev = null
+
+[system]
+gpu_allocator = null
+
+[nlp]
+lang = "en"
+pipeline = ["tok2vec","textcat_multilabel"]
+batch_size = 1000
+
+[components]
+
+[components.tok2vec]
+factory = "tok2vec"
+
+[components.tok2vec.model]
+@architectures = "spacy.Tok2Vec.v2"
+
+[components.tok2vec.model.embed]
+@architectures = "spacy.MultiHashEmbed.v2"
+width = ${components.tok2vec.model.encode.width}
+attrs = ["ORTH", "SHAPE"]
+rows = [5000, 2500]
+include_static_vectors = true
+
+[components.tok2vec.model.encode]
+@architectures = "spacy.MaxoutWindowEncoder.v2"
+width = 256
+depth = 8
+window_size = 1
+maxout_pieces = 3
+
+[components.textcat_multilabel]
+factory = "textcat_multilabel"
+
+[components.textcat_multilabel.model]
+@architectures = "spacy.TextCatEnsemble.v2"
+nO = null
+
+[components.textcat_multilabel.model.tok2vec]
+@architectures = "spacy.Tok2VecListener.v1"
+width = ${components.tok2vec.model.encode.width}
+
+[components.textcat_multilabel.model.linear_model]
+@architectures = "spacy.TextCatBOW.v1"
+exclusive_classes = true
+ngram_size = 1
+no_output_layer = false
+
+[corpora]
+
+[corpora.train]
+@readers = "spacy.Corpus.v1"
+path = ${paths.train}
+max_length = 2000
+
+[corpora.dev]
+@readers = "spacy.Corpus.v1"
+path = ${paths.dev}
+max_length = 0
+
+[training]
+dev_corpus = "corpora.dev"
+train_corpus = "corpora.train"
+
+[training.optimizer]
+@optimizers = "Adam.v1"
+
+[training.batcher]
+@batchers = "spacy.batch_by_words.v1"
+discard_oversize = false
+tolerance = 0.2
+
+[training.batcher.size]
+@schedules = "compounding.v1"
+start = 100
+stop = 1000
+compound = 1.001
+
+[initialize]
+vectors = "en_core_web_lg"
data/share/spaCy/cpu/textcat_multilabel_efficiency.conf
ADDED
@@ -0,0 +1,78 @@
+# This is an auto-generated partial config. To use it with 'spacy train'
+# you can run spacy init fill-config to auto-fill all default settings:
+# python -m spacy init fill-config ./base_config.cfg ./config.cfg
+[paths]
+train = null
+dev = null
+
+[system]
+gpu_allocator = null
+
+[nlp]
+lang = "en"
+pipeline = ["tok2vec","textcat_multilabel"]
+batch_size = 1000
+
+[components]
+
+[components.tok2vec]
+factory = "tok2vec"
+
+[components.tok2vec.model]
+@architectures = "spacy.Tok2Vec.v2"
+
+[components.tok2vec.model.embed]
+@architectures = "spacy.MultiHashEmbed.v2"
+width = ${components.tok2vec.model.encode.width}
+attrs = ["ORTH", "SHAPE"]
+rows = [5000, 2500]
+include_static_vectors = false
+
+[components.tok2vec.model.encode]
+@architectures = "spacy.MaxoutWindowEncoder.v2"
+width = 96
+depth = 4
+window_size = 1
+maxout_pieces = 3
+
+[components.textcat_multilabel]
+factory = "textcat_multilabel"
+
+[components.textcat_multilabel.model]
+@architectures = "spacy.TextCatBOW.v1"
+exclusive_classes = true
+ngram_size = 1
+no_output_layer = false
+
+[corpora]
+
+[corpora.train]
+@readers = "spacy.Corpus.v1"
+path = ${paths.train}
+max_length = 2000
+
+[corpora.dev]
+@readers = "spacy.Corpus.v1"
+path = ${paths.dev}
+max_length = 0
+
+[training]
+dev_corpus = "corpora.dev"
+train_corpus = "corpora.train"
+
+[training.optimizer]
+@optimizers = "Adam.v1"
+
+[training.batcher]
+@batchers = "spacy.batch_by_words.v1"
+discard_oversize = false
+tolerance = 0.2
+
+[training.batcher.size]
+@schedules = "compounding.v1"
+start = 100
+stop = 1000
+compound = 1.001
+
+[initialize]
+vectors = null
data/share/spaCy/gpu/textcat_multilabel_accuracy.conf
ADDED
@@ -0,0 +1,84 @@
+# This is an auto-generated partial config. To use it with 'spacy train'
+# you can run spacy init fill-config to auto-fill all default settings:
+# python -m spacy init fill-config ./base_config.cfg ./config.cfg
+[paths]
+train = null
+dev = null
+
+[system]
+gpu_allocator = "pytorch"
+
+[nlp]
+lang = "en"
+pipeline = ["transformer","textcat_multilabel"]
+batch_size = 128
+
+[components]
+
+[components.transformer]
+factory = "transformer"
+
+[components.transformer.model]
+@architectures = "spacy-transformers.TransformerModel.v1"
+name = "emilyalsentzer/Bio_ClinicalBERT"
+tokenizer_config = {"use_fast": true}
+
+[components.transformer.model.get_spans]
+@span_getters = "spacy-transformers.strided_spans.v1"
+window = 128
+stride = 96
+
+[components.textcat_multilabel]
+factory = "textcat_multilabel"
+
+[components.textcat_multilabel.model]
+@architectures = "spacy.TextCatEnsemble.v2"
+nO = null
+
+[components.textcat_multilabel.model.tok2vec]
+@architectures = "spacy-transformers.TransformerListener.v1"
+grad_factor = 1.0
+
+[components.textcat_multilabel.model.tok2vec.pooling]
+@layers = "reduce_mean.v1"
+
+[components.textcat_multilabel.model.linear_model]
+@architectures = "spacy.TextCatBOW.v1"
+exclusive_classes = true
+ngram_size = 1
+no_output_layer = false
+
+[corpora]
+
+[corpora.train]
+@readers = "spacy.Corpus.v1"
+path = ${paths.train}
+max_length = 500
+
+[corpora.dev]
+@readers = "spacy.Corpus.v1"
+path = ${paths.dev}
+max_length = 0
+
+[training]
+accumulate_gradient = 3
+dev_corpus = "corpora.dev"
+train_corpus = "corpora.train"
+
+[training.optimizer]
+@optimizers = "Adam.v1"
+
+[training.optimizer.learn_rate]
+@schedules = "warmup_linear.v1"
+warmup_steps = 250
+total_steps = 20000
+initial_rate = 5e-5
+
+[training.batcher]
+@batchers = "spacy.batch_by_padded.v1"
+discard_oversize = true
+size = 2000
+buffer = 256
+
+[initialize]
+vectors = null
data/share/spaCy/gpu/textcat_multilabel_efficiency.conf
ADDED
@@ -0,0 +1,73 @@
+# This is an auto-generated partial config. To use it with 'spacy train'
+# you can run spacy init fill-config to auto-fill all default settings:
+# python -m spacy init fill-config ./base_config.cfg ./config.cfg
+[paths]
+train = null
+dev = null
+
+[system]
+gpu_allocator = "pytorch"
+
+[nlp]
+lang = "en"
+pipeline = ["transformer","textcat_multilabel"]
+batch_size = 128
+
+[components]
+
+[components.transformer]
+factory = "transformer"
+
+[components.transformer.model]
+@architectures = "spacy-transformers.TransformerModel.v1"
+name = "roberta-base"
+tokenizer_config = {"use_fast": true}
+
+[components.transformer.model.get_spans]
+@span_getters = "spacy-transformers.strided_spans.v1"
+window = 128
+stride = 96
+
+[components.textcat_multilabel]
+factory = "textcat_multilabel"
+
+[components.textcat_multilabel.model]
+@architectures = "spacy.TextCatBOW.v1"
+exclusive_classes = true
+ngram_size = 1
+no_output_layer = false
+
+[corpora]
+
+[corpora.train]
+@readers = "spacy.Corpus.v1"
+path = ${paths.train}
+max_length = 500
+
+[corpora.dev]
+@readers = "spacy.Corpus.v1"
+path = ${paths.dev}
+max_length = 0
+
+[training]
+accumulate_gradient = 3
+dev_corpus = "corpora.dev"
+train_corpus = "corpora.train"
+
+[training.optimizer]
+@optimizers = "Adam.v1"
+
+[training.optimizer.learn_rate]
+@schedules = "warmup_linear.v1"
+warmup_steps = 250
+total_steps = 20000
+initial_rate = 5e-5
+
+[training.batcher]
+@batchers = "spacy.batch_by_padded.v1"
+discard_oversize = true
+size = 2000
+buffer = 256
+
+[initialize]
+vectors = null
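
For reference, these bundled configs are consumed by the training call in SpaCyModel shown above. A minimal sketch of that same invocation outside the model; the paths are illustrative, and the gpu_id default mirrors the change in spaCy.rb:

    require 'rbbt-util'

    tmpconfig = "/tmp/model/config"        # filled-in copy of one of the .conf files (written by SpaCy.config)
    tmptrain  = "/tmp/model/train.spacy"   # DocBin written during training

    gpu = Rbbt::Config.get('gpu_id', :spacy, :spacy_train, :default => 0)
    CMD.cmd_log(:spacy, "train #{tmpconfig} --output /tmp/model --paths.train #{tmptrain} --paths.dev #{tmptrain}", "--gpu-id" => gpu)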
data/test/rbbt/vector/model/test_spaCy.rb
CHANGED
@@ -23,18 +23,23 @@ class TestSpaCyModel < Test::Unit::TestCase
     good = tsv.select("Recommended IND" => '1')
     bad = tsv.select("Recommended IND" => '0')
 
-    gsize =
-    bsize =
+    gsize = 200
+    bsize = 50
     good.keys[0..gsize-1].each do |text|
       next if text.nil? || text.empty?
-      model.add text, '
+      model.add text, 'good'
     end
 
     bad.keys[0..bsize-1].each do |text|
-      model.add text, '
+      model.add text, 'bad'
     end
 
-    model.cross_validation
+    model.cross_validation 1
+
+    model = VectorModel.new dir
+
+    assert Misc.counts(model.eval_list(good.keys[0..50]))['good'] > 40
+    assert Misc.counts(model.eval_list(bad.keys[0..50]))['bad'] > 40
   end
 
   def test_svm_spacy
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: rbbt-dm
 version: !ruby/object:Gem::Version
-  version: 1.1.55
+  version: 1.1.58
 platform: ruby
 authors:
 - Miguel Vazquez
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2022-07-19 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rbbt-util
@@ -116,8 +116,12 @@ files:
 - share/R/heatmap.3.R
 - share/spaCy/cpu/textcat_accuracy.conf
 - share/spaCy/cpu/textcat_efficiency.conf
+- share/spaCy/cpu/textcat_multilabel_accuracy.conf
+- share/spaCy/cpu/textcat_multilabel_efficiency.conf
 - share/spaCy/gpu/textcat_accuracy.conf
 - share/spaCy/gpu/textcat_efficiency.conf
+- share/spaCy/gpu/textcat_multilabel_accuracy.conf
+- share/spaCy/gpu/textcat_multilabel_efficiency.conf
 - test/rbbt/matrix/test_barcode.rb
 - test/rbbt/network/test_paths.rb
 - test/rbbt/statistics/test_fdr.rb