opener-kaf-naf-parser 1.0.1 → 1.0.2
This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +67 -8
- data/bin/kaf-naf-parser-daemon +10 -0
- data/core/kaf-naf-parser.py +5 -5
- data/exec/kaf-naf-parser.rb +9 -0
- data/ext/hack/Rakefile +13 -0
- data/lib/opener/kaf_naf_parser/version.rb +1 -1
- data/opener-kaf-naf-parser.gemspec +5 -1
- data/pre_install_requirements.txt +3 -0
- metadata +37 -51
- data/core/packages/KafNafParser-1.2.tar.gz +0 -0
- data/core/packages/VUA_pylib-1.3.tar.gz +0 -0
- data/core/site-packages/pre_build/KafNafParser/KafNafParserMod.py +0 -338
- data/core/site-packages/pre_build/KafNafParser/__init__.py +0 -14
- data/core/site-packages/pre_build/KafNafParser/constituency_data.py +0 -125
- data/core/site-packages/pre_build/KafNafParser/coreference_data.py +0 -52
- data/core/site-packages/pre_build/KafNafParser/dependency_data.py +0 -80
- data/core/site-packages/pre_build/KafNafParser/entity_data.py +0 -59
- data/core/site-packages/pre_build/KafNafParser/external_references_data.py +0 -41
- data/core/site-packages/pre_build/KafNafParser/feature_extractor/__init__.py +0 -2
- data/core/site-packages/pre_build/KafNafParser/feature_extractor/constituency.py +0 -205
- data/core/site-packages/pre_build/KafNafParser/feature_extractor/dependency.py +0 -300
- data/core/site-packages/pre_build/KafNafParser/features_data.py +0 -71
- data/core/site-packages/pre_build/KafNafParser/header_data.py +0 -127
- data/core/site-packages/pre_build/KafNafParser/opinion_data.py +0 -200
- data/core/site-packages/pre_build/KafNafParser/references_data.py +0 -15
- data/core/site-packages/pre_build/KafNafParser/span_data.py +0 -63
- data/core/site-packages/pre_build/KafNafParser/term_data.py +0 -111
- data/core/site-packages/pre_build/KafNafParser/term_sentiment_data.py +0 -42
- data/core/site-packages/pre_build/KafNafParser/text_data.py +0 -90
- data/core/site-packages/pre_build/KafNafParser-1.2-py2.7.egg-info/PKG-INFO +0 -10
- data/core/site-packages/pre_build/KafNafParser-1.2-py2.7.egg-info/SOURCES.txt +0 -22
- data/core/site-packages/pre_build/KafNafParser-1.2-py2.7.egg-info/dependency_links.txt +0 -1
- data/core/site-packages/pre_build/KafNafParser-1.2-py2.7.egg-info/installed-files.txt +0 -47
- data/core/site-packages/pre_build/KafNafParser-1.2-py2.7.egg-info/top_level.txt +0 -1
- data/core/site-packages/pre_build/VUA_pylib/__init__.py +0 -1
- data/core/site-packages/pre_build/VUA_pylib/common/__init__.py +0 -1
- data/core/site-packages/pre_build/VUA_pylib/common/common.py +0 -28
- data/core/site-packages/pre_build/VUA_pylib/corpus_reader/__init__.py +0 -1
- data/core/site-packages/pre_build/VUA_pylib/corpus_reader/google_web_nl.py +0 -156
- data/core/site-packages/pre_build/VUA_pylib/io_utils/__init__.py +0 -1
- data/core/site-packages/pre_build/VUA_pylib/io_utils/feature_file.py +0 -121
- data/core/site-packages/pre_build/VUA_pylib/lexicon/__init__.py +0 -1
- data/core/site-packages/pre_build/VUA_pylib/lexicon/lexicon.py +0 -72
- data/core/site-packages/pre_build/VUA_pylib-1.3-py2.7.egg-info/PKG-INFO +0 -10
- data/core/site-packages/pre_build/VUA_pylib-1.3-py2.7.egg-info/SOURCES.txt +0 -14
- data/core/site-packages/pre_build/VUA_pylib-1.3-py2.7.egg-info/dependency_links.txt +0 -1
- data/core/site-packages/pre_build/VUA_pylib-1.3-py2.7.egg-info/installed-files.txt +0 -23
- data/core/site-packages/pre_build/VUA_pylib-1.3-py2.7.egg-info/top_level.txt +0 -1
- data/core/site-packages/pre_build/VUKafParserPy/KafDataObjectsMod.py +0 -165
- data/core/site-packages/pre_build/VUKafParserPy/KafParserMod.py +0 -439
- data/core/site-packages/pre_build/VUKafParserPy/__init__.py +0 -7
- data/core/site-packages/pre_build/VUKafParserPy-1.0-py2.7.egg-info/PKG-INFO +0 -10
- data/core/site-packages/pre_build/VUKafParserPy-1.0-py2.7.egg-info/SOURCES.txt +0 -7
- data/core/site-packages/pre_build/VUKafParserPy-1.0-py2.7.egg-info/dependency_links.txt +0 -1
- data/core/site-packages/pre_build/VUKafParserPy-1.0-py2.7.egg-info/installed-files.txt +0 -11
- data/core/site-packages/pre_build/VUKafParserPy-1.0-py2.7.egg-info/top_level.txt +0 -1
- data/pre_build_requirements.txt +0 -3
data/core/site-packages/pre_build/KafNafParser/feature_extractor/dependency.py
@@ -1,300 +0,0 @@
-from operator import itemgetter
-from VUA_pylib.common import get_max_distr_dict
-import sys
-
-class Cdependency_extractor:
-    def __init__(self,knaf_obj):
-        self.naf = knaf_obj
-        self.relations_for_term = {}
-        self.reverse_relations_for_term = {}
-        self.prefix_for_reverse = ''
-
-
-        already_linked = {}
-        for dep in knaf_obj.get_dependencies():
-            term_from = dep.get_from()
-            term_to = dep.get_to()
-            rfunc = dep.get_function()
-
-            # Dependencies reversed are skipped...
-            #if rfunc.startswith('rhd/') or rfunc.startswith('whd/'):
-            #    continue
-
-            # For detecting cycles like:
-            # <!-- rhd/body(geef,wat) -->
-            # <dep from="t19" to="t15" rfunc="rhd/body"/>
-            # <!-- hd/su(wat,geef) -->
-            # <dep from="t15" to="t19" rfunc="hd/su"/>
-
-            '''
-            if term_from in already_linked and term_to in already_linked[term_from]:
-                #There could be a cycle, skip this
-                print>>sys.stderr,'Skipped from',term_from,'to',term_to,'func',rfunc,' cycle detected'
-                continue
-            else:
-                #Include term_from as linked with term_to for future...
-                if term_to not in already_linked:
-                    already_linked[term_to] = set()
-                already_linked[term_to].add(term_from)
-            '''
-
-
-
-
-            if term_from in self.relations_for_term:
-                self.relations_for_term[term_from].append((rfunc,term_to))
-            else:
-                self.relations_for_term[term_from] = [(rfunc,term_to)]
-
-            if term_to in self.reverse_relations_for_term:
-                self.reverse_relations_for_term[term_to].append((self.prefix_for_reverse+rfunc,term_from))
-            else:
-                self.reverse_relations_for_term[term_to] = [(self.prefix_for_reverse+rfunc,term_from)]
-
-
-        self.paths_for_termid={}
-        self.sentence_for_termid={}
-        self.top_relation_for_term = {} ## termid --> (relation,topnode)
-        self.root_for_sentence = {} ## sentenceid --> termid
-
-        for term_obj in knaf_obj.get_terms():
-            termid = term_obj.get_id()
-
-            #Calculating the sentence id for the term id
-            span_ids = term_obj.get_span().get_span_ids()
-            token_obj = knaf_obj.get_token(span_ids[0])
-            sentence = token_obj.get_sent()
-
-            self.sentence_for_termid[termid] = sentence
-            ###########################################
-
-            #paths = self.__propagate_node(termid,[])
-            #inversed = self.__reverse_propagate_node(termid)
-
-            ## Due to the change on direction of dependencies...
-            inversed = self.__propagate_node(termid,[])
-            paths = self.__reverse_propagate_node(termid)
-
-            ##Calculate the top relation for the node, the relation with the main root of the tree
-            if len(inversed) != 0:
-                for ip in inversed:
-                    if len(ip)!=0:
-                        self.top_relation_for_term[termid] = ip[-1] ## ex. ('NMOD', 't2')
-                        root = ip[-1][1]
-                        if sentence not in self.root_for_sentence:
-                            self.root_for_sentence[sentence] = {}
-
-                        if root not in self.root_for_sentence[sentence]:
-                            self.root_for_sentence[sentence][root]=0
-                        else:
-                            self.root_for_sentence[sentence][root]+=1
-                        break
-
-            self.paths_for_termid[termid] = paths + inversed
-            '''
-            print termid
-            print 'DIRECT RELS'
-            for p in paths:
-                print ' ',p
-
-            print 'INDIRECT RELS'
-            for p in inversed:
-                print ' ',p
-            '''
-            ####
-
-        for sent_id, distr in self.root_for_sentence.items():
-            ## get_max_distr_dict imported from VUA_pylib.common
-            most_freq,c = get_max_distr_dict(distr)
-            self.root_for_sentence[sent_id] = most_freq
-
-
-
-
-    def __propagate_node(self,node,already_propagated=[]):
-        paths = []
-
-        relations = self.relations_for_term.get(node)
-        if relations is None: ##Case base
-            paths = [[]]
-        elif node in already_propagated:
-            paths = [[]]
-
-        else:
-            already_propagated.append(node)
-            for func, target_node in relations:
-                new_paths = self.__propagate_node(target_node, already_propagated)
-                for new_path in new_paths:
-                    new_path.insert(0,(func,target_node))
-                    paths.append(new_path)
-        return paths
-
-    def __reverse_propagate_node(self,node,already_propagated=[]):
-        paths = []
-        relations = self.reverse_relations_for_term.get(node)
-        if relations is None: ##Case base
-            paths = [[]]
-        elif node in already_propagated:
-            paths = [[]]
-        else:
-            already_propagated.append(node)
-            for func, target_node in relations:
-                new_paths = self.__reverse_propagate_node(target_node,already_propagated)
-                for new_path in new_paths:
-                    new_path.insert(0,(func,target_node))
-                    paths.append(new_path)
-        return paths
-
-
-    # Get the shortest path between 2 term ids
-    def get_shortest_path(self,term1,term2):
-        dep_path = None
-        if term1 == term2: dep_path = []
-        else:
-            paths1 = self.paths_for_termid[term1]
-            paths2 = self.paths_for_termid[term2]
-
-            ##Check if term2 is on paths1
-            hits = [] ## list of (common_id,idx1,idx2,numpath1,numpath2)
-            for num1, p1 in enumerate(paths1):
-
-                ids1 = [ my_id for my_func, my_id in p1]
-                if term2 in ids1:
-                    idx1=ids1.index(term2)
-                    hits.append((term2,idx1+0,idx1,0,num1,None))
-
-            for num2,p2 in enumerate(paths2):
-                ids2 = [ my_id for my_func, my_id in p2]
-                if term1 in p2:
-                    idx2=ids2.index(term1)
-                    hits.append((term1,0+idx2,0,idx2,None,num2))
-
-            #Pair by pair
-            for num1, p1 in enumerate(paths1):
-                #print 'Path1',term1, p1
-                ids1 = [ my_id for my_func, my_id in p1]
-                for num2, p2 in enumerate(paths2):
-                    #print '\t',term2,p2
-                    ids2 = [ my_id for my_func, my_id in p2]
-                    common_ids = set(ids1) & set(ids2)
-                    for common_id in common_ids:
-                        idx1 = ids1.index(common_id)
-                        idx2 = ids2.index(common_id)
-                        hits.append((common_id,idx1+idx2,idx1,idx2,num1,num2))
-
-
-            if len(hits) != 0:
-                dep_path = []
-                hits.sort(key=itemgetter(1))
-                best_hit = hits[0]
-                common_id, _, idx1, idx2, numpath1, numpath2 = best_hit
-
-                if numpath2 is None: #term2 is in one of the paths of t1
-                    path1 = paths1[numpath1]
-                    my_rels1 = path1[:idx1+1]
-                    ##complete_path = ''
-                    ##complete_path_ids = ''
-                    for func,node in my_rels1:
-                        dep_path.append(func)
-                        ##complete_path+=func+'#'
-                        ##complete_path_ids+=node+'#'
-
-                    #===========================================================
-                    # print 'CASE1',best_hit
-                    # print complete_path
-                    # print complete_path_ids
-                    #===========================================================
-                elif numpath1 is None: #term1 is in one of the paths of t2
-                    path2 = paths2[numpath2]
-                    my_rels2 = path2[:idx2+1]
-                    ##complete_path = ''
-                    ##complete_path_ids = ''
-                    for func,node in my_rels2:
-                        dep_path.append(func)
-                        #complete_path+=func+'#'
-                        #complete_path_ids+=node+'#'
-
-                    #===========================================================
-                    # print 'CASE2',best_hit
-                    # print complete_path
-                    # print complete_path_ids
-                    #===========================================================
-                else: #There is a common node linking both
-                    path1 = paths1[numpath1]
-                    my_rels1 = path1[:idx1+1]
-
-                    path2 = paths2[numpath2]
-                    my_rels2 = path2[:idx2+1]
-
-                    ##complete_path = ''
-                    #complete_path_ids = ''
-                    for func,node in my_rels1:
-                        dep_path.append(func)
-                        ##complete_path+=func+'#'
-                        #complete_path_ids+=func+'->'+self.naf.get_term(node).get_lemma()+'->'
-
-                    for func,node in my_rels2[-1::-1]:
-                        dep_path.append(func)
-                        ##complete_path+=func+'#'
-                        #complete_path_ids+=func+'->'+self.naf.get_term(node).get_lemma()+'->'
-                    #===========================================================
-                    #
-                    # print complete_path
-                    # print complete_path_ids
-                    # print path2
-                    # print my_rels1
-                    # print my_rels2
-                    # print 'CASE3',best_hit
-                    #===========================================================
-        return dep_path
-
-    ## Get the shortest dependency path between 2 sets of spans
-    def get_shortest_path_spans(self,span1,span2):
-        shortest_path = None
-
-        for term1 in span1:
-            for term2 in span2:
-                this_path = self.get_shortest_path(term1, term2)
-                if shortest_path is None or (this_path is not None and len(this_path)<len(shortest_path)):
-                    shortest_path = this_path
-        return shortest_path
-
-    # Get the dependency path to the sentence root for a term id
-    def get_path_to_root(self,termid):
-        # Get the sentence for the term
-        root = None
-        sentence = self.sentence_for_termid.get(termid)
-
-        if sentence is None: #try with the top node
-            top_node = self.top_relation_for_term.get(termid)
-            if top_node is not None:
-                root = top_node[1]
-            else:
-                return None
-        else:
-            if sentence in self.root_for_sentence:
-                root = self.root_for_sentence[sentence]
-            else:
-                ##There is no root for this sentence
-                return None
-        # In this point top_node should be properly set
-        path = self.get_shortest_path(termid, root)
-        return path
-
-    # Get the shortest dependency path to the sentence root for a span of ids
-    # extractor.get_shortest_path_to_root_span(['t444','t445','t446'])
-    def get_shortest_path_to_root_span(self,span):
-        shortest_path = None
-        for termid in span:
-            this_path = self.get_path_to_root(termid)
-            ## In case of , or . or whatever, the path to the root usually is None, there are no dependencies...
-            if shortest_path is None or (this_path is not None and len(this_path) < len(shortest_path)):
-                shortest_path = this_path
-        return shortest_path
-
-
-
-
-
-
-
data/core/site-packages/pre_build/KafNafParser/features_data.py
@@ -1,71 +0,0 @@
-from lxml import etree
-from lxml.objectify import dump
-from references_data import *
-
-
-
-class Cproperty:
-    def __init__(self,node=None,type='NAF'):
-        self.type = type
-        if node is None:
-            self.node = etree.Element('property')
-        else:
-            self.node = node
-
-    def get_id(self):
-        if self.type == 'KAF':
-            return self.node.get('pid')
-        elif self.type == 'NAF':
-            return self.node.get('id')
-
-    def get_type(self):
-        return self.node.get('lemma')
-
-    def get_references(self):
-        for ref_node in self.node.findall('references'):
-            yield Creferences(ref_node)
-
-
-
-class Cproperties:
-    def __init__(self,node=None,type='NAF'):
-        self.type=type
-        if node is None:
-            self.node = etree.Element('properties')
-        else:
-            self.node = node
-
-    def __iter__(self):
-        for prop_node in self.node.findall('property'):
-            yield Cproperty(prop_node,self.type)
-
-class Cfeatures:
-    def __init__(self,node=None,type='NAF'):
-        self.type = type
-        if node is None:
-            self.node = etree.Element('features')
-        else:
-            self.node = node
-
-    def to_kaf(self):
-        if self.type == 'NAF':
-            ##convert all the properties
-            for node in self.node.findall('properties/property'):
-                node.set('pid',node.get('id'))
-                del node.attrib['id']
-
-    def to_naf(self):
-        if self.type == 'KAF':
-            ##convert all the properties
-            for node in self.node.findall('properties/property'):
-                node.set('id',node.get('pid'))
-                del node.attrib['pid']
-
-
-    def get_properties(self):
-        node_prop = self.node.find('properties')
-        if node_prop is not None:
-            obj_properties = Cproperties(node_prop,self.type)
-            for prop in obj_properties:
-                yield prop
-
data/core/site-packages/pre_build/KafNafParser/header_data.py
@@ -1,127 +0,0 @@
-# Modified to KAF / NAF
-
-from lxml import etree
-import time
-
-class CfileDesc:
-    def __init__(self,node=None):
-        self.type = 'KAF/NAF'
-        if node is None:
-            self.node = etree.Element('fileDesc')
-        else:
-            self.node = node
-
-        #self.title='' #self.author='' #self.creationtime='' #self.filename='' #self.filetype='' #self.pages=''
-
-
-class Cpublic:
-    def __init__(self,node=None):
-        self.type = 'KAF/NAF'
-        if node is None:
-            self.node = etree.Element('public')
-        else:
-            self.node = node
-
-        #self.publicId = ''
-        #slf.uri = ''
-
-
-class Clp:
-    def __init__(self,node=None,name="",version="",timestamp=None):
-        self.type = 'KAF/NAF'
-        if node is None:
-            self.node = etree.Element('lp')
-            self.set_name(name)
-            self.set_version(name)
-            self.set_timestamp(timestamp)
-        else:
-            self.node = node
-
-    def set_name(self,name):
-        self.node.set('name',name)
-
-    def set_version(self,version):
-        self.node.set('version',version)
-
-    def set_timestamp(self,timestamp=None):
-        if timestamp is None:
-            import time
-            timestamp = time.strftime('%Y-%m-%dT%H:%M:%S%Z')
-        self.node.set('timestamp',timestamp)
-
-
-    def get_node(self):
-        return self.node
-
-
-class ClinguisticProcessors:
-    def __init__(self,node=None):
-        self.type = 'KAF/NAF'
-        if node is None:
-            self.node = etree.Element('linguisticProcessors')
-        else:
-            self.node = node
-
-    def get_layer(self):
-        return self.node.get('layer')
-
-    def set_layer(self,layer):
-        self.node.set('layer',layer)
-
-    def add_linguistic_processor(self,my_lp):
-        self.node.append(my_lp.get_node())
-
-    def get_node(self):
-        return self.node
-
-
-class CHeader:
-    def __init__(self,node=None,type='NAF'):
-        self.type = type
-        if node is None:
-            if self.type == 'NAF':
-                self.node = etree.Element('nafHeader')
-            elif self.type == 'KAF':
-                self.node = etree.Element('kafHeader')
-        else:
-            self.node = node
-
-    def to_kaf(self):
-        if self.type == 'NAF':
-            self.node.tag = 'kafHeader'
-            self.type = 'KAF'
-
-    def to_naf(self):
-        if self.type == 'KAF':
-            self.node.tag = 'nafHeader'
-            self.type = 'NAF'
-
-    def add_linguistic_processors(self,linpro):
-        self.node.append(linpro.get_node())
-
-    def remove_lp(self,layer):
-        for this_node in self.node.findall('linguisticProcessors'):
-            if this_node.get('layer') == layer:
-                self.node.remove(this_node)
-                break
-
-
-    def add_linguistic_processor(self, layer ,my_lp):
-        ## Locate the linguisticProcessor element for taht layer
-        found_lp_obj = None
-        for this_lp in self.node.findall('linguisticProcessors'):
-            lp_obj = ClinguisticProcessors(this_lp)
-            if lp_obj.get_layer() == layer:
-                found_lp_obj = lp_obj
-                break
-
-        if found_lp_obj is None: #Not found
-            found_lp_obj = ClinguisticProcessors()
-            found_lp_obj.set_layer(layer)
-            self.add_linguistic_processors(found_lp_obj)
-
-        found_lp_obj.add_linguistic_processor(my_lp)
-
-
-
-
data/core/site-packages/pre_build/KafNafParser/opinion_data.py
@@ -1,200 +0,0 @@
-#Modified for KAF NAF ok
-
-from lxml import etree
-from lxml.objectify import dump
-from span_data import *
-
-
-
-class Cholder:
-    def __init__(self,node=None):
-        self.type = 'NAF/KAF'
-        if node is None:
-            self.node = etree.Element('opinion_holder')
-        else:
-            self.node = node
-
-    def set_span(self,my_span):
-        self.node.append(my_span.get_node())
-
-    def set_comment(self,c):
-        c = c.replace('--','- -')
-        self.node.insert(0,etree.Comment(c) )
-
-    def get_span(self):
-        span_obj = self.node.find('span')
-        if span_obj is not None:
-            return Cspan(span_obj)
-        return None
-
-    def __str__(self):
-        return dump(self.node)
-
-    def get_node(self):
-        return self.node
-
-
-class Ctarget:
-    def __init__(self,node=None):
-        self.type = 'NAF/KAF'
-        if node is None:
-            self.node = etree.Element('opinion_target')
-        else:
-            self.node = node
-
-    def set_comment(self,c):
-        c = c.replace('--','- -')
-        self.node.insert(0,etree.Comment(c) )
-
-    def get_comment(self):
-        return self.node_comment
-
-    def set_span(self,my_span):
-        self.node.append(my_span.get_node())
-
-    def get_span(self):
-        span_obj = self.node.find('span')
-        if span_obj is not None:
-            return Cspan(span_obj)
-        return None
-
-    def __str__(self):
-        return dump(self.node)
-
-    def get_node(self):
-        return self.node
-
-
-class Cexpression:
-    def __init__(self,node=None):
-        self.type = 'NAF/KAF'
-        if node is None:
-            self.node = etree.Element('opinion_expression')
-        else:
-            self.node = node
-
-    def set_comment(self,c):
-        c = c.replace('--','- -')
-        self.node.insert(0,etree.Comment(c))
-
-    def set_polarity(self,pol):
-        self.node.set('polarity',pol)
-
-    def get_polarity(self):
-        return self.node.get('polarity')
-
-    def set_strength(self,st):
-        self.node.set('strength',st)
-
-    def get_strength(self):
-        return self.node.get('strength')
-
-    def set_span(self,my_span):
-        self.node.append(my_span.get_node())
-
-    def get_span(self):
-        span_obj = self.node.find('span')
-        if span_obj is not None:
-            return Cspan(span_obj)
-        return None
-
-    def __str__(self):
-        return dump(self.node)
-
-    def get_node(self):
-        return self.node
-
-class Copinion:
-    def __init__(self,node=None,type='NAF'):
-        self.type = type
-        if node is None:
-            self.node = etree.Element('opinion')
-        else:
-            self.node = node
-
-    def set_id(self,my_id):
-        if self.type == 'NAF':
-            self.node.set('id',my_id)
-        elif self.type == 'KAF':
-            self.node.set('oid',my_id)
-
-    def get_id(self):
-        if self.type == 'NAF':
-            self.node.het('id')
-        elif self.type == 'KAF':
-            self.node.get('oid')
-
-    def set_holder(self,hol):
-        self.node.append(hol.get_node())
-
-    def get_holder(self):
-        node_hol = self.node.find('opinion_holder')
-        if node_hol is not None:
-            return Cholder(node_hol)
-        else:
-            return None
-
-    def set_target(self,tar):
-        self.node.append(tar.get_node())
-
-    def get_target(self):
-        node_target = self.node.find('opinion_target')
-        if node_target is not None:
-            return Ctarget(node_target)
-        else:
-            return None
-
-    def set_expression(self,exp):
-        self.node.append(exp.get_node())
-
-    def get_expression(self):
-        node_exp = self.node.find('opinion_expression')
-        if node_exp is not None:
-            return Cexpression(node_exp)
-        else:
-            return None
-
-    def __str__(self):
-        return dump(self.node)
-
-    def get_node(self):
-        return self.node
-
-
-
-
-
-class Copinions:
-    def __init__(self,node=None,type='NAF'):
-        self.type = type
-        if node is None:
-            self.node = etree.Element('opinions')
-        else:
-            self.node = node
-
-    def __get_opinion_nodes(self):
-        for node in self.node.findall('opinion'):
-            yield node
-
-    def get_opinions(self):
-        for node in self.__get_opinion_nodes():
-            yield Copinion(node,self.type)
-
-    def to_kaf(self):
-        if self.type == 'NAF':
-            for node in self.__get_opinion_nodes():
-                node.set('oid',node.get('id'))
-                del node.attrib['id']
-
-    def to_naf(self):
-        if self.type == 'KAF':
-            for node in self.__get_opinion_nodes():
-                node.set('id',node.get('oid'))
-                del node.attrib['oid']
-
-
-    def add_opinion(self,opi_obj):
-        self.node.append(opi_obj.get_node())
-
-    def get_node(self):
-        return self.node