opener-kaf-naf-parser 1.0.0
- checksums.yaml +7 -0
- data/LICENSE +674 -0
- data/README.md +46 -0
- data/bin/kaf-naf-parser +8 -0
- data/bin/kaf-naf-parser-server +10 -0
- data/bin/kaf-to-naf +7 -0
- data/bin/naf-to-kaf +7 -0
- data/config.ru +4 -0
- data/core/kaf-naf-parser.py +42 -0
- data/core/packages/KafNafParser-1.2.tar.gz +0 -0
- data/core/packages/VUA_pylib-1.3.tar.gz +0 -0
- data/core/site-packages/pre_build/KafNafParser-1.2-py2.7.egg-info/PKG-INFO +10 -0
- data/core/site-packages/pre_build/KafNafParser-1.2-py2.7.egg-info/SOURCES.txt +22 -0
- data/core/site-packages/pre_build/KafNafParser-1.2-py2.7.egg-info/dependency_links.txt +1 -0
- data/core/site-packages/pre_build/KafNafParser-1.2-py2.7.egg-info/installed-files.txt +47 -0
- data/core/site-packages/pre_build/KafNafParser-1.2-py2.7.egg-info/top_level.txt +1 -0
- data/core/site-packages/pre_build/KafNafParser/KafNafParserMod.py +338 -0
- data/core/site-packages/pre_build/KafNafParser/KafNafParserMod.pyc +0 -0
- data/core/site-packages/pre_build/KafNafParser/__init__.py +14 -0
- data/core/site-packages/pre_build/KafNafParser/__init__.pyc +0 -0
- data/core/site-packages/pre_build/KafNafParser/constituency_data.py +125 -0
- data/core/site-packages/pre_build/KafNafParser/constituency_data.pyc +0 -0
- data/core/site-packages/pre_build/KafNafParser/coreference_data.py +52 -0
- data/core/site-packages/pre_build/KafNafParser/coreference_data.pyc +0 -0
- data/core/site-packages/pre_build/KafNafParser/dependency_data.py +80 -0
- data/core/site-packages/pre_build/KafNafParser/dependency_data.pyc +0 -0
- data/core/site-packages/pre_build/KafNafParser/entity_data.py +59 -0
- data/core/site-packages/pre_build/KafNafParser/entity_data.pyc +0 -0
- data/core/site-packages/pre_build/KafNafParser/external_references_data.py +41 -0
- data/core/site-packages/pre_build/KafNafParser/external_references_data.pyc +0 -0
- data/core/site-packages/pre_build/KafNafParser/feature_extractor/__init__.py +2 -0
- data/core/site-packages/pre_build/KafNafParser/feature_extractor/__init__.pyc +0 -0
- data/core/site-packages/pre_build/KafNafParser/feature_extractor/constituency.py +205 -0
- data/core/site-packages/pre_build/KafNafParser/feature_extractor/constituency.pyc +0 -0
- data/core/site-packages/pre_build/KafNafParser/feature_extractor/dependency.py +300 -0
- data/core/site-packages/pre_build/KafNafParser/feature_extractor/dependency.pyc +0 -0
- data/core/site-packages/pre_build/KafNafParser/features_data.py +71 -0
- data/core/site-packages/pre_build/KafNafParser/features_data.pyc +0 -0
- data/core/site-packages/pre_build/KafNafParser/header_data.py +127 -0
- data/core/site-packages/pre_build/KafNafParser/header_data.pyc +0 -0
- data/core/site-packages/pre_build/KafNafParser/opinion_data.py +200 -0
- data/core/site-packages/pre_build/KafNafParser/opinion_data.pyc +0 -0
- data/core/site-packages/pre_build/KafNafParser/references_data.py +15 -0
- data/core/site-packages/pre_build/KafNafParser/references_data.pyc +0 -0
- data/core/site-packages/pre_build/KafNafParser/span_data.py +63 -0
- data/core/site-packages/pre_build/KafNafParser/span_data.pyc +0 -0
- data/core/site-packages/pre_build/KafNafParser/term_data.py +111 -0
- data/core/site-packages/pre_build/KafNafParser/term_data.pyc +0 -0
- data/core/site-packages/pre_build/KafNafParser/term_sentiment_data.py +42 -0
- data/core/site-packages/pre_build/KafNafParser/term_sentiment_data.pyc +0 -0
- data/core/site-packages/pre_build/KafNafParser/text_data.py +90 -0
- data/core/site-packages/pre_build/KafNafParser/text_data.pyc +0 -0
- data/core/site-packages/pre_build/VUA_pylib-1.3-py2.7.egg-info/PKG-INFO +10 -0
- data/core/site-packages/pre_build/VUA_pylib-1.3-py2.7.egg-info/SOURCES.txt +14 -0
- data/core/site-packages/pre_build/VUA_pylib-1.3-py2.7.egg-info/dependency_links.txt +1 -0
- data/core/site-packages/pre_build/VUA_pylib-1.3-py2.7.egg-info/installed-files.txt +23 -0
- data/core/site-packages/pre_build/VUA_pylib-1.3-py2.7.egg-info/top_level.txt +1 -0
- data/core/site-packages/pre_build/VUA_pylib/__init__.py +1 -0
- data/core/site-packages/pre_build/VUA_pylib/__init__.pyc +0 -0
- data/core/site-packages/pre_build/VUA_pylib/common/__init__.py +1 -0
- data/core/site-packages/pre_build/VUA_pylib/common/__init__.pyc +0 -0
- data/core/site-packages/pre_build/VUA_pylib/common/common.py +28 -0
- data/core/site-packages/pre_build/VUA_pylib/common/common.pyc +0 -0
- data/core/site-packages/pre_build/VUA_pylib/corpus_reader/__init__.py +1 -0
- data/core/site-packages/pre_build/VUA_pylib/corpus_reader/__init__.pyc +0 -0
- data/core/site-packages/pre_build/VUA_pylib/corpus_reader/google_web_nl.py +156 -0
- data/core/site-packages/pre_build/VUA_pylib/corpus_reader/google_web_nl.pyc +0 -0
- data/core/site-packages/pre_build/VUA_pylib/io_utils/__init__.py +1 -0
- data/core/site-packages/pre_build/VUA_pylib/io_utils/__init__.pyc +0 -0
- data/core/site-packages/pre_build/VUA_pylib/io_utils/feature_file.py +121 -0
- data/core/site-packages/pre_build/VUA_pylib/io_utils/feature_file.pyc +0 -0
- data/core/site-packages/pre_build/VUA_pylib/lexicon/__init__.py +1 -0
- data/core/site-packages/pre_build/VUA_pylib/lexicon/__init__.pyc +0 -0
- data/core/site-packages/pre_build/VUA_pylib/lexicon/lexicon.py +72 -0
- data/core/site-packages/pre_build/VUA_pylib/lexicon/lexicon.pyc +0 -0
- data/core/site-packages/pre_build/VUKafParserPy-1.0-py2.7.egg-info/PKG-INFO +10 -0
- data/core/site-packages/pre_build/VUKafParserPy-1.0-py2.7.egg-info/SOURCES.txt +7 -0
- data/core/site-packages/pre_build/VUKafParserPy-1.0-py2.7.egg-info/dependency_links.txt +1 -0
- data/core/site-packages/pre_build/VUKafParserPy-1.0-py2.7.egg-info/installed-files.txt +11 -0
- data/core/site-packages/pre_build/VUKafParserPy-1.0-py2.7.egg-info/top_level.txt +1 -0
- data/core/site-packages/pre_build/VUKafParserPy/KafDataObjectsMod.py +165 -0
- data/core/site-packages/pre_build/VUKafParserPy/KafDataObjectsMod.pyc +0 -0
- data/core/site-packages/pre_build/VUKafParserPy/KafParserMod.py +439 -0
- data/core/site-packages/pre_build/VUKafParserPy/KafParserMod.pyc +0 -0
- data/core/site-packages/pre_build/VUKafParserPy/__init__.py +7 -0
- data/core/site-packages/pre_build/VUKafParserPy/__init__.pyc +0 -0
- data/ext/hack/support.rb +38 -0
- data/lib/opener/kaf_naf_parser.rb +77 -0
- data/lib/opener/kaf_naf_parser/cli.rb +92 -0
- data/lib/opener/kaf_naf_parser/public/markdown.css +284 -0
- data/lib/opener/kaf_naf_parser/server.rb +16 -0
- data/lib/opener/kaf_naf_parser/version.rb +5 -0
- data/lib/opener/kaf_naf_parser/views/index.erb +103 -0
- data/lib/opener/kaf_naf_parser/views/result.erb +15 -0
- data/opener-kaf-naf-parser.gemspec +38 -0
- data/pre_build_requirements.txt +3 -0
- metadata +283 -0
data/core/site-packages/pre_build/KafNafParser/feature_extractor/dependency.py
@@ -0,0 +1,300 @@
from operator import itemgetter
from VUA_pylib.common import get_max_distr_dict
import sys

class Cdependency_extractor:
    def __init__(self, knaf_obj):
        self.naf = knaf_obj
        self.relations_for_term = {}
        self.reverse_relations_for_term = {}
        self.prefix_for_reverse = ''

        already_linked = {}
        for dep in knaf_obj.get_dependencies():
            term_from = dep.get_from()
            term_to = dep.get_to()
            rfunc = dep.get_function()

            # Reversed dependencies are skipped...
            #if rfunc.startswith('rhd/') or rfunc.startswith('whd/'):
            #    continue

            # For detecting cycles like:
            # <!-- rhd/body(geef,wat) -->
            # <dep from="t19" to="t15" rfunc="rhd/body"/>
            # <!-- hd/su(wat,geef) -->
            # <dep from="t15" to="t19" rfunc="hd/su"/>
            '''
            if term_from in already_linked and term_to in already_linked[term_from]:
                # There could be a cycle, skip this
                print>>sys.stderr, 'Skipped from', term_from, 'to', term_to, 'func', rfunc, ' cycle detected'
                continue
            else:
                # Include term_from as linked with term_to for the future...
                if term_to not in already_linked:
                    already_linked[term_to] = set()
                already_linked[term_to].add(term_from)
            '''

            if term_from in self.relations_for_term:
                self.relations_for_term[term_from].append((rfunc, term_to))
            else:
                self.relations_for_term[term_from] = [(rfunc, term_to)]

            if term_to in self.reverse_relations_for_term:
                self.reverse_relations_for_term[term_to].append((self.prefix_for_reverse + rfunc, term_from))
            else:
                self.reverse_relations_for_term[term_to] = [(self.prefix_for_reverse + rfunc, term_from)]

        self.paths_for_termid = {}
        self.sentence_for_termid = {}
        self.top_relation_for_term = {}   ## termid --> (relation, topnode)
        self.root_for_sentence = {}       ## sentenceid --> termid

        for term_obj in knaf_obj.get_terms():
            termid = term_obj.get_id()

            # Calculate the sentence id for the term id
            span_ids = term_obj.get_span().get_span_ids()
            token_obj = knaf_obj.get_token(span_ids[0])
            sentence = token_obj.get_sent()

            self.sentence_for_termid[termid] = sentence

            #paths = self.__propagate_node(termid,[])
            #inversed = self.__reverse_propagate_node(termid)

            ## Due to the change of direction of dependencies...
            inversed = self.__propagate_node(termid, [])
            paths = self.__reverse_propagate_node(termid)

            ## Calculate the top relation for the node, i.e. the relation with the main root of the tree
            if len(inversed) != 0:
                for ip in inversed:
                    if len(ip) != 0:
                        self.top_relation_for_term[termid] = ip[-1]   ## ex. ('NMOD', 't2')
                        root = ip[-1][1]
                        if sentence not in self.root_for_sentence:
                            self.root_for_sentence[sentence] = {}

                        if root not in self.root_for_sentence[sentence]:
                            self.root_for_sentence[sentence][root] = 0
                        else:
                            self.root_for_sentence[sentence][root] += 1
                        break

            self.paths_for_termid[termid] = paths + inversed

        ## Keep only the most frequent root per sentence
        for sent_id, distr in self.root_for_sentence.items():
            ## get_max_distr_dict imported from VUA_pylib.common
            most_freq, c = get_max_distr_dict(distr)
            self.root_for_sentence[sent_id] = most_freq

    def __propagate_node(self, node, already_propagated=None):
        # Fresh visited list per top-level call (a mutable default would leak state across terms)
        if already_propagated is None:
            already_propagated = []
        paths = []

        relations = self.relations_for_term.get(node)
        if relations is None:             ## Base case
            paths = [[]]
        elif node in already_propagated:  ## Cycle guard
            paths = [[]]
        else:
            already_propagated.append(node)
            for func, target_node in relations:
                new_paths = self.__propagate_node(target_node, already_propagated)
                for new_path in new_paths:
                    new_path.insert(0, (func, target_node))
                    paths.append(new_path)
        return paths

    def __reverse_propagate_node(self, node, already_propagated=None):
        if already_propagated is None:
            already_propagated = []
        paths = []
        relations = self.reverse_relations_for_term.get(node)
        if relations is None:             ## Base case
            paths = [[]]
        elif node in already_propagated:  ## Cycle guard
            paths = [[]]
        else:
            already_propagated.append(node)
            for func, target_node in relations:
                new_paths = self.__reverse_propagate_node(target_node, already_propagated)
                for new_path in new_paths:
                    new_path.insert(0, (func, target_node))
                    paths.append(new_path)
        return paths

    # Get the shortest path between 2 term ids
    def get_shortest_path(self, term1, term2):
        dep_path = None
        if term1 == term2:
            dep_path = []
        else:
            paths1 = self.paths_for_termid[term1]
            paths2 = self.paths_for_termid[term2]

            ## Check if term2 is on paths1
            hits = []   ## list of (common_id, idx1+idx2, idx1, idx2, numpath1, numpath2)
            for num1, p1 in enumerate(paths1):
                ids1 = [my_id for my_func, my_id in p1]
                if term2 in ids1:
                    idx1 = ids1.index(term2)
                    hits.append((term2, idx1, idx1, 0, num1, None))

            ## Check if term1 is on paths2
            for num2, p2 in enumerate(paths2):
                ids2 = [my_id for my_func, my_id in p2]
                if term1 in ids2:
                    idx2 = ids2.index(term1)
                    hits.append((term1, idx2, 0, idx2, None, num2))

            # Pair by pair: look for a common node on any pair of paths
            for num1, p1 in enumerate(paths1):
                ids1 = [my_id for my_func, my_id in p1]
                for num2, p2 in enumerate(paths2):
                    ids2 = [my_id for my_func, my_id in p2]
                    common_ids = set(ids1) & set(ids2)
                    for common_id in common_ids:
                        idx1 = ids1.index(common_id)
                        idx2 = ids2.index(common_id)
                        hits.append((common_id, idx1 + idx2, idx1, idx2, num1, num2))

            if len(hits) != 0:
                dep_path = []
                hits.sort(key=itemgetter(1))   # keep the hit with the shortest combined distance
                best_hit = hits[0]
                common_id, _, idx1, idx2, numpath1, numpath2 = best_hit

                if numpath2 is None:     # term2 is on one of the paths of term1
                    path1 = paths1[numpath1]
                    my_rels1 = path1[:idx1 + 1]
                    for func, node in my_rels1:
                        dep_path.append(func)
                elif numpath1 is None:   # term1 is on one of the paths of term2
                    path2 = paths2[numpath2]
                    my_rels2 = path2[:idx2 + 1]
                    for func, node in my_rels2:
                        dep_path.append(func)
                else:                    # There is a common node linking both
                    path1 = paths1[numpath1]
                    my_rels1 = path1[:idx1 + 1]

                    path2 = paths2[numpath2]
                    my_rels2 = path2[:idx2 + 1]

                    for func, node in my_rels1:
                        dep_path.append(func)

                    for func, node in my_rels2[-1::-1]:
                        dep_path.append(func)
        return dep_path

    ## Get the shortest dependency path between 2 sets of spans
    def get_shortest_path_spans(self, span1, span2):
        shortest_path = None
        for term1 in span1:
            for term2 in span2:
                this_path = self.get_shortest_path(term1, term2)
                if shortest_path is None or (this_path is not None and len(this_path) < len(shortest_path)):
                    shortest_path = this_path
        return shortest_path

    # Get the dependency path to the sentence root for a term id
    def get_path_to_root(self, termid):
        # Get the sentence for the term
        root = None
        sentence = self.sentence_for_termid.get(termid)

        if sentence is None:   # try with the top node
            top_node = self.top_relation_for_term.get(termid)
            if top_node is not None:
                root = top_node[1]
            else:
                return None
        else:
            if sentence in self.root_for_sentence:
                root = self.root_for_sentence[sentence]
            else:
                ## There is no root for this sentence
                return None
        # At this point the root should be properly set
        path = self.get_shortest_path(termid, root)
        return path

    # Get the shortest dependency path to the sentence root for a span of ids
    # extractor.get_shortest_path_to_root_span(['t444','t445','t446'])
    def get_shortest_path_to_root_span(self, span):
        shortest_path = None
        for termid in span:
            this_path = self.get_path_to_root(termid)
            ## For punctuation tokens the path to the root is usually None: there are no dependencies...
            if shortest_path is None or (this_path is not None and len(this_path) < len(shortest_path)):
                shortest_path = this_path
        return shortest_path
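Below is a minimal usage sketch for the extractor above (not part of the diff). It assumes the bundled pre_build directory is on the Python path and that, as in the upstream KafNafParser package, the KafNafParser class from KafNafParserMod is re-exported by the package __init__; the input file name and term ids are placeholders.

# Hypothetical usage sketch -- file name and term ids are placeholders.
from KafNafParser import KafNafParser
from KafNafParser.feature_extractor.dependency import Cdependency_extractor

knaf_obj = KafNafParser('input.naf')            # any KAF/NAF file with a dependency layer
extractor = Cdependency_extractor(knaf_obj)

# Shortest dependency path (a list of rfunc labels) between two term ids
print(extractor.get_shortest_path('t1', 't5'))

# Path from one term to its sentence root, and the shortest such path for a span
print(extractor.get_path_to_root('t1'))
print(extractor.get_shortest_path_to_root_span(['t4', 't5', 't6']))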
data/core/site-packages/pre_build/KafNafParser/features_data.py
@@ -0,0 +1,71 @@
from lxml import etree
from lxml.objectify import dump
from references_data import *


class Cproperty:
    def __init__(self, node=None, type='NAF'):
        self.type = type
        if node is None:
            self.node = etree.Element('property')
        else:
            self.node = node

    def get_id(self):
        if self.type == 'KAF':
            return self.node.get('pid')
        elif self.type == 'NAF':
            return self.node.get('id')

    def get_type(self):
        return self.node.get('lemma')

    def get_references(self):
        for ref_node in self.node.findall('references'):
            yield Creferences(ref_node)


class Cproperties:
    def __init__(self, node=None, type='NAF'):
        self.type = type
        if node is None:
            self.node = etree.Element('properties')
        else:
            self.node = node

    def __iter__(self):
        for prop_node in self.node.findall('property'):
            yield Cproperty(prop_node, self.type)


class Cfeatures:
    def __init__(self, node=None, type='NAF'):
        self.type = type
        if node is None:
            self.node = etree.Element('features')
        else:
            self.node = node

    def to_kaf(self):
        if self.type == 'NAF':
            ## convert all the properties
            for node in self.node.findall('properties/property'):
                node.set('pid', node.get('id'))
                del node.attrib['id']

    def to_naf(self):
        if self.type == 'KAF':
            ## convert all the properties
            for node in self.node.findall('properties/property'):
                node.set('id', node.get('pid'))
                del node.attrib['pid']

    def get_properties(self):
        node_prop = self.node.find('properties')
        if node_prop is not None:
            obj_properties = Cproperties(node_prop, self.type)
            for prop in obj_properties:
                yield prop
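A short sketch (not from the diff) of the KAF/NAF property-id conversion performed by Cfeatures.to_kaf() and to_naf() above; the XML snippet is made up for illustration, and the import path assumes the pre_build packages shown in the file listing are importable.

from lxml import etree
from KafNafParser.features_data import Cfeatures   # assumed import path (see file listing)

# A made-up NAF-style features layer: NAF properties carry an "id" attribute.
xml = '<features><properties><property id="p1" lemma="quality"/></properties></features>'
features = Cfeatures(etree.fromstring(xml), type='NAF')

for prop in features.get_properties():
    print(prop.get_id())      # -> p1 (read from "id" because the type is NAF)

features.to_kaf()             # rewrites every property: id -> pid
print(etree.tostring(features.node))
# <features><properties><property lemma="quality" pid="p1"/></properties></features>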
Binary file (data/core/site-packages/pre_build/KafNafParser/features_data.pyc) not shown
data/core/site-packages/pre_build/KafNafParser/header_data.py
@@ -0,0 +1,127 @@
# Modified to KAF / NAF

from lxml import etree
import time

class CfileDesc:
    def __init__(self, node=None):
        self.type = 'KAF/NAF'
        if node is None:
            self.node = etree.Element('fileDesc')
        else:
            self.node = node

        #self.title='' #self.author='' #self.creationtime='' #self.filename='' #self.filetype='' #self.pages=''


class Cpublic:
    def __init__(self, node=None):
        self.type = 'KAF/NAF'
        if node is None:
            self.node = etree.Element('public')
        else:
            self.node = node

        #self.publicId = ''
        #self.uri = ''


class Clp:
    def __init__(self, node=None, name="", version="", timestamp=None):
        self.type = 'KAF/NAF'
        if node is None:
            self.node = etree.Element('lp')
            self.set_name(name)
            self.set_version(version)
            self.set_timestamp(timestamp)
        else:
            self.node = node

    def set_name(self, name):
        self.node.set('name', name)

    def set_version(self, version):
        self.node.set('version', version)

    def set_timestamp(self, timestamp=None):
        if timestamp is None:
            timestamp = time.strftime('%Y-%m-%dT%H:%M:%S%Z')
        self.node.set('timestamp', timestamp)

    def get_node(self):
        return self.node


class ClinguisticProcessors:
    def __init__(self, node=None):
        self.type = 'KAF/NAF'
        if node is None:
            self.node = etree.Element('linguisticProcessors')
        else:
            self.node = node

    def get_layer(self):
        return self.node.get('layer')

    def set_layer(self, layer):
        self.node.set('layer', layer)

    def add_linguistic_processor(self, my_lp):
        self.node.append(my_lp.get_node())

    def get_node(self):
        return self.node


class CHeader:
    def __init__(self, node=None, type='NAF'):
        self.type = type
        if node is None:
            if self.type == 'NAF':
                self.node = etree.Element('nafHeader')
            elif self.type == 'KAF':
                self.node = etree.Element('kafHeader')
        else:
            self.node = node

    def to_kaf(self):
        if self.type == 'NAF':
            self.node.tag = 'kafHeader'
            self.type = 'KAF'

    def to_naf(self):
        if self.type == 'KAF':
            self.node.tag = 'nafHeader'
            self.type = 'NAF'

    def add_linguistic_processors(self, linpro):
        self.node.append(linpro.get_node())

    def remove_lp(self, layer):
        for this_node in self.node.findall('linguisticProcessors'):
            if this_node.get('layer') == layer:
                self.node.remove(this_node)
                break

    def add_linguistic_processor(self, layer, my_lp):
        ## Locate the linguisticProcessors element for that layer
        found_lp_obj = None
        for this_lp in self.node.findall('linguisticProcessors'):
            lp_obj = ClinguisticProcessors(this_lp)
            if lp_obj.get_layer() == layer:
                found_lp_obj = lp_obj
                break

        if found_lp_obj is None:   # Not found: create the wrapper for this layer
            found_lp_obj = ClinguisticProcessors()
            found_lp_obj.set_layer(layer)
            self.add_linguistic_processors(found_lp_obj)

        found_lp_obj.add_linguistic_processor(my_lp)
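To close, a sketch of how the header classes above fit together (illustrative, not part of the gem): build an lp entry, attach it to a layer through CHeader.add_linguistic_processor, and serialize. Only methods defined in this file are used; the layer name and import path are assumptions.

from lxml import etree
from KafNafParser.header_data import CHeader, Clp   # assumed import path (see file listing)

header = CHeader(type='NAF')                                  # creates an empty <nafHeader>
my_lp = Clp(name='opener-kaf-naf-parser', version='1.0.0')    # timestamp defaults to "now"

# Adds a <linguisticProcessors layer="deps"> wrapper if none exists, then appends the <lp/> to it.
header.add_linguistic_processor('deps', my_lp)

header.to_kaf()                                               # renames nafHeader -> kafHeader
print(etree.tostring(header.node, pretty_print=True))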