(* test_conll.ml *)
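(* Test driver for parsing CONLL dependency corpora with the ENIAM pipeline.
   It sends queries to the preprocessing server, parses the returned sentences
   (ENIAM categorial parser, CONLL/Mate/Swigra dependency inputs) and writes
   HTML visualisations of the results.
   The empty_* records below are default result values that the parsing
   functions fill in incrementally. *)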
let empty_result = {
  input_text=RawText "";
  pre_text=RawText "";
  status=Idle;
  msg="";
  pre_time1=0.;
  pre_time2=0.;
  parse_time=0.;
  parsed_text=RawText "";
  semantic_time=0.;
  selected_sent_text=RawText "";
  semantic_text=RawText "";
  selected_semantic_text=RawText "";
  tokens=ExtArray.make 1 ENIAMtokenizerTypes.empty_token;
  lex_sems=ExtArray.make 1 ENIAMlexSemanticsTypes.empty_lex_sem;
  }

let empty_eniam_parse_result = {
  file_prefix="";
  status=Idle;
  msg="";
  lex_time=0.;
  parse_time=0.;
  reduction_time=0.;
  sem_time=0.;
  paths_size=0;
  chart_size=0;
  dependency_tree_size=0;
  chart=[| |];
  dependency_tree=[| |];
  }

let empty_conll_parse_result = {
  file_prefix="";
  status=Idle;
  msg="";
  lex_time=0.;
  parse_time=0.;
  reduction_time=0.;
  sem_time=0.;
  paths_size=0;
  dependency_tree_size=0;
  paths=[| |];
  dep_chart=DepNode(-100,[],[],[]);
  parsed_dep_chart=[];
  not_parsed_dep_chart=(-100,[],[],[]);
  dependency_tree=[| |];
  }
open Printf
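(* The translate_* functions copy the subsyntax-level structures
   (ENIAMsubsyntaxTypes.text / paragraph / sentence) into the local
   execution types used by this module, mapping each processing mode
   along the way. *)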
let translate_mode = function
    ENIAMsubsyntaxTypes.Raw -> Raw
  | ENIAMsubsyntaxTypes.Struct -> Struct
  | ENIAMsubsyntaxTypes.CONLL -> CONLL
  | ENIAMsubsyntaxTypes.ENIAM -> ENIAM
  | ENIAMsubsyntaxTypes.Mate -> Mate
  | ENIAMsubsyntaxTypes.Swigra -> Swigra
  | ENIAMsubsyntaxTypes.POLFIE -> POLFIE

let rec translate_sentence = function
    ENIAMsubsyntaxTypes.RawSentence s -> RawSentence s
  | ENIAMsubsyntaxTypes.StructSentence(paths,last) -> StructSentence(paths,last)
  | ENIAMsubsyntaxTypes.DepSentence(paths) -> DepSentence(paths)
  | ENIAMsubsyntaxTypes.QuotedSentences sentences ->
      QuotedSentences(Xlist.map sentences (fun p ->
        {pid=p.ENIAMsubsyntaxTypes.pid; pbeg=p.ENIAMsubsyntaxTypes.pbeg; plen=p.ENIAMsubsyntaxTypes.plen;
         pnext=p.ENIAMsubsyntaxTypes.pnext; pfile_prefix=p.ENIAMsubsyntaxTypes.pfile_prefix;
         psentence=translate_sentence p.ENIAMsubsyntaxTypes.psentence}))
  | ENIAMsubsyntaxTypes.AltSentence l -> AltSentence(Xlist.map l (fun (mode,sentence) ->
      translate_mode mode, translate_sentence sentence))

let rec translate_paragraph = function
    ENIAMsubsyntaxTypes.RawParagraph s -> RawParagraph s
  | ENIAMsubsyntaxTypes.StructParagraph sentences ->
      StructParagraph(Xlist.map sentences (fun p ->
        {pid=p.ENIAMsubsyntaxTypes.pid; pbeg=p.ENIAMsubsyntaxTypes.pbeg; plen=p.ENIAMsubsyntaxTypes.plen;
         pnext=p.ENIAMsubsyntaxTypes.pnext; pfile_prefix=p.ENIAMsubsyntaxTypes.pfile_prefix;
         psentence=translate_sentence p.ENIAMsubsyntaxTypes.psentence}))
  | ENIAMsubsyntaxTypes.AltParagraph l -> AltParagraph(Xlist.map l (fun (mode,paragraph) ->
      translate_mode mode, translate_paragraph paragraph))

let rec translate_text = function
    ENIAMsubsyntaxTypes.RawText s -> RawText s
  | ENIAMsubsyntaxTypes.StructText paragraphs ->
      StructText(Xlist.map paragraphs translate_paragraph)
  | ENIAMsubsyntaxTypes.AltText l -> AltText(Xlist.map l (fun (mode,text) ->
      translate_mode mode, translate_text text))
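(* eniam_parse_sentence builds an LCG chart from the token graph,
   parses it under a timeout, reduces the resulting term to a
   dependency tree and records the time spent in every phase in the
   returned result record.  Exceptions at each stage are mapped to
   the corresponding error status. *)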
let eniam_parse_sentence timeout test_only_flag paths last tokens lex_sems =
  let result = empty_eniam_parse_result in
  let time2 = time_fun () in
  try
    let chart = LCGlexicon.create (paths,last) tokens lex_sems in
    let chart,references = LCGchart.lazify chart in
    let result = if test_only_flag then result else {result with chart=chart} in
    let time3 = time_fun () in
    let result = {result with lex_time=time3 -. time2} in
    try
      (* print_endline "eniam_parse_sentence 1"; *)
      let chart = LCGchart.parse chart references timeout time_fun in (* note: implicit imperative modification of references *)
      (* print_endline "eniam_parse_sentence 2"; *)
      let time4 = time_fun () in
      let result = if test_only_flag then result else {result with chart=chart} in
      let result = {result with parse_time=time4 -. time3; chart_size=LCGchart.get_no_entries chart} in
      if LCGchart.is_parsed chart then
        try
          let term = LCGchart.get_parsed_term tokens lex_sems chart in
          let dependency_tree = LCGreductions.reduce term references in
          let time5 = time_fun () in
          let result = if test_only_flag then result else {result with dependency_tree=dependency_tree} in
          let result = {result with reduction_time=time5 -. time4; dependency_tree_size=Array.length dependency_tree} in
          if LCGreductions.is_reduced_dependency_tree dependency_tree then
            try
              (* print_endline "eniam_parse_sentence 3"; *)
              LCGreductions.assign_labels dependency_tree; (* note: implicit imperative modification of result *)
              (* print_endline "eniam_parse_sentence 4"; *)
              LCGreductions.remove_cuts dependency_tree; (* note: implicit imperative modification of result *)
              (* if Array.length dependency_tree < 10000 then print_xml_dependency_tree "results/trees/" id dependency_tree; *)
              (* print_endline "eniam_parse_sentence 5"; *)
              let time6 = time_fun () in
              {result with status=Parsed; sem_time=time6 -. time5}
            with e ->
              let time6 = time_fun () in
              {result with status=SemError; msg=Printexc.to_string e; sem_time=time6 -. time5}
          else
            {result with status=NotReduced}
        with
        | SemTooBig ->
            let time5 = time_fun () in
            {result with status=TooManyNodes; reduction_time=time5 -. time4}
        | e ->
            let time5 = time_fun () in
            {result with status=ReductionError; msg=Printexc.to_string e; reduction_time=time5 -. time4}
      else {result with status=NotParsed}
    with
      Timeout t ->
        let time4 = time_fun () in
        {result with status=ParseTimeout; msg=Printf.sprintf "%f" t; parse_time=time4 -. time3}
    | e ->
        let time4 = time_fun () in
        {result with status=ParseError; msg=Printexc.to_string e; parse_time=time4 -. time3}
  with e ->
    let time3 = time_fun () in
    {result with status=LexiconError; msg=Printexc.to_string e; lex_time=time3 -. time2}
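(* conll_parse_sentence is the dependency-grammar counterpart of
   eniam_parse_sentence: the CONLL dependency graph is first adjusted
   by CONLL_adapter.convert_dep_tree, then parsed with LCGchart.dep_parse
   and reduced.  When parsing fails and first_try is set, the sentence is
   retried once with first_try=false, presumably so that the adapter can
   produce an alternative conversion of the tree. *)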
let rec conll_parse_sentence timeout test_only_flag id first_try paths tokens lex_sems =
  let result = empty_conll_parse_result in
  let time2 = time_fun () in
  let paths = CONLL_adapter.convert_dep_tree id first_try paths tokens lex_sems in
  try
    let dep_chart = LCGlexicon.dep_create paths tokens lex_sems in
    let dep_chart,references = LCGchart.dep_lazify dep_chart in
    let result = if test_only_flag then result else {result with dep_chart=dep_chart} in
    let time3 = time_fun () in
    let result = {result with lex_time=time3 -. time2} in
    try
      (* print_endline "conll_parse_sentence 1"; *)
      (* LCGlatexOf.print_references "results/" "references1" references; *)
      let parsed_dep_chart = LCGchart.dep_parse dep_chart references timeout time_fun in (* note: implicit imperative modification of references *)
      (* print_endline "conll_parse_sentence 2"; *)
      (* LCGlatexOf.print_references "results/" "references2" references; *)
      let time4 = time_fun () in
      let result = if test_only_flag then result else {result with parsed_dep_chart=parsed_dep_chart} in
      let result = {result with parse_time=time4 -. time3} in
      if LCGchart.is_dep_parsed parsed_dep_chart then
        try
          let term = LCGchart.get_dep_parsed_term tokens lex_sems parsed_dep_chart in
          (* LCGlatexOf.print_dependency_tree "dep_dependency_tree1" dependency_tree; *)
          let dependency_tree = LCGreductions.reduce term references in
          let time5 = time_fun () in
          let result = if test_only_flag then result else {result with dependency_tree=dependency_tree} in
          let result = {result with reduction_time=time5 -. time4; dependency_tree_size=Array.length dependency_tree} in
          if LCGreductions.is_reduced_dependency_tree dependency_tree then
            try
              (* print_endline "conll_parse_sentence 3"; *)
              LCGreductions.assign_labels dependency_tree; (* note: implicit imperative modification of result *)
              (* print_endline "conll_parse_sentence 4"; *)
              LCGreductions.remove_cuts dependency_tree; (* note: implicit imperative modification of result *)
              (* if Array.length dependency_tree < 10000 then print_xml_dependency_tree "results/trees/" id dependency_tree; *)
              (* print_endline "conll_parse_sentence 5"; *)
              let time6 = time_fun () in
              {result with status=Parsed; sem_time=time6 -. time5}
            with e ->
              let time6 = time_fun () in
              {result with status=SemError; msg=Printexc.to_string e; sem_time=time6 -. time5}
          else
            {result with status=NotReduced}
        with
        | SemTooBig ->
            let time5 = time_fun () in
            {result with status=TooManyNodes; reduction_time=time5 -. time4}
        | e ->
            let time5 = time_fun () in
            {result with status=ReductionError; msg=Printexc.to_string e; reduction_time=time5 -. time4}
      else if first_try
      then conll_parse_sentence timeout test_only_flag id false paths tokens lex_sems
      else {result with status=NotParsed}
    with
      Timeout t ->
        let time4 = time_fun () in
        {result with status=ParseTimeout; msg=Printf.sprintf "%f" t; parse_time=time4 -. time3}
    | NotDepParsed(id_ndp,left,l,right) ->
        if first_try
        then conll_parse_sentence timeout test_only_flag id false paths tokens lex_sems
        else
          let time4 = time_fun () in
          {result with status=NotParsed; not_parsed_dep_chart=(id_ndp,left,l,right); parse_time=time4 -. time3}
    | e ->
        let time4 = time_fun () in
        {result with status=ParseError; msg=Printexc.to_string e; parse_time=time4 -. time3}
  with e -> (*print_endline (Printexc.to_string e);*)
    let time3 = time_fun () in
    if first_try
    then conll_parse_sentence timeout test_only_flag id false paths tokens lex_sems
    else {result with status=LexiconError; msg=Printexc.to_string e; lex_time=time3 -. time2}
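(* The external parsers are started once, as long-running subprocesses:
   the Mate dependency parser (a Java jar) and Swigra (a shell script).
   When a parser is disabled in the configuration, stdin/stdout are used
   as dummy channels so that the bindings still have the expected
   channel types. *)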
let mate_in, mate_out = (*Unix.open_process "java -jar ../dependencyParser/basic/mate-tools/dist/anna-3.5.jar -model ../dependencyParser/basic/mate-tools/examples/160622_Polish_MateParser.mdl -test"*)
  if Paths.config.Paths.mate_parser_enabled then
    Unix.open_process ("java -jar " ^ Paths.config.Paths.mate_parser_path ^ "dist/anna-3.5.jar -model " ^
      Paths.config.Paths.mate_parser_path ^ "examples/160622_Polish_MateParser.mdl -test")
  else stdin, stdout

let swigra_in, swigra_out = (*Unix.open_process "../swigra/parser/run.sh"*)
  if Paths.config.Paths.swigra_enabled then
    Unix.open_process (Paths.config.Paths.swigra_path ^ "run.sh")
  else stdin, stdout

let file_prefix_of_mode = function
    Raw -> "R"
  | Struct -> "St"
  | CONLL -> "C"
  | ENIAM -> "E"
  | Mate -> "M"
  | Swigra -> "S"
  | POLFIE -> "P"
let get_paths old_paths = function
    {ENIAMsubsyntaxTypes.psentence=ENIAMsubsyntaxTypes.DepSentence(paths)},_ ->
      Int.iter 0 (Array.length paths - 1) (fun i ->
        let id,_,_ = old_paths.(i) in
        let _,super,label = paths.(i) in
        paths.(i) <- id,super,label);
      paths
  | _ -> failwith "get_paths"
let rec parse_sentence timeout test_only_flag mode id file_prefix tokens lex_sems = function
    RawSentence s ->
      (match mode with
        Swigra ->
          if not Paths.config.Paths.swigra_enabled then RawSentence s else (
          Printf.fprintf swigra_out "%s\n%!" s;
          print_endline ("swigra: " ^ input_line swigra_in);
          RawSentence s)
      | _ -> RawSentence s)
  | StructSentence(paths,last) ->
      (match mode with
        ENIAM ->
          let result = eniam_parse_sentence timeout test_only_flag paths last tokens lex_sems in
          let result = {result with file_prefix = file_prefix_of_mode mode ^ file_prefix} in
          ENIAMSentence result
      | _ -> failwith "parse_sentence")
  | DepSentence(paths) ->
      (match mode with
        CONLL ->
          let result = conll_parse_sentence timeout test_only_flag id true paths tokens lex_sems in
          let result = {result with
            file_prefix = file_prefix_of_mode mode ^ file_prefix;
            paths = paths} in
          CONLLSentence result
          (* let xml = DepTree.conll_to_xml paths in
             let graph = XmlPrinter.graph_of_xml xml in (* FIXME: needs fixing *)
             Visualization.print_graph "results/" "term_conll" graph;
             let result = {empty_eniam_parse_result with status=Parsed; term=graph} in
             ENIAMSentence result, next_id *)
      | Mate ->
          if not Paths.config.Paths.mate_parser_enabled then DepSentence paths else (
          print_endline "parse_sentence 1";
          (* print_endline (Visualization.html_of_dep_sentence tokens paths); *)
          let conll = ENIAM_CONLL.string_of_paths ENIAMsubsyntaxTypes.Mate tokens paths in
          print_endline "parse_sentence 2";
          (* printf "|%s|\n" conll; *)
          Printf.fprintf mate_out "%s%!" conll;
          print_endline "parse_sentence 3";
          let new_paths = get_paths paths (ENIAM_CONLL.load_sentence mate_in) in
          print_endline "parse_sentence 4";
          (* print_endline (Visualization.html_of_dep_sentence tokens new_paths); *)
          let result = conll_parse_sentence timeout test_only_flag id true new_paths tokens lex_sems in
          let result = {result with
            file_prefix = file_prefix_of_mode mode ^ file_prefix;
            paths=new_paths} in
          CONLLSentence result)
      | _ -> failwith "parse_sentence")
  | QuotedSentences sentences ->
      let sentences = Xlist.rev_map sentences (fun p ->
        let sentence = parse_sentence timeout test_only_flag mode id p.pfile_prefix tokens lex_sems p.psentence in
        {p with psentence=sentence}) in
      QuotedSentences(List.rev sentences)
  | AltSentence l ->
      let l = Xlist.rev_map l (fun (mode,sentence) ->
        mode, parse_sentence timeout test_only_flag mode id file_prefix tokens lex_sems sentence) in
      AltSentence(List.rev l)
  | _ -> failwith "parse_sentence"

let rec parse_paragraph timeout test_only_flag mode id tokens lex_sems = function
    RawParagraph s -> RawParagraph s
  | StructParagraph sentences ->
      let sentences = Xlist.rev_map sentences (fun p ->
        let sentence = parse_sentence timeout test_only_flag mode id p.pfile_prefix tokens lex_sems p.psentence in
        {p with psentence=sentence}) in
      StructParagraph(List.rev sentences)
  | AltParagraph l ->
      let l = Xlist.rev_map l (fun (mode,paragraph) ->
        mode, parse_paragraph timeout test_only_flag mode id tokens lex_sems paragraph) in
      AltParagraph(List.rev l)

let rec parse_text timeout test_only_flag mode id tokens lex_sems = function
    RawText s -> RawText s
  | StructText paragraphs ->
      let paragraphs = Xlist.rev_map paragraphs (fun paragraph ->
        parse_paragraph timeout test_only_flag mode id tokens lex_sems paragraph) in
      StructText(List.rev paragraphs)
  | AltText l -> AltText(Xlist.map l (fun (mode,text) ->
      mode, parse_text timeout test_only_flag mode id tokens lex_sems text))
let rec extract_query_text = function
    RawText s -> s
  | AltText l -> (try extract_query_text (Xlist.assoc l Raw) with Not_found -> failwith "extract_query_text")
  | _ -> failwith "extract_query_text"
let process_query pre_in pre_out timeout test_only_flag id full_query max_n =
  (* print_endline "process_query 0"; *)
  let result = {empty_result with input_text=translate_text (fst full_query)} in
  let time1 = time_fun () in
  (* print_endline "process_query 1"; *)
  Marshal.to_channel pre_out full_query [];
  flush pre_out;
  (* print_endline "process_query 2"; *)
  let pre_text,tokens,lex_sems,msg,pre_time1 = (Marshal.from_channel pre_in :
    ENIAMsubsyntaxTypes.text *
    ENIAMtokenizerTypes.token_record ExtArray.t *
    ENIAMlexSemanticsTypes.lex_sem ExtArray.t * string * float) in
  let time2 = time_fun () in
  let result = if test_only_flag then result else {result with pre_text=translate_text pre_text; tokens=tokens; lex_sems=lex_sems} in
  let result = {result with pre_time1=pre_time1; pre_time2=time2 -. time1} in
  if msg <> "" then {result with status=PreprocessingError; msg=msg} else (
  (* print_endline "process_query 3"; *)
  let parsed_text = parse_text timeout test_only_flag Struct id tokens lex_sems (translate_text pre_text) in
  (* print_endline "process_query 4"; *)
  let time3 = time_fun () in
  let result = if test_only_flag then result else {result with status=Parsed; parsed_text=parsed_text} in
  let result = {result with parse_time=time3 -. time2} in
  (* print_endline "process_query 5"; *)
  let selected_sent_text = parsed_text in
  (* print_endline "process_query 6"; *)
  let result = if test_only_flag then result else {result with status=Parsed; selected_sent_text=selected_sent_text} in
  (* let semantic_text = semantic_processing_text timeout test_only_flag tokens lex_sems max_n selected_sent_text in *)
  let semantic_text = RawText "" in
  (* print_endline "process_query 7"; *)
  let selected_semantic_text =
    if not Paths.config.Paths.sentence_selection_enabled then semantic_text
    else select_sentences_text semantic_text in
  (* print_endline "process_query 8"; *)
  let time4 = time_fun () in
  let result =
    if test_only_flag then result
    else {result with status=Parsed;
      semantic_text=semantic_text;
      selected_semantic_text=selected_semantic_text} in
  let result = {result with semantic_time=time4 -. time3} in
  result)
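(* process_conll_corpus loads a CONLL corpus, connects to the
   preprocessing server and runs process_query on each query, writing
   HTML visualisations of every processing stage to results/<id>/.
   Only the first sentence of the corpus is processed here
   (corpus = [List.hd corpus]), which is convenient when debugging a
   single test case.  At the end an empty RawText is sent to the server
   and the connection is shut down. *)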
let process_conll_corpus filename =
  let corpus = File.file_in filename (fun file -> CONLL.match_corpus (CONLL.load_corpus file)) in
  print_endline "process_conll_corpus";
  let corpus = [List.hd corpus] in
  let ic,oc = Unix.open_connection (get_sock_addr Paths.pre_host Paths.pre_port) in
  Xlist.iter corpus (fun query ->
    let id = process_id (get_query_id query) in
    let path = "results/" ^ id ^ "/" in
    ignore (Sys.command ("mkdir -p " ^ path));
    let result = process_query ic oc 30. false "x" query 10 in
    Visualization.print_html_text path "input_text" result.input_text;
    Visualization.print_html_text path "pre_text" result.pre_text;
    Visualization.print_html_text path "parsed_text" result.parsed_text;
    Visualization.print_html_text path "selected_sent_text" result.selected_sent_text;
    Visualization.print_html_text path "semantic_text" result.semantic_text;
    Visualization.print_html_text path "selected_semantic_text" result.selected_semantic_text;
    (* printf "input_text:\n%s\n" (Visualization.string_of_text result.input_text);
       printf "pre_text:\n%s\n" (Visualization.string_of_text result.pre_text); *)
    (* Exec.print_result stdout result; *)
    LCGfields.print_fields ["arole"] result.parsed_text;
    (* CompTrees.compare_results result.parsed_text; *)
    (* Visualization.print_paths "results/" "paths" result.paths; *)
    ());
  Marshal.to_channel oc (PreTypes.RawText "",ExtArray.make 1 ENIAMtokenizerTypes.empty_token) [];
  flush oc;
  let _ = Unix.shutdown_connection ic in
  ()

let _ =
  (* process_conll_corpus "../../NLP resources/Skladnica-zaleznosciowa-mod_130121.conll"; *)
  (* process_conll_corpus "../../NLP resources/skladnica_zaleznosciowa.conll"; *)
  process_conll_corpus "../testy/skladnica-test1.conll";
  ()