Commit 71ad2c03b8010f91302c3684dad4aad1e3df9264

Authored by Tomasz Bartosiak
2 parents 6df6f35d fa726329

Merge branch 'master' into tomek

Conflicts:
	semantics/static/js/semantics_roles.js
	semantics/views.py
Showing 55 changed files with 2253 additions and 2441 deletions
@@ -61,6 +61,8 @@ Dodaj w głównym folderze projektu plik konfiguracyjny database_data.py oraz zd
 Zainstaluj moduł psycopg2:
 >> sudo apt-get install python-psycopg2
 
+Zainstaluj Morfeusza2 zgodnie z instrukcjami na stronie http://sgjp.pl/morfeusz/dopobrania.html.
+
 Zgraj pliki statyczne do dedykowanego katalogu poleceniem:
 >> python manage.py collectstatic
 
@@ -74,8 +76,8 @@ Utwórz plik slowal.wsgi odpowiednio definiując w nim ścieżki do plików stat
 --------------------------------------------
 import os, sys
 
-sys.path.append('/home/zil/static')
-sys.path.append('/home/zil/static/Slowal')
+sys.path.append('/home/zil/Slowal')
+sys.path.append('/home/zil')
 os.environ['DJANGO_SETTINGS_MODULE'] = 'Slowal.settings'
 
 import django.core.handlers.wsgi
@@ -83,7 +85,7 @@ import django.core.handlers.wsgi
 application = django.core.handlers.wsgi.WSGIHandler()
 --------------------------------------------
 
-Skonfiguruj apacha dodając plik konfiguracyjny (np. o nazwie slowal.conf) do folderu sites-available apacha (domyślnie /etc/apache2/sites-enabled/), ścieżka WSGIScriptAlias musi wskazywać na plik slowal.wsgi. Przykładowy plik konfiguracyjny poniżej:
+Skonfiguruj apacha dodając plik konfiguracyjny (np. o nazwie slowal.conf) do folderu sites-available apacha (domyślnie /etc/apache2/sites-available/), ścieżka WSGIScriptAlias musi wskazywać na plik slowal.wsgi. Przykładowy plik konfiguracyjny poniżej:
 --------------------------------------------
 <VirtualHost *:80>
 	ServerAdmin bartek.niton@gmail.com
@@ -116,8 +118,6 @@ Uruchom stronę poleceniem:
 Zrestartuj apacha:
 >> sudo service apache2 restart
 
-Zainstaluj Morfeusza2 zgodnie z instrukcjami na stronie http://sgjp.pl/morfeusz/dopobrania.html.
-
 Ustaw w crontabie cykliczne uruchamianie komend create_walenty i count_positions_occurrences:
 1 0 * * 5 python /home/zil/Slowal/manage.py create_walenty
 0 1 * * * python /home/zil/Slowal/manage.py count_positions_occurrences
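For reference, the two WSGI hunks above combine into a slowal.wsgi along the following lines (a sketch assembled from the diff; the unchanged parts of the file are assumed to stay as before):
--------------------------------------------
import os, sys

# Paths as changed by this commit: the checkout lives in /home/zil/Slowal and
# its parent directory must also be on the module search path.
sys.path.append('/home/zil/Slowal')
sys.path.append('/home/zil')
os.environ['DJANGO_SETTINGS_MODULE'] = 'Slowal.settings'

import django.core.handlers.wsgi

application = django.core.handlers.wsgi.WSGIHandler()
--------------------------------------------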
accounts/models.py
@@ -43,7 +43,7 @@ class UserStats(Model):
     # oplaty za prace leksykograficzne
     bind_phraseology_frames_history = ManyToManyField('RealizedPhraseologyBinding', db_table='powiazania_frazeologiczne',
                                                       blank=True, null=True, related_name='user_stats')
-    # !NOWE! oplaty za prace semantyczne
+    # oplaty za prace semantyczne
     semantics_real_history = ManyToManyField('RealizedSemantics', db_table='prace_semantyczne',
                                              blank=True, null=True, related_name='user_stats')
     # kwota uiszczona
@@ -272,7 +272,6 @@ def get_anon_profile():
 def filtering_mode(user):
     return user.usersettings.filter_search
 
-# !NOWE!
 class RealizedSemantics(Model):
     """Model representing realized semantic work."""
     # wykonane haslo
@@ -288,15 +287,24 @@ class RealizedSemantics(Model):
     # wlasciwie wykonane ramki (wypelniane dla semantyka)
     prop_frames = PositiveIntegerField(db_column='poprawne_ramki',
                                        default=0)
+    # czesciowo wlasciwie wykonane ramki (wypelniane dla semantyka) !NOWE
+    part_prop_frames = PositiveIntegerField(db_column='czesciowo_poprawne_ramki',
+                                            default=0)
     # niewlasciwie wykonane ramki (wypelniane dla semantyka)
     wrong_frames = PositiveIntegerField(db_column='niepoprawne_ramki',
                                         default=0)
+    # dodane powiazania miedzy ramami i schematami !NOWE
+    added_connections = PositiveIntegerField(db_column='dodane_powiazania',
+                                             default=0)
     # wykonane ramki (wypelniane dla semantyka)
     made_frames = PositiveIntegerField(db_column='wykonane_ramki',
                                        default=0)
     # poprawione ramki (wypelniane dla supersemantyka)
     corr_frames = PositiveIntegerField(db_column='poprawione_ramki',
                                        default=0)
+    # czesciowo poprawione ramki (wypelniane dla supersemantyka) !NOWE
+    part_corr_frames = PositiveIntegerField(db_column='czesciowo_poprawione_ramki',
+                                            default=0)
     # ramki niepoprawiane (wypelniane dla supersemantyka)
     ncorr_frames = PositiveIntegerField(db_column='niepoprawione_ramki',
                                         default=0)
dictionary/ajax_jqgrid.py
@@ -11,26 +11,7 @@ def default_sort_rules():
             'semanticist': { 'priority': None, 'sort_order': 'desc'},
             'vocabulary': { 'priority': None, 'sort_order': 'desc'},
             'status': { 'priority': None, 'sort_order': 'desc'}}
-
-def default_filter_rules():
-    return { 'pos': None,
-             'owner': None,
-             'phraseologist': None,
-             'semanticist': None,
-             'vocabulary': None,
-             'status': None,
-             'example_source': None,
-             'approver': None,
-             'reflex': None,
-             'negativity': None,
-             'predicativity': None,
-             'aspect': None,
-             'argument': '.*',
-             'position': '.*',
-             'frame_opinion' : None,
-             'sender': None,
-             'frame_phraseologic': False}
-
+
 class JqGridAjax(object):
     model = None
     search_field = None
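default_filter_rules is only removed from this module: the import changes in dictionary/ajax_lemma_view.py further down pull it, together with the other filtering helpers, from a dictionary.filtering module. A rough sketch of that module's presumed surface, inferred from those imports (signatures only, not the actual implementation):
--------------------------------------------
# dictionary/filtering.py (presumed surface; bodies omitted)

def default_filter_rules():
    # the dictionary deleted above, relocated
    ...

def prepare_filter_form(request):
    # builds the FilterForm that filter_form() used to assemble inline
    ...

def save_lemma_filters_and_get_schemata_filter_setup(request, filter_dict):
    # stores the chosen filters in the session and returns the schema filter values
    ...

def schemata_filter_options():
    # aspect / reflexivity / negativity / predicativity / opinion option lists
    ...

def filter_lemmas(lemmas, filter_rules, user):
    # the lemma-level filtering that get_lemma_query() used to do inline
    ...
--------------------------------------------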
dictionary/ajax_lemma_status.py
 # -*- coding: utf-8 -*-
 
-#Copyright (c) 2012, Bartłomiej Nitoń
-#All rights reserved.
-
-#Redistribution and use in source and binary forms, with or without modification, are permitted provided
-#that the following conditions are met:
-
-# Redistributions of source code must retain the above copyright notice, this list of conditions and
-# the following disclaimer.
-# Redistributions in binary form must reproduce the above copyright notice, this list of conditions
-# and the following disclaimer in the documentation and/or other materials provided with the distribution.
-
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
-# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
-# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
-# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
-# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
-# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-# POSSIBILITY OF SUCH DAMAGE.
-
 import operator
 
 from django.db.models import Q
@@ -36,9 +16,7 @@ from semantics.utils import get_frames_differences
 
 @render('lemma_status.html')
 @ajax(method='get', encode_result=False)
-def get_lemma_status(request, id):
-    if request.session.has_key('lemma_from_note_id') and request.session['lemma_from_note_id']:
-        id = request.session['lemma_from_note_id']
+def get_lemma_status(request, id):
     selected_lemma = Lemma.objects.get(id=id)
     abort_status = None
     next_statuses = []
@@ -254,10 +232,20 @@ def lemma_status_change(request, status_id, lemma_id):
         checked_frame_value = 0.0
         corrected_frame_value = 0.0
         bonus = 4.0
+        part_bonus = 2.0
+        connection_bonus = 0.1
         ### naliczanie oplat za sprawdzenie i bonusow
-        update_sem_stats_conf_s(lemma_obj.entry_obj, actual_semantic_frames,
-                                lemma_obj.semanticist, request.user, new_status,
-                                checked_frame_value, corrected_frame_value, bonus)
+        update_sem_stats_conf_s(entry=lemma_obj.entry_obj,
+                                semantic_frames=actual_semantic_frames,
+                                semanticist=lemma_obj.semanticist,
+                                supersemanticist=request.user,
+                                status=new_status,
+                                checked_frame_value=checked_frame_value,
+                                corrected_frame_value=corrected_frame_value,
+                                bonus_factor=bonus,
+                                part_bonus_factor=part_bonus,
+                                connection_bonus=connection_bonus)
+
         add_new_frames_to_phraseologic_propositions(lemma_obj)
         changed = True
     # zmiana statusu na w obrobce semantycznej
@@ -496,7 +484,8 @@ def update_sem_stats_ready_s(entry, semantic_frames, semanticist, status, frame_
     semanticist.user_stats.semantics_real_history.add(realized_semantics)
 
 def update_sem_stats_conf_s(entry, semantic_frames, semanticist, supersemanticist, status,
-                            checked_frame_value, corrected_frame_value, bonus_factor):
+                            checked_frame_value, corrected_frame_value,
+                            bonus_factor, part_bonus_factor, connection_bonus):
     ready_statuses = Lemma_Status.objects.filter(type__sym_name='ready_s')
     q_ready_statuses = [Q(status=ready_status) for ready_status in ready_statuses.all()]
 
@@ -505,17 +494,28 @@ def update_sem_stats_conf_s(entry, semantic_frames, semanticist, supersemanticis
     checked_sem_frames = semantic_frames
     ready_to_checked_diffs = get_frames_differences(ready_sem_frames.all(), checked_sem_frames.all())
     checked_to_ready_diffs = get_frames_differences(checked_sem_frames.all(), ready_sem_frames.all())
-    sem_dict = {'same_frames': len(ready_to_checked_diffs['matching_frames']),
+
+    connections_amount = count_connections(ready_to_checked_diffs)
+    sem_cash = (bonus_factor*float(len(ready_to_checked_diffs['matching_frames'])) +
+                part_bonus_factor*float(len(ready_to_checked_diffs['part_matching_frames'])) +
+                connection_bonus*float(connections_amount))
+    sem_dict = {'same_frames': len(ready_to_checked_diffs['matching_frames']),
+                'part_same_frames': len(ready_to_checked_diffs['part_matching_frames']),
                 'wrong_frames': len(ready_to_checked_diffs['missing_frames']),
-                'cash': bonus_factor*float(len(ready_to_checked_diffs['matching_frames']))}
+                'added_connections': connections_amount,
+                'cash': sem_cash}
+
+    supersem_cash = (float(len(checked_to_ready_diffs['missing_frames'])+len(checked_to_ready_diffs['part_matching_frames']))*corrected_frame_value +
+                     float(len(ready_to_checked_diffs['matching_frames']))*checked_frame_value)
     supersem_dict = {'same_frames': len(checked_to_ready_diffs['matching_frames']),
+                     'part_same_frames': len(checked_to_ready_diffs['part_matching_frames']),
                      'redo_frames': len(checked_to_ready_diffs['missing_frames']),
-                     'cash': (float(len(checked_to_ready_diffs['missing_frames']))*corrected_frame_value+
-                              float(len(ready_to_checked_diffs['matching_frames']))*checked_frame_value)}
+                     'cash': supersem_cash}
 
     supersem_real_semantics = RealizedSemantics(entry=entry,
                                                 cash=supersem_dict['cash'],
                                                 corr_frames=supersem_dict['redo_frames'],
+                                                part_corr_frames=supersem_dict['part_same_frames'],
                                                 ncorr_frames=supersem_dict['same_frames'],
                                                 status=status,
                                                 bonus=False)
@@ -526,12 +526,22 @@ def update_sem_stats_conf_s(entry, semantic_frames, semanticist, supersemanticis
     sem_real_semantics = RealizedSemantics(entry=entry,
                                            cash=sem_dict['cash'],
                                            prop_frames=sem_dict['same_frames'],
+                                           part_prop_frames=sem_dict['part_same_frames'],
                                            wrong_frames=sem_dict['wrong_frames'],
+                                           added_connections=sem_dict['added_connections'],
                                            status=status,
                                            bonus=True)
     sem_real_semantics.save()
     sem_real_semantics.frames.add(*semantic_frames.all())
     semanticist.user_stats.semantics_real_history.add(sem_real_semantics)
-
+
+def count_connections(differences):
+    amount = 0
+    for frame in differences['matching_frames']:
+        amount += frame.connected_schemata().count()
+    for frame in differences['part_matching_frames']:
+        amount += frame.connected_schemata().count()
+    return amount
+
 def remove_semantic_payments(entry):
     RealizedSemantics.objects.filter(entry=entry).delete()
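To make the new fee arithmetic concrete, a small worked example; the factor values are the ones set in lemma_status_change above, while the frame and connection counts are invented:
--------------------------------------------
# Worked example of the semanticist's bonus (counts are hypothetical).
matching_frames = 3        # frames confirmed unchanged
part_matching_frames = 2   # frames confirmed after partial corrections
connections = 14           # frame-schema connections counted by count_connections()

bonus_factor = 4.0
part_bonus_factor = 2.0
connection_bonus = 0.1

sem_cash = (bonus_factor*matching_frames +
            part_bonus_factor*part_matching_frames +
            connection_bonus*connections)
# 3*4.0 + 2*2.0 + 14*0.1 = 17.4 credited to the semanticist
--------------------------------------------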
dictionary/ajax_lemma_view.py
 # -*- coding: utf-8 -*-
 
-#Copyright (c) 2012, Bartłomiej Nitoń
-#All rights reserved.
-
-#Redistribution and use in source and binary forms, with or without modification, are permitted provided
-#that the following conditions are met:
-
-# Redistributions of source code must retain the above copyright notice, this list of conditions and
-# the following disclaimer.
-# Redistributions in binary form must reproduce the above copyright notice, this list of conditions
-# and the following disclaimer in the documentation and/or other materials provided with the distribution.
-
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
-# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
-# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
-# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
-# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
-# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-# POSSIBILITY OF SUCH DAMAGE.
-
 import math
 import copy
 import re
@@ -32,7 +12,11 @@ from django.contrib.auth.models import User, Group
 from django.core import serializers
 
 from common.js_to_obj import frameObjToSerializableDict, jsArgToObj, jsFrameToObj, jsPosToObj
+from dictionary.common_func import escape_regex
 from dictionary.convert_frames import frame_conversion
+from dictionary.filtering import default_filter_rules, prepare_filter_form, \
+                                 save_lemma_filters_and_get_schemata_filter_setup, \
+                                 schemata_filter_options
 from dictionary.models import Vocabulary, Lemma, Lemma_Status, Frame_Opinion, \
                               Frame_Opinion_Value, Frame, NKJP_Example, NKJP_ArgSelection, \
                               NKJP_Source, NKJP_Opinion, Position, \
@@ -44,9 +28,9 @@ from dictionary.models import Vocabulary, Lemma, Lemma_Status, Frame_Opinion, \
                               sorted_default_frame_char_vals, XcpExample, \
                               POS, get_frame_char_and_its_value, get_frame_char_by_type_and_value_pk, \
                               sortFrameChars, sortArguments, sortPositions, \
-                              get_or_create_position, get_phraseologic_frames_only, pos_compatible
+                              get_or_create_position, get_schemata_by_type, pos_compatible
 from dictionary.forms import AddPositionForm, FrameForm, Pos_Cat_Form, \
-                             AddNkjpExampleForm, MessageForm, SortForm, FilterForm, \
+                             AddNkjpExampleForm, MessageForm, SortForm, \
                              SimilarLemmasNewForm, ChangeUserFunctionForm, \
                              ExampleOpinionForm, \
                              FrameConversionForm, CreatePositionForm, AssignPhraseologicFrameForm
@@ -56,13 +40,15 @@ from dictionary.saving import connect_example_operation, disconnect_all_examples
 
 from common.decorators import render, ajax, AjaxError
 from common.util import triple_arg_poss
+from dictionary.filtering import filter_lemmas
 from dictionary.validation import find_similar_frames, get_all_test_missing_frames, get_aspect_rel_lemmas, \
                                   get_wrong_aspect_frames, validate_B_frames, get_deriv_miss_frames_message, \
                                   validate_phraseology_binded_frames, validate_rule_5, \
                                   validate_examples_and_mark_errors, validate_schemas_and_mark_errors, \
+                                  validate_schemata_for_semantics_and_mark_errors, \
                                   get_missing_aspects_msg, validate_same_positions_schemata
 
-from semantics.models import LexicalUnitExamples
+from semantics.models import LexicalUnitExamples, SemanticFrame
 
 from wordnet.models import LexicalUnit
 
@@ -77,13 +63,11 @@ from django.core.validators import email_re
 from accounts.models import UserSettings, UserStats, RealizedPhraseologyBinding, \
                             can_modify_phraseology_only, get_anon_profile
 
-from ajax_jqgrid import JqGridAjax, default_sort_rules, default_filter_rules
+from ajax_jqgrid import JqGridAjax, default_sort_rules
 
 import locale
 locale.setlocale(locale.LC_ALL, 'pl_PL.UTF-8')
 
-import HTMLParser
-
 DEFAULT_SAVE_PATH = os.path.join(PROJECT_PATH, 'tmp')
 
 def reverse(string):
@@ -165,7 +149,7 @@ def prepareFrameTable(frame):
 
 def nkjpExamplesObjToJs(nkjp_examples, user, lemma):
     example_dict_list = []
-    lexical_units = lemma.entry_obj.lexical_units()
+    lexical_units = lemma.entry_obj.meanings.all()
     for example in nkjp_examples:
         frame = example.frame;
         frame_table_id = 'frame_'+str(frame.id)+'_'
@@ -178,18 +162,14 @@ def nkjpExamplesObjToJs(nkjp_examples, user, lemma):
 
         confirmed = True
         approvers_count = lemma.entry_obj.pos.example_approvers_num
-        #Configuration.objects.get(selected_conf=True).example_approvers_num
         if example.source.confirmation_required and example.approvers.count() < approvers_count:
             try:
                 example.approvers.get(username=user.username)
             except:
-                confirmed = False
-
-        sentence = example.sentence.replace('\\', '\\\\').replace("\n", "\\n").replace("\r", "\\r").replace("\t", "\\t").replace("\"", "\\\"")
-        comment = example.comment.replace('\\', '\\\\').replace("\n", "\\n").replace("\r", "\\r").replace("\t", "\\t").replace("\"", "\\\"")
-        h = HTMLParser.HTMLParser()
-        sentence = h.unescape(sentence)
-        comment = h.unescape(comment)
+                confirmed = False
+
+        sentence = example.sentence
+        comment = example.comment
 
         example_dict = { 'example_id' : example.id,
                          'frame_id' : frame_table_id,
@@ -219,18 +199,19 @@ def nkjpLemmaExamplesObjToJs(nkjp_examples, user, lemma):
     for example in nkjp_examples:
         confirmed = True
         approvers_count = lemma.entry_obj.pos.example_approvers_num
-        #Configuration.objects.get(selected_conf=True).example_approvers_num
         if example.source.confirmation_required and example.approvers.count() < approvers_count:
             try:
                 example.approvers.get(username=user.username)
             except:
                 confirmed = False
 
-        sentence = example.sentence.replace('\\', '\\\\').replace("\n", "\\n").replace("\r", "\\r").replace("\t", "\\t").replace("\"", "\\\"")
-        comment = example.comment.replace('\\', '\\\\').replace("\n", "\\n").replace("\r", "\\r").replace("\t", "\\t").replace("\"", "\\\"")
-        h = HTMLParser.HTMLParser()
-        sentence = h.unescape(sentence)
-        comment = h.unescape(comment)
+        sentence = example.sentence
+        comment = example.comment
+#        sentence = example.sentence.replace('\\', '\\\\').replace("\n", "\\n").replace("\r", "\\r").replace("\t", "\\t").replace("\"", "\\\"")
+#        comment = example.comment.replace('\\', '\\\\').replace("\n", "\\n").replace("\r", "\\r").replace("\t", "\\t").replace("\"", "\\\"")
+#        h = HTMLParser.HTMLParser()
+#        sentence = h.unescape(sentence)
+#        comment = h.unescape(comment)
 
         example_dict = { 'example_id' : example.id,
                          'frame_id' : '',
@@ -249,8 +230,6 @@ def nkjpLemmaExamplesObjToJs(nkjp_examples, user, lemma):
 @render('old_frames.html')
 @ajax(method='get', encode_result=False)
 def get_old_frames(request, id):
-    if request.session.has_key('lemma_from_note_id') and request.session['lemma_from_note_id']:
-        id = request.session['lemma_from_note_id']
     selected_lemma = Lemma.objects.get(id=id)
     old_frames = selected_lemma.old_frames
     reflexed_frames = []
@@ -299,13 +278,7 @@ def get_arg_id(request, text_rep):
 @ajax(method='get', encode_result=False)
 def get_lemma_preview(request, id, main_lemma_id):
     selected_lemma = Lemma.objects.get(id=id)
-    new_frames = selected_lemma.frames.order_by('text_rep')
-
-    serialized_frames = []
-    for frame in new_frames:
-        serialized_frames.append(frameObjToSerializableDict(selected_lemma, frame))
-    json_frames = json_encode(serialized_frames)
-
+
     frame_char_models = Frame_Char_Model.objects.order_by('priority')
     frame_char_prior_model_vals = frame_char_models[0].frame_char_values.all()
 
@@ -315,51 +288,15 @@ def get_lemma_preview(request, id, main_lemma_id):
 
     json_frame_char_list = json_encode(frame_char_list)
 
-    # konwertowanie przykladow na zrozumiale przez java sript
-    nkjp_examples = selected_lemma.nkjp_examples.all()
-    nkjp_examples_js = nkjpExamplesObjToJs(nkjp_examples, request.user, selected_lemma)
-
-    json_nkjp_examples = json_encode(nkjp_examples_js)
-
     similarLemmasNewForm = SimilarLemmasNewForm(statuses=Lemma_Status.objects.order_by('priority'))
 
-    return {'serialized_frames': json_frames,
-            'frame_char_list': json_frame_char_list,
-            'nkjp_examples': json_nkjp_examples,
+    return {'frame_char_list': json_frame_char_list,
            'selected_lemma': selected_lemma,
            'similarLemmasNewForm': similarLemmasNewForm}
 
 @ajax(method='get')
 def get_frame_filter_options(request):
-    # pobieranie wartosci aspektu
-    aspect_model = Frame_Char_Model.objects.get(model_name=u'ASPEKT')
-    aspect_vals_objs = aspect_model.frame_char_values.order_by('-priority')
-    aspect_str_list = [val.value for val in aspect_vals_objs]
-
-    # pobieranie wartosci zwrotnosci
-    reflex_model = Frame_Char_Model.objects.get(model_name=u'ZWROTNOŚĆ')
-    reflex_vals_objs = reflex_model.frame_char_values.order_by('-priority')
-    reflex_str_list = [val.value for val in reflex_vals_objs]
-
-    # pobieranie wartosci negatywnosci
-    neg_model = Frame_Char_Model.objects.get(model_name=u'NEGATYWNOŚĆ')
-    neg_vals_objs = neg_model.frame_char_values.order_by('-priority')
-    neg_str_list = [val.value for val in neg_vals_objs]
-
-    # pobieranie wartosci predykatywnosci
-    pred_model = Frame_Char_Model.objects.get(model_name=u'PREDYKATYWNOŚĆ')
-    pred_vals_objs = pred_model.frame_char_values.order_by('-priority')
-    pred_str_list = [val.value for val in pred_vals_objs]
-
-    # pobieranie opinii o schemacie
-    opinion_str_list = [val.value for val in Frame_Opinion_Value.objects.order_by('priority')]
-
-    return {'reflex_options': reflex_str_list,
-            'aspect_options': aspect_str_list,
-            'neg_options': neg_str_list,
-            'pred_options': pred_str_list,
-            'opinion_options': opinion_str_list}
-
+    return schemata_filter_options()
 
 @render('lemma_desc.html')
 @ajax(method='get', encode_result=False)
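get_frame_filter_options now simply delegates to dictionary.filtering. Judging from the body deleted above, the relocated helper presumably looks roughly like this (a sketch, not the actual implementation):
--------------------------------------------
# dictionary/filtering.py (sketch reconstructed from the deleted body above)
from dictionary.models import Frame_Char_Model, Frame_Opinion_Value

def schemata_filter_options():
    def char_values(model_name):
        # option list for one schema characteristic, highest priority first
        model = Frame_Char_Model.objects.get(model_name=model_name)
        return [val.value for val in model.frame_char_values.order_by('-priority')]

    return {'reflex_options': char_values(u'ZWROTNOŚĆ'),
            'aspect_options': char_values(u'ASPEKT'),
            'neg_options': char_values(u'NEGATYWNOŚĆ'),
            'pred_options': char_values(u'PREDYKATYWNOŚĆ'),
            'opinion_options': [val.value for val in
                                Frame_Opinion_Value.objects.order_by('priority')]}
--------------------------------------------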
@@ -376,44 +313,14 @@ def get_lemma_desc(request, id):
 @render('new_frames.html')
 @ajax(method='get', encode_result=False)
 def get_new_frames(request, id):
-    if request.session.has_key('lemma_from_note_id') and request.session['lemma_from_note_id']:
-        id = request.session['lemma_from_note_id']
-
-    selected_lemma = Lemma.objects.get(id=id)
-    new_frames = selected_lemma.frames.order_by('text_rep')
-
-# sprawdz czy uzytkownik jest wlascicielem wybranego hasla
-    can_modify = user_can_modify(selected_lemma, request.user)
-
-    serialized_frames = []
-    for frame in new_frames:
-        serialized_frames.append(frameObjToSerializableDict(selected_lemma, frame, True))
-    json_frames = json_encode(serialized_frames)
-
-    # konwertowanie przykladow na zrozumiale przez java sript
-    nkjp_examples = selected_lemma.nkjp_examples.order_by('source__priority',
-                                                          'opinion__priority',
-                                                          'sentence')
-    nkjp_examples_js = nkjpExamplesObjToJs(nkjp_examples, request.user, selected_lemma)
-
-    json_nkjp_examples = json_encode(nkjp_examples_js)
-
-    add_nkjp_form = AddNkjpExampleForm()
-
-    lemma_nkjp_examples = selected_lemma.lemma_nkjp_examples.order_by('source__priority',
-                                                                      'opinion__priority',
-                                                                      'sentence')
-    lemma_nkjp_examples_js = nkjpLemmaExamplesObjToJs(lemma_nkjp_examples, request.user, selected_lemma)
-    json_lemma_nkjp_examples = json_encode(lemma_nkjp_examples_js)
-
-    return {'serialized_frames': json_frames,
-            'add_nkjp_form': add_nkjp_form,
-            'nkjp_examples': json_nkjp_examples,
-            'can_modify': can_modify,
-            'selected_lemma': selected_lemma,
-            'lemma_nkjp_examples': json_lemma_nkjp_examples,
-            'skladnica_examples': selected_lemma.skladnica_frames.exists(),
-            'xcp_examples': selected_lemma.entry_obj.xcp_examples.exists()}
+    selected_lemma = Lemma.objects.get(id=id)
+    can_modify = user_can_modify(selected_lemma, request.user)
+    add_nkjp_form = AddNkjpExampleForm()
+    return {'add_nkjp_form': add_nkjp_form,
+            'can_modify': can_modify,
+            'selected_lemma': selected_lemma,
+            'skladnica_examples': selected_lemma.skladnica_frames.exists(),
+            'xcp_examples': selected_lemma.entry_obj.xcp_examples.exists()}
 
 
 @ajax(method='get', encode_result=True)
@@ -427,72 +334,26 @@ def get_ctrl_preview(request, id):
 
 @render('lemma_examples.html')
 @ajax(method='get', encode_result=False)
-def get_lemma_examples(request, id):
-    if request.session.has_key('lemma_from_note_id') and request.session['lemma_from_note_id']:
-        id = request.session['lemma_from_note_id']
+def get_lemma_examples(request, id):
     selected_lemma = Lemma.objects.get(id=id)
-    lemma_nkjp_examples = selected_lemma.lemma_nkjp_examples.order_by('source__priority',
-                                                                      'opinion__priority',
-                                                                      'sentence')
-    lemma_nkjp_examples_js = nkjpLemmaExamplesObjToJs(lemma_nkjp_examples, request.user, selected_lemma)
+#    lemma_nkjp_examples = selected_lemma.lemma_nkjp_examples.order_by('source__priority',
+#                                                                      'opinion__priority',
+#                                                                      'sentence')
+#    lemma_nkjp_examples_js = nkjpLemmaExamplesObjToJs(lemma_nkjp_examples, request.user, selected_lemma)
     # sprawdz czy uzytkownik jest wlascicielem wybranego hasla
     can_modify = user_can_modify(selected_lemma, request.user)
-    json_lemma_nkjp_examples = json_encode(lemma_nkjp_examples_js)
+    #json_lemma_nkjp_examples = json_encode(lemma_nkjp_examples_js)
    add_nkjp_form = AddNkjpExampleForm()
 
    return {'add_nkjp_form': add_nkjp_form,
-            'lemma_nkjp_examples': json_lemma_nkjp_examples,
+            #'lemma_nkjp_examples': json_lemma_nkjp_examples,
            'can_modify': can_modify}
 
-############################ semantics ###################################
-@ajax(method='get')
-def schema_got_assigned_semantics(request, lemma_id, schema_id):
-    lemma = Lemma.objects.get(id=lemma_id)
-# sprawdz czy najnowsza wersja
-#    if lemma.old:
-#        raise AjaxError('old lemma')
-    frames = lemma.entry_obj.actual_frames() # jeszcze po przykladach trzeba sprawdzac
-    schema_examples = lemma.nkjp_examples.filter(frame__id=schema_id)
-    for frame in frames.all():
-        if complements_pinned(frame, schema_id) or examples_pinned(frame, schema_examples):
-            return {'got_assigned_semantics': True}
-    return {'got_assigned_semantics': False}
-
-def complements_pinned(frame, schema_id):
-    if frame.complements.filter(realizations__frame__id=schema_id).exists():
-        return True
-    return False
-
-def examples_pinned(frame, schema_examples):
-    for lexical_unit in frame.lexical_units.all():
-        for example in schema_examples.all():
-            if LexicalUnitExamples.objects.filter(lexical_unit=lexical_unit,
-                                                  example=example).exists():
-                return True
-    return False
-
-@ajax(method='get')
-def example_got_assigned_semantics(request, lemma_id, example_id):
-    lemma = Lemma.objects.get(id=lemma_id)
-# sprawdz czy najnowsza wersja
-#    if lemma.old:
-#        raise AjaxError('old lemma')
-    # TODO niech lexical units beda wybierane z uzyciem Entry jak juz Tomasz to zrobi
-    lexical_units = LexicalUnit.objects.filter(Q(base = lemma.entry)|Q(base = lemma.entry + u' się'))
-    for lu in lexical_units:
-        if LexicalUnitExamples.objects.filter(example__id=example_id,
-                                              lexical_unit=lu).exists():
-            return {'got_assigned_semantics': True}
-    return {'got_assigned_semantics': False}
-
 ############################ lemma notes #################################
 
 @render('lemma_notes.html')
 @ajax(method='get', encode_result=False)
 def get_lemma_notes(request, id):
-    if request.session.has_key('lemma_from_note_id') and request.session['lemma_from_note_id']:
-        id = request.session['lemma_from_note_id']
-        request.session['lemma_from_note_id'] = ''
     selected_lemma = Lemma.objects.get(id=id)
     add_note_form = MessageForm()
     messages = selected_lemma.messages.filter(private=False).order_by('-time')
@@ -672,9 +533,7 @@ def restore_lemma(request, change_id, lemma_id):
 
 @render('change_ctrl.html')
 @ajax(method='get', encode_result=False)
-def get_change_ctrl(request, id):
-    if request.session.has_key('lemma_from_note_id') and request.session['lemma_from_note_id']:
-        id = request.session['lemma_from_note_id']
+def get_change_ctrl(request, id):
     selected_lemma = Lemma.objects.get(id=id)
     old_versions = selected_lemma.old_versions.order_by('-time')
     can_modify = (user_can_modify(selected_lemma, request.user) and
@@ -844,176 +703,14 @@ def get_sort_order(request):
 
 @render('filter_form.html')
 @ajax(method='get', encode_result=False)
-def filter_form(request):
-    if request.session.has_key('lemma_preview') and request.session['lemma_preview']:
-        if not request.session.has_key('filter_rules_lemma_preview'):
-            request.session['filter_rules_lemma_preview'] = default_filter_rules()
-        filter_rules = request.session['filter_rules_lemma_preview']
-    else:
-        if not request.session.has_key('filter_rules'):
-            request.session['filter_rules'] = default_filter_rules()
-        filter_rules = request.session['filter_rules']
-
-    users = User.objects.none()
-    phraseologists = User.objects.none()
-    semanticists = User.objects.none()
-    vocabularies = Vocabulary.objects.none()
-    senders = User.objects.none()
-    statuses = get_anon_profile().visible_statuses.all()
-    can_confirm_example = False
-    if request.user.is_authenticated():
-        users = User.objects.filter(lemmas__old=False).distinct().order_by('username')
-        phraseologists = User.objects.filter(phraseologist_lemmas__old=False).distinct().order_by('username')
-        semanticists = User.objects.filter(semanticist_lemmas__old=False).distinct().order_by('username')
-        vocabularies = request.user.visible_vocabularies.all()
-        senders = User.objects.order_by('groups__group_settings__priority')
-        statuses = Lemma_Status.objects.all()
-        if request.user.has_perm('dictionary.confirm_example') or request.user.is_superuser:
-            can_confirm_example = True
-
-    form = FilterForm(users=users,
-                      phraseologists=phraseologists,
-                      semanticists=semanticists,
-                      vocabularies=vocabularies,
-                      senders=senders,
-                      statuses=statuses,
-                      sel_pos=filter_rules['pos'],
-                      sel_user=filter_rules['owner'],
-                      sel_phraseologist=filter_rules['phraseologist'],
-                      sel_semanticist=filter_rules['semanticist'],
-                      sel_vocabulary=filter_rules['vocabulary'],
-                      sel_status=filter_rules['status'],
-                      sel_reflex=filter_rules['reflex'],
-                      sel_negativity=filter_rules['negativity'],
-                      sel_predicativity=filter_rules['predicativity'],
-                      sel_aspect=filter_rules['aspect'],
-                      sel_has_argument=filter_rules['argument'],
-                      sel_has_position=filter_rules['position'],
-                      sel_frame_opinion=filter_rules['frame_opinion'],
-                      can_confirm_example = can_confirm_example,
-                      sel_example_source=filter_rules['example_source'],
-                      sel_approver=filter_rules['approver'],
-                      sel_sender=filter_rules['sender'],
-                      frame_phraseologic=filter_rules['frame_phraseologic'])
-    return {'form': form}
+def filter_form(request):
+    return prepare_filter_form(request)
 
 @ajax(method='post')
 def filter_form_submit(request, form_data):
-    filter_dict = dict((x['name'], x['value']) for x in form_data)
-
-    if filter_dict['pos']:
-        pos_obj = POS.objects.get(id=filter_dict['pos'])
-    else:
-        pos_obj = None
-
-    if filter_dict['owner']:
-        owner_obj = User.objects.get(id=filter_dict['owner'])
-    else:
-        owner_obj = None
-
-    if filter_dict['phraseologist']:
-        phraseologist_obj = User.objects.get(id=filter_dict['phraseologist'])
-    else:
-        phraseologist_obj = None
-
-    if filter_dict['semanticist']:
-        semanticist_obj = User.objects.get(id=filter_dict['semanticist'])
-    else:
-        semanticist_obj = None
-
-    if filter_dict['vocabulary']:
-        vocabulary_obj = Vocabulary.objects.get(name=filter_dict['vocabulary'])
-    else:
-        vocabulary_obj = None
-
-    if filter_dict['status']:
-        status_obj = Lemma_Status.objects.get(id=filter_dict['status'])
-    else:
-        status_obj = None
-
-    if filter_dict['example_source']:
-        nkjp_source_obj = NKJP_Source.objects.get(id=filter_dict['example_source'])
-    else:
-        nkjp_source_obj = None
-
-    if filter_dict['approver']:
-        approver_obj = User.objects.get(id=filter_dict['approver'])
-    else:
-        approver_obj = None
-
-    if filter_dict['has_message_from']:
-        try:
-            sender_obj = User.objects.get(pk=filter_dict['has_message_from'])
-        except User.DoesNotExist:
-            sender_obj = None
-    else:
-        sender_obj = None
-
-    reflex_obj, reflex_val = get_frame_char_and_its_value(filter_dict['reflex'], '*')
-    negativity_obj, negativity_val = get_frame_char_and_its_value(filter_dict['negativity'], '*')
-    aspect_obj, aspect_val = get_frame_char_and_its_value(filter_dict['aspect'], '*')
-    pred_obj, pred_val = get_frame_char_and_its_value(filter_dict['predicativity'], '*')
-
-    argument = filter_dict['has_argument']
-    position = filter_dict['has_position']
-
-    if filter_dict['frame_opinion']:
-        frame_opinion_obj = Frame_Opinion_Value.objects.get(id=filter_dict['frame_opinion'])
-        opinion_val = frame_opinion_obj.value
-    else:
-        frame_opinion_obj = None
-        opinion_val = '*'
-
-    if 'frame_phraseologic' in filter_dict:
-        frame_phraseologic = filter_dict['frame_phraseologic']
-    else:
-        frame_phraseologic = False
-
-    if request.session.has_key('lemma_preview') and request.session['lemma_preview']:
-        request.session['filter_rules_lemma_preview'] = {'pos' : pos_obj,
-                                                         'owner' : owner_obj,
-                                                         'phraseologist' : phraseologist_obj,
-                                                         'semanticist' : semanticist_obj,
-                                                         'vocabulary' : vocabulary_obj,
-                                                         'status' : status_obj,
-                                                         'example_source' : nkjp_source_obj,
-                                                         'approver' : approver_obj,
-                                                         'reflex' : reflex_obj,
-                                                         'negativity' : negativity_obj,
-                                                         'predicativity' : pred_obj,
-                                                         'aspect' : aspect_obj,
-                                                         'argument' : argument,
-                                                         'position' : position,
-                                                         'frame_opinion' : frame_opinion_obj,
-                                                         'sender' : sender_obj,
-                                                         'frame_phraseologic' : frame_phraseologic}
-    else:
-        request.session['filter_rules'] = {'pos' : pos_obj,
-                                           'owner' : owner_obj,
-                                           'phraseologist' : phraseologist_obj,
-                                           'semanticist' : semanticist_obj,
-                                           'vocabulary' : vocabulary_obj,
-                                           'status' : status_obj,
-                                           'example_source' : nkjp_source_obj,
-                                           'approver' : approver_obj,
-                                           'reflex' : reflex_obj,
-                                           'negativity' : negativity_obj,
-                                           'predicativity' : pred_obj,
-                                           'aspect' : aspect_obj,
-                                           'argument' : argument,
-                                           'position' : position,
-                                           'frame_opinion' : frame_opinion_obj,
-                                           'sender' : sender_obj,
-                                           'frame_phraseologic' : frame_phraseologic}
-
-    return {'filter_frames': filter_dict['filter_frames'],
-            'reflex' : reflex_val,
-            'negativity' : negativity_val,
-            'predicativity': pred_val,
-            'opinion' : opinion_val,
-            'aspect' : aspect_val,
-            'position' : filter_dict['has_position'],
-            'argument' : filter_dict['has_argument']}
+    filter_dict = dict((x['name'], x['value']) for x in form_data)
+    schemata_filter_options = save_lemma_filters_and_get_schemata_filter_setup(request, filter_dict)
+    return schemata_filter_options
 
 @ajax(method='post')
 def save_columns(request, col_model, col_names, remap):
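filter_form_submit keeps only the decoding of the submitted form; session handling and the response now live in save_lemma_filters_and_get_schemata_filter_setup. Going by the dictionary the old code returned, its result presumably has this shape (all values below are placeholders; '*' and '.*' were the old defaults):
--------------------------------------------
# Hypothetical example of the structure handed back to the schema filter row;
# the keys mirror the dict returned by the old filter_form_submit.
schemata_filter_setup = {'filter_frames': True,
                         'reflex': u'*',
                         'negativity': u'*',
                         'predicativity': u'*',
                         'opinion': u'*',
                         'aspect': u'*',
                         'position': '.*',
                         'argument': '.*'}
--------------------------------------------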
@@ -1572,10 +1269,7 @@ def get_default_char_value(possibilities):
         value = possibilities.get(default=True)
     except Frame_Char_Value.DoesNotExist:
         value = possibilities.all()[0]
-    return value
-
-def escape_regex(string):
-    return string.replace('(', '\(').replace(')', '\)').replace('{', '\{').replace('}', '\}').replace('[', '\[').replace(']', '\]')
+    return value
 
 def regex_query(string):
     q_query = []
@@ -2180,7 +1874,7 @@ def get_frame_chars(request, frame_id):
 @ajax(method='get', encode_result=False)
 def xcp_example_propositions(request, frame, argument_ids, lemma_id):
     propositions = []
-    lemma_obj = Lemma.objects.get(old=False, id=lemma_id)
+    lemma_obj = Lemma.objects.get(id=lemma_id)
     entry = lemma_obj.entry_obj
     # TODO: zlikwidowac zaslepke na przyslowki
     if entry.pos.tag != 'adv':
@@ -2553,7 +2247,7 @@ def save_new_frames(request, data, id, examples, lemma_examples):
                 reconnect_examples_operations.remove(disconnect_example_operation(example, nkjp_example_obj))
             except ValueError:
                 reconnect_examples_operations.append(connect_example_operation(example, nkjp_example_obj))
-    reconnect_examples(reconnect_examples_operations)
+    reconnect_examples(new_lemma_ver, reconnect_examples_operations)
 
     # dodawanie przykladow nkjp do czasownika
     for example in decoded_lemma_examples:
@@ -2605,10 +2299,12 @@ def validate_new_frames(request, data, id, examples, lemma_examples,
 
     status_need_validation = False
     status_need_examples_check = False
+    status_need_semantic_check = False
     try:
         status_obj = Lemma_Status.objects.get(id=status_id)
         status_need_validation = status_obj.validate
         status_need_examples_check = status_obj.check_examples
+        status_need_semantic_check = status_obj.check_semantics
     except Lemma_Status.DoesNotExist:
         status_obj = None
 
@@ -2620,6 +2316,8 @@ def validate_new_frames(request, data, id, examples, lemma_examples,
         serialized_frames, error = validate_examples_and_mark_errors(old_object, status_obj, selected_frame_id)
     elif status_need_validation or not status_obj:
         serialized_frames, error = validate_schemas_and_mark_errors(old_object, status_obj, selected_frame_id)
+    elif status_need_semantic_check:
+        serialized_frames, error = validate_schemata_for_semantics_and_mark_errors(old_object, status_obj, selected_frame_id)
 
     if error:
         return {'id' : '',
@@ -2820,63 +2518,6 @@ def prepare_sort_rules(sort_rules):
         prepared_sort_rules.append(rule['name'])
     return prepared_sort_rules
 
-def pos_regex_frames(frames, string):
-    try:
-        alternative_queries = []
-        for alternative in string.split('|'):
-            possible_frames = frames
-            for conj in alternative.split('&'):
-                model_results = []
-                negation = False
-                conj = conj.strip()
-                if conj.startswith('!'):
-                    conj = conj.lstrip('!')
-                    negation = True
-                regex = ur'^%s$' % escape_regex(conj)
-                model_results = Position.objects.filter(frames__lemmas__old=False,
-                                                        text_rep__regex=regex).distinct()
-                if model_results.exists():
-                    if negation:
-                        possible_frames = possible_frames.exclude(positions__in=model_results)
-                    else:
-                        possible_frames = possible_frames.filter(positions__in=model_results)
-                elif not model_results.exists() and not negation:
-                    possible_frames = Frame.objects.none()
-            alternative_queries.append(Q(id__in=possible_frames))
-        frames = frames.filter(reduce(operator.or_, alternative_queries)).distinct()
-    except:
-        frames = Frame.objects.none()
-    return frames
-
-def arg_regex_frames(frames, string):
-    try:
-        alternative_queries = []
-        for alternative in string.split('|'):
-            possible_frames = frames
-            for conj in alternative.split('&'):
-                model_results = []
-                negation = False
-                conj = conj.strip()
-                if conj.startswith('!'):
-                    conj = conj.lstrip('!')
-                    negation = True
-                regex = ur'^%s$' % escape_regex(conj)
-                model_results = Argument.objects.filter(positions__frames__lemmas__old=False,
-                                                        text_rep__regex=regex).distinct()
-                if model_results.exists():
-                    if negation:
-                        possible_frames = possible_frames.exclude(positions__arguments__in=model_results)
-                    else:
-                        possible_frames = possible_frames.filter(positions__arguments__in=model_results)
-                elif not model_results.exists() and not negation:
-                    possible_frames = Frame.objects.none()
-            alternative_queries.append(Q(id__in=possible_frames))
-        frames = frames.filter(reduce(operator.or_, alternative_queries)).distinct()
-    except:
-        frames = Frame.objects.none()
-    return frames
-
-# @TODO to co tutaj jest prezentowane jest bardzo glupie
 def get_lemma_query(prepared_sort_rules, filter_rules, lemma_query, user):
     lemmas = Lemma.objects.none()
     if user.is_authenticated():
@@ -2897,66 +2538,8 @@ def get_lemma_query(prepared_sort_rules, filter_rules, lemma_query, user): @@ -2897,66 +2538,8 @@ def get_lemma_query(prepared_sort_rules, filter_rules, lemma_query, user):
2897 lemmas = lemmas.filter(reduce(operator.or_, q_vocab_list)) 2538 lemmas = lemmas.filter(reduce(operator.or_, q_vocab_list))
2898 lemmas = lemmas.filter(reduce(operator.or_, q_status_list)) 2539 lemmas = lemmas.filter(reduce(operator.or_, q_status_list))
2899 2540
2900 - ## filtrowanie  
2901 - if filter_rules['owner']:  
2902 - lemmas = lemmas.filter(owner=filter_rules['owner'])  
2903 - if filter_rules['phraseologist']:  
2904 - lemmas = lemmas.filter(phraseologist=filter_rules['phraseologist'])  
2905 - if filter_rules['semanticist']:  
2906 - lemmas = lemmas.filter(semanticist=filter_rules['semanticist'])  
2907 - if filter_rules['vocabulary']:  
2908 - lemmas = lemmas.filter(vocabulary=filter_rules['vocabulary'])  
2909 - if filter_rules['status']:  
2910 - lemmas = lemmas.filter(status=filter_rules['status'])  
2911 - if filter_rules['frame_opinion']:  
2912 - lemmas = lemmas.filter(frame_opinions__value=filter_rules['frame_opinion'])  
2913 -  
2914 - frames = Frame.objects.all()  
2915 - if filter_rules['reflex']:  
2916 - frames = frames.filter(characteristics=filter_rules['reflex'])  
2917 - if filter_rules['negativity']:  
2918 - frames = frames.filter(characteristics=filter_rules['negativity'])  
2919 - if filter_rules['predicativity']:  
2920 - frames = frames.filter(characteristics=filter_rules['predicativity'])  
2921 - if filter_rules['aspect']:  
2922 - frames = frames.filter(characteristics=filter_rules['aspect'])  
2923 - if filter_rules['position'] and filter_rules['position'] != '.*':  
2924 - frames = pos_regex_frames(frames, filter_rules['position'])  
2925 - if filter_rules['argument'] and filter_rules['argument'] != '.*':  
2926 - frames = arg_regex_frames(frames, filter_rules['argument'])  
2927 - if filter_rules['frame_phraseologic']:  
2928 - frames = get_phraseologic_frames_only(frames)  
2929 -  
2930 - if (filter_rules['reflex'] or filter_rules['negativity'] or  
2931 - filter_rules['aspect'] or filter_rules['predicativity'] or  
2932 - filter_rules['frame_phraseologic'] or filter_rules['frame_opinion'] or  
2933 - (filter_rules['argument'] and filter_rules['argument'] != '.*') or  
2934 - (filter_rules['position'] and filter_rules['position'] != '.*')):  
2935 - if filter_rules['frame_opinion']:  
2936 - lemmas = lemmas.filter(frame_opinions__frame__in=frames,  
2937 - frame_opinions__value=filter_rules['frame_opinion']).distinct()  
2938 - else:  
2939 - lemmas = lemmas.filter(frames__in=frames).distinct()  
2940 -  
2941 - if filter_rules['sender']:  
2942 - lemmas = lemmas.filter(messages__sender=filter_rules['sender'])  
2943 -  
2944 - if filter_rules['pos']:  
2945 - lemmas = lemmas.filter(entry_obj__pos=filter_rules['pos'])  
2946 -  
2947 - if filter_rules['example_source']:  
2948 - lemmas = lemmas.filter(Q(nkjp_examples__source=filter_rules['example_source']) &  
2949 - Q(nkjp_examples__approved=False)).distinct()  
2950 - napproved_examples = NKJP_Example.objects.filter(Q(source=filter_rules['example_source']) &  
2951 - Q(approved=False) &  
2952 - Q(lemmas__old=False) &  
2953 - ~Q(approvers=user)).distinct()  
2954 -  
2955 - if filter_rules['approver']:  
2956 - napproved_examples = napproved_examples.filter(approvers=filter_rules['approver'])  
2957 - lemmas = lemmas.filter(nkjp_examples__in=napproved_examples)  
2958 -  
2959 - lemmas = lemmas.distinct() 2541 + ## filtrowanie
  2542 + lemmas = filter_lemmas(lemmas, filter_rules, user)
2960 2543
2961 ## sortowanie 2544 ## sortowanie
2962 entrySortDefined = False 2545 entrySortDefined = False
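Editor's note: this hunk replaces roughly sixty lines of inline filtering with a single call to filter_lemmas(), which now lives in the new dictionary/filtering.py added further down in this commit. A minimal sketch of how the extracted helper is meant to be called, assuming a view context where request.user is available; the rule values below are illustrative only.
--------------------------------------------
# Sketch only -- filter_lemmas() and default_filter_rules() come from the new
# dictionary/filtering.py introduced later in this commit.
from dictionary.filtering import default_filter_rules, filter_lemmas
from dictionary.models import Lemma

rules = default_filter_rules()
rules['contains_phraseology'] = 'yes'   # keep only lemmas with phraseologic schemata
lemmas = Lemma.objects.filter(old=False)
lemmas = filter_lemmas(lemmas, rules, request.user)   # `request` assumed from the view
--------------------------------------------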
@@ -3211,3 +2794,28 @@ def get_examples(request, lemma_id): @@ -3211,3 +2794,28 @@ def get_examples(request, lemma_id):
3211 json_examples = json_encode(examples_js) 2794 json_examples = json_encode(examples_js)
3212 return {'examples': json_examples, 2795 return {'examples': json_examples,
3213 'can_modify': user_can_modify(lemma, request.user)} 2796 'can_modify': user_can_modify(lemma, request.user)}
  2797 +
  2798 +@ajax(method='get')
  2799 +def get_schemata_and_examples(request, lemma_id):
  2800 + lemma = Lemma.objects.get(id=lemma_id)
  2801 +
  2802 + examples = lemma.nkjp_examples.order_by('source__priority',
  2803 + 'opinion__priority',
  2804 + 'sentence')
  2805 + examples_js = nkjpExamplesObjToJs(examples, request.user, lemma)
  2806 + json_examples = json_encode(examples_js)
  2807 +
  2808 + lemma_examples = lemma.lemma_nkjp_examples.order_by('source__priority',
  2809 + 'opinion__priority',
  2810 + 'sentence')
  2811 + lemma_examples_js = nkjpLemmaExamplesObjToJs(lemma_examples, request.user, lemma)
  2812 + json_lemma_examples = json_encode(lemma_examples_js)
  2813 +
  2814 + schemata = lemma.frames.order_by('text_rep')
  2815 + serialized_schemata = [frameObjToSerializableDict(lemma, schema, True) for schema in schemata]
  2816 + json_schemata = json_encode(serialized_schemata)
  2817 +
  2818 + return {'examples': json_examples,
  2819 + 'lemma_examples': json_lemma_examples,
  2820 + 'schemata': json_schemata,
  2821 + 'can_modify': user_can_modify(lemma, request.user)}
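Editor's note: the new get_schemata_and_examples view bundles the schema list, the schema-bound examples and the lemma-only examples into one AJAX response. A rough test-client sketch, assuming the view is routed at a URL such as /ajax/get_schemata_and_examples/ (hypothetical; URL wiring is not part of this diff) and that the @ajax decorator serialises the returned dict to JSON:
--------------------------------------------
# Sketch under the assumptions stated above; the key names match the dict
# returned by the view: examples, lemma_examples, schemata, can_modify.
import json
from django.test import Client

client = Client()
client.login(username='leksykograf', password='secret')   # hypothetical account
resp = client.get('/ajax/get_schemata_and_examples/', {'lemma_id': 42})
payload = json.loads(resp.content)
print sorted(payload.keys())   # ['can_modify', 'examples', 'lemma_examples', 'schemata']
--------------------------------------------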
dictionary/ajax_user_stats.py
@@ -22,8 +22,10 @@ @@ -22,8 +22,10 @@
22 22
23 """Module covering functions used in user statistics views""" 23 """Module covering functions used in user statistics views"""
24 24
  25 +import operator
  26 +
25 from django.contrib.auth.models import User 27 from django.contrib.auth.models import User
26 -from django.db.models import Count, Sum 28 +from django.db.models import Count, Sum, Q
27 29
28 from common.decorators import render, ajax 30 from common.decorators import render, ajax
29 from dictionary.models import Lemma, Lemma_Status 31 from dictionary.models import Lemma, Lemma_Status
@@ -86,8 +88,8 @@ def get_user_stats(request, user_name): @@ -86,8 +88,8 @@ def get_user_stats(request, user_name):
86 'all_semantic_owned_lemmas_count': all_semantic_owned_lemmas.count(), 88 'all_semantic_owned_lemmas_count': all_semantic_owned_lemmas.count(),
87 'all_semantic_owned_frames_count': all_semantic_owned_frames_count, 89 'all_semantic_owned_frames_count': all_semantic_owned_frames_count,
88 'earned_cash': total_earned_cash, 90 'earned_cash': total_earned_cash,
89 - 'paid_cash' : round(user.user_stats.paid_cash, 2),  
90 - 'surcharge' : round(user.user_stats.paid_cash-total_earned_cash, 2), 91 + 'paid_cash': round(user.user_stats.paid_cash, 2),
  92 + 'surcharge': round(user.user_stats.paid_cash-total_earned_cash, 2),
91 'lex_work_stats': lex_work_stats, 93 'lex_work_stats': lex_work_stats,
92 'phraseology_work_stats': phraseology_work_stats, 94 'phraseology_work_stats': phraseology_work_stats,
93 'semantics_work_stats': semantics_work_stats} 95 'semantics_work_stats': semantics_work_stats}
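Editor's note: apart from the spacing cleanup, the semantics of these two fields are unchanged: surcharge is simply the difference between what has been paid out and what has been earned, e.g. (hypothetical figures) paid_cash = 1200.00 and total_earned_cash = 1150.25 give surcharge = round(1200.00 - 1150.25, 2) = 49.75.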
@@ -212,22 +214,23 @@ def get_phraseology_stats(user): @@ -212,22 +214,23 @@ def get_phraseology_stats(user):
212 return phraseology_work_stats 214 return phraseology_work_stats
213 215
214 def get_used_bindings(added_bindings): 216 def get_used_bindings(added_bindings):
215 - used_bindings = added_bindings 217 + unused_bindings = []
216 for added_binding in added_bindings.all(): 218 for added_binding in added_bindings.all():
217 binded_entry = added_binding.binded_entry 219 binded_entry = added_binding.binded_entry
218 act_binded_lemma = binded_entry.lemmas.get(old=False) 220 act_binded_lemma = binded_entry.lemmas.get(old=False)
219 if act_binded_lemma.status.type.sym_name == 'erase': 221 if act_binded_lemma.status.type.sym_name == 'erase':
220 - used_bindings = used_bindings.exclude(pk=added_binding.pk) 222 + unused_bindings.append(added_binding.pk)
221 else: 223 else:
222 added_frame = added_binding.phraseologic_frame 224 added_frame = added_binding.phraseologic_frame
223 - act_lemma_phras_frames = act_binded_lemma.frames.annotate(positions_count=Count('positions'))\  
224 - .filter(phraseologic=True,  
225 - positions_count=added_frame.positions.count()) 225 + act_lemma_phras_frames = act_binded_lemma.frames.filter(phraseologic=True)
  226 + act_lemma_phras_frames = act_lemma_phras_frames.annotate(positions_count=Count('positions'))
  227 + act_lemma_phras_frames = act_lemma_phras_frames.filter(positions_count=added_frame.positions.count())
226 for pos in added_frame.positions.all(): 228 for pos in added_frame.positions.all():
227 act_lemma_phras_frames = act_lemma_phras_frames.filter(positions__text_rep=pos.text_rep) 229 act_lemma_phras_frames = act_lemma_phras_frames.filter(positions__text_rep=pos.text_rep)
228 - if not act_lemma_phras_frames.exists():  
229 - used_bindings = used_bindings.exclude(pk=added_binding.pk)  
230 - return used_bindings 230 + if not act_lemma_phras_frames.exists():
  231 + unused_bindings.append(added_binding.pk)
  232 + break
  233 + return added_bindings.exclude(pk__in=unused_bindings)
231 234
232 def get_semantics_stats(user): 235 def get_semantics_stats(user):
233 earned_cash = RealizedSemantics.objects.filter(user_stats__user=user).aggregate(Sum('cash'))['cash__sum'] 236 earned_cash = RealizedSemantics.objects.filter(user_stats__user=user).aggregate(Sum('cash'))['cash__sum']
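Editor's note: the rewrite of get_used_bindings switches from repeatedly calling .exclude() on the queryset inside the loop to collecting the offending primary keys in a plain list, breaking out of the inner loop as soon as one mismatch is found, and issuing a single exclude(pk__in=...) at the end. A generic sketch of the same pattern (names are illustrative, not from the codebase):
--------------------------------------------
# "Collect keys, exclude once" sketch; is_stale() stands in for the
# per-binding checks performed in the hunk above.
def drop_stale(bindings, is_stale):
    stale_pks = []
    for binding in bindings.all():
        if is_stale(binding):
            stale_pks.append(binding.pk)
    return bindings.exclude(pk__in=stale_pks)
--------------------------------------------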
@@ -241,28 +244,40 @@ def get_semantics_stats(user): @@ -241,28 +244,40 @@ def get_semantics_stats(user):
241 prop_frames = RealizedSemantics.objects.filter(user_stats__user=user).aggregate(Sum('prop_frames'))['prop_frames__sum'] 244 prop_frames = RealizedSemantics.objects.filter(user_stats__user=user).aggregate(Sum('prop_frames'))['prop_frames__sum']
242 if prop_frames == None: 245 if prop_frames == None:
243 prop_frames = 0 246 prop_frames = 0
  247 + part_prop_frames = RealizedSemantics.objects.filter(user_stats__user=user).aggregate(Sum('part_prop_frames'))['part_prop_frames__sum']
  248 + if part_prop_frames == None:
  249 + part_prop_frames = 0
244 wrong_frames = RealizedSemantics.objects.filter(user_stats__user=user).aggregate(Sum('wrong_frames'))['wrong_frames__sum'] 250 wrong_frames = RealizedSemantics.objects.filter(user_stats__user=user).aggregate(Sum('wrong_frames'))['wrong_frames__sum']
245 if wrong_frames == None: 251 if wrong_frames == None:
246 wrong_frames = 0 252 wrong_frames = 0
247 corr_frames = RealizedSemantics.objects.filter(user_stats__user=user).aggregate(Sum('corr_frames'))['corr_frames__sum'] 253 corr_frames = RealizedSemantics.objects.filter(user_stats__user=user).aggregate(Sum('corr_frames'))['corr_frames__sum']
248 if corr_frames == None: 254 if corr_frames == None:
249 - corr_frames = 0 255 + corr_frames = 0
  256 + part_corr_frames = RealizedSemantics.objects.filter(user_stats__user=user).aggregate(Sum('part_corr_frames'))['part_corr_frames__sum']
  257 + if part_corr_frames == None:
  258 + part_corr_frames = 0
250 ncorr_frames = RealizedSemantics.objects.filter(user_stats__user=user).aggregate(Sum('ncorr_frames'))['ncorr_frames__sum'] 259 ncorr_frames = RealizedSemantics.objects.filter(user_stats__user=user).aggregate(Sum('ncorr_frames'))['ncorr_frames__sum']
251 if ncorr_frames == None: 260 if ncorr_frames == None:
252 ncorr_frames = 0 261 ncorr_frames = 0
253 made_frames = RealizedSemantics.objects.filter(user_stats__user=user).aggregate(Sum('made_frames'))['made_frames__sum'] 262 made_frames = RealizedSemantics.objects.filter(user_stats__user=user).aggregate(Sum('made_frames'))['made_frames__sum']
254 if made_frames == None: 263 if made_frames == None:
255 - made_frames = 0 264 + made_frames = 0
  265 + added_connections = RealizedSemantics.objects.filter(user_stats__user=user).aggregate(Sum('added_connections'))['added_connections__sum']
  266 + if added_connections == None:
  267 + added_connections = 0
256 efficacy = 0.0 268 efficacy = 0.0
257 if prop_frames+wrong_frames > 0: 269 if prop_frames+wrong_frames > 0:
258 efficacy = float(prop_frames)/float(prop_frames+wrong_frames)*100.0 270 efficacy = float(prop_frames)/float(prop_frames+wrong_frames)*100.0
259 271
260 - sem_work_stats = {'earned_cash' : round(earned_cash, 2),  
261 - 'bonus_cash' : round(bonus_cash, 2),  
262 - 'prop_frames' : prop_frames,  
263 - 'wrong_frames' : wrong_frames,  
264 - 'corr_frames' : corr_frames,  
265 - 'checked_frames': ncorr_frames+corr_frames,  
266 - 'made_frames' : made_frames,  
267 - 'efficacy' : round(efficacy, 2)} 272 + sem_work_stats = {'earned_cash': round(earned_cash, 2),
  273 + 'bonus_cash': round(bonus_cash, 2),
  274 + 'prop_frames': prop_frames,
  275 + 'part_prop_frames': part_prop_frames,
  276 + 'wrong_frames': wrong_frames,
  277 + 'corr_frames': corr_frames,
  278 + 'part_corr_frames': part_corr_frames,
  279 + 'checked_frames': ncorr_frames+corr_frames+part_corr_frames,
  280 + 'made_frames': made_frames,
  281 + 'efficacy': round(efficacy, 2),
  282 + 'added_connections' : added_connections}
268 return sem_work_stats 283 return sem_work_stats
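Editor's note: the aggregate(Sum(...)) blocks above all follow the same "sum or zero" pattern. Not part of the commit, but a small helper along these lines would collapse each block to a single call (sketch only):
--------------------------------------------
from django.db.models import Sum

def summed(queryset, field):
    # Sum `field` over the queryset, treating an empty result (None) as 0.
    total = queryset.aggregate(Sum(field))['%s__sum' % field]
    return total if total is not None else 0

# e.g. (sketch):
# user_sem = RealizedSemantics.objects.filter(user_stats__user=user)
# prop_frames = summed(user_sem, 'prop_frames')
--------------------------------------------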
dictionary/ajax_vocabulary_management.py
@@ -155,6 +155,8 @@ def create_text_walenty(file_name, lemmas, vocabularies, frame_opinions, @@ -155,6 +155,8 @@ def create_text_walenty(file_name, lemmas, vocabularies, frame_opinions,
155 pred_val=pred_val, 155 pred_val=pred_val,
156 aspect_val=aspect_val).order_by('text_rep') 156 aspect_val=aspect_val).order_by('text_rep')
157 for frame in matching_frames: 157 for frame in matching_frames:
  158 + if not lemma.phraseology_ready() and frame.phraseologic:
  159 + continue
158 if not frame_opinions or founded_frame_opinions.filter(frame=frame).exists(): 160 if not frame_opinions or founded_frame_opinions.filter(frame=frame).exists():
159 text_rep = frame.get_position_spaced_text_rep() 161 text_rep = frame.get_position_spaced_text_rep()
160 if add_frame_opinions: 162 if add_frame_opinions:
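Editor's note: the new guard skips phraseologic schemata when exporting entries whose phraseology work is not finished (lemma.phraseology_ready() is false). The same intent could also be expressed once at the queryset level rather than per iteration; a sketch of that alternative (not what the commit does):
--------------------------------------------
def frames_for_export(lemma, matching_frames):
    # Keep phraseologic schemata only once the lemma's phraseology is done.
    if not lemma.phraseology_ready():
        return matching_frames.filter(phraseologic=False)
    return matching_frames
--------------------------------------------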
dictionary/common_func.py
1 #-*- coding:utf-8 -*- 1 #-*- coding:utf-8 -*-
2 2
3 -#Copyright (c) 2012, Bartłomiej Nitoń  
4 -#All rights reserved.  
5 -  
6 -#Redistribution and use in source and binary forms, with or without modification, are permitted provided  
7 -#that the following conditions are met:  
8 -  
9 -# Redistributions of source code must retain the above copyright notice, this list of conditions and  
10 -# the following disclaimer.  
11 -# Redistributions in binary form must reproduce the above copyright notice, this list of conditions  
12 -# and the following disclaimer in the documentation and/or other materials provided with the distribution.  
13 -  
14 -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED  
15 -# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A  
16 -# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR  
17 -# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED  
18 -# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)  
19 -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING  
20 -# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE  
21 -# POSSIBILITY OF SUCH DAMAGE.  
22 -  
23 ''' 3 '''
24 Common functions used in Slowal application. 4 Common functions used in Slowal application.
25 ''' 5 '''
@@ -131,4 +111,6 @@ def have_same_positions_structure(pos1, pos2): @@ -131,4 +111,6 @@ def have_same_positions_structure(pos1, pos2):
131 if(pos1_args == pos2_args and pos1_cats==pos2_cats): 111 if(pos1_args == pos2_args and pos1_cats==pos2_cats):
132 same_structure = True 112 same_structure = True
133 return same_structure 113 return same_structure
134 - 114 +
  115 +def escape_regex(string):
  116 + return string.replace('(', '\(').replace(')', '\)').replace('{', '\{').replace('}', '\}').replace('[', '\[').replace(']', '\]')
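Editor's note: escape_regex() escapes only the bracket characters that occur in schema and argument text representations, so other regex metacharacters typed by the user ('.', '*', '|') keep their special meaning; this is why the filtering code in the new dictionary/filtering.py can wrap the result in '^...$' instead of using re.escape(), which would escape everything. A quick sketch of the behaviour (values are illustrative):
--------------------------------------------
from dictionary.common_func import escape_regex

escape_regex('np(str)')   # -> 'np\\(str\\)'
escape_regex('xp(.*)')    # -> 'xp\\(.*\\)'  (the '.*' wildcard is left intact)
--------------------------------------------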
dictionary/filtering.py 0 → 100644
  1 +# -*- coding: utf-8 -*-
  2 +
  3 +import operator
  4 +
  5 +from django.contrib.auth.models import User
  6 +from django.db.models import Q
  7 +
  8 +from accounts.models import get_anon_profile
  9 +from dictionary.common_func import escape_regex
  10 +from dictionary.forms import FilterForm
  11 +from dictionary.models import Argument, Frame, Frame_Char_Model, Frame_Opinion_Value, \
  12 + Lemma, Lemma_Status, NKJP_Example, NKJP_Source, POS, \
  13 + Position, Vocabulary, \
  14 + get_frame_char_and_its_value, get_schemata_by_type
  15 +from semantics.forms import GeneralSelPrefForm, RelationalSelPrefForm, RoleForm, \
  16 + SynsetSelPrefForm
  17 +from semantics.models import Complement, FrameOpinion, RelationalSelectivePreference, \
  18 + SemanticFrame
  19 +from wordnet.models import LexicalUnit
  20 +
  21 +def schemata_filter_options():
  22 + # pobieranie wartosci aspektu
  23 + aspect_model = Frame_Char_Model.objects.get(model_name=u'ASPEKT')
  24 + aspect_vals_objs = aspect_model.frame_char_values.order_by('-priority')
  25 + aspect_options = [{'name': '*', 'value': '*'}]
  26 + aspect_options.extend([{'name': val.value, 'value': val.value} for val in aspect_vals_objs])
  27 +
  28 + # pobieranie wartosci zwrotnosci
  29 + reflex_model = Frame_Char_Model.objects.get(model_name=u'ZWROTNOŚĆ')
  30 + reflex_vals_objs = reflex_model.frame_char_values.order_by('-priority')
  31 + reflex_options = [{'name': '*', 'value': '*'}]
  32 + reflex_options.extend([{'name': val.value, 'value': val.value} for val in reflex_vals_objs])
  33 +
  34 + # pobieranie wartosci negatywnosci
  35 + neg_model = Frame_Char_Model.objects.get(model_name=u'NEGATYWNOŚĆ')
  36 + neg_vals_objs = neg_model.frame_char_values.order_by('-priority')
  37 + neg_options = [{'name': '*', 'value': '*'}]
  38 + neg_options.extend([{'name': val.value, 'value': val.value} for val in neg_vals_objs])
  39 +
  40 + # pobieranie wartosci predykatywnosci
  41 + pred_model = Frame_Char_Model.objects.get(model_name=u'PREDYKATYWNOŚĆ')
  42 + pred_vals_objs = pred_model.frame_char_values.order_by('-priority')
  43 + pred_options = [{'name': '*', 'value': '*'}]
  44 + pred_options.extend([{'name': val.value, 'value': val.value} for val in pred_vals_objs])
  45 +
  46 + # pobieranie opinii o schemacie
  47 + opinion_options = [{'name': '*', 'value': '*'}]
  48 + opinion_options.extend([{'name': val.value, 'value': val.value} for val in Frame_Opinion_Value.objects.order_by('priority')])
  49 +
  50 + schema_type_options = [{'name': '*', 'value': '*'},
  51 + {'name': 'normalny', 'value': 'normal'},
  52 + {'name': 'frazeologiczny', 'value': 'phraseologic'}]
  53 +
  54 + return {'schema_type_options': schema_type_options,
  55 + 'reflex_options': reflex_options,
  56 + 'aspect_options': aspect_options,
  57 + 'neg_options': neg_options,
  58 + 'pred_options': pred_options,
  59 + 'opinion_options': opinion_options}
  60 +
  61 +def all_filter_rules_loaded(rules):
  62 + if set(default_filter_rules().keys()) != set(rules):
  63 + return False
  64 + return True
  65 +
  66 +def default_filter_rules():
  67 + return {'lemma': '.*',
  68 + 'pos': None,
  69 + 'contains_phraseology': None,
  70 + 'owner': None,
  71 + 'phraseologist': None,
  72 + 'semanticist': None,
  73 + 'vocabulary': None,
  74 + 'status': None,
  75 + 'example_source': None,
  76 + 'approver': None,
  77 + 'reflex': None,
  78 + 'negativity': None,
  79 + 'predicativity': None,
  80 + 'aspect': None,
  81 + 'argument': '.*',
  82 + 'position': '.*',
  83 + 'schema_opinion' : None,
  84 + 'sender': None,
  85 + 'schema_type': None,
  86 + 'frame_opinion': None,
  87 + 'sem_arguments': []}
  88 +
  89 +def prepare_filter_form(request):
  90 + if request.session.has_key('lemma_preview') and request.session['lemma_preview']:
  91 + if not request.session.has_key('filter_rules_lemma_preview'):
  92 + request.session['filter_rules_lemma_preview'] = default_filter_rules()
  93 + filter_rules = request.session['filter_rules_lemma_preview']
  94 + else:
  95 + if not request.session.has_key('filter_rules'):
  96 + request.session['filter_rules'] = default_filter_rules()
  97 + filter_rules = request.session['filter_rules']
  98 +
  99 + users = User.objects.none()
  100 + phraseologists = User.objects.none()
  101 + semanticists = User.objects.none()
  102 + vocabularies = Vocabulary.objects.none()
  103 + senders = User.objects.none()
  104 + statuses = get_anon_profile().visible_statuses.all()
  105 + can_confirm_example = False
  106 + if request.user.is_authenticated():
  107 + users = User.objects.filter(lemmas__old=False).distinct().order_by('username')
  108 + phraseologists = User.objects.filter(phraseologist_lemmas__old=False).distinct().order_by('username')
  109 + semanticists = User.objects.filter(semanticist_lemmas__old=False).distinct().order_by('username')
  110 + vocabularies = request.user.visible_vocabularies.all()
  111 + senders = User.objects.order_by('groups__group_settings__priority')
  112 + statuses = Lemma_Status.objects.all()
  113 + if request.user.has_perm('dictionary.confirm_example') or request.user.is_superuser:
  114 + can_confirm_example = True
  115 +
  116 + form = FilterForm(users=users,
  117 + phraseologists=phraseologists,
  118 + semanticists=semanticists,
  119 + vocabularies=vocabularies,
  120 + senders=senders,
  121 + statuses=statuses,
  122 + lemma=filter_rules['lemma'],
  123 + sel_pos=filter_rules['pos'],
  124 + contains_phraseology=filter_rules['contains_phraseology'],
  125 + sel_user=filter_rules['owner'],
  126 + sel_phraseologist=filter_rules['phraseologist'],
  127 + sel_semanticist=filter_rules['semanticist'],
  128 + sel_vocabulary=filter_rules['vocabulary'],
  129 + sel_status=filter_rules['status'],
  130 + sel_reflex=filter_rules['reflex'],
  131 + sel_negativity=filter_rules['negativity'],
  132 + sel_predicativity=filter_rules['predicativity'],
  133 + sel_aspect=filter_rules['aspect'],
  134 + sel_has_argument=filter_rules['argument'],
  135 + sel_has_position=filter_rules['position'],
  136 + sel_schema_opinion=filter_rules['schema_opinion'],
  137 + can_confirm_example = can_confirm_example,
  138 + sel_example_source=filter_rules['example_source'],
  139 + sel_approver=filter_rules['approver'],
  140 + sel_sender=filter_rules['sender'],
  141 + sel_schema_type=filter_rules['schema_type'],
  142 + sel_frame_opinion=filter_rules['frame_opinion'])
  143 + return {'form': form,
  144 + 'sem_args_forms': sem_args_to_forms(filter_rules['sem_arguments'])}
  145 +
  146 +def sem_args_to_forms(sem_arguments):
  147 + args_forms = []
  148 + first_alternative = True
  149 + for alternative in sem_arguments:
  150 + if first_alternative:
  151 + first_alternative = False
  152 + else:
  153 + args_forms.append('or')
  154 + for arg in alternative:
  155 + args_forms.append(RoleForm(negation=arg['negation'],
  156 + sel_role=arg['role'],
  157 + sel_attribute=arg['attribute'],
  158 + sel_preferences=get_sel_prefs_as_forms(arg)))
  159 + return args_forms
  160 +
  161 +def get_sel_prefs_as_forms(arg):
  162 + forms = []
  163 + if arg['general_prefs']:
  164 + forms.extend(general_prefs_to_forms(arg['general_prefs']))
  165 + if arg['synset_prefs']:
  166 + forms.extend(synset_prefs_to_forms(arg['synset_prefs']))
  167 + if arg['relational_prefs']:
  168 + forms.extend(relational_prefs_to_forms(arg['relational_prefs']))
  169 + return forms
  170 +
  171 +def general_prefs_to_forms(prefs):
  172 + forms = []
  173 + for pref in prefs:
  174 + forms.append(GeneralSelPrefForm(sel_preference=pref))
  175 + return forms
  176 +
  177 +def synset_prefs_to_forms(prefs):
  178 + forms = []
  179 + for pref in prefs:
  180 + forms.append(SynsetSelPrefForm(sel_preference=pref))
  181 + return forms
  182 +
  183 +def relational_prefs_to_forms(prefs):
  184 + forms = []
  185 + for pref in prefs:
  186 + forms.append(RelationalSelPrefForm(sel_relation=pref['relation'],
  187 + sel_role=pref['role'],
  188 + sel_attribute=pref['attribute']))
  189 + return forms
  190 +
  191 +def save_lemma_filters_and_get_schemata_filter_setup(request, filter_dict):
  192 + if filter_dict['pos']:
  193 + pos_obj = POS.objects.get(id=filter_dict['pos'])
  194 + else:
  195 + pos_obj = None
  196 +
  197 + if filter_dict['owner']:
  198 + owner_obj = User.objects.get(id=filter_dict['owner'])
  199 + else:
  200 + owner_obj = None
  201 +
  202 + if filter_dict['phraseologist']:
  203 + phraseologist_obj = User.objects.get(id=filter_dict['phraseologist'])
  204 + else:
  205 + phraseologist_obj = None
  206 +
  207 + if filter_dict['semanticist']:
  208 + semanticist_obj = User.objects.get(id=filter_dict['semanticist'])
  209 + else:
  210 + semanticist_obj = None
  211 +
  212 + if filter_dict['vocabulary']:
  213 + vocabulary_obj = Vocabulary.objects.get(name=filter_dict['vocabulary'])
  214 + else:
  215 + vocabulary_obj = None
  216 +
  217 + if filter_dict['status']:
  218 + status_obj = Lemma_Status.objects.get(id=filter_dict['status'])
  219 + else:
  220 + status_obj = None
  221 +
  222 + if filter_dict['example_source']:
  223 + nkjp_source_obj = NKJP_Source.objects.get(id=filter_dict['example_source'])
  224 + else:
  225 + nkjp_source_obj = None
  226 +
  227 + if filter_dict['approver']:
  228 + approver_obj = User.objects.get(id=filter_dict['approver'])
  229 + else:
  230 + approver_obj = None
  231 +
  232 + if filter_dict['has_message_from']:
  233 + try:
  234 + sender_obj = User.objects.get(pk=filter_dict['has_message_from'])
  235 + except User.DoesNotExist:
  236 + sender_obj = None
  237 + else:
  238 + sender_obj = None
  239 +
  240 + reflex_obj, reflex_val = get_frame_char_and_its_value(filter_dict['reflex'], '*')
  241 + negativity_obj, negativity_val = get_frame_char_and_its_value(filter_dict['negativity'], '*')
  242 + aspect_obj, aspect_val = get_frame_char_and_its_value(filter_dict['aspect'], '*')
  243 + pred_obj, pred_val = get_frame_char_and_its_value(filter_dict['predicativity'], '*')
  244 +
  245 + if filter_dict['schema_opinion']:
  246 + schema_opinion_obj = Frame_Opinion_Value.objects.get(id=filter_dict['schema_opinion'])
  247 + opinion_val = schema_opinion_obj.value
  248 + else:
  249 + schema_opinion_obj = None
  250 + opinion_val = '*'
  251 +
  252 + if 'schema_type' in filter_dict:
  253 + schema_type = filter_dict['schema_type']
  254 + else:
  255 + schema_type = None
  256 +
  257 + if 'frame_opinion' in filter_dict and filter_dict['frame_opinion']:
  258 + frame_opinion = FrameOpinion.objects.get(id=filter_dict['frame_opinion'])
  259 + else:
  260 + frame_opinion = None
  261 +
  262 + sem_arguments = [constraints for constraints in filter_dict['sem_arguments'] if constraints != []]
  263 +
  264 + if request.session.has_key('lemma_preview') and request.session['lemma_preview']:
  265 + request.session['filter_rules_lemma_preview'] = {'pos' : pos_obj,
  266 + 'contains_phraseology': filter_dict['contains_phraseology'],
  267 + 'owner' : owner_obj,
  268 + 'phraseologist' : phraseologist_obj,
  269 + 'semanticist' : semanticist_obj,
  270 + 'vocabulary' : vocabulary_obj,
  271 + 'status' : status_obj,
  272 + 'example_source' : nkjp_source_obj,
  273 + 'approver' : approver_obj,
  274 + 'reflex' : reflex_obj,
  275 + 'negativity' : negativity_obj,
  276 + 'predicativity' : pred_obj,
  277 + 'aspect' : aspect_obj,
  278 + 'argument' : filter_dict['has_argument'],
  279 + 'position' : filter_dict['has_position'],
  280 + 'lemma' : filter_dict['lemma'],
  281 + 'schema_opinion' : schema_opinion_obj,
  282 + 'sender' : sender_obj,
  283 + 'schema_type' : schema_type,
  284 + 'frame_opinion' : frame_opinion,
  285 + 'sem_arguments' : sem_arguments}
  286 + else:
  287 + request.session['filter_rules'] = {'pos' : pos_obj,
  288 + 'contains_phraseology': filter_dict['contains_phraseology'],
  289 + 'owner' : owner_obj,
  290 + 'phraseologist' : phraseologist_obj,
  291 + 'semanticist' : semanticist_obj,
  292 + 'vocabulary' : vocabulary_obj,
  293 + 'status' : status_obj,
  294 + 'example_source' : nkjp_source_obj,
  295 + 'approver' : approver_obj,
  296 + 'reflex' : reflex_obj,
  297 + 'negativity' : negativity_obj,
  298 + 'predicativity' : pred_obj,
  299 + 'aspect' : aspect_obj,
  300 + 'argument' : filter_dict['has_argument'],
  301 + 'position' : filter_dict['has_position'],
  302 + 'lemma' : filter_dict['lemma'],
  303 + 'schema_opinion' : schema_opinion_obj,
  304 + 'sender' : sender_obj,
  305 + 'schema_type' : schema_type,
  306 + 'frame_opinion' : frame_opinion,
  307 + 'sem_arguments' : sem_arguments}
  308 +
  309 + return {'filter_frames': filter_dict['filter_frames'],
  310 + 'schema_type' : schema_type,
  311 + 'reflex' : reflex_val,
  312 + 'negativity' : negativity_val,
  313 + 'predicativity': pred_val,
  314 + 'opinion' : opinion_val,
  315 + 'aspect' : aspect_val,
  316 + 'position' : filter_dict['has_position'],
  317 + 'argument' : filter_dict['has_argument']}
  318 +
  319 +def filter_lemmas(lemmas, filter_rules, user):
  320 + lemmas = filter_by_lemma_properties(lemmas, filter_rules, user)
  321 + lemmas = filter_by_schemata(lemmas, filter_rules)
  322 + lemmas = filter_by_frames(lemmas, filter_rules)
  323 + return lemmas
  324 +
  325 +def filter_by_lemma_properties(lemmas, filter_rules, user):
  326 + if filter_rules['owner']:
  327 + lemmas = lemmas.filter(owner=filter_rules['owner'])
  328 + if filter_rules['phraseologist']:
  329 + lemmas = lemmas.filter(phraseologist=filter_rules['phraseologist'])
  330 + if filter_rules['semanticist']:
  331 + lemmas = lemmas.filter(semanticist=filter_rules['semanticist'])
  332 + if filter_rules['vocabulary']:
  333 + lemmas = lemmas.filter(vocabulary=filter_rules['vocabulary'])
  334 + if filter_rules['status']:
  335 + lemmas = lemmas.filter(status=filter_rules['status'])
  336 + if filter_rules['schema_opinion']:
  337 + lemmas = lemmas.filter(frame_opinions__value=filter_rules['schema_opinion'])
  338 + if filter_rules['lemma'] and filter_rules['lemma'] != '.*':
  339 + lemmas = lemma_regex_filter(lemmas, filter_rules['lemma'])
  340 + if filter_rules['sender']:
  341 + lemmas = lemmas.filter(messages__sender=filter_rules['sender'])
  342 + if filter_rules['pos']:
  343 + lemmas = lemmas.filter(entry_obj__pos=filter_rules['pos'])
  344 + if filter_rules['contains_phraseology']:
  345 + phraseologic_lemmas = lemmas.filter(frames__phraseologic=True)
  346 + if filter_rules['contains_phraseology'] == 'yes':
  347 + lemmas = phraseologic_lemmas
  348 + else:
  349 + lemmas = lemmas.exclude(pk__in=phraseologic_lemmas)
  350 + if filter_rules['example_source']:
  351 + lemmas = lemmas.filter(Q(nkjp_examples__source=filter_rules['example_source']) &
  352 + Q(nkjp_examples__approved=False)).distinct()
  353 + napproved_examples = NKJP_Example.objects.filter(Q(source=filter_rules['example_source']) &
  354 + Q(approved=False) &
  355 + Q(lemmas__old=False) &
  356 + ~Q(approvers=user)).distinct()
  357 +
  358 + if filter_rules['approver']:
  359 + napproved_examples = napproved_examples.filter(approvers=filter_rules['approver'])
  360 + lemmas = lemmas.filter(nkjp_examples__in=napproved_examples)
  361 + lemmas = lemmas.distinct()
  362 + return lemmas
  363 +
  364 +def lemma_regex_filter(lemmas, string):
  365 + try:
  366 + alternative_queries = []
  367 + for alternative in string.split('|'):
  368 + possible_lemmas = lemmas
  369 + for conj in alternative.split('&'):
  370 + model_results = []
  371 + negation = False
  372 + conj = conj.strip()
  373 + if conj.startswith('!'):
  374 + conj = conj.lstrip('!')
  375 + negation = True
  376 + regex = ur'^%s$' % escape_regex(conj)
  377 + model_results = Lemma.objects.filter(old=False,
  378 + entry_obj__name__regex=regex).distinct()
  379 + if model_results.exists():
  380 + if negation:
  381 + possible_lemmas = possible_lemmas.exclude(pk__in=model_results)
  382 + else:
  383 + possible_lemmas = possible_lemmas.filter(pk__in=model_results)
  384 + elif not model_results.exists() and not negation:
  385 + possible_lemmas = Lemma.objects.none()
  386 + alternative_queries.append(Q(id__in=possible_lemmas))
  387 + lemmas = lemmas.filter(reduce(operator.or_, alternative_queries)).distinct()
  388 + except:
  389 + lemmas = Lemma.objects.none()
  390 + return lemmas
  391 +
  392 +def filter_by_schemata(lemmas, filter_rules):
  393 + schemata = Frame.objects
  394 + if filter_rules['reflex']:
  395 + schemata = schemata.filter(characteristics=filter_rules['reflex'])
  396 + if filter_rules['negativity']:
  397 + schemata = schemata.filter(characteristics=filter_rules['negativity'])
  398 + if filter_rules['predicativity']:
  399 + schemata = schemata.filter(characteristics=filter_rules['predicativity'])
  400 + if filter_rules['aspect']:
  401 + schemata = schemata.filter(characteristics=filter_rules['aspect'])
  402 + if filter_rules['position'] and filter_rules['position'] != '.*':
  403 + schemata = pos_regex_frames(schemata, filter_rules['position'])
  404 + if filter_rules['argument'] and filter_rules['argument'] != '.*':
  405 + schemata = arg_regex_frames(schemata, filter_rules['argument'])
  406 + if filter_rules['schema_type']:
  407 + schemata = get_schemata_by_type(filter_rules['schema_type'], schemata)
  408 +
  409 + if (filter_rules['reflex'] or filter_rules['negativity'] or
  410 + filter_rules['aspect'] or filter_rules['predicativity'] or
  411 + filter_rules['schema_type'] or filter_rules['schema_opinion'] or
  412 + (filter_rules['argument'] and filter_rules['argument'] != '.*') or
  413 + (filter_rules['position'] and filter_rules['position'] != '.*')):
  414 + if filter_rules['schema_opinion']:
  415 + lemmas = lemmas.filter(frame_opinions__frame__in=schemata.all(),
  416 + frame_opinions__value=filter_rules['schema_opinion'])
  417 + else:
  418 + lemmas = lemmas.filter(frames__in=schemata.all())
  419 + lemmas = lemmas.distinct()
  420 + return lemmas
  421 +
  422 +def pos_regex_frames(frames, string):
  423 + try:
  424 + alternative_queries = []
  425 + for alternative in string.split('|'):
  426 + possible_frames = frames
  427 + for conj in alternative.split('&'):
  428 + model_results = []
  429 + negation = False
  430 + conj = conj.strip()
  431 + if conj.startswith('!'):
  432 + conj = conj.lstrip('!')
  433 + negation = True
  434 + regex = ur'^%s$' % escape_regex(conj)
  435 + model_results = Position.objects.filter(frames__lemmas__old=False,
  436 + text_rep__regex=regex).distinct()
  437 + if model_results.exists():
  438 + if negation:
  439 + possible_frames = possible_frames.exclude(positions__in=model_results)
  440 + else:
  441 + possible_frames = possible_frames.filter(positions__in=model_results)
  442 + elif not model_results.exists() and not negation:
  443 + possible_frames = Frame.objects.none()
  444 + alternative_queries.append(Q(id__in=possible_frames))
  445 + frames = frames.filter(reduce(operator.or_, alternative_queries)).distinct()
  446 + except:
  447 + frames = Frame.objects.none()
  448 + return frames
  449 +
  450 +def arg_regex_frames(frames, string):
  451 + try:
  452 + alternative_queries = []
  453 + for alternative in string.split('|'):
  454 + possible_frames = frames
  455 + for conj in alternative.split('&'):
  456 + model_results = []
  457 + negation = False
  458 + conj = conj.strip()
  459 + if conj.startswith('!'):
  460 + conj = conj.lstrip('!')
  461 + negation = True
  462 + regex = ur'^%s$' % escape_regex(conj)
  463 + model_results = Argument.objects.filter(positions__frames__lemmas__old=False,
  464 + text_rep__regex=regex).distinct()
  465 + if model_results.exists():
  466 + if negation:
  467 + possible_frames = possible_frames.exclude(positions__arguments__in=model_results)
  468 + else:
  469 + possible_frames = possible_frames.filter(positions__arguments__in=model_results)
  470 + elif not model_results.exists() and not negation:
  471 + possible_frames = Frame.objects.none()
  472 + alternative_queries.append(Q(id__in=possible_frames))
  473 + frames = frames.filter(reduce(operator.or_, alternative_queries)).distinct()
  474 + except:
  475 + frames = Frame.objects.none()
  476 + return frames
  477 +
  478 +def filter_by_frames(lemmas, filter_rules):
  479 + frames = SemanticFrame.objects.filter(next__isnull=True, removed=False)
  480 + if filter_rules['frame_opinion']:
  481 + frames = frames.filter(opinion=filter_rules['frame_opinion'])
  482 + if filter_rules['sem_arguments']:
  483 + frames = get_frames_by_args_rule(frames, filter_rules['sem_arguments'])
  484 + if filter_rules['frame_opinion'] or filter_rules['sem_arguments']:
  485 + lemmas = lemmas.filter(entry_obj__meanings__frames__in=frames).distinct()
  486 + return lemmas
  487 +
  488 +def get_frames_by_args_rule(frames, args_filter_rule):
  489 + matching_frames = []
  490 + for alternative in args_filter_rule:
  491 + alt_matching_frames = get_matching_frames(frames, alternative)
  492 + matching_frames.extend(alt_matching_frames.values_list('id', flat=True))
  493 + return frames.filter(id__in=list(set(matching_frames)))
  494 +
  495 +def get_matching_frames(frames, arguments_rules):
  496 + for rules in arguments_rules:
  497 + if not rules['negation']:
  498 + frames = frames.filter(complements__in=matching_complements(rules))
  499 + else:
  500 + frames = frames.exclude(complements__in=matching_complements(rules))
  501 + return frames
  502 +
  503 +def matching_complements(filter_rules):
  504 + complements = Complement.objects
  505 + if filter_rules['role']:
  506 + complements = complements.filter(roles=filter_rules['role'])
  507 + if filter_rules['attribute']:
  508 + complements = complements.filter(roles=filter_rules['attribute'])
  509 + if filter_rules['general_prefs'] or filter_rules['synset_prefs'] or filter_rules['relational_prefs']:
  510 + complements = complements.filter(selective_preference__isnull=False)
  511 + if filter_rules['general_prefs']:
  512 + complements = filter_by_general_prefs(complements, filter_rules['general_prefs'])
  513 + if filter_rules['synset_prefs']:
  514 + complements = filter_by_synset_prefs(complements, filter_rules['synset_prefs'])
  515 + if filter_rules['relational_prefs']:
  516 + complements = filter_by_relational_prefs(complements, filter_rules['relational_prefs'])
  517 + return complements.all()
  518 +
  519 +def filter_by_general_prefs(complements, prefs):
  520 + complements = complements.exclude(selective_preference__generals=None)
  521 + for pref in list(set(prefs)):
  522 + if pref:
  523 + complements = complements.filter(selective_preference__generals=pref)
  524 + return complements
  525 +
  526 +def filter_by_synset_prefs(complements, prefs):
  527 + complements = complements.exclude(selective_preference__synsets=None)
  528 + for pref in list(set(prefs)):
  529 + if pref:
  530 + try:
  531 + pref_parts = pref.split('-')
  532 + base = pref_parts[0]
  533 + sense = pref_parts[1]
  534 + synset = LexicalUnit.objects.get(base=base, sense=sense).synset
  535 + complements = complements.filter(selective_preference__synsets=synset)
  536 + except:
  537 + complements = Complement.objects.none()
  538 + return complements
  539 +
  540 +def filter_by_relational_prefs(complements, prefs):
  541 + complements = complements.exclude(selective_preference__relations=None)
  542 + for pref in prefs:
  543 + if pref['relation'] or pref['role'] or pref['attribute']:
  544 + relational_prefs = RelationalSelectivePreference.objects
  545 + if pref['relation']:
  546 + relational_prefs = relational_prefs.filter(relation=pref['relation'])
  547 + if pref['role'] or pref['attribute']:
  548 + to_complements = Complement.objects
  549 + if pref['role']:
  550 + to_complements = to_complements.filter(roles=pref['role'])
  551 + if pref['attribute']:
  552 + to_complements = to_complements.filter(roles=pref['attribute'])
  553 + relational_prefs = relational_prefs.filter(to__in=to_complements.all()).distinct()
  554 + complements = complements.filter(selective_preference__relations__in=relational_prefs).distinct()
  555 + return complements
  556 +
0 \ No newline at end of file 557 \ No newline at end of file
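Editor's note: the lemma, position and argument filters in this new module share a small query mini-language: '|' separates alternatives, '&' joins conditions that must all match, and a leading '!' negates a single condition; each atom is wrapped in '^...$' after escape_regex(). A hedged usage sketch (the phrase-type strings are hypothetical examples of Walenty text representations):
--------------------------------------------
from dictionary.filtering import arg_regex_frames
from dictionary.models import Frame

# Schemata containing both np(str) and some cp(...) phrase, or an infinitive
# phrase but no np(str):
frames = arg_regex_frames(Frame.objects.all(),
                          'np(str) & cp(.*) | infp(.*) & !np(str)')
--------------------------------------------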
dictionary/forms.py
1 #-*- coding:utf-8 -*- 1 #-*- coding:utf-8 -*-
2 2
3 -#Copyright (c) 2012, Bartłomiej Nitoń  
4 -#All rights reserved.  
5 -  
6 -#Redistribution and use in source and binary forms, with or without modification, are permitted provided  
7 -#that the following conditions are met:  
8 -  
9 -# Redistributions of source code must retain the above copyright notice, this list of conditions and  
10 -# the following disclaimer.  
11 -# Redistributions in binary form must reproduce the above copyright notice, this list of conditions  
12 -# and the following disclaimer in the documentation and/or other materials provided with the distribution.  
13 -  
14 -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED  
15 -# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A  
16 -# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR  
17 -# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED  
18 -# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)  
19 -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING  
20 -# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE  
21 -# POSSIBILITY OF SUCH DAMAGE.  
22 -  
23 import datetime 3 import datetime
24 4
25 from django.contrib.auth.forms import UserCreationForm 5 from django.contrib.auth.forms import UserCreationForm
@@ -28,6 +8,7 @@ from django.db.models import Q @@ -28,6 +8,7 @@ from django.db.models import Q
28 from django.forms import * 8 from django.forms import *
29 9
30 from dictionary.models import * 10 from dictionary.models import *
  11 +from semantics.models import FrameOpinion
31 #Frame, Argument_Model, AttrValueSelectionMode, \ 12 #Frame, Argument_Model, AttrValueSelectionMode, \
32 # AttrValuesSeparator, Atribute_Value, PositionCategory, \ 13 # AttrValuesSeparator, Atribute_Value, PositionCategory, \
33 # Argument, Atribute_Model, ArgRealOpinion, Frame_Characteristic, \ 14 # Argument, Atribute_Model, ArgRealOpinion, Frame_Characteristic, \
@@ -529,9 +510,14 @@ class MessageForm(ModelForm): @@ -529,9 +510,14 @@ class MessageForm(ModelForm):
529 exclude = ('sender', 'lemma', 'new', 'recipient') 510 exclude = ('sender', 'lemma', 'new', 'recipient')
530 511
531 512
532 -############################ sorting, filtering 513 +############################ sorting, filtering
  514 +
533 class FilterForm(Form): 515 class FilterForm(Form):
  516 + # Lemma filters
  517 + lemma = forms.CharField(label=u'Lemat', required=False)
534 pos = ModelChoiceField(label=u'Część mowy', queryset=POS.objects.none(), required=False) 518 pos = ModelChoiceField(label=u'Część mowy', queryset=POS.objects.none(), required=False)
  519 + contains_phraseology = ChoiceField(choices=[('', '---------'), ('yes', 'zawiera'), ('no', 'nie zawiera')],
  520 + label=u'Frazeologia', required=False)
535 owner = ModelChoiceField(label=u'Właściciel', queryset=User.objects.none(), required=False) 521 owner = ModelChoiceField(label=u'Właściciel', queryset=User.objects.none(), required=False)
536 phraseologist = ModelChoiceField(label=u'Frazeolog', queryset=User.objects.none(), required=False) 522 phraseologist = ModelChoiceField(label=u'Frazeolog', queryset=User.objects.none(), required=False)
537 semanticist = ModelChoiceField(label=u'Semantyk', queryset=User.objects.none(), required=False) 523 semanticist = ModelChoiceField(label=u'Semantyk', queryset=User.objects.none(), required=False)
@@ -544,7 +530,10 @@ class FilterForm(Form): @@ -544,7 +530,10 @@ class FilterForm(Form):
544 approver = ModelChoiceField(label=u'Zatwierdzający przykład', queryset=User.objects.filter(Q(groups__permissions__codename='confirm_example') | 530 approver = ModelChoiceField(label=u'Zatwierdzający przykład', queryset=User.objects.filter(Q(groups__permissions__codename='confirm_example') |
545 Q(is_superuser=True)).distinct().order_by('username'), 531 Q(is_superuser=True)).distinct().order_by('username'),
546 required=False) 532 required=False)
547 - reflex = ModelChoiceField(label=u'Zwrotność', 533 + # Schema filters
  534 + schema_type = ChoiceField(choices=[('', '---------'), ('normal', 'normalny'), ('phraseologic', 'frazeologiczny')],
  535 + label=u'Typ schematu', required=False)
  536 + reflex = ModelChoiceField(label=u'Zwrotność',
548 queryset=Frame_Characteristic.objects.filter(type=u'ZWROTNOŚĆ').order_by('value__priority'), 537 queryset=Frame_Characteristic.objects.filter(type=u'ZWROTNOŚĆ').order_by('value__priority'),
549 required=False) 538 required=False)
550 negativity = ModelChoiceField(label=u'Negatywność', 539 negativity = ModelChoiceField(label=u'Negatywność',
@@ -558,28 +547,34 @@ class FilterForm(Form): @@ -558,28 +547,34 @@ class FilterForm(Form):
558 required=False) 547 required=False)
559 has_argument = forms.CharField(label=u'Zawiera typ frazy', required=False) 548 has_argument = forms.CharField(label=u'Zawiera typ frazy', required=False)
560 has_position = forms.CharField(label=u'Zawiera pozycję', required=False) 549 has_position = forms.CharField(label=u'Zawiera pozycję', required=False)
561 - frame_opinion = ModelChoiceField(label=u'Opinia o schemacie', queryset=Frame_Opinion_Value.objects.all(),  
562 - required=False)  
563 - frame_phraseologic = forms.BooleanField(label=u'Schemat frazeologiczny', initial=False,  
564 - required=False) 550 + schema_opinion = ModelChoiceField(label=u'Opinia o schemacie', queryset=Frame_Opinion_Value.objects.all(),
  551 + required=False)
565 filter_frames = forms.BooleanField(label=u'Odfiltruj niepasujące schematy', initial=False, 552 filter_frames = forms.BooleanField(label=u'Odfiltruj niepasujące schematy', initial=False,
566 required=False) 553 required=False)
  554 + # Frame filters
  555 + frame_opinion = ModelChoiceField(label=u'Opinia o ramie', queryset=FrameOpinion.objects.order_by('priority'),
  556 + required=False)
567 557
568 def __init__(self, users, phraseologists, semanticists, vocabularies, statuses, senders, 558 def __init__(self, users, phraseologists, semanticists, vocabularies, statuses, senders,
569 - sel_pos=None, sel_user=None, sel_phraseologist=None, sel_semanticist=None,  
570 - sel_vocabulary=None, sel_status=None, frame_phraseologic=False,#sel_old_property=None, 559 + lemma='.*', sel_pos=None, contains_phraseology=None,
  560 + sel_user=None, sel_phraseologist=None, sel_semanticist=None,
  561 + sel_vocabulary=None, sel_status=None, sel_schema_type=None,
571 sel_reflex=None, sel_negativity=None, sel_predicativity=None, 562 sel_reflex=None, sel_negativity=None, sel_predicativity=None,
572 - sel_aspect=None, sel_has_argument='.*', sel_has_position='.*', #sel_has_frame='.*',  
573 - sel_frame_opinion=None, can_confirm_example=False, sel_example_source=None,  
574 - sel_approver=None, sel_sender=None, *args, **kwargs): 563 + sel_aspect=None, sel_has_argument='.*', sel_has_position='.*',
  564 + sel_schema_opinion=None, can_confirm_example=False, sel_example_source=None,
  565 + sel_approver=None, sel_sender=None,
  566 + sel_frame_opinion=None, *args, **kwargs):
575 super(FilterForm, self).__init__(*args, **kwargs) 567 super(FilterForm, self).__init__(*args, **kwargs)
  568 +
576 self.fields['pos'].queryset = POS.objects.exclude(tag='unk') 569 self.fields['pos'].queryset = POS.objects.exclude(tag='unk')
577 self.fields['owner'].queryset = users 570 self.fields['owner'].queryset = users
578 self.fields['phraseologist'].queryset = phraseologists 571 self.fields['phraseologist'].queryset = phraseologists
579 self.fields['semanticist'].queryset = semanticists 572 self.fields['semanticist'].queryset = semanticists
580 self.fields['vocabulary'].queryset = vocabularies 573 self.fields['vocabulary'].queryset = vocabularies
581 self.fields['status'].queryset = statuses 574 self.fields['status'].queryset = statuses
  575 + self.fields['lemma'].initial = lemma
582 self.fields['pos'].initial = sel_pos 576 self.fields['pos'].initial = sel_pos
  577 + self.fields['contains_phraseology'].initial = contains_phraseology
583 self.fields['owner'].initial = sel_user 578 self.fields['owner'].initial = sel_user
584 self.fields['phraseologist'].initial = sel_phraseologist 579 self.fields['phraseologist'].initial = sel_phraseologist
585 self.fields['semanticist'].initial = sel_semanticist 580 self.fields['semanticist'].initial = sel_semanticist
@@ -593,23 +588,20 @@ class FilterForm(Form): @@ -593,23 +588,20 @@ class FilterForm(Form):
593 self.fields['example_source'].initial = None 588 self.fields['example_source'].initial = None
594 self.fields['approver'].widget = self.fields['approver'].hidden_widget() 589 self.fields['approver'].widget = self.fields['approver'].hidden_widget()
595 self.fields['approver'].initial = None 590 self.fields['approver'].initial = None
596 -# self.fields['has_old_frames_property'].initial = sel_old_property  
597 self.fields['reflex'].initial = sel_reflex 591 self.fields['reflex'].initial = sel_reflex
598 self.fields['negativity'].initial = sel_negativity 592 self.fields['negativity'].initial = sel_negativity
599 self.fields['predicativity'].initial = sel_predicativity 593 self.fields['predicativity'].initial = sel_predicativity
600 self.fields['aspect'].initial = sel_aspect 594 self.fields['aspect'].initial = sel_aspect
601 self.fields['has_argument'].initial = sel_has_argument 595 self.fields['has_argument'].initial = sel_has_argument
602 self.fields['has_position'].initial = sel_has_position 596 self.fields['has_position'].initial = sel_has_position
603 - #self.fields['has_frame'].initial = sel_has_frame  
604 - self.fields['frame_opinion'].initial = sel_frame_opinion 597 + self.fields['schema_opinion'].initial = sel_schema_opinion
605 self.fields['has_message_from'].initial = sel_sender 598 self.fields['has_message_from'].initial = sel_sender
606 self.fields['has_message_from'].queryset = senders 599 self.fields['has_message_from'].queryset = senders
607 600
608 - self.fields['frame_phraseologic'].initial = frame_phraseologic 601 + self.fields['schema_type'].initial = sel_schema_type
  602 + self.fields['frame_opinion'].initial = sel_frame_opinion
609 603
610 self.hide_unused_fields() 604 self.hide_unused_fields()
611 -  
612 - #self.fields['has_frame'].widget = self.fields['has_frame'].hidden_widget()  
613 605
614 def hide_unused_fields(self): 606 def hide_unused_fields(self):
615 for field_name in self.fields: 607 for field_name in self.fields:
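Editor's note: the reworked FilterForm now separates lemma-level, schema-level and semantic-frame-level filters, and its constructor takes the current selections as keyword arguments. A sketch of instantiating it outside prepare_filter_form() (the querysets shown here are placeholders; in the application they are narrowed per user in dictionary/filtering.py):
--------------------------------------------
from django.contrib.auth.models import User
from dictionary.forms import FilterForm
from dictionary.models import Lemma_Status, Vocabulary

form = FilterForm(users=User.objects.all(),
                  phraseologists=User.objects.all(),
                  semanticists=User.objects.all(),
                  vocabularies=Vocabulary.objects.all(),
                  statuses=Lemma_Status.objects.all(),
                  senders=User.objects.all(),
                  lemma='.*',
                  sel_schema_type='phraseologic',   # or 'normal', or None
                  sel_frame_opinion=None)
--------------------------------------------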
dictionary/management/commands/approve_examples.py 0 → 100644
  1 +#-*- coding:utf-8 -*-
  2 +
  3 +from django.core.management.base import BaseCommand
  4 +
  5 +from dictionary.models import NKJP_Example
  6 +
  7 +class Command(BaseCommand):
  8 + args = 'none'
  9 + help = ""
  10 +
  11 + def handle(self, **options):
  12 + approve_examples()
  13 +
  14 +def approve_examples():
  15 + for example in NKJP_Example.objects.filter(approved=False):
  16 + if example.approvers.count() > 0:
  17 + example.approved = True
  18 + example.save()
  19 + print example
  20 +
0 \ No newline at end of file 21 \ No newline at end of file
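Editor's note: the new approve_examples management command marks as approved every NKJP example that is still unapproved but already has at least one approver, printing each example it touches. It is invoked like any other Django management command:
>> python manage.py approve_examples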
dictionary/management/commands/create_TEI_walenty.py
1 #-*- coding:utf-8 -*- 1 #-*- coding:utf-8 -*-
2 2
3 -#Copyright (c) 2015, Bartłomiej Nitoń  
4 -#All rights reserved.  
5 -  
6 -#Redistribution and use in source and binary forms, with or without modification, are permitted provided  
7 -#that the following conditions are met:  
8 -  
9 -# Redistributions of source code must retain the above copyright notice, this list of conditions and  
10 -# the following disclaimer.  
11 -# Redistributions in binary form must reproduce the above copyright notice, this list of conditions  
12 -# and the following disclaimer in the documentation and/or other materials provided with the distribution.  
13 -  
14 -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED  
15 -# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A  
16 -# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR  
17 -# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED  
18 -# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)  
19 -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING  
20 -# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE  
21 -# POSSIBILITY OF SUCH DAMAGE.  
22 -  
23 import datetime 3 import datetime
24 import os 4 import os
25 5
dictionary/management/commands/get_examples.py
1 #-*- coding:utf-8 -*- 1 #-*- coding:utf-8 -*-
2 2
3 import codecs 3 import codecs
4 -import operator 4 +import datetime
5 import os 5 import os
6 -import re  
7 -from subprocess import call  
8 -from tempfile import mkdtemp, mkstemp  
9 6
10 from django.core.management.base import BaseCommand 7 from django.core.management.base import BaseCommand
11 -from django.utils.encoding import smart_str  
12 -from django.db.models import Q  
13 8
14 -#import corpus2  
15 -from common.morfeusz import analyse  
16 -  
17 -from dictionary.models import Argument, Lemma 9 +from dictionary.models import Lemma, get_ready_statuses
18 from settings import PROJECT_PATH 10 from settings import PROJECT_PATH
19 11
20 BASE_PATH = os.path.join(PROJECT_PATH, 'data') 12 BASE_PATH = os.path.join(PROJECT_PATH, 'data')
21 -#['gotowe', 'sprawdzone', 'tymczasowy']  
22 -STATUSES_LS = [u'zalążkowe', u'gotowe', u'sprawdzone',  
23 - u'(F) w obróbce', u'(F) gotowe', u'(F) sprawdzone',  
24 - u'(S) w obróbce', u'(S) gotowe', u'(S) sprawdzone']  
25 -  
26 -NOUN_TAGS = ['subst', 'ger']  
27 -  
28 -#VERBTAGLIST = ['fin', 'praet', 'bedzie', 'inf', 'imps', 'impt',  
29 -# 'winien', 'pred']  
30 -#ADJTAGLIST = ['adj', 'pact', 'ppas']  
31 -#INTERPTAGLIST = ['interp']  
32 -#NUMERALTAGLIST = ['num', 'numcol']  
33 -  
34 -XCES_HEADER = """<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE cesAna SYSTEM 'xcesAnaIPI.dtd'><cesAna type="pre_morph" version="WROC-1.0" xmlns:xlink="http://www.w3.org/1999/xlink">  
35 -<chunkList xml:base="text.xml">  
36 -"""  
37 -XCES_FOOTER = """</chunkList>  
38 -</cesAna>  
39 -"""  
40 -  
41 -WCRFT_CONFIG = 'nkjp_s2.ini'  
42 -  
43 -LABELS = ('haslo',  
44 - 'status hasla',  
45 - 'identyfikator schematu',  
46 - 'schemat',  
47 - 'opinia o schemacie',  
48 - 'przyklad',  
49 - 'otagowany przyklad',  
50 - 'fragmenty przykladu',  
51 - 'opinia o przykladzie',  
52 - 'zrodlo przykladu',  
53 - 'wybor argumentow')  
54 13
55 -  
56 -ARG_TYPES_BY_PRIORITY = ['fixed',  
57 - # frazy przyimkowe  
58 - 'preplexnp', 'comprepnp', 'prepnp', 'prepncp', 'prepadjp',  
59 - # frazy rzeczownikowe  
60 - 'lexnp', 'np',  
61 - # frazy rzeczownikowo-zdaniowe  
62 - 'ncp', 'cp',  
63 - # adjp  
64 - 'adjp',  
65 - # bezokoliczniki  
66 - 'infp',  
67 - # refl  
68 - 'refl',  
69 - # xp  
70 - 'xp',  
71 - # advp  
72 - 'advp',  
73 - # nonch  
74 - 'nonch',  
75 - # lemma - nie jest sortowane chyba, bo dodawane na innym etapie niz reszta argumentow  
76 - 'lemma',  
77 - # xp  
78 - 'xp'  
79 - ] 14 +LABELS = (u'hasło',
  15 + u'status hasła',
  16 + u'identyfikator schematu',
  17 + u'schemat',
  18 + u'opinia o schemacie',
  19 + u'przykład',
  20 + u'opinia o przykładzie',
  21 + u'źródło przykładu',
  22 + u'wybór typów fraz')
80 23
81 class Command(BaseCommand): 24 class Command(BaseCommand):
82 help = 'Get pinned examples from Slowal.' 25 help = 'Get pinned examples from Slowal.'
83 26
84 def handle(self, **options): 27 def handle(self, **options):
85 - get_examples()  
86 -  
87 -def write_examples(q_statuses):  
88 - try:  
89 - examples_file = codecs.open(os.path.join(BASE_PATH,  
90 - 'examples_gotowe_plus.txt'), 'wt', 'utf-8')  
91 - for lemma in Lemma.objects.filter(old=False).filter(reduce(operator.or_, q_statuses)).order_by('entry').all():  
92 - print lemma  
93 - examples_file.write(lemma.entry+'\n')  
94 - for frame in lemma.frames.order_by('text_rep').all():  
95 - if lemma.frame_opinions.get(frame=frame).value.value != u'zła':  
96 - examples_file.write('\t%s\n' % frame.text_rep)  
97 - for example in lemma.nkjp_examples.filter(frame=frame):  
98 - examples_file.write('\t\t--> %s\n' % example.sentence)  
99 - examples_file.write('\n\n')  
100 - finally:  
101 - examples_file.close()  
102 -  
103 -def write_xces_opening(outfile):  
104 - outfile.write(XCES_HEADER)  
105 -  
106 -def write_xces_closing(outfile):  
107 - outfile.write(XCES_FOOTER) 28 + get_examples()
  29 +
  30 +def get_examples():
  31 + ready_statuses = get_ready_statuses()
  32 + write_detailed_examples(ready_statuses)
  33 + # write_examples(ready_statuses)
108 34
109 -def write_paragraph(what, outfile):  
110 - if len(what) > 0 and not what.isspace():  
111 - outfile.write(u'<chunk type="p" id="p1">')  
112 - outfile.write(what)  
113 - outfile.write(u'</chunk>\n')  
114 -  
115 -def sentence_to_xces(sentence): 35 +def write_detailed_examples(statuses):
116 try: 36 try:
117 - tmp_folder = mkdtemp()  
118 - os.chdir(tmp_folder)  
119 - tmp_file, tmpfilename = mkstemp(dir=tmp_folder)  
120 - os.close(tmp_file)  
121 - outfile = codecs.open(tmpfilename, 'wt', 'utf-8')  
122 - write_xces_opening(outfile)  
123 - write_paragraph(sentence, outfile)  
124 - write_xces_closing(outfile)  
125 - finally:  
126 - outfile.close()  
127 - return tmpfilename  
128 -  
129 -def chunks(rdr):  
130 - """Yields subsequent paragraphs from a reader."""  
131 - while True:  
132 - chunk = rdr.get_next_chunk()  
133 - if not chunk:  
134 - break  
135 - yield chunk  
136 -  
137 -#def tag_sentence(tagged_sentence_path):  
138 -# sentences_count = 0  
139 -# tagged_sentence_chunks = []  
140 -# tagset = corpus2.get_named_tagset('nkjp')  
141 -# rdr = corpus2.TokenReader.create_path_reader('xces', tagset, tagged_sentence_path)  
142 -# for chunk in chunks(rdr):  
143 -# for sent in chunk.sentences():  
144 -# sentences_count += 1  
145 -# for tok in sent.tokens():  
146 -# prefered_lexeme = tok.get_preferred_lexeme(tagset)  
147 -# base_form = prefered_lexeme.lemma_utf8().decode('utf-8')  
148 -# orth_form = tok.orth_utf8().decode('utf-8')  
149 -# tags = tagset.tag_to_string(prefered_lexeme.tag())  
150 -# sentence_chunk = u'%s[%s>%s]' % (orth_form, base_form, tags)  
151 -# tagged_sentence_chunks.append(sentence_chunk)  
152 -# tagged_sentence = ' '.join(tagged_sentence_chunks)  
153 -# if sentences_count > 1:  
154 -# pass  
155 -# return tagged_sentence  
156 -  
157 -#def get_tagged_sentence(sentence):  
158 -# tagged_sentence = 'Error!'  
159 -# try:  
160 -# tmp_folder = mkdtemp()  
161 -# os.chdir(tmp_folder)  
162 -# xces_file, xces_path = mkstemp(dir=tmp_folder)  
163 -# os.close(xces_file)  
164 -# tagged_sentence_file, tagged_sentence_path = mkstemp(dir=tmp_folder)  
165 -# os.close(tagged_sentence_file)  
166 -# xces_file = codecs.open(xces_path, 'wt', 'utf-8')  
167 -# write_xces_opening(xces_file)  
168 -# write_paragraph(sentence, xces_file)  
169 -# write_xces_closing(xces_file)  
170 -# xces_file.close()  
171 -# try:  
172 -# call(['wcrft', WCRFT_CONFIG, xces_path, '-O', tagged_sentence_path, '-C', '-i', 'premorph'])  
173 -# tagged_sentence = tag_sentence(tagged_sentence_path)  
174 -# except:  
175 -# print 'Tagging failed.'  
176 -# finally:  
177 -# xces_file.close()  
178 -# os.remove(xces_path)  
179 -# os.remove(tagged_sentence_path)  
180 -# return tagged_sentence  
181 -  
182 -def write_detailed_examples(q_statuses):  
183 - try:  
184 - examples_file = codecs.open(os.path.join(BASE_PATH,  
185 - 'detailed_examples_20150616.csv'), 'wt', 'utf-8')  
186 - examples_file.write(u'%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n' % LABELS)  
187 - for lemma in Lemma.objects.filter(old=False).filter(reduce(operator.or_, q_statuses)).order_by('entry').all(): 37 + lemmas = Lemma.objects.filter(old=False)
  38 + lemmas = lemmas.filter(status__in=statuses)
  39 + now = datetime.datetime.now().strftime('%Y%m%d')
  40 + examples_file = codecs.open(os.path.join(BASE_PATH, 'detailed_examples_%s.csv' % now), 'wt', 'utf-8')
  41 + examples_file.write(u'%s\n' % u'\t'.join(LABELS))
  42 + for lemma in lemmas.order_by('entry_obj__name'):
188 print lemma 43 print lemma
189 - lemma_entry = lemma.entry 44 + lemma_entry = lemma.entry_obj.name
190 lemma_status = lemma.status.status 45 lemma_status = lemma.status.status
191 for frame in lemma.frames.order_by('text_rep').all(): 46 for frame in lemma.frames.order_by('text_rep').all():
192 - frame_text_rep = frame.text_rep 47 + if not lemma.phraseology_ready() and frame.phraseologic:
  48 + continue
193 frame_opinion = lemma.frame_opinions.filter(frame=frame).all()[0].value 49 frame_opinion = lemma.frame_opinions.filter(frame=frame).all()[0].value
194 for example in lemma.nkjp_examples.filter(frame=frame): 50 for example in lemma.nkjp_examples.filter(frame=frame):
195 - sentence = example.sentence.replace('\n', ' ').replace('\r', '').replace('\t', ' ')  
196 - #tagged_sentence = get_tagged_sentence(sentence) mozna wlaczyc w razie czego  
197 - tagged_sentence = ''  
198 - example_opinion = example.opinion.opinion  
199 - example_source = example.source.source 51 + sentence = example.sentence.replace('\n', ' ').replace('\r', '').replace('\t', ' ')
200 arguments_selection = u'%s' % u' + '.join([u'%s' % selection.__unicode__() for selection in example.arguments.all()]) 52 arguments_selection = u'%s' % u' + '.join([u'%s' % selection.__unicode__() for selection in example.arguments.all()])
201 - examples_file.write(u'%s\t%s\t%d\t%s\t%s\t%s\t%s\t\t%s\t%s\t%s\n' % (lemma_entry,  
202 - lemma_status,  
203 - frame.id,  
204 - frame_text_rep,  
205 - frame_opinion,  
206 - sentence,  
207 - tagged_sentence,  
208 - example_opinion,  
209 - example_source,  
210 - arguments_selection)) 53 + examples_file.write(u'%s\t%s\t%d\t%s\t%s\t%s\t%s\t%s\t%s\n' % (lemma_entry,
  54 + lemma_status,
  55 + frame.id,
  56 + frame.get_position_spaced_text_rep(),
  57 + frame_opinion,
  58 + sentence,
  59 + example.opinion.opinion,
  60 + example.source.source,
  61 + arguments_selection))
211 finally: 62 finally:
212 examples_file.close() 63 examples_file.close()
213 -  
214 -def get_arguments(arguments_selection):  
215 - arguments = []  
216 - positions = arguments_selection.split('+')  
217 - for position in positions:  
218 - category = ''  
219 - position = position.strip().lstrip('[').rstrip(']')  
220 - if position.startswith('subj'):  
221 - category = 'subj'  
222 - elif position.startswith('obj'):  
223 - category = 'obj'  
224 - selection = re.findall(ur'<.*?>', position)[0]  
225 - for arg in selection.lstrip('<').rstrip('>').split(';'):  
226 - if category:  
227 - arguments.append(u'%s:%s' % (category, arg))  
228 - else:  
229 - arguments.append(arg)  
230 - arguments = sort_arguments(arguments)  
231 - return arguments  
232 -  
233 -def sort_arguments(arguments):  
234 - sorted_arguments = []  
235 - for type in ARG_TYPES_BY_PRIORITY:  
236 - for arg in arguments:  
237 - (arg_type, attributes, category) = arg_from_text_rep(arg)  
238 - if arg_type == type:  
239 - sorted_arguments.append(arg)  
240 - return sorted_arguments  
241 -  
242 -def arg_from_text_rep(argument):  
243 - attributes = []  
244 - category = ''  
245 - if ':' in argument:  
246 - arg_split = argument.split(':')  
247 - category = arg_split[0]  
248 - argument = arg_split[1]  
249 - arg_parts = argument.split('(')  
250 - arg_type = arg_parts[0]  
251 - if len(arg_parts) > 1:  
252 - attributes = arg_parts[1].rstrip(')').replace("'", "").split(',')  
253 - return arg_type, attributes, category  
254 -  
255 -def tokenize_sentence(sentence):  
256 - token_idx = 0  
257 - tokens = []  
258 - chunks = sentence.split('] ')  
259 - for chunk in chunks:  
260 - if chunk.startswith('[[['):  
261 - token = {'idx': token_idx,  
262 - 'orth': '[',  
263 - 'base': '[',  
264 - 'tags': ['interp'],  
265 - 'argument': '',  
266 - 'argument_start': -1,  
267 - 'argument_end': -1,  
268 - 'occupied': False}  
269 - elif chunk.startswith('>'):  
270 - token = {'idx': token_idx,  
271 - 'orth': '>',  
272 - 'base': '>',  
273 - 'tags': ['interp'],  
274 - 'argument': '',  
275 - 'argument_start': -1,  
276 - 'argument_end': -1,  
277 - 'occupied': False}  
278 - else:  
279 - chunk_parts = chunk.split('[')  
280 - (base, tags) = (chunk_parts[1].split('>'))#rstrip(']').)  
281 - orth = chunk_parts[0].lower()  
282 - token = {'idx': token_idx,  
283 - 'orth': orth,  
284 - 'base': base,  
285 - 'tags': tags.split(':'),  
286 - 'argument': '',  
287 - 'argument_start': -1,  
288 - 'argument_end': -1,  
289 - 'occupied': False}  
290 - tokens.append(token)  
291 - token_idx += 1  
292 - return tokens  
293 -  
294 -def case_conversion(case, category):  
295 - if case == 'instr':  
296 - case = 'inst'  
297 - elif case == 'part':  
298 - case = u'gen|acc'  
299 - elif case == 'str' and (category == 'subj' or not category):  
300 - case = 'nom'  
301 - elif case == 'str' and category == 'obj':  
302 - case = 'acc'  
303 - return case  
304 -  
305 -def number_conversion(number):  
306 - if number == '_':  
307 - number = ''  
308 - return number  
309 -  
310 -def aspect_conversion(aspect):  
311 - if aspect == '_':  
312 - aspect = ''  
313 - return aspect  
314 -  
315 -def phrase_type_conversion(phrase_type):  
316 - if phrase_type == u'że':  
317 - phrase_type = u'że|iż'  
318 - elif phrase_type == u'żeby':  
319 - phrase_type = u'żeby|aby|by|iżby|ażeby'  
320 - elif phrase_type == u'żeby2':  
321 - phrase_type = u'że|iż|żeby' # !!! nie wiem co ma być pod żeby2  
322 - elif phrase_type == u'int':  
323 - phrase_type = u'kiedy|jak|czy' # !!! nie wiem co ma być pod int  
324 - elif phrase_type == u'jakby':  
325 - phrase_type = u'jakby|jak gdyby'  
326 - return phrase_type  
327 64
328 -def complex_prep_lemma_conversion(lemma):  
329 - if lemma == u'powodu':  
330 - lemma = u'powód'  
331 - elif lemma == u'sprawie':  
332 - lemma = u'sprawa'  
333 - elif lemma == u'kwestii':  
334 - lemma = u'kwestia'  
335 - elif lemma == u'roli':  
336 - lemma = u'rola'  
337 - elif lemma == u'okolicach':  
338 - lemma = u'okolica'  
339 - elif lemma == u'czasie':  
340 - lemma = u'czas'  
341 - elif lemma == u'stronie':  
342 - lemma = u'strona'  
343 - elif lemma == u'początku':  
344 - lemma = u'początek'  
345 - return lemma  
346 -  
347 -def proper_case(token, case):  
348 - possible_cases = [case]  
349 - proper_case = False  
350 - if '|' in case:  
351 - possible_cases = case.split('|')  
352 - if len(set(token['tags']) & set(possible_cases)) == 1:  
353 - proper_case = True  
354 - return proper_case  
355 -  
356 -def get_matching_token(tokens, orth='', base='', case='',  
357 - number='', phrase_type='', aspect='',  
358 - degree='', pos=''):  
359 -# print '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'  
360 -# print 'orth: %s, base: %s, case: %s, number: %s, pos: %s' % (orth,  
361 -# base,  
362 -# case,  
363 -# number,  
364 -# pos)  
365 - matching_token = None  
366 - for token in tokens:  
367 - match = True  
368 - if token['occupied']:  
369 - continue  
370 - if orth and not token['orth'] == orth:  
371 - match = False  
372 - if base and not token['base'] == base:  
373 - match = False  
374 - if case and not proper_case(token, case):  
375 - match = False  
376 - if number and not number in token['tags']:  
377 - match = False  
378 - if aspect and not aspect in token['tags']:  
379 - match = False  
380 - if degree and not degree in token['tags']:  
381 - match = False  
382 - if pos and not pos in token['tags']:  
383 - match = False  
384 - if match:  
385 - matching_token = token  
386 - break  
387 - return matching_token  
388 -  
389 -def fill_token_data(token, argument, start_idx, end_idx):  
390 - token['argument'] = argument  
391 - token['argument_start'] = start_idx  
392 - token['argument_end'] = end_idx  
393 -  
394 -def mark_fixed(tokens, argument, tresc):  
395 - tresc_idx = 0  
396 - tresc_orths = tresc.split()  
397 - tresc_start = -1  
398 - tresc_end = -1  
399 - for token in tokens:  
400 - if token['occupied']:  
401 - continue  
402 - if token['orth'] == tresc_orths[tresc_idx]:  
403 - tresc_idx += 1  
404 - if tresc_start == -1:  
405 - tresc_start = tokens.index(token)  
406 - else:  
407 - tresc_idx = 0  
408 - tresc_start = -1  
409 - if tresc_idx == len(tresc_orths):  
410 - tresc_end = tokens.index(token)  
411 - break  
412 - for token in tokens[tresc_start:tresc_end+1]:  
413 - fill_token_data(token, argument, tresc_start, tresc_end)  
414 - token['occupied'] = True  
415 -  
416 -def mark_preplexnp(tokens, argument, preposition, case, number, lemma):  
417 - preposition_token = get_matching_token(tokens, orth='', base=preposition,  
418 - case=case, number='', pos='prep') # !! case nie powinien być zgodny z lematem??  
419 - start_idx = tokens.index(preposition_token)  
420 - lemma_token = get_matching_token(tokens[start_idx:], orth='', base=lemma,  
421 - case=case, number=number, pos='subst')  
422 - end_idx = tokens.index(lemma_token)  
423 - fill_token_data(preposition_token, argument, start_idx, end_idx)  
424 - fill_token_data(lemma_token, argument, start_idx, end_idx)  
425 - for token in tokens[start_idx:end_idx+1]:  
426 - token['occupied'] = True  
427 -  
428 -def mark_comprepnp(tokens, argument, preposition, lemma):  
429 - if preposition == u'co' and lemma == u'do':  
430 - preposition_token = get_matching_token(tokens, orth='co', base='',  
431 - case='', number='', pos='subst') # !! czy pos nie powinien byc subst  
432 - start_idx = tokens.index(preposition_token)  
433 - lemma_token = get_matching_token(tokens[start_idx:], orth='do', base='',  
434 - case='', number='', pos='prep')  
435 - end_idx = tokens.index(lemma_token)  
436 - else:  
437 - preposition_token = get_matching_token(tokens, orth='', base=preposition,  
438 - case='', number='', pos='prep') # !! case nie powinien być zgodny z lematem??  
439 - start_idx = tokens.index(preposition_token)  
440 - lemma_base = complex_prep_lemma_conversion(lemma)  
441 - lemma_token = get_matching_token(tokens[start_idx:], orth='', base=lemma_base,  
442 - case='', number='', pos='subst')  
443 - end_idx = tokens.index(lemma_token)  
444 - noun_token = get_matching_token(tokens[end_idx+1:], orth='', base='',  
445 - case='', number='', pos='subst') # za proste, glupoty wychodza  
446 - end_idx = tokens.index(noun_token)  
447 - fill_token_data(preposition_token, argument, start_idx, end_idx)  
448 - fill_token_data(lemma_token, argument, start_idx, end_idx)  
449 - fill_token_data(noun_token, argument, start_idx, end_idx)  
450 - for token in tokens[start_idx:end_idx+1]:  
451 - token['occupied'] = True  
452 -  
453 -def mark_prepnp(tokens, argument, preposition, case):  
454 - preposition_token = get_matching_token(tokens, orth='', base=preposition,  
455 - case=case, number='', pos='prep') # !! case nie powinien być zgodny z lematem??  
456 - start_idx = tokens.index(preposition_token)  
457 - noun_token = get_matching_token(tokens[start_idx:], orth='', base='',  
458 - case=case, number='', pos='subst')  
459 - end_idx = tokens.index(noun_token)  
460 - fill_token_data(preposition_token, argument, start_idx, end_idx)  
461 - fill_token_data(noun_token, argument, start_idx, end_idx)  
462 - for token in tokens[start_idx:end_idx+1]:  
463 - token['occupied'] = True  
464 -  
465 -def mark_phrase(tokens, start_idx, argument, phrase_type):  
466 - for phrase in phrase_type.split('|'):  
467 - phrase_parts = phrase.split()  
468 - if len(phrase_parts) > 1:  
469 - phrase_token1 = get_matching_token(tokens[start_idx+1:], orth='', base=phrase_parts[0],  
470 - case='', number='', pos='')  
471 - if phrase_token1:  
472 - phrase_start_idx = tokens.index(phrase_token1)  
473 - phrase_token2 = get_matching_token(tokens[phrase_start_idx+1:], orth='', base=phrase_parts[1],  
474 - case='', number='', pos='')  
475 - if phrase_token1 and phrase_token2:  
476 - phrase_end_idx = tokens.index(phrase_token2)  
477 - fill_token_data(phrase_token1, argument, phrase_start_idx, phrase_end_idx)  
478 - fill_token_data(phrase_token2, argument, phrase_start_idx, phrase_end_idx)  
479 - break  
480 - else:  
481 - phrase_token = get_matching_token(tokens[start_idx+1:], base=phrase)  
482 - if phrase_token:  
483 - phrase_end_idx = tokens.index(phrase_token)  
484 - phrase_start_idx = phrase_end_idx  
485 - fill_token_data(phrase_token, argument, phrase_start_idx, phrase_end_idx)  
486 - break  
487 - return phrase_start_idx, phrase_end_idx  
488 -  
489 -def mark_prepncp(tokens, argument, preposition, case, phrase_type):  
490 - preposition_token = get_matching_token(tokens, orth='', base=preposition,  
491 - case=case, number='', pos='prep') # !! case nie powinien być zgodny z lematem??  
492 - start_idx = tokens.index(preposition_token)  
493 - noun_token = get_matching_token(tokens[start_idx:], orth='', base='',  
494 - case=case, number='', pos='subst')  
495 - end_idx = tokens.index(noun_token)  
496 - xx, end_idx = mark_phrase(tokens, end_idx, argument, phrase_type)  
497 - fill_token_data(preposition_token, argument, start_idx, end_idx)  
498 - fill_token_data(noun_token, argument, start_idx, end_idx)  
499 - for token in tokens[start_idx:end_idx+1]:  
500 - token['occupied'] = True  
501 -  
502 -def mark_prepadjp(tokens, argument, preposition, case):  
503 - preposition_token = get_matching_token(tokens, orth='', base=preposition,  
504 - case=case, number='', pos='prep') # !! case nie powinien być zgodny z lematem??  
505 - start_idx = tokens.index(preposition_token)  
506 - adj_token = get_matching_token(tokens[start_idx:], orth='', base='',  
507 - case=case, number='', pos='adj')  
508 - end_idx = tokens.index(adj_token)  
509 - fill_token_data(preposition_token, argument, start_idx, end_idx)  
510 - fill_token_data(adj_token, argument, start_idx, end_idx)  
511 - for token in tokens[start_idx:end_idx+1]:  
512 - token['occupied'] = True  
513 -  
514 -def mark_lexnp(tokens, argument, case, number, lemma):  
515 - lemma_token = get_matching_token(tokens, orth='', base=lemma,  
516 - case=case, number=number, pos='subst')  
517 - start_idx = tokens.index(lemma_token)  
518 - end_idx = start_idx  
519 - fill_token_data(lemma_token, argument, start_idx, end_idx)  
520 - for token in tokens[start_idx:end_idx+1]:  
521 - token['occupied'] = True  
522 -  
523 -def mark_np(tokens, argument, case):  
524 - noun_token = get_matching_token(tokens, orth='', base='',  
525 - case=case, number='', pos='subst')  
526 - start_idx = tokens.index(noun_token)  
527 - end_idx = start_idx  
528 - fill_token_data(noun_token, argument, start_idx, end_idx)  
529 - for token in tokens[start_idx:end_idx+1]:  
530 - token['occupied'] = True  
531 -  
532 -def mark_ncp(tokens, argument, case, phrase_type):  
533 - noun_token = get_matching_token(tokens, orth='', base='',  
534 - case=case, number='', pos='subst')  
535 - start_idx = tokens.index(noun_token)  
536 - xx, end_idx = mark_phrase(tokens, start_idx, argument, phrase_type)  
537 - fill_token_data(noun_token, argument, start_idx, end_idx)  
538 - for token in tokens[start_idx:end_idx+1]:  
539 - token['occupied'] = True  
540 -  
541 -def mark_cp(tokens, argument, phrase_type):  
542 - start_idx, end_idx = mark_phrase(tokens, -1, argument, phrase_type)  
543 - for token in tokens[start_idx:end_idx+1]:  
544 - token['occupied'] = True  
545 -  
546 -def mark_adjp(tokens, argument, case):  
547 - adj_token = get_matching_token(tokens, case=case, pos='adj')  
548 - start_idx = tokens.index(adj_token)  
549 - end_idx = start_idx  
550 - fill_token_data(adj_token, argument, start_idx, end_idx)  
551 - for token in tokens[start_idx:end_idx+1]:  
552 - token['occupied'] = True  
553 -  
554 -def mark_infp(tokens, argument, aspect):  
555 - inf_token = get_matching_token(tokens, orth='', base='',  
556 - case='', number='', aspect=aspect, pos='inf')  
557 - start_idx = tokens.index(inf_token)  
558 - end_idx = start_idx  
559 - fill_token_data(inf_token, argument, start_idx, end_idx)  
560 - for token in tokens[start_idx:end_idx+1]:  
561 - token['occupied'] = True  
562 -  
563 -def mark_lemma(tokens, argument, lemma, sie, aspect):  
564 - lemma_token = get_matching_token(tokens, orth='', base=lemma,  
565 - case='', number='', aspect=aspect,  
566 - pos='')  
567 - start_idx = tokens.index(lemma_token)  
568 - if sie:  
569 - sie_token = get_matching_token(tokens[start_idx:], orth='', base=u'się',  
570 - case='', number='', pos='')  
571 - end_idx = tokens.index(sie_token)  
572 - fill_token_data(sie_token, argument, start_idx, end_idx)  
573 - else:  
574 - end_idx = start_idx  
575 - fill_token_data(lemma_token, argument, start_idx, end_idx)  
576 -  
577 - for token in tokens[start_idx:end_idx+1]:  
578 - token['occupied'] = True  
579 -  
580 -def mark_nonch(tokens, argument, nonch):  
581 - for pronoun in nonch.split('|'):  
582 - pronoun_parts = pronoun.split()  
583 - if len(pronoun_parts) > 1:  
584 - matched_tokens = []  
585 - parts_matched = True  
586 - pronoun_start_idx = 0  
587 - for pronoun_part in pronoun_parts:  
588 - pronoun_token = get_matching_token(tokens[pronoun_start_idx+1:], orth='', base=pronoun_part,  
589 - case='', number='', pos='')  
590 - if pronoun_token:  
591 - pronoun_start_idx = tokens.index(pronoun_token)  
592 - matched_tokens.append(pronoun_token)  
593 - else:  
594 - parts_matched = False  
595 - break  
596 - if parts_matched:  
597 - start_idx = tokens.index(matched_tokens[0])  
598 - end_idx = tokens.index(matched_tokens[-1])  
599 - for token in matched_tokens:  
600 - fill_token_data(token, argument, start_idx, end_idx)  
601 - break  
602 - else:  
603 - pronoun_token = get_matching_token(tokens, orth='', base=pronoun,  
604 - case='', number='', pos='')  
605 - if pronoun_token:  
606 - start_idx = tokens.index(pronoun_token)  
607 - end_idx = start_idx  
608 - fill_token_data(pronoun_token, argument, start_idx, end_idx)  
609 - break  
610 - for token in tokens[start_idx:end_idx+1]:  
611 - token['occupied'] = True  
612 -  
613 -def mark_advp(tokens, argument, advp_type):  
614 - if advp_type == 'pron':  
615 - possible_bases = ['tak', 'jak']  
616 - for base in possible_bases:  
617 - advp_token = get_matching_token(tokens, base=base, pos='adv')  
618 - if advp_token:  
619 - break  
620 - elif advp_type == 'misc':  
621 - possible_degrees = ['com', 'sup']  
622 - for degree in possible_degrees:  
623 - advp_token = get_matching_token(tokens, degree=degree, pos='adv')  
624 - if advp_token:  
625 - break  
626 - start_idx = tokens.index(advp_token)  
627 - end_idx = start_idx  
628 - fill_token_data(advp_token, argument, start_idx, end_idx)  
629 - for token in tokens[start_idx:end_idx+1]:  
630 - token['occupied'] = True  
631 -  
632 -def count_occupied(tokens):  
633 - occupied_tokens = [token for token in tokens if token['occupied']]  
634 - return len(occupied_tokens)  
635 -  
636 -def mark_arg_in_sentence(argument, sentence_tokens):  
637 - (arg_type, attributes, category) = arg_from_text_rep(argument)  
638 - if arg_type == 'fixed':  
639 - mark_fixed(sentence_tokens, argument, attributes[0])  
640 - elif arg_type == 'preplexnp':  
641 - preposition = attributes[0]  
642 - case = case_conversion(attributes[1], category)  
643 - number = number_conversion(attributes[2])  
644 - lemma = attributes[3]  
645 - mark_preplexnp(sentence_tokens, argument, preposition, case, number, lemma)  
646 - elif arg_type == 'comprepnp':  
647 - complex_preposition_parts = attributes[0].split()  
648 - preposition = complex_preposition_parts[0]  
649 - lemma = complex_preposition_parts[1]  
650 - mark_comprepnp(sentence_tokens, argument, preposition, lemma)  
651 - elif arg_type == 'prepnp':  
652 - preposition = attributes[0]  
653 - case = case_conversion(attributes[1], category)  
654 - mark_prepnp(sentence_tokens, argument, preposition, case)  
655 - elif arg_type == 'prepncp':  
656 - preposition = attributes[0]  
657 - case = case_conversion(attributes[1], category)  
658 - phrase_type = phrase_type_conversion(attributes[2])  
659 - mark_prepncp(sentence_tokens, argument, preposition, case, phrase_type)  
660 - elif arg_type == 'prepadjp':  
661 - preposition = attributes[0]  
662 - case = case_conversion(attributes[1], category)  
663 - mark_prepadjp(sentence_tokens, argument, preposition, case)  
664 - elif arg_type == 'lexnp':  
665 - case = case_conversion(attributes[0], category)  
666 - number = number_conversion(attributes[1])  
667 - lemma = attributes[2]  
668 - mark_lexnp(sentence_tokens, argument, case, number, lemma)  
669 - elif arg_type == 'np':  
670 - case = case_conversion(attributes[0], category)  
671 - mark_np(sentence_tokens, argument, case)  
672 - elif arg_type == 'ncp':  
673 - case = case_conversion(attributes[0], category)  
674 - phrase_type = phrase_type_conversion(attributes[1])  
675 - mark_ncp(sentence_tokens, argument, case, phrase_type)  
676 - elif arg_type == 'cp':  
677 - phrase_type = phrase_type_conversion(attributes[0])  
678 - mark_cp(sentence_tokens, argument, phrase_type)  
679 - elif arg_type == 'adjp':  
680 - case = case_conversion(attributes[0], category)  
681 - mark_adjp(sentence_tokens, argument, case)  
682 - elif arg_type == 'infp':  
683 - aspect = aspect_conversion(attributes[0])  
684 - mark_infp(sentence_tokens, argument, aspect)  
685 - elif arg_type == u'nonch':  
686 - nonch = u'co|coś|nic|to|to samo co'  
687 - mark_nonch(sentence_tokens, argument, nonch)  
688 - elif arg_type == 'lemma':  
689 - lemma = attributes[0]  
690 - sie = attributes[1]  
691 - aspect = aspect_conversion(attributes[2])  
692 - mark_lemma(sentence_tokens, argument, lemma, sie, aspect)  
693 - elif arg_type == 'advp':  
694 - advp_type = attributes[0]  
695 - mark_advp(sentence_tokens, argument, advp_type)  
696 -# elif arg_type == 'xp':  
697 -# argument_obj = Argument.objects.get(text_rep=argument)  
698 -# realizations = [realization.argument.text_rep for realization in argument_obj.realizations.all()]  
699 -# start_occupacy = count_occupied(sentence_tokens)  
700 -# for realization in sort_arguments(realizations):  
701 -# mark_arg_in_sentence(realization, sentence_tokens)  
702 -# if count_occupied(sentence_tokens) > start_occupacy:  
703 -# break  
704 -  
705 -  
706 -def cut_sentence_chunks(sentence_tokens):  
707 - endpoint = -1  
708 - ignore = False  
709 - sentence_chunks = []  
710 - for token in sentence_tokens:  
711 - if token['argument'] and not ignore:  
712 - orths = [tok['orth'] for tok in sentence_tokens[token['argument_start']:token['argument_end']+1] if tok['argument']]  
713 - arg_realization = u'%s (%s)' % (u' '.join(orths), token['argument'])  
714 - endpoint = token['argument_end']  
715 - sentence_chunks.append(arg_realization)  
716 - ignore = True  
717 - if token['idx'] == endpoint:  
718 - ignore = False  
719 - return u' '.join(sentence_chunks)  
720 -  
721 -def get_sentence_chunk(arguments, sentence_tokens):  
722 - for arg in arguments:  
723 - mark_arg_in_sentence(arg, sentence_tokens)  
724 - return cut_sentence_chunks(sentence_tokens)  
725 -  
726 -def create_lemma_argument(lemma_entry, frame_text_rep):  
727 - frame_parts = frame_text_rep.split(':')  
728 - sie = frame_parts[0]  
729 - aspect = frame_parts[2]  
730 - frame_structure = frame_parts[3]  
731 - if not sie and u'refl' in frame_structure:  
732 - sie = u'się'  
733 - argument = u'lemma(%s,%s,%s)' % (lemma_entry, sie, aspect)  
734 - return argument  
735 -  
736 -def get_arguments_coverage():  
737 - try:  
738 - first_line = True 65 +def write_examples(statuses):
  66 + try:
739 examples_file = codecs.open(os.path.join(BASE_PATH, 67 examples_file = codecs.open(os.path.join(BASE_PATH,
740 - 'detailed_examples_v2.csv'), 'rt', 'utf-8')  
741 - output_file = codecs.open(os.path.join(BASE_PATH,  
742 - 'detailed_examples_cover_v2.csv'), 'wt', 'utf-8')  
743 - output_file.write(u'%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n' % LABELS)  
744 - for line in examples_file:  
745 - if first_line:  
746 - first_line = False  
747 - continue  
748 - if 'Error!!!' in line:  
749 - continue  
750 - line = line.strip()  
751 - example_data = line.split('\t')  
752 - lemma_entry = example_data[0]  
753 - lemma_status = example_data[1]  
754 - frame_text_rep = example_data[2]  
755 - frame_opinion = example_data[3]  
756 - sentence = example_data[4]  
757 - tagged_sentence = example_data[5]  
758 - example_opinion = example_data[6]  
759 - example_source = example_data[7]  
760 - arguments_selection = example_data[8]  
761 - if not tagged_sentence:  
762 - sentence_chunk = u'Error!!! Błąd tagowania.'  
763 - else:  
764 -# print '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'  
765 -# print sentence  
766 - lemma_argument = create_lemma_argument(lemma_entry, frame_text_rep)  
767 - arguments = [lemma_argument]  
768 - arguments.extend(get_arguments(arguments_selection))  
769 - sentence_tokens = tokenize_sentence(tagged_sentence)  
770 - try:  
771 - sentence_chunk = get_sentence_chunk(arguments, sentence_tokens)  
772 - except:  
773 - sentence_chunk = u'Error!!! Nie dopasowano wszystkich argumentów.'  
774 - output_file.write(u'%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n' % (lemma_entry,  
775 - lemma_status,  
776 - frame_text_rep,  
777 - frame_opinion,  
778 - sentence,  
779 - tagged_sentence,  
780 - sentence_chunk,  
781 - example_opinion,  
782 - example_source,  
783 - arguments_selection)) 68 + 'examples_gotowe_plus.txt'), 'wt', 'utf-8')
  69 + for lemma in Lemma.objects.filter(old=False).filter(status__in=statuses).order_by('entry').all():
  70 + print lemma
  71 + examples_file.write(lemma.entry+'\n')
  72 + for frame in lemma.frames.order_by('text_rep').all():
  73 + if lemma.frame_opinions.get(frame=frame).value.value != u'zła':
  74 + examples_file.write('\t%s\n' % frame.text_rep)
  75 + for example in lemma.nkjp_examples.filter(frame=frame):
  76 + examples_file.write('\t\t--> %s\n' % example.sentence)
  77 + examples_file.write('\n\n')
784 finally: 78 finally:
785 - examples_file.close()  
786 - output_file.close()  
787 -  
788 -def get_examples():  
789 - q_statuses = []  
790 - for status in STATUSES_LS:  
791 - q_statuses.append(Q(status__status=status))  
792 - write_detailed_examples(q_statuses)  
793 -# write_examples(q_statuses)  
794 -# get_arguments_coverage()  
795 -  
796 \ No newline at end of file 79 \ No newline at end of file
  80 + examples_file.close()
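
The rewritten write_detailed_examples() emits one tab-separated row per pinned example, with the header taken from the new LABELS tuple. Below is a minimal reader sketch, not part of the commit: only the column layout is taken from the diff, and the concrete file name is just an example of the detailed_examples_<date>.csv pattern.
--------------------------------------------
# -*- coding: utf-8 -*-
# Sketch only: read the TSV written by write_detailed_examples().
import codecs

def read_detailed_examples(path):
    """Yield one {label: value} dict per pinned example row."""
    tsv = codecs.open(path, 'rt', 'utf-8')
    try:
        # first line holds the LABELS header
        labels = tsv.readline().rstrip('\n').split('\t')
        for line in tsv:
            yield dict(zip(labels, line.rstrip('\n').split('\t')))
    finally:
        tsv.close()

for row in read_detailed_examples('detailed_examples_20150616.csv'):
    print row[u'schemat'], row[u'przykład']
--------------------------------------------
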
dictionary/models.py
1 #-*- coding:utf-8 -*- 1 #-*- coding:utf-8 -*-
2 2
3 -#Copyright (c) 2012, Bartłomiej Nitoń  
4 -#All rights reserved.  
5 -  
6 -#Redistribution and use in source and binary forms, with or without modification, are permitted provided  
7 -#that the following conditions are met:  
8 -  
9 -# Redistributions of source code must retain the above copyright notice, this list of conditions and  
10 -# the following disclaimer.  
11 -# Redistributions in binary form must reproduce the above copyright notice, this list of conditions  
12 -# and the following disclaimer in the documentation and/or other materials provided with the distribution.  
13 -  
14 -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED  
15 -# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A  
16 -# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR  
17 -# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED  
18 -# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)  
19 -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING  
20 -# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE  
21 -# POSSIBILITY OF SUCH DAMAGE.  
22 -  
23 from django.contrib.auth.models import User 3 from django.contrib.auth.models import User
24 from django.db.models import * 4 from django.db.models import *
25 5
26 -from wordnet.models import LexicalUnit  
27 -  
28 class Configuration(Model): 6 class Configuration(Model):
29 name = CharField(max_length=16, primary_key=True, unique=True, db_column='nazwa_konfiguracji') 7 name = CharField(max_length=16, primary_key=True, unique=True, db_column='nazwa_konfiguracji')
30 selected_conf = BooleanField(db_column='wybrana_konfiguracja', default=False) 8 selected_conf = BooleanField(db_column='wybrana_konfiguracja', default=False)
@@ -250,6 +228,20 @@ class Lemma(Model): @@ -250,6 +228,20 @@ class Lemma(Model):
250 except Frame_Opinion.DoesNotExist: 228 except Frame_Opinion.DoesNotExist:
251 pass 229 pass
252 return frame_opinion_name 230 return frame_opinion_name
  231 +
  232 + def phraseology_ready(self):
  233 + actual_status = self.status
  234 + ready_f_status = Lemma_Status.objects.get(type__sym_name='ready_f')
  235 + if actual_status.priority >= ready_f_status.priority:
  236 + return True
  237 + return False
  238 +
  239 + def semantics_ready(self):
  240 + actual_status = self.status
  241 + ready_s_status = Lemma_Status.objects.get(type__sym_name='ready_s')
  242 + if actual_status.priority >= ready_s_status.priority:
  243 + return True
  244 + return False
253 245
254 class Meta: 246 class Meta:
255 db_table = 'hasla' 247 db_table = 'hasla'
@@ -439,15 +431,22 @@ def positions_to_frame(positions, reflex, negativity, predicativity, aspect): @@ -439,15 +431,22 @@ def positions_to_frame(positions, reflex, negativity, predicativity, aspect):
439 if frame_obj.has_phraseologic_arguments(): 431 if frame_obj.has_phraseologic_arguments():
440 frame_obj.phraseologic = True 432 frame_obj.phraseologic = True
441 frame_obj.save() 433 frame_obj.save()
442 - return frame_obj 434 + return frame_obj
  435 +
  436 +def get_schemata_by_type(sch_type, schemata_query):
  437 + if sch_type == 'normal':
  438 + schemata_query = get_normal_schemata_only(schemata_query)
  439 + elif sch_type == 'phraseologic':
  440 + schemata_query = get_phraseologic_schemata_only(schemata_query)
  441 + return schemata_query
443 442
444 -def get_phraseologic_frames_only(frames_query):  
445 - frames_query = frames_query.filter(phraseologic=True)  
446 -# phraseologic_arg_models = Argument_Model.objects.filter(phraseologic=True)  
447 -# phraseologic_arg_models_names = [arg_model.arg_model_name for arg_model in phraseologic_arg_models.all()]  
448 -# frames_query = frames_query.filter(Q(positions__arguments__type__in=phraseologic_arg_models_names) |  
449 -# Q(positions__arguments__atributes__values__argument__type__in=phraseologic_arg_models_names))  
450 - return frames_query 443 +def get_normal_schemata_only(schemata_query):
  444 + schemata_query = schemata_query.filter(phraseologic=False)
  445 + return schemata_query
  446 +
  447 +def get_phraseologic_schemata_only(schemata_query):
  448 + schemata_query = schemata_query.filter(phraseologic=True)
  449 + return schemata_query
451 450
452 451
453 class NKJP_Example(Model): 452 class NKJP_Example(Model):
@@ -745,13 +744,6 @@ class Argument(Model): @@ -745,13 +744,6 @@ class Argument(Model):
745 break 744 break
746 return is_fully_lexicalized 745 return is_fully_lexicalized
747 746
748 - class Meta:  
749 - permissions = (  
750 - ('view_realization', u'Może oglądać realizacje argumentów.'),  
751 - ('create_realization', u'Może kreować realizacje argumentów.'),  
752 - ('view_arg_stats', u'Może oglądać statystyki argumentów.'),  
753 - )  
754 -  
755 def __unicode__(self): 747 def __unicode__(self):
756 return '%s' % (self.text_rep) 748 return '%s' % (self.text_rep)
757 749
@@ -764,7 +756,21 @@ class Argument(Model): @@ -764,7 +756,21 @@ class Argument(Model):
764 for value in attr.values.filter(type__sym_name=u'parameter'): 756 for value in attr.values.filter(type__sym_name=u'parameter'):
765 if value.parameter.type.realization_only: 757 if value.parameter.type.realization_only:
766 return True 758 return True
767 - return False 759 + return False
  760 +
  761 + def main_phrase_type(self):
  762 + category_attrs = self.atributes.filter(type=u'KATEGORIA')
  763 + if not category_attrs.exists() or category_attrs.all()[0].values.count() == 0: # xp without subtypes
  764 + return self
  765 + else:
  766 + return Argument.objects.get(text_rep=u'%s(%s)' % (self.type, category_attrs.all()[0].selection_mode.name))
  767 +
  768 + class Meta:
  769 + permissions = (
  770 + ('view_realization', u'Może oglądać realizacje argumentów.'),
  771 + ('create_realization', u'Może kreować realizacje argumentów.'),
  772 + ('view_arg_stats', u'Może oglądać statystyki argumentów.'),
  773 + )
768 774
769 def sort_arguments(arguments): 775 def sort_arguments(arguments):
770 return sortArguments(arguments) 776 return sortArguments(arguments)
@@ -1382,12 +1388,9 @@ class Entry(Model): @@ -1382,12 +1388,9 @@ class Entry(Model):
1382 ('view_semantics', u'Może oglądać semantykę.'), 1388 ('view_semantics', u'Może oglądać semantykę.'),
1383 ) 1389 )
1384 1390
1385 - def lexical_units(self):  
1386 - return LexicalUnit.objects.filter(Q(base = self.name)|Q(base = self.name + u' się'))  
1387 -  
1388 def actual_frames(self): 1391 def actual_frames(self):
1389 frame_ids = [] 1392 frame_ids = []
1390 - lexical_units = self.lexical_units().order_by('sense') 1393 + lexical_units = self.meanings.order_by('sense')
1391 for lexical_unit in lexical_units: 1394 for lexical_unit in lexical_units:
1392 frame_ids.extend([f.id for f in lexical_unit.actual_frames()]) 1395 frame_ids.extend([f.id for f in lexical_unit.actual_frames()])
1393 return get_model('semantics', 'SemanticFrame').objects.filter(id__in=list(set(frame_ids))) 1396 return get_model('semantics', 'SemanticFrame').objects.filter(id__in=list(set(frame_ids)))
@@ -1406,6 +1409,9 @@ class Entry(Model): @@ -1406,6 +1409,9 @@ class Entry(Model):
1406 'realizations': realizations_ids}) 1409 'realizations': realizations_ids})
1407 return matching_connections 1410 return matching_connections
1408 1411
  1412 + def actual_lemma(self):
  1413 + return self.lemmas.get(old=False)
  1414 +
1409 def __unicode__(self): 1415 def __unicode__(self):
1410 return self.name 1416 return self.name
1411 1417
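
The new Lemma.phraseology_ready()/semantics_ready() helpers, get_schemata_by_type() and Entry.actual_lemma() are meant to be combined by callers. A hypothetical sketch under that assumption follows: visible_schemata() itself is not part of the commit, while the model, manager and field names (frames, phraseologic, text_rep) are taken from the diff above.
--------------------------------------------
# -*- coding: utf-8 -*-
# Illustrative sketch only, not part of the commit.
from dictionary.models import get_schemata_by_type

def visible_schemata(entry, sch_type='normal'):
    """Schemata of the current (non-archival) lemma of an entry; phraseologic
    schemata stay hidden until the lemma has reached the 'ready_f' status."""
    lemma = entry.actual_lemma()
    schemata = get_schemata_by_type(sch_type, lemma.frames)
    if not lemma.phraseology_ready():
        schemata = schemata.filter(phraseologic=False)
    return schemata.order_by('text_rep')
--------------------------------------------
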
dictionary/saving.py
@@ -94,7 +94,7 @@ def update_connections(lemma_id, reconnect_operations, user): @@ -94,7 +94,7 @@ def update_connections(lemma_id, reconnect_operations, user):
94 94
95 def disconnect_all_examples_operations(lemma): 95 def disconnect_all_examples_operations(lemma):
96 operations = [] 96 operations = []
97 - lex_units = lemma.entry_obj.lexical_units().all() 97 + lex_units = lemma.entry_obj.meanings.all()
98 for lu in lex_units: 98 for lu in lex_units:
99 lu_examples = LexicalUnitExamples.objects.filter(lexical_unit=lu) 99 lu_examples = LexicalUnitExamples.objects.filter(lexical_unit=lu)
100 for lu_ex in lu_examples: 100 for lu_ex in lu_examples:
@@ -112,6 +112,6 @@ def disconnect_example_operation(example_dict, example_obj): @@ -112,6 +112,6 @@ def disconnect_example_operation(example_dict, example_obj):
112 lu = LexicalUnit.objects.get(id=example_dict['lexical_unit']) 112 lu = LexicalUnit.objects.get(id=example_dict['lexical_unit'])
113 return {'operation': 'remove_example', 'unit': lu.id, 'example': example_obj.id} 113 return {'operation': 'remove_example', 'unit': lu.id, 'example': example_obj.id}
114 114
115 -def reconnect_examples(operations):  
116 - update_meanings(operations) 115 +def reconnect_examples(lemma, operations):
  116 + update_meanings(lemma.id, operations)
117 117
118 \ No newline at end of file 118 \ No newline at end of file
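
reconnect_examples() now takes the lemma as its first argument and forwards lemma.id to update_meanings(). A hypothetical call site illustrating the new signature; refresh_example_links() is not part of the commit, only the two imported functions and their arguments come from the diff.
--------------------------------------------
# -*- coding: utf-8 -*-
# Hypothetical caller, sketched to show the signature change only.
from dictionary.saving import (disconnect_all_examples_operations,
                               reconnect_examples)

def refresh_example_links(lemma):
    operations = disconnect_all_examples_operations(lemma)
    reconnect_examples(lemma, operations)  # previously: reconnect_examples(operations)
--------------------------------------------
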
dictionary/static/css/frame_table.css
@@ -80,8 +80,7 @@ table.ActiveFrameTable td.ColumnCategory, table.InactiveFrameTable td.ColumnCate @@ -80,8 +80,7 @@ table.ActiveFrameTable td.ColumnCategory, table.InactiveFrameTable td.ColumnCate
80 } 80 }
81 81
82 table.ActiveFrameTable td[selected=selected], table.InactiveFrameTable td[selected=selected] { 82 table.ActiveFrameTable td[selected=selected], table.InactiveFrameTable td[selected=selected] {
83 - position: relative;  
84 - box-shadow: 0px 0px 0px 4px grey; 83 + border-width: 5px;
85 } 84 }
86 85
87 table.ActiveFrameTable .Opinion, table.InactiveFrameTable .Opinion { 86 table.ActiveFrameTable .Opinion, table.InactiveFrameTable .Opinion {
dictionary/static/css/lemmas_filtering.css 0 → 100644
  1 +hr.filtersSeparator {
  2 + border-top: medium double #333;
  3 +}
  4 +
  5 +hr.argSeparator {
  6 + border-top: 1px dashed #8c8b8b;
  7 +}
  8 +
  9 +hr.alterSeparator {
  10 + border-top: 1px solid #8c8b8b;
  11 +}
dictionary/static/js/lemma-view.js
@@ -22,25 +22,6 @@ var nkjp_source_tab = ax_nkjp_source_vals; @@ -22,25 +22,6 @@ var nkjp_source_tab = ax_nkjp_source_vals;
22 // te wartosci maja zasieg na wszystkie hasla 22 // te wartosci maja zasieg na wszystkie hasla
23 window.schemas = new Array(); 23 window.schemas = new Array();
24 var lemma_id = -1; 24 var lemma_id = -1;
25 - var aspect_vals = [];  
26 - var reflex_vals = [];  
27 - var neg_vals = [];  
28 - var pred_vals = [];  
29 - var opinion_vals = [];  
30 - var filter_aspect_val = '*';  
31 - var filter_reflex_val = '*';  
32 - var filter_neg_val = '*';  
33 - var filter_pred_val = '*';  
34 - var filter_opinion_val = '*';  
35 - var filter_position_val = '.*';  
36 - var filter_argument_val = '.*';  
37 - var prev_filter_aspect_val = '*';  
38 - var prev_filter_reflex_val = '*';  
39 - var prev_filter_neg_val = '*';  
40 - var prev_filter_pred_val = '*';  
41 - var prev_filter_opinion_val = '*';  
42 - var prev_filter_position_val = '.*';  
43 - var prev_filter_argument_val = '.*';  
44 var prev_lemma_id = -1; 25 var prev_lemma_id = -1;
45 26
46 // te wartosci trzeba czyscic przy ladowaniu innego hasla 27 // te wartosci trzeba czyscic przy ladowaniu innego hasla
@@ -68,7 +49,7 @@ var nkjp_source_tab = ax_nkjp_source_vals; @@ -68,7 +49,7 @@ var nkjp_source_tab = ax_nkjp_source_vals;
68 var lemma_entry = ''; 49 var lemma_entry = '';
69 var prev_lemma_entry = ''; 50 var prev_lemma_entry = '';
70 var selected_notes_row_id = -1; 51 var selected_notes_row_id = -1;
71 - var can_modify = false; 52 + //var can_modify = false;
72 53
73 //////////////////////////////////////////////////////////////// 54 ////////////////////////////////////////////////////////////////
74 55
@@ -84,33 +65,6 @@ function resetLemmaVersions() { @@ -84,33 +65,6 @@ function resetLemmaVersions() {
84 window.nkjp_lemma_examples); 65 window.nkjp_lemma_examples);
85 frames_modif.push(lemma_version); 66 frames_modif.push(lemma_version);
86 } 67 }
87 -  
88 -function initiateFrameFilters()  
89 -{  
90 - $.ajaxJSON({  
91 - method: 'get',  
92 - url: ajax_get_frame_filter_options,  
93 - data: {  
94 - //message_id: message_id,  
95 - },  
96 - callback: function(result) {  
97 - window.aspect_vals = ['*'];  
98 - window.reflex_vals = ['*'];  
99 - window.neg_vals = ['*'];  
100 - window.pred_vals = ['*'];  
101 - window.opinion_vals = ['*'];  
102 - $.merge(window.aspect_vals, result['aspect_options']);  
103 - $.merge(window.reflex_vals, result['reflex_options']);  
104 - $.merge(window.neg_vals, result['neg_options']);  
105 - $.merge(window.pred_vals, result['pred_options']);  
106 - $.merge(window.opinion_vals, result['opinion_options']);  
107 - },  
108 -  
109 - error_callback: function(xhr, status, error) {  
110 - error_alert(status + ': ' + error);  
111 - },  
112 - });  
113 -}  
114 68
115 function argsToRemove(example, elementToRemoveId) 69 function argsToRemove(example, elementToRemoveId)
116 { 70 {
@@ -211,198 +165,7 @@ function addPinnedExamplesDialog() { @@ -211,198 +165,7 @@ function addPinnedExamplesDialog() {
211 165
212 /////////////////////////////////////////////////////////////// 166 ///////////////////////////////////////////////////////////////
213 167
214 -function filter_update(id)  
215 -{  
216 - if(id == 'frame_filter')  
217 - {  
218 - window.filter_aspect_val = $('#frame_filter #aspect_filter').val();  
219 - window.filter_reflex_val = $('#frame_filter #reflex_filter').val();  
220 - window.filter_neg_val = $('#frame_filter #neg_filter').val();  
221 - window.filter_pred_val = $('#frame_filter #pred_filter').val();  
222 - window.filter_opinion_val = $('#frame_filter #opinion_filter').val();  
223 - window.filter_position_val = $('#frame_filter #position_filter').val();  
224 - window.filter_argument_val = $('#frame_filter #argument_filter').val();  
225 - }  
226 - else if(id == 'prev_frame_filter')  
227 - {  
228 - window.prev_filter_aspect_val = $('#prev_frame_filter #aspect_filter').val();  
229 - window.prev_filter_reflex_val = $('#prev_frame_filter #reflex_filter').val();  
230 - window.prev_filter_neg_val = $('#prev_frame_filter #neg_filter').val();  
231 - window.prev_filter_pred_val = $('#prev_frame_filter #pred_filter').val();  
232 - window.prev_filter_opinion_val = $('#prev_frame_filter #opinion_filter').val();  
233 - window.prev_filter_position_val = $('#prev_frame_filter #position_filter').val();  
234 - window.prev_filter_argument_val = $('#prev_frame_filter #argument_filter').val();  
235 - }  
236 -}  
237 -  
238 -  
239 -function draw_frames_filter(id)  
240 -{  
241 - var frame_filter = document.getElementById(id);  
242 -  
243 - p = document.createElement('p');  
244 - text = document.createTextNode("Aspekt: ");  
245 - p.appendChild(text);  
246 - var select = document.createElement('select');  
247 - select.setAttribute('id', 'aspect_filter');  
248 - select.setAttribute('name', 'ASPEKT');  
249 - p.appendChild(select);  
250 - for(var i=0; i<aspect_vals.length; i++)  
251 - {  
252 - var option = document.createElement('option');  
253 - option.setAttribute('value', aspect_vals[i]);  
254 - option.appendChild(document.createTextNode(aspect_vals[i]));  
255 - select.appendChild(option);  
256 - }  
257 - frame_filter.appendChild(p);  
258 -  
259 - p = document.createElement('p');  
260 - text = document.createTextNode("Zwrotność: ");  
261 - p.appendChild(text);  
262 - select = document.createElement('select');  
263 - select.setAttribute('id', 'reflex_filter');  
264 - select.setAttribute('name', 'ZWROTNOSC');  
265 - p.appendChild(select);  
266 - for(var i=0; i<reflex_vals.length; i++)  
267 - {  
268 - var option = document.createElement('option');  
269 - option.setAttribute('value', reflex_vals[i]);  
270 - option.appendChild(document.createTextNode(reflex_vals[i]));  
271 - select.appendChild(option);  
272 - }  
273 - frame_filter.appendChild(p);  
274 -  
275 - p = document.createElement('p');  
276 - text = document.createTextNode("Negatywność: ");  
277 - p.appendChild(text);  
278 - select = document.createElement('select');  
279 - select.setAttribute('id', 'neg_filter');  
280 - select.setAttribute('name', 'NEGATYWNOŚĆ');  
281 - p.appendChild(select);  
282 - for(var i=0; i<neg_vals.length; i++)  
283 - {  
284 - var option = document.createElement('option');  
285 - option.setAttribute('value', neg_vals[i]);  
286 - option.appendChild(document.createTextNode(neg_vals[i]));  
287 - select.appendChild(option);  
288 - }  
289 - frame_filter.appendChild(p);  
290 -  
291 - p = document.createElement('p');  
292 - text = document.createTextNode("Predykatywność: ");  
293 - p.appendChild(text);  
294 - select = document.createElement('select');  
295 - select.setAttribute('id', 'pred_filter');  
296 - select.setAttribute('name', 'PREDYKATYWNOŚĆ');  
297 - p.appendChild(select);  
298 - for(var i=0; i<pred_vals.length; i++)  
299 - {  
300 - var option = document.createElement('option');  
301 - option.setAttribute('value', pred_vals[i]);  
302 - option.appendChild(document.createTextNode(pred_vals[i]));  
303 - select.appendChild(option);  
304 - }  
305 - frame_filter.appendChild(p);  
306 -  
307 - p = document.createElement('p');  
308 - text = document.createTextNode("Opinia: ");  
309 - p.appendChild(text);  
310 - select = document.createElement('select');  
311 - select.setAttribute('id', 'opinion_filter');  
312 - select.setAttribute('name', 'OPINIA');  
313 - p.appendChild(select);  
314 - for(var i=0; i<opinion_vals.length; i++)  
315 - {  
316 - var option = document.createElement('option');  
317 - option.setAttribute('value', opinion_vals[i]);  
318 - option.appendChild(document.createTextNode(opinion_vals[i]));  
319 - select.appendChild(option);  
320 - }  
321 - frame_filter.appendChild(p);  
322 -  
323 - p = document.createElement('p');  
324 - text = document.createTextNode("Typ frazy: ");  
325 - p.appendChild(text);  
326 - select = document.createElement('input');  
327 - select.setAttribute('id', 'argument_filter');  
328 - select.setAttribute('name', 'ARGUMENT');  
329 - p.appendChild(select);  
330 - frame_filter.appendChild(p);  
331 -  
332 - p = document.createElement('p');  
333 - text = document.createTextNode("Pozycja: ");  
334 - p.appendChild(text);  
335 - select = document.createElement('input');  
336 - select.setAttribute('id', 'position_filter');  
337 - select.setAttribute('name', 'POZYCJA');  
338 - p.appendChild(select);  
339 - frame_filter.appendChild(p);  
340 -  
341 - if(id == 'frame_filter')  
342 - {  
343 - $('#frame_filter #aspect_filter').val(window.filter_aspect_val);  
344 - $('#frame_filter #reflex_filter').val(window.filter_reflex_val);  
345 - $('#frame_filter #neg_filter').val(window.filter_neg_val);  
346 - $('#frame_filter #pred_filter').val(window.filter_pred_val);  
347 - $('#frame_filter #opinion_filter').val(window.filter_opinion_val);  
348 - $('#frame_filter #position_filter').val(window.filter_position_val);  
349 - $('#frame_filter #argument_filter').val(window.filter_argument_val);  
350 - }  
351 - else if(id == 'prev_frame_filter')  
352 - {  
353 - $('#prev_frame_filter #aspect_filter').val(window.prev_filter_aspect_val);  
354 - $('#prev_frame_filter #reflex_filter').val(window.prev_filter_reflex_val);  
355 - $('#prev_frame_filter #neg_filter').val(window.prev_filter_neg_val);  
356 - $('#prev_frame_filter #pred_filter').val(window.prev_filter_pred_val);  
357 - $('#prev_frame_filter #opinion_filter').val(window.prev_filter_opinion_val);  
358 - $('#prev_frame_filter #position_filter').val(window.prev_filter_position_val);  
359 - $('#prev_frame_filter #argument_filter').val(window.prev_filter_argument_val);  
360 - }  
361 -  
362 - //attach autocomplete  
363 - $('#' + id + ' #argument_filter').autocomplete({  
364 - // triggers when selection performed  
365 - select: function(event, ui){  
366 - filter_update(id);  
367 - },  
368 - //define callback to format results  
369 - source: function(req, add){  
370 - //pass request to server  
371 - $.getJSON(ajax_argument_lookup, req, function(data) {  
372 - //create array for response objects  
373 - var suggestions = [];  
374 - $.each(data['result'], function(i, val){  
375 - suggestions.push(val[0]);  
376 - });  
377 - //pass array to callback  
378 - add(suggestions);  
379 - });  
380 - },  
381 - });  
382 - $('#' + id + ' #position_filter').autocomplete({  
383 - // triggers when selection performed  
384 - select: function(event, ui){  
385 - filter_update(id);  
386 - },  
387 - //define callback to format results  
388 - source: function(req, add){  
389 - //pass request to server  
390 - $.getJSON(ajax_position_lookup, req, function(data) {  
391 - //create array for response objects  
392 - var suggestions = [];  
393 - $.each(data['result'], function(i, val){  
394 - suggestions.push(val[0]);  
395 - });  
396 - //pass array to callback  
397 - add(suggestions);  
398 - });  
399 - },  
400 - });  
401 -}  
402 -  
403 -  
404 function load_content(id) { 168 function load_content(id) {
405 - ShowProgressAnimation();  
406 $('#add-table-elem-dialog').dialog('close'); 169 $('#add-table-elem-dialog').dialog('close');
407 if(lemma_id != id) 170 if(lemma_id != id)
408 $('#ready-note-dialog').dialog('close'); 171 $('#ready-note-dialog').dialog('close');
@@ -410,15 +173,17 @@ function load_content(id) { @@ -410,15 +173,17 @@ function load_content(id) {
410 173
411 if(window.activeLemmaPanel == 'preview_lemma') 174 if(window.activeLemmaPanel == 'preview_lemma')
412 { 175 {
  176 + ShowProgressAnimation();
413 $('#preview_lemma').load(ajax_lemma_preview, 'id='+id+'&main_lemma_id='+window.lemma_id, function(){ 177 $('#preview_lemma').load(ajax_lemma_preview, 'id='+id+'&main_lemma_id='+window.lemma_id, function(){
414 window.prev_lemma_id=id; 178 window.prev_lemma_id=id;
  179 + loadPrevSchemataAndExamples(true);
415 createSplitter('prevFramesSplit', 'prev-lemma-tables', 'prev_tabs'); 180 createSplitter('prevFramesSplit', 'prev-lemma-tables', 'prev_tabs');
416 areNewPreviewEntriesRelated(); 181 areNewPreviewEntriesRelated();
417 - HideProgressAnimation();  
418 }); 182 });
419 } 183 }
420 else 184 else
421 { 185 {
  186 + ShowProgressAnimation();
422 // czyszczenie wartosci 187 // czyszczenie wartosci
423 window.elem_in_bucket = ''; 188 window.elem_in_bucket = '';
424 window.selected_notes_row_id = -1; 189 window.selected_notes_row_id = -1;
@@ -446,10 +211,13 @@ function load_content(id) { @@ -446,10 +211,13 @@ function load_content(id) {
446 window.notesNotSaved = false; 211 window.notesNotSaved = false;
447 window.lemmaExNotSaved = false; 212 window.lemmaExNotSaved = false;
448 213
449 - $('#new_frames').load(ajax_new_frames, 'id='+id, function(){ 214 + $('#new_frames').load(ajax_new_frames, 'id='+id, function(data){
450 window.lemma_id = id; 215 window.lemma_id = id;
  216 +
  217 + loadSchemataAndExamples();
  218 +
451 createSplitter('framesSplit','new-frame-tables', 'tabs'); 219 createSplitter('framesSplit','new-frame-tables', 'tabs');
452 - if(window.can_modify) 220 + /*if(window.can_modify)
453 { 221 {
454 addSyntacticFramesPerm = user_has_perm('dictionary.add_syntactic_frames'); 222 addSyntacticFramesPerm = user_has_perm('dictionary.add_syntactic_frames');
455 addPhraseologicFramesPerm = user_has_perm('dictionary.add_phraseologic_frames'); 223 addPhraseologicFramesPerm = user_has_perm('dictionary.add_phraseologic_frames');
@@ -464,15 +232,7 @@ function load_content(id) { @@ -464,15 +232,7 @@ function load_content(id) {
464 $(document).bind('keydown', 'shift+d', function(evt){duplicateElement(); return false; }); 232 $(document).bind('keydown', 'shift+d', function(evt){duplicateElement(); return false; });
465 $(document).bind('keydown', 'shift+c', function(evt){copyElement(); return false; }); 233 $(document).bind('keydown', 'shift+c', function(evt){copyElement(); return false; });
466 $(document).bind('keydown', 'shift+v', function(evt){pasteElement(); return false; }); 234 $(document).bind('keydown', 'shift+v', function(evt){pasteElement(); return false; });
467 - $(document).bind('keydown', 'shift+w', function(evt){  
468 - if(window.change)  
469 - {  
470 - error_alert('Przed walidacją/zmianą statusu hasło musi zostać zapisane.');  
471 - return false;  
472 - }  
473 - validate_new_frames(false, false);  
474 - return false;  
475 - }); 235 + $(document).bind('keydown', 'shift+w', function(evt){validateSchemata(); return false; });
476 if(addSyntacticFramesPerm) { 236 if(addSyntacticFramesPerm) {
477 $(document).bind('keydown', 'shift+x', function(evt){cutElement(); return false; }); 237 $(document).bind('keydown', 'shift+x', function(evt){cutElement(); return false; });
478 $(document).bind('keydown', 'shift+m', function(evt){reserveLemma(); return false; }); 238 $(document).bind('keydown', 'shift+m', function(evt){reserveLemma(); return false; });
@@ -485,18 +245,14 @@ function load_content(id) { @@ -485,18 +245,14 @@ function load_content(id) {
485 } 245 }
486 else 246 else
487 { 247 {
488 - $(document).unbind('keydown') 248 + $(document).unbind('keydown');
489 $.get(ajax_user_has_perm, {perm: 'dictionary.own_lemmas'}, function(result) { 249 $.get(ajax_user_has_perm, {perm: 'dictionary.own_lemmas'}, function(result) {
490 if(result['has_perm']) { 250 if(result['has_perm']) {
491 $(document).bind('keydown', 'shift+m', function(evt){reserveLemma(); return false; }); 251 $(document).bind('keydown', 'shift+m', function(evt){reserveLemma(); return false; });
492 } 252 }
493 }); 253 });
494 - }  
495 -  
496 - window.frames_modif = new Array();  
497 - window.frames_modif_idx = 0;  
498 - var lemma_version = new Lemma_Version(window.schemas, window.nkjp_examples, window.nkjp_lemma_examples);  
499 - frames_modif.push(lemma_version); 254 + }*/
  255 +
500 if(document.getElementById("lemma_example_show")) 256 if(document.getElementById("lemma_example_show"))
501 { 257 {
502 draw_nkjp_table(document.getElementById("lemma_example_show"), '', window.nkjp_lemma_examples, 'NkjpLemmaTableRow', 'nkjpLemma_') 258 draw_nkjp_table(document.getElementById("lemma_example_show"), '', window.nkjp_lemma_examples, 'NkjpLemmaTableRow', 'nkjpLemma_')
@@ -506,20 +262,10 @@ function load_content(id) { @@ -506,20 +262,10 @@ function load_content(id) {
506 addPinnedExamplesDialog(); 262 addPinnedExamplesDialog();
507 $('#lemma_desc').load(ajax_get_lemma_desc, 'id='+id); 263 $('#lemma_desc').load(ajax_get_lemma_desc, 'id='+id);
508 areNewPreviewEntriesRelated(); 264 areNewPreviewEntriesRelated();
509 - HideProgressAnimation();  
510 refresh_example_propositions(); 265 refresh_example_propositions();
511 }); 266 });
512 $('#change_ctrl').load(ajax_change_ctrl, 'id='+id); 267 $('#change_ctrl').load(ajax_change_ctrl, 'id='+id);
513 $('#semantics').load(ajax_semantics, 'id='+id); 268 $('#semantics').load(ajax_semantics, 'id='+id);
514 - $('#examples').load(ajax_lemma_examples, 'id='+id, function(){  
515 - window.frames_modif = new Array(); // UWAGA, przestawic do lemma-view  
516 - window.frames_modif_idx = 0;  
517 - var lemma_version = new Lemma_Version(window.schemas, window.nkjp_examples, window.nkjp_lemma_examples); // TO  
518 - frames_modif.push(lemma_version);  
519 - draw_nkjp_table(document.getElementById("lemma_example_show"), '', window.nkjp_lemma_examples, 'NkjpLemmaTableRow', 'nkjpLemma_')  
520 - $("tr.NkjpLemmaTableRow").click(function(){  
521 - selectLemmaNkjpTr(this.id)});  
522 - });  
523 $('#status').load(ajax_lemma_status, 'id='+id, function(){ 269 $('#status').load(ajax_lemma_status, 'id='+id, function(){
524 $("#lemma-status-change button").click(validate_and_change_status); 270 $("#lemma-status-change button").click(validate_and_change_status);
525 $('#ready-note-dialog').dialog({ autoOpen: false, 271 $('#ready-note-dialog').dialog({ autoOpen: false,
@@ -539,12 +285,74 @@ function load_content(id) { @@ -539,12 +285,74 @@ function load_content(id) {
539 285
540 $('#preview_lemma').load(ajax_lemma_preview, 'id='+prevId+'&main_lemma_id='+id, function(){ 286 $('#preview_lemma').load(ajax_lemma_preview, 'id='+prevId+'&main_lemma_id='+id, function(){
541 window.prev_lemma_id=prevId; 287 window.prev_lemma_id=prevId;
  288 + loadPrevSchemataAndExamples(false);
542 createSplitter('prevFramesSplit', 'prev-lemma-tables', 'prev_tabs'); 289 createSplitter('prevFramesSplit', 'prev-lemma-tables', 'prev_tabs');
543 areNewPreviewEntriesRelated(); 290 areNewPreviewEntriesRelated();
544 }); 291 });
545 } 292 }
546 } 293 }
547 294
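  +// Bound to Shift+W above: refuses to run while the entry has unsaved changes
  +// (window.change), otherwise triggers schema validation via validate_new_frames().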
  295 +function validateSchemata() {
  296 + if(window.change) {
  297 + error_alert('Przed walidacją/zmianą statusu hasło musi zostać zapisane.');
  298 + return false;
  299 + }
  300 + validate_new_frames(false, false);
  301 +}
  302 +
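  +// Fetches the schemata, schema examples and lemma examples of the current entry
  +// (window.lemma_id) via ajax_get_schemata_and_examples, caches them on window,
  +// redraws the filtered frame table (read-only unless the server reports
  +// can_modify) and the lemma example table, and refreshes the counters.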
  303 +function loadSchemataAndExamples() {
  304 + $.ajaxJSON({
  305 + method: 'get',
  306 + url: ajax_get_schemata_and_examples,
  307 + data: {
  308 + lemma_id: window.lemma_id
  309 + },
  310 +
  311 + callback: function(result) {
  312 + window.schemas = serializedObjToObj(result['schemata']);
  313 + window.nkjp_examples = serializedNkjpToObj(result['examples']);
  314 + window.nkjp_lemma_examples = serializedNkjpToObj(result['lemma_examples']);
  315 + resetLemmaVersions();
  316 + var frame_class = 'InactiveFrameTable';
  317 + if(result['can_modify']) {
  318 + frame_class = 'ActiveFrameTable';
  319 + }
  320 + draw_filtered_frames(window.schemas, 'new-frame-tables', 'new-frame-table',
  321 + 'frame_filter', window.nkjp_examples, frame_class,
  322 + window.lemma_entry, window.lemma_entry);
  323 + $('#examples').load(ajax_lemma_examples, 'id='+window.lemma_id, function(){
  324 + draw_nkjp_table(document.getElementById("lemma_example_show"), '',
  325 + window.nkjp_lemma_examples, 'NkjpLemmaTableRow', 'nkjpLemma_')
  326 + $("tr.NkjpLemmaTableRow").click(function(){selectLemmaNkjpTr(this.id)});
  327 + HideProgressAnimation();
  328 + });
  329 + $("span#new-frames-count").empty();
  330 + $("span#new-frames-count").append(window.schemas.length);
  331 + $("span#lemma-examples-count").empty();
  332 + $("span#lemma-examples-count").append(window.nkjp_lemma_examples.length);
  333 + },
  334 + });
  335 +}
  336 +
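  +// Preview-panel counterpart of loadSchemataAndExamples(): loads the schemata and
  +// examples of window.prev_lemma_id and draws them as an inactive (read-only)
  +// table, optionally hiding the wait dialog when finished.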
  337 +function loadPrevSchemataAndExamples(hideWaitDialog) {
  338 + $.ajaxJSON({
  339 + method: 'get',
  340 + url: ajax_get_schemata_and_examples,
  341 + data: {
  342 + lemma_id: window.prev_lemma_id
  343 + },
  344 + callback: function(result) {
  345 + window.prev_frames = serializedObjToObj(result['schemata']);
  346 + window.prev_nkjp_examples = serializedNkjpToObj(result['examples']);
  347 + draw_filtered_frames(window.prev_frames, 'prev-lemma-tables', 'prev-lemma-table', 'prev_frame_filter',
  348 + window.prev_nkjp_examples, 'InactiveFrameTable', window.prev_lemma_entry);
  349 + if(hideWaitDialog) {
  350 + HideProgressAnimation();
  351 + }
  352 + },
  353 + });
  354 +}
  355 +
548 // class representing a version of an entry, used for undo and redo 356 // class representing a version of an entry, used for undo and redo
549 function Lemma_Version(schemas, nkjp_examples, nkjp_lemma_examples) 357 function Lemma_Version(schemas, nkjp_examples, nkjp_lemma_examples)
550 { 358 {
@@ -877,7 +685,7 @@ function needConfirmation(nkjpInstance) { @@ -877,7 +685,7 @@ function needConfirmation(nkjpInstance) {
877 function unpin_nkjp_example(example_tabId) 685 function unpin_nkjp_example(example_tabId)
878 { 686 {
879 if(example_tabId != -1 && 687 if(example_tabId != -1 &&
880 - !checkIfSemChangedAndAlert())// && !exampleGotAssignedSemantics(example_tabId)) 688 + !checkIfSemChangedAndAlert())
881 { 689 {
882 example_id = example_tabId.replace('nkjp_', ''); 690 example_id = example_tabId.replace('nkjp_', '');
883 for(var i=0; i<window.nkjp_examples.length; i++) 691 for(var i=0; i<window.nkjp_examples.length; i++)
@@ -1014,7 +822,7 @@ function getNkjpLemmaExampleInstance(nkjp_examples, example_id) @@ -1014,7 +822,7 @@ function getNkjpLemmaExampleInstance(nkjp_examples, example_id)
1014 } 822 }
1015 823
1016 function remove_semantic_example(example_id) { 824 function remove_semantic_example(example_id) {
1017 - if(example_id != -1 && !checkIfSemChangedAndAlert())// && !exampleGotAssignedSemantics(example_id)) 825 + if(example_id != -1 && !checkIfSemChangedAndAlert())
1018 { 826 {
1019 example_id = example_id.replace('nkjp_', ''); 827 example_id = example_id.replace('nkjp_', '');
1020 for(var i=0; i<nkjp_examples.length; i++) 828 for(var i=0; i<nkjp_examples.length; i++)
@@ -1074,9 +882,11 @@ function remove_example_from_lemma(lemma_id, example_id, examplesTabId) { @@ -1074,9 +882,11 @@ function remove_example_from_lemma(lemma_id, example_id, examplesTabId) {
1074 function addFrameClickEvents(tableClass, tableId) { 882 function addFrameClickEvents(tableClass, tableId) {
1075 selector = 'table.'+tableClass+'#'+tableId+' td'; 883 selector = 'table.'+tableClass+'#'+tableId+' td';
1076 if(tableId === 'new-frame-table') { 884 if(tableId === 'new-frame-table') {
1077 - $(selector).dblclick(function(e){  
1078 - e.stopPropagation();  
1079 - openEditForm(this.id)}); 885 + if(tableClass === 'ActiveFrameTable') {
  886 + $(selector).dblclick(function(e){
  887 + e.stopPropagation();
  888 + openEditForm(this.id)});
  889 + }
1080 $(selector).click(function(e){ 890 $(selector).click(function(e){
1081 e.stopPropagation(); 891 e.stopPropagation();
1082 selectTd(this.id)}); 892 selectTd(this.id)});
@@ -1106,17 +916,17 @@ function draw_frames(schemas, parent, table_name, nkjp_examples, table_class, le @@ -1106,17 +916,17 @@ function draw_frames(schemas, parent, table_name, nkjp_examples, table_class, le
1106 first = true; 916 first = true;
1107 for(var j=0; j<schemas.length; j++) 917 for(var j=0; j<schemas.length; j++)
1108 { 918 {
1109 - if(schemas[j].characteristics[3]==aspect_vals[k] && schemas[j].characteristics[0]==reflex_vals[i] &&  
1110 - schemas[j].characteristics[1]==neg_vals[l] && schemas[j].characteristics[2]==pred_vals[m]) 919 + if(schemas[j].characteristics[3]==aspect_vals[k].value && schemas[j].characteristics[0]==reflex_vals[i].value &&
  920 + schemas[j].characteristics[1]==neg_vals[l].value && schemas[j].characteristics[2]==pred_vals[m].value)
1111 { 921 {
1112 if(first) 922 if(first)
1113 { 923 {
1114 div = document.createElement('div'); 924 div = document.createElement('div');
1115 strong = document.createElement('strong'); 925 strong = document.createElement('strong');
1116 - if(reflex_vals[i])  
1117 - strong.appendChild(document.createTextNode(lemma_entry+" "+reflex_vals[i]+" "+"("+neg_vals[l]+","+pred_vals[m]+","+aspect_vals[k]+"):")); 926 + if(reflex_vals[i].value)
  927 + strong.appendChild(document.createTextNode(lemma_entry+" "+reflex_vals[i].name+" "+"("+neg_vals[l].name+","+pred_vals[m].name+","+aspect_vals[k].name+"):"));
1118 else 928 else
1119 - strong.appendChild(document.createTextNode(lemma_entry+" "+"("+neg_vals[l]+","+pred_vals[m]+","+aspect_vals[k]+"):")); 929 + strong.appendChild(document.createTextNode(lemma_entry+" "+"("+neg_vals[l].name+","+pred_vals[m].name+","+aspect_vals[k].name+"):"));
1120 div.appendChild(strong); 930 div.appendChild(strong);
1121 parent.appendChild(div); 931 parent.appendChild(div);
1122 first = false; 932 first = false;
@@ -1526,11 +1336,7 @@ function can_add_position_category(lemma_id) { @@ -1526,11 +1336,7 @@ function can_add_position_category(lemma_id) {
1526 } 1336 }
1527 1337
1528 function openEditForm(id) { 1338 function openEditForm(id) {
1529 - if(window.can_modify && !checkIfSemChangedAndAlert()) {  
1530 - /*if(schemaGotAssignedSemantics(id)) {  
1531 - semanticsAssignedAlert();  
1532 - }*/  
1533 - 1339 + if(!checkIfSemChangedAndAlert()) {
1534 editedFrameInstance = getFrameInstance(id, window.schemas); 1340 editedFrameInstance = getFrameInstance(id, window.schemas);
1535 elemInstance = getElementInstance(id, window.schemas); 1341 elemInstance = getElementInstance(id, window.schemas);
1536 addSyntacticFramesPerm = user_has_perm('dictionary.add_syntactic_frames'); 1342 addSyntacticFramesPerm = user_has_perm('dictionary.add_syntactic_frames');
@@ -2000,10 +1806,10 @@ function frame_form_submit() { @@ -2000,10 +1806,10 @@ function frame_form_submit() {
2000 else { 1806 else {
2001 edited_frame = getElementInstance(edited_id, schemas); 1807 edited_frame = getElementInstance(edited_id, schemas);
2002 var old_edited_frame_id = edited_frame['element'].id; 1808 var old_edited_frame_id = edited_frame['element'].id;
2003 - //edited_frame['element'].id = new_elem_id; tuta zmienilem  
2004 - edited_frame['element'].id = result['id']; 1809 + edited_frame['element'].id = new_elem_id; // changed here
  1810 + //edited_frame['element'].id = result['id'];
2005 edited_frame_id = edited_frame['element'].id; 1811 edited_frame_id = edited_frame['element'].id;
2006 - //new_elem_id--; 1812 + new_elem_id--;
2007 edited_frame['element'].text_rep = result['text_rep']; 1813 edited_frame['element'].text_rep = result['text_rep'];
2008 edited_frame['element'].characteristics = result['characteristics']; 1814 edited_frame['element'].characteristics = result['characteristics'];
2009 edited_frame['element'].opinion = result['opinion']; 1815 edited_frame['element'].opinion = result['opinion'];
@@ -3202,125 +3008,6 @@ function escape_regex(str) @@ -3202,125 +3008,6 @@ function escape_regex(str)
3202 split('}').join('\\}') 3008 split('}').join('\\}')
3203 } 3009 }
3204 3010
3205 -function has_positions(frame, pos_term)  
3206 -{  
3207 - var alternatives = pos_term.split('|');  
3208 - for(var h=0; h<alternatives.length; h++) {  
3209 - var allConjsMatch = true;  
3210 - var conjs = alternatives[h].split('&');  
3211 -  
3212 - for(var i=0; i<conjs.length; i++) {  
3213 - try {  
3214 - var matched_poss = [];  
3215 - var conj = conjs[i].trim();  
3216 - var regEx = conj;  
3217 - if (regEx.substring(0, 1) == '!') {  
3218 - regEx = regEx.substring(1);  
3219 - }  
3220 - var posRe = new RegExp('^'+escape_regex(regEx)+'$');  
3221 - matched_poss = $.grep(frame.positions,  
3222 - function(pos){  
3223 - return pos.text_rep.match(posRe);  
3224 - });  
3225 - if((matched_poss.length > 0 && conj.startsWith('!')) ||  
3226 - (matched_poss.length == 0 && !conj.startsWith('!'))) {  
3227 - allConjsMatch = false;  
3228 - break;  
3229 - }  
3230 - }  
3231 - catch(e) {  
3232 - allConjsMatch = false;  
3233 - break;  
3234 - }  
3235 - }  
3236 - if(allConjsMatch) {  
3237 - return true;  
3238 - }  
3239 - }  
3240 -  
3241 - return false;  
3242 -}  
3243 -  
3244 -function has_arguments(frame, arg_term) {  
3245 - var alternatives = arg_term.split('|');  
3246 - for(var h=0; h<alternatives.length; h++) {  
3247 - var allConjsMatch = true;  
3248 - var conjs = alternatives[h].split('&');  
3249 - for(var i=0; i<conjs.length; i++) {  
3250 - try {  
3251 - var matched_args = [];  
3252 - var conj = conjs[i].trim();  
3253 - var regEx = conj;  
3254 - if (regEx.substring(0, 1) == '!') {  
3255 - regEx = regEx.substring(1);  
3256 - }  
3257 - var argRe = new RegExp('^'+escape_regex(regEx)+'$');  
3258 -  
3259 - for(var j=0; j<frame.positions.length; j++) {  
3260 - matched_args = $.grep(frame.positions[j].arguments, function(arg) {  
3261 - return arg.text_rep.match(argRe);  
3262 - });  
3263 - if(matched_args.length > 0) {  
3264 - break;  
3265 - }  
3266 - }  
3267 - if((matched_args.length > 0 && conj.startsWith('!')) ||  
3268 - (matched_args.length == 0 && !conj.startsWith('!'))) {  
3269 - allConjsMatch = false;  
3270 - break;  
3271 - }  
3272 - }  
3273 - catch(e) {  
3274 - allConjsMatch = false;  
3275 - break;  
3276 - }  
3277 - }  
3278 - if(allConjsMatch){  
3279 - return true;  
3280 - }  
3281 - }  
3282 - return false;  
3283 -}  
3284 -  
3285 -function filter_frames(schemas, filter_id)  
3286 -{  
3287 - var aspect_val = $('#'+filter_id+' #aspect_filter').val();  
3288 - var reflex_val = $('#'+filter_id+' #reflex_filter').val();  
3289 - var neg_val = $('#'+filter_id+' #neg_filter').val();  
3290 - var pred_val = $('#'+filter_id+' #pred_filter').val();  
3291 - var opinion_val = $('#'+filter_id+' #opinion_filter').val();  
3292 - var position_val = $('#'+filter_id+' #position_filter').val().trim();  
3293 - var argument_val = $('#'+filter_id+' #argument_filter').val().trim();  
3294 - var filtered_frames = new Array();  
3295 -  
3296 - if(position_val == '.*')  
3297 - position_val = ''  
3298 - if(argument_val == '.*')  
3299 - argument_val = ''  
3300 -  
3301 - for(var i=0; i<schemas.length; i++)  
3302 - {  
3303 - if((schemas[i].characteristics[3] == aspect_val || aspect_val == '*')  
3304 - && (schemas[i].characteristics[0] == reflex_val || reflex_val == '*')  
3305 - && (schemas[i].characteristics[1] == neg_val || neg_val == '*')  
3306 - && (schemas[i].characteristics[2] == pred_val || pred_val == '*')  
3307 - && (schemas[i].opinion == opinion_val || opinion_val == '*'))  
3308 - {  
3309 - frameMatch = false;  
3310 - if(position_val)  
3311 - frameMatch = has_positions(schemas[i], position_val)  
3312 - if(argument_val && (frameMatch || !position_val))  
3313 - {  
3314 -  
3315 - frameMatch = has_arguments(schemas[i], argument_val)  
3316 - }  
3317 - if(frameMatch || (!argument_val && !position_val))  
3318 - filtered_frames.push(schemas[i]);  
3319 - }  
3320 - }  
3321 - return filtered_frames;  
3322 -}  
3323 -  
3324 function draw_filtered_frames(schemas, parent_id, table_id, filter_id, nkjp_examples, table_class, lemma_entry) 3011 function draw_filtered_frames(schemas, parent_id, table_id, filter_id, nkjp_examples, table_class, lemma_entry)
3325 { 3012 {
3326 var parent = document.getElementById(parent_id); 3013 var parent = document.getElementById(parent_id);
@@ -3781,11 +3468,6 @@ function restore_lemma() { @@ -3781,11 +3468,6 @@ function restore_lemma() {
3781 var assignedExamples = []; 3468 var assignedExamples = [];
3782 if(canModifyFrame(window.selected_id, window.schemas) && 3469 if(canModifyFrame(window.selected_id, window.schemas) &&
3783 !checkIfSemChangedAndAlert()) { 3470 !checkIfSemChangedAndAlert()) {
3784 -  
3785 - /*if(schemaGotAssignedSemantics(window.selected_id)) {  
3786 - semanticsAssignedAlert();  
3787 - }*/  
3788 -  
3789 assignedExamples = gotAssignedExample(nkjp_examples, selected_id, true); 3471 assignedExamples = gotAssignedExample(nkjp_examples, selected_id, true);
3790 if(assignedExamples.length == 0) { 3472 if(assignedExamples.length == 0) {
3791 schemas = removeFrameElement(selected_id, schemas); 3473 schemas = removeFrameElement(selected_id, schemas);
@@ -3801,9 +3483,6 @@ function restore_lemma() { @@ -3801,9 +3483,6 @@ function restore_lemma() {
3801 function addElement() { 3483 function addElement() {
3802 if(!checkIfSemChangedAndAlert() && 3484 if(!checkIfSemChangedAndAlert() &&
3803 (window.selected_id == -1 || canModifyFrame(window.selected_id, window.schemas))) { 3485 (window.selected_id == -1 || canModifyFrame(window.selected_id, window.schemas))) {
3804 - /*if(schemaGotAssignedSemantics(window.selected_id)) {  
3805 - semanticsAssignedAlert();  
3806 - }*/  
3807 window.schemas = addFrameElementDialog(window.selected_id, window.schemas); 3486 window.schemas = addFrameElementDialog(window.selected_id, window.schemas);
3808 } 3487 }
3809 } 3488 }
@@ -3941,10 +3620,6 @@ function restore_lemma() { @@ -3941,10 +3620,6 @@ function restore_lemma() {
3941 if(window.elem_in_bucket && !checkIfSemChangedAndAlert() && 3620 if(window.elem_in_bucket && !checkIfSemChangedAndAlert() &&
3942 (window.selected_id == -1 || 3621 (window.selected_id == -1 ||
3943 canModifyFrame(window.selected_id, window.schemas))) { 3622 canModifyFrame(window.selected_id, window.schemas))) {
3944 -  
3945 - /*if(schemaGotAssignedSemantics(window.selected_id)) {  
3946 - semanticsAssignedAlert();  
3947 - }*/  
3948 pasteFrameElement(selected_id, elem_in_bucket, schemas); 3623 pasteFrameElement(selected_id, elem_in_bucket, schemas);
3949 } 3624 }
3950 } 3625 }
@@ -3975,12 +3650,6 @@ function restore_lemma() { @@ -3975,12 +3650,6 @@ function restore_lemma() {
3975 canModifyFrame(window.selected_id, window.schemas) && 3650 canModifyFrame(window.selected_id, window.schemas) &&
3976 !checkIfSemChangedAndAlert()) 3651 !checkIfSemChangedAndAlert())
3977 { 3652 {
3978 - /*if(getElementInstance(selected_id, schemas)['type'] != 'frame' &&  
3979 - schemaGotAssignedSemantics(selected_id)) {  
3980 - semanticsAssignedAlert();  
3981 - return;  
3982 - }*/  
3983 -  
3984 elem_in_bucket = getElementInstance(selected_id, schemas); 3653 elem_in_bucket = getElementInstance(selected_id, schemas);
3985 3654
3986 var parent_elem = getParentInstance(selected_id, schemas); 3655 var parent_elem = getParentInstance(selected_id, schemas);
@@ -4093,7 +3762,7 @@ function restore_lemma() { @@ -4093,7 +3762,7 @@ function restore_lemma() {
4093 3762
4094 function delete_nkjp_example(example_id) 3763 function delete_nkjp_example(example_id)
4095 { 3764 {
4096 - if(example_id != -1 && !checkIfSemChangedAndAlert())// && !exampleGotAssignedSemantics(example_id)) 3765 + if(example_id != -1 && !checkIfSemChangedAndAlert())
4097 { 3766 {
4098 example_id = selected_example_id.replace('nkjp_', ''); 3767 example_id = selected_example_id.replace('nkjp_', '');
4099 for(var i=0; i<nkjp_examples.length; i++) 3768 for(var i=0; i<nkjp_examples.length; i++)
@@ -4128,7 +3797,7 @@ function restore_lemma() { @@ -4128,7 +3797,7 @@ function restore_lemma() {
4128 function delete_all_nkjp_examples(frame_id) 3797 function delete_all_nkjp_examples(frame_id)
4129 { 3798 {
4130 if(canModifyFrame(frame_id, window.schemas) && 3799 if(canModifyFrame(frame_id, window.schemas) &&
4131 - !checkIfSemChangedAndAlert())// && !schemaGotAssignedSemantics(frame_id)) 3800 + !checkIfSemChangedAndAlert())
4132 { 3801 {
4133 var new_example_tab = new Array(); 3802 var new_example_tab = new Array();
4134 for(var i=0; i<nkjp_examples.length; i++) 3803 for(var i=0; i<nkjp_examples.length; i++)
@@ -4187,7 +3856,7 @@ function restore_lemma() { @@ -4187,7 +3856,7 @@ function restore_lemma() {
4187 3856
4188 function modify_nkjp_example(example_id) 3857 function modify_nkjp_example(example_id)
4189 { 3858 {
4190 - if(example_id != -1 && !checkIfSemChangedAndAlert())// && !exampleGotAssignedSemantics(example_id)) 3859 + if(example_id != -1 && !checkIfSemChangedAndAlert())
4191 { 3860 {
4192 var example = ''; 3861 var example = '';
4193 for(var i=0; i<window.nkjp_examples.length; i++) 3862 for(var i=0; i<window.nkjp_examples.length; i++)
dictionary/static/js/lemma_grid.js
@@ -61,7 +61,6 @@ $(function(){ @@ -61,7 +61,6 @@ $(function(){
61 window['remap']? remap : undefined}, 61 window['remap']? remap : undefined},
62 62
63 gridComplete: function() { 63 gridComplete: function() {
64 - //grid.jqGrid('sortGrid', grid.jqGrid('getGridParam','sortname'), false, grid.jqGrid('getGridParam','sortorder'));  
65 var lemma_id = window.lemma_id; 64 var lemma_id = window.lemma_id;
66 var lastSelectedId = window.lastSelectedId; 65 var lastSelectedId = window.lastSelectedId;
67 if(window.activeLemmaPanel == 'preview_lemma') { 66 if(window.activeLemmaPanel == 'preview_lemma') {
@@ -156,7 +155,7 @@ $(function(){ @@ -156,7 +155,7 @@ $(function(){
156 $('#search-panel-dialog').dialog( "option", "title", "Sortowanie haseł:" ).load(ajax_sort_form).dialog('open'); 155 $('#search-panel-dialog').dialog( "option", "title", "Sortowanie haseł:" ).load(ajax_sort_form).dialog('open');
157 }); 156 });
158 $("#filter-button").click(function(e){ 157 $("#filter-button").click(function(e){
159 - $('#search-panel-dialog').dialog( "option", "title", "Filtrowanie haseł:" ).load(ajax_filter_form).dialog('open'); 158 + $('#search-panel-dialog').empty().dialog( "option", "title", "Filtrowanie haseł:" ).load(ajax_filter_form).dialog('open');
160 }); 159 });
161 160
162 $("#show-columns-button").click(function(){ 161 $("#show-columns-button").click(function(){
@@ -199,128 +198,6 @@ function createSearchDialog() { @@ -199,128 +198,6 @@ function createSearchDialog() {
199 width: 'auto' }); 198 width: 'auto' });
200 } 199 }
201 200
202 -function filter_form_submit() {  
203 - this_form = $(this);  
204 - form_data = this_form.serializeArray();  
205 -  
206 - var owner = '';  
207 - var vocabulary = '';  
208 - var status = '';  
209 - var filter_frames = false;  
210 -  
211 - form_data = $.map(form_data, function(elem)  
212 - {  
213 - if (elem.name != 'owner' && elem.name != 'vocabulary' &&  
214 - elem.name != 'status' &&  
215 - elem.name != 'reflex' && elem.name != 'negativity' && elem.name != 'aspect' &&  
216 - elem.name != 'has_argument' && elem.name != 'has_position' &&  
217 - elem.name != 'approver' && elem.name != 'has_message_from' &&  
218 - elem.name != 'filter_frames')  
219 - return elem;  
220 - else {  
221 - if (elem.name == 'owner')  
222 - owner = elem.value;  
223 - else if (elem.name == 'vocabulary')  
224 - vocabulary = elem.value;  
225 - else if (elem.name == 'status')  
226 - status = elem.value;  
227 - else if (elem.name == 'has_message_from')  
228 - has_message_from = elem.value;  
229 - else if (elem.name == 'reflex')  
230 - reflex = elem.value;  
231 - else if (elem.name == 'negativity')  
232 - negativity = elem.value;  
233 - else if (elem.name == 'aspect')  
234 - aspect = elem.value;  
235 - else if (elem.name == 'has_argument')  
236 - has_argument = elem.value;  
237 - else if (elem.name == 'has_position')  
238 - has_position = elem.value;  
239 - else if (elem.name == 'approver')  
240 - approver = elem.value;  
241 - else if (elem.name == 'filter_frames')  
242 - filter_frames = elem.value;  
243 - }  
244 - });  
245 -  
246 - form_data.push({name: 'owner', value: owner});  
247 - form_data.push({name: 'vocabulary', value: vocabulary});  
248 - form_data.push({name: 'status', value: status});  
249 - form_data.push({name: 'has_message_from', value: has_message_from});  
250 - form_data.push({name: 'reflex', value: reflex});  
251 - form_data.push({name: 'negativity', value: negativity});  
252 - form_data.push({name: 'aspect', value: aspect});  
253 - form_data.push({name: 'has_argument', value: has_argument});  
254 - form_data.push({name: 'has_position', value: has_position});  
255 - form_data.push({name: 'approver', value: approver});  
256 - form_data.push({name: 'filter_frames', value: filter_frames});  
257 -  
258 - act_lemma_id = window.prev_lemma_id;  
259 - if(window.activeLemmaPanel != 'preview_lemma')  
260 - act_lemma_id = window.lemma_id;  
261 -  
262 - form_data.push({name: 'lemma_id', value: act_lemma_id})  
263 -  
264 - $.ajaxJSON({  
265 - method: 'post',  
266 - url: ajax_filter_form_submit,  
267 - data: {  
268 - form_data: form_data  
269 - },  
270 -  
271 - callback: function(result) {  
272 - $('#search-panel-dialog').dialog('close');  
273 - if(result['filter_frames'])  
274 - {  
275 - if(window.activeLemmaPanel == 'preview_lemma')  
276 - {  
277 - window.prev_filter_reflex_val = result['reflex'];  
278 - window.prev_filter_neg_val = result['negativity'];  
279 - window.prev_filter_pred_val = result['predicativity'];  
280 - window.prev_filter_opinion_val = result['opinion'];  
281 - window.prev_filter_aspect_val = result['aspect'];  
282 - window.prev_filter_position_val = result['position'];  
283 - window.prev_filter_argument_val = result['argument'];  
284 - $('#prev_frame_filter #reflex_filter').val(result['reflex']);  
285 - $('#prev_frame_filter #neg_filter').val(result['negativity']);  
286 - $('#prev_frame_filter #pred_filter').val(result['predicativity']);  
287 - $('#prev_frame_filter #opinion_filter').val(result['opinion']);  
288 - $('#prev_frame_filter #aspect_filter').val(result['aspect']);  
289 - $('#prev_frame_filter #argument_filter').val(result['argument']);  
290 - $('#prev_frame_filter #position_filter').val(result['position']);  
291 - $('#prev_frame_filter #argument_filter').trigger('change');  
292 - }  
293 - else  
294 - {  
295 - window.filter_reflex_val = result['reflex'];  
296 - window.filter_neg_val = result['negativity'];  
297 - window.filter_pred_val = result['predicativity'];  
298 - window.filter_opinion_val = result['opinion'];  
299 - window.filter_aspect_val = result['aspect'];  
300 - window.filter_position_val = result['position'];  
301 - window.filter_argument_val = result['argument'];  
302 - $('#frame_filter #reflex_filter').val(result['reflex']);  
303 - $('#frame_filter #neg_filter').val(result['negativity']);  
304 - $('#frame_filter #pred_filter').val(result['predicativity']);  
305 - $('#frame_filter #opinion_filter').val(result['opinion']);  
306 - $('#frame_filter #aspect_filter').val(result['aspect']);  
307 - $('#frame_filter #argument_filter').val(result['argument']);  
308 - $('#frame_filter #position_filter').val(result['position']);  
309 - $('#frame_filter #argument_filter').trigger('change');  
310 - }  
311 - }  
312 - grid.trigger("reloadGrid");  
313 - },  
314 - error_callback: function(xhr, status, error) {  
315 - error_alert(status + ': ' + error);  
316 - },  
317 - bad_data_callback: function(result) {  
318 - return true;  
319 - },  
320 - });  
321 - return false;  
322 -}  
323 -  
324 function sort_form_submit() { 201 function sort_form_submit() {
325 this_form = $(this); 202 this_form = $(this);
326 form_data = this_form.serializeArray(); 203 form_data = this_form.serializeArray();
dictionary/static/js/lemmas_filtering.js 0 → 100644
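  +// Lemma grid filtering, replacing the filter_form_submit() removed from
  +// lemma_grid.js: serialises the filter form, folding the semantic-argument
  +// fields (negation, role, attribute, selectional preferences; alternatives
  +// separated by "or" fields) into a nested sem_arguments list before posting
  +// the form to ajax_filter_form_submit.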
  1 +function filter_form_submit() {
  2 + this_form = $(this);
  3 + form_data = this_form.serializeArray();
  4 +
  5 + var filter_frames = false;
  6 + var actSemArgument = {};
  7 + var relationalSemPref = {'relation': '',
  8 + 'role': '',
  9 + 'attribute': ''};
  10 + var semArgumentsAlternatives = [];
  11 + var semArguments = [];
  12 +
  13 + form_data = $.map(form_data, function(elem)
  14 + {
  15 + if (elem.name != 'filter_frames' && !isPartOfSemArgFilter(elem)) {
  16 + return elem;
  17 + }
  18 + else {
  19 + if(elem.name == 'filter_frames') {
  20 + filter_frames = elem.value;
  21 + }
  22 + else if(elem.name == 'negation') {
  23 + if(!jQuery.isEmptyObject(actSemArgument)) {
  24 + semArguments.push(actSemArgument);
  25 + }
  26 + actSemArgument = {'negation': elem.value,
  27 + 'role': '',
  28 + 'attribute': '',
  29 + 'general_prefs': [],
  30 + 'synset_prefs': [],
  31 + 'relational_prefs': []}
  32 + }
  33 + else if(elem.name == 'or') {
  34 + if(!jQuery.isEmptyObject(actSemArgument)) {
  35 + semArguments.push(actSemArgument);
  36 + actSemArgument = {};
  37 + }
  38 + semArgumentsAlternatives.push(semArguments);
  39 + semArguments = [];
  40 + }
  41 + else if(elem.name == 'role' || elem.name == 'attribute') {
  42 + actSemArgument[elem.name] = elem.value;
  43 + }
  44 + else if(elem.name == 'general_pref' || elem.name == 'synset_pref') {
  45 + actSemArgument[elem.name+'s'].push(elem.value);
  46 + }
  47 + else if(elem.name.startsWith('relational_pref')) {
  48 + if(elem.name.endsWith('relation')) {
  49 + relationalSemPref = {'relation': elem.value,
  50 + 'role': '',
  51 + 'attribute': ''};
  52 + }
  53 + else if(elem.name.endsWith('role')) {
  54 + relationalSemPref['role'] = elem.value;
  55 + }
  56 + else if(elem.name.endsWith('attribute')) {
  57 + relationalSemPref['attribute'] = elem.value;
  58 + actSemArgument['relational_prefs'].push(relationalSemPref);
  59 + relationalSemPref = {};
  60 + }
  61 + }
  62 + }
  63 + });
  64 + if(!jQuery.isEmptyObject(actSemArgument)) {
  65 + semArguments.push(actSemArgument);
  66 + }
  67 + if(semArguments.length > 0) {
  68 + semArgumentsAlternatives.push(semArguments);
  69 + }
  70 +
  71 +
  72 + form_data.push({name: 'filter_frames', value: filter_frames});
  73 + form_data.push({name: 'sem_arguments', value: semArgumentsAlternatives});
  74 +
  75 + act_lemma_id = window.prev_lemma_id;
  76 + if(window.activeLemmaPanel != 'preview_lemma')
  77 + act_lemma_id = window.lemma_id;
  78 +
  79 + form_data.push({name: 'lemma_id', value: act_lemma_id})
  80 +
  81 + $.ajaxJSON({
  82 + method: 'post',
  83 + url: ajax_filter_form_submit,
  84 + data: {
  85 + form_data: form_data
  86 + },
  87 +
  88 + callback: function(result) {
  89 + $('#search-panel-dialog').dialog('close');
  90 + if(result['filter_frames'])
  91 + {
  92 + if(window.activeLemmaPanel == 'preview_lemma')
  93 + {
  94 + window.prev_filter_schema_type_val = result['schema_type'];
  95 + window.prev_filter_reflex_val = result['reflex'];
  96 + window.prev_filter_neg_val = result['negativity'];
  97 + window.prev_filter_pred_val = result['predicativity'];
  98 + window.prev_filter_opinion_val = result['opinion'];
  99 + window.prev_filter_aspect_val = result['aspect'];
  100 + window.prev_filter_position_val = result['position'];
  101 + window.prev_filter_argument_val = result['argument'];
  102 + $('#prev_frame_filter #schema_type_filter').val(result['schema_type']);
  103 + $('#prev_frame_filter #reflex_filter').val(result['reflex']);
  104 + $('#prev_frame_filter #neg_filter').val(result['negativity']);
  105 + $('#prev_frame_filter #pred_filter').val(result['predicativity']);
  106 + $('#prev_frame_filter #opinion_filter').val(result['opinion']);
  107 + $('#prev_frame_filter #aspect_filter').val(result['aspect']);
  108 + $('#prev_frame_filter #argument_filter').val(result['argument']);
  109 + $('#prev_frame_filter #position_filter').val(result['position']);
  110 + $('#prev_frame_filter #argument_filter').trigger('change');
  111 + }
  112 + else
  113 + {
  114 + window.filter_schema_type_val = result['schema_type'];
  115 + window.filter_reflex_val = result['reflex'];
  116 + window.filter_neg_val = result['negativity'];
  117 + window.filter_pred_val = result['predicativity'];
  118 + window.filter_opinion_val = result['opinion'];
  119 + window.filter_aspect_val = result['aspect'];
  120 + window.filter_position_val = result['position'];
  121 + window.filter_argument_val = result['argument'];
  122 + $('#frame_filter #schema_type_filter').val(result['schema_type']);
  123 + $('#frame_filter #reflex_filter').val(result['reflex']);
  124 + $('#frame_filter #neg_filter').val(result['negativity']);
  125 + $('#frame_filter #pred_filter').val(result['predicativity']);
  126 + $('#frame_filter #opinion_filter').val(result['opinion']);
  127 + $('#frame_filter #aspect_filter').val(result['aspect']);
  128 + $('#frame_filter #argument_filter').val(result['argument']);
  129 + $('#frame_filter #position_filter').val(result['position']);
  130 + $('#frame_filter #argument_filter').trigger('change');
  131 + }
  132 + }
  133 + grid.trigger("reloadGrid");
  134 + },
  135 + error_callback: function(xhr, status, error) {
  136 + error_alert(status + ': ' + error);
  137 + },
  138 + bad_data_callback: function(result) {
  139 + return true;
  140 + },
  141 + });
  142 + return false;
  143 +}
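  +// Illustrative shape of the posted sem_arguments value (the role and preference
  +// values below are made up for the example):
  +// [ [ {negation: '', role: 'Initiator', attribute: '',
  +//      general_prefs: ['LUDZIE'], synset_prefs: [], relational_prefs: []} ],
  +//   [ /* further argument sets, separated by "or" fields */ ] ]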
  144 +
  145 +function isPartOfSemArgFilter(field) {
  146 + if(field.name == 'or' || field.name == 'negation' ||
  147 + field.name == 'role' || field.name == 'attribute' ||
  148 + field.name == 'general_pref' || field.name == 'synset_pref' ||
  149 + field.name.startsWith('relational_pref')) {
  150 + return true;
  151 + }
  152 + return false;
  153 +}
  154 +
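  +// Handlers used by the filter form to add and remove semantic-argument blocks,
  +// "or" alternatives and selectional-preference subforms (loaded via
  +// ajax_sem_arg_form and the ajax_*_preference_form urls).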
  155 +function addSemArgFilter(buttonElem) {
  156 + var semArgsElem = $(buttonElem).parent().parent();
  157 + semArgsElem.append('<p id="sem-argument"></p>');
  158 + semArgsElem.children().last().load(ajax_sem_arg_form);
  159 +}
  160 +
  161 +function addArgAlternative(buttonElem) {
  162 + var semArgsElem = $(buttonElem).parent().parent();
  163 + semArgsElem.append('<div><hr class="alterSeparator"><input type="hidden" name="or" value="or"><strong>lub</strong> <button type="button" onclick="removeAlternative(this)">Usuń</button></div>');
  164 +}
  165 +
  166 +function removeAlternative(buttonElem) {
  167 + $(buttonElem).parent().remove();
  168 +}
  169 +
  170 +function removeSemArgFilter(buttonElem) {
  171 + $(buttonElem).parent().parent().remove();
  172 +}
  173 +
  174 +function addSelectivePreferenceFilter(buttonElem) {
  175 + var selPrefsElem = $(buttonElem).parent().parent();
  176 + var selPrefType = selPrefsElem.find('#id_preference_type').first().val();
  177 + selPrefsElem.append('<p id="sel-preference"></p>');
  178 + if(selPrefType === 'general') {
  179 + selPrefsElem.children().last().load(ajax_general_preference_form);
  180 + }
  181 + else if(selPrefType === 'synset') {
  182 + selPrefsElem.children().last().load(ajax_synset_preference_form);
  183 + }
  184 + else if(selPrefType === 'relation') {
  185 + selPrefsElem.children().last().load(ajax_relational_preference_form);
  186 + }
  187 +}
  188 +
  189 +function removeSelPreferenceFilter(buttonElem) {
  190 + $(buttonElem).parent().remove();
  191 +}
dictionary/static/js/schemata_filtering.js 0 → 100644
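  +// Schema filtering for the edit and preview panels: keeps the currently selected
  +// filter values (filter_* / prev_filter_*), fetches the available filter options,
  +// builds the filter widgets and filters schemata by characteristics, opinion,
  +// positions and phrase types.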
  1 +var schema_type_vals = [];
  2 +var aspect_vals = [];
  3 +var reflex_vals = [];
  4 +var neg_vals = [];
  5 +var pred_vals = [];
  6 +var opinion_vals = [];
  7 +var filter_schema_type_val = '*';
  8 +var filter_aspect_val = '*';
  9 +var filter_reflex_val = '*';
  10 +var filter_neg_val = '*';
  11 +var filter_pred_val = '*';
  12 +var filter_opinion_val = '*';
  13 +var filter_position_val = '.*';
  14 +var filter_argument_val = '.*';
  15 +var prev_filter_schema_type_val = '*';
  16 +var prev_filter_aspect_val = '*';
  17 +var prev_filter_reflex_val = '*';
  18 +var prev_filter_neg_val = '*';
  19 +var prev_filter_pred_val = '*';
  20 +var prev_filter_opinion_val = '*';
  21 +var prev_filter_position_val = '.*';
  22 +var prev_filter_argument_val = '.*';
  23 +
  24 +function cancel_schemata_filtering() {
  25 + window.filter_position_val = '.*';
  26 + window.filter_argument_val = '.*';
  27 + window.filter_schema_type_val = '*';
  28 + window.filter_aspect_val = '*';
  29 + window.filter_reflex_val = '*';
  30 + window.filter_neg_val = '*';
  31 + window.filter_pred_val = '*';
  32 + window.filter_opinion_val = '*';
  33 + $('#frame_filter #argument_filter').val(window.filter_argument_val);
  34 + $('#frame_filter #position_filter').val(window.filter_position_val);
  35 + $('#frame_filter #schema_type_filter').val(window.filter_schema_type_val);
  36 + $('#frame_filter #aspect_filter').val(window.filter_aspect_val);
  37 + $('#frame_filter #reflex_filter').val(window.filter_reflex_val);
  38 + $('#frame_filter #neg_filter').val(window.filter_neg_val);
  39 + $('#frame_filter #pred_filter').val(window.filter_pred_val);
  40 + $('#frame_filter #opinion_filter').val(window.filter_opinion_val);
  41 + $('#frame_filter #argument_filter').trigger('change');
  42 +}
  43 +
  44 +function cancel_prev_schemata_filtering() {
  45 + window.prev_filter_position_val = '.*';
  46 + window.prev_filter_argument_val = '.*';
  47 + window.prev_filter_schema_type_val = '*';
  48 + window.prev_filter_aspect_val = '*';
  49 + window.prev_filter_reflex_val = '*';
  50 + window.prev_filter_neg_val = '*';
  51 + window.prev_filter_pred_val = '*';
  52 + window.prev_filter_opinion_val = '*';
  53 + $('#prev_frame_filter #argument_filter').val(window.prev_filter_argument_val);
  54 + $('#prev_frame_filter #position_filter').val(window.prev_filter_position_val);
  55 + $('#prev_frame_filter #schema_type_filter').val(window.prev_filter_schema_type_val);
  56 + $('#prev_frame_filter #aspect_filter').val(window.prev_filter_aspect_val);
  57 + $('#prev_frame_filter #reflex_filter').val(window.prev_filter_reflex_val);
  58 + $('#prev_frame_filter #neg_filter').val(window.prev_filter_neg_val);
  59 + $('#prev_frame_filter #pred_filter').val(window.prev_filter_pred_val);
  60 + $('#prev_frame_filter #opinion_filter').val(window.prev_filter_opinion_val);
  61 + $('#prev_frame_filter #argument_filter').trigger('change');
  62 +}
  63 +
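  +// Fetches the available filter options from the server; each option is an object
  +// with a "name" label and a "value" field, as used by draw_frames_filter() here
  +// and by draw_frames() in the lemma view.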
  64 +function initiateFrameFilters() {
  65 + $.ajaxJSON({
  66 + method: 'get',
  67 + url: ajax_get_frame_filter_options,
  68 + data: {
  69 + //message_id: message_id,
  70 + },
  71 + callback: function(result) {
  72 + window.schema_type_vals = result['schema_type_options'];
  73 + window.aspect_vals = result['aspect_options'];
  74 + window.reflex_vals = result['reflex_options'];
  75 + window.neg_vals = result['neg_options'];
  76 + window.pred_vals = result['pred_options'];
  77 + window.opinion_vals = result['opinion_options'];
  78 + },
  79 +
  80 + error_callback: function(xhr, status, error) {
  81 + error_alert(status + ': ' + error);
  82 + },
  83 + });
  84 +}
  85 +
  86 +function filter_update(id) {
  87 + if(id == 'frame_filter')
  88 + {
  89 + window.filter_schema_type_val = $('#frame_filter #schema_type_filter').val();
  90 + window.filter_aspect_val = $('#frame_filter #aspect_filter').val();
  91 + window.filter_reflex_val = $('#frame_filter #reflex_filter').val();
  92 + window.filter_neg_val = $('#frame_filter #neg_filter').val();
  93 + window.filter_pred_val = $('#frame_filter #pred_filter').val();
  94 + window.filter_opinion_val = $('#frame_filter #opinion_filter').val();
  95 + window.filter_position_val = $('#frame_filter #position_filter').val();
  96 + window.filter_argument_val = $('#frame_filter #argument_filter').val();
  97 + }
  98 + else if(id == 'prev_frame_filter')
  99 + {
  100 + window.prev_filter_schema_type_val = $('#prev_frame_filter #schema_type_filter').val();
  101 + window.prev_filter_aspect_val = $('#prev_frame_filter #aspect_filter').val();
  102 + window.prev_filter_reflex_val = $('#prev_frame_filter #reflex_filter').val();
  103 + window.prev_filter_neg_val = $('#prev_frame_filter #neg_filter').val();
  104 + window.prev_filter_pred_val = $('#prev_frame_filter #pred_filter').val();
  105 + window.prev_filter_opinion_val = $('#prev_frame_filter #opinion_filter').val();
  106 + window.prev_filter_position_val = $('#prev_frame_filter #position_filter').val();
  107 + window.prev_filter_argument_val = $('#prev_frame_filter #argument_filter').val();
  108 + }
  109 +}
  110 +
  111 +function draw_frames_filter(id) {
  112 + var frame_filter = document.getElementById(id);
  113 +
  114 + p = document.createElement('p');
  115 + text = document.createTextNode("Typ schematu: ");
  116 + p.appendChild(text);
  117 + var select = document.createElement('select');
  118 + select.setAttribute('id', 'schema_type_filter');
  119 + select.setAttribute('name', 'schema_type');
  120 + p.appendChild(select);
  121 + for(var i=0; i<schema_type_vals.length; i++)
  122 + {
  123 + var option = document.createElement('option');
  124 + option.setAttribute('value', schema_type_vals[i].value);
  125 + option.appendChild(document.createTextNode(schema_type_vals[i].name));
  126 + select.appendChild(option);
  127 + }
  128 + frame_filter.appendChild(p);
  129 +
  130 + p = document.createElement('p');
  131 + text = document.createTextNode("Aspekt: ");
  132 + p.appendChild(text);
  133 + var select = document.createElement('select');
  134 + select.setAttribute('id', 'aspect_filter');
  135 + select.setAttribute('name', 'ASPEKT');
  136 + p.appendChild(select);
  137 + for(var i=0; i<aspect_vals.length; i++)
  138 + {
  139 + var option = document.createElement('option');
  140 + option.setAttribute('value', aspect_vals[i].value);
  141 + option.appendChild(document.createTextNode(aspect_vals[i].name));
  142 + select.appendChild(option);
  143 + }
  144 + frame_filter.appendChild(p);
  145 +
  146 + p = document.createElement('p');
  147 + text = document.createTextNode("Zwrotność: ");
  148 + p.appendChild(text);
  149 + select = document.createElement('select');
  150 + select.setAttribute('id', 'reflex_filter');
  151 + select.setAttribute('name', 'ZWROTNOSC');
  152 + p.appendChild(select);
  153 + for(var i=0; i<reflex_vals.length; i++)
  154 + {
  155 + var option = document.createElement('option');
  156 + option.setAttribute('value', reflex_vals[i].value);
  157 + option.appendChild(document.createTextNode(reflex_vals[i].name));
  158 + select.appendChild(option);
  159 + }
  160 + frame_filter.appendChild(p);
  161 +
  162 + p = document.createElement('p');
  163 + text = document.createTextNode("Negatywność: ");
  164 + p.appendChild(text);
  165 + select = document.createElement('select');
  166 + select.setAttribute('id', 'neg_filter');
  167 + select.setAttribute('name', 'NEGATYWNOŚĆ');
  168 + p.appendChild(select);
  169 + for(var i=0; i<neg_vals.length; i++)
  170 + {
  171 + var option = document.createElement('option');
  172 + option.setAttribute('value', neg_vals[i].value);
  173 + option.appendChild(document.createTextNode(neg_vals[i].name));
  174 + select.appendChild(option);
  175 + }
  176 + frame_filter.appendChild(p);
  177 +
  178 + p = document.createElement('p');
  179 + text = document.createTextNode("Predykatywność: ");
  180 + p.appendChild(text);
  181 + select = document.createElement('select');
  182 + select.setAttribute('id', 'pred_filter');
  183 + select.setAttribute('name', 'PREDYKATYWNOŚĆ');
  184 + p.appendChild(select);
  185 + for(var i=0; i<pred_vals.length; i++)
  186 + {
  187 + var option = document.createElement('option');
  188 + option.setAttribute('value', pred_vals[i].value);
  189 + option.appendChild(document.createTextNode(pred_vals[i].name));
  190 + select.appendChild(option);
  191 + }
  192 + frame_filter.appendChild(p);
  193 +
  194 + p = document.createElement('p');
  195 + text = document.createTextNode("Opinia: ");
  196 + p.appendChild(text);
  197 + select = document.createElement('select');
  198 + select.setAttribute('id', 'opinion_filter');
  199 + select.setAttribute('name', 'OPINIA');
  200 + p.appendChild(select);
  201 + for(var i=0; i<opinion_vals.length; i++)
  202 + {
  203 + var option = document.createElement('option');
  204 + option.setAttribute('value', opinion_vals[i].value);
  205 + option.appendChild(document.createTextNode(opinion_vals[i].name));
  206 + select.appendChild(option);
  207 + }
  208 + frame_filter.appendChild(p);
  209 +
  210 + p = document.createElement('p');
  211 + text = document.createTextNode("Typ frazy: ");
  212 + p.appendChild(text);
  213 + select = document.createElement('input');
  214 + select.setAttribute('id', 'argument_filter');
  215 + select.setAttribute('name', 'ARGUMENT');
  216 + p.appendChild(select);
  217 + frame_filter.appendChild(p);
  218 +
  219 + p = document.createElement('p');
  220 + text = document.createTextNode("Pozycja: ");
  221 + p.appendChild(text);
  222 + select = document.createElement('input');
  223 + select.setAttribute('id', 'position_filter');
  224 + select.setAttribute('name', 'POZYCJA');
  225 + p.appendChild(select);
  226 + frame_filter.appendChild(p);
  227 +
  228 + if(id == 'frame_filter')
  229 + {
  230 + $('#frame_filter #schema_type_filter').val(window.filter_schema_type_val);
  231 + $('#frame_filter #aspect_filter').val(window.filter_aspect_val);
  232 + $('#frame_filter #reflex_filter').val(window.filter_reflex_val);
  233 + $('#frame_filter #neg_filter').val(window.filter_neg_val);
  234 + $('#frame_filter #pred_filter').val(window.filter_pred_val);
  235 + $('#frame_filter #opinion_filter').val(window.filter_opinion_val);
  236 + $('#frame_filter #position_filter').val(window.filter_position_val);
  237 + $('#frame_filter #argument_filter').val(window.filter_argument_val);
  238 + }
  239 + else if(id == 'prev_frame_filter')
  240 + {
  241 + $('#prev_frame_filter #schema_type_filter').val(window.prev_filter_schema_type_val);
  242 + $('#prev_frame_filter #aspect_filter').val(window.prev_filter_aspect_val);
  243 + $('#prev_frame_filter #reflex_filter').val(window.prev_filter_reflex_val);
  244 + $('#prev_frame_filter #neg_filter').val(window.prev_filter_neg_val);
  245 + $('#prev_frame_filter #pred_filter').val(window.prev_filter_pred_val);
  246 + $('#prev_frame_filter #opinion_filter').val(window.prev_filter_opinion_val);
  247 + $('#prev_frame_filter #position_filter').val(window.prev_filter_position_val);
  248 + $('#prev_frame_filter #argument_filter').val(window.prev_filter_argument_val);
  249 + }
  250 +
  251 + //attach autocomplete
  252 + $('#' + id + ' #argument_filter').autocomplete({
  253 + // triggers when selection performed
  254 + select: function(event, ui){
  255 + filter_update(id);
  256 + },
  257 + //define callback to format results
  258 + source: function(req, add){
  259 + //pass request to server
  260 + $.getJSON(ajax_argument_lookup, req, function(data) {
  261 + //create array for response objects
  262 + var suggestions = [];
  263 + $.each(data['result'], function(i, val){
  264 + suggestions.push(val[0]);
  265 + });
  266 + //pass array to callback
  267 + add(suggestions);
  268 + });
  269 + },
  270 + });
  271 + $('#' + id + ' #position_filter').autocomplete({
  272 + // triggers when selection performed
  273 + select: function(event, ui){
  274 + filter_update(id);
  275 + },
  276 + //define callback to format results
  277 + source: function(req, add){
  278 + //pass request to server
  279 + $.getJSON(ajax_position_lookup, req, function(data) {
  280 + //create array for response objects
  281 + var suggestions = [];
  282 + $.each(data['result'], function(i, val){
  283 + suggestions.push(val[0]);
  284 + });
  285 + //pass array to callback
  286 + add(suggestions);
  287 + });
  288 + },
  289 + });
  290 +}
  291 +
  292 +function filter_frames(schemas, filter_id) {
  293 + var schema_type_val = $('#'+filter_id+' #schema_type_filter').val();
  294 + var aspect_val = $('#'+filter_id+' #aspect_filter').val();
  295 + var reflex_val = $('#'+filter_id+' #reflex_filter').val();
  296 + var neg_val = $('#'+filter_id+' #neg_filter').val();
  297 + var pred_val = $('#'+filter_id+' #pred_filter').val();
  298 + var opinion_val = $('#'+filter_id+' #opinion_filter').val();
  299 + var position_val = $('#'+filter_id+' #position_filter').val().trim();
  300 + var argument_val = $('#'+filter_id+' #argument_filter').val().trim();
  301 + var filtered_frames = new Array();
  302 +
  303 + if(position_val == '.*')
  304 + position_val = ''
  305 + if(argument_val == '.*')
  306 + argument_val = ''
  307 +
  308 + for(var i=0; i<schemas.length; i++)
  309 + {
  310 + if((schemas[i].characteristics[3] == aspect_val || aspect_val == '*')
  311 + && (schemas[i].characteristics[0] == reflex_val || reflex_val == '*')
  312 + && (schemas[i].characteristics[1] == neg_val || neg_val == '*')
  313 + && (schemas[i].characteristics[2] == pred_val || pred_val == '*')
  314 + && (schemas[i].opinion == opinion_val || opinion_val == '*')
  315 + && schema_type_valid(schemas[i], schema_type_val))
  316 + {
  317 + frameMatch = false;
  318 + if(position_val)
  319 + frameMatch = has_positions(schemas[i], position_val)
  320 + if(argument_val && (frameMatch || !position_val))
  321 + {
  322 +
  323 + frameMatch = has_arguments(schemas[i], argument_val)
  324 + }
  325 + if(frameMatch || (!argument_val && !position_val))
  326 + filtered_frames.push(schemas[i]);
  327 + }
  328 + }
  329 + return filtered_frames;
  330 +}
  331 +
  332 +function has_positions(frame, pos_term) {
  333 + var alternatives = pos_term.split('|');
  334 + for(var h=0; h<alternatives.length; h++) {
  335 + var allConjsMatch = true;
  336 + var conjs = alternatives[h].split('&');
  337 +
  338 + for(var i=0; i<conjs.length; i++) {
  339 + try {
  340 + var matched_poss = [];
  341 + var conj = conjs[i].trim();
  342 + var regEx = conj;
  343 + if (regEx.substring(0, 1) == '!') {
  344 + regEx = regEx.substring(1);
  345 + }
  346 + var posRe = new RegExp('^'+escape_regex(regEx)+'$');
  347 + matched_poss = $.grep(frame.positions,
  348 + function(pos){
  349 + return pos.text_rep.match(posRe);
  350 + });
  351 + if((matched_poss.length > 0 && conj.startsWith('!')) ||
  352 + (matched_poss.length == 0 && !conj.startsWith('!'))) {
  353 + allConjsMatch = false;
  354 + break;
  355 + }
  356 + }
  357 + catch(e) {
  358 + allConjsMatch = false;
  359 + break;
  360 + }
  361 + }
  362 + if(allConjsMatch) {
  363 + return true;
  364 + }
  365 + }
  366 +
  367 + return false;
  368 +}
  369 +
  370 +function has_arguments(frame, arg_term) {
  371 + var alternatives = arg_term.split('|');
  372 + for(var h=0; h<alternatives.length; h++) {
  373 + var allConjsMatch = true;
  374 + var conjs = alternatives[h].split('&');
  375 + for(var i=0; i<conjs.length; i++) {
  376 + try {
  377 + var matched_args = [];
  378 + var conj = conjs[i].trim();
  379 + var regEx = conj;
  380 + if (regEx.substring(0, 1) == '!') {
  381 + regEx = regEx.substring(1);
  382 + }
  383 + var argRe = new RegExp('^'+escape_regex(regEx)+'$');
  384 +
  385 + for(var j=0; j<frame.positions.length; j++) {
  386 + matched_args = $.grep(frame.positions[j].arguments, function(arg) {
  387 + return arg.text_rep.match(argRe);
  388 + });
  389 + if(matched_args.length > 0) {
  390 + break;
  391 + }
  392 + }
  393 + if((matched_args.length > 0 && conj.startsWith('!')) ||
  394 + (matched_args.length == 0 && !conj.startsWith('!'))) {
  395 + allConjsMatch = false;
  396 + break;
  397 + }
  398 + }
  399 + catch(e) {
  400 + allConjsMatch = false;
  401 + break;
  402 + }
  403 + }
  404 + if(allConjsMatch){
  405 + return true;
  406 + }
  407 + }
  408 + return false;
  409 +}
  410 +
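  +// "*" accepts any schema, "normal" only non-phraseologic schemata and
  +// "phraseologic" only phraseologic ones.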
  411 +function schema_type_valid(schema, filter_option) {
  412 + if(filter_option == '*') return true;
  413 + else if(filter_option == 'normal' && !schema.is_phraseologic) return true;
  414 + else if(filter_option == 'phraseologic' && schema.is_phraseologic) return true;
  415 + else return false;
  416 +}
dictionary/static/js/semantics_coupling.js
1 -function schemaGotAssignedSemantics(element_id) {  
2 - var semanticsAssigned = true;  
3 - var id_map = parseId(element_id);  
4 - var schema_id = id_map['frame_id'];  
5 - if(schema_id < 0) {  
6 - semanticsAssigned = false;  
7 - }  
8 - else {  
9 - jQuery.ajax({  
10 - type: 'get',  
11 - url: ajax_schema_got_assigned_semantics,  
12 - data: {lemma_id: window.lemma_id,  
13 - schema_id: schema_id},  
14 - success: function(result) {  
15 - semanticsAssigned = result['got_assigned_semantics'];  
16 - },  
17 - async: false  
18 - });  
19 - }  
20 - return semanticsAssigned;  
21 -}  
22 -  
23 function semanticsAssignedAlert() { 1 function semanticsAssignedAlert() {
24 error_alert('Działaj rozważnie, element jest wykorzystywany w ramach semantycznych.'); 2 error_alert('Działaj rozważnie, element jest wykorzystywany w ramach semantycznych.');
25 } 3 }
26 4
27 -function exampleGotAssignedSemantics(example_tab_id)  
28 -{  
29 - var semanticsAssigned = true;  
30 - var example_id = example_tab_id.replace('nkjp_', '');  
31 - if (example_id < 0) {  
32 - semanticsAssigned = false;  
33 - }  
34 - else {  
35 - jQuery.ajax({  
36 - type: 'get',  
37 - url: ajax_example_got_assigned_semantics,  
38 - data: {lemma_id: window.lemma_id,  
39 - example_id: example_id},  
40 - success: function(result) {  
41 - semanticsAssigned = result['got_assigned_semantics'];  
42 - },  
43 - async: false  
44 - });  
45 - }  
46 - return semanticsAssigned;  
47 -}  
48 -  
49 function semanticsAssignedExampleAlert() { 5 function semanticsAssignedExampleAlert() {
50 error_alert('Działaj rozważnie, przykład jest wykorzystywany w ramach semantycznych.'); 6 error_alert('Działaj rozważnie, przykład jest wykorzystywany w ramach semantycznych.');
51 } 7 }
dictionary/teixml.py
1 #-*- coding:utf-8 -*- 1 #-*- coding:utf-8 -*-
2 from semantics.models import LexicalUnitExamples 2 from semantics.models import LexicalUnitExamples
3 3
4 -#Copyright (c) 2015, Bartłomiej Nitoń  
5 -#All rights reserved.  
6 -  
7 -#Redistribution and use in source and binary forms, with or without modification, are permitted provided  
8 -#that the following conditions are met:  
9 -  
10 -# Redistributions of source code must retain the above copyright notice, this list of conditions and  
11 -# the following disclaimer.  
12 -# Redistributions in binary form must reproduce the above copyright notice, this list of conditions  
13 -# and the following disclaimer in the documentation and/or other materials provided with the distribution.  
14 -  
15 -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED  
16 -# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A  
17 -# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR  
18 -# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED  
19 -# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)  
20 -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING  
21 -# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE  
22 -# POSSIBILITY OF SUCH DAMAGE.  
23 -  
24 -'''  
25 -File with functions responsible for creating TEI xml.  
26 -'''  
27 -  
28 import datetime 4 import datetime
29 -import operator  
30 5
31 from lxml import etree 6 from lxml import etree
32 from xml.sax.saxutils import escape 7 from xml.sax.saxutils import escape
33 8
34 -from dictionary.models import Atribute_Model, Frame_Opinion_Value, Frame_Char_Model, \  
35 - PositionCategory, Argument_Model, \ 9 +from dictionary.models import Atribute_Model, \
36 sortArguments, sortatributes, sortPositions, sort_positions 10 sortArguments, sortatributes, sortPositions, sort_positions
37 11
38 XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace' 12 XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace'
@@ -41,7 +15,6 @@ XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace' @@ -41,7 +15,6 @@ XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace'
41 def createteixml(outpath, lemmas, frame_opinion_values): 15 def createteixml(outpath, lemmas, frame_opinion_values):
42 root = write_root() 16 root = write_root()
43 write_header(root) 17 write_header(root)
44 - #lemmas = lemmas.filter(entry=u'brnąć')  
45 write_entries(root, lemmas, frame_opinion_values) 18 write_entries(root, lemmas, frame_opinion_values)
46 with open(outpath, 'w') as output_file: 19 with open(outpath, 'w') as output_file:
47 output_file.write(etree.tostring(root, pretty_print=True, 20 output_file.write(etree.tostring(root, pretty_print=True,
@@ -92,9 +65,10 @@ def write_entry(body_elem, lemma, frame_opinions, frame_opinion_values): @@ -92,9 +65,10 @@ def write_entry(body_elem, lemma, frame_opinions, frame_opinion_values):
92 65
93 write_syntactic_layer(entry_elem, lemma, frame_opinions, frame_opinion_values) 66 write_syntactic_layer(entry_elem, lemma, frame_opinions, frame_opinion_values)
94 write_examples_layer(entry_elem, lemma) 67 write_examples_layer(entry_elem, lemma)
95 - write_semantic_layer(entry_elem, lemma)  
96 - write_meanings_layer(entry_elem, lemma)  
97 - write_connections_layer(entry_elem, lemma) 68 + if lemma.semantics_ready():
  69 + write_semantic_layer(entry_elem, lemma)
  70 + write_meanings_layer(entry_elem, lemma)
  71 + write_connections_layer(entry_elem, lemma)
98 72
99 def write_syntactic_layer(entry_elem, lemma, frame_opinions, frame_opinion_values): 73 def write_syntactic_layer(entry_elem, lemma, frame_opinions, frame_opinion_values):
100 synt_layer_fs_elem = etree.SubElement(entry_elem, 'fs') 74 synt_layer_fs_elem = etree.SubElement(entry_elem, 'fs')
@@ -113,18 +87,18 @@ def write_syntactic_layer(entry_elem, lemma, frame_opinions, frame_opinion_value @@ -113,18 +87,18 @@ def write_syntactic_layer(entry_elem, lemma, frame_opinions, frame_opinion_value
113 pred_val=pred_val, 87 pred_val=pred_val,
114 aspect_val=aspect_val).order_by('text_rep') 88 aspect_val=aspect_val).order_by('text_rep')
115 for frame in matchingframes: 89 for frame in matchingframes:
116 - if (not frame_opinion_values.exists() or  
117 - frame_opinions.filter(frame=frame).exists()): 90 + if not lemma.phraseology_ready() and frame.phraseologic:
  91 + continue
  92 + if (not frame_opinion_values.exists() or frame_opinions.filter(frame=frame).exists()):
118 write_schema(vColl_elem, frame, lemma) 93 write_schema(vColl_elem, frame, lemma)
119 94
120 def write_schema(parent_elem, schema, lemma): 95 def write_schema(parent_elem, schema, lemma):
121 -  
122 schema_xml_id = 'wal_%s.%s-sch' % (str(lemma.entry_obj.id), str(schema.id)) 96 schema_xml_id = 'wal_%s.%s-sch' % (str(lemma.entry_obj.id), str(schema.id))
123 97
124 schema_fs_elem = etree.SubElement(parent_elem, 'fs') 98 schema_fs_elem = etree.SubElement(parent_elem, 'fs')
125 schema_fs_elem.attrib[etree.QName(XML_NAMESPACE, 'id')] = schema_xml_id 99 schema_fs_elem.attrib[etree.QName(XML_NAMESPACE, 'id')] = schema_xml_id
126 schema_fs_elem.attrib['type'] = 'schema' 100 schema_fs_elem.attrib['type'] = 'schema'
127 - # opinia o ramce 101 + # opinia o schemacie
128 try: 102 try:
129 schema_opinion = lemma.frame_opinions.filter(frame=schema).all()[0].value.short 103 schema_opinion = lemma.frame_opinions.filter(frame=schema).all()[0].value.short
130 except IndexError: 104 except IndexError:
@@ -420,20 +394,24 @@ def write_examples_layer(parent_elem, lemma): @@ -420,20 +394,24 @@ def write_examples_layer(parent_elem, lemma):
420 write_examples_feature(vColl_elem, lemma) 394 write_examples_feature(vColl_elem, lemma)
421 395
422 def write_examples_feature(parent_elem, lemma): 396 def write_examples_feature(parent_elem, lemma):
423 - entry = lemma.entry_obj  
424 for example in lemma.nkjp_examples.order_by('opinion__priority').all(): 397 for example in lemma.nkjp_examples.order_by('opinion__priority').all():
425 - write_example(parent_elem, entry, example) 398 + if not lemma.phraseology_ready() and example.frame.phraseologic:
  399 + pass
  400 + else:
  401 + write_example(parent_elem, lemma, example)
426 for example in lemma.lemma_nkjp_examples.order_by('opinion__priority').all(): 402 for example in lemma.lemma_nkjp_examples.order_by('opinion__priority').all():
427 - write_example(parent_elem, entry, example) 403 + write_example(parent_elem, lemma, example)
428 404
429 -def write_example(parent_elem, entry, example): 405 +def write_example(parent_elem, lemma, example):
  406 + entry = lemma.entry_obj
430 example_xml_id = u'wal_%s.%s-exm' % (str(entry.id), str(example.id)) 407 example_xml_id = u'wal_%s.%s-exm' % (str(entry.id), str(example.id))
431 408
432 example_fs_elem = etree.SubElement(parent_elem, 'fs') 409 example_fs_elem = etree.SubElement(parent_elem, 'fs')
433 example_fs_elem.attrib[etree.QName(XML_NAMESPACE, 'id')] = example_xml_id 410 example_fs_elem.attrib[etree.QName(XML_NAMESPACE, 'id')] = example_xml_id
434 example_fs_elem.attrib['type'] = 'example' 411 example_fs_elem.attrib['type'] = 'example'
435 412
436 - get_and_write_meaning_link(example_fs_elem, entry, example) 413 + if lemma.semantics_ready():
  414 + get_and_write_meaning_link(example_fs_elem, entry, example)
437 write_phrases_links(example_fs_elem, entry, example) 415 write_phrases_links(example_fs_elem, entry, example)
438 416
439 sentence_f_elem = etree.SubElement(example_fs_elem, 'f') 417 sentence_f_elem = etree.SubElement(example_fs_elem, 'f')
@@ -441,7 +419,6 @@ def write_example(parent_elem, entry, example): @@ -441,7 +419,6 @@ def write_example(parent_elem, entry, example):
441 sentence_content_elem = etree.SubElement(sentence_f_elem, 'string') 419 sentence_content_elem = etree.SubElement(sentence_f_elem, 'string')
442 sentence_content_elem.text = escape(example.sentence) 420 sentence_content_elem.text = escape(example.sentence)
443 421
444 - # trzeba do zrodel dodac nazwy symboliczne  
445 source_f_elem = etree.SubElement(example_fs_elem, 'f') 422 source_f_elem = etree.SubElement(example_fs_elem, 'f')
446 source_f_elem.attrib['name'] = 'source' 423 source_f_elem.attrib['name'] = 'source'
447 source_symbol_elem = etree.SubElement(source_f_elem, 'symbol') 424 source_symbol_elem = etree.SubElement(source_f_elem, 'symbol')
@@ -460,8 +437,9 @@ def write_example(parent_elem, entry, example): @@ -460,8 +437,9 @@ def write_example(parent_elem, entry, example):
460 437
461 def get_and_write_meaning_link(parent_elem, entry, example): 438 def get_and_write_meaning_link(parent_elem, entry, example):
462 try: 439 try:
  440 + entry_lex_units = entry.meanings.all()
463 lex_unit_example = LexicalUnitExamples.objects.get(example=example, 441 lex_unit_example = LexicalUnitExamples.objects.get(example=example,
464 - lexical_unit__base=entry.name) 442 + lexical_unit__in=entry_lex_units)
465 meaning = lex_unit_example.lexical_unit 443 meaning = lex_unit_example.lexical_unit
466 meaning_xml_id = u'#wal_%s.%s-mng' % (str(entry.id), str(meaning.id)) 444 meaning_xml_id = u'#wal_%s.%s-mng' % (str(entry.id), str(meaning.id))
467 445
@@ -518,9 +496,19 @@ def write_frame_fs(parent_elem, entry, frame): @@ -518,9 +496,19 @@ def write_frame_fs(parent_elem, entry, frame):
518 frame_fs_elem.attrib[etree.QName(XML_NAMESPACE, 'id')] = frame_xml_id 496 frame_fs_elem.attrib[etree.QName(XML_NAMESPACE, 'id')] = frame_xml_id
519 frame_fs_elem.attrib['type'] = 'frame' 497 frame_fs_elem.attrib['type'] = 'frame'
520 498
  499 + write_frame_opinion(frame_fs_elem, frame)
521 write_frame_meanings(frame_fs_elem, entry, frame) 500 write_frame_meanings(frame_fs_elem, entry, frame)
522 write_frame_arguments(frame_fs_elem, entry, frame) 501 write_frame_arguments(frame_fs_elem, entry, frame)
523 502
  503 +def write_frame_opinion(parent_elem, frame):
  504 + frame_opinion = 'unk'
  505 + if frame.opinion_selected():
  506 + frame_opinion = frame.opinion.short
  507 + opinion_f_elem = etree.SubElement(parent_elem, 'f')
  508 + opinion_f_elem.attrib['name'] = 'opinion'
  509 + opinion_symbol = etree.SubElement(opinion_f_elem, 'symbol')
  510 + opinion_symbol.attrib['value'] = frame_opinion
  511 +
524 def write_frame_meanings(parent_elem, entry, frame): 512 def write_frame_meanings(parent_elem, entry, frame):
525 meanings_f_elem = etree.SubElement(parent_elem, 'f') 513 meanings_f_elem = etree.SubElement(parent_elem, 'f')
526 meanings_f_elem.attrib['name'] = 'meanings' 514 meanings_f_elem.attrib['name'] = 'meanings'
@@ -686,8 +674,7 @@ def write_meanings_layer(parent_elem, lemma): @@ -686,8 +674,7 @@ def write_meanings_layer(parent_elem, lemma):
686 674
687 def write_meanings(parent_elem, lemma): 675 def write_meanings(parent_elem, lemma):
688 entry = lemma.entry_obj 676 entry = lemma.entry_obj
689 - lex_units = entry.lexical_units()  
690 - for lex_unit in lex_units.all(): 677 + for lex_unit in entry.meanings.all():
691 write_meaning(parent_elem, entry, lex_unit) 678 write_meaning(parent_elem, entry, lex_unit)
692 679
693 def write_meaning(parent_elem, entry, lex_unit): 680 def write_meaning(parent_elem, entry, lex_unit):
@@ -733,39 +720,30 @@ def write_connections_layer(parent_elem, lemma): @@ -733,39 +720,30 @@ def write_connections_layer(parent_elem, lemma):
733 def write_alternations(parent_elem, lemma): 720 def write_alternations(parent_elem, lemma):
734 entry = lemma.entry_obj 721 entry = lemma.entry_obj
735 frames = entry.actual_frames() 722 frames = entry.actual_frames()
736 -  
737 for schema in lemma.frames.all(): 723 for schema in lemma.frames.all():
738 for frame in frames: 724 for frame in frames:
739 matching_complements = frame.complements.filter(realizations__frame=schema).distinct() 725 matching_complements = frame.complements.filter(realizations__frame=schema).distinct()
740 - if matching_complements.filter(realizations__alternation=1).exists():  
741 - alternation_fs_elem = etree.SubElement(parent_elem, 'fs')  
742 - alternation_fs_elem.attrib['type'] = 'alternation'  
743 -  
744 - connections_f_elem = etree.SubElement(alternation_fs_elem, 'f')  
745 - connections_f_elem.attrib['name'] = 'connections'  
746 -  
747 - vColl_elem = etree.SubElement(connections_f_elem, 'vColl')  
748 - vColl_elem.attrib['org'] = 'set'  
749 -  
750 - for arg in frame.complements.all():  
751 - alt_realizations = arg.realizations.filter(frame=schema, alternation=1)  
752 - if alt_realizations.exists():  
753 - write_connection(vColl_elem, entry, frame, arg, alt_realizations)  
754 -  
755 - if matching_complements.filter(realizations__alternation=2).exists():  
756 - alternation_fs_elem = etree.SubElement(parent_elem, 'fs')  
757 - alternation_fs_elem.attrib['type'] = 'alternation'  
758 -  
759 - connections_f_elem = etree.SubElement(alternation_fs_elem, 'f')  
760 - connections_f_elem.attrib['name'] = 'connections'  
761 -  
762 - vColl_elem = etree.SubElement(connections_f_elem, 'vColl')  
763 - vColl_elem.attrib['org'] = 'set'  
764 -  
765 - for arg in frame.complements.all():  
766 - alt_realizations = arg.realizations.filter(frame=schema, alternation=2)  
767 - if alt_realizations.exists():  
768 - write_connection(vColl_elem, entry, frame, arg, alt_realizations) 726 + write_alternation(parent_elem, entry, schema, frame, matching_complements, 1)
  727 + write_alternation(parent_elem, entry, schema, frame, matching_complements, 2)
  728 +
  729 +def write_alternation(parent_elem, entry, schema, frame, complements, alternation):
  730 + alternation_compls = complements.filter(realizations__alternation=alternation)
  731 + if alternation_compls.exists():
  732 + first_connection = True
  733 + for arg in alternation_compls.all():
  734 + alt_realizations = arg.realizations.filter(frame=schema, alternation=alternation)
  735 + if alt_realizations.exists():
  736 + if first_connection:
  737 + alternation_fs_elem = etree.SubElement(parent_elem, 'fs')
  738 + alternation_fs_elem.attrib['type'] = 'alternation'
  739 +
  740 + connections_f_elem = etree.SubElement(alternation_fs_elem, 'f')
  741 + connections_f_elem.attrib['name'] = 'connections'
  742 +
  743 + vColl_elem = etree.SubElement(connections_f_elem, 'vColl')
  744 + vColl_elem.attrib['org'] = 'set'
  745 + first_connection = False
  746 + write_connection(vColl_elem, entry, frame, arg, alt_realizations)
769 747
770 def write_connection(parent_elem, entry, frame, arg, realizations): 748 def write_connection(parent_elem, entry, frame, arg, realizations):
771 connection_fs_elem = etree.SubElement(parent_elem, 'fs') 749 connection_fs_elem = etree.SubElement(parent_elem, 'fs')
@@ -794,158 +772,5 @@ def write_phrases(parent_elem, entry, realizations): @@ -794,158 +772,5 @@ def write_phrases(parent_elem, entry, realizations):
794 realization.position.id, realization.argument.id) 772 realization.position.id, realization.argument.id)
795 phrase_link_elem = etree.SubElement(vColl_elem, 'fs') 773 phrase_link_elem = etree.SubElement(vColl_elem, 'fs')
796 phrase_link_elem.attrib['sameAs'] = phrase_xml_link 774 phrase_link_elem.attrib['sameAs'] = phrase_xml_link
797 - phrase_link_elem.attrib['type'] = 'phrase'  
798 -  
799 -  
800 -  
801 -def writefsdecl(outfile):  
802 - '''  
803 - Write feature structures declarations  
804 - '''  
805 - outfile.write(u' <encodingDesc>\n')  
806 - outfile.write(u' <fsdDecl>\n')  
807 -  
808 -# syntacticBahaviour fs declaration  
809 - outfile.write(u' <fsDecl type="syntacticBehaviour">\n')  
810 - outfile.write(u' <fsDescr>Describes syntactic behaviour of entry</fsDescr>\n')  
811 - outfile.write(u' <fDecl name="frames">\n')  
812 - outfile.write(u' <fDescr>syntactic frames</fDescr>\n')  
813 - outfile.write(u' <vRange>\n')  
814 - outfile.write(u' <vColl org="list">\n')  
815 - outfile.write(u' <fs type="frame"/>\n')  
816 - outfile.write(u' </vColl>\n')  
817 - outfile.write(u' </vRange>\n')  
818 - outfile.write(u' </fDecl>\n')  
819 - outfile.write(u' </fsDecl>\n')  
820 -  
821 -# frame fs declaration  
822 - outfile.write(u' <fsDecl type="frame">\n')  
823 - outfile.write(u' <fsDescr>Describes syntactic frame</fsDescr>\n')  
824 - # frame opinion  
825 - outfile.write(u' <fDecl name="opinion">\n')  
826 - outfile.write(u' <fDescr>frame opinion</fDescr>\n')  
827 - outfile.write(u' <vRange>\n')  
828 - outfile.write(u' <vAlt>\n')  
829 - for alt in Frame_Opinion_Value.objects.order_by('priority'):  
830 - outfile.write(u' <symbol value="%s"/>\n' % alt.short)  
831 - outfile.write(u' </vAlt>\n')  
832 - outfile.write(u' </vRange>\n')  
833 - outfile.write(u' </fDecl>\n')  
834 - # reflex  
835 - outfile.write(u' <fDecl name="reflex">\n')  
836 - outfile.write(u' <fDescr>frame reflexivity</fDescr>\n')  
837 - outfile.write(u' <vRange>\n')  
838 - outfile.write(u' <vAlt>\n')  
839 - outfile.write(u' <binary value="true"/>\n')  
840 - outfile.write(u' <binary value="false"/>\n')  
841 - outfile.write(u' </vAlt>\n')  
842 - outfile.write(u' </vRange>\n')  
843 - outfile.write(u' </fDecl>\n')  
844 - # aspect  
845 - outfile.write(u' <fDecl name="aspect">\n')  
846 - outfile.write(u' <fDescr>frame aspect</fDescr>\n')  
847 - outfile.write(u' <vRange>\n')  
848 - outfile.write(u' <vAlt>\n')  
849 - aspect_obj = Frame_Char_Model.objects.get(model_name=u'ASPEKT')  
850 - for alt in aspect_obj.frame_char_values.order_by('priority'):  
851 - outfile.write(u' <symbol value="%s"/>\n' %  
852 - alt.value)  
853 - outfile.write(u' </vAlt>\n')  
854 - outfile.write(u' </vRange>\n')  
855 - outfile.write(u' </fDecl>\n')  
856 - # negatywnosc  
857 - outfile.write(u' <fDecl name="negativity">\n')  
858 - outfile.write(u' <fDescr>frame negativity</fDescr>\n')  
859 - outfile.write(u' <vRange>\n')  
860 - outfile.write(u' <vAlt>\n')  
861 - aspect_obj = Frame_Char_Model.objects.get(model_name=u'NEGATYWNOŚĆ')  
862 - for alt in aspect_obj.frame_char_values.order_by('priority'):  
863 - outfile.write(u' <symbol value="%s"/>\n' %  
864 - alt.value)  
865 - outfile.write(u' </vAlt>\n')  
866 - outfile.write(u' </vRange>\n')  
867 - outfile.write(u' </fDecl>\n')  
868 - # predykatywnosc  
869 - outfile.write(u' <fDecl name="predicativity">\n')  
870 - outfile.write(u' <fDescr>frame predicativity</fDescr>\n')  
871 - outfile.write(u' <vRange>\n')  
872 - outfile.write(u' <vAlt>\n')  
873 - aspect_obj = Frame_Char_Model.objects.get(model_name=u'PREDYKATYWNOŚĆ')  
874 - for alt in aspect_obj.frame_char_values.order_by('priority'):  
875 - outfile.write(u' <symbol value="%s"/>\n' %  
876 - alt.value)  
877 - outfile.write(u' </vAlt>\n')  
878 - outfile.write(u' </vRange>\n')  
879 - outfile.write(u' </fDecl>\n')  
880 - # positions  
881 - outfile.write(u' <fDecl name="positions">\n')  
882 - outfile.write(u' <fDescr>syntactic positions</fDescr>\n')  
883 - outfile.write(u' <vRange>\n')  
884 - outfile.write(u' <vColl org="list">\n')  
885 - outfile.write(u' <fs type="position"/>\n')  
886 - outfile.write(u' </vColl>\n')  
887 - outfile.write(u' </vRange>\n')  
888 - outfile.write(u' </fDecl>\n')  
889 - outfile.write(u' </fsDecl>\n')  
890 -  
891 -# position fs declaration  
892 - outfile.write(u' <fsDecl type="position">\n')  
893 - outfile.write(u' <fsDescr>Describes syntactic position</fsDescr>\n')  
894 - # position category  
895 - outfile.write(u' <fDecl name="category">\n')  
896 - outfile.write(u' <fDescr>position category</fDescr>\n')  
897 - outfile.write(u' <vRange>\n')  
898 - outfile.write(u' <vAlt>\n')  
899 - for alt in PositionCategory.objects.filter(control=False).order_by('priority'):  
900 - outfile.write(u' <symbol value="%s"/>\n' % alt.category)  
901 - outfile.write(u' </vAlt>\n')  
902 - outfile.write(u' </vRange>\n')  
903 - outfile.write(u' </fDecl>\n')  
904 - # position control  
905 - outfile.write(u' <fDecl name="control">\n')  
906 - outfile.write(u' <fDescr>position category</fDescr>\n')  
907 - outfile.write(u' <vRange>\n')  
908 - outfile.write(u' <vAlt>\n')  
909 - for alt in PositionCategory.objects.filter(control=True).order_by('priority'):  
910 - outfile.write(u' <symbol value="%s"/>\n' % alt.category)  
911 - outfile.write(u' </vAlt>\n')  
912 - outfile.write(u' </vRange>\n')  
913 - outfile.write(u' </fDecl>\n')  
914 - # arguments  
915 - outfile.write(u' <fDecl name="arguments">\n')  
916 - outfile.write(u' <fDescr>syntactic arguments</fDescr>\n')  
917 - outfile.write(u' <vRange>\n')  
918 - outfile.write(u' <vColl org="list">\n')  
919 - outfile.write(u' <fs type="argument"/>\n')  
920 - outfile.write(u' </vColl>\n')  
921 - outfile.write(u' </vRange>\n')  
922 - outfile.write(u' </fDecl>\n')  
923 - outfile.write(u' </fsDecl>\n')  
924 -  
925 -# argument fs declaration  
926 - outfile.write(u' <fsDecl type="argument">\n')  
927 - outfile.write(u' <fsDescr>Describes syntactic argument</fsDescr>\n')  
928 - # position category  
929 - outfile.write(u' <fDecl name="type">\n')  
930 - outfile.write(u' <fDescr>type of argument</fDescr>\n')  
931 - outfile.write(u' <vRange>\n')  
932 - outfile.write(u' <vAlt>\n')  
933 - for alt in Argument_Model.objects.order_by('priority'):  
934 - outfile.write(u' <symbol value="%s"/>\n' % alt.arg_model_name)  
935 - outfile.write(u' </vAlt>\n')  
936 - outfile.write(u' </vRange>\n')  
937 - outfile.write(u' </fDecl>\n')  
938 - # attributes  
939 - outfile.write(u' <fDecl name="attributes">\n')  
940 - outfile.write(u' <fDescr>argument attributes</fDescr>\n')  
941 - outfile.write(u' <vRange>\n')  
942 - outfile.write(u' <vColl org="list">\n')  
943 - outfile.write(u' <fs type="attribut"/>\n')  
944 - outfile.write(u' </vColl>\n')  
945 - outfile.write(u' </vRange>\n')  
946 - outfile.write(u' </fDecl>\n')  
947 - outfile.write(u' </fsDecl>\n')  
948 -  
949 - outfile.write(u' </fsdDecl>\n')  
950 - outfile.write(u' </encodingDesc>\n') 775 + phrase_link_elem.attrib['type'] = 'phrase'
951 776
952 \ No newline at end of file 777 \ No newline at end of file
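
The rewritten alternation export above folds two nearly identical blocks into a single write_alternation() helper that creates the wrapping <fs type="alternation"> element lazily, on the first matching connection. Below is a minimal, self-contained sketch of that pattern using plain lxml and lists in place of Django querysets; write_alternation_sketch() and the dictionary keys are illustrative only, not project code.
--------------------------------------------
from lxml import etree

def write_alternation_sketch(parent_elem, realizations, alternation):
    # the <fs type="alternation"> wrapper is only emitted once a matching
    # realization is actually found
    vColl_elem = None
    for phrase in realizations:
        if phrase['alternation'] != alternation:
            continue
        if vColl_elem is None:
            alternation_fs_elem = etree.SubElement(parent_elem, 'fs')
            alternation_fs_elem.attrib['type'] = 'alternation'
            connections_f_elem = etree.SubElement(alternation_fs_elem, 'f')
            connections_f_elem.attrib['name'] = 'connections'
            vColl_elem = etree.SubElement(connections_f_elem, 'vColl')
            vColl_elem.attrib['org'] = 'set'
        phrase_link_elem = etree.SubElement(vColl_elem, 'fs')
        phrase_link_elem.attrib['type'] = 'phrase'
        phrase_link_elem.attrib['sameAs'] = phrase['xml_link']

root = etree.Element('listing')
write_alternation_sketch(root, [{'alternation': 1, 'xml_link': '#wal_1.2-phr'}], 1)
print(etree.tostring(root, pretty_print=True))
--------------------------------------------
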
dictionary/templates/arg_realizations.html
@@ -7,7 +7,7 @@ @@ -7,7 +7,7 @@
7 {% endblock %} 7 {% endblock %}
8 8
9 {% block content %} 9 {% block content %}
10 -</br> 10 +<br/>
11 <table class='ArgRealViewTable'> 11 <table class='ArgRealViewTable'>
12 <tr> 12 <tr>
13 <td> 13 <td>
dictionary/templates/filter_form.html
@@ -36,13 +36,177 @@ $(function(){ @@ -36,13 +36,177 @@ $(function(){
36 }, 36 },
37 }); 37 });
38 38
  39 + $('#filter-form-tabs').tabs();
39 40
40 }); 41 });
41 42
42 </script> 43 </script>
43 44
44 <form class="filter-form" method="post"> {% csrf_token %} 45 <form class="filter-form" method="post"> {% csrf_token %}
45 - {{ form.as_p }} 46 +<div id="filter-form-tabs">
  47 + <ul>
  48 + <li><a href="#lemma-filters">Hasło</a></li>
  49 + <li><a href="#schema-filters">Schematy</a></li>
  50 + <li><a href="#frame-filters">Ramy</a></li>
  51 + </ul>
  52 + <div id="lemma-filters">
  53 + {% if not form.lemma.is_hidden %}
  54 + <p class="fieldWrapper">
  55 + {{ form.lemma.errors }}
  56 + {{ form.lemma.label_tag }}: {{ form.lemma }}
  57 + </p>
  58 + {% endif %}
  59 + {% if not form.pos.is_hidden %}
  60 + <p class="fieldWrapper">
  61 + {{ form.pos.errors }}
  62 + {{ form.pos.label_tag }}: {{ form.pos }}
  63 + </p>
  64 + {% endif %}
  65 + {% if not form.contains_phraseology.is_hidden %}
  66 + <p class="fieldWrapper">
  67 + {{ form.contains_phraseology.errors }}
  68 + {{ form.contains_phraseology.label_tag }}: {{ form.contains_phraseology }}
  69 + </p>
  70 + {% endif %}
  71 + {% if not form.owner.is_hidden or not form.phraseologist.is_hidden or not form.semanticist.is_hidden %}
  72 + <hr class="filtersSeparator">
  73 + {% endif %}
  74 + {% if not form.owner.is_hidden %}
  75 + <p class="fieldWrapper">
  76 + {{ form.owner.errors }}
  77 + {{ form.owner.label_tag }}: {{ form.owner }}
  78 + </p>
  79 + {% endif %}
  80 + {% if not form.phraseologist.is_hidden %}
  81 + <p class="fieldWrapper">
  82 + {{ form.phraseologist.errors }}
  83 + {{ form.phraseologist.label_tag }}: {{ form.phraseologist }}
  84 + </p>
  85 + {% endif %}
  86 + {% if not form.semanticist.is_hidden %}
  87 + <p class="fieldWrapper">
  88 + {{ form.semanticist.errors }}
  89 + {{ form.semanticist.label_tag }}: {{ form.semanticist }}
  90 + </p>
  91 + {% endif %}
  92 + {% if not form.vocabulary.is_hidden or not form.status.is_hidden or not form.has_message_from.is_hidden %}
  93 + <hr class="filtersSeparator">
  94 + {% endif %}
  95 + {% if not form.vocabulary.is_hidden %}
  96 + <p class="fieldWrapper">
  97 + {{ form.vocabulary.errors }}
  98 + {{ form.vocabulary.label_tag }}: {{ form.vocabulary }}
  99 + </p>
  100 + {% endif %}
  101 + {% if not form.status.is_hidden %}
  102 + <p class="fieldWrapper">
  103 + {{ form.status.errors }}
  104 + {{ form.status.label_tag }}: {{ form.status }}
  105 + </p>
  106 + {% endif %}
  107 + {% if not form.has_message_from.is_hidden %}
  108 + <p class="fieldWrapper">
  109 + {{ form.has_message_from.errors }}
  110 + {{ form.has_message_from.label_tag }}: {{ form.has_message_from }}
  111 + </p>
  112 + {% endif %}
  113 + {% if not form.example_source.is_hidden or not form.approver.is_hidden %}
  114 + <hr class="filtersSeparator">
  115 + {% endif %}
  116 + {% if not form.example_source.is_hidden %}
  117 + <p class="fieldWrapper">
  118 + {{ form.example_source.errors }}
  119 + {{ form.example_source.label_tag }}: {{ form.example_source }}
  120 + </p>
  121 + {% endif %}
  122 + {% if not form.approver.is_hidden %}
  123 + <p class="fieldWrapper">
  124 + {{ form.approver.errors }}
  125 + {{ form.approver.label_tag }}: {{ form.approver }}
  126 + </p>
  127 + {% endif %}
  128 + </div>
  129 + <div id="schema-filters">
  130 + {% if not form.schema_opinion.is_hidden %}
  131 + <p class="fieldWrapper">
  132 + {{ form.schema_opinion.errors }}
  133 + {{ form.schema_opinion.label_tag }}: {{ form.schema_opinion }}
  134 + </p>
  135 + {% endif %}
  136 + {% if not form.schema_type.is_hidden %}
  137 + <p class="fieldWrapper">
  138 + {{ form.schema_type.errors }}
  139 + {{ form.schema_type.label_tag }}: {{ form.schema_type }}
  140 + </p>
  141 + {% endif %}
  142 + {% if not form.reflex.is_hidden %}
  143 + <p class="fieldWrapper">
  144 + {{ form.reflex.errors }}
  145 + {{ form.reflex.label_tag }}: {{ form.reflex }}
  146 + </p>
  147 + {% endif %}
  148 + {% if not form.negativity.is_hidden %}
  149 + <p class="fieldWrapper">
  150 + {{ form.negativity.errors }}
  151 + {{ form.negativity.label_tag }}: {{ form.negativity }}
  152 + </p>
  153 + {% endif %}
  154 + {% if not form.predicativity.is_hidden %}
  155 + <p class="fieldWrapper">
  156 + {{ form.predicativity.errors }}
  157 + {{ form.predicativity.label_tag }}: {{ form.predicativity }}
  158 + </p>
  159 + {% endif %}
  160 + {% if not form.aspect.is_hidden %}
  161 + <p class="fieldWrapper">
  162 + {{ form.aspect.errors }}
  163 + {{ form.aspect.label_tag }}: {{ form.aspect }}
  164 + </p>
  165 + {% endif %}
  166 + {% if not form.has_argument.is_hidden %}
  167 + <p class="fieldWrapper">
  168 + {{ form.has_argument.errors }}
  169 + {{ form.has_argument.label_tag }}: {{ form.has_argument }}
  170 + </p>
  171 + {% endif %}
  172 + {% if not form.has_position.is_hidden %}
  173 + <p class="fieldWrapper">
  174 + {{ form.has_position.errors }}
  175 + {{ form.has_position.label_tag }}: {{ form.has_position }}
  176 + </p>
  177 + {% endif %}
  178 + {% if not form.filter_frames.is_hidden %}
  179 + <p class="fieldWrapper">
  180 + {{ form.filter_frames.errors }}
  181 + {{ form.filter_frames.label_tag }}: {{ form.filter_frames }}
  182 + </p>
  183 + {% endif %}
  184 + </div>
  185 + <div id="frame-filters">
  186 + {% if not form.frame_opinion.is_hidden %}
  187 + <p class="fieldWrapper">
  188 + {{ form.frame_opinion.errors }}
  189 + {{ form.frame_opinion.label_tag }}: {{ form.frame_opinion }}
  190 + </p>
  191 + {% endif %}
  192 + <hr class="filtersSeparator">
  193 + <div id="sem-arguments">
  194 + <p>Argumenty semantyczne:
  195 + <button type="button" onclick="addSemArgFilter(this)">Dodaj</button>
  196 + <button type="button" onclick="addArgAlternative(this)">Lub</button>
  197 + </p>
  198 + {% for arg_form in sem_args_forms %}
  199 + <div id="sem-argument">
  200 + {% include 'sem_arg_form.html' with form=arg_form only %}
  201 + </div>
  202 + {% endfor %}
  203 + </div>
  204 + </div>
  205 + </div>
  206 + {% for hidden in form.hidden_fields %}
  207 + {{ hidden }}
  208 + {% endfor %}
  209 + {{ form.non_field_errors }}
46 <p class="lexeme-save"> 210 <p class="lexeme-save">
47 <button type="submit" id="filter-form-submit"> 211 <button type="submit" id="filter-form-submit">
48 Filtruj 212 Filtruj
@@ -54,4 +218,5 @@ $(function(){ @@ -54,4 +218,5 @@ $(function(){
54 Anuluj 218 Anuluj
55 </button> 219 </button>
56 </p> 220 </p>
  221 +
57 </form> 222 </form>
dictionary/templates/lemma_preview.html
@@ -8,17 +8,7 @@ @@ -8,17 +8,7 @@
8 8
9 frame_chars_str = '{{frame_char_list}}'; 9 frame_chars_str = '{{frame_char_list}}';
10 frame_chars_str = convertHtml(frame_chars_str); 10 frame_chars_str = convertHtml(frame_chars_str);
11 - window.frame_char_order = serializedObjToObj(frame_chars_str)  
12 -  
13 - serialized_text = '{{serialized_frames}}';  
14 - serialized_text = convertHtml(serialized_text);  
15 - serialized_frames = serialized_text;  
16 - window.prev_frames = serializedObjToObj(serialized_frames);  
17 -  
18 - nkjp_examples_str = '{{nkjp_examples}}';  
19 - nkjp_examples_str = convertHtml(nkjp_examples_str);  
20 - window.prev_nkjp_examples = serializedNkjpToObj(nkjp_examples_str);  
21 - 11 + window.frame_char_order = serializedObjToObj(frame_chars_str);
22 12
23 function selectPrevNkjpTr(id) 13 function selectPrevNkjpTr(id)
24 { 14 {
@@ -132,7 +122,8 @@ function unselectPrevTd(id) @@ -132,7 +122,8 @@ function unselectPrevTd(id)
132 } 122 }
133 123
134 function draw_filtered_prev_frames() 124 function draw_filtered_prev_frames()
135 - { 125 + {
  126 + window.prev_filter_schema_type_val = $('#prev_frame_filter #schema_type_filter').val();
136 window.prev_filter_aspect_val = $('#prev_frame_filter #aspect_filter').val(); 127 window.prev_filter_aspect_val = $('#prev_frame_filter #aspect_filter').val();
137 window.prev_filter_reflex_val = $('#prev_frame_filter #reflex_filter').val(); 128 window.prev_filter_reflex_val = $('#prev_frame_filter #reflex_filter').val();
138 window.prev_filter_neg_val = $('#prev_frame_filter #neg_filter').val(); 129 window.prev_filter_neg_val = $('#prev_frame_filter #neg_filter').val();
@@ -239,6 +230,7 @@ $(document).ready(function() { @@ -239,6 +230,7 @@ $(document).ready(function() {
239 $('#prev_filter_frames_options').click(function(){ 230 $('#prev_filter_frames_options').click(function(){
240 $('#prev_filter_frames_options #options').slideToggle('fast'); 231 $('#prev_filter_frames_options #options').slideToggle('fast');
241 }); 232 });
  233 + $('#prev_frame_filter #schema_type_filter').change(draw_filtered_prev_frames);
242 $('#prev_frame_filter #aspect_filter').change(draw_filtered_prev_frames); 234 $('#prev_frame_filter #aspect_filter').change(draw_filtered_prev_frames);
243 $('#prev_frame_filter #reflex_filter').change(draw_filtered_prev_frames); 235 $('#prev_frame_filter #reflex_filter').change(draw_filtered_prev_frames);
244 $('#prev_frame_filter #neg_filter').change(draw_filtered_prev_frames); 236 $('#prev_frame_filter #neg_filter').change(draw_filtered_prev_frames);
@@ -249,23 +241,6 @@ $(document).ready(function() { @@ -249,23 +241,6 @@ $(document).ready(function() {
249 $('#prev_filter_frames_options #options').click(function(event) { 241 $('#prev_filter_frames_options #options').click(function(event) {
250 event.stopPropagation(); 242 event.stopPropagation();
251 }); 243 });
252 - $('#cancel_prev_frame_filter').click(function(event) {  
253 - window.prev_filter_position_val = '.*';  
254 - window.prev_filter_argument_val = '.*';  
255 - window.prev_filter_aspect_val = '*';  
256 - window.prev_filter_reflex_val = '*';  
257 - window.prev_filter_neg_val = '*';  
258 - window.prev_filter_pred_val = '*';  
259 - window.prev_filter_opinion_val = '*';  
260 - $('#prev_frame_filter #argument_filter').val(window.prev_filter_argument_val);  
261 - $('#prev_frame_filter #position_filter').val(window.prev_filter_position_val);  
262 - $('#prev_frame_filter #aspect_filter').val(window.prev_filter_aspect_val);  
263 - $('#prev_frame_filter #reflex_filter').val(window.prev_filter_reflex_val);  
264 - $('#prev_frame_filter #neg_filter').val(window.prev_filter_neg_val);  
265 - $('#prev_frame_filter #pred_filter').val(window.prev_filter_pred_val);  
266 - $('#prev_frame_filter #opinion_filter').val(window.prev_filter_opinion_val);  
267 - $('#prev_frame_filter #argument_filter').trigger('change');  
268 - });  
269 - draw_filtered_prev_frames(); 244 + $('#cancel_prev_frame_filter').click(cancel_prev_schemata_filtering);
270 }); 245 });
271 </script> 246 </script>
dictionary/templates/lemma_view.html
@@ -9,6 +9,7 @@ @@ -9,6 +9,7 @@
9 <link rel="stylesheet" type="text/css" href="{{ STATIC_URL }}css/examples_table.css"/> 9 <link rel="stylesheet" type="text/css" href="{{ STATIC_URL }}css/examples_table.css"/>
10 <link rel="stylesheet" type="text/css" href="{{ STATIC_URL }}css/aspect_relations_table.css"/> 10 <link rel="stylesheet" type="text/css" href="{{ STATIC_URL }}css/aspect_relations_table.css"/>
11 <link rel="stylesheet" type="text/css" href="{{ STATIC_URL }}css/filter_frames_menu.css"/> 11 <link rel="stylesheet" type="text/css" href="{{ STATIC_URL }}css/filter_frames_menu.css"/>
  12 + <link rel="stylesheet" type="text/css" href="{{ STATIC_URL }}css/lemmas_filtering.css"/>
12 <script type="text/javascript" src="{{ STATIC_URL }}js/lib/grid.locale-pl.js"></script> 13 <script type="text/javascript" src="{{ STATIC_URL }}js/lib/grid.locale-pl.js"></script>
13 <script type="text/javascript"> 14 <script type="text/javascript">
14 jQuery.jgrid.no_legacy_api = true; 15 jQuery.jgrid.no_legacy_api = true;
@@ -20,6 +21,8 @@ @@ -20,6 +21,8 @@
20 <script type="text/javascript" src="{{ STATIC_URL }}js/lemma_grid.js"></script> 21 <script type="text/javascript" src="{{ STATIC_URL }}js/lemma_grid.js"></script>
21 <script type="text/javascript" src="{{ STATIC_URL }}js/semantics_coupling.js"></script> 22 <script type="text/javascript" src="{{ STATIC_URL }}js/semantics_coupling.js"></script>
22 <script type="text/javascript" src="{{ STATIC_URL }}js/lemma-view.js"></script> 23 <script type="text/javascript" src="{{ STATIC_URL }}js/lemma-view.js"></script>
  24 + <script type="text/javascript" src="{{ STATIC_URL }}js/lemmas_filtering.js"></script>
  25 + <script type="text/javascript" src="{{ STATIC_URL }}js/schemata_filtering.js"></script>
23 26
24 {% endblock %} 27 {% endblock %}
25 28
@@ -59,10 +62,7 @@ @@ -59,10 +62,7 @@
59 <div class="tabs"> 62 <div class="tabs">
60 <ul> 63 <ul>
61 <li id="refresh_frames"><a href="#new_frames">{% trans "Schematy" %} [<span id="new-frames-count"></span>]</a></li> 64 <li id="refresh_frames"><a href="#new_frames">{% trans "Schematy" %} [<span id="new-frames-count"></span>]</a></li>
62 -  
63 - {% if perms.dictionary.view_semantics %}  
64 - <li><a href="#semantics">{% trans "Semantyka" %} [<span id="semantic-frames-count"></span>]</a></li>  
65 - {% endif %} 65 + <li><a href="#semantics">{% trans "Semantyka" %} [<span id="semantic-frames-count"></span>]</a></li>
66 <!-- li><a href="#old_frames">{% trans "Stare schematy" %}</a></li --> 66 <!-- li><a href="#old_frames">{% trans "Stare schematy" %}</a></li -->
67 {% if perms.dictionary.add_notes %} 67 {% if perms.dictionary.add_notes %}
68 <li><a href="#notes">{% trans "Notatki" %} [<span id="lemma-notes-count"></span>]</a></li> 68 <li><a href="#notes">{% trans "Notatki" %} [<span id="lemma-notes-count"></span>]</a></li>
@@ -79,10 +79,8 @@ @@ -79,10 +79,8 @@
79 </ul> 79 </ul>
80 <div id="new_frames"> 80 <div id="new_frames">
81 </div> 81 </div>
82 - {% if perms.dictionary.view_semantics %}  
83 - <div id="semantics">  
84 - </div>  
85 - {% endif %} 82 + <div id="semantics">
  83 + </div>
86 {% if perms.dictionary.add_notes %} 84 {% if perms.dictionary.add_notes %}
87 <div id="notes"> 85 <div id="notes">
88 </div> 86 </div>
dictionary/templates/manage_vocabularies.html
@@ -7,7 +7,7 @@ @@ -7,7 +7,7 @@
7 {% endblock %} 7 {% endblock %}
8 8
9 {% block content %} 9 {% block content %}
10 -</br> 10 +<br/>
11 11
12 <table> 12 <table>
13 <tr> 13 <tr>
dictionary/templates/new_frames.html
@@ -6,34 +6,12 @@ @@ -6,34 +6,12 @@
6 $('#add-table-elem-dialog').dialog({ autoOpen: false, 6 $('#add-table-elem-dialog').dialog({ autoOpen: false,
7 modal: 'true', 7 modal: 'true',
8 width: 400}); 8 width: 400});
9 - {% if can_modify %}  
10 - window.can_modify = true;  
11 - {% else %}  
12 - window.can_modify = false;  
13 - {% endif %}  
14 create_example_opinion_dialog(); 9 create_example_opinion_dialog();
15 } 10 }
16 -  
17 - lemma_nkjp_examples_str = '{{lemma_nkjp_examples}}';  
18 - lemma_nkjp_examples_str = convertHtml(lemma_nkjp_examples_str);  
19 - window.nkjp_lemma_examples = serializedNkjpToObj(lemma_nkjp_examples_str);  
20 -  
21 - serialized_text = '{{serialized_frames}}';  
22 - serialized_text = convertHtml(serialized_text);  
23 - serialized_frames = serialized_text;  
24 - schemas = serializedObjToObj(serialized_frames);  
25 -  
26 - nkjp_examples_str = '{{nkjp_examples}}';  
27 - nkjp_examples_str = convertHtml(nkjp_examples_str);  
28 - window.nkjp_examples = serializedNkjpToObj(nkjp_examples_str);  
29 -  
30 - $("span#new-frames-count").empty();  
31 - $("span#new-frames-count").append(schemas.length);  
32 - $("span#lemma-examples-count").empty();  
33 - $("span#lemma-examples-count").append(window.nkjp_lemma_examples.length);  
34 11
35 function draw_filtered_new_frames() 12 function draw_filtered_new_frames()
36 { 13 {
  14 + window.filter_schema_type_val = $('#frame_filter #schema_type_filter').val();
37 window.filter_aspect_val = $('#frame_filter #aspect_filter').val(); 15 window.filter_aspect_val = $('#frame_filter #aspect_filter').val();
38 window.filter_reflex_val = $('#frame_filter #reflex_filter').val(); 16 window.filter_reflex_val = $('#frame_filter #reflex_filter').val();
39 window.filter_neg_val = $('#frame_filter #neg_filter').val(); 17 window.filter_neg_val = $('#frame_filter #neg_filter').val();
@@ -103,15 +81,7 @@ @@ -103,15 +81,7 @@
103 $("button#back").click(function(){backOneModification(); return false;}); 81 $("button#back").click(function(){backOneModification(); return false;});
104 $("button#forward").click(function(){forwardOneModification(); return false;}); 82 $("button#forward").click(function(){forwardOneModification(); return false;});
105 $("button#save").click(function(){save_new_frames(); return false;}); 83 $("button#save").click(function(){save_new_frames(); return false;});
106 - $("button#validate").click(function(){  
107 - if(window.change)  
108 - {  
109 - HideProgressAnimation();  
110 - error_alert('Przed walidacją/zmianą statusu hasło musi zostać zapisane.');  
111 - return false;  
112 - }  
113 - validate_new_frames(false, false);  
114 - }); 84 + $("button#validate").click(function(){validateSchemata(); return false;});
115 85
116 $("button#delete_example").click(function(){ 86 $("button#delete_example").click(function(){
117 delete_nkjp_example(selected_example_id)}); 87 delete_nkjp_example(selected_example_id)});
@@ -286,6 +256,7 @@ $(document).ready(function() { @@ -286,6 +256,7 @@ $(document).ready(function() {
286 $('#filter_frames_options').click(function(){ 256 $('#filter_frames_options').click(function(){
287 $('#filter_frames_options #options').slideToggle('fast'); 257 $('#filter_frames_options #options').slideToggle('fast');
288 }); 258 });
  259 + $('#frame_filter #schema_type_filter').change(draw_filtered_new_frames);
289 $('#frame_filter #aspect_filter').change(draw_filtered_new_frames); 260 $('#frame_filter #aspect_filter').change(draw_filtered_new_frames);
290 $('#frame_filter #reflex_filter').change(draw_filtered_new_frames); 261 $('#frame_filter #reflex_filter').change(draw_filtered_new_frames);
291 $('#frame_filter #neg_filter').change(draw_filtered_new_frames); 262 $('#frame_filter #neg_filter').change(draw_filtered_new_frames);
@@ -296,28 +267,35 @@ $(document).ready(function() { @@ -296,28 +267,35 @@ $(document).ready(function() {
296 $('#filter_frames_options #options').click(function(event) { 267 $('#filter_frames_options #options').click(function(event) {
297 event.stopPropagation(); 268 event.stopPropagation();
298 }); 269 });
299 - $('#cancel_frame_filter').click(function(event) {  
300 - window.filter_position_val = '.*';  
301 - window.filter_argument_val = '.*';  
302 - window.filter_aspect_val = '*';  
303 - window.filter_reflex_val = '*';  
304 - window.filter_neg_val = '*';  
305 - window.filter_pred_val = '*';  
306 - window.filter_opinion_val = '*';  
307 - $('#frame_filter #argument_filter').val(window.filter_argument_val);  
308 - $('#frame_filter #position_filter').val(window.filter_position_val);  
309 - $('#frame_filter #aspect_filter').val(window.filter_aspect_val);  
310 - $('#frame_filter #reflex_filter').val(window.filter_reflex_val);  
311 - $('#frame_filter #neg_filter').val(window.filter_neg_val);  
312 - $('#frame_filter #pred_filter').val(window.filter_pred_val);  
313 - $('#frame_filter #opinion_filter').val(window.filter_opinion_val);  
314 - $('#frame_filter #argument_filter').trigger('change');  
315 - });  
316 -  
317 - frame_class = 'InactiveFrameTable'; 270 + $('#cancel_frame_filter').click(cancel_schemata_filtering);
  271 +
318 {% if can_modify %} 272 {% if can_modify %}
319 - frame_class = 'ActiveFrameTable'; 273 + {% if perms.dictionary.add_syntactic_frames or perms.dictionary.add_phraseologic_frames %}
  274 + $(document).unbind('keydown');
  275 + $(document).bind('keydown', 'shift+s', saveHandle);
  276 + $(document).bind('keydown', 'shift+z', function(evt){backOneModification(); return false; });
  277 + $(document).bind('keydown', 'shift+y', function(evt){forwardOneModification(); return false; });
  278 + $(document).bind('keydown', 'shift+a', function(evt){addElement(); return false; });
  279 + $(document).bind('keydown', 'shift+r', function(evt){removeElement(); return false; });
  280 + $(document).bind('keydown', 'shift+d', function(evt){duplicateElement(); return false; });
  281 + $(document).bind('keydown', 'shift+c', function(evt){copyElement(); return false; });
  282 + $(document).bind('keydown', 'shift+v', function(evt){pasteElement(); return false; });
  283 + $(document).bind('keydown', 'shift+w', function(evt){validateSchemata(); return false; });
  284 + {% if perms.dictionary.add_syntactic_frames %}
  285 + $(document).bind('keydown', 'shift+x', function(evt){cutElement(); return false; });
  286 + $(document).bind('keydown', 'shift+m', function(evt){reserveLemma(); return false; });
  287 + {% endif %}
  288 + {% if perms.dictionary.add_phraseologic_frames %}
  289 + $(document).bind('keydown', 'shift+l', function(evt){addPhraseologicFrame(); return false; });
  290 + $(document).bind('keydown', 'shift+b', function(evt){openAssignPhraseologicFrameDialog(); return false; });
  291 + {% endif %}
  292 + {% endif %}
  293 + {% else %}
  294 + $(document).unbind('keydown');
  295 + {% if perms.dictionary.own_lemmas %}
  296 + $(document).bind('keydown', 'shift+m', function(evt){reserveLemma(); return false; });
  297 + {% endif %}
320 {% endif %} 298 {% endif %}
321 - draw_filtered_frames(window.schemas, 'new-frame-tables', 'new-frame-table', 'frame_filter', window.nkjp_examples, frame_class, window.lemma_entry, window.lemma_entry); 299 +
322 }); 300 });
323 </script> 301 </script>
dictionary/templates/sel_user_stats.html
@@ -123,27 +123,33 @@ @@ -123,27 +123,33 @@
123 <table class='PaymentsTable'> 123 <table class='PaymentsTable'>
124 <tr> 124 <tr>
125 <td class='EmptyCell' colspan=1></td> 125 <td class='EmptyCell' colspan=1></td>
126 - <td class='ColumnHeader' colspan=5>Semantycy:</td>  
127 - <td class='ColumnHeader' colspan=2>Supersemantycy:</td> 126 + <td class='ColumnHeader' colspan=7>Semantycy:</td>
  127 + <td class='ColumnHeader' colspan=3>Supersemantycy:</td>
128 </tr> 128 </tr>
129 <tr> 129 <tr>
130 <td class='ColumnHeader'>Kwota za wykonaną pracę:</td> 130 <td class='ColumnHeader'>Kwota za wykonaną pracę:</td>
131 <td class='ColumnHeader'>Bonus:</td> 131 <td class='ColumnHeader'>Bonus:</td>
132 <td class='ColumnHeader'>Wykonane ramy:</td> 132 <td class='ColumnHeader'>Wykonane ramy:</td>
133 <td class='ColumnHeader'>Poprawnie wykonane ramy:</td> 133 <td class='ColumnHeader'>Poprawnie wykonane ramy:</td>
  134 + <td class='ColumnHeader'>Częściowo poprawnie wykonane ramy:</td>
134 <td class='ColumnHeader'>Błędnie wykonane ramy:</td> 135 <td class='ColumnHeader'>Błędnie wykonane ramy:</td>
  136 + <td class='ColumnHeader'>Dodane powiązania ze składnią:</td>
135 <td class='ColumnHeader'>Skuteczność:</td> 137 <td class='ColumnHeader'>Skuteczność:</td>
136 <td class='ColumnHeader'>Sprawdzone ramy:</td> 138 <td class='ColumnHeader'>Sprawdzone ramy:</td>
137 <td class='ColumnHeader'>Poprawione ramy:</td> 139 <td class='ColumnHeader'>Poprawione ramy:</td>
  140 + <td class='ColumnHeader'>Częściowo poprawione ramy:</td>
138 </tr> 141 </tr>
139 <tr> 142 <tr>
140 <td>{{semantics_work_stats.earned_cash}} zł</td> 143 <td>{{semantics_work_stats.earned_cash}} zł</td>
141 <td>{{semantics_work_stats.bonus_cash}} zł</td> 144 <td>{{semantics_work_stats.bonus_cash}} zł</td>
142 <td>{{semantics_work_stats.made_frames}}</td> 145 <td>{{semantics_work_stats.made_frames}}</td>
143 <td>{{semantics_work_stats.prop_frames}}</td> 146 <td>{{semantics_work_stats.prop_frames}}</td>
  147 + <td>{{semantics_work_stats.part_prop_frames}}</td>
144 <td>{{semantics_work_stats.wrong_frames}}</td> 148 <td>{{semantics_work_stats.wrong_frames}}</td>
  149 + <td>{{semantics_work_stats.added_connections}}</td>
145 <td>{{semantics_work_stats.efficacy}} %</td> 150 <td>{{semantics_work_stats.efficacy}} %</td>
146 <td>{{semantics_work_stats.checked_frames}}</td> 151 <td>{{semantics_work_stats.checked_frames}}</td>
147 <td>{{semantics_work_stats.corr_frames}}</td> 152 <td>{{semantics_work_stats.corr_frames}}</td>
  153 + <td>{{semantics_work_stats.part_corr_frames}}</td>
148 </tr> 154 </tr>
149 </table> 155 </table>
dictionary/templates/tex/slowal.tex
@@ -48,7 +48,7 @@ @@ -48,7 +48,7 @@
48 \begin{document} 48 \begin{document}
49 49
50 {% for lemma in lemmas %} 50 {% for lemma in lemmas %}
51 - {% lemma_tex lemma forloop.counter0 q_frame_opinions sort_reflex_vals sort_aspect_vals sort_neg_vals sort_pred_vals download_dict %} 51 + {% lemma_tex lemma forloop.counter0 q_frame_opinions download_dict %}
52 {% endfor %} 52 {% endfor %}
53 53
54 \end{document} 54 \end{document}
dictionary/templatetags/tex_tags.py
@@ -103,6 +103,8 @@ def lemma_tex(lemma, loop_counter, q_frame_opinions, download_dict): @@ -103,6 +103,8 @@ def lemma_tex(lemma, loop_counter, q_frame_opinions, download_dict):
103 pred_val=pred_val, 103 pred_val=pred_val,
104 aspect_val=aspect_val).order_by('text_rep') 104 aspect_val=aspect_val).order_by('text_rep')
105 for frame in matching_frames: 105 for frame in matching_frames:
  106 + if not lemma.phraseology_ready() and frame.phraseologic:
  107 + continue
106 if not download_dict['frame_opinions'] or (frame_opinions and frame_opinions.filter(frame=frame).count() > 0): 108 if not download_dict['frame_opinions'] or (frame_opinions and frame_opinions.filter(frame=frame).count() > 0):
107 frames.append(frame) 109 frames.append(frame)
108 return {'lemma': lemma, 110 return {'lemma': lemma,
dictionary/validation.py
@@ -480,6 +480,27 @@ def are_examples_approved(lemma, schema, serialized_schema): @@ -480,6 +480,27 @@ def are_examples_approved(lemma, schema, serialized_schema):
480 error = True 480 error = True
481 return error 481 return error
482 482
  483 +################### schemata validation for semantic statuses ##########
  484 +def validate_schemata_for_semantics_and_mark_errors(lemma, status, selected_schema_id):
  485 + error = False
  486 + serialized_schemata = []
  487 + for schema_obj in lemma.frames.all():
  488 + serialized_schema = frameObjToSerializableDict(lemma, schema_obj, True)
  489 + if selected_schema_id and schema_obj.id != selected_schema_id:
  490 + serialized_schemata.append(serialized_schema)
  491 + continue
  492 + if check_schema_for_semantics_and_mark_errors(lemma, status, schema_obj, serialized_schema):
  493 + error = True
  494 + serialized_schemata.append(serialized_schema)
  495 + return serialized_schemata, error
  496 +
  497 +def check_schema_for_semantics_and_mark_errors(lemma, status, schema, serialized_schema):
  498 + error = False
  499 + # WALIDACJA 34: Pod schematem z refl nie mogą być przykłady niepodpięte pod refl
  500 + if check_and_mark_examples_must_use_refl_rule(lemma, schema, serialized_schema):
  501 + error = True
  502 + return error
  503 +
483 ###################### schemas validation ############################## 504 ###################### schemas validation ##############################
484 def validate_schemas_and_mark_errors(lemma, status, selected_frame_id): 505 def validate_schemas_and_mark_errors(lemma, status, selected_frame_id):
485 error = False 506 error = False
dictionary/views.py
1 #-*- coding:utf-8 -*- 1 #-*- coding:utf-8 -*-
2 2
3 -#Copyright (c) 2012, Bartłomiej Nitoń  
4 -#All rights reserved.  
5 -  
6 -#Redistribution and use in source and binary forms, with or without modification, are permitted provided  
7 -#that the following conditions are met:  
8 -  
9 -# Redistributions of source code must retain the above copyright notice, this list of conditions and  
10 -# the following disclaimer.  
11 -# Redistributions in binary form must reproduce the above copyright notice, this list of conditions  
12 -# and the following disclaimer in the documentation and/or other materials provided with the distribution.  
13 -  
14 -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED  
15 -# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A  
16 -# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR  
17 -# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED  
18 -# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)  
19 -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING  
20 -# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE  
21 -# POSSIBILITY OF SUCH DAMAGE.  
22 -  
23 import datetime 3 import datetime
24 import os 4 import os
25 5
@@ -32,7 +12,8 @@ from django.http import HttpResponse @@ -32,7 +12,8 @@ from django.http import HttpResponse
32 import settings 12 import settings
33 from accounts.models import UserSettings 13 from accounts.models import UserSettings
34 from common.decorators import render 14 from common.decorators import render
35 -from dictionary.ajax_jqgrid import default_sort_rules, default_filter_rules 15 +from dictionary.ajax_jqgrid import default_sort_rules
  16 +from dictionary.filtering import all_filter_rules_loaded, default_filter_rules
36 from dictionary.forms import UserCreateForm, GetVocabularyForm, ArgStatsForm 17 from dictionary.forms import UserCreateForm, GetVocabularyForm, ArgStatsForm
37 from dictionary.models import PositionCategory, NKJP_Opinion, NKJP_Source, \ 18 from dictionary.models import PositionCategory, NKJP_Opinion, NKJP_Source, \
38 Vocabulary, Message, RealizationType 19 Vocabulary, Message, RealizationType
@@ -86,10 +67,12 @@ def lemma_view(request): @@ -86,10 +67,12 @@ def lemma_view(request):
86 if not 'status' in request.session['sort_rules_lemma_preview']: 67 if not 'status' in request.session['sort_rules_lemma_preview']:
87 request.session['sort_rules_lemma_preview']['status'] = { 'priority': None, 'sort_order': 'desc'} 68 request.session['sort_rules_lemma_preview']['status'] = { 'priority': None, 'sort_order': 'desc'}
88 69
89 - if not request.session.has_key('filter_rules'): 70 + if (not request.session.has_key('filter_rules') or
  71 + not all_filter_rules_loaded(request.session['filter_rules'])):
90 request.session['filter_rules'] = default_filter_rules() 72 request.session['filter_rules'] = default_filter_rules()
91 73
92 - if not request.session.has_key('filter_rules_lemma_preview'): 74 + if (not request.session.has_key('filter_rules_lemma_preview') or
  75 + not all_filter_rules_loaded(request.session['filter_rules_lemma_preview'])):
93 request.session['filter_rules_lemma_preview'] = default_filter_rules() 76 request.session['filter_rules_lemma_preview'] = default_filter_rules()
94 77
95 if not request.session.has_key('lemma_preview'): 78 if not request.session.has_key('lemma_preview'):
@@ -137,8 +120,6 @@ def lemma_view(request): @@ -137,8 +120,6 @@ def lemma_view(request):
137 'ajax_user_is_authenticated': reverse('user_is_authenticated'), 120 'ajax_user_is_authenticated': reverse('user_is_authenticated'),
138 121
139 'ajax_semantics': reverse('semantics.views.ajax_semantics'), 122 'ajax_semantics': reverse('semantics.views.ajax_semantics'),
140 - 'ajax_schema_got_assigned_semantics': reverse('schema_got_assigned_semantics'),  
141 - 'ajax_example_got_assigned_semantics': reverse('example_got_assigned_semantics'),  
142 123
143 'ajax_get_note_text': reverse('get_note_text'), 124 'ajax_get_note_text': reverse('get_note_text'),
144 'ajax_lemma_notes_form_submit': reverse('lemma_notes_form_submit'), 125 'ajax_lemma_notes_form_submit': reverse('lemma_notes_form_submit'),
@@ -193,6 +174,7 @@ def lemma_view(request): @@ -193,6 +174,7 @@ def lemma_view(request):
193 'ajax_deselect_preview_tab': reverse('deselect_preview_tab'), 174 'ajax_deselect_preview_tab': reverse('deselect_preview_tab'),
194 'ajax_get_schemata': reverse('get_schemata'), 175 'ajax_get_schemata': reverse('get_schemata'),
195 'ajax_get_examples': reverse('get_examples'), 176 'ajax_get_examples': reverse('get_examples'),
  177 + 'ajax_get_schemata_and_examples': reverse('get_schemata_and_examples'),
196 178
197 # powiazywanie hasel (nieczasownikowe) 179 # powiazywanie hasel (nieczasownikowe)
198 'ajax_relate_entries' : reverse('relate_entries'), 180 'ajax_relate_entries' : reverse('relate_entries'),
@@ -208,6 +190,11 @@ def lemma_view(request): @@ -208,6 +190,11 @@ def lemma_view(request):
208 'ajax_save_columns': reverse('save_columns'), 190 'ajax_save_columns': reverse('save_columns'),
209 'ajax_save_columns': reverse('save_columns'), 191 'ajax_save_columns': reverse('save_columns'),
210 'ajax_get_sort_order': reverse('get_sort_order'), 192 'ajax_get_sort_order': reverse('get_sort_order'),
  193 + 'ajax_sem_arg_form': reverse('sem_arg_form'),
  194 + 'ajax_general_preference_form': reverse('general_preference_form'),
  195 + 'ajax_synset_preference_form': reverse('synset_preference_form'),
  196 + 'ajax_relational_preference_form': reverse('relational_preference_form'),
  197 + 'ajax_synset_context_lookup': reverse('synset_context_lookup'),
211 198
212 # czasowniki podobne 199 # czasowniki podobne
213 'ajax_similar_lemmas_old_form_submit': reverse('similar_lemmas_old_form_submit'), 200 'ajax_similar_lemmas_old_form_submit': reverse('similar_lemmas_old_form_submit'),
@@ -437,7 +424,7 @@ def manage_arg_realizations(request): @@ -437,7 +424,7 @@ def manage_arg_realizations(request):
437 return to_return 424 return to_return
438 425
439 def download_walenty(request): 426 def download_walenty(request):
440 - generation_date = datetime.datetime.now() 427 + generation_date = datetime.datetime.now() - datetime.timedelta(days=1)
441 walenty_file_name = '%s_%s.tar.gz' % ('walenty', generation_date.strftime('%Y%m%d')) 428 walenty_file_name = '%s_%s.tar.gz' % ('walenty', generation_date.strftime('%Y%m%d'))
442 walenty_path = os.path.join(settings.WALENTY_PATH, walenty_file_name) 429 walenty_path = os.path.join(settings.WALENTY_PATH, walenty_file_name)
443 430
semantics/admin.py
1 from django.contrib import admin 1 from django.contrib import admin
2 2
3 -from models import FramePosition, GeneralSelectivePreference, LexicalUnitExamples, \  
4 - SelectivePreferenceRelations, SemanticFrame, SemanticRole, \  
5 - SemanticRolesDisplay 3 +from models import FrameOpinion, FramePosition, GeneralSelectivePreference, \
  4 + LexicalUnitExamples, SelectivePreferenceRelations, SemanticFrame, \
  5 + SemanticRole, SemanticRolesDisplay
6 6
7 7
8 8
9 class SemanticRoleAdmin(admin.ModelAdmin): 9 class SemanticRoleAdmin(admin.ModelAdmin):
10 search_fields = ('role',) 10 search_fields = ('role',)
11 11
  12 +admin.site.register(FrameOpinion)
12 admin.site.register(FramePosition) 13 admin.site.register(FramePosition)
13 admin.site.register(GeneralSelectivePreference) 14 admin.site.register(GeneralSelectivePreference)
14 admin.site.register(LexicalUnitExamples) 15 admin.site.register(LexicalUnitExamples)
semantics/change_log.py
@@ -179,7 +179,7 @@ def backup_lemma_and_get_frames(lemma): @@ -179,7 +179,7 @@ def backup_lemma_and_get_frames(lemma):
179 179
180 def backup_lemma(lemma_id): 180 def backup_lemma(lemma_id):
181 lemma = Lemma.objects.get(id=lemma_id, old=False) 181 lemma = Lemma.objects.get(id=lemma_id, old=False)
182 - lexical_units = LexicalUnit.objects.filter(Q(base = lemma.entry)|Q(base = lemma.entry + u' się')).order_by('sense') 182 + lexical_units = lemma.entry_obj.meanings.order_by('sense')
183 183
184 frame_ids = [] 184 frame_ids = []
185 for lexical_unit in lexical_units: 185 for lexical_unit in lexical_units:
semantics/forms.py 0 → 100644
  1 +#-*- coding:utf-8 -*-
  2 +
  3 +from django.forms import CharField, ChoiceField, Form, ModelChoiceField
  4 +
  5 +from semantics.models import GeneralSelectivePreference, SelectivePreferenceRelations, SemanticRole
  6 +
  7 +class RoleForm(Form):
  8 + negation = ChoiceField(choices=[('', u'---'), ('!', u'!')], required=False,
  9 + label=u'')
  10 + role = ModelChoiceField(label=u'Rola', required=False,
  11 + queryset=SemanticRole.objects.filter(gradient__isnull=True))
  12 + attribute = ModelChoiceField(label=u'Atrybut', required=False,
  13 + queryset=SemanticRole.objects.filter(gradient__isnull=False))
  14 + preference_type = ChoiceField(choices=[('general', u'Predefiniowana'), ('synset', u'Słowosieć'), ('relation', u'Relacja')],
  15 + label=u'Preferencje selekcyjne', required=False)
  16 + preferences = []
  17 +
  18 + def __init__(self, negation=None, sel_role=None, sel_attribute=None,
  19 + sel_preferences=[],
  20 + *args, **kwargs):
  21 + super(RoleForm, self).__init__(*args, **kwargs)
  22 + self.fields['negation'].initial = negation
  23 + self.fields['role'].initial = sel_role
  24 + self.fields['attribute'].initial = sel_attribute
  25 + self.preferences = sel_preferences
  26 +
  27 +class GeneralSelPrefForm(Form):
  28 + general_pref = ModelChoiceField(label=u'Predefiniowana',
  29 + queryset=GeneralSelectivePreference.objects.order_by('name'), required=False)
  30 +
  31 + def pref_type(self):
  32 + return 'general'
  33 +
  34 + def __init__(self, sel_preference=None, *args, **kwargs):
  35 + super(GeneralSelPrefForm, self).__init__(*args, **kwargs)
  36 + self.fields['general_pref'].initial = sel_preference
  37 +
  38 +class SynsetSelPrefForm(Form):
  39 + synset_pref = CharField(label=u'Słowosieć', required=False)
  40 +
  41 + def pref_type(self):
  42 + return 'synset'
  43 +
  44 + def __init__(self, sel_preference=None, *args, **kwargs):
  45 + super(SynsetSelPrefForm, self).__init__(*args, **kwargs)
  46 + self.fields['synset_pref'].initial = sel_preference
  47 +
  48 +class RelationalSelPrefForm(Form):
  49 + relational_pref_relation = ModelChoiceField(label=u'Relacja',
  50 + queryset=SelectivePreferenceRelations.objects.all(), required=False)
  51 + relational_pref_role = ModelChoiceField(label=u'Rola', required=False,
  52 + queryset=SemanticRole.objects.filter(gradient__isnull=True))
  53 + relational_pref_attribute = ModelChoiceField(label=u'Atrybut', required=False,
  54 + queryset=SemanticRole.objects.filter(gradient__isnull=False))
  55 +
  56 + def pref_type(self):
  57 + return 'relational'
  58 +
  59 + def __init__(self, sel_relation=None, sel_role=None, sel_attribute=None, *args, **kwargs):
  60 + super(RelationalSelPrefForm, self).__init__(*args, **kwargs)
  61 + self.fields['relational_pref_relation'].initial = sel_relation
  62 + self.fields['relational_pref_role'].initial = sel_role
  63 + self.fields['relational_pref_attribute'].initial = sel_attribute
  64 +
0 \ No newline at end of file 65 \ No newline at end of file
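
The new form classes are plain, unbound Django forms whose initial values are injected through the constructor. A hedged sketch of how the 'sem_arg_form' endpoint registered in dictionary/views.py could hand one of them to the sem_arg_form.html include used by filter_form.html; the view body itself is not part of this diff, so sem_arg_form_view and its context are assumptions.
--------------------------------------------
from django.shortcuts import render

from semantics.forms import RoleForm

def sem_arg_form_view(request):
    # an empty semantic-argument filter: no negation, role or attribute preselected
    form = RoleForm(negation=None, sel_role=None, sel_attribute=None,
                    sel_preferences=[])
    return render(request, 'sem_arg_form.html', {'form': form})
--------------------------------------------
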
semantics/management/commands/find_hanging_examples.py
@@ -25,8 +25,18 @@ def print_hanging_examples(lemma): @@ -25,8 +25,18 @@ def print_hanging_examples(lemma):
25 example = lu_ex.example 25 example = lu_ex.example
26 if not lemma.nkjp_examples.filter(id=example.id).exists(): 26 if not lemma.nkjp_examples.filter(id=example.id).exists():
27 print '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!' 27 print '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
28 - print 'lemma: %s\tlu_ex_id: %d\texample: %s\tex_id: %d' % (lemma.entry_obj.name,  
29 - lu_ex.id,  
30 - example.sentence,  
31 - example.id) 28 + print 'hanging example --> lemma: %s\tlu_ex_id: %d\texample: %s\tex_id: %d' % (lemma.entry_obj.name,
  29 + lu_ex.id,
  30 + example.sentence,
  31 + example.id)
  32 + same_lu_examples = LexicalUnitExamples.objects.filter(lexical_unit=lu, example=example)
  33 + if same_lu_examples.count() > 1:
  34 + print '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
  35 + print 'multiple pointers --> lemma: %s\tlu_ex_id: %d\texample: %s\tex_id: %d' % (lemma.entry_obj.name,
  36 + lu_ex.id,
  37 + example.sentence,
  38 + example.id)
  39 + for ex in same_lu_examples.all():
  40 + print 'lu_ex_id: %d' % ex.id
  41 +
32 42
33 \ No newline at end of file 43 \ No newline at end of file
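
The extended diagnostics above (hanging examples plus duplicate LexicalUnitExamples pointers) still live in a plain management command. A hedged invocation sketch, assuming the command takes no required arguments:
--------------------------------------------
# Equivalent to "python manage.py find_hanging_examples" run from the project root.
from django.core.management import call_command

call_command('find_hanging_examples')
--------------------------------------------
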
semantics/models.py
@@ -54,6 +54,12 @@ class SemanticFrame(models.Model): @@ -54,6 +54,12 @@ class SemanticFrame(models.Model):
54 return False 54 return False
55 return True 55 return True
56 56
  57 + def connected_schemata(self):
  58 + connected_schemata_ids = []
  59 + for compl in self.complements.all():
  60 + connected_schemata_ids.extend([schema.id for schema in compl.connected_schemata()])
  61 + return Frame.objects.filter(id__in=connected_schemata_ids).distinct()
  62 +
57 def __unicode__(self): 63 def __unicode__(self):
58 complements_str_tab = [unicode(compl) for compl in self.complements.all()] 64 complements_str_tab = [unicode(compl) for compl in self.complements.all()]
59 return u'%d --> %s' % (self.id, u'+'.join(complements_str_tab)) 65 return u'%d --> %s' % (self.id, u'+'.join(complements_str_tab))
@@ -125,6 +131,10 @@ class Complement(models.Model): # pola z ramki @@ -125,6 +131,10 @@ class Complement(models.Model): # pola z ramki
125 return False 131 return False
126 return True 132 return True
127 133
  134 + def connected_schemata(self):
  135 + schemata_ids = [real.frame.id for real in self.realizations.all()]
  136 + return Frame.objects.filter(id__in=schemata_ids).distinct()
  137 +
128 def __unicode__(self): 138 def __unicode__(self):
129 return u'%d:%s' % (self.id, self.roles.all()) 139 return u'%d:%s' % (self.id, self.roles.all())
130 140
semantics/saving.py
@@ -2,7 +2,7 @@ @@ -2,7 +2,7 @@
2 2
3 from django.db.models import Min 3 from django.db.models import Min
4 4
5 -from dictionary.models import Argument, Frame, NKJP_Example, Position 5 +from dictionary.models import Argument, Frame, Lemma, NKJP_Example, Position
6 from semantics.change_log import store_old_versions 6 from semantics.change_log import store_old_versions
7 from semantics.models import Complement, GeneralSelectivePreference, FramePosition,\ 7 from semantics.models import Complement, GeneralSelectivePreference, FramePosition,\
8 LexicalUnitExamples, RelationalSelectivePreference, \ 8 LexicalUnitExamples, RelationalSelectivePreference, \
@@ -280,8 +280,9 @@ def remove_preference(frame_id, complement_id, preference): @@ -280,8 +280,9 @@ def remove_preference(frame_id, complement_id, preference):
280 complement.selective_preference.relations.remove(r) 280 complement.selective_preference.relations.remove(r)
281 281
282 282
283 -def update_meanings(operations): 283 +def update_meanings(lemma_id, operations):
284 translation = {} 284 translation = {}
  285 + entry = Lemma.objects.get(id=lemma_id).entry_obj
285 for operation in operations: 286 for operation in operations:
286 if operation['operation'] == "set_glossa": 287 if operation['operation'] == "set_glossa":
287 if int(operation['unit']) in translation: 288 if int(operation['unit']) in translation:
@@ -302,7 +303,7 @@ def update_meanings(operations): @@ -302,7 +303,7 @@ def update_meanings(operations):
302 unit = int(operation['unit']) 303 unit = int(operation['unit'])
303 remove_example(unit, operation['example']) 304 remove_example(unit, operation['example'])
304 elif operation['operation'] == "add_unit": 305 elif operation['operation'] == "add_unit":
305 - translation[operation['unit']['id']] = add_unit(operation['unit']) 306 + translation[operation['unit']['id']] = add_unit(entry, operation['unit'])
306 elif operation['operation'] == "remove_unit": 307 elif operation['operation'] == "remove_unit":
307 luid = int(operation['luid']) 308 luid = int(operation['luid'])
308 if luid in translation: 309 if luid in translation:
@@ -329,11 +330,11 @@ def remove_example(unit_id, example_id): @@ -329,11 +330,11 @@ def remove_example(unit_id, example_id):
329 lue = LexicalUnitExamples.objects.get(example=nkjp_example, lexical_unit=unit) 330 lue = LexicalUnitExamples.objects.get(example=nkjp_example, lexical_unit=unit)
330 lue.delete() 331 lue.delete()
331 332
332 -def add_unit(unit): # returns new id 333 +def add_unit(entry, unit): # returns new id
333 334
334 s1 = Synset(id=(min(Synset.objects.all().aggregate(Min('id'))['id__min'], 0) - 1)) 335 s1 = Synset(id=(min(Synset.objects.all().aggregate(Min('id'))['id__min'], 0) - 1))
335 s1.save() 336 s1.save()
336 - lu = LexicalUnit(base=unit['base'], sense=unit['sense'], pos=unit['pos'], glossa=unit['glossa'], luid=-1, synset=s1) 337 + lu = LexicalUnit(entry=entry, base=unit['base'], sense=unit['sense'], pos=unit['pos'], glossa=unit['glossa'], luid=-1, synset=s1)
337 lu.save() 338 lu.save()
338 339
339 340
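
update_meanings() now takes the lemma id first so that add_unit() can attach the new LexicalUnit to the lemma's Entry. A hedged sketch of the call; the operation payload uses only the keys that add_unit() reads, and the ids and values are placeholders for whatever the JavaScript client actually sends:
--------------------------------------------
# Illustrative sketch only; lemma_id and the unit data are placeholders.
from semantics.saving import update_meanings

lemma_id = 1                                  # placeholder: id of a current Lemma
operations = [{'operation': 'add_unit',
               'unit': {'id': -1,             # temporary client-side id
                        'base': u'przyklad',  # placeholder base form
                        'sense': 'A',
                        'pos': 'czasownik',
                        'glossa': u''}}]
update_meanings(lemma_id, operations)
--------------------------------------------
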
semantics/sem_urls.py
@@ -19,5 +19,10 @@ SEMANTIC_PATTERNS = patterns('semantics.views', @@ -19,5 +19,10 @@ SEMANTIC_PATTERNS = patterns('semantics.views',
19 url(r'^ajax/update_meanings/$', 'ajax_update_meanings'), 19 url(r'^ajax/update_meanings/$', 'ajax_update_meanings'),
20 url(r'^ajax/modify_frames/$', 'ajax_modify_frames'), 20 url(r'^ajax/modify_frames/$', 'ajax_modify_frames'),
21 url(r'^ajax/plWN_context_lookup/$', 'ajax_plWN_context_lookup'), 21 url(r'^ajax/plWN_context_lookup/$', 'ajax_plWN_context_lookup'),
  22 + url(r'^ajax/synset_context_lookup/$', 'synset_context_lookup'),
22 url(r'^ajax/validate_semantics/$', 'validate_semantics'), 23 url(r'^ajax/validate_semantics/$', 'validate_semantics'),
  24 + url(r'^ajax/sem_arg_form/$', 'sem_arg_form'),
  25 + url(r'^ajax/general_preference_form/$', 'general_preference_form'),
  26 + url(r'^ajax/synset_preference_form/$', 'synset_preference_form'),
  27 + url(r'^ajax/relational_preference_form/$', 'relational_preference_form'),
23 ) 28 )
semantics/static/js/semantics_frames.js
@@ -20,7 +20,7 @@ function getFrames(frames_display){ @@ -20,7 +20,7 @@ function getFrames(frames_display){
20 lexical_units_num.push(frames_display[i].lexical_units); 20 lexical_units_num.push(frames_display[i].lexical_units);
21 lexical_units_frames[i] = []; 21 lexical_units_frames[i] = [];
22 22
23 - frames = frames_display[i].frames; 23 + var frames = frames_display[i].frames;
24 for (j = 0; j < frames.length; j++) { 24 for (j = 0; j < frames.length; j++) {
25 frame_content[frames[j].frame_id] = frames[j]; 25 frame_content[frames[j].frame_id] = frames[j];
26 frame_localization[frames[j].frame_id] = {"units": i, "position": j}; 26 frame_localization[frames[j].frame_id] = {"units": i, "position": j};
@@ -461,14 +461,14 @@ function saveFrames() { @@ -461,14 +461,14 @@ function saveFrames() {
461 } 461 }
462 else { 462 else {
463 frameClick(""); 463 frameClick("");
464 - $.getJSON(ajax_modify_frames, {"operations": JSON.stringify(frames_operations), "lemma_id": lemma_id}, function(data){  
465 - getFrames(data.frames_display);  
466 - displayFrames();  
467 - memorizeConnections(data.connections.connected, data.connections.connected_reverse);  
468 - $("#semantic-frames-count").empty();  
469 - $("#semantic-frames-count").append(data.frames_count);  
470 - updateSchemataConnections();  
471 - }); 464 + $.post(ajax_modify_frames, {"operations": JSON.stringify(frames_operations), "lemma_id": lemma_id}, function(data){
  465 + getFrames(data.frames_display);
  466 + displayFrames();
  467 + memorizeConnections(data.connections.connected, data.connections.connected_reverse);
  468 + $("#semantic-frames-count").empty();
  469 + $("#semantic-frames-count").append(data.frames_count);
  470 + updateSchemataConnections();
  471 + }, 'json');
472 frames_operations = []; 472 frames_operations = [];
473 } 473 }
474 } 474 }
semantics/static/js/semantics_lexical_units.js
@@ -100,9 +100,11 @@ function getLocation(luid) { @@ -100,9 +100,11 @@ function getLocation(luid) {
100 // save all changes to meanings (lexical units) 100 // save all changes to meanings (lexical units)
101 function saveMeanings() { 101 function saveMeanings() {
102 $.ajax({ 102 $.ajax({
  103 + type: "POST",
103 dataType: "json", 104 dataType: "json",
104 url: ajax_update_meanings, 105 url: ajax_update_meanings,
105 - data: {"operations": JSON.stringify(units_operations), "lemma_id": lemma_id}, 106 + data: {"operations": JSON.stringify(units_operations),
  107 + "lemma_id": lemma_id},
106 success: function(data){ 108 success: function(data){
107 memorizeLexicalUnits(data.lexical_units); 109 memorizeLexicalUnits(data.lexical_units);
108 basicLexicalUnitsData(data.informations); 110 basicLexicalUnitsData(data.informations);
@@ -110,6 +112,7 @@ function saveMeanings() { @@ -110,6 +112,7 @@ function saveMeanings() {
110 }, 112 },
111 async: false 113 async: false
112 }); 114 });
  115 +
113 $.ajax({ 116 $.ajax({
114 dataType: "json", 117 dataType: "json",
115 url: ajax_examples, 118 url: ajax_examples,
semantics/static/js/semantics_roles.js
@@ -53,13 +53,20 @@ function memorizeRoles(roles_display, roles_full){ @@ -53,13 +53,20 @@ function memorizeRoles(roles_display, roles_full){
53 function getStyle(frame_id, complement_num) { 53 function getStyle(frame_id, complement_num) {
54 style_type = ""; 54 style_type = "";
55 style_color = ""; 55 style_color = "";
56 - style_value = "" 56 +
  57 + style_value = "";
57 var roles = frame_content[parseInt(frame_id)].display.roles[complement_num].argument; 58 var roles = frame_content[parseInt(frame_id)].display.roles[complement_num].argument;
58 var i; 59 var i;
59 for (i = 0; i < roles.length; i++) { 60 for (i = 0; i < roles.length; i++) {
60 var color = role_color[roles[i]]; 61 var color = role_color[roles[i]];
61 if (color.gradient != "None") { 62 if (color.gradient != "None") {
62 - style_type = "linear-gradient(to " + color.gradient + ", "; 63 + // Safari browser only
  64 + if(navigator.userAgent.indexOf('Safari') != -1 && navigator.userAgent.indexOf('Chrome') == -1) {
  65 + style_type = "-webkit-linear-gradient(" + color.gradient + ", ";
  66 + }
  67 + else {
  68 + style_type = "linear-gradient(to " + color.gradient + ", ";
  69 + }
63 } else { 70 } else {
64 style_color = color.color 71 style_color = color.color
65 } 72 }
@@ -68,7 +75,13 @@ function getStyle(frame_id, complement_num) { @@ -68,7 +75,13 @@ function getStyle(frame_id, complement_num) {
68 style_type = "background-color"; 75 style_type = "background-color";
69 style_value = "rgb(" + style_color + ")"; 76 style_value = "rgb(" + style_color + ")";
70 } else { 77 } else {
71 - style_value = style_type + "rgba(" + style_color + ",0.1), rgba(" + style_color + ",1))"; 78 + // Safari browser only
  79 + if(navigator.userAgent.indexOf('Safari') != -1 && navigator.userAgent.indexOf('Chrome') == -1) {
  80 + style_value = style_type + "rgba(" + style_color + ",1), rgba(" + style_color + ",0.1))";
  81 + }
  82 + else {
  83 + style_value = style_type + "rgba(" + style_color + ",0.1), rgba(" + style_color + ",1))";
  84 + }
72 style_type = "background"; 85 style_type = "background";
73 } 86 }
74 return {"type": style_type, "value": style_value}; 87 return {"type": style_type, "value": style_value};
semantics/templates/general_preference_form.html 0 → 100644
  1 +<p class="fieldWrapper">
  2 + {{ form.general_pref.errors }} {{ form.general_pref.label_tag }}: {{ form.general_pref }}
  3 + <button type="button" onclick="removeSelPreferenceFilter(this)">Usuń</button>
  4 +</p>
0 \ No newline at end of file 5 \ No newline at end of file
semantics/templates/relational_preference_form.html 0 → 100644
  1 +<p class="fieldWrapper">
  2 + {{ form.relational_pref_relation.errors }} {{ form.relational_pref_relation.label_tag }}: {{ form.relational_pref_relation }}
  3 + {{ form.relational_pref_role.errors }} Do: {{ form.relational_pref_role }}
  4 + {{ form.relational_pref_attribute.errors }} {{ form.relational_pref_attribute }}
  5 + <button type="button" onclick="removeSelPreferenceFilter(this)">Usuń</button>
  6 +</p>
0 \ No newline at end of file 7 \ No newline at end of file
semantics/templates/sem_arg_form.html 0 → 100644
  1 +{% if form == 'or' %}
  2 + <div>
  3 + <hr class="alterSeparator">
  4 + <input type="hidden" name="or" value="or"><strong>lub</strong>
  5 + <button type="button" onclick="removeAlternative(this)">Usuń</button>
  6 + </div>
  7 +{% else %}
  8 + <hr class="argSeparator">
  9 + <p class="fieldWrapper">
  10 + {{ form.negation.errors }} {{ form.negation }}
  11 + {{ form.role.errors }} {{ form.role.label_tag }}: {{ form.role }}
  12 + {{ form.attribute.errors }} {{ form.attribute.label_tag }}: {{ form.attribute }}
  13 + <button type="button" onclick="removeSemArgFilter(this)">Usuń</button>
  14 + </p>
  15 + <div id="selectional-preferences">
  16 + <p>{{ form.preference_type.errors }} {{ form.preference_type.label_tag }}: {{ form.preference_type }} <button type="button" onclick="addSelectivePreferenceFilter(this)">Dodaj</button></p>
  17 + {% for pref_form in form.preferences %}
  18 + <p id="sel-preference">
  19 + {% if pref_form.pref_type == 'general' %}
  20 + {% include 'general_preference_form.html' with form=pref_form only %}
  21 + {% elif pref_form.pref_type == 'relational' %}
  22 + {% include 'relational_preference_form.html' with form=pref_form only %}
  23 + {% elif pref_form.pref_type == 'synset' %}
  24 + {% include 'synset_preference_form.html' with form=pref_form only %}
  25 + {% endif %}
  26 + </p>
  27 + {% endfor %}
  28 + </div>
  29 +{% endif %}
0 \ No newline at end of file 30 \ No newline at end of file
semantics/templates/synset_preference_form.html 0 → 100644
  1 +<p class="fieldWrapper">
  2 + {{ form.synset_pref.errors }} {{ form.synset_pref.label_tag }}: {{ form.synset_pref }}
  3 + <button type="button" onclick="removeSelPreferenceFilter(this)">Usuń</button>
  4 + <script type="text/javascript">
  5 + $(function(){
  6 + $('input#id_synset_pref').autocomplete({
  7 + select: function(event, ui){
  8 + },
  9 + source: function(req, add){
  10 + $.getJSON(ajax_synset_context_lookup, req, function(data) {
  11 + var suggestions = [];
  12 + $.each(data['result'], function(i, val){
  13 + suggestions.push(val);
  14 + });
  15 + add(suggestions);
  16 + });
  17 + },
  18 + });
  19 + });
  20 + </script>
  21 +</p>
0 \ No newline at end of file 22 \ No newline at end of file
semantics/utils.py
@@ -2,20 +2,35 @@ @@ -2,20 +2,35 @@
2 2
3 def get_frames_differences(initial_frames, frames): 3 def get_frames_differences(initial_frames, frames):
4 differences = {'matching_frames': [], 4 differences = {'matching_frames': [],
  5 + 'part_matching_frames': [],
5 'missing_frames': []} 6 'missing_frames': []}
  7 + matching_frames = []
6 for ini_frame in initial_frames: 8 for ini_frame in initial_frames:
7 matching_frame = get_matching_frame(frames, ini_frame) 9 matching_frame = get_matching_frame(frames, ini_frame)
8 if matching_frame: 10 if matching_frame:
9 differences['matching_frames'].append(ini_frame) 11 differences['matching_frames'].append(ini_frame)
  12 + matching_frames.append(matching_frame)
10 else: 13 else:
11 differences['missing_frames'].append(ini_frame) 14 differences['missing_frames'].append(ini_frame)
12 - return differences  
13 - 15 + not_matched_frames = list(set(frames) - set(matching_frames))
  16 + for miss_frame in differences['missing_frames']:
  17 + part_matching_frames = get_partially_matching_frames(not_matched_frames, miss_frame)
  18 + if part_matching_frames:
  19 + not_matched_frames.remove(part_matching_frames[0])
  20 + differences['part_matching_frames'].append(miss_frame)
  21 + differences['missing_frames'] = list(set(differences['missing_frames']) - set(differences['part_matching_frames']))
  22 + return differences
  23 +
14 def get_matching_frame(frames, frame_to_find): 24 def get_matching_frame(frames, frame_to_find):
15 for frame in frames.all(): 25 for frame in frames.all():
16 - if frames_match(frame, frame_to_find):  
17 - return frame_to_find 26 + if lexical_units_match(frame, frame_to_find) and frames_match(frame, frame_to_find):
  27 + return frame
18 return None 28 return None
  29 +
  30 +def lexical_units_match(frame1, frame2):
  31 + if set(frame1.lexical_units.all()) == set(frame2.lexical_units.all()):
  32 + return True
  33 + return False
19 34
20 def frames_match(frame1, frame2): 35 def frames_match(frame1, frame2):
21 complements1 = frame1.complements 36 complements1 = frame1.complements
@@ -96,3 +111,38 @@ def matching_synset_relation_exists(rel_to_find, relations): @@ -96,3 +111,38 @@ def matching_synset_relation_exists(rel_to_find, relations):
96 if rel_to_find.to == rel.to: 111 if rel_to_find.to == rel.to:
97 return True 112 return True
98 return False 113 return False
  114 +
  115 +def get_partially_matching_frames(frames, frame_to_find):
  116 + matching_frames = []
  117 + for frame in frames:
  118 + if lexical_units_match(frame, frame_to_find) and frames_partially_match(frame, frame_to_find):
  119 + matching_frames.append(frame)
  120 + return matching_frames
  121 +
  122 +def frames_partially_match(frame1, frame2):
  123 + complements1 = frame1.complements
  124 + complements2 = frame2.complements
  125 + if complements1.count() == complements2.count():
  126 + for compl in complements1.all():
  127 + if not partially_matching_complement_exists(complements2.all(), compl):
  128 + return False
  129 + else:
  130 + return False
  131 + return True
  132 +
  133 +def partially_matching_complement_exists(complements, compl_to_find):
  134 + for compl in complements:
  135 + if complements_partially_match(compl, compl_to_find):
  136 + return True
  137 + return False
  138 +
  139 +def complements_partially_match(compl1, compl2):
  140 + if roles_match(compl1.roles, compl2.roles):
  141 + return True
  142 + return False
  143 +
  144 +def get_structural_matching_frame(frames, frame_to_find):
  145 + for frame in frames.all():
  146 + if frames_match(frame, frame_to_find):
  147 + return frame
  148 + return None
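
get_frames_differences() now splits the initial frames into three buckets: fully matching (same lexical units, complements and roles), partially matching (same lexical units and role layout, but otherwise changed) and missing. A hedged consumption sketch; it compares a frame set with itself, so everything should land in matching_frames, and it assumes LexicalUnit.actual_frames() returns a queryset, as the validation code's usage suggests:
--------------------------------------------
# Illustrative sketch only.
from wordnet.models import LexicalUnit
from semantics.utils import get_frames_differences

lu = LexicalUnit.objects.all()[0]            # placeholder: any lexical unit
frames_now = lu.actual_frames()              # assumed queryset of SemanticFrame
diffs = get_frames_differences(list(frames_now), frames_now)
print '%d matching, %d partially matching, %d missing' % (
    len(diffs['matching_frames']),
    len(diffs['part_matching_frames']),
    len(diffs['missing_frames']))
--------------------------------------------
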
semantics/validation.py
@@ -2,9 +2,9 @@ @@ -2,9 +2,9 @@
2 2
3 from django.db.models import Max 3 from django.db.models import Max
4 4
5 -from dictionary.models import Lemma, reflex_phrase_types, Argument_Model 5 +from dictionary.models import Lemma, reflex_phrase_types
6 from semantics.models import LexicalUnitExamples 6 from semantics.models import LexicalUnitExamples
7 -from semantics.utils import get_matching_frame 7 +from semantics.utils import get_structural_matching_frame
8 8
9 def validate_frames(lemma_id): 9 def validate_frames(lemma_id):
10 lemma = Lemma.objects.get(id=lemma_id) 10 lemma = Lemma.objects.get(id=lemma_id)
@@ -90,7 +90,7 @@ def examples_added(frame): @@ -90,7 +90,7 @@ def examples_added(frame):
90 90
91 def duplicates_exists(frame, actual_frames): 91 def duplicates_exists(frame, actual_frames):
92 frames_to_check = actual_frames.exclude(id=frame.id) 92 frames_to_check = actual_frames.exclude(id=frame.id)
93 - if get_matching_frame(frames_to_check, frame): 93 + if get_structural_matching_frame(frames_to_check, frame):
94 return True 94 return True
95 return False 95 return False
96 96
@@ -182,10 +182,14 @@ def schema_used(schema, frames): @@ -182,10 +182,14 @@ def schema_used(schema, frames):
182 def validate_lexical_units(lemma_id): 182 def validate_lexical_units(lemma_id):
183 error_msg = '' 183 error_msg = ''
184 lemma = Lemma.objects.get(id=lemma_id, old=False) 184 lemma = Lemma.objects.get(id=lemma_id, old=False)
185 - lexical_units = lemma.entry_obj.lexical_units() 185 + lexical_units = lemma.entry_obj.meanings
186 for lex_unit in lexical_units.all(): 186 for lex_unit in lexical_units.all():
187 if not examples_reflex_agreed(lex_unit): 187 if not examples_reflex_agreed(lex_unit):
188 error_msg = u'Semantyka: Znaczenie %s ma podpięte przykłady o niezgodnej zwrotności.' % unicode(lex_unit) 188 error_msg = u'Semantyka: Znaczenie %s ma podpięte przykłady o niezgodnej zwrotności.' % unicode(lex_unit)
  189 + elif hanging_meaning(lex_unit):
  190 + error_msg = u'Semantyka: Znaczenie %s nie jest reprezentowane przez żadną ramę semantyczną.' % unicode(lex_unit)
  191 + if error_msg:
  192 + break
189 return error_msg 193 return error_msg
190 194
191 def examples_reflex_agreed(lexical_unit): 195 def examples_reflex_agreed(lexical_unit):
@@ -197,3 +201,9 @@ def examples_reflex_agreed(lexical_unit): @@ -197,3 +201,9 @@ def examples_reflex_agreed(lexical_unit):
197 lex_example.example.arguments.filter(arguments__type__in=reflex_phrase_types()).exists())): 201 lex_example.example.arguments.filter(arguments__type__in=reflex_phrase_types()).exists())):
198 return False 202 return False
199 return True 203 return True
  204 +
  205 +def hanging_meaning(lexical_unit):
  206 + if lexical_unit.luid < 0 and not lexical_unit.actual_frames().exists():
  207 + return True
  208 + return False
  209 +
200 \ No newline at end of file 210 \ No newline at end of file
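
validate_lexical_units() now stops at the first problem and additionally reports ad-hoc meanings (luid < 0) that no semantic frame represents. A hedged sketch of a standalone check; lemma_id is a placeholder for an existing, current Lemma:
--------------------------------------------
# Illustrative sketch only.
from semantics.validation import validate_lexical_units

lemma_id = 1                       # placeholder: id of a current (old=False) Lemma
message = validate_lexical_units(lemma_id)
if message:
    print message                  # Polish error string, as built above
else:
    print 'lexical units OK'
--------------------------------------------
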
semantics/views.py
@@ -15,6 +15,7 @@ from django.core.urlresolvers import reverse @@ -15,6 +15,7 @@ from django.core.urlresolvers import reverse
15 from django.db.models import Q 15 from django.db.models import Q
16 16
17 from common.decorators import render, ajax 17 from common.decorators import render, ajax
  18 +from semantics.forms import GeneralSelPrefForm, RelationalSelPrefForm, RoleForm, SynsetSelPrefForm
18 from semantics.saving import modify_frames, update_meanings 19 from semantics.saving import modify_frames, update_meanings
19 from semantics.validation import validate_schemas, validate_frames, validate_lexical_units 20 from semantics.validation import validate_schemas, validate_frames, validate_lexical_units
20 21
@@ -73,10 +74,14 @@ def reorder_history(frames_list): @@ -73,10 +74,14 @@ def reorder_history(frames_list):
73 @render('frames.json') 74 @render('frames.json')
74 @ajax(method='get', encode_result=False) 75 @ajax(method='get', encode_result=False)
75 def ajax_frames(request, lemma_id): 76 def ajax_frames(request, lemma_id):
  77 + context = create_frames_context(lemma_id, request.user)
  78 + return context
76 79
77 - lemma = Lemma.objects.get(id=lemma_id, old=False)  
78 - # lexical_units = LexicalUnit.objects.filter(Q(base=lemma.entry, pos="czasownik")|Q(base=lemma.entry+u' się', pos="czasownik")).order_by('sense')  
79 - lexical_units = LexicalUnit.objects.filter(Q(base__startswith=lemma.entry + u' ')|Q(base__contains=u' '+lemma.entry+u' ')|Q(base__endswith=u' '+lemma.entry)|Q(base=lemma.entry)).order_by('sense') 80 +def create_frames_context(lemma_id, user):
  81 + lemma = Lemma.objects.get(id=lemma_id)
  82 +
  83 + #lexical_units = LexicalUnit.objects.filter(Q(base__startswith=lemma.entry + u' ')|Q(base__contains=u' '+lemma.entry+u' ')|Q(base__endswith=u' '+lemma.entry)|Q(base=lemma.entry)).order_by('sense')
  84 + lexical_units = lemma.entry_obj.meanings.order_by('sense')
80 85
81 alternations = {} 86 alternations = {}
82 frames_dict = {} 87 frames_dict = {}
@@ -163,8 +168,10 @@ def ajax_frames(request, lemma_id): @@ -163,8 +168,10 @@ def ajax_frames(request, lemma_id):
163 frame_preferences.append([{'csv_id': i, 'csv_class': c, 'preference': p} for i, c, p in zip(idents, frame_ids, row)]) 168 frame_preferences.append([{'csv_id': i, 'csv_class': c, 'preference': p} for i, c, p in zip(idents, frame_ids, row)])
164 169
165 display = {"roles": frame_roles, "preferences": frame_preferences} 170 display = {"roles": frame_roles, "preferences": frame_preferences}
166 - if frame.opinion is None: 171 + if frame.opinion is None and user.is_authenticated():
167 status = u'brak' 172 status = u'brak'
  173 + elif frame.opinion is None and not user.is_authenticated():
  174 + status = u'pewna'
168 else: 175 else:
169 status = frame.opinion.value 176 status = frame.opinion.value
170 frame_display["frames"].append({"frame_id": str(frame.id), "colspan": str(max(len(frame_roles), 1)), "rowspan": str(frame_preferences_rowspan), "status": status, "display": display}) 177 frame_display["frames"].append({"frame_id": str(frame.id), "colspan": str(max(len(frame_roles), 1)), "rowspan": str(frame_preferences_rowspan), "status": status, "display": display})
@@ -206,16 +213,20 @@ def ajax_frames(request, lemma_id): @@ -206,16 +213,20 @@ def ajax_frames(request, lemma_id):
206 @render('units.json') 213 @render('units.json')
207 @ajax(method='get', encode_result=False) 214 @ajax(method='get', encode_result=False)
208 def ajax_units(request, lemma_id): 215 def ajax_units(request, lemma_id):
209 - lemma = Lemma.objects.get(id=lemma_id, old=False)  
210 - # lexical_units = LexicalUnit.objects.filter(Q(base = lemma.entry, pos="czasownik")|Q(base = lemma.entry + u' się', pos="czasownik")).order_by('base', 'sense')  
211 - lexical_units = LexicalUnit.objects.filter(Q(base__startswith=lemma.entry + u' ', pos="czasownik")|Q(base__contains=u' '+lemma.entry+u' ', pos="czasownik")|Q(base__endswith=u' '+lemma.entry, pos="czasownik")|Q(base=lemma.entry, pos="czasownik")).order_by('base', 'sense') 216 + context = create_units_context(lemma_id)
  217 + return context
  218 +
  219 +def create_units_context(lemma_id):
  220 + lemma = Lemma.objects.get(id=lemma_id)
  221 + lexical_units = lemma.entry_obj.meanings.order_by('base', 'sense')
  222 +# lexical_units = LexicalUnit.objects.filter(Q(base__startswith=lemma.entry + u' ', pos="czasownik")|Q(base__contains=u' '+lemma.entry+u' ', pos="czasownik")|Q(base__endswith=u' '+lemma.entry, pos="czasownik")|Q(base=lemma.entry, pos="czasownik")).order_by('base', 'sense')
212 223
213 context = { 224 context = {
214 'lexical_units': [{"id": lu.id, "luid": lu.luid, "base": lu.base, "sense": lu.sense, "pos": lu.pos, "glossa": lu.glossa, "definition": lu.definition, "location": location(lu)} for lu in lexical_units], 225 'lexical_units': [{"id": lu.id, "luid": lu.luid, "base": lu.base, "sense": lu.sense, "pos": lu.pos, "glossa": lu.glossa, "definition": lu.definition, "location": location(lu)} for lu in lexical_units],
215 'informations': {'base': lemma.entry, 'sense': max(['A'] + [chr(ord(lu.sense) + 1) for lu in lexical_units.filter(luid=-1)])}, # TODO: 2 different free senses for with/whthout 'się' 226 'informations': {'base': lemma.entry, 'sense': max(['A'] + [chr(ord(lu.sense) + 1) for lu in lexical_units.filter(luid=-1)])}, # TODO: 2 different free senses for with/whthout 'się'
216 } 227 }
217 -  
218 return context 228 return context
  229 +
219 230
220 def location(lexical_unit): 231 def location(lexical_unit):
221 if lexical_unit.synset is None: 232 if lexical_unit.synset is None:
@@ -341,7 +352,7 @@ def ajax_roles(request): @@ -341,7 +352,7 @@ def ajax_roles(request):
341 @ajax(method='get', encode_result=False) 352 @ajax(method='get', encode_result=False)
342 def ajax_schemas(request, lemma_id): 353 def ajax_schemas(request, lemma_id):
343 354
344 - lemma = Lemma.objects.get(id=lemma_id, old=False) 355 + lemma = Lemma.objects.get(id=lemma_id).entry_obj.actual_lemma()
345 356
346 schemas_all = lemma.frames.all() 357 schemas_all = lemma.frames.all()
347 358
@@ -366,7 +377,7 @@ def ajax_schemas(request, lemma_id): @@ -366,7 +377,7 @@ def ajax_schemas(request, lemma_id):
366 schemas_by_characteristic[characteristic_id] = [] 377 schemas_by_characteristic[characteristic_id] = []
367 schemas_by_characteristic[characteristic_id].append(schema) 378 schemas_by_characteristic[characteristic_id].append(schema)
368 379
369 - lexical_units = LexicalUnit.objects.filter(Q(base=lemma.entry) | Q(base=lemma.entry+u' się')) 380 + lexical_units = lemma.entry_obj.meanings.all()
370 381
371 schemas_display = [] 382 schemas_display = []
372 schema_unit_rank = {} 383 schema_unit_rank = {}
@@ -489,9 +500,9 @@ def get_prepnps(prep, case, number, nouns, _atr): @@ -489,9 +500,9 @@ def get_prepnps(prep, case, number, nouns, _atr):
489 @ajax(method='get', encode_result=False) 500 @ajax(method='get', encode_result=False)
490 def ajax_examples(request, lemma_id): 501 def ajax_examples(request, lemma_id):
491 502
492 - lemma = Lemma.objects.get(id=lemma_id, old=False) 503 + lemma = Lemma.objects.get(id=lemma_id).entry_obj.actual_lemma()
493 nkjp_examples = lemma.nkjp_examples.all() 504 nkjp_examples = lemma.nkjp_examples.all()
494 - lexical_units = LexicalUnit.objects.filter(Q(base = lemma.entry)|Q(base = lemma.entry + u' się')) 505 + lexical_units = lemma.entry_obj.meanings.all()
495 506
496 lexical_units_ids = [lu.id for lu in lexical_units] 507 lexical_units_ids = [lu.id for lu in lexical_units]
497 508
@@ -563,17 +574,19 @@ def ajax_create_complement(request, lemma_id, frame, roles): @@ -563,17 +574,19 @@ def ajax_create_complement(request, lemma_id, frame, roles):
563 complement.roles.add(role) 574 complement.roles.add(role)
564 return ajax_frames(request, lemma.id) 575 return ajax_frames(request, lemma.id)
565 576
566 -@ajax(method='get', encode_result=False) 577 +@ajax(method='post', encode_result=True)
567 def ajax_update_meanings(request, operations, lemma_id): 578 def ajax_update_meanings(request, operations, lemma_id):
568 - update_meanings(operations)  
569 - return ajax_units(request) 579 + update_meanings(lemma_id, operations)
  580 + units_context = create_units_context(lemma_id)
  581 + return units_context
570 582
571 -@ajax(method='get', encode_result=False) 583 +@ajax(method='post', encode_result=True)
572 def ajax_modify_frames(request, operations, lemma_id): 584 def ajax_modify_frames(request, operations, lemma_id):
573 if not request.user.is_authenticated(): 585 if not request.user.is_authenticated():
574 return 'user logged out' 586 return 'user logged out'
575 modify_frames(lemma_id, operations, request.user) 587 modify_frames(lemma_id, operations, request.user)
576 - return ajax_frames(request) 588 + frames_context = create_frames_context(lemma_id, request.user)
  589 + return frames_context
577 590
578 @ajax(method='get', encode_result=True) 591 @ajax(method='get', encode_result=True)
579 def ajax_plWN_context_lookup(request, term): 592 def ajax_plWN_context_lookup(request, term):
@@ -594,6 +607,19 @@ def get_ordered_lexical_units_bases(lexical_units_query): @@ -594,6 +607,19 @@ def get_ordered_lexical_units_bases(lexical_units_query):
594 last_unit_base = lexical_unit.base 607 last_unit_base = lexical_unit.base
595 return lexical_unit_bases 608 return lexical_unit_bases
596 609
  610 +@ajax(method='get', encode_result=True)
  611 +def synset_context_lookup(request, term):
  612 + results = []
  613 + term = unicode(term)
  614 + if len(term) > 0:
  615 + obj_results = LexicalUnit.objects.filter(base__startswith=term)
  616 + results = get_ordered_lexical_units(obj_results)
  617 + return {'result': results}
  618 +
  619 +def get_ordered_lexical_units(lexical_units_query):
  620 + ordered_lexical_units = lexical_units_query.order_by('base', 'sense')
  621 + return [unicode(lu) for lu in ordered_lexical_units]
  622 +
597 @ajax(method='get') 623 @ajax(method='get')
598 def validate_semantics(request, lemma_id, new_status_id): 624 def validate_semantics(request, lemma_id, new_status_id):
599 error_msg = '' 625 error_msg = ''
@@ -611,3 +637,27 @@ def validate_semantics(request, lemma_id, new_status_id): @@ -611,3 +637,27 @@ def validate_semantics(request, lemma_id, new_status_id):
611 if not error_msg: 637 if not error_msg:
612 error_msg = validate_schemas(lemma_id) 638 error_msg = validate_schemas(lemma_id)
613 return {'error_message': error_msg} 639 return {'error_message': error_msg}
  640 +
  641 +@render('sem_arg_form.html')
  642 +@ajax(method='get', encode_result=False)
  643 +def sem_arg_form(request):
  644 + form = RoleForm()
  645 + return {'form': form}
  646 +
  647 +@render('general_preference_form.html')
  648 +@ajax(method='get', encode_result=False)
  649 +def general_preference_form(request):
  650 + form = GeneralSelPrefForm()
  651 + return {'form': form}
  652 +
  653 +@render('synset_preference_form.html')
  654 +@ajax(method='get', encode_result=False)
  655 +def synset_preference_form(request):
  656 + form = SynsetSelPrefForm()
  657 + return {'form': form}
  658 +
  659 +@render('relational_preference_form.html')
  660 +@ajax(method='get', encode_result=False)
  661 +def relational_preference_form(request):
  662 + form = RelationalSelPrefForm()
  663 + return {'form': form}
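
The AJAX views above now delegate to create_frames_context() and create_units_context(), so the same JSON context can be built outside a request cycle. A hedged sketch; lemma_id is a placeholder, and AnonymousUser exercises the new 'pewna' default for frames without an opinion:
--------------------------------------------
# Illustrative sketch only.
from django.contrib.auth.models import AnonymousUser
from semantics.views import create_frames_context, create_units_context

lemma_id = 1                                     # placeholder Lemma id
frames_context = create_frames_context(lemma_id, AnonymousUser())
units_context = create_units_context(lemma_id)
print units_context['informations']['base']      # base entry the meanings belong to
--------------------------------------------
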
static/js/script.js
@@ -202,6 +202,7 @@ function resetForm($form) { @@ -202,6 +202,7 @@ function resetForm($form) {
202 $form.find('input:text, input:password, input:file, select, textarea').val(''); 202 $form.find('input:text, input:password, input:file, select, textarea').val('');
203 $form.find('input:radio, input:checkbox') 203 $form.find('input:radio, input:checkbox')
204 .removeAttr('checked').removeAttr('selected'); 204 .removeAttr('checked').removeAttr('selected');
  205 + $form.find('#sem-arguments').remove();
205 } 206 }
206 207
207 // funkcja konwertujaca kody html znakow do znakow 208 // funkcja konwertujaca kody html znakow do znakow
@@ -216,3 +217,8 @@ function convertHtml(str) { @@ -216,3 +217,8 @@ function convertHtml(str) {
216 217
217 String.prototype.startsWith = function(str) 218 String.prototype.startsWith = function(str)
218 {return (this.match("^"+str)==str)} 219 {return (this.match("^"+str)==str)}
  220 +
  221 +String.prototype.endsWith = function(suffix) {
  222 + if (this.length < suffix.length) return false;
  223 + return this.lastIndexOf(suffix) === this.length - suffix.length;
  224 +}
@@ -102,8 +102,6 @@ urlpatterns += patterns('dictionary.ajax_lemma_view', @@ -102,8 +102,6 @@ urlpatterns += patterns('dictionary.ajax_lemma_view',
102 url(r'^ajax/example_opinion_form_submit/$', 'example_opinion_form_submit'), 102 url(r'^ajax/example_opinion_form_submit/$', 'example_opinion_form_submit'),
103 url(r'^ajax/nkjp_example_form_submit/$', 'nkjp_example_form_submit'), 103 url(r'^ajax/nkjp_example_form_submit/$', 'nkjp_example_form_submit'),
104 url(r'^ajax/semantic_example_form_submit/$', 'semantic_example_form_submit'), 104 url(r'^ajax/semantic_example_form_submit/$', 'semantic_example_form_submit'),
105 - url(r'^ajax/schema_got_assigned_semantics/$', 'schema_got_assigned_semantics'),  
106 - url(r'^ajax/example_got_assigned_semantics/$', 'example_got_assigned_semantics'),  
107 url(r'^ajax/remove_example_from_lemma/$', 'remove_example_from_lemma'), 105 url(r'^ajax/remove_example_from_lemma/$', 'remove_example_from_lemma'),
108 url(r'^ajax/can_confirm_example/$', 'can_confirm_example'), 106 url(r'^ajax/can_confirm_example/$', 'can_confirm_example'),
109 url(r'^ajax/confirm_nkjp_example/$', 'confirm_nkjp_example'), 107 url(r'^ajax/confirm_nkjp_example/$', 'confirm_nkjp_example'),
@@ -123,6 +121,7 @@ urlpatterns += patterns('dictionary.ajax_lemma_view', @@ -123,6 +121,7 @@ urlpatterns += patterns('dictionary.ajax_lemma_view',
123 url(r'^ajax/deselect_preview_tab/$', 'deselect_preview_tab'), 121 url(r'^ajax/deselect_preview_tab/$', 'deselect_preview_tab'),
124 url(r'^ajax/get_schemata/$', 'get_schemata'), 122 url(r'^ajax/get_schemata/$', 'get_schemata'),
125 url(r'^ajax/get_examples/$', 'get_examples'), 123 url(r'^ajax/get_examples/$', 'get_examples'),
  124 + url(r'^ajax/get_schemata_and_examples/$', 'get_schemata_and_examples'),
126 125
127 # powiazywanie hasel (nieczasownikowe) 126 # powiazywanie hasel (nieczasownikowe)
128 url(r'^ajax/relate_entries/$', 'relate_entries'), 127 url(r'^ajax/relate_entries/$', 'relate_entries'),
wordnet/admin.py
1 from django.contrib import admin 1 from django.contrib import admin
2 2
3 -from models import LexicalUnit 3 +from models import Hypernymy, LexicalUnit, Synonymy
4 4
5 class LexicalUnitAdmin(admin.ModelAdmin): 5 class LexicalUnitAdmin(admin.ModelAdmin):
6 search_fields = ('base',) 6 search_fields = ('base',)
  7 +
  8 +class HypernymyAdmin(admin.ModelAdmin):
  9 + search_fields = ('child__id', 'parent__id')
  10 +
  11 +class SynonymyAdmin(admin.ModelAdmin):
  12 + search_fields = ('child__id', 'parent__id')
7 13
8 admin.site.register(LexicalUnit, LexicalUnitAdmin) 14 admin.site.register(LexicalUnit, LexicalUnitAdmin)
  15 +admin.site.register(Hypernymy, HypernymyAdmin)
  16 +admin.site.register(Synonymy, SynonymyAdmin)
wordnet/models.py
@@ -33,6 +33,8 @@ class LexicalUnit(models.Model): @@ -33,6 +33,8 @@ class LexicalUnit(models.Model):
33 definition = models.TextField(default="") 33 definition = models.TextField(default="")
34 # glossa 34 # glossa
35 glossa = models.TextField(default="") 35 glossa = models.TextField(default="")
  36 + # haslo w Walentym
  37 + entry = models.ForeignKey('dictionary.Entry', null=True, related_name='meanings')
36 38
37 def __unicode__(self): 39 def __unicode__(self):
38 return u'%s-%s' % (self.base, self.sense) 40 return u'%s-%s' % (self.base, self.sense)
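
The nullable entry ForeignKey added above gives each Entry a meanings reverse accessor, which is what the reworked semantics views rely on instead of matching lexical units by base form. A short, hedged sketch; it assumes Entry lives in dictionary.models and that some data exists:
--------------------------------------------
# Illustrative sketch only.
from dictionary.models import Entry

entry = Entry.objects.all()[0]                   # placeholder: any entry
for lu in entry.meanings.order_by('base', 'sense'):
    print u'%s-%s' % (lu.base, lu.sense)         # LexicalUnit.__unicode__ format
--------------------------------------------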