Commit c832e2198e73544f0f3fd9ebe61fa6b10f0e02d1
Merge branch 'dev'
Showing 22 changed files with 806 additions and 215 deletions
INSTALL_PL
1 | 1 | These instructions are intended for the Ubuntu operating system. |
2 | 2 | |
3 | 3 | Install pip: |
4 | ->> apt-get update | |
5 | ->> apt-get -y install python-pip | |
4 | +>> sudo apt-get update | |
5 | +>> sudo apt-get -y install python-pip | |
6 | 6 | |
7 | 7 | Install Django version 1.4.8: |
8 | ->> pip install Django==1.4.8 | |
8 | +>> sudo pip install Django==1.4.8 | |
9 | 9 | |
10 | 10 | Install Django South: |
11 | ->> apt-get install python-django-south | |
11 | +>> sudo apt-get install python-django-south | |
12 | 12 | |
13 | 13 | Install Django extensions: |
14 | ->> apt-get install python-django-extensions | |
14 | +>> sudo apt-get install python-django-extensions | |
15 | 15 | |
16 | 16 | Install Django registration version 0.8: |
17 | 17 | >> sudo pip install django-registration==0.8 |
18 | 18 | |
19 | 19 | Install the Python lxml module: |
20 | ->> apt-get install python-lxml | |
20 | +>> sudo apt-get install python-lxml | |
21 | 21 | |
22 | 22 | Install Postgres: |
23 | 23 | >> sudo apt-get update |
... | ... | @@ -30,12 +30,12 @@ Create an empty database for Slowal: |
30 | 30 | >> createdb slowal -E UTF8 -T template0 -l pl_PL.utf8 |
31 | 31 | If the pl_PL.utf8 locale does not exist, additionally run the following commands:
32 | 32 | >> sudo locale-gen pl_PL.utf8 |
33 | ->> service postgresql restart | |
33 | +>> sudo service postgresql restart | |
34 | 34 | Load the database dump with the command below (the dump is included in the INSTALL_PACK.zip archive; if the archive was not bundled with this package, check http://zil.ipipan.waw.pl/Slowal):
35 | 35 | >> psql slowal < obraz_bazy.db |
36 | 36 | |
37 | 37 | Install git:
38 | ->> apt-get install git | |
38 | +>> sudo apt-get install git | |
39 | 39 | |
40 | 40 | Clone the git repository from GitLab:
41 | 41 | >> git clone http://git.nlp.ipipan.waw.pl/walenty/Slowal.git |
... | ... | @@ -69,10 +69,10 @@ Collect the static files into their dedicated directory with the command:
69 | 69 | >> python manage.py collectstatic |
70 | 70 | |
71 | 71 | Install Apache:
72 | ->> apt-get install apache2 | |
72 | +>> sudo apt-get install apache2 | |
73 | 73 | |
74 | 74 | Install mod-wsgi:
75 | ->> apt-get install libapache2-mod-wsgi | |
75 | +>> sudo apt-get install libapache2-mod-wsgi | |
76 | 76 | |
77 | 77 | Create a slowal.wsgi file, defining the paths to the static files in it appropriately. Example file content below:
78 | 78 | -------------------------------------------- |
... | ... | @@ -115,7 +115,7 @@ Configure Apache by adding a configuration file (e.g. named slowal.conf) to the
115 | 115 | -------------------------------------------- |
116 | 116 | |
117 | 117 | Enable the site with the command:
118 | ->> a2ensite slowal.conf | |
118 | +>> sudo a2ensite slowal.conf | |
119 | 119 | |
120 | 120 | Restart Apache:
121 | 121 | >> sudo service apache2 restart |
... | ... |
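Note: the example slowal.wsgi content referenced above (INSTALL_PL lines 78-115) is elided from this diff. As a purely hypothetical sketch, not the repository's actual file, a Django 1.4-era WSGI entry point for this layout typically reads (the checkout path is a placeholder):
--------------------------------------------
import os
import sys

# Placeholder path -- point this at the cloned Slowal checkout.
sys.path.append('/var/www/Slowal')

# Django 1.4 keeps settings.py at the project root, importable as 'settings'.
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'

import django.core.handlers.wsgi
application = django.core.handlers.wsgi.WSGIHandler()
--------------------------------------------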
accounts/management/commands/get_payments_data.py
0 → 100644
1 | +# -*- coding:utf-8 -*- | |
2 | + | |
3 | +import codecs | |
4 | +import datetime | |
5 | + | |
6 | +from django.contrib.auth.models import User | |
7 | +from django.core.management.base import BaseCommand | |
8 | +from django.db.models import Sum | |
9 | + | |
10 | +from accounts.models import RealizedLemma, RealizedPhraseology, RealizedPhraseologyBinding, \ | |
11 | + RealizedSemantics | |
12 | +from dictionary.ajax_user_stats import get_used_bindings | |
13 | +from dictionary.models import Lemma | |
14 | + | |
15 | + | |
16 | +USERNAME = 'JakubS' | |
17 | +FUNCTION = 'Leksykograf' | |
18 | +POS = 'noun' | |
19 | +STARTDATE = datetime.datetime(2011, 1, 1, 00, 00) | |
20 | +ENDDATE = (datetime.datetime.now() - | |
21 | + datetime.timedelta(days=1)).replace(hour=23, minute=59, second=59) | |
22 | + | |
23 | + | |
24 | +class Command(BaseCommand): | |
25 | + args = 'none' | |
26 | + | |
27 | + def handle(self, **options): | |
28 | + get_payments_data(FUNCTION) | |
29 | + | |
30 | + | |
31 | +def get_payments_data(function): | |
32 | + start = STARTDATE.strftime('%Y%m%d') | |
33 | + end = ENDDATE.strftime('%Y%m%d') | |
34 | + payments_path = 'data/work_%s_%s_%s-%s.csv' % (USERNAME, function, start, end) | |
35 | + payments_file = codecs.open(payments_path, 'wt', 'utf-8') | |
36 | + user = User.objects.get(username=USERNAME) | |
37 | + | |
38 | + if function == 'Semantyk': | |
39 | + work_stats = write_semantic_stats(payments_file, user, POS) | |
40 | + elif function == 'Superfrazeolog': | |
41 | + work_stats = write_superphraseologic_stats(payments_file, user, POS) | |
42 | + elif function == 'Frazeolog': | |
43 | + work_stats = write_phraseologic_stats(payments_file, user, POS) | |
44 | + elif function == 'Leksykograf': | |
45 | + work_stats = write_lexicographic_stats(payments_file, user, POS) | |
46 | + elif function == 'Superleksykograf': | |
47 | + work_stats = write_superlexicographic_stats(payments_file, user, POS) | |
48 | + total_earned_cash = work_stats['earned_cash'] | |
49 | + if total_earned_cash > 0.0: | |
50 | + payments_file.write(u'\n%s\t%.2f\n' % (user.username, | |
51 | + total_earned_cash)) | |
52 | + payments_file.close() | |
53 | + | |
54 | + | |
55 | +def write_superlexicographic_stats(payments_file, user, pos): | |
56 | + real_lemmas = RealizedLemma.objects.filter(user_stats__user=user, | |
57 | + lemma__entry_obj__pos__tag=pos, | |
58 | + date__gte=STARTDATE, | |
59 | + date__lte=ENDDATE, | |
60 | + status__type__sym_name='checked', | |
61 | + bonus=False) | |
62 | + | |
63 | + earned_cash = real_lemmas.aggregate(Sum('cash'))['cash__sum'] | |
64 | + if earned_cash is None: | |
65 | + earned_cash = 0.0 | |
66 | + | |
67 | + payments_file.write(u'Sprawdzone:\n') | |
68 | + for done_lemma in real_lemmas.order_by('date'): | |
69 | + payments_file.write(u'%s\t%.2f\t%s\n' % (done_lemma.lemma.entry_obj.name, | |
70 | + done_lemma.cash, | |
71 | + done_lemma.date.strftime('%Y%m%d'))) | |
72 | + | |
73 | + lex_work_stats = {'earned_cash': round(earned_cash, 2)} | |
74 | + return lex_work_stats | |
75 | + | |
76 | + | |
77 | +def write_semantic_stats(payments_file, user, pos): | |
78 | + | |
79 | + real_semantics = RealizedSemantics.objects.filter(user_stats__user=user, | |
80 | + date__gte=STARTDATE, | |
81 | + date__lte=ENDDATE, | |
82 | + entry__pos__tag=pos) | |
83 | + | |
84 | + earned_cash = real_semantics.filter(user_stats__user=user).aggregate(Sum('cash'))['cash__sum'] | |
85 | + if earned_cash is None: | |
86 | + earned_cash = 0.0 | |
87 | + | |
88 | + bonus_cash = real_semantics.filter(user_stats__user=user, | |
89 | + bonus=True).aggregate(Sum('cash'))['cash__sum'] | |
90 | + if bonus_cash is None: | |
91 | + bonus_cash = 0.0 | |
92 | + prop_frames = real_semantics.filter(user_stats__user=user).aggregate(Sum('prop_frames'))[ | |
93 | + 'prop_frames__sum'] | |
94 | + if prop_frames is None: | |
95 | + prop_frames = 0 | |
96 | + part_prop_frames = real_semantics.filter(user_stats__user=user).aggregate(Sum('part_prop_frames'))[ | |
97 | + 'part_prop_frames__sum'] | |
98 | + if part_prop_frames is None: | |
99 | + part_prop_frames = 0 | |
100 | + wrong_frames = real_semantics.filter(user_stats__user=user).aggregate(Sum('wrong_frames'))[ | |
101 | + 'wrong_frames__sum'] | |
102 | + if wrong_frames is None: | |
103 | + wrong_frames = 0 | |
104 | + corr_frames = real_semantics.filter(user_stats__user=user).aggregate(Sum('corr_frames'))[ | |
105 | + 'corr_frames__sum'] | |
106 | + if corr_frames is None: | |
107 | + corr_frames = 0 | |
108 | + part_corr_frames = real_semantics.filter(user_stats__user=user).aggregate(Sum('part_corr_frames'))[ | |
109 | + 'part_corr_frames__sum'] | |
110 | + if part_corr_frames is None: | |
111 | + part_corr_frames = 0 | |
112 | + ncorr_frames = real_semantics.filter(user_stats__user=user).aggregate(Sum('ncorr_frames'))[ | |
113 | + 'ncorr_frames__sum'] | |
114 | + if ncorr_frames is None: | |
115 | + ncorr_frames = 0 | |
116 | + made_frames = real_semantics.filter(user_stats__user=user).aggregate(Sum('made_frames'))[ | |
117 | + 'made_frames__sum'] | |
118 | + if made_frames is None: | |
119 | + made_frames = 0 | |
120 | + added_connections = real_semantics.filter(user_stats__user=user).aggregate(Sum('added_connections'))[ | |
121 | + 'added_connections__sum'] | |
122 | + if added_connections is None: | |
123 | + added_connections = 0 | |
124 | + efficacy = 0.0 | |
125 | + if prop_frames + wrong_frames > 0: | |
126 | + efficacy = float(prop_frames) / float(prop_frames + wrong_frames) * 100.0 | |
127 | + | |
128 | + payments_file.write(u'Wykonane:\n') | |
129 | + done_semantics = real_semantics.filter(bonus=False).order_by('date') | |
130 | + for done_sem in done_semantics: | |
131 | + done_cash = done_sem.cash | |
132 | + try: | |
133 | + done_bonus = real_semantics.get(bonus=True, entry=done_sem.entry).cash | |
134 | + done_cash += done_bonus | |
135 | + except RealizedSemantics.DoesNotExist: | |
136 | + pass | |
137 | + payments_file.write(u'%s\t%.2f\t%s\n' % (done_sem.entry.name, | |
138 | + done_cash, | |
139 | + done_sem.date.strftime('%Y%m%d'))) | |
140 | + | |
141 | + sem_work_stats = {'earned_cash': round(earned_cash, 2), | |
142 | + 'bonus_cash': round(bonus_cash, 2), | |
143 | + 'prop_frames': prop_frames, | |
144 | + 'part_prop_frames': part_prop_frames, | |
145 | + 'wrong_frames': wrong_frames, | |
146 | + 'corr_frames': corr_frames, | |
147 | + 'part_corr_frames': part_corr_frames, | |
148 | + 'checked_frames': ncorr_frames + corr_frames + part_corr_frames, | |
149 | + 'made_frames': made_frames, | |
150 | + 'efficacy': round(efficacy, 2), | |
151 | + 'added_connections': added_connections} | |
152 | + return sem_work_stats | |
153 | + | |
154 | + | |
155 | +def write_superphraseologic_stats(payments_file, user, pos): | |
156 | + added_bindings = RealizedPhraseologyBinding.objects.filter(user_stats__user=user, | |
157 | + date__gte=STARTDATE, | |
158 | + date__lte=ENDDATE) | |
159 | + used_bindings = get_used_bindings(added_bindings) | |
160 | + | |
161 | + checked_phraseology = RealizedPhraseology.objects.filter(user_stats__user=user, | |
162 | + date__gte=STARTDATE, | |
163 | + date__lte=ENDDATE, | |
164 | + bonus=False, | |
165 | + status__type__sym_name='checked_f', | |
166 | + lemma__entry_obj__pos__tag=pos) | |
167 | + | |
168 | + earned_cash_frames = checked_phraseology.aggregate(Sum('cash'))['cash__sum'] | |
169 | + if earned_cash_frames is None: | |
170 | + earned_cash_frames = 0.0 | |
171 | + earned_cash_bindings = used_bindings.aggregate(Sum('cash'))['cash__sum'] | |
172 | + if earned_cash_bindings is None: | |
173 | + earned_cash_bindings = 0.0 | |
174 | + earned_cash = earned_cash_frames + earned_cash_bindings | |
175 | + | |
176 | + phraseologic_empty_frame_value = 1.0 | |
177 | + empty_value = 0.0 | |
178 | + | |
179 | + payments_file.write(u'Sprawdzone:\n') | |
180 | + checked_phraseology = checked_phraseology.order_by('date') | |
181 | + for checked_phr in checked_phraseology: | |
182 | + cash = checked_phr.cash | |
183 | + if cash == 0.0: | |
184 | + cash = phraseologic_empty_frame_value | |
185 | + empty_value += phraseologic_empty_frame_value | |
186 | + payments_file.write(u'%s\t%.2f\t%s\n' % (checked_phr.lemma.entry_obj.name, | |
187 | + cash, | |
188 | + checked_phr.date.strftime('%Y%m%d'))) | |
189 | + earned_cash += empty_value | |
190 | + | |
191 | + payments_file.write(u'\n\nDodane powiazania frazeologiczne:\n') | |
192 | + for binding in used_bindings.order_by('date'): | |
193 | + payments_file.write(u'%s\t%.2f\t%s\n' % (binding.binded_entry.name, | |
194 | + binding.cash, | |
195 | + binding.date.strftime('%Y%m%d'))) | |
196 | + | |
197 | + | |
198 | + phraseology_work_stats = {'earned_cash': round(earned_cash, 2), | |
199 | + 'added_bindings': added_bindings.count(), | |
200 | + 'used_bindings': used_bindings.count()} | |
201 | + return phraseology_work_stats | |
202 | + | |
203 | + | |
204 | +def write_phraseologic_stats(payments_file, user, pos): | |
205 | + added_bindings = RealizedPhraseologyBinding.objects.filter(user_stats__user=user, | |
206 | + date__gte=STARTDATE, | |
207 | + date__lte=ENDDATE) | |
208 | + used_bindings = get_used_bindings(added_bindings) | |
209 | + | |
210 | + checked_and_done_phraseology = RealizedPhraseology.objects.filter(user_stats__user=user, | |
211 | + date__gte=STARTDATE, | |
212 | + date__lte=ENDDATE, | |
213 | + lemma__entry_obj__pos__tag=pos) | |
214 | + | |
215 | + done_phraseology = checked_and_done_phraseology.filter(status__type__sym_name='ready_f', | |
216 | + bonus=False) | |
217 | + | |
218 | + earned_cash_frames = done_phraseology.aggregate(Sum('cash'))['cash__sum'] | |
219 | + if earned_cash_frames is None: | |
220 | + earned_cash_frames = 0.0 | |
221 | + earned_cash_bindings = used_bindings.aggregate(Sum('cash'))['cash__sum'] | |
222 | + if earned_cash_bindings is None: | |
223 | + earned_cash_bindings = 0.0 | |
224 | + earned_cash = earned_cash_frames + earned_cash_bindings | |
225 | + | |
226 | + bonus_cash = checked_and_done_phraseology.filter(bonus=True).aggregate(Sum('cash'))['cash__sum'] | |
227 | + if bonus_cash is None: | |
228 | + bonus_cash = 0.0 | |
229 | + earned_cash += bonus_cash | |
230 | + | |
231 | + phraseologic_empty_frame_value = 1.0 | |
232 | + empty_value = 0.0 | |
233 | + | |
234 | + payments_file.write(u'Wykonane:\n') | |
235 | + for done_phr in done_phraseology.order_by('date'): | |
236 | + cash = done_phr.cash | |
237 | + if cash == 0.0: | |
238 | + cash = phraseologic_empty_frame_value | |
239 | + empty_value += phraseologic_empty_frame_value | |
240 | + try: | |
241 | + done_bonus = checked_and_done_phraseology.get(bonus=True, lemma__entry_obj=done_phr.lemma.entry_obj).cash | |
242 | + cash += done_bonus | |
243 | + except RealizedPhraseology.DoesNotExist: | |
244 | + pass | |
245 | + payments_file.write(u'%s\t%.2f\t%s\n' % (done_phr.lemma.entry_obj.name, | |
246 | + cash, | |
247 | + done_phr.date.strftime('%Y%m%d'))) | |
248 | + | |
249 | + payments_file.write(u'\n\nDodane powiazania frazeologiczne:\n') | |
250 | + for binding in used_bindings.order_by('date'): | |
251 | + payments_file.write(u'%s\t%.2f\t%s\n' % (binding.binded_entry.name, | |
252 | + binding.cash, | |
253 | + binding.date.strftime('%Y%m%d'))) | |
254 | + | |
255 | + earned_cash += empty_value | |
256 | + | |
257 | + phraseology_work_stats = {'earned_cash': round(earned_cash, 2), | |
258 | + 'added_bindings': added_bindings.count(), | |
259 | + 'used_bindings': used_bindings.count(),} | |
260 | + return phraseology_work_stats | |
261 | + | |
262 | + | |
263 | +def write_lexicographic_stats(payments_file, user, pos): | |
264 | + | |
265 | + real_lemmas = RealizedLemma.objects.filter(user_stats__user=user, | |
266 | + lemma__entry_obj__pos__tag=pos, | |
267 | + date__gte=STARTDATE, | |
268 | + date__lte=ENDDATE) | |
269 | + | |
270 | + earned_cash = real_lemmas.filter(status__type__sym_name='ready').aggregate(Sum('cash'))['cash__sum'] | |
271 | + if earned_cash is None: | |
272 | + earned_cash = 0.0 | |
273 | + | |
274 | + lemmas_to_erase_cash = 0.0 | |
275 | + lemmas_marked_to_erase = Lemma.objects.filter(owner=user, | |
276 | + old=False, | |
277 | + status__type__sym_name='erase', | |
278 | + entry_obj__pos__tag=pos) | |
279 | + | |
280 | + payments_file.write(u'Zaznaczone do usunięcia:\n') | |
281 | + for lemma in lemmas_marked_to_erase: | |
282 | + erase_date = lemma.status_history.order_by('-date')[0].date | |
283 | + if erase_date >= STARTDATE and erase_date <= ENDDATE: | |
284 | + payments_file.write(u'%s\t%.2f\t%s\n' % (lemma.entry_obj.name, | |
285 | + 1.0, | |
286 | + erase_date.strftime('%Y%m%d'))) | |
287 | + lemmas_to_erase_cash += 1.0 | |
288 | + earned_cash += lemmas_to_erase_cash | |
289 | + | |
290 | + bonus_cash = real_lemmas.filter(bonus=True).aggregate(Sum('cash'))['cash__sum'] | |
291 | + if bonus_cash is None: | |
292 | + bonus_cash = 0.0 | |
293 | + earned_cash += bonus_cash | |
294 | + | |
295 | + payments_file.write(u'\n\nWykonane:\n') | |
296 | + done_lemmas = real_lemmas.filter(bonus=False, | |
297 | + status__type__sym_name='ready').order_by('date') | |
298 | + for done_lemma in done_lemmas: | |
299 | + cash = done_lemma.cash | |
300 | + try: | |
301 | + bonus = real_lemmas.get(bonus=True, lemma__entry_obj=done_lemma.lemma.entry_obj).cash | |
302 | + cash += bonus | |
303 | + except RealizedLemma.DoesNotExist: | |
304 | + pass | |
305 | + payments_file.write(u'%s\t%.2f\t%s\n' % (done_lemma.lemma.entry_obj.name, | |
306 | + cash, | |
307 | + done_lemma.date.strftime('%Y%m%d'))) | |
308 | + | |
309 | + lex_work_stats = {'earned_cash': round(earned_cash, 2), | |
310 | + 'bonus_cash': round(bonus_cash, 2), | |
311 | + 'lemmas_to_erase_cash': round(lemmas_to_erase_cash, 2)} | |
312 | + return lex_work_stats | |
... | ... |
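Note: get_payments_data.py repeats the same "aggregate(Sum(...)) with a None fallback" pattern roughly a dozen times. A small helper along these lines (hypothetical, not part of this commit) would collapse each such triple into a single line:

    from django.db.models import Sum

    def sum_or_zero(queryset, field):
        # Sum() yields None for an empty queryset; normalize that to 0.
        total = queryset.aggregate(Sum(field))['%s__sum' % field]
        return total if total is not None else 0

    # usage, e.g. in write_semantic_stats:
    #   prop_frames = sum_or_zero(real_semantics, 'prop_frames')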
accounts/models.py
... | ... | @@ -279,6 +279,9 @@ class RealizedSemantics(Model): |
279 | 279 | # completed frames (filled in for the semanticist)
280 | 280 | made_frames = PositiveIntegerField(db_column='wykonane_ramki', |
281 | 281 | default=0) |
282 | + # shared frames (filled in for the semanticist) | |
283 | + related_frames = PositiveIntegerField(db_column='wspoldzielone_ramki', | |
284 | + default=0) | |
282 | 285 | # corrected frames (filled in for the supersemanticist)
283 | 286 | corr_frames = PositiveIntegerField(db_column='poprawione_ramki', |
284 | 287 | default=0) |
... | ... |
dictionary/ajax_lemma_status.py
... | ... | @@ -117,10 +117,10 @@ def lemma_status_change(request, status_id, lemma_id): |
117 | 117 | not lemma_obj.status.next_statuses.filter(pk=new_status.pk).exists()): |
118 | 118 | raise AjaxError('wrong change') |
119 | 119 | |
120 | - actual_semantic_frames = SemanticFrame.objects.none() | |
120 | + visible_semantic_frames = SemanticFrame.objects.none() | |
121 | 121 | next_status = False |
122 | 122 | if(new_status): |
123 | - actual_semantic_frames = backup_lemma_and_get_frames(lemma_obj) | |
123 | + visible_semantic_frames = backup_lemma_and_get_frames(lemma_obj) | |
124 | 124 | |
125 | 125 | if(new_status and new_status.priority > lemma_obj.status.priority): |
126 | 126 | next_status = True |
... | ... | @@ -223,8 +223,10 @@ def lemma_status_change(request, status_id, lemma_id): |
223 | 223 | and next_status): |
224 | 224 | ### charging fees for semantic readiness
225 | 225 | frame_value = 12.0 |
226 | - update_sem_stats_ready_s(lemma_obj.entry_obj, actual_semantic_frames, | |
227 | - lemma_obj.semanticist, new_status, frame_value) | |
226 | + related_frame_value = 2.0 | |
227 | + update_sem_stats_ready_s(lemma_obj.entry_obj, visible_semantic_frames, | |
228 | + lemma_obj.semanticist, new_status, frame_value, | |
229 | + related_frame_value) | |
228 | 230 | add_new_frames_to_phraseologic_propositions(lemma_obj) |
229 | 231 | changed = True |
230 | 232 | # change of the entry status to semantically checked
... | ... | @@ -236,8 +238,8 @@ def lemma_status_change(request, status_id, lemma_id): |
236 | 238 | part_bonus = 2.0 |
237 | 239 | connection_bonus = 0.1 |
238 | 240 | ### charging fees for checking, plus bonuses
239 | - update_sem_stats_conf_s(entry=lemma_obj.entry_obj, | |
240 | - semantic_frames=actual_semantic_frames, | |
241 | + update_sem_stats_conf_s(entry=lemma_obj.entry_obj, | |
242 | + checked_sem_frames_backup=visible_semantic_frames, | |
241 | 243 | semanticist=lemma_obj.semanticist, |
242 | 244 | supersemanticist=request.user, |
243 | 245 | status=new_status, |
... | ... | @@ -273,7 +275,7 @@ def lemma_status_change(request, status_id, lemma_id): |
273 | 275 | lemma=lemma_obj, |
274 | 276 | status=new_status) |
275 | 277 | status_change.save() |
276 | - status_change.semantic_frames.add(*actual_semantic_frames.all()) | |
278 | + status_change.semantic_frames.add(*visible_semantic_frames.all()) | |
277 | 279 | lemma_obj.status_history.add(status_change) |
278 | 280 | |
279 | 281 | if new_status: |
... | ... | @@ -472,45 +474,59 @@ def update_lemma_stats_conf_f(lemma, phraseologist, superphraseologist, status, |
472 | 474 | phraseologist.user_stats.phraseology_real_history.add(phraseologist_real_lemma) |
473 | 475 | |
474 | 476 | ####################### semantics ############################# |
475 | -def update_sem_stats_ready_s(entry, semantic_frames, semanticist, status, frame_value): | |
476 | - actual_frames_count = semantic_frames.count() | |
477 | - sem_dict = {'made_frames': actual_frames_count, | |
478 | - 'cash': frame_value*float(actual_frames_count)} | |
477 | +def update_sem_stats_ready_s(entry, visible_semantic_frames, semanticist, status, | |
478 | + frame_value, related_frame_value): | |
479 | + actual_frames = entry.actual_frames() | |
480 | + actual_frames_count = actual_frames.count() | |
479 | 481 | |
480 | - realized_semantics = RealizedSemantics(entry=entry, cash=sem_dict['cash'], | |
481 | - made_frames=sem_dict['made_frames'], | |
482 | + related_frames = entry.related_frames() | |
483 | + related_frames_count = related_frames.count() | |
484 | + | |
485 | + cash = frame_value*float(actual_frames_count) + related_frame_value*float(related_frames_count) | |
486 | + | |
487 | + | |
488 | + realized_semantics = RealizedSemantics(entry=entry, cash=cash, | |
489 | + made_frames=actual_frames_count, | |
490 | + related_frames=related_frames_count, | |
482 | 491 | status=status, bonus=False) |
483 | 492 | realized_semantics.save() |
484 | - realized_semantics.frames.add(*semantic_frames.all()) | |
493 | + realized_semantics.frames.add(*visible_semantic_frames.all()) | |
485 | 494 | semanticist.user_stats.semantics_real_history.add(realized_semantics) |
486 | 495 | |
487 | -def update_sem_stats_conf_s(entry, semantic_frames, semanticist, supersemanticist, status, | |
488 | - checked_frame_value, corrected_frame_value, | |
489 | - bonus_factor, part_bonus_factor, connection_bonus): | |
496 | +def update_sem_stats_conf_s(entry, checked_sem_frames_backup, semanticist, supersemanticist, status, | |
497 | + checked_frame_value, corrected_frame_value, | |
498 | + bonus_factor, part_bonus_factor, connection_bonus): | |
490 | 499 | ready_statuses = Lemma_Status.objects.filter(type__sym_name='ready_s') |
491 | 500 | q_ready_statuses = [Q(status=ready_status) for ready_status in ready_statuses.all()] |
492 | 501 | |
493 | 502 | ready_semantics = RealizedSemantics.objects.filter(reduce(operator.or_, q_ready_statuses)) |
494 | - ready_sem_frames= ready_semantics.get(entry=entry).frames | |
495 | - checked_sem_frames = semantic_frames | |
496 | - ready_to_checked_diffs = get_frames_differences(ready_sem_frames.all(), checked_sem_frames.all()) | |
497 | - checked_to_ready_diffs = get_frames_differences(checked_sem_frames.all(), ready_sem_frames.all()) | |
503 | + ready_sem_visible_frames = ready_semantics.get(entry=entry).frames | |
504 | + ready_sem_actual_frames = entry.filter_local(ready_sem_visible_frames) | |
505 | + ready_sem_related_frames = entry.filter_related(ready_sem_visible_frames) | |
506 | + | |
507 | + checked_sem_actual_frames = entry.actual_frames() | |
508 | + checked_sem_related_frames = entry.related_frames() | |
509 | + | |
510 | + actual_ready_to_checked_diffs = get_frames_differences(ready_sem_actual_frames.all(), checked_sem_actual_frames.all()) | |
511 | + actual_checked_to_ready_diffs = get_frames_differences(checked_sem_actual_frames.all(), ready_sem_actual_frames.all()) | |
512 | + | |
513 | + visible_ready_to_checked_diffs = get_frames_differences(ready_sem_visible_frames.all(), checked_sem_frames_backup.all()) | |
498 | 514 | |
499 | - connections_amount = count_connections(ready_to_checked_diffs) | |
500 | - sem_cash = (bonus_factor*float(len(ready_to_checked_diffs['matching_frames'])) + | |
501 | - part_bonus_factor*float(len(ready_to_checked_diffs['part_matching_frames'])) + | |
515 | + connections_amount = count_connections(entry, visible_ready_to_checked_diffs) | |
516 | + sem_cash = (bonus_factor*float(len(actual_ready_to_checked_diffs['matching_frames'])) + | |
517 | + part_bonus_factor*float(len(actual_ready_to_checked_diffs['part_matching_frames'])) + | |
502 | 518 | connection_bonus*float(connections_amount)) |
503 | - sem_dict = {'same_frames': len(ready_to_checked_diffs['matching_frames']), | |
504 | - 'part_same_frames': len(ready_to_checked_diffs['part_matching_frames']), | |
505 | - 'wrong_frames': len(ready_to_checked_diffs['missing_frames']), | |
519 | + sem_dict = {'same_frames': len(actual_ready_to_checked_diffs['matching_frames']), | |
520 | + 'part_same_frames': len(actual_ready_to_checked_diffs['part_matching_frames']), | |
521 | + 'wrong_frames': len(actual_ready_to_checked_diffs['missing_frames']), | |
506 | 522 | 'added_connections': connections_amount, |
507 | 523 | 'cash': sem_cash} |
508 | 524 | |
509 | - supersem_cash = (float(len(checked_to_ready_diffs['missing_frames'])+len(checked_to_ready_diffs['part_matching_frames']))*corrected_frame_value + | |
510 | - float(len(ready_to_checked_diffs['matching_frames']))*checked_frame_value) | |
511 | - supersem_dict = {'same_frames': len(checked_to_ready_diffs['matching_frames']), | |
512 | - 'part_same_frames': len(checked_to_ready_diffs['part_matching_frames']), | |
513 | - 'redo_frames': len(checked_to_ready_diffs['missing_frames']), | |
525 | + supersem_cash = (float(len(actual_checked_to_ready_diffs['missing_frames'])+len(actual_checked_to_ready_diffs['part_matching_frames']))*corrected_frame_value + | |
526 | + float(len(actual_ready_to_checked_diffs['matching_frames']))*checked_frame_value) | |
527 | + supersem_dict = {'same_frames': len(actual_checked_to_ready_diffs['matching_frames']), | |
528 | + 'part_same_frames': len(actual_checked_to_ready_diffs['part_matching_frames']), | |
529 | + 'redo_frames': len(actual_checked_to_ready_diffs['missing_frames']), | |
514 | 530 | 'cash': supersem_cash} |
515 | 531 | |
516 | 532 | supersem_real_semantics = RealizedSemantics(entry=entry, |
... | ... | @@ -521,7 +537,7 @@ def update_sem_stats_conf_s(entry, semantic_frames, semanticist, supersemanticis |
521 | 537 | status=status, |
522 | 538 | bonus=False) |
523 | 539 | supersem_real_semantics.save() |
524 | - supersem_real_semantics.frames.add(*semantic_frames.all()) | |
540 | + supersem_real_semantics.frames.add(*checked_sem_frames_backup.all()) | |
525 | 541 | supersemanticist.user_stats.semantics_real_history.add(supersem_real_semantics) |
526 | 542 | |
527 | 543 | sem_real_semantics = RealizedSemantics(entry=entry, |
... | ... | @@ -533,15 +549,16 @@ def update_sem_stats_conf_s(entry, semantic_frames, semanticist, supersemanticis |
533 | 549 | status=status, |
534 | 550 | bonus=True) |
535 | 551 | sem_real_semantics.save() |
536 | - sem_real_semantics.frames.add(*semantic_frames.all()) | |
552 | + sem_real_semantics.frames.add(*checked_sem_frames_backup.all()) | |
537 | 553 | semanticist.user_stats.semantics_real_history.add(sem_real_semantics) |
538 | 554 | |
539 | -def count_connections(differences): | |
540 | - amount = 0 | |
555 | +def count_connections(entry, differences): | |
556 | + amount = 0 | |
557 | + schemata = entry.actual_schemata() | |
541 | 558 | for frame in differences['matching_frames']: |
542 | - amount += frame.connected_schemata().count() | |
559 | + amount += frame.connected_schemata().filter(pk__in=schemata).count() | |
543 | 560 | for frame in differences['part_matching_frames']: |
544 | - amount += frame.connected_schemata().count() | |
561 | + amount += frame.connected_schemata().filter(pk__in=schemata).count() | |
545 | 562 | return amount |
546 | 563 | |
547 | 564 | def remove_semantic_payments(entry): |
... | ... |
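Worked example for the new 'ready_s' payment rule above (constants from this diff: frame_value = 12.0, related_frame_value = 2.0): an entry with 3 frames of its own and 2 frames shared in from related entries credits the semanticist with 3 * 12.0 + 2 * 2.0 = 40.0, recorded on the RealizedSemantics row as made_frames = 3 and related_frames = 2.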
dictionary/ajax_lemma_view.py
... | ... | @@ -798,6 +798,8 @@ def relate_entries(request, lemma_id, preview_lemma_id): |
798 | 798 | error, lemma, preview_lemma = check_if_selected_and_get(lemma_id, preview_lemma_id) |
799 | 799 | if not error: |
800 | 800 | error = check_if_can_relate(lemma, preview_lemma) |
801 | + if not error: | |
802 | + error = check_if_has_rights_to_relate(lemma, request.user) | |
801 | 803 | if error: |
802 | 804 | raise AjaxError(error) |
803 | 805 | else: |
... | ... | @@ -810,20 +812,42 @@ def check_if_can_relate(lemma, preview_lemma): |
810 | 812 | error = 'same lemma id' |
811 | 813 | elif lemma.entry_obj.pos.tag == preview_lemma.entry_obj.pos.tag: |
812 | 814 | error = 'same part of speech' |
813 | - return error | |
815 | + return error | |
816 | + | |
817 | +def check_if_has_rights_to_relate(lemma, user): | |
818 | + error = '' | |
819 | + if not user_can_modify(lemma, user): | |
820 | + error = 'can not edit' | |
821 | + return error | |
814 | 822 | |
815 | 823 | def add_entries_relation(lemma, preview_lemma): |
816 | 824 | lemma_entry, preview_lemma_entry = get_entries(lemma, preview_lemma) |
817 | 825 | lemma_entry.rel_entries.add(preview_lemma_entry) |
818 | 826 | preview_lemma_entry.rel_entries.add(lemma_entry) |
819 | - | |
827 | + | |
820 | 828 | @ajax(method='post') |
821 | 829 | def disrelate_entries(request, lemma_id, preview_lemma_id): |
822 | 830 | error, lemma, preview_lemma = check_if_selected_and_get(lemma_id, preview_lemma_id) |
823 | 831 | if not error: |
832 | + error = check_if_has_rights_to_relate(lemma, request.user) | |
833 | + if not error: | |
834 | + error = check_if_share_semantic_frames(lemma, preview_lemma) | |
835 | + if error: | |
836 | + raise AjaxError(error) | |
837 | + else: | |
824 | 838 | cancel_entries_relation(request, lemma, preview_lemma) |
825 | 839 | return {} |
826 | 840 | |
841 | +def check_if_share_semantic_frames(lemma, preview_lemma): | |
842 | + error = '' | |
843 | + lemma_visible_frames = lemma.entry_obj.visible_frames() | |
844 | + preview_visible_frames = preview_lemma.entry_obj.visible_frames() | |
845 | + | |
846 | + if (lemma_visible_frames.filter(pk__in=preview_visible_frames).exists() or | |
847 | + preview_visible_frames.filter(pk__in=lemma_visible_frames).exists()): | |
848 | + error = 'shared frames' | |
849 | + return error | |
850 | + | |
827 | 851 | def cancel_entries_relation(request, lemma, preview_lemma): |
828 | 852 | lemma_entry, preview_lemma_entry = get_entries(lemma, preview_lemma) |
829 | 853 | lemma_entry.rel_entries.remove(preview_lemma_entry) |
... | ... |
dictionary/ajax_user_stats.py
... | ... | @@ -239,6 +239,9 @@ def get_semantics_stats(user): |
239 | 239 | ncorr_frames = RealizedSemantics.objects.filter(user_stats__user=user).aggregate(Sum('ncorr_frames'))['ncorr_frames__sum'] |
240 | 240 | if ncorr_frames == None: |
241 | 241 | ncorr_frames = 0 |
242 | + related_frames = RealizedSemantics.objects.filter(user_stats__user=user).aggregate(Sum('related_frames'))['related_frames__sum'] | |
243 | + if related_frames == None: | |
244 | + related_frames = 0 | |
242 | 245 | made_frames = RealizedSemantics.objects.filter(user_stats__user=user).aggregate(Sum('made_frames'))['made_frames__sum'] |
243 | 246 | if made_frames == None: |
244 | 247 | made_frames = 0 |
... | ... | @@ -257,6 +260,7 @@ def get_semantics_stats(user): |
257 | 260 | 'corr_frames': corr_frames, |
258 | 261 | 'part_corr_frames': part_corr_frames, |
259 | 262 | 'checked_frames': ncorr_frames+corr_frames+part_corr_frames, |
263 | + 'related_frames': related_frames, | |
260 | 264 | 'made_frames': made_frames, |
261 | 265 | 'efficacy': round(efficacy, 2), |
262 | 266 | 'added_connections' : added_connections} |
... | ... |
dictionary/management/commands/create_TEI_walenty.py
... | ... | @@ -5,38 +5,56 @@ import os |
5 | 5 | import tarfile |
6 | 6 | |
7 | 7 | from django.core.management.base import BaseCommand |
8 | +from optparse import make_option | |
8 | 9 | |
9 | 10 | from dictionary.models import Lemma, Frame_Opinion_Value, \ |
10 | - get_ready_statuses | |
11 | + get_statuses | |
11 | 12 | from dictionary.teixml import createteixml, write_phrase_types_expansions_in_TEI |
12 | 13 | from settings import WALENTY_PATH |
13 | 14 | |
14 | 15 | class Command(BaseCommand): |
15 | 16 | args = '<dict dict ...>' |
16 | 17 | help = 'Get Walenty in TEI format.' |
18 | + option_list = BaseCommand.option_list + ( | |
19 | + make_option('--min_status', | |
20 | + action='store', | |
21 | + type='string', | |
22 | + dest='min_status_type', | |
23 | + default='ready', | |
24 | + help='Minimum lemma status.'), | |
25 | + make_option('--start_date', | |
26 | + action='store', | |
27 | + type='string', | |
28 | + dest='start_date', | |
29 | + default='all', | |
30 | + help='Status change start date (format: YYYY-MM-DD).'), | |
31 | + | |
32 | + ) | |
17 | 33 | |
18 | 34 | def handle(self, *args, **options): |
19 | 35 | try: |
20 | 36 | now = datetime.datetime.now().strftime('%Y%m%d') |
21 | - | |
22 | 37 | vocab_names = list(args) |
23 | 38 | vocab_names.sort() |
24 | - if vocab_names: | |
25 | - filename_base = '%s_%s_%s' % ('walenty', '+'.join(vocab_names), now) | |
26 | - else: | |
27 | - filename_base = '%s_%s' % ('walenty', now) | |
39 | + | |
40 | + filename_base = self.create_filename_base(vocab_names, options, now) | |
28 | 41 | |
29 | 42 | base_path = os.path.join(WALENTY_PATH, filename_base) |
30 | 43 | outpath = base_path + '.xml' |
31 | - ready_statuses = get_ready_statuses() | |
44 | + statuses = get_statuses(options['min_status_type']) | |
32 | 45 | |
33 | 46 | lemmas = Lemma.objects.filter(old=False) |
34 | 47 | if vocab_names: |
35 | 48 | lemmas = lemmas.filter(vocabulary__name__in=vocab_names) |
36 | - ready_lemmas = lemmas.filter(status__in=ready_statuses).order_by('entry_obj__name') | |
49 | + lemmas = lemmas.filter(status__in=statuses) | |
50 | + if options['start_date'] != 'all': | |
51 | + lemmas = self.filter_lemmas_by_status_change(lemmas, statuses, options['start_date']) | |
52 | + lemmas = lemmas.order_by('entry_obj__name') | |
53 | + | |
54 | + self.print_statistics(lemmas) | |
37 | 55 | |
38 | 56 | frame_opinion_values = Frame_Opinion_Value.objects.all() |
39 | - createteixml(outpath, ready_lemmas, frame_opinion_values) | |
57 | + createteixml(outpath, lemmas, frame_opinion_values) | |
40 | 58 | archive = tarfile.open(base_path + '-TEI.tar.gz', 'w:gz') |
41 | 59 | |
42 | 60 | phrase_types_expand_path = os.path.join(WALENTY_PATH, |
... | ... | @@ -50,3 +68,49 @@ class Command(BaseCommand): |
50 | 68 | archive.close() |
51 | 69 | os.remove(outpath) |
52 | 70 | os.remove(phrase_types_expand_path) |
71 | + | |
72 | + def create_filename_base(self, vocab_names, options, now): | |
73 | + start_date = '' | |
74 | + if options['start_date'] != 'all': | |
75 | + start_date = '-' + options['start_date'].replace('-', '') | |
76 | + | |
77 | + vocab_names_str = '' | |
78 | + if vocab_names: | |
79 | + vocab_names_str = '-' + '+'.join(vocab_names) | |
80 | + | |
81 | + min_status = '' | |
82 | + if options['min_status_type'] != 'ready': | |
83 | + min_status = '-' + options['min_status_type'] | |
84 | + | |
85 | + filename_base = 'walenty%s%s%s_%s' % (min_status, vocab_names_str, | |
86 | + start_date, now) | |
87 | + return filename_base | |
88 | + | |
89 | + | |
90 | + def filter_lemmas_by_status_change(self, lemmas, statuses, start_date_str): | |
91 | + start_date = self.parse_date(start_date_str) | |
92 | + filtered_lemmas_pks = [] | |
93 | + for lemma in lemmas: | |
94 | + if lemma.status_history.filter(status=statuses[0], date__gte=start_date).exists(): | |
95 | + filtered_lemmas_pks.append(lemma.pk) | |
96 | + return lemmas.filter(pk__in=filtered_lemmas_pks) | |
97 | + | |
98 | + def parse_date(self, date_str): | |
99 | + date_parts = date_str.split('-') | |
100 | + year = int(date_parts[0]) | |
101 | + month = int(date_parts[1]) | |
102 | + day = int(date_parts[2]) | |
103 | + date = datetime.datetime(year, month, day, 00, 00) | |
104 | + return date | |
105 | + | |
106 | + def print_statistics(self, lemmas): | |
107 | + count = {'frames': 0, | |
108 | + 'arguments': 0} | |
109 | + for lemma in lemmas: | |
110 | + frames = lemma.entry_obj.actual_frames() | |
111 | + count['frames'] += frames.count() | |
112 | + for frame in frames.all(): | |
113 | + count['arguments'] += frame.complements.count() | |
114 | + print (u'Lemmas:\t%d' % lemmas.count()) | |
115 | + print (u'Frames:\t%d' % count['frames']) | |
116 | + print (u'Arguments:\t%d' % count['arguments']) | |
... | ... |
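With the two new options, the TEI export can now be limited by minimum status and by status-change date. A typical invocation (command name from the file path; the 'checked' status type is assumed from its use elsewhere in this commit) would be:

    >> python manage.py create_TEI_walenty --min_status=checked --start_date=2017-01-01

which writes a walenty-checked-20170101_<YYYYMMDD>.xml file under WALENTY_PATH, packs it into a -TEI.tar.gz archive together with the phrase-type expansions, and prints the lemma/frame/argument counts.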
dictionary/management/commands/get_examples.py
... | ... | @@ -6,7 +6,8 @@ import os |
6 | 6 | |
7 | 7 | from django.core.management.base import BaseCommand |
8 | 8 | |
9 | -from dictionary.models import Lemma, get_ready_statuses | |
9 | +from dictionary.models import Lemma, NKJP_Example, get_ready_statuses, get_checked_statuses | |
10 | +from semantics.models import LexicalUnitExamples | |
10 | 11 | from settings import PROJECT_PATH |
11 | 12 | |
12 | 13 | BASE_PATH = os.path.join(PROJECT_PATH, 'data') |
... | ... | @@ -28,8 +29,13 @@ class Command(BaseCommand): |
28 | 29 | get_examples() |
29 | 30 | |
30 | 31 | def get_examples(): |
31 | - ready_statuses = get_ready_statuses() | |
32 | - write_detailed_examples(ready_statuses) | |
32 | + write_example_sentences('semantyczne-S_sprawdzone-20170811.txt', ['(S) sprawdzone'], True) | |
33 | + write_example_sentences('wszystkie-S_sprawdzone-20170811.txt', ['(S) sprawdzone'], False) | |
34 | + | |
35 | + checked_names = [checked.status for checked in get_checked_statuses()] | |
36 | + write_example_sentences('wszystkie-sprawdzone-20170811.txt', checked_names, False) | |
37 | + # ready_statuses = get_ready_statuses() | |
38 | + # write_detailed_examples(ready_statuses) | |
33 | 39 | # write_examples(ready_statuses) |
34 | 40 | |
35 | 41 | def write_detailed_examples(statuses): |
... | ... | @@ -76,4 +82,51 @@ def write_examples(statuses): |
76 | 82 | examples_file.write('\t\t--> %s\n' % example.sentence) |
77 | 83 | examples_file.write('\n\n') |
78 | 84 | finally: |
79 | - examples_file.close() | |
85 | + examples_file.close() | |
86 | + | |
87 | + | |
88 | +def write_example_sentences(filename, statuses, semantic): | |
89 | + try: | |
90 | + examples_file = codecs.open(os.path.join(BASE_PATH, filename), 'wt', 'utf-8') | |
91 | + for lemma in Lemma.objects.filter(old=False, entry_obj__pos__tag='verb').filter(status__status__in=statuses).order_by('entry_obj__name'): | |
92 | + print lemma | |
93 | + wrong_examples = lemma.nkjp_examples.filter(opinion__opinion=u'zły') | |
94 | + not_wanted_semantic_examples = get_not_needed_semantic_examples(lemma) | |
95 | + wanted_semantic_examples = get_wanted_semantic_examples(lemma) | |
96 | + | |
97 | + for example in lemma.nkjp_examples.filter(source__sym_name__in=['NKJP300M', 'NKJP1800M']): | |
98 | + if (lemma.frame_opinions.filter(frame=example.frame, value__value__in=[u'archaiczny', | |
99 | + u'zły']).exists()): | |
100 | + continue | |
101 | + | |
102 | + if semantic: | |
103 | + if (wanted_semantic_examples.filter(pk=example.pk).exists() and | |
104 | + not wrong_examples.filter(pk=example.pk).exists()): | |
105 | + examples_file.write(u'%s\n' % example.sentence) | |
106 | + else: | |
107 | + if (not not_wanted_semantic_examples.filter(pk=example.pk).exists() and | |
108 | + not wrong_examples.filter(pk=example.pk).exists()): | |
109 | + examples_file.write(u'%s\n' % example.sentence) | |
110 | + | |
111 | + finally: | |
112 | + examples_file.close() | |
113 | + | |
114 | + | |
115 | +def get_not_needed_semantic_examples(lemma): | |
116 | + not_needed_ids = [] | |
117 | + not_needed_frames = lemma.entry_obj.actual_frames().filter(opinion__value__in=[u'archaiczna', u'zła']) | |
118 | + for frame in not_needed_frames: | |
119 | + for lu in frame.lexical_units.all(): | |
120 | + for luex in LexicalUnitExamples.objects.filter(lexical_unit=lu): | |
121 | + not_needed_ids.append(luex.example.id) | |
122 | + return NKJP_Example.objects.filter(id__in=not_needed_ids) | |
123 | + | |
124 | + | |
125 | +def get_wanted_semantic_examples(lemma): | |
126 | + needed_ids = [] | |
127 | + needed_frames = lemma.entry_obj.actual_frames().exclude(opinion__value__in=[u'archaiczna', u'zła']) | |
128 | + for frame in needed_frames: | |
129 | + for lu in frame.lexical_units.all(): | |
130 | + for luex in LexicalUnitExamples.objects.filter(lexical_unit=lu): | |
131 | + needed_ids.append(luex.example.id) | |
132 | + return NKJP_Example.objects.filter(id__in=needed_ids) | |
... | ... |
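As reworked above, get_examples now writes three sentence lists under PROJECT_PATH/data: semantic-only examples from entries with status '(S) sprawdzone', all examples from those entries, and all examples from every checked status. It runs as a plain management command:

    >> python manage.py get_examples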
dictionary/models.py
... | ... | @@ -100,6 +100,10 @@ def get_checked_statuses(): |
100 | 100 | def get_ready_statuses(): |
101 | 101 | ready_type = LemmaStatusType.objects.get(sym_name='ready') |
102 | 102 | return Lemma_Status.objects.filter(type__priority__gte=ready_type.priority).distinct() |
103 | + | |
104 | +def get_statuses(min_status_type): | |
105 | + min_type = LemmaStatusType.objects.get(sym_name=min_status_type) | |
106 | + return Lemma_Status.objects.filter(type__priority__gte=min_type.priority).distinct() | |
103 | 107 | |
104 | 108 | |
105 | 109 | class LemmaStatusType(Model): |
... | ... | @@ -1365,7 +1369,7 @@ class AspectRelationsGroup(Model): |
1365 | 1369 | return ','.join(str_ls) |
1366 | 1370 | |
1367 | 1371 | class Entry(Model): |
1368 | - name = CharField(max_length=64, db_column='nazwa') # removed , unique=True |
1372 | + name = CharField(max_length=64, db_column='nazwa') | |
1369 | 1373 | # part of speech
1370 | 1374 | pos = ForeignKey('POS', db_column='czesc_mowy', related_name='entries') |
1371 | 1375 | # powiazane lematy |
... | ... | @@ -1391,18 +1395,50 @@ class Entry(Model): |
1391 | 1395 | ('change_semantics', u'Może edytować semantykę.'), |
1392 | 1396 | ('view_semantics', u'Może oglądać semantykę.'), |
1393 | 1397 | ) |
1394 | - | |
1398 | + | |
1399 | + def related_frames(self): | |
1400 | + visible = self.visible_frames() | |
1401 | + actual = self.actual_frames() | |
1402 | + return visible.exclude(pk__in=actual) | |
1403 | + | |
1404 | + def visible_frames(self): | |
1405 | + frames = [] | |
1406 | + act_frames = self.actual_frames() | |
1407 | + for frame in self.all_frames(): | |
1408 | + if act_frames.filter(pk=frame.pk).exists(): | |
1409 | + frames.append(frame.pk) | |
1410 | + else: | |
1411 | + for lu in frame.lexical_units.all(): | |
1412 | + if self.meanings.filter(pk=lu.pk).exists(): | |
1413 | + frames.append(frame.pk) | |
1414 | + break | |
1415 | + return get_model('semantics', 'SemanticFrame').objects.filter(pk__in=frames) | |
1416 | + | |
1417 | + def all_frames(self): | |
1418 | + frames = self.actual_frames() | |
1419 | + for entry in self.rel_entries.all(): | |
1420 | + new_frames = entry.actual_frames() | |
1421 | + frames |= new_frames | |
1422 | + return get_model('semantics', 'SemanticFrame').objects.filter(pk__in=frames) | |
1423 | + | |
1395 | 1424 | def actual_frames(self): |
1396 | - # frame_ids = [] | |
1397 | - # lexical_units = self.meanings.order_by('sense') | |
1398 | - # for lexical_unit in lexical_units: | |
1399 | - # frame_ids.extend([f.id for f in lexical_unit.actual_frames()]) | |
1400 | - # return get_model('semantics', 'SemanticFrame').objects.filter(id__in=list(set(frame_ids))) | |
1401 | 1425 | return self.semantic_frames.filter(next__isnull=True, removed=False) |
1426 | + | |
1427 | + def actual_schemata(self): | |
1428 | + return self.lemmas.get(old=False).frames.all() | |
1429 | + | |
1430 | + def filter_local(self, frames): | |
1431 | + return frames.filter(pk__in=self.semantic_frames.all()) | |
1432 | + | |
1433 | + def filter_related(self, frames): | |
1434 | + return frames.exclude(pk__in=self.semantic_frames.all()) | |
1402 | 1435 | |
1403 | 1436 | def matching_connections(self, schema, position, phrase_type): |
1404 | - frames = self.actual_frames() | |
1437 | + | |
1405 | 1438 | matching_connections = [] |
1439 | + | |
1440 | + frames = self.visible_frames() | |
1441 | + | |
1406 | 1442 | for frame in frames: |
1407 | 1443 | for compl in frame.complements.all(): |
1408 | 1444 | matching_realizations = compl.realizations.filter(frame=schema, |
... | ... |
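The Entry helpers added above partition semantic frames as follows: actual_frames() returns the frames stored on the entry itself (not superseded, not removed); all_frames() adds the actual frames of every entry in rel_entries; visible_frames() keeps the entry's own frames plus those related frames that share a lexical unit with the entry's meanings; related_frames() is visible_frames() minus actual_frames(). A hypothetical sanity check (the entry lookup is invented for illustration):

    entry = Entry.objects.filter(name=u'dach')[0]  # hypothetical entry
    own = entry.actual_frames()
    shared = entry.related_frames()
    # own and shared are disjoint by construction:
    assert not own.filter(pk__in=shared).exists()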
dictionary/saving.py
... | ... | @@ -9,14 +9,17 @@ from wordnet.models import LexicalUnit |
9 | 9 | def get_semantic_operations(lemma, schemata_conversions): |
10 | 10 | connections = [] |
11 | 11 | operations = [] |
12 | - frames = lemma.entry_obj.actual_frames() | |
12 | + | |
13 | + frames = lemma.entry_obj.visible_frames() | |
14 | + | |
13 | 15 | for conv in schemata_conversions: |
14 | 16 | schema_operations = get_reconnect_operations_and_extend_connections(frames, |
15 | 17 | connections, |
16 | 18 | conv['obj'], |
17 | 19 | conv['js']) |
18 | 20 | operations.extend(schema_operations) |
19 | - operations.extend(get_disconnect_operations(frames, connections)) | |
21 | + operations.extend(get_disconnect_operations(lemma, frames, connections)) | |
22 | + | |
20 | 23 | return operations |
21 | 24 | |
22 | 25 | def get_reconnect_operations_and_extend_connections(frames, connections, schema, js_schema): |
... | ... | @@ -75,20 +78,31 @@ def create_phrase_type_ref(schema, position, phrase_type, alternation): |
75 | 78 | def create_operation(operation, arg_ref, phrase_type_ref): |
76 | 79 | return {'operation': operation, 'arg': arg_ref, 'connect': phrase_type_ref} |
77 | 80 | |
78 | -def get_disconnect_operations(frames, connections): | |
81 | +def get_disconnect_operations(lemma, frames, connections): | |
79 | 82 | operations = [] |
83 | + shared_schemata_ids = get_shared_schemata_ids(lemma) | |
80 | 84 | for frame in frames: |
81 | 85 | for compl in frame.complements.all(): |
82 | 86 | conn_dict = next((conn_dict |
83 | 87 | for conn_dict in connections if conn_dict['compl'] == compl.id), None) |
84 | 88 | for real in compl.realizations.all(): |
85 | - if not conn_dict or not real.id in conn_dict['realizations']: | |
86 | - phrase_type_ref = create_phrase_type_ref(real.frame, real.position, | |
87 | - real.argument, real.alternation) | |
88 | - arg_ref = create_argument_ref(frame, compl) | |
89 | - operations.append(create_operation('disconnect', arg_ref, phrase_type_ref)) | |
89 | + if real.frame.id not in shared_schemata_ids: | |
90 | + if not conn_dict or not real.id in conn_dict['realizations']: | |
91 | + phrase_type_ref = create_phrase_type_ref(real.frame, real.position, | |
92 | + real.argument, real.alternation) | |
93 | + arg_ref = create_argument_ref(frame, compl) | |
94 | + operations.append(create_operation('disconnect', arg_ref, phrase_type_ref)) | |
90 | 95 | return operations |
91 | 96 | |
97 | +def get_shared_schemata_ids(lemma): | |
98 | + print lemma | |
99 | + ids = [f.id for f in lemma.frames.all()] | |
100 | + print ids | |
101 | + for connected in lemma.entry_obj.rel_entries.all(): | |
102 | + ids += [f.id for f in connected.actual_lemma().frames.all()] | |
103 | + print ids | |
104 | + return ids | |
105 | + | |
92 | 106 | def update_connections(lemma_id, reconnect_operations, user): |
93 | 107 | modify_frames(lemma_id, reconnect_operations, user) |
94 | 108 | |
... | ... | @@ -114,4 +128,3 @@ def disconnect_example_operation(example_dict, example_obj): |
114 | 128 | |
115 | 129 | def reconnect_examples(lemma, operations): |
116 | 130 | update_meanings(lemma.id, operations) |
117 | - | |
118 | 131 | \ No newline at end of file |
... | ... |
dictionary/static/js/lemma-view.js
... | ... | @@ -205,41 +205,6 @@ function load_content(id) { |
205 | 205 | loadSchemataAndExamples(); |
206 | 206 | |
207 | 207 | createSplitter('framesSplit','new-frame-tables', 'tabs'); |
208 | - /*if(window.can_modify) | |
209 | - { | |
210 | - addSyntacticFramesPerm = user_has_perm('dictionary.add_syntactic_frames'); | |
211 | - addPhraseologicFramesPerm = user_has_perm('dictionary.add_phraseologic_frames'); | |
212 | - | |
213 | - if(addSyntacticFramesPerm || addPhraseologicFramesPerm) { | |
214 | - $(document).unbind('keydown'); | |
215 | - $(document).bind('keydown', 'shift+s', saveHandle); | |
216 | - $(document).bind('keydown', 'shift+z', function(evt){backOneModification(); return false; }); | |
217 | - $(document).bind('keydown', 'shift+y', function(evt){forwardOneModification(); return false; }); | |
218 | - $(document).bind('keydown', 'shift+a', function(evt){addElement(); return false; }); | |
219 | - $(document).bind('keydown', 'shift+r', function(evt){removeElement(); return false; }); | |
220 | - $(document).bind('keydown', 'shift+d', function(evt){duplicateElement(); return false; }); | |
221 | - $(document).bind('keydown', 'shift+c', function(evt){copyElement(); return false; }); | |
222 | - $(document).bind('keydown', 'shift+v', function(evt){pasteElement(); return false; }); | |
223 | - $(document).bind('keydown', 'shift+w', function(evt){validateSchemata(); return false; }); | |
224 | - if(addSyntacticFramesPerm) { | |
225 | - $(document).bind('keydown', 'shift+x', function(evt){cutElement(); return false; }); | |
226 | - $(document).bind('keydown', 'shift+m', function(evt){reserveLemma(); return false; }); | |
227 | - } | |
228 | - if(addPhraseologicFramesPerm) { | |
229 | - $(document).bind('keydown', 'shift+l', function(evt){addPhraseologicFrame(); return false; }); | |
230 | - $(document).bind('keydown', 'shift+b', function(evt){openAssignPhraseologicFrameDialog(); return false; }); | |
231 | - } | |
232 | - } | |
233 | - } | |
234 | - else | |
235 | - { | |
236 | - $(document).unbind('keydown'); | |
237 | - $.get(ajax_user_has_perm, {perm: 'dictionary.own_lemmas'}, function(result) { | |
238 | - if(result['has_perm']) { | |
239 | - $(document).bind('keydown', 'shift+m', function(evt){reserveLemma(); return false; }); | |
240 | - } | |
241 | - }); | |
242 | - }*/ | |
243 | 208 | |
244 | 209 | if(document.getElementById("lemma_example_show")) |
245 | 210 | { |
... | ... | @@ -3527,8 +3492,11 @@ function restore_lemma() { |
3527 | 3492 | return false; |
3528 | 3493 | } |
3529 | 3494 | |
3530 | - function relateEntries() | |
3531 | - { | |
3495 | + function relateEntries() { | |
3496 | + if(semanticsChanged() || window.change) { | |
3497 | + error_alert('Przed dodaniem relacji hasło musi zostać zapisane.'); | |
3498 | + return false; | |
3499 | + } | |
3532 | 3500 | if (confirm('Czy jesteś pewien, że chcesz powiazać hasło z zakładki "Schematy" z hasłem wybranym w zakładce "Podgląd hasła"?')) { |
3533 | 3501 | ShowProgressAnimation(); |
3534 | 3502 | $.ajaxJSON({ |
... | ... | @@ -3541,6 +3509,7 @@ function restore_lemma() { |
3541 | 3509 | |
3542 | 3510 | callback: function(result) { |
3543 | 3511 | $("button#prev_disrelate_entries").css("visibility", "visible"); |
3512 | + reloadFrames(window.lemma_id); | |
3544 | 3513 | HideProgressAnimation(); |
3545 | 3514 | }, |
3546 | 3515 | error_callback: function(xhr, status, error) { |
... | ... | @@ -3562,9 +3531,13 @@ function restore_lemma() { |
3562 | 3531 | HideProgressAnimation(); |
3563 | 3532 | error_alert('Nie można powiązywać haseł reprezentujących tę samą część mowy.'); |
3564 | 3533 | return false; |
3565 | - } | |
3566 | - else | |
3567 | - { | |
3534 | + } | |
3535 | + else if (result == 'can not edit') { | |
3536 | + HideProgressAnimation(); | |
3537 | + error_alert('Brak uprawnień do edycji hasła.'); | |
3538 | + return false; | |
3539 | + } | |
3540 | + else { | |
3568 | 3541 | HideProgressAnimation(); |
3569 | 3542 | return true; |
3570 | 3543 | } |
... | ... | @@ -3574,8 +3547,11 @@ function restore_lemma() { |
3574 | 3547 | } |
3575 | 3548 | } |
3576 | 3549 | |
3577 | - function disrelateEntries() | |
3578 | - { | |
3550 | + function disrelateEntries() { | |
3551 | + if(semanticsChanged() || window.change) { | |
3552 | + error_alert('Przed usunięciem relacji hasło musi zostać zapisane.'); | |
3553 | + return false; | |
3554 | + } | |
3579 | 3555 | if (confirm('Czy jesteś pewien, że chcesz anulować relację hasła wybranego w zakładce "Schematy" z hasłem wybranym w zakładce "Podgląd hasła"?')) { |
3580 | 3556 | ShowProgressAnimation(); |
3581 | 3557 | $.ajaxJSON({ |
... | ... | @@ -3585,8 +3561,9 @@ function restore_lemma() { |
3585 | 3561 | lemma_id: window.lemma_id, |
3586 | 3562 | preview_lemma_id: window.prev_lemma_id |
3587 | 3563 | }, |
3588 | - callback: function(result) { | |
3564 | + callback: function(result) { | |
3589 | 3565 | $("button#prev_disrelate_entries").css("visibility", "hidden"); |
3566 | + reloadFrames(window.lemma_id); | |
3590 | 3567 | HideProgressAnimation(); |
3591 | 3568 | }, |
3592 | 3569 | error_callback: function(xhr, status, error) { |
... | ... | @@ -3594,8 +3571,22 @@ function restore_lemma() { |
3594 | 3571 | error_alert(status + ': ' + error); |
3595 | 3572 | }, |
3596 | 3573 | bad_data_callback: function(result) { |
3597 | - HideProgressAnimation(); | |
3598 | - return true; | |
3574 | + if (result == 'lemma not selected') { | |
3575 | + HideProgressAnimation(); | |
3576 | + error_alert('Nie zaznaczono hasła w zakładce "Schematy" lub "Podglądzie hasła".'); | |
3577 | + return false; | |
3578 | + } else if (result == 'can not edit') { | |
3579 | + HideProgressAnimation(); | |
3580 | + error_alert('Brak uprawnień do edycji hasła.'); | |
3581 | + return false; | |
3582 | + } else if (result == 'shared frames') { | |
3583 | + HideProgressAnimation(); | |
3584 | + error_alert('Nie można anulować: Hasła współdzielą ramy semantyczne.'); | |
3585 | + return false; | |
3586 | + } else { | |
3587 | + HideProgressAnimation(); | |
3588 | + return true; | |
3589 | + } | |
3599 | 3590 | }, |
3600 | 3591 | }); |
3601 | 3592 | return false; |
... | ... |
dictionary/templates/lemma_preview.html
... | ... | @@ -164,11 +164,13 @@ function unselectPrevTd(id) |
164 | 164 | |
165 | 165 | |
166 | 166 | <div id="prev_frames_modif"> |
167 | - {% if perms.dictionary.add_syntactic_frames or perms.dictionary.add_phraseologic_frames %} | |
168 | - <div> | |
169 | - <button type="button" id="prev_copy" style="width:120px">Kopiuj</button> | |
170 | - </div> | |
171 | - {% if perms.dictionary.add_syntactic_frames %} | |
167 | + {% if perms.dictionary.add_syntactic_frames or perms.dictionary.add_phraseologic_frames or perms.dictionary.add_semantic_frames %} | |
168 | + {% if perms.dictionary.add_syntactic_frames or perms.dictionary.add_phraseologic_frames %} | |
169 | + <div> | |
170 | + <button type="button" id="prev_copy" style="width:120px">Kopiuj</button> | |
171 | + </div> | |
172 | + {% endif %} | |
173 | + {% if perms.dictionary.add_syntactic_frames or perms.dictionary.add_semantic_frames %} | |
172 | 174 | <div> |
173 | 175 | <button type="button" id="prev_relate_entries" style="width:120px">Powiąż hasła</button> |
174 | 176 | <button type="button" id="prev_disrelate_entries" style="width:140px; visibility:hidden;">Anuluj powiązanie</button> |
... | ... |
dictionary/templates/lemma_view.html
... | ... | @@ -44,7 +44,7 @@ |
44 | 44 | <span class="ui-icon ui-icon-columns">pokaż/ukryj</span> |
45 | 45 | </button> |
46 | 46 | {% endif %} |
47 | - {% if perms.dictionary.change_lemmas %} | |
47 | + {% if perms.dictionary.change_lemmas or perms.dictionary.change_semantics %} | |
48 | 48 | <button id="reset-similar-button" title="ukryj podobne"> |
49 | 49 | <span class="ui-icon ui-icon-closethick">ukryj podobne</span> |
50 | 50 | </button> |
... | ... | @@ -63,7 +63,6 @@ |
63 | 63 | <ul> |
64 | 64 | <li id="refresh_frames"><a href="#new_frames">{% trans "Schematy" %} [<span id="new-frames-count"></span>]</a></li> |
65 | 65 | <li><a href="#semantics">{% trans "Semantyka" %} [<span id="semantic-frames-count"></span>]</a></li> |
66 | - <!-- li><a href="#old_frames">{% trans "Stare schematy" %}</a></li --> | |
67 | 66 | {% if perms.dictionary.add_notes %} |
68 | 67 | <li><a href="#notes">{% trans "Notatki" %} [<span id="lemma-notes-count"></span>]</a></li> |
69 | 68 | {% endif %} |
... | ... | @@ -73,6 +72,7 @@ |
73 | 72 | <li><a href="#change_ctrl">{% trans "Kontrola zmian" %}</a></li> |
74 | 73 | <li><a href="#status">{% trans "Status" %}</a></li> |
75 | 74 | {% elif perms.dictionary.change_semantics %} |
75 | + <li><a href="#preview_lemma">{% trans "Podgląd hasła" %}</a></li> | |
76 | 76 | <li><a href="#status">{% trans "Status" %}</a></li> |
77 | 77 | {% endif %} |
78 | 78 | <li id="lemma_desc" style="float:right;"></li> |
... | ... | @@ -95,6 +95,8 @@ |
95 | 95 | <div id="status"> |
96 | 96 | </div> |
97 | 97 | {% elif perms.dictionary.change_semantics %} |
98 | + <div id="preview_lemma"> | |
99 | + </div> | |
98 | 100 | <div id="status"> |
99 | 101 | </div> |
100 | 102 | {% endif %} |
... | ... |
dictionary/templates/sel_user_stats.html
... | ... | @@ -123,12 +123,13 @@ |
123 | 123 | <table class='PaymentsTable'> |
124 | 124 | <tr> |
125 | 125 | <td class='EmptyCell' colspan=1></td> |
126 | - <td class='ColumnHeader' colspan=7>Semantycy:</td> | |
126 | + <td class='ColumnHeader' colspan=8>Semantycy:</td> | |
127 | 127 | <td class='ColumnHeader' colspan=3>Supersemantycy:</td> |
128 | 128 | </tr> |
129 | 129 | <tr> |
130 | 130 | <td class='ColumnHeader'>Kwota za wykonaną pracę:</td> |
131 | 131 | <td class='ColumnHeader'>Bonus:</td> |
132 | + <td class='ColumnHeader'>Współdzielone ramy:</td> | |
132 | 133 | <td class='ColumnHeader'>Wykonane ramy:</td> |
133 | 134 | <td class='ColumnHeader'>Poprawnie wykonane ramy:</td> |
134 | 135 | <td class='ColumnHeader'>Częściowo poprawnie wykonane ramy:</td> |
... | ... | @@ -142,6 +143,7 @@ |
142 | 143 | <tr> |
143 | 144 | <td>{{semantics_work_stats.earned_cash}} zł</td> |
144 | 145 | <td>{{semantics_work_stats.bonus_cash}} zł</td> |
146 | + <td>{{semantics_work_stats.related_frames}}</td> | |
145 | 147 | <td>{{semantics_work_stats.made_frames}}</td> |
146 | 148 | <td>{{semantics_work_stats.prop_frames}}</td> |
147 | 149 | <td>{{semantics_work_stats.part_prop_frames}}</td> |
... | ... |
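The widened payments table expects one new counter, semantics_work_stats.related_frames ("Współdzielone ramy", frames shared with related entries), next to the existing payment fields. A minimal sketch of the context entry the view has to supply; the key names come from the template above, the values are made up:
--------------------------------------------
# -*- coding:utf-8 -*-
semantics_work_stats = {
    'earned_cash': 120.50,    # payment for completed work
    'bonus_cash': 15.00,      # bonus
    'related_frames': 4,      # shared frames (the new column)
    'made_frames': 31,        # completed frames
    'prop_frames': 25,        # correctly completed frames
    'part_prop_frames': 3,    # partially correct frames
}
--------------------------------------------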
semantics/management/commands/find_hanging_connections.py
... | ... | @@ -16,7 +16,7 @@ class Command(BaseCommand): |
16 | 16 | def find_hanging_connections(): |
17 | 17 | lemmas = Lemma.objects.filter(old=False).order_by('entry_obj__name') |
18 | 18 | for lemma in lemmas: |
19 | - frames = lemma.entry_obj.actual_frames() | |
19 | + frames = lemma.entry_obj.visible_frames() | |
20 | 20 | for frame in frames: |
21 | 21 | for compl in frame.complements.all(): |
22 | 22 | for real in compl.realizations.all(): |
... | ... |
semantics/phraseology_generator.py
... | ... | @@ -124,7 +124,7 @@ def get_nps(cases, number, nouns, atr): |
124 | 124 | filtered = [] |
125 | 125 | for option in options: |
126 | 126 | (orth, tag) = option |
127 | - if u':' + case in tag: | |
127 | + if u':' + case in tag or u'.' + case in tag: | |
128 | 128 | filtered.append(option) |
129 | 129 | options_temp += filtered |
130 | 130 | else: |
... | ... |
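The extra u'.' + case test matters because of the morphological tag convention assumed here (NKJP/Morfeusz style): attributes within a tag are separated by colons, while alternative values of a single attribute are separated by dots, e.g. u'subst:sg:gen.acc:m2'. Checking only u':' + case silently drops forms whose case appears among the dot-separated alternatives. A minimal standalone sketch of the fixed filter:
--------------------------------------------
# -*- coding:utf-8 -*-
def matches_case(tag, case):
    # u':acc' catches u'subst:sg:acc:m2'; u'.acc' additionally catches
    # alternative values such as u'subst:sg:gen.acc:m2'
    return u':' + case in tag or u'.' + case in tag

options = [(u'psa', u'subst:sg:gen.acc:m2'), (u'pies', u'subst:sg:nom:m2')]
filtered = [(orth, tag) for (orth, tag) in options if matches_case(tag, u'acc')]
print(filtered)  # [(u'psa', u'subst:sg:gen.acc:m2')]
--------------------------------------------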
semantics/static/js/semantics_connections.js
1 | 1 | var connected = {}; // dictionaries of connections and disconnections between frames and schemas |
2 | 2 | var connected_reverse = {}; |
3 | 3 | |
4 | +function getConnected(frame_id) { /* TODO */ | |
5 | + return []; | |
6 | +} | |
7 | + | |
8 | +function removeFrameConnections(frame_id) { /* TODO */ | |
9 | + return; | |
10 | +} | |
11 | + | |
12 | + | |
4 | 13 | function memorizeConnections(arguments_connected, frames_connection){ |
5 | 14 | connected = arguments_connected; |
6 | 15 | connected_reverse = frames_connection; |
... | ... |
semantics/static/js/semantics_frames.js
... | ... | @@ -4,23 +4,6 @@ var free_complement_id = -1; |
4 | 4 | var free_frame_id = -1; |
5 | 5 | var free_preference_id = -1; |
6 | 6 | var semantic_opinion_vals = []; |
7 | -var connected_entries = [] | |
8 | - | |
9 | - | |
10 | -function getConnected(frames_display) { | |
11 | - var i, j; | |
12 | - for (i = 0; i < frames_display.length; i++) { | |
13 | - lexical_units_num.push(frames_display[i].lexical_units); | |
14 | - lexical_units_frames[i] = []; | |
15 | - | |
16 | - var frames = frames_display[i].frames; | |
17 | - for (j = 0; j < frames.length; j++) { | |
18 | - frame_content[frames[j].frame_id] = frames[j]; | |
19 | - frame_localization[frames[j].frame_id] = {"units": i, "position": j}; | |
20 | - lexical_units_frames[i].push(frames[j].frame_id); | |
21 | - } | |
22 | - } | |
23 | -} | |
24 | 7 | |
25 | 8 | function selectedFrame() { |
26 | 9 | return "frame_" + highlighted_id + "_"; |
... | ... | @@ -74,6 +57,7 @@ function getDisplay(visibility, checkboxes) { |
74 | 57 | display = ''; |
75 | 58 | var i, j; |
76 | 59 | for ( i = 0; i < lexical_units_num.length; i++) { |
60 | + if (lexical_units_frames[i].length > 0){ | |
77 | 61 | var visible = frame_content[lexical_units_frames[i][0]].visible; |
78 | 62 | if (visible == visibility) { |
79 | 63 | display += '<div id="lus_' + i + '_">'; |
... | ... | @@ -121,11 +105,25 @@ function getDisplay(visibility, checkboxes) { |
121 | 105 | display += '</div>'; |
122 | 106 | |
123 | 107 | display += '</div>'; |
124 | - } | |
108 | + } | |
109 | + } | |
125 | 110 | } |
126 | 111 | return display; |
127 | 112 | } |
128 | 113 | |
114 | +function reloadFrames(lid){ | |
115 | + $.getJSON(ajax_frames, {lemma_id: lid}, function(data){ | |
116 | + lexical_units_frames = []; | |
117 | + getFrames(data.frames_display); | |
118 | + memorizeConnections(data.connections.connected, | |
119 | + data.connections.connected_reverse); | |
120 | + alternationCounts(data.alternations); | |
121 | + displayFrames(); | |
122 | + $("#semantic-frames-count").empty(); | |
123 | + $("#semantic-frames-count").append(data.frames_count); | |
124 | + }); | |
125 | +} | |
126 | + | |
129 | 127 | function displayFrames(){ |
130 | 128 | $("#frames").html(getDisplay(true, false)); |
131 | 129 | } |
... | ... | @@ -204,7 +202,7 @@ function newFrame(units) { |
204 | 202 | } |
205 | 203 | lexical_units_num.push(units_list); |
206 | 204 | |
207 | - frame_content[free_frame_id] = {colspan: "1", rowspan: "1", status: "brak", frame_id: "" + x, display: {preferences:[[]], roles:[]}, lemma: {include: false}}; | |
205 | + frame_content[free_frame_id] = {colspan: "1", rowspan: "1", status: "brak", frame_id: "" + x, display: {preferences:[[]], roles:[]}, lemma: {include: false}, local:true, visible:true}; | |
208 | 206 | frames_operations.push({operation: "create_frame", meanings: units_list, id: x}); |
209 | 207 | free_frame_id = free_frame_id - 1; |
210 | 208 | } |
... | ... |
semantics/static/js/semantics_lexical_units.js
... | ... | @@ -4,6 +4,7 @@ var free_luid = -1; // these ids will be temporarily given to new le 
4 | 4 | var free_sense; // what sense should be given to new lexical unit |
5 | 5 | var lexical_units_frames = []; |
6 | 6 | var lexical_units_num = []; |
7 | +var part_of_speech; | |
7 | 8 | |
8 | 9 | // store lexical units from database |
9 | 10 | function memorizeLexicalUnits(input_lexical_units) { |
... | ... | @@ -18,8 +19,10 @@ function memorizeLexicalUnits(input_lexical_units) { |
18 | 19 | function basicLexicalUnitsData(info){ |
19 | 20 | base = info.base; |
20 | 21 | free_sense = info.sense; |
22 | + part_of_speech = info.pos; | |
21 | 23 | } |
22 | 24 | |
25 | + | |
23 | 26 | // create new lexical_unit |
24 | 27 | function createLexicalUnit(refl, glossa, relation, to) { |
25 | 28 | |
... | ... | @@ -34,7 +37,7 @@ function createLexicalUnit(refl, glossa, relation, to) { |
34 | 37 | refl_text = ""; |
35 | 38 | } |
36 | 39 | |
37 | - var lu = {base: base + refl_text, glossa: "" + glossa, definition: "", id: free_luid, luid: -1, refl: refl, glossa: glossa, pos: "czasownik", sense: free_sense, relation: relation, to: to, location: ""}; | |
40 | + var lu = {base: base + refl_text, glossa: "" + glossa, definition: "", id: free_luid, luid: -1, refl: refl, pos: part_of_speech, sense: free_sense, relation: relation, to: to, location: ""}; | 
38 | 41 | var operation = {operation: 'add_unit', unit:lu}; |
39 | 42 | lexical_units.push(lu); |
40 | 43 | lexical_unit_examples[free_luid] = [] |
... | ... | @@ -225,18 +228,20 @@ function getMeaningsSelectionForFrame(frame_id) { |
225 | 228 | sid_alt = rows[j].split('_'); |
226 | 229 | var sch = "schema_" + sid_alt[0] + "_"; |
227 | 230 | var k; |
228 | - for (k = 0; k < schemas_content[sch].display.arguments[0].length; k++) { | |
229 | - var proper = schemas_content[sch].display.arguments[0][k].csv_id + "alt_" + sid_alt[1] + "_"; | |
230 | - if (connected[lem].indexOf(proper) != -1) { | |
231 | - if (schemas_content[sch].display.arguments[0][k].vrb != null && | |
232 | - schemas_content[sch].display.arguments[0][k].vrb.length > 0) { | |
233 | - pre = pre.concat(schemas_content[sch].display.arguments[0][k].lex); | |
234 | - vrb = schemas_content[sch].display.arguments[0][k].vrb; | |
235 | - } else { | |
236 | - options.push(schemas_content[sch].display.arguments[0][k].lex); | |
237 | - } | |
238 | - } | |
239 | - } | |
231 | + if (typeof(schemas_content[sch]) != 'undefined'){ | |
232 | + for (k = 0; k < schemas_content[sch].display.arguments[0].length; k++) { | |
233 | + var proper = schemas_content[sch].display.arguments[0][k].csv_id + "alt_" + sid_alt[1] + "_"; | |
234 | + if (connected[lem].indexOf(proper) != -1) { | |
235 | + if (schemas_content[sch].display.arguments[0][k].vrb != null && | |
236 | + schemas_content[sch].display.arguments[0][k].vrb.length > 0) { | |
237 | + pre = pre.concat(schemas_content[sch].display.arguments[0][k].lex); | |
238 | + vrb = schemas_content[sch].display.arguments[0][k].vrb; | |
239 | + } else { | |
240 | + options.push(schemas_content[sch].display.arguments[0][k].lex); | |
241 | + } | |
242 | + } | |
243 | + } | |
244 | + } | |
240 | 245 | if (vrb.length == 0) { |
241 | 246 | var lex = {lemma: [base], pre: pre, args: options}; |
242 | 247 | if (hasRefl(sch)) { |
... | ... | @@ -441,7 +446,7 @@ function getLexicalUnit(luid) { |
441 | 446 | |
442 | 447 | |
443 | 448 | function addPhraseologicalUnit(mwe, glossa, relation, to) { |
444 | - var lu = {base: mwe, glossa: "" + glossa, definition: "", id: free_luid, luid: -1, refl: "false", glossa: glossa, pos: "czasownik", sense: "A", relation: relation, to: to, location: ""}; | |
449 | + var lu = {base: mwe, glossa: "" + glossa, definition: "", id: free_luid, luid: -1, refl: "false", pos: part_of_speech, sense: "A", relation: relation, to: to, location: ""}; | 
445 | 450 | var operation = {operation: 'add_unit', unit:lu}; |
446 | 451 | lexical_units.push(lu); |
447 | 452 | lexical_unit_examples[free_luid] = []; |
... | ... | @@ -451,7 +456,7 @@ function addPhraseologicalUnit(mwe, glossa, relation, to) { |
451 | 456 | } |
452 | 457 | |
453 | 458 | function unlockPhraseologicalUnit(mwe) { |
454 | - var lu = {base: mwe.lu.split('-')[0], glossa: "", definition: "", id: mwe.id, luid: mwe.luid, refl: false, glossa: "", pos: "czasownik", sense: mwe.lu.split('-')[1], relation: 2, to: -1, location: ""}; | |
459 | + var lu = {base: mwe.lu.split('-')[0], glossa: "", definition: "", id: mwe.id, luid: mwe.luid, refl: false, pos: part_of_speech, sense: mwe.lu.split('-')[1], relation: 2, to: -1, location: ""}; | 
455 | 460 | lexical_units.push(lu); |
456 | 461 | return mwe.id; |
457 | 462 | } |
... | ... |
semantics/static/js/semantics_view.js
... | ... | @@ -56,7 +56,7 @@ function changeSynsetInput() { |
56 | 56 | } |
57 | 57 | |
58 | 58 | function openMeaningsMenu() { |
59 | - if(window.lemma_id != getActualLemmaId(window.lemma_id)){ | |
59 | + if(window.lemma_id != getActualLemmaId(window.lemma_id)){ | |
60 | 60 | alertOldSchemas(); |
61 | 61 | } |
62 | 62 | else { |
... | ... | @@ -185,7 +185,7 @@ function displayMeanings() { |
185 | 185 | e.preventDefault(); |
186 | 186 | if (parseInt(f.relation) != 2) { |
187 | 187 | $.prompt.removeState('state12'); |
188 | - $.prompt.addState('state12', {title: 'Znaczenia', html: getFormContent(f) + getRelation(f) + "w stosunku do:<br />" + getSynsets(f.context, "czasownik"), buttons: {Wstecz: -1, Anuluj: 0, Zatwierdź: 1}, focus: 1, submit: submitSynsetSelection}, 'state11'); | |
188 | + $.prompt.addState('state12', {title: 'Znaczenia', html: getFormContent(f) + getRelation(f) + "w stosunku do:<br />" + getSynsets(f.context, part_of_speech), buttons: {Wstecz: -1, Anuluj: 0, Zatwierdź: 1}, focus: 1, submit: submitSynsetSelection}, 'state11'); | |
189 | 189 | $.prompt.goToState('state12'); |
190 | 190 | } else { |
191 | 191 | createLexicalUnit(f.refl, f.glossa, f.relation, -1); |
... | ... | @@ -207,12 +207,16 @@ function displayMeanings() { |
207 | 207 | |
208 | 208 | var getFormContent = |
209 | 209 | function(f) { |
210 | - var result = '<label>Zwrotny <input type="checkbox" name="refl" value="true" disabled'; | |
210 | + var result = ''; | |
211 | + if (part_of_speech == 'czasownik') { | |
212 | + result += '<label>Zwrotny <input type="checkbox" name="refl" value="true" disabled'; | |
211 | 213 | if (f.refl == 'true') { |
212 | 214 | result += ' checked'; |
213 | - } | |
214 | - result += '></label><br /><label>Glossa <input type="text" name="glossa" value="' + f.glossa + '" disabled></label><br />'; | |
215 | - return result; | |
215 | + } | |
216 | + result += '></label><br />'; | |
217 | + } | |
218 | + result += '<label>Glossa <input type="text" name="glossa" value="' + f.glossa + '" disabled></label><br />'; | |
219 | + return result; | |
216 | 220 | }; |
217 | 221 | |
218 | 222 | var getRelation = |
... | ... | @@ -262,6 +266,16 @@ function displayMeanings() { |
262 | 266 | } |
263 | 267 | }; |
264 | 268 | |
269 | + var addMeaningsHtml = | |
270 | + function() { | |
271 | + var result = ''; | |
272 | + if (part_of_speech == 'czasownik') { | |
273 | + result += '<label>Zwrotny <input type="checkbox" name="refl" value="true"></label><br />'; | |
274 | + } | |
275 | + result += '<label>Glossa <input type="text" name="glossa" value=""></label><br />'; | |
276 | + return result; | |
277 | + }; | |
278 | + | |
265 | 279 | var display_meanings = { |
266 | 280 | state0: { |
267 | 281 | title: 'Znaczenia', |
... | ... | @@ -272,8 +286,7 @@ function displayMeanings() { |
272 | 286 | }, |
273 | 287 | state1: { |
274 | 288 | title: 'Dodawanie znaczenia', |
275 | - html: '<label>Zwrotny <input type="checkbox" name="refl" value="true"></label><br />'+ | |
276 | - '<label>Glossa <input type="text" name="glossa" value=""></label><br />', | |
289 | + html: addMeaningsHtml(), | |
277 | 290 | buttons: { "Anuluj": -1, "Potwierdź": 1 }, |
278 | 291 | focus: 1, |
279 | 292 | submit:function(e,v,m,f){ |
... | ... | @@ -339,7 +352,13 @@ function displayMeanings() { |
339 | 352 | if (change == true) { |
340 | 353 | alertSemantics(); |
341 | 354 | } else { |
342 | - $.prompt(display_meanings); | |
355 | + $.prompt(display_meanings, | |
356 | + {close:function(e){ | |
357 | + window.units_operations = []; | |
358 | + $.getJSON(ajax_units, {lemma_id: window.lemma_id}, function(data){ | |
359 | + memorizeLexicalUnits(data.lexical_units); | |
360 | + basicLexicalUnitsData(data.informations); | |
361 | + })}}); | |
343 | 362 | } |
344 | 363 | |
345 | 364 | } |
... | ... | @@ -529,7 +548,7 @@ function changeLexicalUnits() { |
529 | 548 | { |
530 | 549 | title: 'Znaczenia', |
531 | 550 | html: '<label>Glossa <input type="text" name="glossa" value="' + gloss + '" disabled></label><br />' + |
532 | - getRelation(f) + "w stosunku do:<br />" + getSynsets(f.context, "czasownik"), | |
551 | + getRelation(f) + "w stosunku do:<br />" + getSynsets(f.context, part_of_speech), | |
533 | 552 | buttons: {Wstecz: -1, Anuluj: 0, Zatwierdź: 1}, |
534 | 553 | focus: 1, |
535 | 554 | submit: addPhraseology |
... | ... | @@ -818,7 +837,7 @@ function removeFromFrame() { |
818 | 837 | // highlighted_id = "26" |
819 | 838 | |
820 | 839 | removeFrame(highlighted_id); |
821 | - | |
840 | + | |
822 | 841 | frameClick("frame_" + highlighted_id + "_"); |
823 | 842 | displayFrames(); |
824 | 843 | |
... | ... | @@ -936,7 +955,10 @@ function frameClick(clicked_id) { |
936 | 955 | } else { |
937 | 956 | if (clicked_id.split('_').length == 3) { // frame click |
938 | 957 | var frame_id = clicked_id.split('_')[1]; |
939 | - var local = frame_content[frame_id].local; | |
958 | + var local = true; | |
959 | + if (frame_content[frame_id] != null){ | |
960 | + local = frame_content[frame_id].local; | |
961 | + } | |
940 | 962 | |
941 | 963 | if (highlighted_id != "") { |
942 | 964 | deselect(); |
... | ... | @@ -972,7 +994,10 @@ function frameClick(clicked_id) { |
972 | 994 | } |
973 | 995 | } else if (clicked_id.split('_')[4] == 'lemma') { // part of lemma click |
974 | 996 | frame_id = clicked_id.split('_')[1] |
975 | - var local = frame_content[frame_id].local; | |
997 | + var local = true; | |
998 | + if (frame_content[frame_id] != null){ | |
999 | + local = frame_content[frame_id].local; | |
1000 | + } | |
976 | 1001 | |
977 | 1002 | if (highlighted_id != "") { |
978 | 1003 | deselect(); |
... | ... | @@ -1010,7 +1035,10 @@ function frameClick(clicked_id) { |
1010 | 1035 | } |
1011 | 1036 | } else { // argument click |
1012 | 1037 | frame_id = clicked_id.split('_')[1] |
1013 | - var local = frame_content[frame_id].local; | |
1038 | + var local = true; | |
1039 | + if (frame_content[frame_id] != null){ | |
1040 | + local = frame_content[frame_id].local; | |
1041 | + } | |
1014 | 1042 | |
1015 | 1043 | if (highlighted_id != "") { |
1016 | 1044 | deselect(); |
... | ... |
semantics/validation.py
... | ... | @@ -8,15 +8,15 @@ from semantics.utils import get_structural_matching_frame |
8 | 8 | |
9 | 9 | def validate_frames(lemma_id): |
10 | 10 | lemma = Lemma.objects.get(id=lemma_id) |
11 | - actual_frames = lemma.entry_obj.actual_frames() | |
11 | + visible_frames = lemma.entry_obj.visible_frames() | |
12 | 12 | error_msg = u'' |
13 | - for frame in actual_frames.all(): | |
14 | - error_msg = frame_valid(lemma, frame, actual_frames) | |
13 | + for frame in visible_frames.all(): | |
14 | + error_msg = frame_valid(lemma, frame, visible_frames) | |
15 | 15 | if error_msg: |
16 | 16 | break |
17 | 17 | return error_msg |
18 | 18 | |
19 | -def frame_valid(lemma, frame, actual_frames): | |
19 | +def frame_valid(lemma, frame, frames): | |
20 | 20 | error_msg = '' |
21 | 21 | complements = frame.complements.all() |
22 | 22 | if not arguments_exists(complements): |
... | ... | @@ -31,9 +31,9 @@ def frame_valid(lemma, frame, actual_frames): |
31 | 31 | error_msg = u'Semantyka: Rama semantyczna %d zawiera argumenty, które nie są powiązane z żadnym schematem.' % frame.id |
32 | 32 | elif not preferences_selected(complements): |
33 | 33 | error_msg = u'Semantyka: Rama semantyczna %d zawiera argumenty bez zdefiniowanych preferencji selekcyjnych.' % frame.id |
34 | - elif not examples_added(frame): | |
34 | + elif not examples_added(lemma, frame): | |
35 | 35 | error_msg = u'Semantyka: Rama semantyczna %d nie ma dopiętych przykładów.' % frame.id |
36 | - elif duplicates_exists(frame, actual_frames): | |
36 | + elif duplicates_exists(frame, frames): | |
37 | 37 | error_msg = u'Semantyka: Rama semantyczna %d posiada duplikaty.' % frame.id |
38 | 38 | elif not schemas_reflex_agreed(lemma, frame): |
39 | 39 | error_msg = u'Semantyka: Rama semantyczna %d ma dopięte elementy o niezgodnej zwrotności.' % frame.id |
... | ... | @@ -101,14 +101,20 @@ def preference_valid(complement): |
101 | 101 | return True |
102 | 102 | return False |
103 | 103 | |
104 | -def examples_added(frame): | |
104 | +def examples_added(lemma, frame): | |
105 | + local_examples = lemma.nkjp_examples.all() | |
105 | 106 | for lexical_unit in frame.lexical_units.all(): |
106 | - if LexicalUnitExamples.objects.filter(lexical_unit=lexical_unit).exists(): | |
107 | + if LexicalUnitExamples.objects.filter(lexical_unit=lexical_unit, | |
108 | + example__in=local_examples).exists(): | |
107 | 109 | return True |
108 | 110 | return False |
109 | 111 | |
110 | -def duplicates_exists(frame, actual_frames): | |
111 | - frames_to_check = actual_frames.exclude(id=frame.id) | |
112 | +def duplicates_exists(frame, frames): | |
113 | + # phraseological frames are not checked for duplicates | 
114 | + if frame.complements.filter(roles__role='Lemma').exists(): | |
115 | + return False | |
116 | + | |
117 | + frames_to_check = frames.exclude(id=frame.id) | |
112 | 118 | if get_structural_matching_frame(frames_to_check, frame): |
113 | 119 | return True |
114 | 120 | return False |
... | ... | @@ -236,7 +242,7 @@ def validate_schemas(lemma_id): |
236 | 242 | return error_msg |
237 | 243 | |
238 | 244 | def all_schemas_used(lemma): |
239 | - frames = lemma.entry_obj.actual_frames() | |
245 | + frames = lemma.entry_obj.visible_frames() | |
240 | 246 | schemas = lemma.frames |
241 | 247 | for schema in schemas.all(): |
242 | 248 | if not schema_is_bad(lemma, schema) and not schema_used(schema, frames): |
... | ... | @@ -282,4 +288,3 @@ def hanging_meaning(lexical_unit): |
282 | 288 | if lexical_unit.luid < 0 and not lexical_unit.actual_frames().exists(): |
283 | 289 | return True |
284 | 290 | return False |
285 | - | |
286 | 291 | \ No newline at end of file |
... | ... |
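Two behavioural changes above are easy to miss: examples_added() now counts only examples attached to the lemma's own NKJP example set, and duplicate detection is skipped for phraseological frames (those carrying a 'Lemma' role). A framework-free sketch of the first check, with plain Python sets standing in for the Django querysets:
--------------------------------------------
def examples_added(lemma_example_ids, unit_example_ids):
    # a frame counts as exemplified only if one of its lexical units is
    # linked to an example belonging to this lemma, not to a related entry
    return any(ex_id in lemma_example_ids
               for unit in unit_example_ids
               for ex_id in unit)

print(examples_added({1, 2, 3}, [[7], [8]]))  # False: examples live elsewhere
print(examples_added({1, 2, 3}, [[7], [2]]))  # True: example 2 is local
--------------------------------------------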
semantics/views.py
... | ... | @@ -92,10 +92,7 @@ def create_frames_context(lemma_id, user): |
92 | 92 | frames_dict = {} |
93 | 93 | frame_units = {} |
94 | 94 | |
95 | - frames = lemma.entry_obj.actual_frames() | |
96 | - for entry in connected: | |
97 | - new_frames = entry.actual_frames() | |
98 | - frames |= new_frames | |
95 | + frames = lemma.entry_obj.all_frames() | |
99 | 96 | |
100 | 97 | for frame in frames: |
101 | 98 | alternations[frame.id] = {} |
... | ... | @@ -129,11 +126,13 @@ def create_frames_context(lemma_id, user): |
129 | 126 | lemma_entry = lemma.entry_obj |
130 | 127 | if (lu_entry.name, lu_entry.pos.tag) == (lemma_entry.name, lemma_entry.pos.tag): |
131 | 128 | frame_display["visible"] = True |
129 | + | |
132 | 130 | for frame in type_frames[t]: |
133 | 131 | frame_entry = frame.entry |
134 | 132 | lemma_entry = lemma.entry_obj |
135 | 133 | if (frame_entry.name, frame_entry.pos.tag) == (lemma_entry.name, lemma_entry.pos.tag): |
136 | 134 | frame_display["local"] = True |
135 | + frame_display["visible"] = True | |
137 | 136 | else: |
138 | 137 | frame_display["local"] = False |
139 | 138 | |
... | ... | @@ -223,6 +222,8 @@ def create_frames_context(lemma_id, user): |
223 | 222 | else: |
224 | 223 | lemma_info = {"include": False} |
225 | 224 | frame_display["frames"].append({"frame_id": str(frame.id), "colspan": str(max(len(frame_roles), 1)), "rowspan": str(frame_preferences_rowspan), "status": status, "display": display, "lemma": lemma_info}) |
225 | + | |
226 | + schemata_ids = [f.id for f in lemma.frames.all()] | |
226 | 227 | |
227 | 228 | for complement, complement_class in zip(frame_complements, frame_ids): |
228 | 229 | if complement_class not in complement_arguments: |
... | ... | @@ -230,29 +231,40 @@ def create_frames_context(lemma_id, user): |
230 | 231 | |
231 | 232 | for schema_position in complement.realizations.all(): |
232 | 233 | schema = schema_position.frame |
233 | - position = schema_position.position | |
234 | - argument = schema_position.argument | |
235 | - alternation = schema_position.alternation | |
236 | - realization_id = u'schema_' + str(schema.id) + u'_pos_' + str(position.id) + '_arg_' + str(argument.id) + '_' + 'alt_' + str(alternation) + '_' | |
237 | - complement_arguments[complement_class].append(realization_id) | |
238 | - if realization_id not in arguments_frame_connected: | |
239 | - arguments_frame_connected[realization_id] = [] | |
240 | - arguments_frame_connected[realization_id].append('frame_' + str(frame.id) + '_') | |
241 | - if schema.id in alternations[frame.id]: | |
242 | - alternations[frame.id][schema.id] = max(alternations[frame.id][schema.id], alternation) | |
243 | - else: | |
244 | - alternations[frame.id][schema.id] = alternation | |
245 | - # alternations[frame.id] = {} | |
234 | + if schema.id in schemata_ids: | |
235 | + position = schema_position.position | |
236 | + argument = schema_position.argument | |
237 | + alternation = schema_position.alternation | |
238 | + realization_id = u'schema_' + str(schema.id) + u'_pos_' + str(position.id) + '_arg_' + str(argument.id) + '_' + 'alt_' + str(alternation) + '_' | |
239 | + complement_arguments[complement_class].append(realization_id) | |
240 | + if realization_id not in arguments_frame_connected: | |
241 | + arguments_frame_connected[realization_id] = [] | |
242 | + arguments_frame_connected[realization_id].append('frame_' + str(frame.id) + '_') | |
243 | + if schema.id in alternations[frame.id]: | |
244 | + alternations[frame.id][schema.id] = max(alternations[frame.id][schema.id], alternation) | |
245 | + else: | |
246 | + alternations[frame.id][schema.id] = alternation | |
247 | + # alternations[frame.id] = {} | |
246 | 248 | |
247 | 249 | |
248 | 250 | frames_display.append(frame_display) |
249 | 251 | |
250 | 252 | # ala["ma"] = "kot" |
253 | + | |
254 | + frames_count_local = 0 | |
255 | + frames_count_imported = 0 | |
256 | + for frame in frames_display: | |
257 | + if frame['visible']: | |
258 | + if frame['local']: | |
259 | + frames_count_local += 1 | |
260 | + else: | |
261 | + frames_count_imported += 1 | |
262 | + frames_count = str(frames_count_local) + "+" + str(frames_count_imported) | |
251 | 263 | |
252 | 264 | context = { |
253 | 265 | 'frames_display': frames_display, |
254 | 266 | 'connections': {'connected': complement_arguments, 'connected_reverse': arguments_frame_connected}, |
255 | - 'frames_count': lemma.entry_obj.actual_frames().count(), | |
267 | + 'frames_count': frames_count, | |
256 | 268 | 'alternations': alternations |
257 | 269 | } |
258 | 270 | |
... | ... | @@ -288,12 +300,23 @@ def ajax_units(request, lemma_id): |
288 | 300 | |
289 | 301 | def create_units_context(lemma_id): |
290 | 302 | lemma = Lemma.objects.get(id=lemma_id) |
303 | + pos_en = lemma.entry_obj.pos.tag | |
304 | + pos = 'brak' | |
305 | + if pos_en == 'adj': | |
306 | + pos = 'przymiotnik' | |
307 | + elif pos_en == 'noun': | |
308 | + pos = 'rzeczownik' | |
309 | + elif pos_en == 'adv': | |
310 | + pos = 'przysłówek' | |
311 | + elif pos_en == 'verb': | |
312 | + pos = 'czasownik' | |
291 | 313 | lexical_units = lemma.entry_obj.meanings.order_by('base', 'sense') |
292 | 314 | # lexical_units = LexicalUnit.objects.filter(Q(base__startswith=lemma.entry + u' ', pos="czasownik")|Q(base__contains=u' '+lemma.entry+u' ', pos="czasownik")|Q(base__endswith=u' '+lemma.entry, pos="czasownik")|Q(base=lemma.entry, pos="czasownik")).order_by('base', 'sense') |
293 | 315 | |
294 | 316 | context = { |
295 | 317 | 'lexical_units': [{"id": lu.id, "luid": lu.luid, "base": lu.base, "sense": lu.sense, "pos": lu.pos, "glossa": lu.glossa, "definition": lu.definition, "location": location(lu)} for lu in lexical_units], |
296 | - 'informations': {'base': lemma.entry, 'sense': max(['A'] + [chr(ord(lu.sense) + 1) for lu in lexical_units.filter(luid=-1)])}, # TODO: 2 different free senses for with/whthout 'się' | |
318 | + 'informations': {'base': lemma.entry, 'sense': max(['A'] + [chr(ord(lu.sense) + 1) for lu in lexical_units.filter(luid=-1)]), # TODO: 2 different free senses for with/without 'się' | |
319 | + 'pos': pos} | |
297 | 320 | } |
298 | 321 | return context |
299 | 322 | |
... | ... |
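Two conventions introduced above are worth spelling out: create_frames_context() now reports the frame counter as a "local+imported" string over visible frames, and create_units_context() maps the entry's English POS tag to the Polish label used by the semantics UI. A standalone sketch of both, assuming the same field values as in the view code:
--------------------------------------------
# -*- coding:utf-8 -*-
POS_LABELS = {'adj': u'przymiotnik', 'noun': u'rzeczownik',
              'adv': u'przysłówek', 'verb': u'czasownik'}

def pos_label(pos_en):
    # unknown tags fall back to u'brak' ("none")
    return POS_LABELS.get(pos_en, u'brak')

def frames_count_label(frames_display):
    # only visible frames are counted, entry-local and imported separately
    local = sum(1 for f in frames_display if f['visible'] and f['local'])
    imported = sum(1 for f in frames_display if f['visible'] and not f['local'])
    return '%d+%d' % (local, imported)

print(pos_label('noun'))  # rzeczownik
print(frames_count_label([{'visible': True, 'local': True},
                          {'visible': True, 'local': False},
                          {'visible': False, 'local': True}]))  # 1+1
--------------------------------------------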