diff --git a/LICENSE b/LICENSE index 68035cc..6baacb7 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2012, Bartłomiej Nitoń +Copyright (c) 2015, Bartłomiej Nitoń All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: diff --git a/accounts/admin.py b/accounts/admin.py index a148c51..ff6f8f4 100644 --- a/accounts/admin.py +++ b/accounts/admin.py @@ -39,7 +39,8 @@ class RealizedPhraseologyAdmin(admin.ModelAdmin): readonly_fields = ('date',) search_fields = ('lemma__entry',) -class RealizedSemanticsAdmin(admin.ModelAdmin): +class RealizedSemanticsAdmin(admin.ModelAdmin): + exclude = ('entry',) list_filter = ('status', 'bonus',) search_fields = ('entry__name',) diff --git a/common/js_to_obj.py b/common/js_to_obj.py index 7fa523d..ba9d9fa 100644 --- a/common/js_to_obj.py +++ b/common/js_to_obj.py @@ -43,7 +43,7 @@ def jsFrameToObj(frame, lemma_entry): for position in frame['positions']: if len(position['arguments']) > 0: pos_obj = jsPosToObj(position) - positions_objs.append(pos_obj) + positions_objs.append(pos_obj) sorted_positions = [] sorted_pos_dict = sortPositions(positions_objs) @@ -125,7 +125,7 @@ def jsArgToObj(argument): arg_obj = Argument.objects.get(text_rep=argument['text_rep']) return arg_obj -def frameObjToSerializableDict(lemma, frame): +def frameObjToSerializableDict(lemma, frame, with_connections=False): frame_opinion = '' frame_opinions_tab = lemma.frame_opinions.filter(frame__text_rep=frame.text_rep) if frame_opinions_tab: @@ -155,11 +155,16 @@ def frameObjToSerializableDict(lemma, frame): 'tooltip' : ''} for argument in position['arguments']: + connections = [] + if with_connections: + entry = lemma.entry_obj + connections = entry.matching_connections(frame, position['position'], argument) argument_dict = { 'id' : argument.id, 'text_rep': argument.text_rep, 'type' : argument.type, 'error' : False, - 'tooltip' : ''} + 'tooltip' : '', + 'connections': connections} position_dict['arguments'].append(argument_dict) frame_dict['positions'].append(position_dict) @@ -168,4 +173,4 @@ def frameObjToSerializableDict(lemma, frame): for char in frame_char_objs: frame_dict['characteristics'].append(char.value.value) - return frame_dict + return frame_dict diff --git a/dictionary/ajax_lemma_status.py b/dictionary/ajax_lemma_status.py index a0e5256..546a485 100644 --- a/dictionary/ajax_lemma_status.py +++ b/dictionary/ajax_lemma_status.py @@ -63,7 +63,8 @@ def get_lemma_status(request, id): return {'lemma': selected_lemma, 'abort_status': abort_status, 'next_statuses': next_statuses, - 'pos': pos} + 'pos': pos, + 'status_changes': selected_lemma.status_history.order_by('-date')} def phraseologic_status_changes(user, selected_lemma): phraseologic_change = False diff --git a/dictionary/ajax_lemma_view.py b/dictionary/ajax_lemma_view.py index ea65140..1d10f9d 100644 --- a/dictionary/ajax_lemma_view.py +++ b/dictionary/ajax_lemma_view.py @@ -50,15 +50,20 @@ from dictionary.forms import AddPositionForm, FrameForm, Pos_Cat_Form, \ SimilarLemmasNewForm, ChangeUserFunctionForm, \ ExampleOpinionForm, \ FrameConversionForm, CreatePositionForm, AssignPhraseologicFrameForm +from dictionary.saving import connect_example_operation, disconnect_all_examples_operations, \ + get_semantic_operations, update_connections, reconnect_examples, \ + disconnect_example_operation + from common.decorators import render, ajax, AjaxError from common.util import triple_arg_poss from 
dictionary.validation import find_similar_frames, get_all_test_missing_frames, get_aspect_rel_lemmas, \ get_wrong_aspect_frames, validate_B_frames, get_deriv_miss_frames_message, \ validate_phraseology_binded_frames, validate_rule_5, \ validate_examples_and_mark_errors, validate_schemas_and_mark_errors, \ - get_missing_aspects_msg + get_missing_aspects_msg, validate_same_positions_schemata from semantics.models import LexicalUnitExamples + from wordnet.models import LexicalUnit from settings import PROJECT_PATH @@ -160,6 +165,7 @@ def prepareFrameTable(frame): def nkjpExamplesObjToJs(nkjp_examples, user, lemma): example_dict_list = [] + lexical_units = lemma.entry_obj.lexical_units() for example in nkjp_examples: frame = example.frame; frame_table_id = 'frame_'+str(frame.id)+'_' @@ -193,11 +199,21 @@ def nkjpExamplesObjToJs(nkjp_examples, user, lemma): 'opinion' : example.opinion.opinion, 'comment' : comment, 'confirmed' : confirmed, - 'semantic' : example.semantic} + 'semantic' : example.semantic, + 'lexical_unit' : get_example_lexical_unit_id(lexical_units, example)} example_dict_list.append(example_dict) return example_dict_list +def get_example_lexical_unit_id(lexical_units, example): + unit_id = -1 + for lex_unit in lexical_units: + if LexicalUnitExamples.objects.filter(example=example, + lexical_unit=lex_unit).exists(): + unit_id = lex_unit.id + break + return unit_id + def nkjpLemmaExamplesObjToJs(nkjp_examples, user, lemma): example_dict_list = [] for example in nkjp_examples: @@ -223,7 +239,9 @@ def nkjpLemmaExamplesObjToJs(nkjp_examples, user, lemma): 'source' : example.source.source, 'opinion' : example.opinion.opinion, 'comment' : comment, - 'confirmed' : confirmed} + 'confirmed' : confirmed, + 'semantic' : False, + 'lexical_unit' : -1} example_dict_list.append(example_dict) return example_dict_list @@ -369,7 +387,7 @@ def get_new_frames(request, id): serialized_frames = [] for frame in new_frames: - serialized_frames.append(frameObjToSerializableDict(selected_lemma, frame)) + serialized_frames.append(frameObjToSerializableDict(selected_lemma, frame, True)) json_frames = json_encode(serialized_frames) # konwertowanie przykladow na zrozumiale przez java sript @@ -2067,7 +2085,7 @@ def frame_form_submit(request, form_data): text_rep = selected_frame.text_rep json_frame = json_encode(frameObjToSerializableDict(lemma_obj, selected_frame)) json_examples = json_encode([]) - else: + elif form_dict['frame_str']: frame = json_decode(form_dict['frame_str']) positions_objs = [] for position in frame['positions']: @@ -2104,6 +2122,8 @@ def frame_form_submit(request, form_data): frame_id = new_frame_obj.id text_rep = new_frame_obj.text_rep json_examples = json_encode([]) + else: + raise AjaxError('data error') return {'id' : frame_id, 'text_rep': text_rep, @@ -2405,7 +2425,8 @@ def save_new_frames(request, data, id, examples, lemma_examples): for B_frame in old_object.B_frames.all(): new_lemma_ver.B_frames.add(B_frame) - # tworzenie ramek i dolaczanie ich do czasownika + # tworzenie ramek i dolaczanie ich do czasownika + schemata_conversions = [] for frame in frames: frame_obj = jsFrameToObj(frame, new_lemma_ver.entry) # blokuje zapisywanie ramek frazeologicznych bez argumentow frazeologicznych @@ -2424,8 +2445,14 @@ def save_new_frames(request, data, id, examples, lemma_examples): frame_opinion_obj.save() new_lemma_ver.frame_opinions.add(frame_opinion_obj) new_lemma_ver.frames.add(frame_obj) + schemata_conversions.append({'js': frame, 'obj': frame_obj}) + + # reconnect semantics + 
sem_reconnect_operations = get_semantic_operations(new_lemma_ver, schemata_conversions) + update_connections(new_lemma_ver.id, sem_reconnect_operations, request.user) # dodawanie przykladow do ramek + reconnect_examples_operations = disconnect_all_examples_operations(old_object) for example in decoded_examples: frame_obj = jsFrameToObj(example['frame'], new_lemma_ver.entry) # blokuje zapisywanie przykladow z ramek frazeologicznych bez argumentow frazeologicznych @@ -2469,16 +2496,18 @@ def save_new_frames(request, data, id, examples, lemma_examples): for argument in arg_selection['arguments']: try: arg_obj = jsArgToObj(argument) - argument_objs.append(arg_obj) - if len(nkjp_arg_sel_query.all()) > 0: # Q objectem to zalatwic - nkjp_arg_sel_query = nkjp_arg_sel_query.filter(arguments=arg_obj) + if arg_obj not in argument_objs: + argument_objs.append(arg_obj) + if len(nkjp_arg_sel_query.all()) > 0: # Q objectem to zalatwic + nkjp_arg_sel_query = nkjp_arg_sel_query.filter(arguments=arg_obj) except TypeError: pass - + + nkjp_arg_sel_obj = None if len(nkjp_arg_sel_query.all()) > 0: for nkjp_arg_sel in nkjp_arg_sel_query.all(): - if len(nkjp_arg_sel.arguments.all()) == len(argument_objs): + if len(nkjp_arg_sel.arguments.all()) == len(argument_objs): # zrobic list(set(argument_objs)) nkjp_arg_sel_obj = nkjp_arg_sel break if not nkjp_arg_sel_obj: @@ -2518,7 +2547,13 @@ def save_new_frames(request, data, id, examples, lemma_examples): nkjp_example_obj.save() for argument_selection in argument_selections: nkjp_example_obj.arguments.add(argument_selection) - new_lemma_ver.nkjp_examples.add(nkjp_example_obj) + new_lemma_ver.nkjp_examples.add(nkjp_example_obj) + if example['lexical_unit'] > 0: + try: + reconnect_examples_operations.remove(disconnect_example_operation(example, nkjp_example_obj)) + except ValueError: + reconnect_examples_operations.append(connect_example_operation(example, nkjp_example_obj)) + reconnect_examples(reconnect_examples_operations) # dodawanie przykladow nkjp do czasownika for example in decoded_lemma_examples: @@ -2544,23 +2579,23 @@ def save_new_frames(request, data, id, examples, lemma_examples): nkjp_lemma_example_obj.save() new_lemma_ver.lemma_nkjp_examples.add(nkjp_lemma_example_obj) - old_object.locker = None; + old_object.locker = None old_object.save() - new_lemma_ver.locker = None; + new_lemma_ver.locker = None try: new_lemma_ver = Lemma.objects.get(entry=old_object.entry, owner=old_object.owner, vocabulary=old_object.vocabulary, status=old_object.status, old=False) raise AjaxError('concurrent access') except: - new_lemma_ver.old = False; - new_lemma_ver.save(); + new_lemma_ver.old = False + new_lemma_ver.save() return {'id' : new_lemma_ver.id, 'entry' : new_lemma_ver.entry, 'error_message': '', 'frames' : ''} - + ############## WALIDACJA ##################### @ajax(method='post') @@ -2623,6 +2658,7 @@ def validate_new_frames(request, data, id, examples, lemma_examples, message_content += u'\t- %s\n' % (miss_frame.text_rep) message_content += '\n' message_content += deriv_miss_frames_msg + message_content += validate_same_positions_schemata(old_object) frames_to_merge = find_similar_frames(old_object.frames.all()) if len(frames_to_merge) > 0: message_content += u'Sugerowane jest połączenie poniższych schematów, zawierają one często koordynujące się typy fraz:\n' @@ -3157,3 +3193,21 @@ def delete_user(request, user_id): def deselect_preview_tab(request): request.session['lemma_preview'] = False return {} + +@ajax(method='get') +def get_schemata(request, lemma_id): + 
lemma = Lemma.objects.get(id=lemma_id) + schemata = lemma.frames.order_by('text_rep') + serialized_schemata = [frameObjToSerializableDict(lemma, schema, True) for schema in schemata] + json_schemata = json_encode(serialized_schemata) + return {'schemata': json_schemata, + 'can_modify': user_can_modify(lemma, request.user)} + +@ajax(method='get') +def get_examples(request, lemma_id): + lemma = Lemma.objects.get(id=lemma_id) + examples = lemma.nkjp_examples.all() + examples_js = nkjpExamplesObjToJs(examples, request.user, lemma) + json_examples = json_encode(examples_js) + return {'examples': json_examples, + 'can_modify': user_can_modify(lemma, request.user)} diff --git a/dictionary/models.py b/dictionary/models.py index 8b35f9b..ae8c2ad 100644 --- a/dictionary/models.py +++ b/dictionary/models.py @@ -328,7 +328,12 @@ class StatusChange(Model): blank=True, null=True) def __unicode__(self): - return self.lemma.entry + ':' + str(self.date) + return self.lemma.entry + ':' + str(self.date) + + class Meta: + permissions = ( + ('view_status_changes', u'Może oglądać historię zmian statusu'), + ) class Frame_Opinion(Model): frame = ForeignKey('Frame', db_column='ramka', related_name='opinions', @@ -1387,6 +1392,20 @@ class Entry(Model): frame_ids.extend([f.id for f in lexical_unit.actual_frames()]) return get_model('semantics', 'SemanticFrame').objects.filter(id__in=list(set(frame_ids))) + def matching_connections(self, schema, position, phrase_type): + frames = self.actual_frames() + matching_connections = [] + for frame in frames: + for compl in frame.complements.all(): + matching_realizations = compl.realizations.filter(frame=schema, + position=position, + argument=phrase_type) + if matching_realizations.exists(): + realizations_ids = [real.id for real in matching_realizations.all()] + matching_connections.append({'compl': compl.id, + 'realizations': realizations_ids}) + return matching_connections + def __unicode__(self): return self.name diff --git a/dictionary/saving.py b/dictionary/saving.py new file mode 100644 index 0000000..f0bf09c --- /dev/null +++ b/dictionary/saving.py @@ -0,0 +1,117 @@ +# -*- coding: utf-8 -*- + +from common.js_to_obj import jsPosToObj +from dictionary.models import Argument +from semantics.models import Complement, LexicalUnitExamples +from semantics.saving import modify_frames, update_meanings +from wordnet.models import LexicalUnit + +def get_semantic_operations(lemma, schemata_conversions): + connections = [] + operations = [] + frames = lemma.entry_obj.actual_frames() + for conv in schemata_conversions: + schema_operations = get_reconnect_operations_and_extend_connections(frames, + connections, + conv['obj'], + conv['js']) + operations.extend(schema_operations) + operations.extend(get_disconnect_operations(frames, connections)) + return operations + +def get_reconnect_operations_and_extend_connections(frames, connections, schema, js_schema): + operations = [] + used_poss_ids = [] + for js_position in js_schema['positions']: + if len(js_position['arguments']) > 0: + position = get_position(schema, js_position, used_poss_ids) + for js_phrase_type in js_position['arguments']: + phrase_type = Argument.objects.get(text_rep=js_phrase_type['text_rep']) + new_connection_target = {'schema': schema, + 'position': position, + 'phrase_type': phrase_type} + for conn in js_phrase_type['connections']: + operations.extend(reconnect_operations(frames, conn, new_connection_target)) + conn_dict = next((conn_dict + for conn_dict in connections if conn_dict['compl'] == 
conn['compl']), None) + if conn_dict: + conn_dict['realizations'].extend(conn['realizations']) + else: + connections.append({'compl': conn['compl'], + 'realizations': conn['realizations']}) + return operations + +def get_position(schema, js_position, used_poss_ids): + position = jsPosToObj(js_position) + same_poss = schema.positions.filter(text_rep=position.text_rep) + unused_same_poss = same_poss.exclude(id__in=used_poss_ids).order_by('id') + position = unused_same_poss[0] + used_poss_ids.append(position.id) + return position + +def reconnect_operations(frames, connection, new_target): + operations = [] + compl = Complement.objects.get(id=connection['compl']) + frame = frames.get(complements=compl) + arg_ref = create_argument_ref(frame, compl) + for real_id in connection['realizations']: + realization = compl.realizations.get(id=real_id) + old_phrase_type_ref = create_phrase_type_ref(realization.frame, realization.position, + realization.argument, realization.alternation) + new_phrase_type_ref = create_phrase_type_ref(new_target['schema'], new_target['position'], + new_target['phrase_type'], realization.alternation) + if new_phrase_type_ref != old_phrase_type_ref: + operations.append(create_operation('disconnect', arg_ref, old_phrase_type_ref)) + operations.append(create_operation('connect', arg_ref, new_phrase_type_ref)) + return operations + +def create_argument_ref(frame, complement): + return 'frame_%d_comp_%d_' % (frame.id, complement.id) + +def create_phrase_type_ref(schema, position, phrase_type, alternation): + return 'schema_%d_pos_%d_arg_%d_alt_%d_' % (schema.id, position.id, + phrase_type.id, alternation) + +def create_operation(operation, arg_ref, phrase_type_ref): + return {'operation': operation, 'arg': arg_ref, 'connect': phrase_type_ref} + +def get_disconnect_operations(frames, connections): + operations = [] + for frame in frames: + for compl in frame.complements.all(): + conn_dict = next((conn_dict + for conn_dict in connections if conn_dict['compl'] == compl.id), None) + for real in compl.realizations.all(): + if not conn_dict or not real.id in conn_dict['realizations']: + phrase_type_ref = create_phrase_type_ref(real.frame, real.position, + real.argument, real.alternation) + arg_ref = create_argument_ref(frame, compl) + operations.append(create_operation('disconnect', arg_ref, phrase_type_ref)) + return operations + +def update_connections(lemma_id, reconnect_operations, user): + modify_frames(lemma_id, reconnect_operations, user) + +def disconnect_all_examples_operations(lemma): + operations = [] + lex_units = lemma.entry_obj.lexical_units().all() + for lu in lex_units: + lu_examples = LexicalUnitExamples.objects.filter(lexical_unit=lu) + for lu_ex in lu_examples: + example = lu_ex.example + operations.append({'operation': 'remove_example', + 'unit': lu.id, + 'example': example.id}) + return operations + +def connect_example_operation(example_dict, example_obj): + lu = LexicalUnit.objects.get(id=example_dict['lexical_unit']) + return {'operation': 'add_example', 'unit': lu.id, 'example': example_obj.id} + +def disconnect_example_operation(example_dict, example_obj): + lu = LexicalUnit.objects.get(id=example_dict['lexical_unit']) + return {'operation': 'remove_example', 'unit': lu.id, 'example': example_obj.id} + +def reconnect_examples(operations): + update_meanings(operations) + \ No newline at end of file diff --git a/dictionary/static/js/argument_form_utils.js b/dictionary/static/js/argument_form_utils.js index 0f849ed..86a807b 100644 --- 
a/dictionary/static/js/argument_form_utils.js +++ b/dictionary/static/js/argument_form_utils.js @@ -11,13 +11,14 @@ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND */ // argument class -function argument(id, text_rep, type) +function argument(id, text_rep, type, connections) { this.id = id this.text_rep = text_rep; this.type = type; this.error = false; this.tooltip = ''; + this.connections = connections; } function arguments_form_change(lastActualValueIdx, arg_id, this_form, lemma_id) { @@ -160,7 +161,7 @@ function argument_form_submit() { }, callback: function(result) { - var arg = new argument(result["id"], result["text_rep"], result["type"]); + var arg = new argument(result["id"], result["text_rep"], result["type"], []); // dodawanie argumentow if(this_dialog.dialog( "option" , "title").startsWith("Dodawanie")) diff --git a/dictionary/static/js/lemma-view.js b/dictionary/static/js/lemma-view.js index fd20ab9..a75776c 100644 --- a/dictionary/static/js/lemma-view.js +++ b/dictionary/static/js/lemma-view.js @@ -75,6 +75,15 @@ var nkjp_source_tab = ax_nkjp_source_vals; function alertUserNotAuthenticated() { error_alert('Przed wykonaniem działania odśwież okno przeglądarki, a następnie zaloguj się ponownie do narzędzia.'); } + +function resetLemmaVersions() { + window.frames_modif = new Array(); + window.frames_modif_idx = 0; + var lemma_version = new Lemma_Version(window.schemas, + window.nkjp_examples, + window.nkjp_lemma_examples); + frames_modif.push(lemma_version); +} function initiateFrameFilters() { @@ -437,7 +446,7 @@ function load_content(id) { window.notesNotSaved = false; window.lemmaExNotSaved = false; - $('#new_frames').load(ajax_new_frames, 'id='+id, function(){ + $('#new_frames').load(ajax_new_frames, 'id='+id, function(){ window.lemma_id = id; createSplitter('framesSplit','new-frame-tables', 'tabs'); if(window.can_modify) @@ -578,9 +587,10 @@ function Nkjp_example(example_id, frame_id, arguments_ids, sentence, source, opi this.opinion = opinion; this.comment = comment; this.semantic = semantic; + this.lexical_unit = -1; } -function Nkjp_example_ajax(frame, arg_selections, sentence, source, opinion, comment, semantic) +function Nkjp_example_ajax(frame, arg_selections, sentence, source, opinion, comment, semantic, lexical_unit) { this.frame = frame; this.arg_selections = arg_selections; @@ -589,6 +599,7 @@ function Nkjp_example_ajax(frame, arg_selections, sentence, source, opinion, com this.opinion = opinion; this.comment = comment; this.semantic = semantic; + this.lexical_unit = lexical_unit; } function Nkjp_ArgSelection(position, arguments) @@ -629,7 +640,7 @@ function frameToTableRows(frame) arguments_row.push(frame.positions[i].arguments[arg_number]); else { - arguments_row.push(new argument(new_elem_id, '', '')); + arguments_row.push(new argument(new_elem_id, '', '', [])); new_elem_id--; } } @@ -866,8 +877,7 @@ function needConfirmation(nkjpInstance) { function unpin_nkjp_example(example_tabId) { if(example_tabId != -1 && - !checkIfSemChangedAndAlert() && - !exampleGotAssignedSemantics(example_tabId)) + !checkIfSemChangedAndAlert())// && !exampleGotAssignedSemantics(example_tabId)) { example_id = example_tabId.replace('nkjp_', ''); for(var i=0; i<window.nkjp_examples.length; i++) @@ -992,60 +1002,6 @@ function getNkjpExampleInstance(nkjp_examples, example_id) return ''; } -//////////////////////// semantics ////////////////////////////// - -function schemaGotAssignedSemantics(element_id) -{ - var semanticsAssigned = true; - var id_map = 
parseId(element_id); - var schema_id = id_map['frame_id']; - if(schema_id < 0) { - semanticsAssigned = false; - } - else { - jQuery.ajax({ - type: 'get', - url: ajax_schema_got_assigned_semantics, - data: {lemma_id: window.lemma_id, - schema_id: schema_id}, - success: function(result) { - semanticsAssigned = result['got_assigned_semantics']; - }, - async: false - }); - } - if(semanticsAssigned) { - error_alert('Nie można zmodyfikować. Element jest wykorzystywany w ramkach semantycznych.'); - } - return semanticsAssigned; -} - -function exampleGotAssignedSemantics(example_tab_id) -{ - var semanticsAssigned = true; - var example_id = example_tab_id.replace('nkjp_', ''); - if (example_id < 0) { - semanticsAssigned = false; - } - else { - jQuery.ajax({ - type: 'get', - url: ajax_example_got_assigned_semantics, - data: {lemma_id: window.lemma_id, - example_id: example_id}, - success: function(result) { - semanticsAssigned = result['got_assigned_semantics']; - }, - async: false - }); - } - if(semanticsAssigned) { - error_alert('Nie można zmodyfikować. Przykład jest wykorzystywany w ramkach semantycznych.'); - } - return semanticsAssigned; -} -////////////////////////////////////////////////////// - function getNkjpLemmaExampleInstance(nkjp_examples, example_id) { var example_id = example_id.replace('nkjpLemma_', ''); @@ -1058,7 +1014,7 @@ function getNkjpLemmaExampleInstance(nkjp_examples, example_id) } function remove_semantic_example(example_id) { - if(example_id != -1 && !checkIfSemChangedAndAlert() && !exampleGotAssignedSemantics(example_id)) + if(example_id != -1 && !checkIfSemChangedAndAlert())// && !exampleGotAssignedSemantics(example_id)) { example_id = example_id.replace('nkjp_', ''); for(var i=0; i<nkjp_examples.length; i++) @@ -1421,7 +1377,7 @@ function add_pos_form_submit() { var arguments = new Array(); for(var i=0; i<argsObj.length; i++) { - arguments.push(new argument(argsObj[i].pk, argsObj[i].fields.text_rep, argsObj[i].fields.type)); + arguments.push(new argument(argsObj[i].pk, argsObj[i].fields.text_rep, argsObj[i].fields.type, [])); } var cats = new Array(); @@ -1570,7 +1526,11 @@ function can_add_position_category(lemma_id) { } function openEditForm(id) { - if(window.can_modify && !checkIfSemChangedAndAlert() && !schemaGotAssignedSemantics(id)) { + if(window.can_modify && !checkIfSemChangedAndAlert()) { + /*if(schemaGotAssignedSemantics(id)) { + semanticsAssignedAlert(); + }*/ + editedFrameInstance = getFrameInstance(id, window.schemas); elemInstance = getElementInstance(id, window.schemas); addSyntacticFramesPerm = user_has_perm('dictionary.add_syntactic_frames'); @@ -1699,7 +1659,7 @@ function convertExample(nkjp_example) var convertedExample = new Nkjp_example_ajax(frame['element'], argument_selections, nkjp_example.sentence, nkjp_example.source, nkjp_example.opinion, nkjp_example.comment, - nkjp_example.semantic); + nkjp_example.semantic, nkjp_example.lexical_unit); return convertedExample; } @@ -1790,7 +1750,7 @@ function save_new_frames() { data: data, id: lemma_id, examples: examples_data, - lemma_examples: lemma_examples_data + lemma_examples: lemma_examples_data, }, callback: function(result) { @@ -1954,8 +1914,7 @@ function frame_form_submit() { } }); - if(window.addedFrame) - { + if(window.addedFrame) { frame = JSON.stringify(window.addedFrame); } else if(propose_phraseology) { @@ -2003,9 +1962,8 @@ function frame_form_submit() { callback: function(result) { edited_frame_id = null; - old_edited_frame_id = ''; - if(window.addedFrame) - { + old_edited_frame_id = ''; 
+ if(window.addedFrame) { edited_frame = $.evalJSON($.toJSON(window.addedFrame)); edited_frame = serializedObjToObj(result['frame']); edited_frame.id = window.addedFrame.id; @@ -2041,10 +1999,11 @@ function frame_form_submit() { } else { edited_frame = getElementInstance(edited_id, schemas); - old_edited_frame_id = edited_frame['element'].id; - edited_frame['element'].id = new_elem_id; - edited_frame_id = new_elem_id; - new_elem_id--; + var old_edited_frame_id = edited_frame['element'].id; + //edited_frame['element'].id = new_elem_id; tuta zmienilem + edited_frame['element'].id = result['id']; + edited_frame_id = edited_frame['element'].id; + //new_elem_id--; edited_frame['element'].text_rep = result['text_rep']; edited_frame['element'].characteristics = result['characteristics']; edited_frame['element'].opinion = result['opinion']; @@ -2071,7 +2030,12 @@ function frame_form_submit() { HideProgressAnimation(); error_alert('Wypełnij wszystkie niezbędne pola formularza.'); return false; - } + } + else if (result == 'data error') { + HideProgressAnimation(); + error_alert('Brakuje danych do stworzenia schematu.'); + return false; + } else { HideProgressAnimation(); @@ -3687,6 +3651,7 @@ function restore_lemma() { to_copy_elem['type'] == 'frame') { var frame = $.evalJSON($.toJSON(to_copy_elem['element'])); + clearSemanticConnections(to_copy_elem['type'], frame); // kopiowanie schematu z podgladu (moze wymagac konwersji) if(elem_in_bucket['lemma_id'] && need_conversion && can_be_converted && @@ -3711,6 +3676,7 @@ function restore_lemma() { to_copy_elem['type'] == 'argument' && !need_conversion) { var argument = $.evalJSON($.toJSON(to_copy_elem['element'])); + clearSemanticConnections(to_copy_elem['type'], argument); argument.id = new_elem_id; new_elem_id--; target_elem['element'].arguments.push(argument); @@ -3721,6 +3687,7 @@ function restore_lemma() { to_copy_elem['type'] == 'position' && !need_conversion) { var position = $.evalJSON($.toJSON(to_copy_elem['element'])); + clearSemanticConnections(to_copy_elem['type'], position); position.id = new_elem_id; new_elem_id--; target_elem['element'].positions.push(position); @@ -3813,8 +3780,12 @@ function restore_lemma() { if(window.selected_id != -1) { var assignedExamples = []; if(canModifyFrame(window.selected_id, window.schemas) && - !checkIfSemChangedAndAlert() && - !schemaGotAssignedSemantics(window.selected_id)) { + !checkIfSemChangedAndAlert()) { + + /*if(schemaGotAssignedSemantics(window.selected_id)) { + semanticsAssignedAlert(); + }*/ + assignedExamples = gotAssignedExample(nkjp_examples, selected_id, true); if(assignedExamples.length == 0) { schemas = removeFrameElement(selected_id, schemas); @@ -3829,8 +3800,10 @@ function restore_lemma() { function addElement() { if(!checkIfSemChangedAndAlert() && - (window.selected_id == -1 || (canModifyFrame(window.selected_id, window.schemas) && - !schemaGotAssignedSemantics(window.selected_id)))) { + (window.selected_id == -1 || canModifyFrame(window.selected_id, window.schemas))) { + /*if(schemaGotAssignedSemantics(window.selected_id)) { + semanticsAssignedAlert(); + }*/ window.schemas = addFrameElementDialog(window.selected_id, window.schemas); } } @@ -3967,8 +3940,11 @@ function restore_lemma() { { if(window.elem_in_bucket && !checkIfSemChangedAndAlert() && (window.selected_id == -1 || - (canModifyFrame(window.selected_id, window.schemas) && - !schemaGotAssignedSemantics(window.selected_id)))) { + canModifyFrame(window.selected_id, window.schemas))) { + + 
/*if(schemaGotAssignedSemantics(window.selected_id)) { + semanticsAssignedAlert(); + }*/ pasteFrameElement(selected_id, elem_in_bucket, schemas); } } @@ -3999,9 +3975,17 @@ function restore_lemma() { canModifyFrame(window.selected_id, window.schemas) && !checkIfSemChangedAndAlert()) { + /*if(getElementInstance(selected_id, schemas)['type'] != 'frame' && + schemaGotAssignedSemantics(selected_id)) { + semanticsAssignedAlert(); + return; + }*/ + elem_in_bucket = getElementInstance(selected_id, schemas); + var parent_elem = getParentInstance(selected_id, schemas); var duplicate = $.evalJSON($.toJSON(elem_in_bucket['element'])); + clearSemanticConnections(elem_in_bucket['type'], duplicate); duplicate.id = new_elem_id; new_elem_id--; @@ -4040,6 +4024,7 @@ function restore_lemma() { && !schema['element'].is_phraseologic && user_has_perm('dictionary.add_phraseologic_frames')) { var duplicate = $.evalJSON($.toJSON(schema['element'])); + clearSemanticConnections(schema['type'], duplicate); duplicate.id = new_elem_id; new_elem_id--; duplicate.is_phraseologic = true; @@ -4108,7 +4093,7 @@ function restore_lemma() { function delete_nkjp_example(example_id) { - if(example_id != -1 && !checkIfSemChangedAndAlert() && !exampleGotAssignedSemantics(example_id)) + if(example_id != -1 && !checkIfSemChangedAndAlert())// && !exampleGotAssignedSemantics(example_id)) { example_id = selected_example_id.replace('nkjp_', ''); for(var i=0; i<nkjp_examples.length; i++) @@ -4143,8 +4128,8 @@ function restore_lemma() { function delete_all_nkjp_examples(frame_id) { if(canModifyFrame(frame_id, window.schemas) && - !checkIfSemChangedAndAlert() && - !schemaGotAssignedSemantics(frame_id)) { + !checkIfSemChangedAndAlert())// && !schemaGotAssignedSemantics(frame_id)) + { var new_example_tab = new Array(); for(var i=0; i<nkjp_examples.length; i++) { @@ -4202,7 +4187,7 @@ function restore_lemma() { function modify_nkjp_example(example_id) { - if(example_id != -1 && !checkIfSemChangedAndAlert() && !exampleGotAssignedSemantics(example_id)) + if(example_id != -1 && !checkIfSemChangedAndAlert())// && !exampleGotAssignedSemantics(example_id)) { var example = ''; for(var i=0; i<window.nkjp_examples.length; i++) @@ -5092,21 +5077,6 @@ function gridPreviewContent(isPreview) { return false; } -function semanticsChanged() { - if(window.frames_operations.length > 0) { - return true; - } - return false; -} - -function checkIfSemChangedAndAlert() { - if(semanticsChanged()) { - alert('Przed dokonaniem zmiany zapisz semantykę.'); - return true; - } - return false; -} - ///////////////////////////////////// $(function(){ diff --git a/dictionary/static/js/semantics_coupling.js b/dictionary/static/js/semantics_coupling.js new file mode 100644 index 0000000..bbe6c17 --- /dev/null +++ b/dictionary/static/js/semantics_coupling.js @@ -0,0 +1,154 @@ +function schemaGotAssignedSemantics(element_id) { + var semanticsAssigned = true; + var id_map = parseId(element_id); + var schema_id = id_map['frame_id']; + if(schema_id < 0) { + semanticsAssigned = false; + } + else { + jQuery.ajax({ + type: 'get', + url: ajax_schema_got_assigned_semantics, + data: {lemma_id: window.lemma_id, + schema_id: schema_id}, + success: function(result) { + semanticsAssigned = result['got_assigned_semantics']; + }, + async: false + }); + } + return semanticsAssigned; +} + +function semanticsAssignedAlert() { + error_alert('Działaj rozważnie, element jest wykorzystywany w ramach semantycznych.'); +} + +function exampleGotAssignedSemantics(example_tab_id) +{ + var semanticsAssigned 
= true; + var example_id = example_tab_id.replace('nkjp_', ''); + if (example_id < 0) { + semanticsAssigned = false; + } + else { + jQuery.ajax({ + type: 'get', + url: ajax_example_got_assigned_semantics, + data: {lemma_id: window.lemma_id, + example_id: example_id}, + success: function(result) { + semanticsAssigned = result['got_assigned_semantics']; + }, + async: false + }); + } + return semanticsAssigned; +} + +function semanticsAssignedExampleAlert() { + error_alert('Działaj rozważnie, przykład jest wykorzystywany w ramach semantycznych.'); +} + +function semanticsChanged() { + if(window.frames_operations.length > 0) { + return true; + } + return false; +} + +function checkIfSemChangedAndAlert() { + if(semanticsChanged()) { + alert('Przed dokonaniem zmiany zapisz semantykę.'); + return true; + } + return false; +} + +function clearSemanticConnections(type, schemaElement) { + if(type == 'frame') { + clearSchemaConnections(schemaElement); + } + else if(type == 'position') { + clearPositionConnections(schemaElement); + } + else if(type == 'argument') { + clearPhraseTypeConnections(schemaElement); + } +} + +function clearSchemaConnections(schema) { + for(var i=0; i<schema.positions.length; i++) { + clearPositionConnections(schema.positions[i]); + } +} + +function clearPositionConnections(position) { + for(var i=0; i<position.arguments.length; i++) { + clearPhraseTypeConnections(position.arguments[i]); + } +} + +function clearPhraseTypeConnections(phraseType) { + phraseType.connections = []; +} + +function updateSchemataConnections() { + $('#new-frame-tables').empty(); + $("#show_nkjp_table").empty(); + $.ajaxJSON({ + method: 'get', + url: ajax_get_schemata, + data: { + lemma_id: window.lemma_id + }, + + callback: function(result) { + window.schemas = serializedObjToObj(result['schemata']); + resetLemmaVersions(); + var frame_class = 'InactiveFrameTable'; + if(result['can_modify']) { + frame_class = 'ActiveFrameTable'; + } + draw_filtered_frames(window.schemas, 'new-frame-tables', 'new-frame-table', + 'frame_filter', window.nkjp_examples, frame_class, + window.lemma_entry, window.lemma_entry); + }, + error_callback: function(xhr, status, error) { + error_alert(status + ': ' + error); + }, + bad_data_callback: function(result) { + return true; + }, + }); +} + +function updateExamplesConnections() { + $('#new-frame-tables').empty(); + $("#show_nkjp_table").empty(); + $.ajaxJSON({ + method: 'get', + url: ajax_get_examples, + data: { + lemma_id: window.lemma_id + }, + + callback: function(result) { + window.nkjp_examples = serializedNkjpToObj(result['examples']); + resetLemmaVersions(); + var frame_class = 'InactiveFrameTable'; + if(result['can_modify']) { + frame_class = 'ActiveFrameTable'; + } + draw_filtered_frames(window.schemas, 'new-frame-tables', 'new-frame-table', + 'frame_filter', window.nkjp_examples, frame_class, + window.lemma_entry, window.lemma_entry); + }, + error_callback: function(xhr, status, error) { + error_alert(status + ': ' + error); + }, + bad_data_callback: function(result) { + return true; + }, + }); +} diff --git a/dictionary/templates/lemma_status.html b/dictionary/templates/lemma_status.html index 8ca2ea8..dede1b7 100644 --- a/dictionary/templates/lemma_status.html +++ b/dictionary/templates/lemma_status.html @@ -1,60 +1,82 @@ -<div id="lemma-status"> - <table class='StatusTable'> - <tr> - <td class='ColumnHeader'>Hasło:</td> - <td>{{lemma.entry}}</td> - </tr> - <tr> - <td class='ColumnHeader'>Właściciel:</td> - <td> - {% if lemma.owner %} - {{lemma.owner.username}} 
- {% else %} - brak - {% endif %} - </td> - </tr> - <tr> - <td class='ColumnHeader'>Frazeolog:</td> - <td> - {% if lemma.phraseologist %} - {{lemma.phraseologist.username}} - {% else %} - brak - {% endif %} - </td> - </tr> - <tr> - <td class='ColumnHeader'>Semantyk:</td> - <td> - {% if lemma.semanticist %} - {{lemma.semanticist.username}} - {% else %} - brak - {% endif %} - </td> - </tr> - <tr> - <td class='ColumnHeader'>Słownik:</td> - <td>{{lemma.vocabulary.name}}</td> - </tr> - <tr> - <td class='ColumnHeader'>Status:</td> - <td>{{lemma.status.status}}</td> - </tr> - <tr> - <td class='ColumnHeader'>Część mowy:</td> - <td>{{pos.name}}</td> - </tr> - </table> -</div> -<div id="lemma-status-change"> - {% if abort_status %} - <button type="button" id="{{ abort_status.id }}" style="width:120px">Zmień na "{{ abort_status.status }}"</button> - {% endif %} - {% for next_status in next_statuses %} - <button type="button" id="{{ next_status.id }}" style="width:120px">Zmień na "{{ next_status.status }}"</button> - {% endfor %} +<div style="display:flex;"> + <div> + <div id="lemma-status"> + <table class='StatusTable'> + <tr> + <td class='ColumnHeader'>Hasło:</td> + <td>{{lemma.entry}}</td> + </tr> + <tr> + <td class='ColumnHeader'>Właściciel:</td> + <td> + {% if lemma.owner %} + {{lemma.owner.username}} + {% else %} + brak + {% endif %} + </td> + </tr> + <tr> + <td class='ColumnHeader'>Frazeolog:</td> + <td> + {% if lemma.phraseologist %} + {{lemma.phraseologist.username}} + {% else %} + brak + {% endif %} + </td> + </tr> + <tr> + <td class='ColumnHeader'>Semantyk:</td> + <td> + {% if lemma.semanticist %} + {{lemma.semanticist.username}} + {% else %} + brak + {% endif %} + </td> + </tr> + <tr> + <td class='ColumnHeader'>Słownik:</td> + <td>{{lemma.vocabulary.name}}</td> + </tr> + <tr> + <td class='ColumnHeader'>Status:</td> + <td>{{lemma.status.status}}</td> + </tr> + <tr> + <td class='ColumnHeader'>Część mowy:</td> + <td>{{pos.name}}</td> + </tr> + </table> + </div> + <div id="lemma-status-change"> + {% if abort_status %} + <button type="button" id="{{ abort_status.id }}" style="width:120px">Zmień na "{{ abort_status.status }}"</button> + {% endif %} + {% for next_status in next_statuses %} + <button type="button" id="{{ next_status.id }}" style="width:120px">Zmień na "{{ next_status.status }}"</button> + {% endfor %} + </div> + </div> + {% if perms.dictionary.view_status_changes and status_changes %} + <div class="status-changes"> + <table class='ChangeControlTable' style="margin:15px;"> + <tr> + <td class='ColumnHeader'>Czas zmiany:</td> + <td class='ColumnHeader'>Docelowy status:</td> + <td class='ColumnHeader'>Osoba dokonująca zmiany:</td> + </tr> + {% for change in status_changes %} + <tr class='ChangeControlTableRow' id="change_{{version.id}}"> + <td>{{ change.date }}</td> + <td>{{ change.status.status }}</td> + <td>{{ change.changer.username }}</td> + </tr> + {% endfor %} + </table> + </div> + {% endif %} </div> <div id="ready-note-dialog"> </div> diff --git a/dictionary/templates/lemma_view.html b/dictionary/templates/lemma_view.html index 7273429..b23428a 100644 --- a/dictionary/templates/lemma_view.html +++ b/dictionary/templates/lemma_view.html @@ -18,6 +18,7 @@ <script type="text/javascript" src="{{ STATIC_URL }}js/lib/jquery.multiselect.js"></script> <script type="text/javascript" src="{{ STATIC_URL }}js/jqgrid-patch.js"></script> <script type="text/javascript" src="{{ STATIC_URL }}js/lemma_grid.js"></script> + <script type="text/javascript" src="{{ STATIC_URL 
}}js/semantics_coupling.js"></script> <script type="text/javascript" src="{{ STATIC_URL }}js/lemma-view.js"></script> {% endblock %} diff --git a/dictionary/validation.py b/dictionary/validation.py index f200f94..db12685 100644 --- a/dictionary/validation.py +++ b/dictionary/validation.py @@ -418,6 +418,30 @@ def create_miss_binded_frames_msg_content(missing_frames): message_content += '\n' return message_content +####################### same positions validation ####################### +def validate_same_positions_schemata(lemma): + msg_content = '' + same_positions_schemata = get_same_positions_schemata(lemma) + if len(same_positions_schemata) > 0: + msg_content = same_positions_message(same_positions_schemata) + return msg_content + +def get_same_positions_schemata(lemma): + same_positions_schemata = [] + for schema in lemma.frames.all(): + for pos in schema.positions.all(): + if schema.positions.filter(text_rep=pos.text_rep).count() > 1: + same_positions_schemata.append(schema) + break + return same_positions_schemata + +def same_positions_message(same_positions_schemata): + message_content = u'W następujących schematach występuje więcej niż jedna identyczna pozycja:\n' + for schema in same_positions_schemata: + message_content += u'\t- [%d] %s\n' % (schema.id, schema.text_rep) + message_content += '\n' + return message_content + ####################### WALIDACJA ############################ def get_napprv_examples(lemma): nApprovedExamples = lemma.nkjp_examples.filter(source__confirmation_required=True, @@ -436,7 +460,7 @@ def validate_examples_and_mark_errors(lemma, status_obj, selected_frame_id): error = False serialized_frames = [] for frame_obj in lemma.frames.all(): - serialized_frame = frameObjToSerializableDict(lemma, frame_obj) + serialized_frame = frameObjToSerializableDict(lemma, frame_obj, True) if selected_frame_id and frame_obj.id != selected_frame_id: serialized_frames.append(serialized_frame) continue @@ -461,7 +485,7 @@ def validate_schemas_and_mark_errors(lemma, status, selected_frame_id): error = False serialized_frames = [] for frame_obj in lemma.frames.all(): - serialized_frame = frameObjToSerializableDict(lemma, frame_obj) + serialized_frame = frameObjToSerializableDict(lemma, frame_obj, True) if selected_frame_id and frame_obj.id != selected_frame_id: serialized_frames.append(serialized_frame) continue diff --git a/dictionary/views.py b/dictionary/views.py index 1cb0a07..853131b 100644 --- a/dictionary/views.py +++ b/dictionary/views.py @@ -191,6 +191,8 @@ def lemma_view(request): 'ajax_get_compatible_schema_chars' : reverse('get_compatible_schema_chars'), 'ajax_deselect_preview_tab': reverse('deselect_preview_tab'), + 'ajax_get_schemata': reverse('get_schemata'), + 'ajax_get_examples': reverse('get_examples'), # powiazywanie hasel (nieczasownikowe) 'ajax_relate_entries' : reverse('relate_entries'), diff --git a/semantics/admin.py b/semantics/admin.py index 0a18904..a90aaa3 100644 --- a/semantics/admin.py +++ b/semantics/admin.py @@ -1,14 +1,18 @@ from django.contrib import admin -from models import FramePosition, SemanticRole, SelectivePreferenceRelations, \ - SemanticRolesDisplay, GeneralSelectivePreference +from models import FramePosition, GeneralSelectivePreference, LexicalUnitExamples, \ + SelectivePreferenceRelations, SemanticFrame, SemanticRole, \ + SemanticRolesDisplay + class SemanticRoleAdmin(admin.ModelAdmin): search_fields = ('role',) -admin.site.register(GeneralSelectivePreference) admin.site.register(FramePosition) 
+admin.site.register(GeneralSelectivePreference) +admin.site.register(LexicalUnitExamples) +admin.site.register(SemanticFrame) admin.site.register(SelectivePreferenceRelations) admin.site.register(SemanticRolesDisplay) admin.site.register(SemanticRole, SemanticRoleAdmin) diff --git a/semantics/management/commands/find_hanging_connections.py b/semantics/management/commands/find_hanging_connections.py new file mode 100644 index 0000000..25a8b02 --- /dev/null +++ b/semantics/management/commands/find_hanging_connections.py @@ -0,0 +1,37 @@ +#-*- coding:utf-8 -*- + +import datetime + +from django.core.management.base import BaseCommand + +from dictionary.models import Lemma + +class Command(BaseCommand): + args = 'none' + help = "" + + def handle(self, **options): + find_hanging_connections() + +def find_hanging_connections(): + lemmas = Lemma.objects.filter(old=False).order_by('entry_obj__name') + for lemma in lemmas: + frames = lemma.entry_obj.actual_frames() + for frame in frames: + for compl in frame.complements.all(): + for real in compl.realizations.all(): + match = False + matching_schemata = lemma.frames.filter(id=real.frame.id).all() + for schema in matching_schemata: + matching_poss = schema.positions.filter(id=real.position.id, + arguments=real.argument) + if matching_poss.exists(): + match = True + break + if not match: + compl_ref = 'frame_%d_comp_%d_' % (frame.id, compl.id) + print '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!' + print 'lemma: %s\tcomplement: %s\trealization: %s' % (lemma.entry_obj.name, + compl_ref, + unicode(real)) + \ No newline at end of file diff --git a/semantics/management/commands/find_hanging_examples.py b/semantics/management/commands/find_hanging_examples.py new file mode 100644 index 0000000..241844a --- /dev/null +++ b/semantics/management/commands/find_hanging_examples.py @@ -0,0 +1,32 @@ +#-*- coding:utf-8 -*- + +from django.core.management.base import BaseCommand + +from dictionary.models import Lemma +from semantics.models import LexicalUnitExamples + +class Command(BaseCommand): + args = 'none' + help = "" + + def handle(self, **options): + find_hanging_examples() + +def find_hanging_examples(): + lemmas = Lemma.objects.filter(old=False).order_by('entry_obj__name') + for lemma in lemmas: + print_hanging_examples(lemma) + +def print_hanging_examples(lemma): + lex_units = lemma.entry_obj.lexical_units().all() + for lu in lex_units: + lu_examples = LexicalUnitExamples.objects.filter(lexical_unit=lu) + for lu_ex in lu_examples: + example = lu_ex.example + if not lemma.nkjp_examples.filter(id=example.id).exists(): + print '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!' 
+ print 'lemma: %s\tlu_ex_id: %d\texample: %s\tex_id: %d' % (lemma.entry_obj.name, + lu_ex.id, + example.sentence, + example.id) + \ No newline at end of file diff --git a/semantics/models.py b/semantics/models.py index ed795de..4e98ed9 100644 --- a/semantics/models.py +++ b/semantics/models.py @@ -49,6 +49,11 @@ class SemanticFrame(models.Model): return True return False + def opinion_selected(self): + if not self.opinion: + return False + return True + def __unicode__(self): complements_str_tab = [unicode(compl) for compl in self.complements.all()] return u'%d --> %s' % (self.id, u'+'.join(complements_str_tab)) @@ -62,6 +67,10 @@ class FramePosition(models.Model): argument = models.ForeignKey(Argument) # numer alternacji alternation = models.IntegerField(default=1) + + def __unicode__(self): + return 'schema_%d_pos_%d_arg_%d_alt_%d_' % (self.frame.id, self.position.id, + self.argument.id, self.alternation) class LexicalUnitExamples(models.Model): example = models.ForeignKey(NKJP_Example) @@ -110,6 +119,12 @@ class Complement(models.Model): # pola z ramki # realizacje tego argumentu w schematach składniowych realizations = models.ManyToManyField(FramePosition) + def has_only_phraseologic_realizations(self): + for real in self.realizations.all(): + if not real.argument.is_phraseologic(): + return False + return True + def __unicode__(self): return u'%d:%s' % (self.id, self.roles.all()) diff --git a/semantics/saving.py b/semantics/saving.py new file mode 100644 index 0000000..9afb9d7 --- /dev/null +++ b/semantics/saving.py @@ -0,0 +1,358 @@ +# -*- coding: utf-8 -*- + +from django.db.models import Min + +from dictionary.models import Argument, Frame, NKJP_Example, Position +from semantics.change_log import store_old_versions +from semantics.models import Complement, GeneralSelectivePreference, FramePosition,\ + LexicalUnitExamples, RelationalSelectivePreference, \ + SelectivePreference, SelectivePreferenceRelations, \ + SemanticFrame, SemanticRole, FrameOpinion +from wordnet.models import Hypernymy, LexicalUnit, Synonymy, Synset + +def modify_frames(lemma_id, operations, user): + store_old_versions(lemma_id, operations, user) + make_operations(operations) + +def make_operations(operations): + translation = {'frame_id': {}, 'complement_id': {}, 'preference_id': {}} + for operation in operations: + if operation['operation'] == "create_frame": + luids = [int(m['id']) for m in operation['meanings']] + translation['frame_id'][int(operation['id'])] = create_frame(luids) + elif operation['operation'] == "remove_frame": + if int(operation['id']) in translation['frame_id']: + frame_id = translation['frame_id'][int(operation['id'])] + else: + frame_id = int(operation['id']) + remove_frame(frame_id) + elif operation['operation'] == "add_argument": + if int(operation['frame_id']) in translation['frame_id']: + frame_id = translation['frame_id'][int(operation['frame_id'])] + else: + frame_id = int(operation['frame_id']) + roles = [int(r) for r in operation['role']] + translation['complement_id'][int(operation['id'])] = add_argument(frame_id, roles) + elif operation['operation'] == "remove_argument": + if int(operation['frame_id']) in translation['frame_id']: + frame_id = translation['frame_id'][int(operation['frame_id'])] + else: + frame_id = int(operation['frame_id']) + if int(operation['complement_id']) in translation['complement_id']: + complement_id = translation['complement_id'][int(operation['complement_id'])] + else: + complement_id = int(operation['complement_id']) + remove_argument(frame_id, 
complement_id) + elif operation['operation'] == "connect": + frame_data = operation['arg'].split('_') + if int(frame_data[1]) in translation['frame_id']: + frame_id = translation['frame_id'][int(frame_data[1])] + else: + frame_id = int(frame_data[1]) + if int(frame_data[3]) in translation['complement_id']: + complement_id = translation['complement_id'][int(frame_data[3])] + else: + complement_id = int(frame_data[3]) + schema_data = operation['connect'].split('_') + schema_id = int(schema_data[1]) + position_id = int(schema_data[3]) + argument_id = int(schema_data[5]) + alternation = int(schema_data[7]) + connect(frame_id, complement_id, schema_id, position_id, argument_id, alternation) + elif operation['operation'] == "disconnect": + frame_data = operation['arg'].split('_') + if int(frame_data[1]) in translation['frame_id']: + frame_id = translation['frame_id'][int(frame_data[1])] + else: + frame_id = int(frame_data[1]) + if int(frame_data[3]) in translation['complement_id']: + complement_id = translation['complement_id'][int(frame_data[3])] + else: + complement_id = int(frame_data[3]) + schema_data = operation['connect'].split('_') + schema_id = int(schema_data[1]) + position_id = int(schema_data[3]) + argument_id = int(schema_data[5]) + alternation = int(schema_data[7]) + disconnect(frame_id, complement_id, schema_id, position_id, argument_id, alternation) + elif operation['operation'] == "assign_role": + if int(operation['frame_id']) in translation['frame_id']: + frame_id = translation['frame_id'][int(operation['frame_id'])] + else: + frame_id = int(operation['frame_id']) + if int(operation['complement_id']) in translation['complement_id']: + complement_id = translation['complement_id'][int(operation['complement_id'])] + else: + complement_id = int(operation['complement_id']) + roles = [int(r) for r in operation['role']] + assign_role(frame_id, complement_id, roles) + elif operation['operation'] == "change_units": + if int(operation['frame_id']) in translation['frame_id']: + frame_id = translation['frame_id'][int(operation['frame_id'])] + else: + frame_id = int(operation['frame_id']) + luids = [int(m) for m in operation['units']] + change_units(frame_id, luids) + elif operation['operation'] == "set_opinion": + if int(operation['frame_id']) in translation['frame_id']: + frame_id = translation['frame_id'][int(operation['frame_id'])] + else: + frame_id = int(operation['frame_id']) + opinion = operation['opinion'] + set_opinion(frame_id, opinion) + elif operation['operation'] == "add_preference": + # {operation: 'add_preference', frame_id: frame_id, complement_id: complement_id, preference_id: preference_id, preference: preference} + if int(operation['frame_id']) in translation['frame_id']: + frame_id = translation['frame_id'][int(operation['frame_id'])] + else: + frame_id = int(operation['frame_id']) + if int(operation['complement_id']) in translation['complement_id']: + complement_id = translation['complement_id'][int(operation['complement_id'])] + else: + complement_id = int(operation['complement_id']) + preference_id = add_preference(frame_id, complement_id, operation['preference']['type'], operation['preference']['content']) + translation['preference_id'][operation['preference_id']] = preference_id + elif operation['operation'] == "remove_preference": + # {operation: 'remove_preference', frame_id: frame_id, complement_id: complement_id, preference_id: preference_id} + if int(operation['frame_id']) in translation['frame_id']: + frame_id = 
translation['frame_id'][int(operation['frame_id'])] + else: + frame_id = int(operation['frame_id']) + if int(operation['complement_id']) in translation['complement_id']: + complement_id = translation['complement_id'][int(operation['complement_id'])] + else: + complement_id = int(operation['complement_id']) + if operation['preference_id'] in translation['preference_id']: + preference_id = translation['preference_id'][operation['preference_id']] + else: + preference_id = (operation['preference_id'][0], int(operation['preference_id'][1:])) + + remove_preference(frame_id, complement_id, preference_id) + else: + pass + +def create_frame(luids): + frame = SemanticFrame() + frame.save() + for id in luids: + lu = LexicalUnit.objects.get(id=id) + frame.lexical_units.add(lu) + return frame.id + +def add_argument(frame_id, roles): + if validate_roles(roles): + frame = SemanticFrame.objects.get(id=frame_id) + complement = Complement(frame=frame) + complement.save() + frame.complements.add(complement) + role_objects = [] + for r in roles: + role_objects.append(SemanticRole.objects.get(id=r)) + complement.roles = role_objects + return complement.id + +def remove_frame(frame_id): + frame = SemanticFrame.objects.get(id=frame_id) + frame.removed = True + frame.save() + +def remove_argument(frame_id, complement_id): + Complement.objects.get(id=complement_id).delete() + +def connect(frame_id, complement_id, schema_id, position_id, argument_id, alternation): + schema = Frame.objects.get(id=schema_id) + position = Position.objects.get(id=position_id) + argument = Argument.objects.get(id=argument_id) + fpas = FramePosition.objects.filter(frame=schema, position=position, argument=argument, alternation=alternation) + if len(fpas) > 0: + fpa = fpas[0] + else: + fpa = FramePosition(frame=schema, position=position, argument=argument, alternation=alternation) + fpa.save() + complement = Complement.objects.get(id=complement_id) + complement.realizations.add(fpa) + +def disconnect(frame_id, complement_id, schema_id, position_id, argument_id, alternation): + schema = Frame.objects.get(id=schema_id) + position = Position.objects.get(id=position_id) + argument = Argument.objects.get(id=argument_id) + fpas = FramePosition.objects.filter(frame=schema, position=position, argument=argument, alternation=alternation) + if len(fpas) > 0: + fpa = fpas[0] + else: + return + complement = Complement.objects.get(id=complement_id) + complement.realizations.remove(fpa) + +def assign_role(frame_id, complement_id, roles): + if validate_roles(roles): + role_objects = [] + for r in roles: + role_objects.append(SemanticRole.objects.get(id=r)) + complement = Complement.objects.get(id=complement_id) + complement.roles = role_objects + +def validate_roles(roles): + role_objects = [] + for r in roles: + role_objects.append(SemanticRole.objects.get(id=r)) + if len(role_objects) > 2: + return False + ok = False + for r in role_objects: + if not r.color == None: + ok = not ok + return ok + +def change_units(frame_id, luids): + frame = SemanticFrame.objects.get(id=frame_id) + frame.lexical_units = [] + for id in luids: + lu = LexicalUnit.objects.get(id=id) + frame.lexical_units.add(lu) + +def set_opinion(frame_id, opinion): + frame = SemanticFrame.objects.get(id=frame_id) + frame_opinion = FrameOpinion.objects.get(short=opinion) + frame.opinion = frame_opinion + frame.save() + +# preference_id = add_preference(frame_id, complement_id, operation['preference']['type'], operation['preference']['content']) +def add_preference(frame_id, complement_id, 
preference_type, preference_content): + + complement = Complement.objects.get(id=complement_id) + if complement.selective_preference is None: + sp = SelectivePreference() + sp.save() + complement.selective_preference = sp + complement.save() + + if preference_type == 'g': + general = GeneralSelectivePreference.objects.get(id=int(preference_content)) + complement.selective_preference.generals.add(general) + return ('g', general.id) + elif preference_type == 's': + synset = Synset.objects.get(id=int(preference_content)) + complement.selective_preference.synsets.add(synset) + return ('s', synset.id) + elif preference_type == 'r': + relation = SelectivePreferenceRelations.objects.get(id=int(preference_content['relation'])) + argument = [int(a) for a in preference_content['to'].split(',')] + frame = SemanticFrame.objects.get(id=frame_id) + candidates = Complement.objects.filter(frame=frame) + found = None + for c in candidates: + if len(c.roles.all()) == len(argument): + roles = [r.id for r in c.roles.all()] + ok = True + for a in argument: + if a not in roles: + ok = False + if ok: + found = c + break + if found is not None: + rsp = RelationalSelectivePreference(relation=relation, to=found) + rsp.save() + complement.selective_preference.relations.add(rsp) + return ('r', rsp.id) + else: + return -1 + else: + return -1 + +# remove_preference(frame_id, complement_id, preference) +def remove_preference(frame_id, complement_id, preference): + preference_type, preference_id = preference + if preference_type == 'g': + complement = Complement.objects.get(id=complement_id) + g = complement.selective_preference.generals.get(id = int(preference_id)) + complement.selective_preference.generals.remove(g) + elif preference_type == 's': + complement = Complement.objects.get(id=complement_id) + s = complement.selective_preference.synsets.get(id = int(preference_id)) + complement.selective_preference.synsets.remove(s) + elif preference_type == 'r': + complement = Complement.objects.get(id=complement_id) + r = complement.selective_preference.relations.get(id = int(preference_id)) + complement.selective_preference.relations.remove(r) + + +def update_meanings(operations): + translation = {} + for operation in operations: + if operation['operation'] == "set_glossa": + if int(operation['unit']) in translation: + unit = translation[int(operation['unit'])] + else: + unit = int(operation['unit']) + set_glossa(unit, operation['value']) + elif operation['operation'] == "add_example": + if int(operation['unit']) in translation: + unit = translation[int(operation['unit'])] + else: + unit = int(operation['unit']) + add_example(unit, operation['example']) + elif operation['operation'] == "remove_example": + if int(operation['unit']) in translation: + unit = translation[int(operation['unit'])] + else: + unit = int(operation['unit']) + remove_example(unit, operation['example']) + elif operation['operation'] == "add_unit": + translation[operation['unit']['id']] = add_unit(operation['unit']) + elif operation['operation'] == "remove_unit": + luid = int(operation['luid']) + if luid in translation: + remove_unit(translation[luid]) + else: + remove_unit(luid) + else: + pass + +def set_glossa(unit_id, new_glossa): + unit = LexicalUnit.objects.get(id=unit_id) + unit.glossa = new_glossa + unit.save() + +def add_example(unit_id, example_id): + unit = LexicalUnit.objects.get(id=unit_id) + nkjp_example = NKJP_Example.objects.get(id=example_id) + lue = LexicalUnitExamples(example=nkjp_example, lexical_unit=unit) + lue.save() + +def 
remove_example(unit_id, example_id): + unit = LexicalUnit.objects.get(id=unit_id) + nkjp_example = NKJP_Example.objects.get(id=example_id) + lue = LexicalUnitExamples.objects.get(example=nkjp_example, lexical_unit=unit) + lue.delete() + +def add_unit(unit): # returns new id + + s1 = Synset(id=(min(Synset.objects.all().aggregate(Min('id'))['id__min'], 0) - 1)) + s1.save() + lu = LexicalUnit(base=unit['base'], sense=unit['sense'], pos=unit['pos'], glossa=unit['glossa'], luid=-1, synset=s1) + lu.save() + + + if int(unit['relation']) == 1: + s2 = Synset.objects.get(id=int(unit['to'])) + r = Synonymy(parent=s1, child=s2) + r.save() + r = Synonymy(parent=s2, child=s1) + r.save() + elif int(unit['relation']) == 0: + s2 = Synset.objects.get(id=int(unit['to'])) + r = Hypernymy(parent=s1, child=s2) + r.save() + else: + pass + + return lu.id + +def remove_unit(luid): + lu = LexicalUnit.objects.get(id=luid) + if lu.luid is not None and lu.luid >= 0: + return + else: + lu.delete() diff --git a/semantics/static/js/semantics_connections.js b/semantics/static/js/semantics_connections.js index 8f5fbf9..51fcc56 100644 --- a/semantics/static/js/semantics_connections.js +++ b/semantics/static/js/semantics_connections.js @@ -1,5 +1,5 @@ -var connected = {}; // dictionaries of connections and disconnections between frames and schemas -var connected_reverse = {}; + var connected = {}; // dictionaries of connections and disconnections between frames and schemas + var connected_reverse = {}; function memorizeConnections(arguments_connected, frames_connection){ connected = arguments_connected; diff --git a/semantics/static/js/semantics_frames.js b/semantics/static/js/semantics_frames.js index e634747..87183cd 100644 --- a/semantics/static/js/semantics_frames.js +++ b/semantics/static/js/semantics_frames.js @@ -463,8 +463,8 @@ function saveFrames() { memorizeConnections(data.connections.connected, data.connections.connected_reverse); $("#semantic-frames-count").empty(); $("#semantic-frames-count").append(data.frames_count); + updateSchemataConnections(); }); frames_operations = []; } } - diff --git a/semantics/static/js/semantics_lexical_units.js b/semantics/static/js/semantics_lexical_units.js index 6dac2cd..266e504 100644 --- a/semantics/static/js/semantics_lexical_units.js +++ b/semantics/static/js/semantics_lexical_units.js @@ -97,6 +97,7 @@ function saveMeanings() { success: function(data){ memorizeLexicalUnits(data.lexical_units); basicLexicalUnitsData(data.informations); + updateExamplesConnections(); }, async: false }); diff --git a/semantics/templates/opinions.json b/semantics/templates/opinions.json new file mode 100644 index 0000000..92d3c7e --- /dev/null +++ b/semantics/templates/opinions.json @@ -0,0 +1,5 @@ +{% load jsonify %} + +{ + "opinions": {{ opinions|jsonify }} +} diff --git a/semantics/validation.py b/semantics/validation.py index c3f5e94..bd333a5 100644 --- a/semantics/validation.py +++ b/semantics/validation.py @@ -2,12 +2,12 @@ from django.db.models import Max -from dictionary.models import Lemma, reflex_phrase_types +from dictionary.models import Lemma, reflex_phrase_types, Argument_Model from semantics.models import LexicalUnitExamples from semantics.utils import get_matching_frame def validate_frames(lemma_id): - lemma = Lemma.objects.get(id=lemma_id, old=False) + lemma = Lemma.objects.get(id=lemma_id) actual_frames = lemma.entry_obj.actual_frames() error_msg = u'' for frame in actual_frames.all(): @@ -21,6 +21,8 @@ def frame_valid(lemma, frame, actual_frames): complements = 
frame.complements.all() if not arguments_exists(complements): error_msg = u'Semantyka: Rama semantyczna %d jest pusta.' % frame.id + elif not frame.opinion_selected(): + error_msg = u'Semantyka: Rama semantyczna %d nie ma wybranej opinii.' % frame.id elif not roles_unique(complements): error_msg = u'Semantyka: Rama semantyczna %d nie zawiera unikalnych ról.' % frame.id elif not arguments_pinned(complements): @@ -62,7 +64,9 @@ def arguments_pinned(complements): def preferences_selected(complements): for complement in complements: - if not preference_valid(complement): + if complement.realizations.exists() and complement.has_only_phraseologic_realizations(): + pass + elif not preference_valid(complement): return False return True diff --git a/semantics/views.py b/semantics/views.py index 1061343..69eca1f 100644 --- a/semantics/views.py +++ b/semantics/views.py @@ -1,24 +1,19 @@ # -*- coding: utf-8 -*- -import locale from semantics.models import SemanticRole, SemanticFrame, Complement, \ LexicalUnit, FrameRankings, SemanticRolesDisplay, \ - LexicalUnitExamples, FramePosition, SelectivePreference, \ - RelationalSelectivePreference, SelectivePreferenceRelations, \ + LexicalUnitExamples, SelectivePreferenceRelations, \ GeneralSelectivePreference, FrameOpinion + from dictionary.models import Frame_Char_Model, Lemma, Lemma_Status, \ - sort_arguments, sort_positions, NKJP_Example, \ - Frame, Position, Argument -from wordnet.models import Hypernymy, Synonymy, Synset + sort_arguments, sort_positions from dictionary.ajax_lemma_view import user_can_modify from django.core.exceptions import SuspiciousOperation from django.core.urlresolvers import reverse from django.db.models import Q -from datetime import datetime -from django.db.models import Min -from common.decorators import render, ajax, AjaxError -from semantics.change_log import store_old_versions +from common.decorators import render, ajax +from semantics.saving import modify_frames, update_meanings from semantics.validation import validate_schemas, validate_frames, validate_lexical_units @@ -469,357 +464,15 @@ def ajax_create_complement(request, lemma_id, frame, roles): @ajax(method='get', encode_result=False) def ajax_update_meanings(request, operations, lemma_id): - translation = {} - for operation in operations: - if operation['operation'] == "set_glossa": - if int(operation['unit']) in translation: - unit = translation[int(operation['unit'])] - else: - unit = int(operation['unit']) - set_glossa(unit, operation['value']) - elif operation['operation'] == "add_example": - if int(operation['unit']) in translation: - unit = translation[int(operation['unit'])] - else: - unit = int(operation['unit']) - add_example(unit, operation['example']) - elif operation['operation'] == "remove_example": - if int(operation['unit']) in translation: - unit = translation[int(operation['unit'])] - else: - unit = int(operation['unit']) - remove_example(unit, operation['example']) - elif operation['operation'] == "add_unit": - translation[operation['unit']['id']] = add_unit(operation['unit']) - elif operation['operation'] == "remove_unit": - luid = int(operation['luid']) - if luid in translation: - remove_unit(translation[luid]) - else: - remove_unit(luid) - else: - pass - + update_meanings(operations) return ajax_units(request) - -def set_glossa(unit_id, new_glossa): - unit = LexicalUnit.objects.get(id=unit_id) - unit.glossa = new_glossa - unit.save() - -def add_example(unit_id, example_id): - unit = LexicalUnit.objects.get(id=unit_id) - nkjp_example = 
NKJP_Example.objects.get(id=example_id) - lue = LexicalUnitExamples(example=nkjp_example, lexical_unit=unit) - lue.save() - -def remove_example(unit_id, example_id): - unit = LexicalUnit.objects.get(id=unit_id) - nkjp_example = NKJP_Example.objects.get(id=example_id) - lue = LexicalUnitExamples.objects.get(example=nkjp_example, lexical_unit=unit) - lue.delete() - -def add_unit(unit): # returns new id - - s1 = Synset(id=(min(Synset.objects.all().aggregate(Min('id'))['id__min'], 0) - 1)) - s1.save() - lu = LexicalUnit(base=unit['base'], sense=unit['sense'], pos=unit['pos'], glossa=unit['glossa'], luid=-1, synset=s1) - lu.save() - - - if int(unit['relation']) == 1: - s2 = Synset.objects.get(id=int(unit['to'])) - r = Synonymy(parent=s1, child=s2) - r.save() - r = Synonymy(parent=s2, child=s1) - r.save() - elif int(unit['relation']) == 0: - s2 = Synset.objects.get(id=int(unit['to'])) - r = Hypernymy(parent=s1, child=s2) - r.save() - else: - pass - - return lu.id - -def remove_unit(luid): - lu = LexicalUnit.objects.get(id=luid) - if lu.luid is not None and lu.luid >= 0: - return - else: - lu.delete() @ajax(method='get', encode_result=False) def ajax_modify_frames(request, operations, lemma_id): - if not request.user.is_authenticated(): return 'user logged out' - - store_old_versions(lemma_id, operations, request.user) - - translation = {'frame_id': {}, 'complement_id': {}, 'preference_id': {}} - for operation in operations: - if operation['operation'] == "create_frame": - luids = [int(m['id']) for m in operation['meanings']] - translation['frame_id'][int(operation['id'])] = create_frame(luids) - elif operation['operation'] == "remove_frame": - if int(operation['id']) in translation['frame_id']: - frame_id = translation['frame_id'][int(operation['id'])] - else: - frame_id = int(operation['id']) - remove_frame(frame_id) - elif operation['operation'] == "add_argument": - if int(operation['frame_id']) in translation['frame_id']: - frame_id = translation['frame_id'][int(operation['frame_id'])] - else: - frame_id = int(operation['frame_id']) - roles = [int(r) for r in operation['role']] - translation['complement_id'][int(operation['id'])] = add_argument(frame_id, roles) - elif operation['operation'] == "remove_argument": - if int(operation['frame_id']) in translation['frame_id']: - frame_id = translation['frame_id'][int(operation['frame_id'])] - else: - frame_id = int(operation['frame_id']) - if int(operation['complement_id']) in translation['complement_id']: - complement_id = translation['complement_id'][int(operation['complement_id'])] - else: - complement_id = int(operation['complement_id']) - remove_argument(frame_id, complement_id) - elif operation['operation'] == "connect": - frame_data = operation['arg'].split('_') - if int(frame_data[1]) in translation['frame_id']: - frame_id = translation['frame_id'][int(frame_data[1])] - else: - frame_id = int(frame_data[1]) - if int(frame_data[3]) in translation['complement_id']: - complement_id = translation['complement_id'][int(frame_data[3])] - else: - complement_id = int(frame_data[3]) - schema_data = operation['connect'].split('_') - schema_id = int(schema_data[1]) - position_id = int(schema_data[3]) - argument_id = int(schema_data[5]) - alternation = int(schema_data[7]) - connect(frame_id, complement_id, schema_id, position_id, argument_id, alternation) - elif operation['operation'] == "disconnect": - frame_data = operation['arg'].split('_') - if int(frame_data[1]) in translation['frame_id']: - frame_id = translation['frame_id'][int(frame_data[1])] - 
else: - frame_id = int(frame_data[1]) - if int(frame_data[3]) in translation['complement_id']: - complement_id = translation['complement_id'][int(frame_data[3])] - else: - complement_id = int(frame_data[3]) - schema_data = operation['connect'].split('_') - schema_id = int(schema_data[1]) - position_id = int(schema_data[3]) - argument_id = int(schema_data[5]) - alternation = int(schema_data[7]) - disconnect(frame_id, complement_id, schema_id, position_id, argument_id, alternation) - elif operation['operation'] == "assign_role": - if int(operation['frame_id']) in translation['frame_id']: - frame_id = translation['frame_id'][int(operation['frame_id'])] - else: - frame_id = int(operation['frame_id']) - if int(operation['complement_id']) in translation['complement_id']: - complement_id = translation['complement_id'][int(operation['complement_id'])] - else: - complement_id = int(operation['complement_id']) - roles = [int(r) for r in operation['role']] - assign_role(frame_id, complement_id, roles) - elif operation['operation'] == "change_units": - if int(operation['frame_id']) in translation['frame_id']: - frame_id = translation['frame_id'][int(operation['frame_id'])] - else: - frame_id = int(operation['frame_id']) - luids = [int(m) for m in operation['units']] - change_units(frame_id, luids) - elif operation['operation'] == "set_opinion": - if int(operation['frame_id']) in translation['frame_id']: - frame_id = translation['frame_id'][int(operation['frame_id'])] - else: - frame_id = int(operation['frame_id']) - opinion = operation['opinion'] - set_opinion(frame_id, opinion) - elif operation['operation'] == "add_preference": - # {operation: 'add_preference', frame_id: frame_id, complement_id: complement_id, preference_id: preference_id, preference: preference} - if int(operation['frame_id']) in translation['frame_id']: - frame_id = translation['frame_id'][int(operation['frame_id'])] - else: - frame_id = int(operation['frame_id']) - if int(operation['complement_id']) in translation['complement_id']: - complement_id = translation['complement_id'][int(operation['complement_id'])] - else: - complement_id = int(operation['complement_id']) - preference_id = add_preference(frame_id, complement_id, operation['preference']['type'], operation['preference']['content']) - translation['preference_id'][operation['preference_id']] = preference_id - elif operation['operation'] == "remove_preference": - # {operation: 'remove_preference', frame_id: frame_id, complement_id: complement_id, preference_id: preference_id} - if int(operation['frame_id']) in translation['frame_id']: - frame_id = translation['frame_id'][int(operation['frame_id'])] - else: - frame_id = int(operation['frame_id']) - if int(operation['complement_id']) in translation['complement_id']: - complement_id = translation['complement_id'][int(operation['complement_id'])] - else: - complement_id = int(operation['complement_id']) - if operation['preference_id'] in translation['preference_id']: - preference_id = translation['preference_id'][operation['preference_id']] - else: - preference_id = (operation['preference_id'][0], int(operation['preference_id'][1:])) - - remove_preference(frame_id, complement_id, preference_id) - else: - pass - + modify_frames(lemma_id, operations, request.user) return ajax_frames(request) - -def create_frame(luids): - frame = SemanticFrame() - frame.save() - for id in luids: - lu = LexicalUnit.objects.get(id=id) - frame.lexical_units.add(lu) - return frame.id - -def change_units(frame_id, luids): - frame = 
SemanticFrame.objects.get(id=frame_id) - frame.lexical_units = [] - for id in luids: - lu = LexicalUnit.objects.get(id=id) - frame.lexical_units.add(lu) - -def set_opinion(frame_id, opinion): - frame = SemanticFrame.objects.get(id=frame_id) - frame_opinion = FrameOpinion.objects.get(short=opinion) - frame.opinion = frame_opinion - frame.save() - -def remove_frame(frame_id): - frame = SemanticFrame.objects.get(id=frame_id) - frame.removed = True - frame.save() - -def add_argument(frame_id, roles): - if validate_roles(roles): - frame = SemanticFrame.objects.get(id=frame_id) - complement = Complement(frame=frame) - complement.save() - frame.complements.add(complement) - role_objects = [] - for r in roles: - role_objects.append(SemanticRole.objects.get(id=r)) - complement.roles = role_objects - return complement.id - -def remove_argument(frame_id, complement_id): - Complement.objects.get(id=complement_id).delete() - -def connect(frame_id, complement_id, schema_id, position_id, argument_id, alternation): - schema = Frame.objects.get(id=schema_id) - position = Position.objects.get(id=position_id) - argument = Argument.objects.get(id=argument_id) - fpas = FramePosition.objects.filter(frame=schema, position=position, argument=argument, alternation=alternation) - if len(fpas) > 0: - fpa = fpas[0] - else: - fpa = FramePosition(frame=schema, position=position, argument=argument, alternation=alternation) - fpa.save() - complement = Complement.objects.get(id=complement_id) - complement.realizations.add(fpa) - -def disconnect(frame_id, complement_id, schema_id, position_id, argument_id, alternation): - schema = Frame.objects.get(id=schema_id) - position = Position.objects.get(id=position_id) - argument = Argument.objects.get(id=argument_id) - fpas = FramePosition.objects.filter(frame=schema, position=position, argument=argument, alternation=alternation) - if len(fpas) > 0: - fpa = fpas[0] - else: - return - complement = Complement.objects.get(id=complement_id) - complement.realizations.remove(fpa) - -def assign_role(frame_id, complement_id, roles): - if validate_roles(roles): - role_objects = [] - for r in roles: - role_objects.append(SemanticRole.objects.get(id=r)) - complement = Complement.objects.get(id=complement_id) - complement.roles = role_objects - -def validate_roles(roles): - role_objects = [] - for r in roles: - role_objects.append(SemanticRole.objects.get(id=r)) - if len(role_objects) > 2: - return False - ok = False - for r in role_objects: - if not r.color == None: - ok = not ok - return ok - -# preference_id = add_preference(frame_id, complement_id, operation['preference']['type'], operation['preference']['content']) -def add_preference(frame_id, complement_id, preference_type, preference_content): - - complement = Complement.objects.get(id=complement_id) - if complement.selective_preference is None: - sp = SelectivePreference() - sp.save() - complement.selective_preference = sp - complement.save() - - if preference_type == 'g': - general = GeneralSelectivePreference.objects.get(id=int(preference_content)) - complement.selective_preference.generals.add(general) - return ('g', general.id) - elif preference_type == 's': - synset = Synset.objects.get(id=int(preference_content)) - complement.selective_preference.synsets.add(synset) - return ('s', synset.id) - elif preference_type == 'r': - relation = SelectivePreferenceRelations.objects.get(id=int(preference_content['relation'])) - argument = [int(a) for a in preference_content['to'].split(',')] - frame = 
SemanticFrame.objects.get(id=frame_id) - candidates = Complement.objects.filter(frame=frame) - found = None - for c in candidates: - if len(c.roles.all()) == len(argument): - roles = [r.id for r in c.roles.all()] - ok = True - for a in argument: - if a not in roles: - ok = False - if ok: - found = c - break - if found is not None: - rsp = RelationalSelectivePreference(relation=relation, to=found) - rsp.save() - complement.selective_preference.relations.add(rsp) - return ('r', rsp.id) - else: - return -1 - else: - return -1 - -# remove_preference(frame_id, complement_id, preference) -def remove_preference(frame_id, complement_id, preference): - preference_type, preference_id = preference - if preference_type == 'g': - complement = Complement.objects.get(id=complement_id) - g = complement.selective_preference.generals.get(id = int(preference_id)) - complement.selective_preference.generals.remove(g) - elif preference_type == 's': - complement = Complement.objects.get(id=complement_id) - s = complement.selective_preference.synsets.get(id = int(preference_id)) - complement.selective_preference.synsets.remove(s) - elif preference_type == 'r': - complement = Complement.objects.get(id=complement_id) - r = complement.selective_preference.relations.get(id = int(preference_id)) - complement.selective_preference.relations.remove(r) @ajax(method='get', encode_result=True) def ajax_plWN_context_lookup(request, term): @@ -843,14 +496,17 @@ def get_ordered_lexical_units_bases(lexical_units_query): @ajax(method='get') def validate_semantics(request, lemma_id, new_status_id): error_msg = '' - try: - status = Lemma_Status.objects.get(id=new_status_id) - except Lemma_Status.DoesNotExist: - status = None - if status and status.check_semantics: - error_msg = validate_frames(lemma_id) - if not error_msg: - error_msg = validate_lexical_units(lemma_id) - if not error_msg: - error_msg = validate_schemas(lemma_id) + if Lemma.objects.get(id=lemma_id).old: + error_msg = u'Odśwież hasło, widoczna wersja nie jest aktualna.' + else: + try: + status = Lemma_Status.objects.get(id=new_status_id) + except Lemma_Status.DoesNotExist: + status = None + if status and status.check_semantics: + error_msg = validate_frames(lemma_id) + if not error_msg: + error_msg = validate_lexical_units(lemma_id) + if not error_msg: + error_msg = validate_schemas(lemma_id) return {'error_message': error_msg} diff --git a/urls.py b/urls.py index 1b9858f..329e9a2 100644 --- a/urls.py +++ b/urls.py @@ -121,6 +121,8 @@ urlpatterns += patterns('dictionary.ajax_lemma_view', url(r'^ajax/user_is_authenticated/$', 'user_is_authenticated'), url(r'^ajax/deselect_preview_tab/$', 'deselect_preview_tab'), + url(r'^ajax/get_schemata/$', 'get_schemata'), + url(r'^ajax/get_examples/$', 'get_examples'), # powiazywanie hasel (nieczasownikowe) url(r'^ajax/relate_entries/$', 'relate_entries'),
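Note (not part of the patch): the handlers moved into semantics/saving.py apply a batch of client-side operations in order and keep a translation map so that temporary ids assigned in the browser (for not-yet-saved units, frames, complements or preferences) are replaced by real database ids once the objects are created. The following is a minimal, self-contained sketch of that dispatch-and-translate pattern under stated assumptions: no Django/ORM is used, and create_unit()/set_glossa() here are hypothetical in-memory stand-ins for the real persistence helpers, not the project's API.

# Sketch of the operation-batch pattern used by update_meanings()/modify_frames().
# UNITS and NEXT_ID fake a database table with an autoincrement id.
UNITS = {}
NEXT_ID = [1]

def create_unit(unit):
    # Stand-in for add_unit(): "saves" the unit and returns its new real id.
    new_id = NEXT_ID[0]
    NEXT_ID[0] += 1
    UNITS[new_id] = {'base': unit['base'], 'glossa': unit.get('glossa', '')}
    return new_id

def set_glossa(unit_id, new_glossa):
    # Stand-in for the real set_glossa(): updates a stored unit in place.
    UNITS[unit_id]['glossa'] = new_glossa

def apply_operations(operations):
    # Temporary (client-side) ids are mapped to real ids as soon as the
    # corresponding object is created, so later operations in the same
    # batch that still carry the temporary id can be resolved.
    translation = {}

    def resolve(temp_id):
        temp_id = int(temp_id)
        return translation.get(temp_id, temp_id)

    for op in operations:
        if op['operation'] == 'add_unit':
            translation[int(op['unit']['id'])] = create_unit(op['unit'])
        elif op['operation'] == 'set_glossa':
            set_glossa(resolve(op['unit']), op['value'])
        else:
            pass  # unknown operations are silently skipped, as in the source

if __name__ == '__main__':
    apply_operations([
        {'operation': 'add_unit', 'unit': {'id': -1, 'base': 'brac'}},
        {'operation': 'set_glossa', 'unit': -1, 'value': 'to take'},
    ])
    print(UNITS)  # {1: {'base': 'brac', 'glossa': 'to take'}}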