Commit ea36a40a92280fcc957e2d2fff508792e4434add
Merge branch 'tomek' into dev
Showing 5 changed files with 54 additions and 21 deletions
dictionary/models.py
... | ... | @@ -1389,11 +1389,12 @@ class Entry(Model): |
1389 | 1389 | ) |
1390 | 1390 | |
1391 | 1391 | def actual_frames(self): |
1392 | - frame_ids = [] | |
1393 | - lexical_units = self.meanings.order_by('sense') | |
1394 | - for lexical_unit in lexical_units: | |
1395 | - frame_ids.extend([f.id for f in lexical_unit.actual_frames()]) | |
1396 | - return get_model('semantics', 'SemanticFrame').objects.filter(id__in=list(set(frame_ids))) | |
1392 | + # frame_ids = [] | |
1393 | + # lexical_units = self.meanings.order_by('sense') | |
1394 | + # for lexical_unit in lexical_units: | |
1395 | + # frame_ids.extend([f.id for f in lexical_unit.actual_frames()]) | |
1396 | + # return get_model('semantics', 'SemanticFrame').objects.filter(id__in=list(set(frame_ids))) | |
1397 | + return self.semantic_frames.filter(next__isnull=True, removed=False) | |
1397 | 1398 | |
1398 | 1399 | def matching_connections(self, schema, position, phrase_type): |
1399 | 1400 | frames = self.actual_frames() |
... | ... | @@ -1674,4 +1675,4 @@ def getHighestPriorFrame(positions_dict_list, row_idx): |
1674 | 1675 | return position_dict |
1675 | 1676 | elif position_dict['arguments'][row_idx]==sorted_args[0]: |
1676 | 1677 | return position_dict |
1677 | - | |
1678 | 1678 | \ No newline at end of file |
1679 | + | |
... | ... |
semantics/change_log.py
... | ... | @@ -97,6 +97,7 @@ def backup_frames(frame_ids, complement_ids, preference_ids, author=None): |
97 | 97 | f.lexical_units = frame.lexical_units.all() |
98 | 98 | f.complements = frame.complements.all() |
99 | 99 | f.author = frame.author |
100 | + f.entry = frame.entry | |
100 | 101 | frame.author = author |
101 | 102 | f.save() |
102 | 103 | prev = SemanticFrame.objects.filter(next=frame) |
... | ... |
semantics/static/js/semantics_lexical_units.js
... | ... | @@ -122,6 +122,22 @@ function saveMeanings() { |
122 | 122 | }, |
123 | 123 | async: false |
124 | 124 | }); |
125 | + | |
126 | + $.ajax({ | |
127 | + dataType: "json", | |
128 | + url: ajax_frames, | |
129 | + data: {"lemma_id": lemma_id}, | |
130 | + success: function(data){ | |
131 | + getFrames(data.frames_display); | |
132 | + displayFrames(); | |
133 | + memorizeConnections(data.connections.connected, data.connections.connected_reverse); | |
134 | + alternationCounts(data.alternations); | |
135 | + $("#semantic-frames-count").empty(); | |
136 | + $("#semantic-frames-count").append(data.frames_count); | |
137 | + }, | |
138 | + async: false | |
139 | + }); | |
140 | + | |
125 | 141 | units_operations = []; |
126 | 142 | } |
127 | 143 | |
... | ... |
semantics/static/js/semantics_roles.js
... | ... | @@ -22,7 +22,11 @@ function rolesToHtml(roles_display, full){ |
22 | 22 | for (k = 0; k < roles_display[i][j].roles.length; k++) { |
23 | 23 | table += '<div style='; |
24 | 24 | if (roles_display[i][j].roles[k].color != 'None') { |
25 | - table += '"background-color: rgb(' + roles_display[i][j].roles[k].color + ')'; | |
25 | + if (roles_display[i][j].roles[k].color == '256,256,256') { | |
26 | + table += '"background-color: rgb(256,256,256); font-weight:bold; margin: auto; width: 25%"'; | |
27 | + } else { | |
28 | + table += '"background-color: rgb(' + roles_display[i][j].roles[k].color + ')'; | |
29 | + } | |
26 | 30 | } else { |
27 | 31 | table += '"background: linear-gradient(to ' + roles_display[i][j].roles[k].gradient + ', rgba(100,100,100,0.1), rgba(100,100,100,1))'; |
28 | 32 | } |
... | ... |
semantics/views.py
... | ... | @@ -81,35 +81,46 @@ def create_frames_context(lemma_id, user): |
81 | 81 | lemma = Lemma.objects.get(id=lemma_id) |
82 | 82 | |
83 | 83 | #lexical_units = LexicalUnit.objects.filter(Q(base__startswith=lemma.entry + u' ')|Q(base__contains=u' '+lemma.entry+u' ')|Q(base__endswith=u' '+lemma.entry)|Q(base=lemma.entry)).order_by('sense') |
84 | - lexical_units = lemma.entry_obj.meanings.order_by('sense') | |
84 | +# lexical_units = lemma.entry_obj.meanings.order_by('sense') | |
85 | 85 | |
86 | 86 | alternations = {} |
87 | 87 | frames_dict = {} |
88 | 88 | frame_units = {} |
89 | - for lexical_unit in lexical_units: | |
90 | - frames = lexical_unit.actual_frames() | |
91 | - for frame in frames: | |
92 | - alternations[frame.id] = {} | |
93 | - frames_dict[frame.id] = frame | |
94 | - if frame.id not in frame_units: | |
95 | - frame_units[frame.id] = [] | |
96 | - frame_units[frame.id].append(lexical_unit) | |
97 | - | |
89 | +# for lexical_unit in lexical_units: | |
90 | +# frames = lexical_unit.actual_frames() | |
91 | +# for frame in frames: | |
92 | +# alternations[frame.id] = {} | |
93 | +# frames_dict[frame.id] = frame | |
94 | +# if frame.id not in frame_units: | |
95 | +# frame_units[frame.id] = [] | |
96 | +# frame_units[frame.id].append(lexical_unit) | |
97 | + | |
98 | + frames = lemma.entry_obj.actual_frames() | |
99 | + for frame in frames: | |
100 | + alternations[frame.id] = {} | |
101 | + frames_dict[frame.id] = frame | |
102 | + frame_units[frame.id] = frame.lexical_units.all() | |
103 | + | |
98 | 104 | type_frames = {} |
99 | 105 | for frame_id in frame_units: |
100 | 106 | t = tuple(frame_units[frame_id]) |
101 | 107 | if t not in type_frames: |
102 | 108 | type_frames[t] = [] |
103 | 109 | type_frames[tuple(frame_units[frame_id])].append(frames_dict[frame_id]) |
110 | + | |
104 | 111 | |
105 | 112 | frames_display = [] |
106 | 113 | complement_arguments = {} |
107 | 114 | arguments_frame_connected = {} |
108 | 115 | |
109 | - for t in type_frames: | |
110 | - type_frames[t] = reorder_history(type_frames[t]) | |
111 | - | |
112 | - for t in type_frames: | |
116 | +# for t in type_frames: | |
117 | +# type_frames[t] = reorder_history(type_frames[t]) | |
118 | + | |
119 | + k = type_frames.keys() | |
120 | + k.sort(key=lambda tup: len(tup)) | |
121 | + for t in k: | |
122 | + | |
123 | +# for t in type_frames: | |
113 | 124 | frame_display = {"lexical_units": [], "frames": []} |
114 | 125 | for lu in list(t): |
115 | 126 | frame_display["lexical_units"].append({"id": str(lu.id), "base": lu.base, "sense": str(lu.sense)}) |
... | ... |