Merge pull request #50 from AI4Bharat/IDC_modification_PU
ishvindersethi22 authored Mar 28, 2024
2 parents f2e64ac + ba103a5 commit 3a68bb3
Showing 1 changed file with 122 additions and 61 deletions.
183 changes: 122 additions & 61 deletions backend/tasks/views.py
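
What the diff does, in one place: each of the six repeated branches of partial_update gains the same guard around the InstructionDrivenChat handling. When the request body carries an interaction_llm key, the view skips the get_llm_output call and adopts the client-supplied chat history wholesale, provided both the incoming and stored results are lists and the incoming one is strictly longer. A minimal runnable sketch of that guard follows; apply_idc_update and the SimpleNamespace stand-ins are illustrative names, not code from the repository, while the get_llm_output call shape and the length check mirror the diff.

from types import SimpleNamespace

def apply_idc_update(annotation_obj, data, get_llm_output):
    """Sketch of the InstructionDrivenChat guard this PR adds (hypothetical helper)."""
    # The view tests only for the key's presence, never its value.
    interaction_llm = "interaction_llm" in data

    if not interaction_llm:
        # Server-driven turn: ask the LLM and append a {prompt, output} pair.
        output_result = get_llm_output(
            data["result"],
            annotation_obj.task,
            annotation_obj,
            annotation_obj.task.project_id.metadata_json,
        )
        annotation_obj.result.append(
            {"prompt": data["result"], "output": output_result}
        )

    # Client-driven turn: accept the full history, but only when both sides
    # are lists and the incoming one is strictly longer than what is stored.
    # The in-diff comment marks this check as temporary.
    if (
        interaction_llm
        and isinstance(data["result"], list)
        and isinstance(annotation_obj.result, list)
        and len(annotation_obj.result) < len(data["result"])
    ):
        annotation_obj.result = data["result"]
    return annotation_obj.result

# Stand-in objects so the sketch runs outside Django:
project = SimpleNamespace(metadata_json={})
annotation = SimpleNamespace(task=SimpleNamespace(project_id=project), result=[])
fake_llm = lambda prompt, task, ann, meta: "echo: " + str(prompt)

apply_idc_update(annotation, {"result": "hello"}, fake_llm)  # appends one turn
apply_idc_update(
    annotation,
    {
        "result": annotation.result + [{"prompt": "bye", "output": "ok"}],
        "interaction_llm": True,
    },
    fake_llm,
)  # replaces the stored history with the longer client copy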
@@ -1319,7 +1319,9 @@ def partial_update(self, request, pk=None):
                 == "AcousticNormalisedTranscriptionEditing"
                 else False
             )
-
+            interaction_llm = False
+            if "interaction_llm" in request.data:
+                interaction_llm = True
         # Base annotation update
         if annotation_obj.annotation_type == ANNOTATOR_ANNOTATION:
             if request.user not in task.annotation_users.all():
@@ -1346,16 +1348,28 @@ def partial_update(self, request, pk=None):
                     annotation_obj.task.project_id.project_type
                     == "InstructionDrivenChat"
                 ):
-                    output_result = get_llm_output(
-                        request.data["result"],
-                        annotation_obj.task,
-                        annotation_obj,
-                        annotation_obj.task.project_id.metadata_json,
-                    )
-                    # store the result of all checks as well
-                    annotation_obj.result.append(
-                        {"prompt": request.data["result"], "output": output_result}
-                    )
+                    if not interaction_llm:
+                        output_result = get_llm_output(
+                            request.data["result"],
+                            annotation_obj.task,
+                            annotation_obj,
+                            annotation_obj.task.project_id.metadata_json,
+                        )
+                        # store the result of all checks as well
+                        annotation_obj.result.append(
+                            {
+                                "prompt": request.data["result"],
+                                "output": output_result,
+                            }
+                        )
+                    # remove this check when you store the check results also
+                    if (
+                        interaction_llm
+                        and isinstance(request.data["result"], list)
+                        and isinstance(annotation_obj.result, list)
+                        and len(annotation_obj.result) < len(request.data["result"])
+                    ):
+                        annotation_obj.result = request.data["result"]
                     annotation_obj.meta_stats = (
                         compute_meta_stats_for_instruction_driven_chat(
                             annotation_obj.result
@@ -1421,16 +1435,25 @@ def partial_update(self, request, pk=None):
                     annotation_obj.task.project_id.project_type
                     == "InstructionDrivenChat"
                 ):
-                    output_result = get_llm_output(
-                        request.data["result"],
-                        annotation_obj.task,
-                        annotation_obj,
-                        annotation_obj.task.project_id.metadata_json,
-                    )
-                    # store the result of all checks as well
-                    annotation_obj.result.append(
-                        {"prompt": request.data["result"], "output": output_result}
-                    )
+                    if not interaction_llm:
+                        output_result = get_llm_output(
+                            request.data["result"],
+                            annotation_obj.task,
+                            annotation_obj,
+                            annotation_obj.task.project_id.metadata_json,
+                        )
+                        # store the result of all checks as well
+                        annotation_obj.result.append(
+                            {"prompt": request.data["result"], "output": output_result}
+                        )
+                    # remove this check when you store the check results also
+                    if (
+                        interaction_llm
+                        and isinstance(request.data["result"], list)
+                        and isinstance(annotation_obj.result, list)
+                        and len(annotation_obj.result) < len(request.data["result"])
+                    ):
+                        annotation_obj.result = request.data["result"]
                     is_IDC = True
                     request.data["result"] = annotation_obj.result
                     request.data[
@@ -1500,16 +1523,27 @@ def partial_update(self, request, pk=None):
                     annotation_obj.task.project_id.project_type
                     == "InstructionDrivenChat"
                 ):
-                    output_result = get_llm_output(
-                        request.data["result"],
-                        annotation_obj.task,
-                        annotation_obj,
-                        annotation_obj.task.project_id.metadata_json,
-                    )
-                    # store the result of all checks as well
-                    annotation_obj.result.append(
-                        {"prompt": request.data["result"], "output": output_result}
-                    )
+                    if not interaction_llm:
+                        output_result = get_llm_output(
+                            request.data["result"],
+                            annotation_obj.task,
+                            annotation_obj,
+                            annotation_obj.task.project_id.metadata_json,
+                        )
+                        # store the result of all checks as well
+                        annotation_obj.result.append(
+                            {
+                                "prompt": request.data["result"],
+                                "output": output_result,
+                            }
+                        )
+                    if (
+                        interaction_llm
+                        and isinstance(request.data["result"], list)
+                        and isinstance(annotation_obj.result, list)
+                        and len(annotation_obj.result) < len(request.data["result"])
+                    ):
+                        annotation_obj.result = request.data["result"]
                     annotation_obj.meta_stats = (
                         compute_meta_stats_for_instruction_driven_chat(
                             annotation_obj.result
@@ -1614,16 +1648,24 @@ def partial_update(self, request, pk=None):
                     annotation_obj.task.project_id.project_type
                     == "InstructionDrivenChat"
                 ):
-                    output_result = get_llm_output(
-                        request.data["result"],
-                        annotation_obj.task,
-                        annotation_obj,
-                        annotation_obj.task.project_id.metadata_json,
-                    )
-                    # store the result of all checks as well
-                    annotation_obj.result.append(
-                        {"prompt": request.data["result"], "output": output_result}
-                    )
+                    if not interaction_llm:
+                        output_result = get_llm_output(
+                            request.data["result"],
+                            annotation_obj.task,
+                            annotation_obj,
+                            annotation_obj.task.project_id.metadata_json,
+                        )
+                        # store the result of all checks as well
+                        annotation_obj.result.append(
+                            {"prompt": request.data["result"], "output": output_result}
+                        )
+                    if (
+                        interaction_llm
+                        and isinstance(request.data["result"], list)
+                        and isinstance(annotation_obj.result, list)
+                        and len(annotation_obj.result) < len(request.data["result"])
+                    ):
+                        annotation_obj.result = request.data["result"]
                     is_IDC = True
                     request.data["result"] = annotation_obj.result
                     request.data[
@@ -1720,16 +1762,27 @@ def partial_update(self, request, pk=None):
                     annotation_obj.task.project_id.project_type
                     == "InstructionDrivenChat"
                 ):
-                    output_result = get_llm_output(
-                        request.data["result"],
-                        annotation_obj.task,
-                        annotation_obj,
-                        annotation_obj.task.project_id.metadata_json,
-                    )
-                    # store the result of all checks as well
-                    annotation_obj.result.append(
-                        {"prompt": request.data["result"], "output": output_result}
-                    )
+                    if not interaction_llm:
+                        output_result = get_llm_output(
+                            request.data["result"],
+                            annotation_obj.task,
+                            annotation_obj,
+                            annotation_obj.task.project_id.metadata_json,
+                        )
+                        # store the result of all checks as well
+                        annotation_obj.result.append(
+                            {
+                                "prompt": request.data["result"],
+                                "output": output_result,
+                            }
+                        )
+                    if (
+                        interaction_llm
+                        and isinstance(request.data["result"], list)
+                        and isinstance(annotation_obj.result, list)
+                        and len(annotation_obj.result) < len(request.data["result"])
+                    ):
+                        annotation_obj.result = request.data["result"]
                     annotation_obj.meta_stats = (
                         compute_meta_stats_for_instruction_driven_chat(
                             annotation_obj.result
@@ -1825,16 +1878,24 @@ def partial_update(self, request, pk=None):
                     annotation_obj.task.project_id.project_type
                     == "InstructionDrivenChat"
                 ):
-                    output_result = get_llm_output(
-                        request.data["result"],
-                        annotation_obj.task,
-                        annotation_obj,
-                        annotation_obj.task.project_id.metadata_json,
-                    )
-                    # store the result of all checks as well
-                    annotation_obj.result.append(
-                        {"prompt": request.data["result"], "output": output_result}
-                    )
+                    if not interaction_llm:
+                        output_result = get_llm_output(
+                            request.data["result"],
+                            annotation_obj.task,
+                            annotation_obj,
+                            annotation_obj.task.project_id.metadata_json,
+                        )
+                        # store the result of all checks as well
+                        annotation_obj.result.append(
+                            {"prompt": request.data["result"], "output": output_result}
+                        )
+                    if (
+                        interaction_llm
+                        and isinstance(request.data["result"], list)
+                        and isinstance(annotation_obj.result, list)
+                        and len(annotation_obj.result) < len(request.data["result"])
+                    ):
+                        annotation_obj.result = request.data["result"]
                     is_IDC = True
                     request.data["result"] = annotation_obj.result
                     request.data[
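For anyone exercising the change by hand: the new path keys purely on the presence of the interaction_llm key in the PATCH body, with result carrying the full chat history rather than a single prompt. A hypothetical request body (only "result" and "interaction_llm" come from the diff; the example turns are invented):

payload = {
    "result": [
        {"prompt": "Summarise the passage.", "output": "..."},
        {"prompt": "Now shorten it.", "output": "..."},
    ],
    "interaction_llm": True,  # any value works; only the key is checked
}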
