Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
48 changes: 44 additions & 4 deletions app/data/responseHandle.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,11 +45,26 @@ def handle_mcq(response):
cleaned_options = [re.sub(r'^[A-Za-z]\)\s*|^[A-Za-z]\.\s*', '', option).strip() for option in options]

# Return as dictionary
return {
"question": question,
"options": cleaned_options,
"correct_answer": correct_answer_index
# return {
# "question": question,
# "options": cleaned_options,
# "correct_answer": correct_answer_index
# }
print(correct_answer_index)
response_options = []
for index, option in enumerate(options):
response_options.append({
"optionText": option,
"marks": 1 if index == correct_answer_index else 0,
"correct": index == correct_answer_index
})

response = {
"questionText": question,
"difficultyLevel": "EASY", # Or determined dynamically based on your criteria
"options": response_options
}
return response

except Exception as e:
print(f"Error: {e}")
Expand All @@ -69,3 +84,28 @@ def handle_mcq(response):

# parsed_data = handle_mcq(response)
# print(parsed_data)


def handle_essay(original_question, difficulty="MEDIUM", marks_per_point=5):
    """Convert a raw essay-question dict into the API response format.

    Args:
        original_question: dict with keys ``"question"`` (the question text)
            and ``"answers"`` (list of model-answer strings, one per
            covering point).
        difficulty: value for ``difficultyLevel`` in the response.
            Defaults to ``"MEDIUM"``, matching the previous hard-coded value.
        marks_per_point: marks assigned to each covering point.
            Defaults to ``5``, matching the previous hard-coded value.

    Returns:
        dict with ``"questionText"``, ``"difficultyLevel"`` and
        ``"coveringPoints"`` (a list of ``{"coveringPointText", "marks"}``
        dicts, empty when there are no answers).

    Raises:
        KeyError: if ``"question"`` or ``"answers"`` is missing.
    """
    return {
        "questionText": original_question["question"],
        "difficultyLevel": difficulty,
        "coveringPoints": [
            {"coveringPointText": answer, "marks": marks_per_point}
            for answer in original_question["answers"]
        ],
    }



37 changes: 24 additions & 13 deletions app/routers/questionGenerate.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,16 @@ class EssayQuestionRequest(BaseModel):
text: str
examid: str

class MCQListRequest(BaseModel):
    """Request body for generating a batch of multiple-choice questions."""
    text: str  # the text to generate multiple choice questions for
    examid: str  # the ID of the exam related to the text
    choices: int = 4  # the number of choices per multiple-choice question
    num_questions: int = 1  # the number of questions to generate

class EssayListRequest(BaseModel):
    """Request body for generating a batch of essay questions."""
    text: str  # the text to generate essay questions for
    examid: str  # the ID of the exam related to the text
    num_questions: int = 1  # the number of questions to generate


router = APIRouter()
Expand Down Expand Up @@ -42,33 +52,34 @@ async def generate_essay_question(request: EssayQuestionRequest) -> dict:
logger.error(f"An error occurred while generating the essay question: {str(e)}")
raise HTTPException(status_code=500, detail=f"An error occurred while generating the essay question: {str(e)}")

@router.post("/generate-questions/mcq/", response_model=list[dict])
async def generate_mcq_questions(text: str = Query(..., description="The text to generate multiple choice questions for"),
examid: str = Query(..., description="The ID of the exam related to the text"),
choices: int = Query(4, description="The number of choices for the multiple choice questions"),
num_questions: int = Query(1, description="The number of questions to generate")
) -> list[dict]:
@router.post("/generate-questions/mcq/", response_model=dict)
async def generate_mcq_questions(request: MCQListRequest) -> dict:
"""Endpoint to generate multiple choice questions for a given text using OpenAI's model."""
try:
# Assuming 'prompt' function is synchronous; if it's async, use 'await prompt(text, examid)'
question_responses = generate_list(text, examid, question_type='mcq', choices=choices, num_questions=num_questions)
question_responses = generate_list(request.text, request.examid, question_type='mcq', choices=request.choices, num_questions=request.num_questions)


logger.info(f"Generated multiple choice questions: {question_responses}")
return question_responses
# return {
# "success": True,
# "questions": question_responses
# }

except Exception as e:
# Catching a broad exception is not best practice; adjust according to specific exceptions expected from 'prompt'
logger.error(f"An error occurred while generating the multiple choice questions: {str(e)}")
raise HTTPException(status_code=500, detail=f"An error occurred while generating the multiple choice questions: {str(e)}")


@router.post("/generate-questions/essay/", response_model=list[dict])
async def generate_essay_questions(text: str = Query(..., description="The text to generate essay questions for"),
examid: str = Query(..., description="The ID of the exam related to the text"),
num_questions: int = Query(1, description="The number of questions to generate")
) -> list[dict]:
@router.post("/generate-questions/essay/", response_model=dict)
async def generate_essay_questions(request: EssayListRequest) -> dict:
"""Endpoint to generate essay questions for a given text using OpenAI's model."""
try:
# Assuming 'prompt' function is synchronous; if it's async, use 'await prompt(text, examid, question_type='essay')
question_responses = generate_list(text, examid, question_type='essay', num_questions=num_questions)
# question_responses = generate_list(text, examid, question_type='essay', num_questions=num_questions)
question_responses = generate_list(request.text, request.examid, question_type='essay', num_questions=request.num_questions)
logger.info(f"Generated essay questions: {question_responses}")
return question_responses
except Exception as e:
Expand Down
24 changes: 21 additions & 3 deletions app/services/generate.py
Original file line number Diff line number Diff line change
Expand Up @@ -140,16 +140,34 @@ def generate_list(self,text: str, question_type: str = "mcq", choices: int = 4,
if question_type == "mcq":
for i in range(len(result)):
formatted_result.append(responseHandle.handle_mcq(result[i]))
return formatted_result
print(formatted_result)
# return formatted_result

return {
"success": True,
"questions": formatted_result
}

elif question_type == "essay":
return result

for i in range(len(result)):
formatted_result.append(responseHandle.handle_essay(result[i]))

# return result
return {
"success": True,
"questions": formatted_result
}

except Exception as e:
log.logger.error(f"Error in generating list: {e}")

print(formatted_result)

return []
return {
"success": False,
"questions": []
}



Expand Down