add question generation

This commit is contained in:
Robin Couret
2026-02-19 18:00:42 +01:00
parent e0e50af706
commit 1b00ab25f7
10 changed files with 62 additions and 6 deletions

View File

@@ -1 +1,3 @@
DATABASE_URI="sqlite:///database.db" DATABASE_URI="sqlite:///database.db"
LANGUAGE_MODEL_API="http://localhost:8080/v1"
MODEL_NAME="SmolLM3-Q4_K_M.gguf"

View File

@@ -3,6 +3,7 @@ FastAPI server to run Manolia
# Run # Run
``` ```
cd src/app
fastapi dev main.py fastapi dev main.py
``` ```

View File

@@ -1,3 +1,7 @@
fastapi==0.128.6 fastapi==0.128.6
sqlmodel==0.0.32 sqlmodel==0.0.32
python-dotenv==1.2.1 python-dotenv==1.2.1
openai==2.21.0
spacy==3.8.11
# python -m spacy download en_core_web_sm
# python -m spacy download fr_core_news_sm

View File

@@ -4,6 +4,8 @@ from src.app.models.knowledge import Knowledge
from src.app.crud.crud_knowledges import create_knowledge, read_knowledges, read_knowledge, update_knowledge, delete_knowledge from src.app.crud.crud_knowledges import create_knowledge, read_knowledges, read_knowledge, update_knowledge, delete_knowledge
from src.app.crud.crud_questions import read_questions as read_questions_crud from src.app.crud.crud_questions import read_questions as read_questions_crud
from src.app.services.language_generation import completion
#Added in __init__ #Added in __init__
router = APIRouter(tags=["knowledges"]) router = APIRouter(tags=["knowledges"])
@@ -38,9 +40,9 @@ def delete(id: int):
#TODO: find pattern #TODO: find pattern
@router.post("/knowledges/{id}/questions") @router.post("/knowledges/{id}/questions")
def create_questions(id: int): def create_questions(id: int):
#SLM Generation question = completion(read_knowledge(id))
#create_question() #create_question()
return True return question
@router.get("/knowledges/{id}/questions") @router.get("/knowledges/{id}/questions")
def read_questions(id: int): def read_questions(id: int):

View File

@@ -1,9 +1,9 @@
from dotenv import load_dotenv
import os import os
#import secrets from dotenv import load_dotenv
from sqlmodel import Session, SQLModel, create_engine from sqlmodel import Session, SQLModel, create_engine
load_dotenv() load_dotenv()
database_uri=os.environ.get("DATABASE_URI") database_uri=os.environ.get("DATABASE_URI")
connect_args = {"check_same_thread": False} connect_args = {"check_same_thread": False}

View File

@@ -0,0 +1,47 @@
import os

import spacy
from openai import OpenAI
from pydantic import BaseModel
from src.app.models.knowledge import Knowledge

# Endpoint of the local OpenAI-compatible server (e.g. llama.cpp / llamafile).
language_model_api = os.environ.get("LANGUAGE_MODEL_API")
# BUG FIX: the .env file defines MODEL_NAME, not LANGUAGE_MODEL_NAME; reading
# the wrong key always yielded None, so completions were requested with
# model=None.
model_name = os.environ.get("MODEL_NAME")

# Local server ignores authentication, but the client requires a key string.
client = OpenAI(
    base_url=language_model_api,
    api_key="sk-no-key-required",
)

# French sentence-segmentation pipeline
# (requires: python -m spacy download fr_core_news_sm)
nlp = spacy.load("fr_core_news_sm")
def completion(knowledge: Knowledge):
    """Generate candidate questions from a knowledge entry's content.

    Prompts the language model to produce 3 questions (prompt is in French),
    segments the raw model output into sentences with spaCy, and keeps the
    sentences that contain a question mark.

    Args:
        knowledge: Knowledge row whose ``content`` is used as the source text.

    Returns:
        dict with:
          - "questions": sentences containing a "?", truncated just after the
            last "?" to drop any trailing non-question text.
          - "sentence": all segmented sentences (kept for inspection).
    """
    context = "Texte : ```" + knowledge.content + "```"
    instruction = "A partir du texte génère 3 questions :"
    prompt = context + "\n" + instruction

    # SLM processing via the OpenAI-compatible Responses API.
    response = client.responses.create(
        model=model_name,
        input=[
            {"role": "system", "content": "Question Generation"},
            {"role": "user", "content": prompt},
        ],
    )
    text_response = response.output[0].content[0].text

    # Sentence segmentation.
    doc = nlp(text_response)
    sents = [sentence.text for sentence in doc.sents]

    # Interrogative-sentence detection. BUG FIX: the original test
    # `rfind("?") > 0` silently dropped a sentence whose "?" sat at index 0
    # (rfind returns 0 there, and -1 only when absent); membership is the
    # correct presence test.
    questions = [s[: s.rfind("?") + 1] for s in sents if "?" in s]

    return {"questions": questions, "sentence": sents}