add question generation
This commit is contained in:
@@ -1 +1,3 @@
|
|||||||
DATABASE_URI="sqlite:///database.db"
|
DATABASE_URI="sqlite:///database.db"
|
||||||
|
LANGUAGE_MODEL_API="http://localhost:8080/v1"
|
||||||
|
MODEL_NAME="SmolLM3-Q4_K_M.gguf"
|
||||||
@@ -3,6 +3,7 @@ FastAPI server to run Manolia
|
|||||||
|
|
||||||
# Run
|
# Run
|
||||||
```
|
```
|
||||||
|
cd src/app
|
||||||
fastapi dev main.py
|
fastapi dev main.py
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|||||||
@@ -1,3 +1,7 @@
|
|||||||
fastapi==0.128.6
|
fastapi==0.128.6
|
||||||
sqlmodel==0.0.32
|
sqlmodel==0.0.32
|
||||||
python-dotenv==1.2.1
|
python-dotenv==1.2.1
|
||||||
|
openai==2.21.0
|
||||||
|
spacy==3.8.11
|
||||||
|
# python -m spacy download en_core_web_sm
|
||||||
|
# python -m spacy download fr_core_news_sm
|
||||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -4,6 +4,8 @@ from src.app.models.knowledge import Knowledge
|
|||||||
from src.app.crud.crud_knowledges import create_knowledge, read_knowledges, read_knowledge, update_knowledge, delete_knowledge
|
from src.app.crud.crud_knowledges import create_knowledge, read_knowledges, read_knowledge, update_knowledge, delete_knowledge
|
||||||
from src.app.crud.crud_questions import read_questions as read_questions_crud
|
from src.app.crud.crud_questions import read_questions as read_questions_crud
|
||||||
|
|
||||||
|
from src.app.services.language_generation import completion
|
||||||
|
|
||||||
#Added in __init__
|
#Added in __init__
|
||||||
router = APIRouter(tags=["knowledges"])
|
router = APIRouter(tags=["knowledges"])
|
||||||
|
|
||||||
@@ -38,9 +40,9 @@ def delete(id: int):
|
|||||||
#TODO: find pattern
@router.post("/knowledges/{id}/questions")
def create_questions(id: int):
    """Generate questions for the knowledge item identified by ``id``.

    Looks up the knowledge record and delegates to the SLM completion
    service. The generated questions are returned directly to the caller;
    they are not yet persisted (see TODO below).
    """
    # SLM Generation
    question = completion(read_knowledge(id))
    # TODO: persist the generated questions via create_question()
    return question
|
||||||
|
|
||||||
@router.get("/knowledges/{id}/questions")
|
@router.get("/knowledges/{id}/questions")
|
||||||
def read_questions(id: int):
|
def read_questions(id: int):
|
||||||
|
|||||||
@@ -1,9 +1,9 @@
|
|||||||
from dotenv import load_dotenv
|
|
||||||
import os
|
import os
|
||||||
#import secrets
|
from dotenv import load_dotenv
|
||||||
from sqlmodel import Session, SQLModel, create_engine
|
from sqlmodel import Session, SQLModel, create_engine
|
||||||
|
|
||||||
load_dotenv()
|
load_dotenv()
|
||||||
|
|
||||||
database_uri=os.environ.get("DATABASE_URI")
|
database_uri=os.environ.get("DATABASE_URI")
|
||||||
|
|
||||||
connect_args = {"check_same_thread": False}
|
connect_args = {"check_same_thread": False}
|
||||||
|
|||||||
Binary file not shown.
47
server/src/app/services/language_generation.py
Normal file
47
server/src/app/services/language_generation.py
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
import os

import spacy

from openai import OpenAI
from pydantic import BaseModel
from src.app.models.knowledge import Knowledge

# Base URL of the local OpenAI-compatible inference server
# (e.g. llama.cpp / llamafile serving SmolLM3 — see .env LANGUAGE_MODEL_API).
language_model_api = os.environ.get("LANGUAGE_MODEL_API")

# FIX: the .env file defines MODEL_NAME, but this module read
# LANGUAGE_MODEL_NAME, which silently evaluated to None and sent
# model=None to the server. Accept both names, preferring the more
# explicit LANGUAGE_MODEL_NAME for forward compatibility.
model_name = os.environ.get("LANGUAGE_MODEL_NAME") or os.environ.get("MODEL_NAME")

client = OpenAI(
    base_url=language_model_api,
    # Local OpenAI-compatible servers ignore the key, but the client
    # requires a non-empty value.
    api_key="sk-no-key-required",
)

# French sentence-segmentation pipeline.
# Requires: python -m spacy download fr_core_news_sm
nlp = spacy.load("fr_core_news_sm")
|
def completion(knowledge: Knowledge):
    """Generate comprehension questions for a knowledge item.

    Prompts the local language model (in French) to produce 3 questions
    about ``knowledge.content``, segments the model's reply into sentences
    with spaCy, and keeps the sentences that contain a question mark.

    Args:
        knowledge: the knowledge record whose ``content`` is used as context.

    Returns:
        dict with:
        - "questions": sentences detected as questions, truncated at their
          last "?".
        - "sentence": every segmented sentence (key name kept singular for
          backward compatibility with existing callers).
    """
    context = "Texte : ```" + knowledge.content + "```"
    instruction = "A partir du texte génère 3 questions :"
    prompt = context + "\n" + instruction

    # SLM processing
    response = client.responses.create(
        model=model_name,
        input=[
            {"role": "system", "content": "Question Generation"},
            {"role": "user", "content": prompt},
        ],
    )
    # Responses API: first output item, first content part holds the text.
    text_response = response.output[0].content[0].text

    # Sentence segmentation
    doc = nlp(text_response)
    sents = [sentence.text for sentence in doc.sents]

    # Interrogative-sentence detection: keep each sentence up to and
    # including its LAST "?" (drops trailing non-question text).
    # FIX: use >= 0 — str.rfind returns 0 when the "?" is the first
    # character, which the original `> 0` test wrongly discarded.
    questions = []
    for sent in sents:
        index_mark = sent.rfind("?")
        if index_mark >= 0:
            questions.append(sent[: index_mark + 1])

    return {"questions": questions, "sentence": sents}
||||||
Reference in New Issue
Block a user