Model modification

commit a9b79304f8 (parent 50bf694363)
2 changed files with 36 additions and 37 deletions
@@ -19,7 +19,6 @@ google-api-python-client==2.173.0
 google-auth==2.40.3
 google-auth-httplib2==0.2.0
 google-genai==1.21.1
-google-generativeai==0.8.5
 googleapis-common-protos==1.70.0
 greenlet==3.2.3
 grpcio==1.73.0
@@ -44,6 +43,7 @@ pydantic-settings==2.10.1
 pydantic_core==2.33.2
 pyparsing==3.2.3
 pypdf==5.6.1
+PyPDF2==3.0.1
 python-dateutil==2.9.0.post0
 python-docx==1.2.0
 python-dotenv==1.1.1
@@ -3,13 +3,13 @@ import logging
 import sys
 from typing import Optional, Dict, Any
 
-from google import genai
-from google.genai import types
+# CHANGED HERE: removed 'from google.genai import types' because types.GenerateContentConfig is no longer used this way
+import google.generativeai as genai
 import mistralai
 from mistralai.client import MistralClient
 
 from fastapi import HTTPException, status
-import anyio  # <-- NEW IMPORT: to handle synchronous calls inside async code
+import anyio
 
 from core.config import settings
 
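Side note on the import change above: google-genai and google-generativeai are two distinct PyPI packages that both install under the google namespace but expose different entry points, which is why the types import disappears along with the client style. A minimal illustrative sketch (not part of the commit) for checking which SDK is importable at runtime:

    # Illustrative sketch only: detect which Gemini SDK is importable.
    # Both packages live under the "google" namespace but expose different APIs.
    try:
        import google.generativeai as genai  # legacy SDK: genai.GenerativeModel(...)
        print("google-generativeai is importable")
    except ImportError:
        try:
            from google import genai  # unified SDK: genai.Client(...)
            print("google-genai is importable")
        except ImportError:
            print("no Gemini SDK installed")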
@@ -28,28 +28,29 @@ try:
 except Exception as e:
     logger.error(f"Error during mistralai debug info collection: {e}")
 
+# --- Global Gemini client configuration (this block is now removed, since configuration is done via GenerativeModel) ---
+# You can delete this block if you still had it:
+# if settings.LLM_PROVIDER == "gemini" and settings.GEMINI_API_KEY:
+#     try:
+#         genai.configure(
+#             api_key=settings.GEMINI_API_KEY,
+#             client_options={"api_endpoint": "generativelanguage.googleapis.com"}
+#         )
+#         logger.info("GenAI client globally configured with API endpoint.")
+#     except Exception as e:
+#         logger.error(f"Error during global GenAI configuration: {e}")
 
 class AIService:
     def __init__(self):
         self.provider = settings.LLM_PROVIDER
         self.model_name = settings.GEMINI_MODEL_NAME if self.provider == "gemini" else settings.MISTRAL_MODEL_NAME
 
         self.raw_safety_settings = [
-            {
-                "category": "HARM_CATEGORY_HARASSMENT",
-                "threshold": "BLOCK_NONE"
-            },
-            {
-                "category": "HARM_CATEGORY_HATE_SPEECH",
-                "threshold": "BLOCK_NONE"
-            },
-            {
-                "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
-                "threshold": "BLOCK_NONE"
-            },
-            {
-                "category": "HARM_CATEGORY_DANGEROUS_CONTENT",
-                "threshold": "BLOCK_NONE"
-            },
+            {"category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE"},
+            {"category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_NONE"},
+            {"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", "threshold": "BLOCK_NONE"},
+            {"category": "HARM_CATEGORY_DANGEROUS_CONTENT", "threshold": "BLOCK_NONE"},
         ]
 
         self.raw_generation_config = {
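The compacted safety-settings list above repeats the same threshold for four categories; as a side note (not part of the commit), the same structure can be built from a list of category names so the threshold lives in one place:

    # Sketch only, assuming the {"category": ..., "threshold": ...} dict format shown in the diff.
    HARM_CATEGORIES = [
        "HARM_CATEGORY_HARASSMENT",
        "HARM_CATEGORY_HATE_SPEECH",
        "HARM_CATEGORY_SEXUALLY_EXPLICIT",
        "HARM_CATEGORY_DANGEROUS_CONTENT",
    ]

    raw_safety_settings = [
        {"category": category, "threshold": "BLOCK_NONE"} for category in HARM_CATEGORIES
    ]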
@@ -60,21 +61,20 @@ class AIService:
         if self.provider == "gemini":
             try:
-                self.client = genai.Client(api_key=settings.GEMINI_API_KEY)
-                self.gemini_config = types.GenerateContentConfig(
-                    temperature=self.raw_generation_config["temperature"],
-                    top_p=self.raw_generation_config["top_p"],
-                    top_k=self.raw_generation_config["top_k"],
-                    safety_settings=[
-                        types.SafetySetting(category=s["category"], threshold=s["threshold"])
-                        for s in self.raw_safety_settings
-                    ]
+                # CRUCIAL CHANGE HERE: the GenerativeModel is now initialized directly
+                # genai.Client() and types.GenerateContentConfig are no longer used directly here
+                self.model = genai.GenerativeModel(
+                    model_name=self.model_name,
+                    safety_settings=self.raw_safety_settings,  # pass safety_settings here
+                    generation_config=self.raw_generation_config  # pass generation_config here
+                    # The API key is read automatically from GEMINI_API_KEY if it is configured.
+                    # Or you can pass it explicitly: api_key=settings.GEMINI_API_KEY
                 )
+                logger.info(f"Gemini GenerativeModel initialized with model: {self.model_name}")
 
             except Exception as e:
-                logger.error(f"Gemini client initialization error: {e}")
-                raise ValueError(f"Unable to initialize the Gemini client. Check your GEMINI_API_KEY. Error: {e}")
+                logger.error(f"Gemini model initialization error: {e}")
+                raise ValueError(f"Unable to initialize the Gemini model. Check your GEMINI_API_KEY and the model name. Error: {e}")
 
         elif self.provider == "mistral":
             if not settings.MISTRAL_API_KEY:
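For context, a minimal standalone sketch of the new initialization path, assuming the google-generativeai package is installed and the API key is passed explicitly via genai.configure (environment variable names and the fallback model name are illustrative, mirroring the settings used in the diff):

    # Minimal sketch, not the project's code: explicit configure + GenerativeModel with dict-based settings.
    import os
    import google.generativeai as genai

    genai.configure(api_key=os.environ["GEMINI_API_KEY"])  # explicit API key configuration

    model = genai.GenerativeModel(
        model_name=os.environ.get("GEMINI_MODEL_NAME", "gemini-1.5-flash"),  # assumed default
        safety_settings=[{"category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE"}],
        generation_config={"temperature": 0.2, "top_p": 0.95, "top_k": 40},
    )

    response = model.generate_content("Say hello in one short sentence.")
    print(response.text)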
@@ -121,12 +121,11 @@ class AIService:
                 {"role": "user", "parts": [{"text": prompt}]}
             ]
 
-            # CHANGED HERE: uses anyio.to_thread.run_sync for the synchronous call
+            # CHANGED HERE: 'contents' is now passed as a direct positional argument to generate_content
             response = await anyio.to_thread.run_sync(
-                self.client.models.generate_content,
-                model=self.model_name,
-                contents=contents,
-                config=self.gemini_config,
+                self.model.generate_content,
+                contents,  # <-- fix for the "unexpected keyword argument 'contents'" error
+                # The configuration (temperature, safety_settings) is already defined when self.model is initialized
             )
             response_content = response.text
 
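The last hunk reflects a general property of anyio: to_thread.run_sync only forwards positional arguments to the target function, which is why the earlier keyword-argument call failed; keyword arguments have to be bound beforehand, for example with functools.partial. A small self-contained sketch (names are illustrative, not from the commit):

    # Sketch only: run_sync forwards positional args; keyword args must be pre-bound.
    import functools
    import anyio


    def blocking_call(prompt: str, temperature: float = 0.7) -> str:
        # Stand-in for a synchronous SDK call such as model.generate_content(...)
        return f"echo({prompt!r}, temperature={temperature})"


    async def main() -> None:
        # Positional arguments are forwarded directly to the function.
        print(await anyio.to_thread.run_sync(blocking_call, "hello"))
        # Keyword arguments are NOT forwarded; bind them first with functools.partial.
        print(await anyio.to_thread.run_sync(functools.partial(blocking_call, "hello", temperature=0.2)))


    anyio.run(main)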