Commit 673dcd34 authored by Pablo Ruiz Fabo's avatar Pablo Ruiz Fabo

additions to part 2 of the TP

parent 76b5f2c8
"""
Mistral client that combines general chat with 'function calling', i.e.
the chat client can call functions (such as getting weather information
or library catalogue info from an external API) depending on user input.
"""
import json
import os
from mistralai import Mistral
from mistralai.models.chatcompletionresponse import ChatCompletionResponse # for type hints
import requests
# To convert language names (not strictly necessary)
from iso639 import Lang
from iso639 import is_language
def get_weather(location: str, unit: str='c') -> dict:
"""
Fetches the weather data for a given location and unit.
Parameters:
location (str): The location to get the weather for.
unit (str): The temperature unit ('c' for Celsius (default), 'f' for Fahrenheit).
Returns:
dict: Weather information including temperature, conditions, and more.
"""
# API configuration
# Create a free token at https://www.weatherapi.com/, no credit card required
# and add it as an environment variable before running the script
api_key = os.environ.get("WEATHER_API_KEY")
base_url = "https://api.weatherapi.com/v1/current.json"
    # Map the unit to a label used below to pick temp_c vs temp_f in the WeatherAPI response
units_map = {"c": "metric", "f": "imperial"}
units = units_map.get(unit, "metric")
# Request parameters
params = {
"q": location,
"key": api_key,
}
try:
# Make the API request
response = requests.get(base_url, params=params)
response.raise_for_status()
data = response.json()
# Parse the response data
weather_info = {
"location": data["location"]["name"],
"temperature": data["current"]["temp_c"] if units == "metric" else data["current"]["temp_f"],
"conditions": data["current"]["condition"]["text"][0].lower() + data["current"]["condition"]["text"][1:],
}
#return ": ".join((weather_info["location"], str(weather_info["temperature"])))
return weather_info
except requests.exceptions.RequestException as e:
return {"error": str(e)}
except (KeyError, IndexError):
return {"error": "Unexpected response structure from the weather API."}
class LangNameException(Exception):
pass
class OpenLibraryException(Exception):
pass
def get_openlibrary_data(author: str, lang: str = 'fre') -> dict:
"""
Fetches works by a given author and language from OpenLibrary API,
see https://openlibrary.org/dev/docs/api/search
Args:
author: Author whose works are to be fetched.
lang: Language code for the works (try ISO 639-2/B).
Returns:
dict: Information about the author and their works.
"""
try:
# Make the API request
resp = requests.get(f"http://openlibrary.org/search.json?author={author}&lang={lang}")
return resp.json()
except requests.exceptions.RequestException as e:
raise OpenLibraryException(f"Error fetching data from OpenLibrary: {e}")
def parse_lang_response(completion: ChatCompletionResponse) -> dict:
"""
Parses the language response from the model.
Args:
completion (ChatCompletionResponse): The completion from the model.
Returns:
dict: The language name in English or error
"""
    # Strip possible markdown code fences (```json ... ```) around the JSON content
    clean_response = completion.choices[0].message.content.replace("json", "").replace("```", "").strip()
try:
return json.loads(clean_response)
except (json.JSONDecodeError, KeyError):
raise LangNameException("Error: Language not found.")
if __name__ == "__main__":
# Tool specification
# The 'name' argument is the name of the function
# Then we also define the expected arguments
tools = [
{
"type": "function",
"function": {
"name": "get_weather",
"parameters": {
"type": "object",
"properties": {
"location": {"type": "string"},
"unit": {"type": "string"},
},
"required": ["location", "unit"],
"additionalProperties": False,
},
},
},
{
"type": "function",
"function": {
"name": "get_openlibrary_data",
"parameters": {
"type": "object",
"properties": {
"author": {"type": "string"},
"lang": {"type": "string"},
},
"required": ["author", "lang"],
"additionalProperties": False,
},
},
}
]
client = Mistral(api_key=os.getenv("MISTRAL_API_KEY_P"))
    # Start with a system message that defines the assistant's behavior (helpful assistant).
    # It also asks the model to use 'get_weather' if the user question is about the weather
conversation = [{"role": "system", "content": "You are a helpful assistant. Use the 'get_weather' function if the user asks for weather information"}]
print("This is the beginning of your chat with AI. [To exit, send \"###\".]")
while True:
# Take user input
user_input = input("\nYou: ")
if user_input.strip() == "###":
print("Exiting chat. Goodbye!")
break
# Add user message to conversation
conversation.append({"role": "user", "content": user_input})
# Get assistant's response
try:
response = client.chat.complete(
model="mistral-small-latest",
messages=conversation,
tools=tools
)
# If the model detects that the question is about the weather
# or asking about books by an author, the response will contain
# a field named `tool_calls`. The field contains the information required
# to call the relevant function (location for `get_weather` or
# author and language for `get_openlibrary_data`).
# The model extracts this information dynamically from the user input.
# We create code to call the relevant function using as arguments the
# info extracted by the model, and provide our own custom response
tool_calls = response.choices[0].message.tool_calls
if tool_calls is not None:
if tool_calls[0].function.name == "get_weather":
                    # The API returns the function arguments as a string that parses as JSON
                    args = json.loads(tool_calls[0].function.arguments)
                    city = args["location"]
                    # The unit is optional (default is Celsius), so it may be missing from the JSON
                    temp_unit = args.get("unit", "c")
                    weather_infos = get_weather(city, temp_unit)
                    # Report the error if get_weather failed, otherwise the temperature
                    tool_response = weather_infos.get("error") or f"{city}: {weather_infos['temperature']} {temp_unit}."
assistant_response = tool_response
print(f"\nAssistant (weather): {assistant_response}")
#print(response.choices[0].message.tool_calls)
elif tool_calls[0].function.name == "get_openlibrary_data":
                    args = json.loads(tool_calls[0].function.arguments)
                    author = args["author"]
                    language_raw = args["lang"]
# Check if the language given by Mistral is already in the ISO 639-2/B format
                    if is_language(language_raw, "pt2b"):
language = language_raw
else:
# In case conversation was not in English, ask the model for the English translation
# of the language name. Not needed if interaction with the model is always in English
language_request = client.chat.complete(
model="mistral-small-latest",
messages=[{"role": "user", "content": f"Provide a JSON response with a single field called 'language' containing the English translation of '{language_raw}'. If the language name is already in English, write the English name for the language directly in the 'language' field."}],
response_format={"type": "json_object"})
try:
language_name_eng = parse_lang_response(language_request)["language"]
language = Lang(language_name_eng).pt2b
except LangNameException as e:
print(f"Error with language name: {e}, defaulting to English")
language = "eng"
openlibrary_infos = get_openlibrary_data(author, language)
tool_response = f"{author} (lang {language}):"
for work in openlibrary_infos["docs"]:
work_author = work.get("author_name", [])
work_language = work.get("language", [])
work_title = work.get("title", "?")
work_year = work.get("first_publish_year", "?")
# This is not perfect because the title may not be in the
# language we are looking for, even if the language is listed
# for the work, because translations to it exist
if author in work_author and language in work_language:
tool_response += f"\n- {work_title} ({work_year})"
assistant_response = tool_response
print(f"\nAssistant (openlibrary): {assistant_response}")
# If the model does not detect information for a function call in user input,
# it will provide a normal model completion
else:
assistant_response = response.choices[0].message.content
print(f"\nAssistant (no function call): {assistant_response}")
# You can print the complete response (not just the text content) for debugging
#print(f"\nAssistant: {response}")
# Add assistant's message to conversation
            # Note: this makes the prompt grow with every turn; you may want to trim it after a few dialogue turns
conversation.append({"role": "assistant", "content": assistant_response})
            # Trimmed here to the system message plus the last assistant reply
            # (earlier context is lost, but the prompt stays short)
conversation = [conversation[0]] + conversation[-1:]
except Exception as e:
print(f"An error occurred: {e}")
break
"""
Mistral client that combines general chat with 'function calling', i.e.
the chat client can call functions (such as getting weather information
or library catalogue info from an external API) depending on user input.
"""
import json
import os
from mistralai import Mistral
from mistralai.models.chatcompletionresponse import ChatCompletionResponse # for type hints
import requests
# To convert language names (not strictly necessary)
from iso639 import Lang
from iso639 import is_language
def get_weather(location: str, unit: str='c') -> dict:
"""
Fetches the weather data for a given location and unit.
Parameters:
location (str): The location to get the weather for.
unit (str): The temperature unit ('c' for Celsius (default), 'f' for Fahrenheit).
Returns:
dict: Weather information including temperature, conditions, and more.
"""
# API configuration
# Create a free token at https://www.weatherapi.com/, no credit card required
# and add it as an environment variable before running the script
api_key = os.environ.get("WEATHER_API_KEY")
base_url = "https://api.weatherapi.com/v1/current.json"
    # Map the unit to a label used below to pick temp_c vs temp_f in the WeatherAPI response
units_map = {"c": "metric", "f": "imperial"}
units = units_map.get(unit, "metric")
# Request parameters
params = {
"q": location,
"key": api_key,
}
try:
# Make the API request
response = requests.get(base_url, params=params)
response.raise_for_status()
data = response.json()
# Parse the response data
weather_info = {
"location": data["location"]["name"],
"temperature": data["current"]["temp_c"] if units == "metric" else data["current"]["temp_f"],
"conditions": data["current"]["condition"]["text"][0].lower() + data["current"]["condition"]["text"][1:],
}
#return ": ".join((weather_info["location"], str(weather_info["temperature"])))
return weather_info
except requests.exceptions.RequestException as e:
return {"error": str(e)}
except (KeyError, IndexError):
return {"error": "Unexpected response structure from the weather API."}
class LangNameException(Exception):
pass
class OpenLibraryException(Exception):
pass
def get_openlibrary_data(author: str, lang: str = 'fre') -> dict:
"""
Fetches works by a given author and language from OpenLibrary API,
see https://openlibrary.org/dev/docs/api/search
Args:
author: Author whose works are to be fetched.
lang: Language code for the works (try ISO 639-2/B).
Returns:
        #TODO to be defined (you could return e.g. a dictionary with the information)
"""
    #TODO define the function
def parse_lang_response(completion: ChatCompletionResponse) -> dict:
"""
Parses the language response from the model.
Args:
completion (ChatCompletionResponse): The completion from the model.
Returns:
dict: The language name in English or error
"""
    # Strip possible markdown code fences (```json ... ```) around the JSON content
    clean_response = completion.choices[0].message.content.replace("json", "").replace("```", "").strip()
try:
return json.loads(clean_response)
except (json.JSONDecodeError, KeyError):
raise LangNameException("Error: Language not found.")
if __name__ == "__main__":
# Tool specification
# The 'name' argument is the name of the function
# Then we also define the expected arguments
tools = [
{
"type": "function",
"function": {
"name": "get_weather",
"parameters": {
"type": "object",
"properties": {
"location": {"type": "string"},
"unit": {"type": "string"},
},
"required": ["location", "unit"],
"additionalProperties": False,
},
},
}
        #TODO add the entry for `get_openlibrary_data`
]
client = Mistral(api_key=os.getenv("MISTRAL_API_KEY_P"))
    # Start with a system message that defines the assistant's behavior (helpful assistant).
    # It also asks the model to use 'get_weather' if the user question is about the weather
conversation = [{"role": "system", "content": "You are a helpful assistant. Use the 'get_weather' function if the user asks for weather information"}]
print("This is the beginning of your chat with AI. [To exit, send \"###\".]")
while True:
# Take user input
user_input = input("\nYou: ")
if user_input.strip() == "###":
print("Exiting chat. Goodbye!")
break
# Add user message to conversation
conversation.append({"role": "user", "content": user_input})
# Get assistant's response
try:
response = client.chat.complete(
model="mistral-small-latest",
messages=conversation,
tools=tools
)
# If the model detects that the question is about the weather
# or asking about books by an author, the response will contain
# a field named `tool_calls`. The field contains the information required
# to call the relevant function (location for `get_weather` or
# author and language for `get_openlibrary_data`).
# The model extracts this information dynamically from the user input.
# We create code to call the relevant function using as arguments the
# info extracted by the model, and provide our own custom response
tool_calls = response.choices[0].message.tool_calls
if tool_calls is not None:
if tool_calls[0].function.name == "get_weather":
                    # The API returns the function arguments as a string that parses as JSON
                    args = json.loads(tool_calls[0].function.arguments)
                    city = args["location"]
                    # The unit is optional (default is Celsius), so it may be missing from the JSON
                    temp_unit = args.get("unit", "c")
                    weather_infos = get_weather(city, temp_unit)
                    # Report the error if get_weather failed, otherwise the temperature
                    tool_response = weather_infos.get("error") or f"{city}: {weather_infos['temperature']} {temp_unit}."
assistant_response = tool_response
print(f"\nAssistant (weather): {assistant_response}")
#print(response.choices[0].message.tool_calls)
elif tool_calls[0].function.name == "get_openlibrary_data":
                    #TODO define the handling of the output of `get_openlibrary_data`
                    # and assign the resulting string to `tool_response`
tool_response = ""
assistant_response = tool_response
print(f"\nAssistant (openlibrary): {assistant_response}")
# If the model does not detect information for a function call in user input,
# it will provide a normal model completion
else:
assistant_response = response.choices[0].message.content
print(f"\nAssistant (no function call): {assistant_response}")
# You can print the complete response (not just the text content) for debugging
#print(f"\nAssistant: {response}")
# Add assistant's message to conversation
            # Note: this makes the prompt grow with every turn; you may want to trim it after a few dialogue turns
conversation.append({"role": "assistant", "content": assistant_response})
            # Trimmed here to the system message plus the last assistant reply
            # (earlier context is lost, but the prompt stays short)
conversation = [conversation[0]] + conversation[-1:]
except Exception as e:
print(f"An error occurred: {e}")
break
@@ -13,3 +13,39 @@
In this part of the TP we work with the [*function calling*](https://docs.mistral.ai/capabilities/function_calling/) feature of the Mistral API. This feature lets us define functions that can be triggered during an interactive dialogue with one of the models. When the text entered by the user contains information relevant to one of these functions, the model detects both the need to trigger the function and the values of its parameters; our client code then calls the function with those arguments.

The information returned by the function can be used in subsequent dialogue turns to answer new questions from the user. We thus combine the analysis and generation capabilities of an LLM with external information that could not have been available when the model was trained. For example, the model can have a function for requests about the current weather in a city: it detects the city and the scale (Celsius or Fahrenheit) and returns the current temperature. It can then, for instance, give advice on how to dress based on the weather information.
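In outline, this is what the scripts below do; here is a condensed sketch using a simplified weather tool ("Strasbourg" is just an illustrative query):

```python
import json
import os
from mistralai import Mistral

client = Mistral(api_key=os.getenv("MISTRAL_API_KEY"))

# A tool is declared as a JSON schema: the function's name and its parameters
tools = [{
    "type": "function",
    "function": {
        "name": "get_weather",
        "parameters": {
            "type": "object",
            "properties": {"location": {"type": "string"}},
            "required": ["location"],
        },
    },
}]

conversation = [{"role": "user", "content": "What's the weather in Strasbourg?"}]
response = client.chat.complete(model="mistral-small-latest",
                                messages=conversation, tools=tools)

tool_calls = response.choices[0].message.tool_calls
if tool_calls is not None:
    # The model returns the chosen function's name and its arguments as a JSON string
    args = json.loads(tool_calls[0].function.arguments)
    print(tool_calls[0].function.name, args)  # e.g. get_weather {'location': 'Strasbourg'}
```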
## Prerequisites

### Mistral API key

We will need a (free) key to use Mistral. To obtain one, sign up at [https://console.mistral.ai/](https://console.mistral.ai/) and choose the *Experimental* plan; this way, no payment information needs to be provided. Follow the instructions to create the key from the *API Keys* option in the left-hand menu.

After creating the key, you can add it as an environment variable, e.g. under a name like `MISTRAL_API_KEY`. You could also paste it directly into the script, but that is not good practice if the code is going to be shared.

The key must be provided when instantiating the Mistral client. For example, if the key is in an environment variable `MISTRAL_API_KEY`:
```python
client = Mistral(api_key=os.getenv("MISTRAL_API_KEY"))
```
### Weather API key

A free key is also needed for one of the two APIs used in the *function calling*:

- [Weather API](https://www.weatherapi.com/) to obtain weather information. The key is free and requires no payment information.

The second API ([Open Library](https://openlibrary.org/dev/docs/api/search)) does not require a key.
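For example, a minimal query to the Open Library search endpoint (the author and language values are purely illustrative; `lang` takes an ISO 639-2/B code such as `fre` for French):

```python
import requests

resp = requests.get("https://openlibrary.org/search.json",
                    params={"author": "Jules Verne", "lang": "fre"})
data = resp.json()
# Each entry in data["docs"] describes a work (title, author_name,
# first_publish_year, language, ...)
print(len(data["docs"]))
```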
## Task

The Mistral client in `assistant_mistral_a_completer.py` lets you interact freely with a Mistral model until the user types `###`. The model can also answer questions about the current weather in a city. For this it uses the `get_weather` function, which is already implemented in the script. `get_weather` calls the [WeatherAPI](https://www.weatherapi.com/) service to obtain the weather information.
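For reference, a successful call returns a small dictionary (hypothetical values; the actual numbers come from the live API):

```python
get_weather("Strasbourg", "c")
# -> {"location": "Strasbourg", "temperature": 12.0, "conditions": "partly cloudy"}
```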
The task consists of adding code that lets the assistant react to a user request exploiting information available on Open Library: retrieving the works by an author in a given language; a solution for this concrete task is available in `assistant_mistral.py`. The model will detect that the dialogue turn is a request for information about an author's works and will call `get_openlibrary_data` with the necessary parameters.

You can complete `assistant_mistral_a_completer.py` or write your own code from scratch.
If you work from the `assistant_mistral_a_completer.py` script, the following places need to be completed:

- The `get_openlibrary_data` function
- The entry for `get_openlibrary_data` in the list of tools defined in the `tools` variable (see the sketch after this list)
- The handling of the `get_openlibrary_data` response in the assistant's main loop: how to process the function's output so as to integrate its results into the conversation, once the model has detected the user's request and called the function
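As a starting point for the second item, the entry to add to the `tools` list mirrors the `get_weather` one; this is the shape used in the `assistant_mistral.py` solution:

```python
{
    "type": "function",
    "function": {
        "name": "get_openlibrary_data",
        "parameters": {
            "type": "object",
            "properties": {
                "author": {"type": "string"},
                "lang": {"type": "string"},
            },
            "required": ["author", "lang"],
            "additionalProperties": False,
        },
    },
}
```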