Mistral AI API


A minimal example of calling the Mistral chat completion API with the official Python client:

```python
from mistralai.client import MistralClient
from mistralai.models.chat_completion import ChatMessage
import os

api_key = os.environ["MISTRAL_API_KEY"]
model = "mistral-small"  # use "mistral-tiny" for Mistral-7B-v0.2

client = MistralClient(api_key=api_key)

messages = [
    ChatMessage(role="user", content="Give me a meal plan for today")
]
```

No streaming:

```python
chat_response = client.chat(
    model=model,
    messages=messages,
)
print(chat_response.choices[0].message.content)
```
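
To steer the answer, you can prepend a system message to the same list before calling `client.chat`. A minimal sketch reusing the client and model above; the `"system"` role string and the prompt wording are assumptions, they do not appear in the original example:

```python
# Hypothetical variation: a system message constrains every reply in the conversation.
messages = [
    ChatMessage(role="system", content="You are a nutritionist. Keep all meals vegetarian."),
    ChatMessage(role="user", content="Give me a meal plan for today"),
]
chat_response = client.chat(model=model, messages=messages)
print(chat_response.choices[0].message.content)
```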

With streaming:

```python
for chunk in client.chat_stream(model=model, messages=messages):
    if chunk.choices[0].delta.content:
        print(chunk.choices[0].delta.content, end="")
```
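
The API is stateless, so a follow-up question has to resend the whole conversation. A minimal sketch of a second turn that appends the first reply as an `"assistant"` message; that role string and the follow-up prompt are assumptions, not part of the original post:

```python
# Hypothetical follow-up turn: carry the previous answer in the message list.
messages.append(
    ChatMessage(role="assistant", content=chat_response.choices[0].message.content)
)
messages.append(ChatMessage(role="user", content="Swap the dinner for something lighter"))

follow_up = client.chat(model=model, messages=messages)
print(follow_up.choices[0].message.content)
```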

The same calls can be wrapped in a small Gradio chatbot UI:

```python
import gradio as gr
from mistralai.client import MistralClient
from mistralai.models.chat_completion import ChatMessage
import os

def chat_with_mistral(user_input):
    api_key = os.environ["MISTRAL_API_KEY"]
    model = "mistral-small"  # use "mistral-tiny" for Mistral-7B-v0.2

    client = MistralClient(api_key=api_key)
    messages = [ChatMessage(role="user", content=user_input)]

    chat_response = client.chat(model=model, messages=messages)
    return chat_response.choices[0].message.content

iface = gr.Interface(
    fn=chat_with_mistral,
    inputs=gr.components.Textbox(label="Enter Your Message"),
    outputs=gr.components.Markdown(label="Chatbot Response"),
    title="Mistral AI Chatbot",
    description="Interact with the Mistral API via this chatbot. Enter a message and get a response.",
    examples=[["Give me a meal plan for today"]],
    allow_flagging="never"
)

iface.launch()
```
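
Gradio can also render output incrementally when the wrapped function is a generator, which pairs naturally with `client.chat_stream` from above. A minimal sketch, assuming Gradio's generator-based (iterative) output support; the function name and the title string are placeholders:

```python
import gradio as gr
from mistralai.client import MistralClient
from mistralai.models.chat_completion import ChatMessage
import os

def stream_with_mistral(user_input):
    # Same setup as before, but yield the growing reply instead of returning it once.
    client = MistralClient(api_key=os.environ["MISTRAL_API_KEY"])
    messages = [ChatMessage(role="user", content=user_input)]

    partial = ""
    for chunk in client.chat_stream(model="mistral-small", messages=messages):
        if chunk.choices[0].delta.content:
            partial += chunk.choices[0].delta.content
            yield partial  # Gradio re-renders the Markdown output on each yield

stream_iface = gr.Interface(
    fn=stream_with_mistral,
    inputs=gr.components.Textbox(label="Enter Your Message"),
    outputs=gr.components.Markdown(label="Chatbot Response"),
    title="Mistral AI Chatbot (streaming)",
    allow_flagging="never"
)

stream_iface.launch()
```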
