#vllm serve "/root/llama_3.1_8b_function_calling/checkpoint-669" --chat-template /root/template_chatml.jinja
"""Smoke-test a fine-tuned function-calling Llama 3.1 8B checkpoint served
by vLLM's OpenAI-compatible server.

Launch the server first (see the command in the header comment above), then
run this script; it sends a single user message and prints the raw response.
"""
from openai import OpenAI

# vLLM's OpenAI-compatible endpoint does not validate API keys, but the
# OpenAI client requires a non-empty value, hence the "EMPTY" placeholder.
openai_api_key = "EMPTY"
openai_api_base = "http://localhost:8000/v1"

# vLLM registers the model under the checkpoint path it was launched with,
# so the same path must be passed as the `model` argument below.
MODEL = "/root/llama_3.1_8b_function_calling/checkpoint-669"

client = OpenAI(
    api_key=openai_api_key,
    base_url=openai_api_base,
)

# NOTE(review): this checkpoint was fine-tuned for function calling via a
# system prompt that lists tool signatures; here we send a plain user
# message only, to check ordinary chat behavior without any tools.
chat_response = client.chat.completions.create(
    model=MODEL,
    messages=[
        {"role": "user", "content": "Who's the president of the France?"},
    ],
)
print("Chat response:", chat_response)