Model Card for beyoru/Neeru

Model Details

Model Description

This model is fine-tuned for function-calling tasks and general conversation.

Training time: more than 1 hour

Steps: 200 (16)

Usage:

import json
from typing import Any, Dict, List

import torch  # required: torch.float16 is referenced below but was never imported
from transformers import AutoModelForCausalLM, AutoTokenizer

# Candidate checkpoints; the last entry is the one actually loaded.
models = [
    'beyoru/Neeru'
]
model_name = models[-1]
# Load the weights in half precision (fp16) to roughly halve memory use.
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    torch_dtype=torch.float16
)
tokenizer = AutoTokenizer.from_pretrained(model_name)


# System prompt template. The "{tools}" placeholder is filled with the
# JSON-serialized tool list by format_prompt() below; the doubled braces
# ("{{" / "}}") render as literal braces after str.format().
SYSTEM_PROMPT = """You are a helpful assistant with access to the following functions. Use them if required

You may call one or more functions to assist with the user query.

Before calling a function, ensure that all required parameters are provided. If any required parameter is missing, ask the user for the missing information before proceeding.

You are provided with function:

{tools}

For each function call, return a json object with function name and arguments:

[{{"name": "<function-name>", "arguments": <args-json-object>}}]
"""
# Per-tool text template (note: re-assigned identically later in the file).
TOOL_PROMPT = "{tool_text}"

# Tool schema: current-weather lookup (OpenAI-style function spec).
# FIX: JSON Schema's type name is "string", not Python's "str" — the
# original "str" here disagreed with the corrected duplicate defined
# later in this file and is not a valid JSON Schema type.
get_weather_api = {
    "type": "function",
    "function": {
        "name": "get_weather",
        "description": "Get the current weather for a location",
        "parameters": {
            "type": "object",
            "properties": {
                "location": {
                    "type": "string",
                    "description": "The city and state",
                },
                "unit": {
                    "type": "string",
                    "enum": ["celsius", "fahrenheit"],
                    "description": "The unit of temperature to return",
                },
            },
            "required": ["location"],
        },
    },
}


# Tool schema: web search (OpenAI-style function spec).
get_search_api = {
    "type": "function",
    "function": {
        "name": "get_search",
        "description": "Search the web for a query",
        "parameters": {
            "type": "object",
            "properties": {
                "query": {"type": "string", "description": "The search query"},
            },
            "required": ["query"],
        },
    },
}

# NOTE(review): exact duplicate of the TOOL_PROMPT assignment earlier in
# the file — harmless but redundant.
TOOL_PROMPT = "{tool_text}"

# Weather-lookup tool schema. This later assignment is the definition
# that is actually in effect when TOOLS is built below.
get_weather_api = {
    "type": "function",
    "function": {
        "name": "get_weather",
        "description": "Get the current weather for a location",
        "parameters": {
            "type": "object",
            "properties": {
                "location": {
                    "type": "string",
                    "description": "The city and state",
                },
                "unit": {
                    "type": "string",
                    "enum": ["celsius", "fahrenheit"],
                    "description": "The unit of temperature to return",
                },
            },
            "required": ["location"],
        },
    },
}

# Web-search tool schema. This later assignment is the definition that
# is actually in effect when TOOLS is built below.
get_search_api = {
    "type": "function",
    "function": {
        "name": "get_search",
        "description": "Search the web for a query",
        "parameters": {
            "type": "object",
            "properties": {
                "query": {
                    "type": "string",
                    "description": "The search query",
                },
            },
            "required": ["query"],
        },
    },
}

# Warehouse-info tool schema: one call returning warehouse metadata.
# Only warehouse_id, location, and status are required; the rest are
# optional descriptive fields.
get_warehouse_api = {
    "type": "function",
    "function": {
        "name": "get_warehouse_info",
        "description": "Get warehouse information based on multiple parameters",
        "parameters": {
            "type": "object",
            "properties": {
                "warehouse_id": {"type": "string", "description": "Unique identifier of the warehouse"},
                "location": {"type": "string", "description": "Location of the warehouse"},
                "status": {
                    "type": "string",
                    "enum": ["active", "inactive", "under maintenance"],
                    "description": "Operational status of the warehouse",
                },
                "capacity": {"type": "integer", "description": "Total storage capacity of the warehouse"},
                "current_stock": {"type": "integer", "description": "Current stock available in the warehouse"},
                "manager": {"type": "string", "description": "Name of the warehouse manager"},
                "contact": {"type": "string", "description": "Contact details of the warehouse"},
                "operating_hours": {"type": "string", "description": "Operating hours of the warehouse"},
            },
            "required": ["warehouse_id", "location", "status"],
        },
    },
}



# Tools exposed to the model; this list is serialized to JSON and
# injected into the system prompt by format_prompt() below.
TOOLS = [get_search_api, get_weather_api, get_warehouse_api]

def convert_tools(tools: List[Dict[str, Any]]) -> str:
    """Serialize the tool schemas to a pretty-printed JSON string."""
    serialized = json.dumps(tools, indent=2)
    return serialized


def format_prompt(tools: List[Dict[str, Any]]) -> str:
    """Return the system prompt with *tools* serialized into its {tools} slot.

    The original wrapped the already-formatted string in a redundant
    f-string (``f"{...}"``); that no-op wrapper is removed.
    """
    tool_text = convert_tools(tools)
    return SYSTEM_PROMPT.format(tools=tool_text)


# Render the system prompt once; it is reused on every conversation turn.
system_prompt = format_prompt(TOOLS)

# print(system_prompt)  # uncomment to inspect the rendered prompt

"""
Simple loop for conversations

"""

# Minimal REPL-style chat loop. Type "quit"/"exit" (or send EOF) to stop;
# the original loop could only be interrupted with Ctrl+C.
while True:
    try:
        user_input = input("User: ")
    except EOFError:
        break
    if user_input.strip().lower() in {"quit", "exit"}:
        break

    # Each turn starts a fresh conversation; append prior turns to this
    # list to keep conversation history.
    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": user_input},
    ]

    inputs = tokenizer.apply_chat_template(
        messages, add_generation_prompt=True, return_tensors="pt"
    ).to(model.device)  # keep input ids on the same device as the model

    # do_sample=True is required for top_p/temperature to take effect;
    # without it generation is greedy and those knobs are silently ignored.
    outputs = model.generate(
        inputs,
        max_new_tokens=2048,
        do_sample=True,
        top_p=0.95,
        temperature=0.7,
    )

    # Decode only the newly generated tokens (skip the prompt prefix).
    response_text = tokenizer.decode(
        outputs[0][inputs.shape[-1]:], skip_special_tokens=True
    )

    print("Assistant:", response_text)

Config

Target modules: all linear layers
r = 64
alpha = 512
Downloads last month
240
Inference Providers NEW
Input a message to start chatting with beyoru/Neeru.

Model tree for beyoru/Neeru

Base model

Qwen/Qwen2.5-3B
Finetuned
(150)
this model

Collection including beyoru/Neeru