Interact with Vertex AI as you normally would. Our Instrumentor will automatically trace and send the telemetry data to our platform.
Copy
# Set up Vertex AI, declare a weather-lookup tool, and open a Gemini chat
# session with that tool attached. The instrumentor traces these SDK calls.
import vertexai
from vertexai.generative_models import FunctionDeclaration, GenerativeModel, Part, Tool

# Initialize the SDK. Replace "project_name" with your GCP project ID.
vertexai.init(
    project="project_name",
)

# Describe a function by specifying its schema (JsonSchema format).
get_current_weather_func = FunctionDeclaration(
    name="get_current_weather",
    description="Get the current weather in a given location",
    parameters={
        "type": "object",
        "properties": {
            "location": {
                "type": "string",
                "description": "The city and state, e.g. San Francisco, CA",
            },
            "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
        },
        # "location" is mandatory; "unit" is optional per the schema above.
        "required": ["location"],
    },
)

# Tool is a collection of related functions the model may call.
weather_tool = Tool(function_declarations=[get_current_weather_func])

# Start a chat session with the tool available to the model.
chat = GenerativeModel("gemini-1.5-flash", tools=[weather_tool]).start_chat()
if __name__ == "__main__":
    # Send a message to the model. The model will respond with a function call
    # (streamed in chunks) asking us to run get_current_weather.
    for response in chat.send_message(
        "What is the weather like in Boston?", stream=True
    ):
        print(response)

    # Then send a function response to the model. The model will use it
    # to compose its final natural-language answer.
    for response in chat.send_message(
        Part.from_function_response(
            name="get_current_weather",
            response={"content": {"weather": "super nice"}},
        ),
        stream=True,
    ):
        print(response)