db=db)

agent_kwargs = {
    "system_message": SystemMessage(content="You are an expert SQL data analyst.")
}
llm = ChatOpenAI(temperature=0, model="gpt-3.5-turbo-0613")
agent = initialize_agent(
    toolkit.get_tools(),
    llm,
    agent=AgentType.OPENAI_FUNCTIONS,
    verbose=True,
    agent...
Some OpenAI models and Gemini do not, and offer a generous free tier for their APIs.

Create a new project

The easiest way to get started with Mastra is by using create-mastra. This CLI tool enables you to quickly start building a new Mastra application, with everything set up for you. ...
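For orientation, a scaffolded Mastra project centers on an agent definition. The sketch below shows the general shape, assuming the `@mastra/core` package and an AI SDK model provider such as `@ai-sdk/openai`; the agent name and instructions are placeholders, and the scaffold itself is typically created with something like `npx create-mastra@latest`.

```ts
// Minimal sketch of a Mastra agent definition (import paths and options
// follow Mastra's documented API; the name and instructions are placeholders).
import { Agent } from '@mastra/core/agent'
import { openai } from '@ai-sdk/openai'

export const analystAgent = new Agent({
  name: 'analyst-agent',
  instructions: 'You are an expert SQL data analyst.',
  model: openai('gpt-4o-mini'), // any AI SDK provider/model can be swapped in
})
```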
# Required
export OPENSERV_API_KEY=your_api_key_here

# Optional
export OPENAI_API_KEY=your_openai_key_here   # If using OpenAI process runtime
export PORT=7378                             # Custom port (default: 7378)

Initialize Your Agent

import { Agent } from '@openserv-labs/sdk'
import { z } from 'zod'

const...
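The snippet is cut off at the agent definition. A minimal, self-contained sketch of how an OpenServ agent is typically initialized follows; the system prompt, capability name, and handler body are illustrative assumptions rather than text from the original source.

```ts
import { Agent } from '@openserv-labs/sdk'
import { z } from 'zod'

// Create the agent (the SDK reads OPENSERV_API_KEY from the environment).
const agent = new Agent({
  systemPrompt: 'You are an agent that sums numbers.' // placeholder prompt
})

// Register a capability the platform can invoke; arguments are validated with zod.
agent.addCapability({
  name: 'sum',
  description: 'Sums two numbers',
  schema: z.object({
    a: z.number(),
    b: z.number()
  }),
  async run({ args }) {
    return String(args.a + args.b)
  }
})

// Start the agent's HTTP server (PORT, default 7378).
agent.start()
```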
opts);
  }
}

// See sample OpenAIController included in the code base for inspiration
// or extend and override it as you see fit
export class MyAIController implements IAIController {
  public async getResponse(
    messages: any[],
    definitions: any[],
    fnCall = "auto"
  ) {
    // send previous ...
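The method body is truncated above. One plausible way to fill in getResponse, sketched with the official openai Node SDK and the legacy functions / function_call parameters that the fnCall argument suggests; the IAIController shape is restated here only so the example stands alone, and the model name is a placeholder.

```ts
import OpenAI from "openai";

// Reconstructed only so this sketch compiles on its own; in the real code base
// you would implement the IAIController interface it already ships with.
interface IAIController {
  getResponse(messages: any[], definitions: any[], fnCall?: string): Promise<any>;
}

export class SketchAIController implements IAIController {
  private client = new OpenAI(); // reads OPENAI_API_KEY from the environment

  public async getResponse(
    messages: any[],
    definitions: any[],
    fnCall = "auto"
  ) {
    // Send the previous conversation plus the available function definitions
    // and let the model decide whether to answer or request a function call.
    const completion = await this.client.chat.completions.create({
      model: "gpt-3.5-turbo-0613",
      messages,
      functions: definitions,
      function_call: fnCall as any,
    });
    return completion.choices[0].message;
  }
}
```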
Or you can use these clients as LLM-based tools, where the LLM decides when and how to invoke the underlying functions for you. This works across all of the major AI SDKs via adapters. Here's an example using Vercel's AI SDK:

// sdk-specific imports
import { openai } from '@ai-sdk/openai'
import...
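Since the example is cut off, here is a self-contained sketch of the same pattern using the AI SDK's generateText (v4-style API): a tool is described to the model, and the model decides when to call it. The getWeather tool is a made-up stand-in for the adapter-provided tools in the original.

```ts
import { openai } from '@ai-sdk/openai'
import { generateText, tool } from 'ai'
import { z } from 'zod'

async function main() {
  const { text } = await generateText({
    model: openai('gpt-4o-mini'),
    tools: {
      // Stand-in tool; with an adapter, these entries would wrap the
      // client's underlying functions so the LLM can invoke them.
      getWeather: tool({
        description: 'Get the weather for a city',
        parameters: z.object({ city: z.string() }),
        execute: async ({ city }) => `It is sunny in ${city}.`,
      }),
    },
    maxSteps: 2, // let the model respond after seeing the tool result
    prompt: 'What is the weather in Paris?',
  })
  console.log(text)
}

main()
```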