Agentic AI Sample with Gemini and PhiData
This example runs directly in Google Colab and uses Gemini through Phidata.
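Before running the snippet below, the Colab notebook needs the Phidata package, the Gemini client library, and a Google API key. This is a minimal setup sketch; the package names and the use of Colab's Secrets panel for GOOGLE_API_KEY are assumptions you may need to adapt.

# Install dependencies in a Colab cell (package names assumed; pin versions as needed)
!pip install -q phidata google-generativeai

import os
from google.colab import userdata  # assumes the key is stored in Colab's Secrets panel

# Phidata's Gemini model reads the key from the GOOGLE_API_KEY environment variable
os.environ["GOOGLE_API_KEY"] = userdata.get("GOOGLE_API_KEY")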
from phi.agent import Agent, RunResponse
from phi.model.google import Gemini

agent = Agent(
    model=Gemini(id="gemini-1.5-flash"),
    show_tool_calls=True,
    markdown=True,
)

# Get the response in a variable
# run: RunResponse = agent.run("Share a 2 sentence horror story.")
# print(run.content)

# Print the response in the terminal
agent.print_response("Share a 2 sentence horror story.")
The web-search example below also runs directly in Google Colab.
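In addition to the packages from the first example, this agent needs the DuckDuckGo search tool dependency, and since GeminiOpenAIChat talks to Gemini through the OpenAI-compatible interface it also needs the openai client library. The exact package names below are assumptions; adjust them if your environment differs.

# Install extra dependencies for the web agent (package names assumed)
!pip install -q duckduckgo-search openai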
from phi.agent import Agent
from phi.model.openai import OpenAIChat
from phi.model.google import GeminiOpenAIChat
from phi.tools.duckduckgo import DuckDuckGo

web_agent = Agent(
    name="Web Agent",
    # model=OpenAIChat(id="gpt-4o"),
    model=GeminiOpenAIChat(id="gemini-1.5-flash"),
    tools=[DuckDuckGo()],
    instructions=["Always include sources"],
    show_tool_calls=True,
    markdown=True,
)

web_agent.print_response("Tell me about OpenAI Sora?", stream=True)
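If you want the answer as a value instead of printing it, the same run() pattern shown in the comments of the first example should work here as well; this is just a sketch assuming the web agent is configured as above.

# Capture the response in a variable (mirrors the commented pattern in the first example)
run = web_agent.run("Tell me about OpenAI Sora?")
print(run.content)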