import subprocess, sys

# Quietly make sure the google-genai SDK is installed before we import it below.
_install_cmd = [sys.executable, "-m", "pip", "install", "-qU", "google-genai"]
subprocess.check_call(_install_cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
import getpass, json, textwrap, os, time
from google import genai
from google.genai import types  # fixed: was imported under a mangled name; code below uses `types.*`

# Prompt once for the API key if it isn't already set in the environment.
if "GOOGLE_API_KEY" not in os.environ:
    os.environ["GOOGLE_API_KEY"] = getpass.getpass("Enter your Gemini API key: ")

# fixed: the SDK entry point is genai.Client (genai.Consumer does not exist).
client = genai.Client(api_key=os.environ["GOOGLE_API_KEY"])

# Model used for the combined built-in-tool + custom-function demo, and a
# second model id kept for a later maps demo (not used in this chunk).
TOOL_COMBO_MODEL = "gemini-3-flash-preview"
MAPS_MODEL = "gemini-2.5-flash"
DIVIDER = "=" * 72


def heading(title: str):
    """Print *title* as a section banner framed by divider lines."""
    # fixed: the leading newline escape had been mangled to a literal "n".
    print(f"\n{DIVIDER}")
    print(f"  {title}")
    print(DIVIDER)
def wrap(text: str, width: int = 80):
    """Print *text* word-wrapped to *width* columns, preserving blank lines.

    Args:
        text: The (possibly multi-line) string to print.
        width: Maximum line width passed to textwrap.fill.
    """
    # fixed: the parameter name had been mangled into invalid syntax.
    for line in text.splitlines():
        # Blank lines would be dropped by textwrap.fill, so print them as-is.
        print(textwrap.fill(line, width=width) if line.strip() else "")
def describe_parts(response):
    """Pretty-print every part of the first candidate's content.

    Walks ``response.candidates[0].content.parts`` and prints a one-line
    summary per part kind (tool calls/responses, executable code and its
    result, custom function calls, text, thought signatures).

    Args:
        response: A GenerateContentResponse-like object with at least one
            candidate. Only the first candidate is inspected.

    Returns:
        dict mapping function-call name -> function-call id, so the caller
        can attach the matching id to a later FunctionResponse.
    """
    # fixed throughout: attribute names, the "{i:02d}" format spec, str.replace,
    # and "\n" escapes had all been mangled by a bad machine translation.
    parts = response.candidates[0].content.parts
    fc_ids = {}
    for i, part in enumerate(parts):
        prefix = f"  Part {i:02d}:"
        if hasattr(part, "tool_call") and part.tool_call:
            tc = part.tool_call
            print(f"{prefix} [toolCall] type={tc.tool_type} id={tc.id}")
        if hasattr(part, "tool_response") and part.tool_response:
            tr = part.tool_response
            print(f"{prefix} [toolResponse] type={tr.tool_type} id={tr.id}")
        if hasattr(part, "executable_code") and part.executable_code:
            code = part.executable_code.code[:90].replace("\n", " ↵ ")
            print(f"{prefix} [executableCode] {code}...")
        if hasattr(part, "code_execution_result") and part.code_execution_result:
            out = (part.code_execution_result.output or "")[:90]
            print(f"{prefix} [codeExecResult] {out}")
        if hasattr(part, "function_call") and part.function_call:
            fc = part.function_call
            fc_ids[fc.name] = fc.id
            print(f"{prefix} [functionCall] name={fc.name} id={fc.id}")
            print(f"          └─ args: {dict(fc.args)}")
        if hasattr(part, "text") and part.text:
            snippet = part.text[:110].replace("\n", " ")
            print(f"{prefix} [text] {snippet}...")
        if hasattr(part, "thought_signature") and part.thought_signature:
            print(f"          └─ thought_signature present ✓")
    return fc_ids
# Section banner and explanatory preamble for Demo 1.
heading("DEMO 1: Combine Google Search + Custom Function in One Request")
print("""
This demo shows the flagship new feature: passing BOTH a built-in tool
(Google Search) and a custom function declaration in a single API call.
Gemini will:
  Turn 1 → Search the web for real-time facts, then request our custom
           function to get weather data.
  Turn 2 → We supply the function response; Gemini synthesizes everything.
Key points:
  • google_search and function_declarations go in the SAME Tool object
  • include_server_side_tool_invocations must be True (on ToolConfig)
  • Return ALL parts (incl. thought_signatures) in subsequent turns
""")
# Custom function declaration the model may call alongside the built-in
# Google Search tool.
get_weather_func = types.FunctionDeclaration(
    name="getWeather",
    description="Gets the current weather for a requested city.",
    parameters=types.Schema(
        type="OBJECT",
        properties={
            # fixed: property key must match the name listed in `required`
            # (it was "metropolis" vs required=["city"]).
            "city": types.Schema(
                type="STRING",
                description="The city and state, e.g. Utqiagvik, Alaska",
            ),
        },
        required=["city"],
    ),
)
print("▶ Turn 1: Sending prompt with Google Search + getWeather tools...\n")

# One request carrying BOTH a built-in tool (google_search) and our custom
# function declaration in the same Tool object.
response_1 = client.models.generate_content(
    model=TOOL_COMBO_MODEL,
    contents=(
        "What is the northernmost city in the United States? "
        "What's the weather like there today?"
    ),
    config=types.GenerateContentConfig(
        tools=[
            types.Tool(
                google_search=types.GoogleSearch(),
                function_declarations=[get_weather_func],
            ),
        ],
        tool_config=types.ToolConfig(
            # Required so server-side tool invocation parts are surfaced
            # in the response and can be echoed back next turn.
            include_server_side_tool_invocations=True,
        ),
    ),
)

print("  Parts returned by the model:\n")
fc_ids = describe_parts(response_1)
# Keep the id of the getWeather call so our FunctionResponse can reference it.
function_call_id = fc_ids.get("getWeather")
print(f"\n  ✅ Captured function_call id for getWeather: {function_call_id}")
print("\n▶ Turn 2: Returning function result & requesting final synthesis...\n")

# Full conversation history for turn 2:
#   1. the original user prompt,
#   2. the model's COMPLETE turn-1 content (all parts, incl. thought
#      signatures — required for tool-use continuations),
#   3. our function response, matched to the captured call id.
history = [
    types.Content(
        role="user",
        parts=[
            types.Part(
                text=(
                    "What is the northernmost city in the United States? "
                    "What's the weather like there today?"
                )
            )
        ],
    ),
    response_1.candidates[0].content,
    types.Content(
        role="user",
        parts=[
            types.Part(
                function_response=types.FunctionResponse(
                    name="getWeather",
                    response={"response": "Very cold. 22°F / -5.5°C with strong Arctic winds."},
                    id=function_call_id,
                )
            )
        ],
    ),
]
# Turn 2: send the full history back with the SAME tool configuration so
# the model can synthesize search results + our function output.
response_2 = client.models.generate_content(
    model=TOOL_COMBO_MODEL,
    contents=history,
    config=types.GenerateContentConfig(
        tools=[
            types.Tool(
                google_search=types.GoogleSearch(),
                function_declarations=[get_weather_func],
            ),
        ],
        tool_config=types.ToolConfig(
            include_server_side_tool_invocations=True,
        ),
    ),
)

print("  ✅ Final synthesized response:\n")
for part in response_2.candidates[0].content.parts:
    if hasattr(part, "text") and part.text:
        wrap(part.text)
# End of Demo 1. (Removed: injected advertisement text that was not part of
# the script and broke Python syntax.)