// - Create functions that might be useful to an LLM. Math is a good example because LLMs are bad at math.
// - Do an LLM call and let Gemini know about the available tools.
import { GoogleGenAI, type FunctionDeclaration } from '@google/genai';
/**
 * Returns a human-readable sum of two numbers.
 *
 * Backs the `sum` tool advertised to Gemini; the model requests this
 * call instead of doing the arithmetic itself.
 *
 * @param a - First addend.
 * @param b - Second addend.
 * @returns A sentence of the form `Sum is <a + b>`.
 */
function calculateSum(a: number, b: number): string {
  return `Sum is ${a + b}`;
}
// Gemini SDK client. NOTE(review): 'API_KEY' is a hardcoded placeholder —
// load the real key from an env var (e.g. process.env.GEMINI_API_KEY)
// rather than committing a credential; verify before shipping.
const ai = new GoogleGenAI({
apiKey: 'API_KEY',
});
// Tool declaration advertised to the model: a `sum` function taking two
// required numeric arguments. The JSON Schema tells Gemini how to shape
// the arguments it sends back in a function call.
const sumFunctionCall: FunctionDeclaration = {
  name: 'sum',
  description: 'This tool is used to find the sum of 2 numbers',
  parametersJsonSchema: {
    type: 'object',
    properties: {
      a: { type: 'number' },
      b: { type: 'number' },
    },
    required: ['a', 'b'],
  },
};
// First turn: ask Gemini the question while advertising the `sum` tool,
// so the model can request a function call instead of guessing at math.
const response = await ai.models.generateContent({
  model: 'gemini-2.0-flash',
  contents: 'Whats the sum of 2 and 3',
  config: {
    tools: [{ functionDeclarations: [sumFunctionCall] }],
  },
});
if (!response) {
  // NOTE(review): generateContent is expected to throw on failure rather
  // than return a falsy value, so this is belt-and-braces — but if it does
  // trip, log and exit non-zero so the failure is visible to the shell
  // instead of silently exiting with status 0.
  console.error('No response received from Gemini');
  process.exit(1);
}
// If the model asked for tool calls, execute each one locally and send the
// results back in a second turn so the model can compose the final answer.
// Otherwise the model answered directly and we just print its text.
const functionCalls = response.functionCalls;
if (functionCalls && functionCalls.length > 0) {
  console.log(`Processing ${functionCalls.length} function calls:`);
  // One functionResponse part per requested call, echoed back to the model.
  const functionResponses: {
    functionResponse: { name: string | undefined; response: { result: string } };
  }[] = [];
  for (const call of functionCalls) {
    console.log(`Executing: ${call.name} with args:`, call.args);
    let functionResult: string;
    switch (call.name) {
      case 'sum':
        // Coerce explicitly: the SDK types tool arguments loosely, and the
        // schema promised numbers.
        functionResult = calculateSum(Number(call?.args?.a), Number(call?.args?.b));
        break;
      default:
        functionResult = 'Unknown function';
    }
    functionResponses.push({
      functionResponse: {
        name: call.name,
        response: { result: functionResult },
      },
    });
  }
  // The model's tool-call turn must be replayed in the follow-up request.
  // `candidates` can be absent on a blocked/empty response, so guard
  // instead of indexing unconditionally.
  const modelTurn = response.candidates?.[0]?.content;
  const finalResult = await ai.models.generateContent({
    model: 'gemini-2.0-flash',
    contents: [
      {
        role: 'user',
        parts: [{ text: 'Whats the sum of 2 and 3' }],
      },
      ...(modelTurn ? [modelTurn] : []),
      { role: 'user', parts: functionResponses },
    ],
  });
  console.log(finalResult.text);
} else {
  console.log('Direct response:', response.text);
}