@@ -191,14 +191,12 @@ def init_chat_model(
 configurable_model = init_chat_model(temperature=0)

 configurable_model.invoke(
-    "what's your name",
-    config={"configurable": {"model": "gpt-4o"}}
+    "what's your name", config={"configurable": {"model": "gpt-4o"}}
 )
 # GPT-4o response

 configurable_model.invoke(
-    "what's your name",
-    config={"configurable": {"model": "claude-3-5-sonnet-latest"}}
+    "what's your name", config={"configurable": {"model": "claude-3-5-sonnet-latest"}}
 )
 # claude-3.5 sonnet response

@@ -213,7 +211,7 @@ def init_chat_model(
     "openai:gpt-4o",
     configurable_fields="any",  # this allows us to configure other params like temperature, max_tokens, etc at runtime.
     config_prefix="foo",
-    temperature=0
+    temperature=0,
 )

 configurable_model_with_default.invoke("what's your name")
@@ -224,9 +222,9 @@ def init_chat_model(
     config={
         "configurable": {
             "foo_model": "anthropic:claude-3-5-sonnet-latest",
-            "foo_temperature": 0.6
+            "foo_temperature": 0.6,
         }
-    }
+    },
 )
 # Claude-3.5 sonnet response with temperature 0.6

@@ -241,31 +239,34 @@ def init_chat_model(
 from langchain.chat_models import init_chat_model
 from pydantic import BaseModel, Field

+
 class GetWeather(BaseModel):
     '''Get the current weather in a given location'''

     location: str = Field(..., description="The city and state, e.g. San Francisco, CA")

+
 class GetPopulation(BaseModel):
     '''Get the current population in a given location'''

     location: str = Field(..., description="The city and state, e.g. San Francisco, CA")

+
 configurable_model = init_chat_model(
-    "gpt-4o",
-    configurable_fields=("model", "model_provider"),
-    temperature=0
+    "gpt-4o", configurable_fields=("model", "model_provider"), temperature=0
 )

-configurable_model_with_tools = configurable_model.bind_tools([GetWeather, GetPopulation])
+configurable_model_with_tools = configurable_model.bind_tools(
+    [GetWeather, GetPopulation]
+)
 configurable_model_with_tools.invoke(
     "Which city is hotter today and which is bigger: LA or NY?"
 )
 # GPT-4o response with tool calls

 configurable_model_with_tools.invoke(
     "Which city is hotter today and which is bigger: LA or NY?",
-    config={"configurable": {"model": "claude-3-5-sonnet-latest"}}
+    config={"configurable": {"model": "claude-3-5-sonnet-latest"}},
 )
 # Claude-3.5 sonnet response with tools

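For reference, the prefixed-configuration example touched in the second and third hunks corresponds to a complete snippet roughly like the one below. This is a sketch assembled from the diff, not the verbatim docstring, and it assumes langchain plus the langchain-openai and langchain-anthropic integration packages are installed with API keys set in the environment.

# Sketch assembled from the diff above (assumed environment: langchain,
# langchain-openai, langchain-anthropic installed; API keys configured).
from langchain.chat_models import init_chat_model

configurable_model_with_default = init_chat_model(
    "openai:gpt-4o",
    configurable_fields="any",  # allow temperature, max_tokens, etc. to be overridden at runtime
    config_prefix="foo",
    temperature=0,
)

# Uses the declared default model (gpt-4o) with temperature 0.
configurable_model_with_default.invoke("what's your name")

# Overrides model and temperature at runtime via the "foo"-prefixed config keys.
configurable_model_with_default.invoke(
    "what's your name",
    config={
        "configurable": {
            "foo_model": "anthropic:claude-3-5-sonnet-latest",
            "foo_temperature": 0.6,
        }
    },
)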