From 35b5906eb99e6e24f66a39c673a5d7128e643962 Mon Sep 17 00:00:00 2001
From: Gaurav Goyal
Date: Sun, 21 Dec 2025 18:03:23 +0530
Subject: [PATCH] Update model in ChatGroq initialization

---
 src/oss/python/integrations/chat/groq.mdx | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/oss/python/integrations/chat/groq.mdx b/src/oss/python/integrations/chat/groq.mdx
index 11dc8f726a..a86f4eca61 100644
--- a/src/oss/python/integrations/chat/groq.mdx
+++ b/src/oss/python/integrations/chat/groq.mdx
@@ -76,7 +76,7 @@ If you choose to set a `reasoning_format`, you must ensure that the model you ar
 from langchain_groq import ChatGroq
 
 llm = ChatGroq(
-    model="deepseek-r1-distill-llama-70b",
+    model="qwen/qwen3-32b",
     temperature=0,
     max_tokens=None,
     reasoning_format="parsed",
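
For context, a minimal runnable sketch of how the updated `groq.mdx` snippet could be exercised after this change. The `ChatGroq(...)` initialization mirrors the patched lines; the API-key prompt, the `.invoke()` call, and the `additional_kwargs` lookup are illustrative assumptions, not part of the patch itself.

```python
import getpass
import os

from langchain_groq import ChatGroq

# ChatGroq reads the API key from the GROQ_API_KEY environment variable
# (setup shown here as an assumption for a self-contained example).
if "GROQ_API_KEY" not in os.environ:
    os.environ["GROQ_API_KEY"] = getpass.getpass("Enter your Groq API key: ")

# Initialization as it appears after the patch: the model is switched to
# "qwen/qwen3-32b" so that reasoning_format="parsed" is supported.
llm = ChatGroq(
    model="qwen/qwen3-32b",
    temperature=0,
    max_tokens=None,
    reasoning_format="parsed",
)

# Illustrative usage (assumption, not part of the patch): with "parsed",
# the reasoning is expected in additional_kwargs rather than inline in content.
msg = llm.invoke("What is 17 * 24?")
print(msg.content)
print(msg.additional_kwargs.get("reasoning_content"))
```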