Since Cohere supports a variety of date-stamped models, we explicitly list the latest models but
allow any name in the type hints.
See Cohere's docs for a list of all available models.
ALL FIELDS MUST BE cohere_ PREFIXED SO YOU CAN MERGE THEM WITH OTHER MODELS.
Source code in pydantic_ai_slim/pydantic_ai/models/cohere.py
```python
class CohereModelSettings(ModelSettings, total=False):
    """Settings used for a Cohere model request.

    ALL FIELDS MUST BE `cohere_` PREFIXED SO YOU CAN MERGE THEM WITH OTHER MODELS.
    """
```
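For reference, a minimal sketch of how these settings flow through an agent run. The model string, the setting values, and the assumption that a Cohere API key is already configured in the environment (typically `CO_API_KEY`) are illustrative, not part of this module:

```python
from pydantic_ai import Agent

# Example only: the generic ModelSettings fields (temperature, max_tokens, ...)
# apply to Cohere requests; any Cohere-specific fields would carry the `cohere_` prefix.
agent = Agent('cohere:command-r-plus')

result = agent.run_sync(
    'What is the capital of France?',
    model_settings={'temperature': 0.0, 'max_tokens': 256},
)
print(result.output)  # `.output` on recent releases; older versions expose `.data`
```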
```python
@dataclass(init=False)
class CohereModel(Model):
    """A model that uses the Cohere API.

    Internally, this uses the [Cohere Python client](
    https://github.com/cohere-ai/cohere-python) to interact with the API.

    Apart from `__init__`, all methods are private or match those of the base class.
    """

    client: AsyncClientV2 = field(repr=False)

    _model_name: CohereModelName = field(repr=False)
    _system: str = field(default='cohere', repr=False)

    def __init__(
        self,
        model_name: CohereModelName,
        *,
        provider: Literal['cohere'] | Provider[AsyncClientV2] = 'cohere',
        profile: ModelProfileSpec | None = None,
    ):
        """Initialize a Cohere model.

        Args:
            model_name: The name of the Cohere model to use. List of model names
                available [here](https://docs.cohere.com/docs/models#command).
            provider: The provider to use for authentication and API access. Can be either the string
                'cohere' or an instance of `Provider[AsyncClientV2]`. If not provided, a new provider will be
                created using the other parameters.
            profile: The model profile to use. Defaults to a profile picked by the provider based on the model name.
        """
        self._model_name = model_name

        if isinstance(provider, str):
            provider = infer_provider(provider)
        self.client = provider.client

        self._profile = profile or provider.model_profile

    @property
    def base_url(self) -> str:
        client_wrapper = self.client._client_wrapper  # type: ignore
        return str(client_wrapper.get_base_url())

    async def request(
        self,
        messages: list[ModelMessage],
        model_settings: ModelSettings | None,
        model_request_parameters: ModelRequestParameters,
    ) -> ModelResponse:
        check_allow_model_requests()
        response = await self._chat(messages, cast(CohereModelSettings, model_settings or {}), model_request_parameters)
        model_response = self._process_response(response)
        model_response.usage.requests = 1
        return model_response

    @property
    def model_name(self) -> CohereModelName:
        """The model name."""
        return self._model_name

    @property
    def system(self) -> str:
        """The system / model provider."""
        return self._system

    async def _chat(
        self,
        messages: list[ModelMessage],
        model_settings: CohereModelSettings,
        model_request_parameters: ModelRequestParameters,
    ) -> ChatResponse:
        tools = self._get_tools(model_request_parameters)
        cohere_messages = self._map_messages(messages)
        try:
            return await self.client.chat(
                model=self._model_name,
                messages=cohere_messages,
                tools=tools or OMIT,
                max_tokens=model_settings.get('max_tokens', OMIT),
                stop_sequences=model_settings.get('stop_sequences', OMIT),
                temperature=model_settings.get('temperature', OMIT),
                p=model_settings.get('top_p', OMIT),
                seed=model_settings.get('seed', OMIT),
                presence_penalty=model_settings.get('presence_penalty', OMIT),
                frequency_penalty=model_settings.get('frequency_penalty', OMIT),
            )
        except ApiError as e:
            if (status_code := e.status_code) and status_code >= 400:
                raise ModelHTTPError(status_code=status_code, model_name=self.model_name, body=e.body) from e
            raise  # pragma: lax no cover

    def _process_response(self, response: ChatResponse) -> ModelResponse:
        """Process a non-streamed response, and prepare a message to return."""
        parts: list[ModelResponsePart] = []
        if response.message.content is not None and len(response.message.content) > 0:
            # While Cohere's API returns a list, it only does that for future proofing
            # and currently only one item is being returned.
            choice = response.message.content[0]
            parts.extend(split_content_into_text_and_thinking(choice.text))
        for c in response.message.tool_calls or []:
            if c.function and c.function.name and c.function.arguments:  # pragma: no branch
                parts.append(
                    ToolCallPart(
                        tool_name=c.function.name,
                        args=c.function.arguments,
                        tool_call_id=c.id or _generate_tool_call_id(),
                    )
                )
        return ModelResponse(parts=parts, usage=_map_usage(response), model_name=self._model_name)

    def _map_messages(self, messages: list[ModelMessage]) -> list[ChatMessageV2]:
        """Just maps a `pydantic_ai.Message` to a `cohere.ChatMessageV2`."""
        cohere_messages: list[ChatMessageV2] = []
        for message in messages:
            if isinstance(message, ModelRequest):
                cohere_messages.extend(self._map_user_message(message))
            elif isinstance(message, ModelResponse):
                texts: list[str] = []
                tool_calls: list[ToolCallV2] = []
                for item in message.parts:
                    if isinstance(item, TextPart):
                        texts.append(item.content)
                    elif isinstance(item, ThinkingPart):
                        # NOTE: We don't send ThinkingPart to the providers yet. If you are unsatisfied with this,
                        # please open an issue. The below code is the code to send thinking to the provider.
                        # texts.append(f'<think>\n{item.content}\n</think>')
                        pass
                    elif isinstance(item, ToolCallPart):
                        tool_calls.append(self._map_tool_call(item))
                    else:
                        assert_never(item)
                message_param = AssistantChatMessageV2(role='assistant')
                if texts:
                    message_param.content = [TextAssistantMessageContentItem(text='\n\n'.join(texts))]
                if tool_calls:
                    message_param.tool_calls = tool_calls
                cohere_messages.append(message_param)
            else:
                assert_never(message)
        if instructions := self._get_instructions(messages):
            cohere_messages.insert(0, SystemChatMessageV2(role='system', content=instructions))
        return cohere_messages

    def _get_tools(self, model_request_parameters: ModelRequestParameters) -> list[ToolV2]:
        tools = [self._map_tool_definition(r) for r in model_request_parameters.function_tools]
        if model_request_parameters.output_tools:
            tools += [self._map_tool_definition(r) for r in model_request_parameters.output_tools]
        return tools

    @staticmethod
    def _map_tool_call(t: ToolCallPart) -> ToolCallV2:
        return ToolCallV2(
            id=_guard_tool_call_id(t=t),
            type='function',
            function=ToolCallV2Function(
                name=t.tool_name,
                arguments=t.args_as_json_str(),
            ),
        )

    @staticmethod
    def _map_tool_definition(f: ToolDefinition) -> ToolV2:
        return ToolV2(
            type='function',
            function=ToolV2Function(
                name=f.name,
                description=f.description,
                parameters=f.parameters_json_schema,
            ),
        )

    @classmethod
    def _map_user_message(cls, message: ModelRequest) -> Iterable[ChatMessageV2]:
        for part in message.parts:
            if isinstance(part, SystemPromptPart):
                yield SystemChatMessageV2(role='system', content=part.content)
            elif isinstance(part, UserPromptPart):
                if isinstance(part.content, str):
                    yield UserChatMessageV2(role='user', content=part.content)
                else:
                    raise RuntimeError('Cohere does not yet support multi-modal inputs.')
            elif isinstance(part, ToolReturnPart):
                yield ToolChatMessageV2(
                    role='tool',
                    tool_call_id=_guard_tool_call_id(t=part),
                    content=part.model_response_str(),
                )
            elif isinstance(part, RetryPromptPart):
                if part.tool_name is None:
                    yield UserChatMessageV2(role='user', content=part.model_response())  # pragma: no cover
                else:
                    yield ToolChatMessageV2(
                        role='tool',
                        tool_call_id=_guard_tool_call_id(t=part),
                        content=part.model_response(),
                    )
            else:
                assert_never(part)
```
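A hedged usage sketch for the class itself, constructing the model directly instead of via a `'cohere:...'` model string; the model name is an example and credentials are assumed to come from the environment:

```python
from pydantic_ai import Agent
from pydantic_ai.models.cohere import CohereModel

# Example only: 'command-r-plus' stands in for any Cohere model name, and the
# default 'cohere' provider is expected to pick up the API key from the
# environment (typically CO_API_KEY).
model = CohereModel('command-r-plus')
agent = Agent(model)

result = agent.run_sync('Summarize what this model class does in one sentence.')
print(result.output)
```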
| Name | Type | Description | Default |
|------|------|-------------|---------|
| `provider` | `Literal['cohere'] \| Provider[AsyncClientV2]` | The provider to use for authentication and API access. Can be either the string `'cohere'` or an instance of `Provider[AsyncClientV2]`. If not provided, a new provider will be created using the other parameters. | `'cohere'` |
| `profile` | `ModelProfileSpec \| None` | The model profile to use. Defaults to a profile picked by the provider based on the model name. | `None` |
Source code in pydantic_ai_slim/pydantic_ai/models/cohere.py
```python
def __init__(
    self,
    model_name: CohereModelName,
    *,
    provider: Literal['cohere'] | Provider[AsyncClientV2] = 'cohere',
    profile: ModelProfileSpec | None = None,
):
    """Initialize a Cohere model.

    Args:
        model_name: The name of the Cohere model to use. List of model names
            available [here](https://docs.cohere.com/docs/models#command).
        provider: The provider to use for authentication and API access. Can be either the string
            'cohere' or an instance of `Provider[AsyncClientV2]`. If not provided, a new provider will be
            created using the other parameters.
        profile: The model profile to use. Defaults to a profile picked by the provider based on the model name.
    """
    self._model_name = model_name

    if isinstance(provider, str):
        provider = infer_provider(provider)
    self.client = provider.client

    self._profile = profile or provider.model_profile
```
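If you need explicit control over authentication, the `provider` argument accepts a provider instance rather than the `'cohere'` string. A sketch, assuming `CohereProvider` from `pydantic_ai.providers.cohere` and its `api_key` argument:

```python
from pydantic_ai.models.cohere import CohereModel
from pydantic_ai.providers.cohere import CohereProvider

# Assumed API: CohereProvider wraps cohere.AsyncClientV2 and exposes it as
# `provider.client`, which is what CohereModel.__init__ stores on `self.client`.
model = CohereModel(
    'command-r-plus',
    provider=CohereProvider(api_key='your-cohere-api-key'),
)
```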