lmflow.utils.conversation_template.llama#

Attributes#

Classes#

Module Contents#

lmflow.utils.conversation_template.llama.logger[source]#
class lmflow.utils.conversation_template.llama.Llama2ConversationTemplate[source]#

Bases: lmflow.utils.conversation_template.base.ConversationTemplate

_encode(tokenizer: transformers.PreTrainedTokenizer, messages: List[Dict[str, str]], system: str | None = None, tools: str | None = None, **kwargs) → Sequence[Tuple[List[int], List[int]]][source]#
class lmflow.utils.conversation_template.llama.Llama2ConversationTemplateForTool[source]#

Bases: Llama2ConversationTemplate

_encode(tokenizer: transformers.PreTrainedTokenizer, messages: List[Dict[str, str]], system: str | None = None, tools: str | None = None, **kwargs) → Sequence[Tuple[List[int], List[int]]][source]#
lmflow.utils.conversation_template.llama.LLAMA3_TEMPLATE[source]#
lmflow.utils.conversation_template.llama.LLAMA3_TEMPLATE_FOR_TOOL[source]#
lmflow.utils.conversation_template.llama.LLAMA2_TEMPLATE[source]#
lmflow.utils.conversation_template.llama.LLAMA2_TEMPLATE_FOR_TOOL[source]#