Skip to content

ollama

dandy.llm.request.ollama

OllamaRequestOptions

Bases: BaseModel

num_ctx = None (class attribute / instance attribute)

num_predict = None (class attribute / instance attribute)

seed = None (class attribute / instance attribute)

temperature = None (class attribute / instance attribute)

OllamaRequestBody

Bases: BaseRequestBody

options (instance attribute)

stream = False (class attribute / instance attribute)

format = {} (class attribute / instance attribute)

token_usage (property)

add_message

Source code in dandy/llm/request/ollama.py
def add_message(
        self,
        role: RoleLiteralStr,
        content: str,
        images: Union[List[str], None] = None
) -> None:
    """Append a chat message to this request body.

    Builds a ``RequestMessage`` from the given role and content (plus any
    optional base64-encoded images) and adds it to ``self.messages``.
    """
    message = RequestMessage(
        role=role,
        content=content,
        images=images,
    )
    self.messages.append(message)

get_context_length

Source code in dandy/llm/request/ollama.py
def get_context_length(self) -> Union[int, None]:
    """Return the configured context window size (Ollama's ``num_ctx``).

    Returns:
        The ``num_ctx`` option, or ``None`` when the option was never set
        (its declared default) — the previous ``-> int`` annotation was
        therefore too narrow.
    """
    return self.options.num_ctx

get_max_completion_tokens

Source code in dandy/llm/request/ollama.py
def get_max_completion_tokens(self) -> Union[int, None]:
    """Return the maximum number of tokens to generate (Ollama's ``num_predict``).

    Returns:
        The ``num_predict`` option, or ``None`` when the option was never set
        (its declared default) — the previous ``-> int`` annotation was
        therefore too narrow.
    """
    return self.options.num_predict

get_seed

Source code in dandy/llm/request/ollama.py
def get_seed(self):
    """Return the request's ``seed`` option (may be ``None`` if unset)."""
    opts = self.options
    return opts.seed

get_temperature

Source code in dandy/llm/request/ollama.py
def get_temperature(self):
    """Return the request's ``temperature`` option (may be ``None`` if unset)."""
    opts = self.options
    return opts.temperature

set_format_to_json_schema

Source code in dandy/llm/request/ollama.py
def set_format_to_json_schema(self, json_schema: dict):
    """Constrain the response format to the given JSON schema.

    Stores a reference to ``json_schema`` (no copy is made) in
    ``self.format``.
    """
    schema = json_schema
    self.format = schema

set_format_to_text

Source code in dandy/llm/request/ollama.py
def set_format_to_text(self):
    """Clear any format constraint so the model returns plain text.

    Setting ``self.format`` to ``None`` disables structured (JSON-schema)
    output for this request.
    """
    self.format = None

to_dict

Source code in dandy/llm/request/ollama.py
def to_dict(self) -> dict:
    """Serialize this request body to a plain ``dict``.

    Delegates to pydantic's ``model_dump`` on the model instance.
    """
    dumped = self.model_dump()
    return dumped