dandy.llm.mixin

LlmServiceMixin

Bases: BaseServiceMixin

__init__

Source code in dandy/llm/mixin.py
def __init__(self, **kwargs):
    # Copy the class-level intel class onto the instance.
    self.llm_intel_class = self.__class__.llm_intel_class
    # Associate this object with the class-level LlmService.
    self.llm.set_obj_service_instance(
        self,
        None,
    )
    super().__init__(**kwargs)

llm_config = 'DEFAULT' (class attribute, instance attribute)

llm_config_options = llm_configs['DEFAULT'].options (class attribute, instance attribute)

llm_role = 'Helpful Assistant' (class attribute, instance attribute)

llm_task = 'Read the users request and provide the correct response based on context.' (class attribute, instance attribute)

llm_guidelines = None (class attribute, instance attribute)

llm_system_override_prompt = None (class attribute, instance attribute)

llm = LlmService() (class attribute)

llm_intel_class = self.__class__.llm_intel_class (class attribute, instance attribute; assigned in __init__)
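The attributes above are meant to be overridden on a subclass. A minimal sketch, assuming a hypothetical subclass name and illustrative values; only the attributes documented above are used, and whether you subclass the mixin directly or inherit it through another dandy base class depends on your project:

from dandy.llm.mixin import LlmServiceMixin

class SupportAgentMixin(LlmServiceMixin):
    # Hypothetical subclass; the values are illustrative, not dandy defaults.
    llm_config = 'DEFAULT'
    llm_role = 'Support Agent'
    llm_task = 'Answer the customer question using the provided order context.'
    llm_guidelines = None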

get_llm_description (classmethod)

Source code in dandy/llm/mixin.py
@classmethod
def get_llm_description(cls) -> str | None:
    if cls.llm_role:
        if cls.llm_task:
            return f'{cls.llm_role}: {cls.llm_task}'

        return f'{cls.llm_role}'

    return None
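Given the defaults documented above, the classmethod can be called without an instance. A short sketch of the expected result:

from dandy.llm.mixin import LlmServiceMixin

# With both llm_role and llm_task set (the documented defaults), the description
# is 'role: task'; with only a role it is the role alone, and None otherwise.
description = LlmServiceMixin.get_llm_description()
# 'Helpful Assistant: Read the users request and provide the correct response based on context.'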

reset_services

Source code in dandy/llm/mixin.py
def reset_services(self):
    super().reset_services()
    self.llm.reset_service()