danoan.llm_assistant.common.model module

Data models shared by the prompt and runner modules.

class danoan.llm_assistant.common.model.LLMAssistantConfiguration(runner: danoan.llm_assistant.common.model.RunnerConfiguration | None = None, prompt: danoan.llm_assistant.common.model.PromptRepositoryConfiguration | None = None)[source]

Bases: object

Parameters:
  • runner (RunnerConfiguration | None)

  • prompt (PromptRepositoryConfiguration | None)

classmethod from_dict(runner: Dict[str, Any] | None = None, prompt: Dict[str, Any] | None = None)[source]
Parameters:
  • runner (Dict[str, Any] | None)

  • prompt (Dict[str, Any] | None)

prompt: PromptRepositoryConfiguration | None = None
runner: RunnerConfiguration | None = None
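
A usage sketch for from_dict; the values are placeholders, and it is assumed (not stated above) that the runner and prompt dictionaries use the field names of RunnerConfiguration and PromptRepositoryConfiguration and are converted into those objects:

>>> from danoan.llm_assistant.common.model import LLMAssistantConfiguration
>>> # Build a configuration from plain dictionaries (placeholder values).
>>> config = LLMAssistantConfiguration.from_dict(
...     runner={"model": "example-model", "use_cache": True},
...     prompt={"git_user": "example-user", "prompt_collection_folder": "/tmp/prompts"},
... )
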
class danoan.llm_assistant.common.model.PromptConfiguration(name: str, system_prompt: str, user_prompt: str, model: str | None = None)[source]

Bases: object

Parameters:
  • name (str)

  • system_prompt (str)

  • user_prompt (str)

  • model (str | None)

model: str | None = None
name: str
system_prompt: str
user_prompt: str
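
A minimal construction sketch, assuming keyword arguments matching the signature above; the name and prompt texts are illustrative placeholders:

>>> from danoan.llm_assistant.common.model import PromptConfiguration
>>> # model is optional and defaults to None.
>>> prompt = PromptConfiguration(
...     name="summarize",
...     system_prompt="You are a helpful summarizer.",
...     user_prompt="Summarize the following text.",
... )
>>> prompt.model is None
True
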
class danoan.llm_assistant.common.model.PromptRepositoryConfiguration(git_user: str, prompt_collection_folder: pathlib.Path, versioning: Dict[str, str] | None = None)[source]

Bases: object

Parameters:
  • git_user (str)

  • prompt_collection_folder (Path)

  • versioning (Dict[str, str] | None)

git_user: str
prompt_collection_folder: Path
versioning: Dict[str, str] | None = None
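
A minimal construction sketch with placeholder values; versioning is left at its default since its key/value semantics are not documented above:

>>> from pathlib import Path
>>> from danoan.llm_assistant.common.model import PromptRepositoryConfiguration
>>> repo = PromptRepositoryConfiguration(
...     git_user="example-user",
...     prompt_collection_folder=Path("/tmp/prompt-collection"),
... )
>>> repo.versioning is None
True
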
class danoan.llm_assistant.common.model.RunnerConfiguration(openai_key: str | None = None, model: str | None = None, use_cache: bool = False, cache_path: pathlib.Path | None = None)[source]

Bases: object

Parameters:
  • openai_key (str | None)

  • model (str | None)

  • use_cache (bool)

  • cache_path (Path | None)

cache_path: Path | None = None
model: str | None = None
openai_key: str | None = None
use_cache: bool = False
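
A minimal construction sketch; the key and model name are placeholders, and caching is enabled only for illustration:

>>> from pathlib import Path
>>> from danoan.llm_assistant.common.model import RunnerConfiguration
>>> # All fields are optional; use_cache defaults to False.
>>> runner = RunnerConfiguration(
...     openai_key="sk-placeholder",
...     model="example-model",
...     use_cache=True,
...     cache_path=Path("/tmp/llm-assistant-cache"),
... )
>>> runner.use_cache
True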