add ollama options as comments and debug env issues in config.py

This commit is contained in:
Arnav Agrawal 2024-12-27 12:40:20 +05:30
parent b883f52a11
commit c672d75302
2 changed files with 6 additions and 6 deletions

View File

@@ -8,9 +8,9 @@ reload = false
storage = "aws-s3"
database = "mongodb"
vector_store = "mongodb"
embedding = "openai"
completion = "openai"
parser = "combined"
embedding = "openai" # "ollama"
completion = "openai" # "ollama"
parser = "combined" # "unstructured"
# Storage Configuration
[storage.aws]
@@ -32,10 +32,10 @@ similarity_metric = "dotProduct"
# Model Configurations
[models]
[models.embedding]
model_name = "text-embedding-3-small"
model_name = "text-embedding-3-small" # "nomic-embed-text"
[models.completion]
model_name = "gpt-4o-mini"
model_name = "gpt-4o-mini" # "llama3.1"
default_max_tokens = 1000
default_temperature = 0.7

View File

@@ -63,7 +63,7 @@ class Settings(BaseSettings):
@lru_cache()
def get_settings() -> Settings:
"""Get cached settings instance."""
load_dotenv()
load_dotenv(override=True)
# Load config.toml
with open("config.toml", "rb") as f: