
refactor(agent): Remove redundant parsing functions in config classes
Pydantic has built-in typecasting logic, so converting to Python builtin types and enums is done automatically.
Pwuts committed Apr 18, 2024
1 parent 7082e63 commit 49a08ba
Showing 3 changed files with 15 additions and 33 deletions.
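
The refactor relies on the coercion Pydantic applies during model validation: raw environment strings are converted to the annotated field types, so per-field parsing lambdas become redundant. A minimal, self-contained sketch of that behavior (the Example model and its values are illustrative, not AutoGPT code):

from enum import Enum
from pathlib import Path

from pydantic import BaseModel


class LogFormatName(str, Enum):
    SIMPLE = "simple"
    DEBUG = "debug"


class Example(BaseModel):
    redis_port: int = 6379
    ai_settings_file: Path = Path("ai_settings.yaml")
    log_format: LogFormatName = LogFormatName.SIMPLE


# Environment variables arrive as strings; Pydantic casts them while validating.
cfg = Example(
    redis_port="6380",               # str -> int
    ai_settings_file="custom.yaml",  # str -> Path
    log_format="debug",              # str -> LogFormatName.DEBUG
)
assert cfg.redis_port == 6380
assert isinstance(cfg.ai_settings_file, Path)
assert cfg.log_format is LogFormatName.DEBUG
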
37 changes: 11 additions & 26 deletions autogpts/autogpt/autogpt/config/config.py
@@ -63,38 +63,31 @@ class Config(SystemSettings, arbitrary_types_allowed=True):
 
     # File storage
     file_storage_backend: FileStorageBackendName = UserConfigurable(
-        default=FileStorageBackendName.LOCAL,
-        from_env=lambda: FileStorageBackendName(v)
-        if (v := os.getenv("FILE_STORAGE_BACKEND"))
-        else None,
+        default=FileStorageBackendName.LOCAL, from_env="FILE_STORAGE_BACKEND"
     )
 
     ##########################
     # Agent Control Settings #
     ##########################
     # Paths
     ai_settings_file: Path = UserConfigurable(
-        default=AI_SETTINGS_FILE,
-        from_env=lambda: Path(f) if (f := os.getenv("AI_SETTINGS_FILE")) else None,
+        default=AI_SETTINGS_FILE, from_env="AI_SETTINGS_FILE"
     )
     prompt_settings_file: Path = UserConfigurable(
         default=PROMPT_SETTINGS_FILE,
-        from_env=lambda: Path(f) if (f := os.getenv("PROMPT_SETTINGS_FILE")) else None,
+        from_env="PROMPT_SETTINGS_FILE",
     )
 
     # Model configuration
     fast_llm: str = UserConfigurable(
         default="gpt-3.5-turbo-0125",
-        from_env=lambda: os.getenv("FAST_LLM"),
+        from_env="FAST_LLM",
     )
     smart_llm: str = UserConfigurable(
         default="gpt-4-turbo-preview",
-        from_env=lambda: os.getenv("SMART_LLM"),
-    )
-    temperature: float = UserConfigurable(
-        default=0,
-        from_env=lambda: float(v) if (v := os.getenv("TEMPERATURE")) else None,
+        from_env="SMART_LLM",
     )
+    temperature: float = UserConfigurable(default=0, from_env="TEMPERATURE")
     openai_functions: bool = UserConfigurable(
         default=False, from_env=lambda: os.getenv("OPENAI_FUNCTIONS", "False") == "True"
     )
@@ -115,10 +108,7 @@ class Config(SystemSettings, arbitrary_types_allowed=True):
     memory_backend: str = UserConfigurable("json_file", from_env="MEMORY_BACKEND")
     memory_index: str = UserConfigurable("auto-gpt-memory", from_env="MEMORY_INDEX")
     redis_host: str = UserConfigurable("localhost", from_env="REDIS_HOST")
-    redis_port: int = UserConfigurable(
-        default=6379,
-        from_env=lambda: int(v) if (v := os.getenv("REDIS_PORT")) else None,
-    )
+    redis_port: int = UserConfigurable(default=6379, from_env="REDIS_PORT")
     redis_password: str = UserConfigurable("", from_env="REDIS_PASSWORD")
     wipe_redis_on_start: bool = UserConfigurable(
         default=True,
@@ -170,10 +160,7 @@ class Config(SystemSettings, arbitrary_types_allowed=True):
     sd_webui_url: Optional[str] = UserConfigurable(
         default="http://localhost:7860", from_env="SD_WEBUI_URL"
     )
-    image_size: int = UserConfigurable(
-        default=256,
-        from_env=lambda: int(v) if (v := os.getenv("IMAGE_SIZE")) else None,
-    )
+    image_size: int = UserConfigurable(default=256, from_env="IMAGE_SIZE")
 
     # Audio to text
     audio_to_text_provider: str = UserConfigurable(
@@ -198,8 +185,7 @@ class Config(SystemSettings, arbitrary_types_allowed=True):
     ###################
     plugins_dir: str = UserConfigurable("plugins", from_env="PLUGINS_DIR")
     plugins_config_file: Path = UserConfigurable(
-        default=PLUGINS_CONFIG_FILE,
-        from_env=lambda: Path(f) if (f := os.getenv("PLUGINS_CONFIG_FILE")) else None,
+        default=PLUGINS_CONFIG_FILE, from_env="PLUGINS_CONFIG_FILE"
     )
     plugins_config: PluginsConfig = Field(
         default_factory=lambda: PluginsConfig(plugins={})
@@ -223,8 +209,7 @@ class Config(SystemSettings, arbitrary_types_allowed=True):
     # OpenAI
     openai_credentials: Optional[OpenAICredentials] = None
     azure_config_file: Optional[Path] = UserConfigurable(
-        default=AZURE_CONFIG_FILE,
-        from_env=lambda: Path(f) if (f := os.getenv("AZURE_CONFIG_FILE")) else None,
+        default=AZURE_CONFIG_FILE, from_env="AZURE_CONFIG_FILE"
     )
 
     # Github
@@ -234,7 +219,7 @@ class Config(SystemSettings, arbitrary_types_allowed=True):
     # Google
     google_api_key: Optional[str] = UserConfigurable(from_env="GOOGLE_API_KEY")
     google_custom_search_engine_id: Optional[str] = UserConfigurable(
-        from_env=lambda: os.getenv("GOOGLE_CUSTOM_SEARCH_ENGINE_ID"),
+        from_env="GOOGLE_CUSTOM_SEARCH_ENGINE_ID",
     )
 
     # Huggingface
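Each removed lambda above was doing two jobs: reading an environment variable and casting its string value. With a plain variable name in from_env, only the first job is left to the config loader, and the cast falls out of Pydantic validation. A rough sketch of that split, under the assumption that the loader collects raw strings keyed by field name (AppConfig, ENV_MAP, and from_environment are hypothetical names, not AutoGPT's UserConfigurable machinery):

import os
from pathlib import Path

from pydantic import BaseModel


class AppConfig(BaseModel):
    redis_port: int = 6379
    azure_config_file: Path = Path("azure.yaml")


# Field name -> environment variable, mirroring from_env="REDIS_PORT" etc.
ENV_MAP = {"redis_port": "REDIS_PORT", "azure_config_file": "AZURE_CONFIG_FILE"}


def from_environment() -> AppConfig:
    # Collect raw strings for the variables that are set; Pydantic handles the
    # str -> int and str -> Path conversions during validation.
    overrides = {
        field: value
        for field, env_name in ENV_MAP.items()
        if (value := os.getenv(env_name)) is not None
    }
    return AppConfig(**overrides)
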
4 changes: 1 addition & 3 deletions autogpts/autogpt/autogpt/file_storage/s3.py
@@ -29,9 +29,7 @@
 
 class S3FileStorageConfiguration(FileStorageConfiguration):
     bucket: str = UserConfigurable("autogpt", from_env="STORAGE_BUCKET")
-    s3_endpoint_url: Optional[SecretStr] = UserConfigurable(
-        from_env=lambda: SecretStr(v) if (v := os.getenv("S3_ENDPOINT_URL")) else None
-    )
+    s3_endpoint_url: Optional[SecretStr] = UserConfigurable(from_env="S3_ENDPOINT_URL")
 
 
 class S3FileStorage(FileStorage):
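The same reasoning covers SecretStr: wrapping the raw string by hand is unnecessary, since Pydantic constructs the secret type from a plain string during validation. A small illustration (S3Example is a made-up model, not the storage configuration above):

from typing import Optional

from pydantic import BaseModel, SecretStr


class S3Example(BaseModel):
    s3_endpoint_url: Optional[SecretStr] = None


# A plain string is accepted and wrapped; the value stays masked when printed.
cfg = S3Example(s3_endpoint_url="http://minio.local:9000")
assert isinstance(cfg.s3_endpoint_url, SecretStr)
print(cfg.s3_endpoint_url)                      # **********
print(cfg.s3_endpoint_url.get_secret_value())   # http://minio.local:9000
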
7 changes: 3 additions & 4 deletions autogpts/autogpt/autogpt/logs/config.py
@@ -57,8 +57,7 @@ class LoggingConfig(SystemConfiguration):
 
     # Console output
     log_format: LogFormatName = UserConfigurable(
-        default=LogFormatName.SIMPLE,
-        from_env=lambda: LogFormatName(os.getenv("LOG_FORMAT", "simple")),
+        default=LogFormatName.SIMPLE, from_env="LOG_FORMAT"
     )
     plain_console_output: bool = UserConfigurable(
         default=False,
@@ -69,8 +68,8 @@ class LoggingConfig(SystemConfiguration):
     log_dir: Path = LOG_DIR
     log_file_format: Optional[LogFormatName] = UserConfigurable(
         default=LogFormatName.SIMPLE,
-        from_env=lambda: LogFormatName(
-            os.getenv("LOG_FILE_FORMAT", os.getenv("LOG_FORMAT", "simple"))
+        from_env=lambda: os.getenv(
+            "LOG_FILE_FORMAT", os.getenv("LOG_FORMAT", "simple")
         ),
     )

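log_file_format keeps its lambda because a single variable name cannot express the LOG_FILE_FORMAT -> LOG_FORMAT fallback, but the lambda now only decides which string to use; turning that string into a LogFormatName is again left to Pydantic. A compact sketch of the remaining division of labor (the DEBUG member and LoggingExample model are illustrative):

import os
from enum import Enum

from pydantic import BaseModel


class LogFormatName(str, Enum):
    SIMPLE = "simple"
    DEBUG = "debug"


def resolve_log_file_format() -> str:
    # Fallback chain only: returns a raw string, not an enum member.
    return os.getenv("LOG_FILE_FORMAT", os.getenv("LOG_FORMAT", "simple"))


class LoggingExample(BaseModel):
    log_file_format: LogFormatName = LogFormatName.SIMPLE


os.environ["LOG_FORMAT"] = "debug"
cfg = LoggingExample(log_file_format=resolve_log_file_format())
assert cfg.log_file_format is LogFormatName.DEBUG
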
