diff options
author | Krzysztof Czerwinski <34861343+kcze@users.noreply.github.com> | 2024-05-15 23:37:53 +0100 |
---|---|---|
committer | GitHub <noreply@github.com> | 2024-05-16 00:37:53 +0200 |
commit | e8d7dfa386f9e104bdb0b42aaedbf537318544ba (patch) | |
tree | 071667ffabbc567e96cdcff4e4652d64826505bf /autogpts | |
parent | Fix poetry install issue on ARM64 & python 3.12 (#7145) (diff) | |
download | Auto-GPT-master.tar.gz Auto-GPT-master.tar.bz2 Auto-GPT-master.zip |
Moved from `autogpt` to `forge`:
- `autogpt.config` -> `forge.config`
- `autogpt.processing` -> `forge.content_processing`
- `autogpt.file_storage` -> `forge.file_storage`
- `autogpt.logs` -> `forge.logging`
- `autogpt.speech` -> `forge.speech`
- `autogpt.agents.(base|components|protocols)` -> `forge.agent.*`
- `autogpt.command_decorator` -> `forge.command.decorator`
- `autogpt.models.(command|command_parameter)` -> `forge.command.(command|parameter)`
- `autogpt.(commands|components|features)` -> `forge.components`
- `autogpt.core.utils.json_utils` -> `forge.json.parsing`
- `autogpt.prompts.utils` -> `forge.llm.prompting.utils`
- `autogpt.core.prompting.(base|schema|utils)` -> `forge.llm.prompting.*`
- `autogpt.core.resource.model_providers` -> `forge.llm.providers`
- `autogpt.llm.providers.openai` + `autogpt.core.resource.model_providers.utils`
-> `forge.llm.providers.utils`
- `autogpt.models.action_history:Action*` -> `forge.models.action`
- `autogpt.core.configuration.schema` -> `forge.models.config`
- `autogpt.core.utils.json_schema` -> `forge.models.json_schema`
- `autogpt.core.resource.schema` -> `forge.models.providers`
- `autogpt.models.utils` -> `forge.models.utils`
- `forge.sdk.(errors|utils)` + `autogpt.utils.(exceptions|file_operations_utils|validators)`
-> `forge.utils.(exceptions|file_operations|url_validator)`
- `autogpt.utils.utils` -> `forge.utils.const` + `forge.utils.yaml_validator`
Moved within `forge`:
- forge/prompts/* -> forge/llm/prompting/*
The rest are mostly import updates, and some sporadic removals and necessary updates (for example to fix circular deps):
- Changed `CommandOutput = Any` to remove coupling with `ContextItem` (no longer needed)
- Removed unused `Singleton` class
- Reluctantly moved `speech` to forge due to coupling (tts needs to be changed into a component)
- Moved `function_specs_from_commands` and `core/resource/model_providers` to `llm/providers` (resources were a `core` thing and are no longer relevant)
- Keep tests in `autogpt` to reduce changes in this PR
- Removed unused memory-related code from tests
- Removed duplicated classes: `FancyConsoleFormatter`, `BelowLevelFilter`
- `prompt_settings.yaml` is in both `autogpt` and `forge` because for some reason it doesn't work when placed in just one dir (needs to be taken care of)
- Removed `config` param from `clean_input`; it wasn't used and caused a circular dependency
- Renamed `BaseAgentActionProposal` to `ActionProposal`
- Updated `pyproject.toml` in `forge` and `autogpt`
- Moved `Action*` models from `forge/components/action_history/model.py` to `forge/models/action.py` as those are relevant to the entire agent and not just `EventHistoryComponent` + to reduce coupling
- Renamed `DEFAULT_ASK_COMMAND` to `ASK_COMMAND` and `DEFAULT_FINISH_COMMAND` to `FINISH_COMMAND`
- Renamed `AutoGptFormatter` to `ForgeFormatter` and moved to `forge`
Includes changes from PR https://github.com/Significant-Gravitas/AutoGPT/pull/7148
---------
Co-authored-by: Reinier van der Leer <pwuts@agpt.co>
Diffstat (limited to 'autogpts')
178 files changed, 2038 insertions, 1179 deletions
diff --git a/autogpts/autogpt/agbenchmark_config/benchmarks.py b/autogpts/autogpt/agbenchmark_config/benchmarks.py index c574dc303..7f60774fd 100644 --- a/autogpts/autogpt/agbenchmark_config/benchmarks.py +++ b/autogpts/autogpt/agbenchmark_config/benchmarks.py @@ -3,12 +3,14 @@ import logging import sys from pathlib import Path +from forge.config.ai_profile import AIProfile +from forge.config.config import ConfigBuilder +from forge.file_storage import FileStorageBackendName, get_storage +from forge.logging.config import configure_logging + from autogpt.agent_manager.agent_manager import AgentManager from autogpt.agents.agent import Agent, AgentConfiguration, AgentSettings from autogpt.app.main import _configure_llm_provider, run_interaction_loop -from autogpt.config import AIProfile, ConfigBuilder -from autogpt.file_storage import FileStorageBackendName, get_storage -from autogpt.logs.config import configure_logging LOG_DIR = Path(__file__).parent / "logs" diff --git a/autogpts/autogpt/autogpt/agent_factory/configurators.py b/autogpts/autogpt/autogpt/agent_factory/configurators.py index b727beb99..18b63a01c 100644 --- a/autogpts/autogpt/autogpt/agent_factory/configurators.py +++ b/autogpts/autogpt/autogpt/agent_factory/configurators.py @@ -1,9 +1,12 @@ from typing import Optional +from forge.config.ai_directives import AIDirectives +from forge.config.ai_profile import AIProfile +from forge.config.config import Config +from forge.file_storage.base import FileStorage +from forge.llm.providers import ChatModelProvider + from autogpt.agents.agent import Agent, AgentConfiguration, AgentSettings -from autogpt.config import AIDirectives, AIProfile, Config -from autogpt.core.resource.model_providers import ChatModelProvider -from autogpt.file_storage.base import FileStorage def create_agent( diff --git a/autogpts/autogpt/autogpt/agent_factory/generators.py b/autogpts/autogpt/autogpt/agent_factory/generators.py index 9f9c44600..3b60dfa30 100644 --- 
a/autogpts/autogpt/autogpt/agent_factory/generators.py +++ b/autogpts/autogpt/autogpt/agent_factory/generators.py @@ -2,16 +2,16 @@ from __future__ import annotations from typing import TYPE_CHECKING -from autogpt.config.ai_directives import AIDirectives -from autogpt.file_storage.base import FileStorage - -from .configurators import _configure_agent -from .profile_generator import generate_agent_profile_for_task +from forge.config.ai_directives import AIDirectives +from forge.file_storage.base import FileStorage if TYPE_CHECKING: from autogpt.agents.agent import Agent - from autogpt.config import Config - from autogpt.core.resource.model_providers.schema import ChatModelProvider + from forge.config.config import Config + from forge.llm.providers.schema import ChatModelProvider + +from .configurators import _configure_agent +from .profile_generator import generate_agent_profile_for_task async def generate_agent_for_task( diff --git a/autogpts/autogpt/autogpt/agent_factory/profile_generator.py b/autogpts/autogpt/autogpt/agent_factory/profile_generator.py index 78afbe51a..32851194c 100644 --- a/autogpts/autogpt/autogpt/agent_factory/profile_generator.py +++ b/autogpts/autogpt/autogpt/agent_factory/profile_generator.py @@ -1,20 +1,18 @@ import json import logging -from autogpt.config import AIDirectives, AIProfile, Config -from autogpt.core.configuration import SystemConfiguration, UserConfigurable -from autogpt.core.prompting import ( - ChatPrompt, - LanguageModelClassification, - PromptStrategy, -) -from autogpt.core.resource.model_providers.schema import ( +from forge.config.ai_directives import AIDirectives +from forge.config.ai_profile import AIProfile +from forge.config.config import Config +from forge.llm.prompting import ChatPrompt, LanguageModelClassification, PromptStrategy +from forge.llm.providers.schema import ( AssistantChatMessage, ChatMessage, ChatModelProvider, CompletionModelFunction, ) -from autogpt.core.utils.json_schema import JSONSchema +from 
forge.models.config import SystemConfiguration, UserConfigurable +from forge.models.json_schema import JSONSchema logger = logging.getLogger(__name__) diff --git a/autogpts/autogpt/autogpt/agent_manager/agent_manager.py b/autogpts/autogpt/autogpt/agent_manager/agent_manager.py index 011f868b0..6c9f2c495 100644 --- a/autogpts/autogpt/autogpt/agent_manager/agent_manager.py +++ b/autogpts/autogpt/autogpt/agent_manager/agent_manager.py @@ -3,8 +3,9 @@ from __future__ import annotations import uuid from pathlib import Path +from forge.file_storage.base import FileStorage + from autogpt.agents.agent import AgentSettings -from autogpt.file_storage.base import FileStorage class AgentManager: diff --git a/autogpts/autogpt/autogpt/agents/__init__.py b/autogpts/autogpt/autogpt/agents/__init__.py index f6ae48c59..1ee5800d7 100644 --- a/autogpts/autogpt/autogpt/agents/__init__.py +++ b/autogpts/autogpt/autogpt/agents/__init__.py @@ -1,9 +1,7 @@ -from .agent import Agent, OneShotAgentActionProposal -from .base import BaseAgent, BaseAgentActionProposal +from .agent import Agent +from .prompt_strategies.one_shot import OneShotAgentActionProposal __all__ = [ - "BaseAgent", "Agent", - "BaseAgentActionProposal", "OneShotAgentActionProposal", ] diff --git a/autogpts/autogpt/autogpt/agents/agent.py b/autogpts/autogpt/autogpt/agents/agent.py index 0c92a907f..fef7b84ea 100644 --- a/autogpts/autogpt/autogpt/agents/agent.py +++ b/autogpts/autogpt/autogpt/agents/agent.py @@ -6,66 +6,67 @@ from datetime import datetime from typing import TYPE_CHECKING, Optional import sentry_sdk -from pydantic import Field - -from autogpt.commands.execute_code import CodeExecutorComponent -from autogpt.commands.git_operations import GitOperationsComponent -from autogpt.commands.image_gen import ImageGeneratorComponent -from autogpt.commands.system import SystemComponent -from autogpt.commands.user_interaction import UserInteractionComponent -from autogpt.commands.web_search import WebSearchComponent -from 
autogpt.commands.web_selenium import WebSeleniumComponent -from autogpt.components.event_history import EventHistoryComponent -from autogpt.core.configuration import Configurable -from autogpt.core.prompting import ChatPrompt -from autogpt.core.resource.model_providers import ( +from forge.agent.base import BaseAgent, BaseAgentConfiguration, BaseAgentSettings +from forge.agent.protocols import ( + AfterExecute, + AfterParse, + CommandProvider, + DirectiveProvider, + MessageProvider, +) +from forge.command.command import Command, CommandOutput +from forge.components.action_history import ( + ActionHistoryComponent, + EpisodicActionHistory, +) +from forge.components.code_executor.code_executor import CodeExecutorComponent +from forge.components.context.context import AgentContext, ContextComponent +from forge.components.file_manager import FileManagerComponent +from forge.components.git_operations import GitOperationsComponent +from forge.components.image_gen import ImageGeneratorComponent +from forge.components.system import SystemComponent +from forge.components.user_interaction import UserInteractionComponent +from forge.components.watchdog import WatchdogComponent +from forge.components.web import WebSearchComponent, WebSeleniumComponent +from forge.file_storage.base import FileStorage +from forge.llm.prompting.schema import ChatPrompt +from forge.llm.providers import ( AssistantFunctionCall, ChatMessage, ChatModelProvider, ChatModelResponse, ) -from autogpt.core.runner.client_lib.logging.helpers import dump_prompt -from autogpt.file_storage.base import FileStorage -from autogpt.llm.providers.openai import function_specs_from_commands -from autogpt.logs.log_cycle import ( - CURRENT_CONTEXT_FILE_NAME, - NEXT_ACTION_FILE_NAME, - USER_INPUT_FILE_NAME, - LogCycleHandler, -) -from autogpt.models.action_history import ( +from forge.llm.providers.utils import function_specs_from_commands +from forge.models.action import ( ActionErrorResult, ActionInterruptedByHuman, 
ActionResult, ActionSuccessResult, - EpisodicActionHistory, ) -from autogpt.models.command import Command, CommandOutput -from autogpt.utils.exceptions import ( +from forge.models.config import Configurable +from forge.utils.exceptions import ( AgentException, AgentTerminated, CommandExecutionError, UnknownCommandError, ) +from pydantic import Field + +from autogpt.app.log_cycle import ( + CURRENT_CONTEXT_FILE_NAME, + NEXT_ACTION_FILE_NAME, + USER_INPUT_FILE_NAME, + LogCycleHandler, +) +from autogpt.core.runner.client_lib.logging.helpers import dump_prompt -from .base import BaseAgent, BaseAgentConfiguration, BaseAgentSettings -from .features.agent_file_manager import FileManagerComponent -from .features.context import AgentContext, ContextComponent -from .features.watchdog import WatchdogComponent from .prompt_strategies.one_shot import ( OneShotAgentActionProposal, OneShotAgentPromptStrategy, ) -from .protocols import ( - AfterExecute, - AfterParse, - CommandProvider, - DirectiveProvider, - MessageProvider, -) if TYPE_CHECKING: - from autogpt.config import Config + from forge.config.config import Config logger = logging.getLogger(__name__) @@ -114,13 +115,13 @@ class Agent(BaseAgent, Configurable[AgentSettings]): # Components self.system = SystemComponent(legacy_config, settings.ai_profile) - self.history = EventHistoryComponent( + self.history = ActionHistoryComponent( settings.history, self.send_token_limit, lambda x: self.llm_provider.count_tokens(x, self.llm.name), legacy_config, llm_provider, - ) + ).run_after(WatchdogComponent) self.user_interaction = UserInteractionComponent(legacy_config) self.file_manager = FileManagerComponent(settings, file_storage) self.code_executor = CodeExecutorComponent( @@ -135,7 +136,9 @@ class Agent(BaseAgent, Configurable[AgentSettings]): self.web_search = WebSearchComponent(legacy_config) self.web_selenium = WebSeleniumComponent(legacy_config, llm_provider, self.llm) self.context = 
ContextComponent(self.file_manager.workspace, settings.context) - self.watchdog = WatchdogComponent(settings.config, settings.history) + self.watchdog = WatchdogComponent(settings.config, settings.history).run_after( + ContextComponent + ) self.created_at = datetime.now().strftime("%Y%m%d_%H%M%S") """Timestamp the agent was created; only used for structured debug logging.""" diff --git a/autogpts/autogpt/autogpt/agents/prompt_strategies/one_shot.py b/autogpts/autogpt/autogpt/agents/prompt_strategies/one_shot.py index ff08f4669..57166503c 100644 --- a/autogpts/autogpt/autogpt/agents/prompt_strategies/one_shot.py +++ b/autogpts/autogpt/autogpt/agents/prompt_strategies/one_shot.py @@ -6,26 +6,22 @@ import re from logging import Logger import distro -from pydantic import Field - -from autogpt.agents.base import BaseAgentActionProposal -from autogpt.config import AIDirectives, AIProfile -from autogpt.core.configuration.schema import SystemConfiguration, UserConfigurable -from autogpt.core.prompting import ( - ChatPrompt, - LanguageModelClassification, - PromptStrategy, -) -from autogpt.core.resource.model_providers.schema import ( +from forge.config.ai_directives import AIDirectives +from forge.config.ai_profile import AIProfile +from forge.json.parsing import extract_dict_from_json +from forge.llm.prompting import ChatPrompt, LanguageModelClassification, PromptStrategy +from forge.llm.prompting.utils import format_numbered_list +from forge.llm.providers.schema import ( AssistantChatMessage, ChatMessage, CompletionModelFunction, ) -from autogpt.core.utils.json_schema import JSONSchema -from autogpt.core.utils.json_utils import extract_dict_from_json -from autogpt.models.utils import ModelWithSummary -from autogpt.prompts.utils import format_numbered_list -from autogpt.utils.exceptions import InvalidAgentResponseError +from forge.models.action import ActionProposal +from forge.models.config import SystemConfiguration, UserConfigurable +from forge.models.json_schema 
import JSONSchema +from forge.models.utils import ModelWithSummary +from forge.utils.exceptions import InvalidAgentResponseError +from pydantic import Field _RESPONSE_INTERFACE_NAME = "AssistantResponse" @@ -46,7 +42,7 @@ class AssistantThoughts(ModelWithSummary): return self.text -class OneShotAgentActionProposal(BaseAgentActionProposal): +class OneShotAgentActionProposal(ActionProposal): thoughts: AssistantThoughts diff --git a/autogpts/autogpt/autogpt/app/agent_protocol_server.py b/autogpts/autogpt/autogpt/app/agent_protocol_server.py index 2eb09706e..48cdb00f5 100644 --- a/autogpts/autogpt/autogpt/app/agent_protocol_server.py +++ b/autogpts/autogpt/autogpt/app/agent_protocol_server.py @@ -10,8 +10,11 @@ from fastapi import APIRouter, FastAPI, UploadFile from fastapi.middleware.cors import CORSMiddleware from fastapi.responses import RedirectResponse, StreamingResponse from fastapi.staticfiles import StaticFiles +from forge.config.config import Config +from forge.file_storage import FileStorage +from forge.llm.providers import ChatModelProvider, ModelProviderBudget +from forge.models.action import ActionErrorResult, ActionSuccessResult from forge.sdk.db import AgentDB -from forge.sdk.errors import NotFoundError from forge.sdk.middlewares import AgentMiddleware from forge.sdk.model import ( Artifact, @@ -24,6 +27,8 @@ from forge.sdk.model import ( TaskStepsListResponse, ) from forge.sdk.routes.agent_protocol import base_router +from forge.utils.const import ASK_COMMAND, FINISH_COMMAND +from forge.utils.exceptions import AgentFinished, NotFoundError from hypercorn.asyncio import serve as hypercorn_serve from hypercorn.config import Config as HypercornConfig from sentry_sdk import set_user @@ -32,12 +37,6 @@ from autogpt.agent_factory.configurators import configure_agent_with_state from autogpt.agent_factory.generators import generate_agent_for_task from autogpt.agent_manager import AgentManager from autogpt.app.utils import is_port_free -from autogpt.config import 
Config -from autogpt.core.resource.model_providers import ChatModelProvider, ModelProviderBudget -from autogpt.file_storage import FileStorage -from autogpt.models.action_history import ActionErrorResult, ActionSuccessResult -from autogpt.utils.exceptions import AgentFinished -from autogpt.utils.utils import DEFAULT_ASK_COMMAND, DEFAULT_FINISH_COMMAND logger = logging.getLogger(__name__) @@ -226,7 +225,7 @@ class AgentProtocolServer: input=step_request, is_last=( last_proposal is not None - and last_proposal.use_tool.name == DEFAULT_FINISH_COMMAND + and last_proposal.use_tool.name == FINISH_COMMAND and execute_approved ), ) @@ -240,7 +239,7 @@ class AgentProtocolServer: ) ) - if last_proposal.use_tool.name == DEFAULT_ASK_COMMAND: + if last_proposal.use_tool.name == ASK_COMMAND: tool_result = ActionSuccessResult(outputs=user_input) agent.event_history.register_result(tool_result) elif execute_approved: @@ -296,13 +295,13 @@ class AgentProtocolServer: + ("\n\n" if "\n" in str(tool_result) else " ") + f"{tool_result}\n\n" ) - if last_proposal and last_proposal.use_tool.name != DEFAULT_ASK_COMMAND + if last_proposal and last_proposal.use_tool.name != ASK_COMMAND else "" ) output += f"{assistant_response.thoughts.speak}\n\n" output += ( f"Next Command: {next_tool_to_use}" - if next_tool_to_use.name != DEFAULT_ASK_COMMAND + if next_tool_to_use.name != ASK_COMMAND else next_tool_to_use.arguments["question"] ) diff --git a/autogpts/autogpt/autogpt/app/cli.py b/autogpts/autogpt/autogpt/app/cli.py index e6ca0a783..0ff35f5a4 100644 --- a/autogpts/autogpt/autogpt/app/cli.py +++ b/autogpts/autogpt/autogpt/app/cli.py @@ -4,8 +4,7 @@ from pathlib import Path from typing import Optional import click - -from autogpt.logs.config import LogFormatName +from forge.logging.config import LogFormatName from .telemetry import setup_telemetry diff --git a/autogpts/autogpt/autogpt/app/configurator.py b/autogpts/autogpt/autogpt/app/configurator.py index 2463b6fcf..5589953b2 100644 --- 
a/autogpts/autogpt/autogpt/app/configurator.py +++ b/autogpts/autogpt/autogpt/app/configurator.py @@ -7,13 +7,12 @@ from typing import Literal, Optional import click from colorama import Back, Fore, Style +from forge.config.config import GPT_3_MODEL, GPT_4_MODEL, Config +from forge.llm.providers import ModelName, MultiProvider +from forge.logging.helpers import request_user_double_check +from forge.utils.yaml_validator import validate_yaml_file -from autogpt.config import Config -from autogpt.config.config import GPT_3_MODEL, GPT_4_MODEL -from autogpt.core.resource.model_providers import ModelName, MultiProvider -from autogpt.logs.helpers import request_user_double_check from autogpt.memory.vector import get_supported_memory_backends -from autogpt.utils import utils logger = logging.getLogger(__name__) @@ -107,7 +106,7 @@ async def apply_overrides_to_config( file = ai_settings_file # Validate file - (validated, message) = utils.validate_yaml_file(file) + (validated, message) = validate_yaml_file(file) if not validated: logger.fatal(extra={"title": "FAILED FILE VALIDATION:"}, msg=message) request_user_double_check() @@ -120,7 +119,7 @@ async def apply_overrides_to_config( file = prompt_settings_file # Validate file - (validated, message) = utils.validate_yaml_file(file) + (validated, message) = validate_yaml_file(file) if not validated: logger.fatal(extra={"title": "FAILED FILE VALIDATION:"}, msg=message) request_user_double_check() diff --git a/autogpts/autogpt/autogpt/app/input.py b/autogpts/autogpt/autogpt/app/input.py new file mode 100644 index 000000000..c04fa3822 --- /dev/null +++ b/autogpts/autogpt/autogpt/app/input.py @@ -0,0 +1,19 @@ +import logging + +import click + +logger = logging.getLogger(__name__) + + +def clean_input(prompt: str = ""): + try: + # ask for input, default when just pressing Enter is y + logger.debug("Asking user via keyboard...") + + return click.prompt( + text=prompt, prompt_suffix=" ", default="", show_default=False + ) + except 
KeyboardInterrupt: + logger.info("You interrupted AutoGPT") + logger.info("Quitting...") + exit(0) diff --git a/autogpts/autogpt/autogpt/logs/log_cycle.py b/autogpts/autogpt/autogpt/app/log_cycle.py index 062455fcb..ea4e14029 100644 --- a/autogpts/autogpt/autogpt/logs/log_cycle.py +++ b/autogpts/autogpt/autogpt/app/log_cycle.py @@ -3,7 +3,7 @@ import os from pathlib import Path from typing import Any, Dict, Union -from .config import LOG_DIR +from forge.logging.config import LOG_DIR DEFAULT_PREFIX = "agent" CURRENT_CONTEXT_FILE_NAME = "current_context.json" diff --git a/autogpts/autogpt/autogpt/app/main.py b/autogpts/autogpt/autogpt/app/main.py index 04354fb10..66135f885 100644 --- a/autogpts/autogpt/autogpt/app/main.py +++ b/autogpts/autogpt/autogpt/app/main.py @@ -14,42 +14,37 @@ from types import FrameType from typing import TYPE_CHECKING, Optional from colorama import Fore, Style -from forge.sdk.db import AgentDB - -if TYPE_CHECKING: - from autogpt.agents.agent import Agent - from autogpt.agents.base import BaseAgentActionProposal +from forge.components.code_executor import ( + is_docker_available, + we_are_running_in_a_docker_container, +) +from forge.config.ai_directives import AIDirectives +from forge.config.ai_profile import AIProfile +from forge.config.config import Config, ConfigBuilder, assert_config_has_openai_api_key +from forge.db import AgentDB +from forge.file_storage import FileStorageBackendName, get_storage +from forge.llm.providers import MultiProvider +from forge.logging.config import configure_logging +from forge.logging.helpers import print_attribute, speak +from forge.models.action import ActionInterruptedByHuman, ActionProposal +from forge.models.utils import ModelWithSummary +from forge.utils.const import FINISH_COMMAND +from forge.utils.exceptions import AgentTerminated, InvalidAgentResponseError from autogpt.agent_factory.configurators import configure_agent_with_state, create_agent from autogpt.agent_factory.profile_generator import 
generate_agent_profile_for_task from autogpt.agent_manager import AgentManager from autogpt.agents.prompt_strategies.one_shot import AssistantThoughts -from autogpt.commands.execute_code import ( - is_docker_available, - we_are_running_in_a_docker_container, -) -from autogpt.config import ( - AIDirectives, - AIProfile, - Config, - ConfigBuilder, - assert_config_has_openai_api_key, -) -from autogpt.core.resource.model_providers import MultiProvider from autogpt.core.runner.client_lib.utils import coroutine -from autogpt.file_storage import FileStorageBackendName, get_storage -from autogpt.logs.config import configure_logging -from autogpt.logs.helpers import print_attribute, speak -from autogpt.models.action_history import ActionInterruptedByHuman -from autogpt.models.utils import ModelWithSummary -from autogpt.utils.exceptions import AgentTerminated, InvalidAgentResponseError -from autogpt.utils.utils import DEFAULT_FINISH_COMMAND + +if TYPE_CHECKING: + from autogpt.agents.agent import Agent from .configurator import apply_overrides_to_config +from .input import clean_input from .setup import apply_overrides_to_ai_settings, interactively_revise_ai_settings from .spinner import Spinner from .utils import ( - clean_input, get_legal_warning, markdown_to_ansi_style, print_git_branch_info, @@ -176,7 +171,6 @@ async def run_auto_gpt( + "\n".join(f"{i} - {id}" for i, id in enumerate(existing_agents, 1)) ) load_existing_agent = clean_input( - config, "Enter the number or name of the agent to run," " or hit enter to create a new one:", ) @@ -203,7 +197,7 @@ async def run_auto_gpt( if load_existing_agent: agent_state = None while True: - answer = clean_input(config, "Resume? [Y/n]") + answer = clean_input("Resume? 
[Y/n]") if answer == "" or answer.lower() == "y": agent_state = agent_manager.load_agent_state(load_existing_agent) break @@ -230,14 +224,14 @@ async def run_auto_gpt( if ( (current_episode := agent.event_history.current_episode) - and current_episode.action.use_tool.name == DEFAULT_FINISH_COMMAND + and current_episode.action.use_tool.name == FINISH_COMMAND and not current_episode.result ): # Agent was resumed after `finish` -> rewrite result of `finish` action finish_reason = current_episode.action.use_tool.arguments["reason"] print(f"Agent previously self-terminated; reason: '{finish_reason}'") new_assignment = clean_input( - config, "Please give a follow-up question or assignment:" + "Please give a follow-up question or assignment:" ) agent.event_history.register_result( ActionInterruptedByHuman(feedback=new_assignment) @@ -269,7 +263,6 @@ async def run_auto_gpt( task = "" while task.strip() == "": task = clean_input( - config, "Enter the task that you want AutoGPT to execute," " with as much detail as possible:", ) @@ -343,7 +336,6 @@ async def run_auto_gpt( # Allow user to Save As other ID save_as_id = clean_input( - config, f"Press enter to save as '{agent_id}'," " or enter a different ID to save to:", ) @@ -626,7 +618,7 @@ async def run_interaction_loop( def update_user( ai_profile: AIProfile, - action_proposal: "BaseAgentActionProposal", + action_proposal: "ActionProposal", speak_mode: bool = False, ) -> None: """Prints the assistant's thoughts and the next command to the user. 
@@ -695,7 +687,7 @@ async def get_user_feedback( while user_feedback is None: # Get input from user - console_input = clean_input(config, Fore.MAGENTA + "Input:" + Style.RESET_ALL) + console_input = clean_input(Fore.MAGENTA + "Input:" + Style.RESET_ALL) # Parse user input if console_input.lower().strip() == config.authorise_key: diff --git a/autogpts/autogpt/autogpt/app/setup.py b/autogpts/autogpt/autogpt/app/setup.py index 94460e62f..b70dba704 100644 --- a/autogpts/autogpt/autogpt/app/setup.py +++ b/autogpts/autogpt/autogpt/app/setup.py @@ -2,9 +2,12 @@ import logging from typing import Optional -from autogpt.app.utils import clean_input -from autogpt.config import AIDirectives, AIProfile, Config -from autogpt.logs.helpers import print_attribute +from forge.config.ai_directives import AIDirectives +from forge.config.ai_profile import AIProfile +from forge.config.config import Config +from forge.logging.helpers import print_attribute + +from .input import clean_input logger = logging.getLogger(__name__) @@ -69,20 +72,18 @@ async def interactively_revise_ai_settings( ) if ( - clean_input(app_config, "Continue with these settings? [Y/n]").lower() + clean_input("Continue with these settings? 
[Y/n]").lower() or app_config.authorise_key ) == app_config.authorise_key: break # Ask for revised ai_profile ai_profile.ai_name = ( - clean_input(app_config, "Enter AI name (or press enter to keep current):") + clean_input("Enter AI name (or press enter to keep current):") or ai_profile.ai_name ) ai_profile.ai_role = ( - clean_input( - app_config, "Enter new AI role (or press enter to keep current):" - ) + clean_input("Enter new AI role (or press enter to keep current):") or ai_profile.ai_role ) @@ -93,7 +94,6 @@ async def interactively_revise_ai_settings( print_attribute(f"Constraint {i+1}:", f'"{constraint}"') new_constraint = ( clean_input( - app_config, f"Enter new constraint {i+1}" " (press enter to keep current, or '-' to remove):", ) @@ -111,7 +111,6 @@ async def interactively_revise_ai_settings( # Add new constraints while True: new_constraint = clean_input( - app_config, "Press enter to finish, or enter a constraint to add:", ) if not new_constraint: @@ -125,7 +124,6 @@ async def interactively_revise_ai_settings( print_attribute(f"Resource {i+1}:", f'"{resource}"') new_resource = ( clean_input( - app_config, f"Enter new resource {i+1}" " (press enter to keep current, or '-' to remove):", ) @@ -142,7 +140,6 @@ async def interactively_revise_ai_settings( # Add new resources while True: new_resource = clean_input( - app_config, "Press enter to finish, or enter a resource to add:", ) if not new_resource: @@ -156,7 +153,6 @@ async def interactively_revise_ai_settings( print_attribute(f"Best Practice {i+1}:", f'"{best_practice}"') new_best_practice = ( clean_input( - app_config, f"Enter new best practice {i+1}" " (press enter to keep current, or '-' to remove):", ) @@ -173,7 +169,6 @@ async def interactively_revise_ai_settings( # Add new best practices while True: new_best_practice = clean_input( - app_config, "Press enter to finish, or add a best practice to add:", ) if not new_best_practice: diff --git a/autogpts/autogpt/autogpt/app/utils.py 
b/autogpts/autogpt/autogpt/app/utils.py index f1a89f490..e27aa5d1c 100644 --- a/autogpts/autogpt/autogpt/app/utils.py +++ b/autogpts/autogpt/autogpt/app/utils.py @@ -7,31 +7,16 @@ import sys from pathlib import Path from typing import TYPE_CHECKING -import click import requests from colorama import Fore, Style from git import InvalidGitRepositoryError, Repo if TYPE_CHECKING: - from autogpt.config import Config + from forge.config.config import Config logger = logging.getLogger(__name__) -def clean_input(config: "Config", prompt: str = ""): - try: - # ask for input, default when just pressing Enter is y - logger.debug("Asking user via keyboard...") - - return click.prompt( - text=prompt, prompt_suffix=" ", default="", show_default=False - ) - except KeyboardInterrupt: - logger.info("You interrupted AutoGPT") - logger.info("Quitting...") - exit(0) - - def get_bulletin_from_web(): try: response = requests.get( diff --git a/autogpts/autogpt/autogpt/core/ability/base.py b/autogpts/autogpt/autogpt/core/ability/base.py index 2686c101c..d6acd8701 100644 --- a/autogpts/autogpt/autogpt/core/ability/base.py +++ b/autogpts/autogpt/autogpt/core/ability/base.py @@ -3,13 +3,13 @@ from pprint import pformat from typing import Any, ClassVar import inflection +from forge.llm.providers import CompletionModelFunction +from forge.models.config import SystemConfiguration +from forge.models.json_schema import JSONSchema from pydantic import Field -from autogpt.core.configuration import SystemConfiguration from autogpt.core.planning.simple import LanguageModelConfiguration from autogpt.core.plugin.base import PluginLocation -from autogpt.core.resource.model_providers import CompletionModelFunction -from autogpt.core.utils.json_schema import JSONSchema from .schema import AbilityResult diff --git a/autogpts/autogpt/autogpt/core/ability/builtins/create_new_ability.py b/autogpts/autogpt/autogpt/core/ability/builtins/create_new_ability.py index 55550cafc..110cff014 100644 --- 
a/autogpts/autogpt/autogpt/core/ability/builtins/create_new_ability.py +++ b/autogpts/autogpt/autogpt/core/ability/builtins/create_new_ability.py @@ -1,10 +1,11 @@ import logging from typing import ClassVar +from forge.models.json_schema import JSONSchema + from autogpt.core.ability.base import Ability, AbilityConfiguration from autogpt.core.ability.schema import AbilityResult from autogpt.core.plugin.simple import PluginLocation, PluginStorageFormat -from autogpt.core.utils.json_schema import JSONSchema class CreateNewAbility(Ability): diff --git a/autogpts/autogpt/autogpt/core/ability/builtins/file_operations.py b/autogpts/autogpt/autogpt/core/ability/builtins/file_operations.py index 08dc8c7a9..44b4d5f9c 100644 --- a/autogpts/autogpt/autogpt/core/ability/builtins/file_operations.py +++ b/autogpts/autogpt/autogpt/core/ability/builtins/file_operations.py @@ -2,10 +2,11 @@ import logging import os from typing import ClassVar +from forge.models.json_schema import JSONSchema + from autogpt.core.ability.base import Ability, AbilityConfiguration from autogpt.core.ability.schema import AbilityResult, ContentType, Knowledge from autogpt.core.plugin.simple import PluginLocation, PluginStorageFormat -from autogpt.core.utils.json_schema import JSONSchema from autogpt.core.workspace import Workspace diff --git a/autogpts/autogpt/autogpt/core/ability/builtins/query_language_model.py b/autogpts/autogpt/autogpt/core/ability/builtins/query_language_model.py index 7a6ae68ee..f694651db 100644 --- a/autogpts/autogpt/autogpt/core/ability/builtins/query_language_model.py +++ b/autogpts/autogpt/autogpt/core/ability/builtins/query_language_model.py @@ -1,17 +1,18 @@ import logging from typing import ClassVar -from autogpt.core.ability.base import Ability, AbilityConfiguration -from autogpt.core.ability.schema import AbilityResult -from autogpt.core.planning.simple import LanguageModelConfiguration -from autogpt.core.plugin.simple import PluginLocation, PluginStorageFormat -from 
autogpt.core.resource.model_providers import ( +from forge.llm.providers import ( ChatMessage, ChatModelProvider, ModelProviderName, OpenAIModelName, ) -from autogpt.core.utils.json_schema import JSONSchema +from forge.models.json_schema import JSONSchema + +from autogpt.core.ability.base import Ability, AbilityConfiguration +from autogpt.core.ability.schema import AbilityResult +from autogpt.core.planning.simple import LanguageModelConfiguration +from autogpt.core.plugin.simple import PluginLocation, PluginStorageFormat class QueryLanguageModel(Ability): diff --git a/autogpts/autogpt/autogpt/core/ability/simple.py b/autogpts/autogpt/autogpt/core/ability/simple.py index 962413182..fad09ca69 100644 --- a/autogpts/autogpt/autogpt/core/ability/simple.py +++ b/autogpts/autogpt/autogpt/core/ability/simple.py @@ -1,16 +1,17 @@ import logging +from forge.llm.providers import ( + ChatModelProvider, + CompletionModelFunction, + ModelProviderName, +) +from forge.models.config import Configurable, SystemConfiguration, SystemSettings + from autogpt.core.ability.base import Ability, AbilityConfiguration, AbilityRegistry from autogpt.core.ability.builtins import BUILTIN_ABILITIES from autogpt.core.ability.schema import AbilityResult -from autogpt.core.configuration import Configurable, SystemConfiguration, SystemSettings from autogpt.core.memory.base import Memory from autogpt.core.plugin.simple import SimplePluginService -from autogpt.core.resource.model_providers import ( - ChatModelProvider, - CompletionModelFunction, - ModelProviderName, -) from autogpt.core.workspace.base import Workspace diff --git a/autogpts/autogpt/autogpt/core/agent/simple.py b/autogpts/autogpt/autogpt/core/agent/simple.py index ea113dafc..4b2783182 100644 --- a/autogpts/autogpt/autogpt/core/agent/simple.py +++ b/autogpts/autogpt/autogpt/core/agent/simple.py @@ -3,6 +3,8 @@ from datetime import datetime from pathlib import Path from typing import Any +from forge.llm.providers import 
CompletionModelFunction, OpenAIProvider, OpenAISettings +from forge.models.config import Configurable, SystemConfiguration, SystemSettings from pydantic import BaseModel from autogpt.core.ability import ( @@ -11,7 +13,6 @@ from autogpt.core.ability import ( SimpleAbilityRegistry, ) from autogpt.core.agent.base import Agent -from autogpt.core.configuration import Configurable, SystemConfiguration, SystemSettings from autogpt.core.memory import MemorySettings, SimpleMemory from autogpt.core.planning import PlannerSettings, SimplePlanner, Task, TaskStatus from autogpt.core.plugin.simple import ( @@ -19,11 +20,6 @@ from autogpt.core.plugin.simple import ( PluginStorageFormat, SimplePluginService, ) -from autogpt.core.resource.model_providers import ( - CompletionModelFunction, - OpenAIProvider, - OpenAISettings, -) from autogpt.core.workspace.simple import SimpleWorkspace, WorkspaceSettings @@ -92,9 +88,7 @@ class SimpleAgent(Agent, Configurable): ), openai_provider=PluginLocation( storage_format=PluginStorageFormat.INSTALLED_PACKAGE, - storage_route=( - "autogpt.core.resource.model_providers.OpenAIProvider" - ), + storage_route=("forge.llm.model_providers.OpenAIProvider"), ), planning=PluginLocation( storage_format=PluginStorageFormat.INSTALLED_PACKAGE, diff --git a/autogpts/autogpt/autogpt/core/configuration/__init__.py b/autogpts/autogpt/autogpt/core/configuration/__init__.py deleted file mode 100644 index 231819299..000000000 --- a/autogpts/autogpt/autogpt/core/configuration/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -"""The configuration encapsulates settings for all Agent subsystems.""" -from autogpt.core.configuration.schema import ( - Configurable, - SystemConfiguration, - SystemSettings, - UserConfigurable, -) - -__all__ = [ - "Configurable", - "SystemConfiguration", - "SystemSettings", - "UserConfigurable", -] diff --git a/autogpts/autogpt/autogpt/core/memory/simple.py b/autogpts/autogpt/autogpt/core/memory/simple.py index 2433f48bc..9a062d704 100644 --- 
a/autogpts/autogpt/autogpt/core/memory/simple.py +++ b/autogpts/autogpt/autogpt/core/memory/simple.py @@ -1,7 +1,8 @@ import json import logging -from autogpt.core.configuration import Configurable, SystemConfiguration, SystemSettings +from forge.models.config import Configurable, SystemConfiguration, SystemSettings + from autogpt.core.memory.base import Memory from autogpt.core.workspace import Workspace diff --git a/autogpts/autogpt/autogpt/core/planning/prompt_strategies/initial_plan.py b/autogpts/autogpt/autogpt/core/planning/prompt_strategies/initial_plan.py index ae137a985..83f495e40 100644 --- a/autogpts/autogpt/autogpt/core/planning/prompt_strategies/initial_plan.py +++ b/autogpts/autogpt/autogpt/core/planning/prompt_strategies/initial_plan.py @@ -1,16 +1,16 @@ import logging -from autogpt.core.configuration import SystemConfiguration, UserConfigurable -from autogpt.core.planning.schema import Task, TaskType -from autogpt.core.prompting import PromptStrategy -from autogpt.core.prompting.schema import ChatPrompt, LanguageModelClassification -from autogpt.core.prompting.utils import to_numbered_list -from autogpt.core.resource.model_providers import ( +from forge.llm.prompting import ChatPrompt, LanguageModelClassification, PromptStrategy +from forge.llm.prompting.utils import to_numbered_list +from forge.llm.providers import ( AssistantChatMessage, ChatMessage, CompletionModelFunction, ) -from autogpt.core.utils.json_schema import JSONSchema +from forge.models.config import SystemConfiguration, UserConfigurable +from forge.models.json_schema import JSONSchema + +from autogpt.core.planning.schema import Task, TaskType logger = logging.getLogger(__name__) diff --git a/autogpts/autogpt/autogpt/core/planning/prompt_strategies/name_and_goals.py b/autogpts/autogpt/autogpt/core/planning/prompt_strategies/name_and_goals.py index 133b4590d..f4864e2a8 100644 --- a/autogpts/autogpt/autogpt/core/planning/prompt_strategies/name_and_goals.py +++ 
b/autogpts/autogpt/autogpt/core/planning/prompt_strategies/name_and_goals.py @@ -1,14 +1,13 @@ import logging -from autogpt.core.configuration import SystemConfiguration, UserConfigurable -from autogpt.core.prompting import PromptStrategy -from autogpt.core.prompting.schema import ChatPrompt, LanguageModelClassification -from autogpt.core.resource.model_providers import ( +from forge.llm.prompting import ChatPrompt, LanguageModelClassification, PromptStrategy +from forge.llm.providers import ( AssistantChatMessage, ChatMessage, CompletionModelFunction, ) -from autogpt.core.utils.json_schema import JSONSchema +from forge.models.config import SystemConfiguration, UserConfigurable +from forge.models.json_schema import JSONSchema logger = logging.getLogger(__name__) diff --git a/autogpts/autogpt/autogpt/core/planning/prompt_strategies/next_ability.py b/autogpts/autogpt/autogpt/core/planning/prompt_strategies/next_ability.py index 0d6daad2e..b397923d9 100644 --- a/autogpts/autogpt/autogpt/core/planning/prompt_strategies/next_ability.py +++ b/autogpts/autogpt/autogpt/core/planning/prompt_strategies/next_ability.py @@ -1,16 +1,16 @@ import logging -from autogpt.core.configuration import SystemConfiguration, UserConfigurable -from autogpt.core.planning.schema import Task -from autogpt.core.prompting import PromptStrategy -from autogpt.core.prompting.schema import ChatPrompt, LanguageModelClassification -from autogpt.core.prompting.utils import to_numbered_list -from autogpt.core.resource.model_providers import ( +from forge.llm.prompting import ChatPrompt, LanguageModelClassification, PromptStrategy +from forge.llm.prompting.utils import to_numbered_list +from forge.llm.providers import ( AssistantChatMessage, ChatMessage, CompletionModelFunction, ) -from autogpt.core.utils.json_schema import JSONSchema +from forge.models.config import SystemConfiguration, UserConfigurable +from forge.models.json_schema import JSONSchema + +from autogpt.core.planning.schema import Task 
logger = logging.getLogger(__name__) diff --git a/autogpts/autogpt/autogpt/core/planning/simple.py b/autogpts/autogpt/autogpt/core/planning/simple.py index 356e6712e..6a1019821 100644 --- a/autogpts/autogpt/autogpt/core/planning/simple.py +++ b/autogpts/autogpt/autogpt/core/planning/simple.py @@ -3,24 +3,24 @@ import platform import time import distro - -from autogpt.core.configuration import ( +from forge.llm.prompting import PromptStrategy +from forge.llm.prompting.schema import LanguageModelClassification +from forge.llm.providers import ( + ChatModelProvider, + ChatModelResponse, + CompletionModelFunction, + ModelProviderName, + OpenAIModelName, +) +from forge.models.config import ( Configurable, SystemConfiguration, SystemSettings, UserConfigurable, ) + from autogpt.core.planning import prompt_strategies from autogpt.core.planning.schema import Task -from autogpt.core.prompting import PromptStrategy -from autogpt.core.prompting.schema import LanguageModelClassification -from autogpt.core.resource.model_providers import ( - ChatModelProvider, - ChatModelResponse, - CompletionModelFunction, - ModelProviderName, - OpenAIModelName, -) from autogpt.core.runner.client_lib.logging.helpers import dump_prompt from autogpt.core.workspace import Workspace diff --git a/autogpts/autogpt/autogpt/core/plugin/base.py b/autogpts/autogpt/autogpt/core/plugin/base.py index 4066a18c0..8eb1dce65 100644 --- a/autogpts/autogpt/autogpt/core/plugin/base.py +++ b/autogpts/autogpt/autogpt/core/plugin/base.py @@ -2,17 +2,14 @@ import abc import enum from typing import TYPE_CHECKING, Type +from forge.models.config import SystemConfiguration, UserConfigurable from pydantic import BaseModel -from autogpt.core.configuration import SystemConfiguration, UserConfigurable - if TYPE_CHECKING: + from forge.llm.providers import ChatModelProvider, EmbeddingModelProvider + from autogpt.core.ability import Ability, AbilityRegistry from autogpt.core.memory import Memory - from 
autogpt.core.resource.model_providers import ( - ChatModelProvider, - EmbeddingModelProvider, - ) # Expand to other types as needed PluginType = ( diff --git a/autogpts/autogpt/autogpt/core/prompting/utils.py b/autogpts/autogpt/autogpt/core/prompting/utils.py deleted file mode 100644 index 865b3fc08..000000000 --- a/autogpts/autogpt/autogpt/core/prompting/utils.py +++ /dev/null @@ -1,9 +0,0 @@ -def to_numbered_list( - items: list[str], no_items_response: str = "", **template_args -) -> str: - if items: - return "\n".join( - f"{i+1}. {item.format(**template_args)}" for i, item in enumerate(items) - ) - else: - return no_items_response diff --git a/autogpts/autogpt/autogpt/core/resource/__init__.py b/autogpts/autogpt/autogpt/core/resource/__init__.py deleted file mode 100644 index 897e08777..000000000 --- a/autogpts/autogpt/autogpt/core/resource/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -from autogpt.core.resource.schema import ( - ProviderBudget, - ProviderCredentials, - ProviderSettings, - ProviderUsage, - ResourceType, -) - -__all__ = [ - "ProviderBudget", - "ProviderCredentials", - "ProviderSettings", - "ProviderUsage", - "ResourceType", -] diff --git a/autogpts/autogpt/autogpt/core/runner/cli_web_app/server/api.py b/autogpts/autogpt/autogpt/core/runner/cli_web_app/server/api.py index 6e3fee604..76c68a7e8 100644 --- a/autogpts/autogpt/autogpt/core/runner/cli_web_app/server/api.py +++ b/autogpts/autogpt/autogpt/core/runner/cli_web_app/server/api.py @@ -1,12 +1,13 @@ import logging from agent_protocol import StepHandler, StepResult +from forge.config.ai_profile import AIProfile +from forge.config.config import ConfigBuilder +from forge.llm.prompting.prompt import DEFAULT_TRIGGERING_PROMPT +from forge.logging.helpers import user_friendly_output from autogpt.agents import Agent from autogpt.app.main import UserFeedback -from autogpt.config import AIProfile, ConfigBuilder -from autogpt.logs.helpers import user_friendly_output -from autogpt.prompts.prompt import 
DEFAULT_TRIGGERING_PROMPT async def task_handler(task_input) -> StepHandler: diff --git a/autogpts/autogpt/autogpt/core/runner/client_lib/logging/config.py b/autogpts/autogpt/autogpt/core/runner/client_lib/logging/config.py index 56f79f5fe..912b0751f 100644 --- a/autogpts/autogpt/autogpt/core/runner/client_lib/logging/config.py +++ b/autogpts/autogpt/autogpt/core/runner/client_lib/logging/config.py @@ -1,7 +1,7 @@ import logging import sys -from colorama import Fore, Style +from forge.logging import BelowLevelFilter, FancyConsoleFormatter from openai._base_client import log as openai_logger SIMPLE_LOG_FORMAT = "%(asctime)s %(levelname)s %(message)s" @@ -25,58 +25,3 @@ def configure_root_logger(): # Disable debug logging from OpenAI library openai_logger.setLevel(logging.WARNING) - - -class FancyConsoleFormatter(logging.Formatter): - """ - A custom logging formatter designed for console output. - - This formatter enhances the standard logging output with color coding. The color - coding is based on the level of the log message, making it easier to distinguish - between different types of messages in the console output. - - The color for each level is defined in the LEVEL_COLOR_MAP class attribute. 
- """ - - # level -> (level & text color, title color) - LEVEL_COLOR_MAP = { - logging.DEBUG: Fore.LIGHTBLACK_EX, - logging.INFO: Fore.BLUE, - logging.WARNING: Fore.YELLOW, - logging.ERROR: Fore.RED, - logging.CRITICAL: Fore.RED + Style.BRIGHT, - } - - def format(self, record: logging.LogRecord) -> str: - # Make sure `msg` is a string - if not hasattr(record, "msg"): - record.msg = "" - elif not type(record.msg) is str: - record.msg = str(record.msg) - - # Determine default color based on error level - level_color = "" - if record.levelno in self.LEVEL_COLOR_MAP: - level_color = self.LEVEL_COLOR_MAP[record.levelno] - record.levelname = f"{level_color}{record.levelname}{Style.RESET_ALL}" - - # Determine color for message - color = getattr(record, "color", level_color) - color_is_specified = hasattr(record, "color") - - # Don't color INFO messages unless the color is explicitly specified. - if color and (record.levelno != logging.INFO or color_is_specified): - record.msg = f"{color}{record.msg}{Style.RESET_ALL}" - - return super().format(record) - - -class BelowLevelFilter(logging.Filter): - """Filter for logging levels below a certain threshold.""" - - def __init__(self, below_level: int): - super().__init__() - self.below_level = below_level - - def filter(self, record: logging.LogRecord): - return record.levelno < self.below_level diff --git a/autogpts/autogpt/autogpt/core/runner/client_lib/logging/helpers.py b/autogpts/autogpt/autogpt/core/runner/client_lib/logging/helpers.py index 53d0964d4..4aa908afa 100644 --- a/autogpts/autogpt/autogpt/core/runner/client_lib/logging/helpers.py +++ b/autogpts/autogpt/autogpt/core/runner/client_lib/logging/helpers.py @@ -2,8 +2,9 @@ from math import ceil, floor from typing import TYPE_CHECKING if TYPE_CHECKING: - from autogpt.core.prompting import ChatPrompt - from autogpt.core.resource.model_providers import ChatMessage + from forge.llm.prompting import ChatPrompt + from forge.llm.providers.schema import ChatMessage + 
SEPARATOR_LENGTH = 42 diff --git a/autogpts/autogpt/autogpt/core/workspace/base.py b/autogpts/autogpt/autogpt/core/workspace/base.py index b011056c3..b3f1a9fc8 100644 --- a/autogpts/autogpt/autogpt/core/workspace/base.py +++ b/autogpts/autogpt/autogpt/core/workspace/base.py @@ -6,7 +6,7 @@ import typing from pathlib import Path if typing.TYPE_CHECKING: - from autogpt.core.configuration import AgentConfiguration + from autogpt.core.agent.simple import AgentConfiguration class Workspace(abc.ABC): diff --git a/autogpts/autogpt/autogpt/core/workspace/simple.py b/autogpts/autogpt/autogpt/core/workspace/simple.py index 1c7a3f903..de1eb9364 100644 --- a/autogpts/autogpt/autogpt/core/workspace/simple.py +++ b/autogpts/autogpt/autogpt/core/workspace/simple.py @@ -3,14 +3,14 @@ import logging import typing from pathlib import Path -from pydantic import SecretField - -from autogpt.core.configuration import ( +from forge.models.config import ( Configurable, SystemConfiguration, SystemSettings, UserConfigurable, ) +from pydantic import SecretField + from autogpt.core.workspace.base import Workspace if typing.TYPE_CHECKING: diff --git a/autogpts/autogpt/autogpt/llm/providers/openai.py b/autogpts/autogpt/autogpt/llm/providers/openai.py deleted file mode 100644 index e6423827c..000000000 --- a/autogpts/autogpt/autogpt/llm/providers/openai.py +++ /dev/null @@ -1,30 +0,0 @@ -from __future__ import annotations - -import logging -from typing import TYPE_CHECKING, Callable, Iterable, TypeVar - -if TYPE_CHECKING: - from autogpt.models.command import Command - -from autogpt.core.resource.model_providers import CompletionModelFunction - -logger = logging.getLogger(__name__) - - -T = TypeVar("T", bound=Callable) - - -def function_specs_from_commands( - commands: Iterable[Command], -) -> list[CompletionModelFunction]: - """Get OpenAI-consumable function specs for the agent's available commands. 
- see https://platform.openai.com/docs/guides/gpt/function-calling - """ - return [ - CompletionModelFunction( - name=command.names[0], - description=command.description, - parameters={param.name: param.spec for param in command.parameters}, - ) - for command in commands - ] diff --git a/autogpts/autogpt/autogpt/logs/__init__.py b/autogpts/autogpt/autogpt/logs/__init__.py deleted file mode 100644 index f48d5a6f1..000000000 --- a/autogpts/autogpt/autogpt/logs/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -from .config import configure_logging -from .helpers import user_friendly_output -from .log_cycle import ( - CURRENT_CONTEXT_FILE_NAME, - NEXT_ACTION_FILE_NAME, - PROMPT_SUMMARY_FILE_NAME, - PROMPT_SUPERVISOR_FEEDBACK_FILE_NAME, - SUMMARY_FILE_NAME, - SUPERVISOR_FEEDBACK_FILE_NAME, - USER_INPUT_FILE_NAME, - LogCycleHandler, -) - -__all__ = [ - "configure_logging", - "user_friendly_output", - "CURRENT_CONTEXT_FILE_NAME", - "NEXT_ACTION_FILE_NAME", - "PROMPT_SUMMARY_FILE_NAME", - "PROMPT_SUPERVISOR_FEEDBACK_FILE_NAME", - "SUMMARY_FILE_NAME", - "SUPERVISOR_FEEDBACK_FILE_NAME", - "USER_INPUT_FILE_NAME", - "LogCycleHandler", -] diff --git a/autogpts/autogpt/autogpt/memory/vector/__init__.py b/autogpts/autogpt/autogpt/memory/vector/__init__.py index bcef31a41..2f9e121ac 100644 --- a/autogpts/autogpt/autogpt/memory/vector/__init__.py +++ b/autogpts/autogpt/autogpt/memory/vector/__init__.py @@ -1,4 +1,4 @@ -from autogpt.config import Config +from forge.config.config import Config from .memory_item import MemoryItem, MemoryItemFactory, MemoryItemRelevance from .providers.base import VectorMemoryProvider as VectorMemory diff --git a/autogpts/autogpt/autogpt/memory/vector/memory_item.py b/autogpts/autogpt/autogpt/memory/vector/memory_item.py index 8d03d0209..3de65fe25 100644 --- a/autogpts/autogpt/autogpt/memory/vector/memory_item.py +++ b/autogpts/autogpt/autogpt/memory/vector/memory_item.py @@ -6,16 +6,11 @@ from typing import Literal import ftfy import numpy as np +from 
forge.config.config import Config +from forge.content_processing.text import chunk_content, split_text, summarize_text +from forge.llm.providers import ChatMessage, ChatModelProvider, EmbeddingModelProvider from pydantic import BaseModel -from autogpt.config import Config -from autogpt.core.resource.model_providers import ( - ChatMessage, - ChatModelProvider, - EmbeddingModelProvider, -) -from autogpt.processing.text import chunk_content, split_text, summarize_text - from .utils import Embedding, get_embedding logger = logging.getLogger(__name__) diff --git a/autogpts/autogpt/autogpt/memory/vector/providers/base.py b/autogpts/autogpt/autogpt/memory/vector/providers/base.py index 8883f1346..b227840f1 100644 --- a/autogpts/autogpt/autogpt/memory/vector/providers/base.py +++ b/autogpts/autogpt/autogpt/memory/vector/providers/base.py @@ -4,8 +4,7 @@ import logging from typing import MutableSet, Sequence import numpy as np - -from autogpt.config.config import Config +from forge.config.config import Config from .. 
import MemoryItem, MemoryItemRelevance from ..utils import Embedding, get_embedding diff --git a/autogpts/autogpt/autogpt/memory/vector/providers/json_file.py b/autogpts/autogpt/autogpt/memory/vector/providers/json_file.py index efab7e8f6..7fe6d5fb8 100644 --- a/autogpts/autogpt/autogpt/memory/vector/providers/json_file.py +++ b/autogpts/autogpt/autogpt/memory/vector/providers/json_file.py @@ -5,8 +5,7 @@ from pathlib import Path from typing import Iterator import orjson - -from autogpt.config import Config +from forge.config.config import Config from ..memory_item import MemoryItem from .base import VectorMemoryProvider diff --git a/autogpts/autogpt/autogpt/memory/vector/providers/no_memory.py b/autogpts/autogpt/autogpt/memory/vector/providers/no_memory.py index 01f6c1801..9b9e92d91 100644 --- a/autogpts/autogpt/autogpt/memory/vector/providers/no_memory.py +++ b/autogpts/autogpt/autogpt/memory/vector/providers/no_memory.py @@ -3,7 +3,7 @@ from __future__ import annotations from typing import Iterator, Optional -from autogpt.config.config import Config +from forge.config.config import Config from .. 
import MemoryItem from .base import VectorMemoryProvider diff --git a/autogpts/autogpt/autogpt/memory/vector/utils.py b/autogpts/autogpt/autogpt/memory/vector/utils.py index e201b738e..05ebf51d4 100644 --- a/autogpts/autogpt/autogpt/memory/vector/utils.py +++ b/autogpts/autogpt/autogpt/memory/vector/utils.py @@ -2,9 +2,8 @@ import logging from typing import Any, Sequence, overload import numpy as np - -from autogpt.config import Config -from autogpt.core.resource.model_providers import EmbeddingModelProvider +from forge.config.config import Config +from forge.llm.providers import EmbeddingModelProvider logger = logging.getLogger(__name__) diff --git a/autogpts/autogpt/autogpt/prompts/__init__.py b/autogpts/autogpt/autogpt/prompts/__init__.py deleted file mode 100644 index e69de29bb..000000000 --- a/autogpts/autogpt/autogpt/prompts/__init__.py +++ /dev/null diff --git a/autogpts/autogpt/autogpt/speech/__init__.py b/autogpts/autogpt/autogpt/speech/__init__.py deleted file mode 100644 index d5f0f2e0f..000000000 --- a/autogpts/autogpt/autogpt/speech/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -"""This module contains the speech recognition and speech synthesis functions.""" -from autogpt.speech.say import TextToSpeechProvider, TTSConfig - -__all__ = ["TextToSpeechProvider", "TTSConfig"] diff --git a/autogpts/autogpt/autogpt/url_utils/__init__.py b/autogpts/autogpt/autogpt/url_utils/__init__.py deleted file mode 100644 index e69de29bb..000000000 --- a/autogpts/autogpt/autogpt/url_utils/__init__.py +++ /dev/null diff --git a/autogpts/autogpt/autogpt/utils/retry_decorator.py b/autogpts/autogpt/autogpt/utils/retry_decorator.py deleted file mode 100644 index 23cb45d0f..000000000 --- a/autogpts/autogpt/autogpt/utils/retry_decorator.py +++ /dev/null @@ -1,31 +0,0 @@ -import inspect -from typing import Optional - -import sentry_sdk - - -def retry(retry_count: int = 3, pass_exception: str = "exception"): - """Decorator to retry a function multiple times on failure. 
- Can pass the exception to the function as a keyword argument.""" - - def decorator(func): - params = inspect.signature(func).parameters - - async def wrapper(*args, **kwargs): - exception: Optional[Exception] = None - attempts = 0 - while attempts < retry_count: - try: - if pass_exception in params: - kwargs[pass_exception] = exception - return await func(*args, **kwargs) - except Exception as e: - attempts += 1 - exception = e - sentry_sdk.capture_exception(e) - if attempts >= retry_count: - raise e - - return wrapper - - return decorator diff --git a/autogpts/autogpt/autogpt/utils/singleton.py b/autogpts/autogpt/autogpt/utils/singleton.py deleted file mode 100644 index 46c6256e0..000000000 --- a/autogpts/autogpt/autogpt/utils/singleton.py +++ /dev/null @@ -1,16 +0,0 @@ -"""The singleton metaclass for ensuring only one instance of a class.""" -import abc - - -class Singleton(abc.ABCMeta, type): - """ - Singleton metaclass for ensuring only one instance of a class. - """ - - _instances = {} - - def __call__(cls, *args, **kwargs): - """Call method for the singleton metaclass.""" - if cls not in cls._instances: - cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs) - return cls._instances[cls] diff --git a/autogpts/autogpt/poetry.lock b/autogpts/autogpt/poetry.lock index 0a19469e1..e04e85c77 100644 --- a/autogpts/autogpt/poetry.lock +++ b/autogpts/autogpt/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. 
[[package]] name = "agbenchmark" @@ -311,36 +311,55 @@ description = "" optional = false python-versions = "^3.10" files = [] -develop = false +develop = true [package.dependencies] aiohttp = "^3.8.5" +anthropic = "^0.25.1" +beautifulsoup4 = "^4.12.2" +boto3 = "^1.33.6" bs4 = "^0.0.1" +charset-normalizer = "^3.1.0" chromadb = "^0.4.10" -colorlog = "^6.7.0" +click = "*" +colorama = "^0.4.6" +demjson3 = "^3.0.0" +docker = "*" duckduckgo-search = "^5.0.0" +fastapi = "^0.109.1" +gitpython = "^3.1.32" +google-api-python-client = "*" google-cloud-storage = "^2.13.0" jinja2 = "^3.1.2" +jsonschema = "*" litellm = "^1.17.9" openai = "^1.7.2" +Pillow = "*" +playsound = "~1.2.2" +pydantic = "*" +pylatexenc = "*" +pypdf = "^3.1.0" +python-docx = "*" python-dotenv = "^1.0.0" python-multipart = "^0.0.7" +pyyaml = "^6.0" +requests = "*" selenium = "^4.13.0" +sentry-sdk = "^1.40.4" +spacy = "^3.0.0" sqlalchemy = "^2.0.19" tenacity = "^8.2.2" +tiktoken = "^0.5.0" toml = "^0.10.2" uvicorn = "^0.23.2" webdriver-manager = "^4.0.1" [package.extras] -benchmark = ["agbenchmark @ git+https://github.com/Significant-Gravitas/AutoGPT.git#subdirectory=benchmark"] +benchmark = ["agbenchmark @ file:///home/reinier/code/agpt/Auto-GPT/benchmark"] [package.source] -type = "git" -url = "https://github.com/Significant-Gravitas/AutoGPT.git" -reference = "HEAD" -resolved_reference = "fd3f8fa5fc86271e4e319258fefdb3065d1aa0d4" -subdirectory = "autogpts/forge" +type = "directory" +url = "../forge" [[package]] name = "backoff" @@ -1368,23 +1387,6 @@ humanfriendly = ">=9.1" cron = ["capturer (>=2.4)"] [[package]] -name = "colorlog" -version = "6.8.0" -description = "Add colours to the output of Python's logging module." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "colorlog-6.8.0-py3-none-any.whl", hash = "sha256:4ed23b05a1154294ac99f511fabe8c1d6d4364ec1f7fc989c7fb515ccc29d375"}, - {file = "colorlog-6.8.0.tar.gz", hash = "sha256:fbb6fdf9d5685f2517f388fb29bb27d54e8654dd31f58bc2a3b217e967a95ca6"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} - -[package.extras] -development = ["black", "flake8", "mypy", "pytest", "types-colorama"] - -[[package]] name = "confection" version = "0.1.4" description = "The sweetest config system for Python" @@ -1683,25 +1685,6 @@ files = [ ] [[package]] -name = "dnspython" -version = "2.4.2" -description = "DNS toolkit" -optional = false -python-versions = ">=3.8,<4.0" -files = [ - {file = "dnspython-2.4.2-py3-none-any.whl", hash = "sha256:57c6fbaaeaaf39c891292012060beb141791735dbb4004798328fc2c467402d8"}, - {file = "dnspython-2.4.2.tar.gz", hash = "sha256:8dcfae8c7460a2f84b4072e26f1c9f4101ca20c071649cb7c34e8b6a93d58984"}, -] - -[package.extras] -dnssec = ["cryptography (>=2.6,<42.0)"] -doh = ["h2 (>=4.1.0)", "httpcore (>=0.17.3)", "httpx (>=0.24.1)"] -doq = ["aioquic (>=0.9.20)"] -idna = ["idna (>=2.1,<4.0)"] -trio = ["trio (>=0.14,<0.23)"] -wmi = ["wmi (>=1.5.1,<2.0.0)"] - -[[package]] name = "docker" version = "7.0.0" description = "A Python library for the Docker Engine API." 
@@ -3211,24 +3194,6 @@ extra-proxy = ["streamlit (>=1.29.0,<2.0.0)"] proxy = ["backoff", "fastapi (>=0.104.1,<0.105.0)", "gunicorn (>=21.2.0,<22.0.0)", "orjson (>=3.9.7,<4.0.0)", "pyyaml (>=6.0,<7.0)", "rq", "uvicorn (>=0.22.0,<0.23.0)"] [[package]] -name = "loguru" -version = "0.7.2" -description = "Python logging made (stupidly) simple" -optional = false -python-versions = ">=3.5" -files = [ - {file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"}, - {file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"}, -] - -[package.dependencies] -colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} -win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} - -[package.extras] -dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"] - -[[package]] name = "lxml" version = "5.1.0" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
@@ -4419,31 +4384,6 @@ typing = ["typing-extensions"] xmp = ["defusedxml"] [[package]] -name = "pinecone-client" -version = "2.2.4" -description = "Pinecone client and SDK" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pinecone-client-2.2.4.tar.gz", hash = "sha256:2c1cc1d6648b2be66e944db2ffa59166a37b9164d1135ad525d9cd8b1e298168"}, - {file = "pinecone_client-2.2.4-py3-none-any.whl", hash = "sha256:5bf496c01c2f82f4e5c2dc977cc5062ecd7168b8ed90743b09afcc8c7eb242ec"}, -] - -[package.dependencies] -dnspython = ">=2.0.0" -loguru = ">=0.5.0" -numpy = ">=1.22.0" -python-dateutil = ">=2.5.3" -pyyaml = ">=5.4" -requests = ">=2.19.0" -tqdm = ">=4.64.1" -typing-extensions = ">=3.7.4" -urllib3 = ">=1.21.1" - -[package.extras] -grpc = ["googleapis-common-protos (>=1.53.0)", "grpc-gateway-protoc-gen-openapiv2 (==0.1.0)", "grpcio (>=1.44.0)", "lz4 (>=3.1.3)", "protobuf (>=3.20.0,<3.21.0)"] - -[[package]] name = "platformdirs" version = "4.1.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
@@ -5315,7 +5255,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -5370,24 +5309,6 @@ lxml = "*" test = ["timeout-decorator"] [[package]] -name = "redis" -version = "5.0.1" -description = "Python client for Redis database and key-value store" -optional = false -python-versions = ">=3.7" -files = [ - {file = "redis-5.0.1-py3-none-any.whl", hash = "sha256:ed4802971884ae19d640775ba3b03aa2e7bd5e8fb8dfaed2decce4d0fc48391f"}, - {file = "redis-5.0.1.tar.gz", hash = "sha256:0dab495cd5753069d3bc650a0dde8a8f9edde16fc5691b689a566eda58100d0f"}, -] - -[package.dependencies] -async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2\""} - -[package.extras] -hiredis = ["hiredis (>=1.0.0)"] -ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] - -[[package]] name = "referencing" version = "0.32.1" description = "JSON Referencing + Python" @@ -7046,20 +6967,6 @@ files = [ ] [[package]] -name = "win32-setctime" -version = "1.1.0" -description = "A small 
Python utility to set file creation time on Windows" -optional = false -python-versions = ">=3.5" -files = [ - {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"}, - {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"}, -] - -[package.extras] -dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] - -[[package]] name = "wrapt" version = "1.16.0" description = "Module for decorators, wrappers and monkey patching." @@ -7276,4 +7183,4 @@ benchmark = ["agbenchmark"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "32d6e9f337ee33e712c42a21a0abca1f5d8d18be44bb2a26c08b375070eda1f9" +content-hash = "d79316409dd12b59677b9d5c31717f8147bac58ee96d42ce9fb0d01cdcf826b0" diff --git a/autogpts/autogpt/pyproject.toml b/autogpts/autogpt/pyproject.toml index 0ad7992cf..c176aedfe 100644 --- a/autogpts/autogpt/pyproject.toml +++ b/autogpts/autogpt/pyproject.toml @@ -23,21 +23,16 @@ serve = "autogpt.app.cli:serve" [tool.poetry.dependencies] python = "^3.10" anthropic = "^0.25.1" -# autogpt-forge = { path = "../forge" } -autogpt-forge = {git = "https://github.com/Significant-Gravitas/AutoGPT.git", subdirectory = "autogpts/forge"} +autogpt-forge = { path = "../forge", develop = true } +# autogpt-forge = {git = "https://github.com/Significant-Gravitas/AutoGPT.git", subdirectory = "autogpts/forge"} beautifulsoup4 = "^4.12.2" -boto3 = "^1.33.6" charset-normalizer = "^3.1.0" click = "*" colorama = "^0.4.6" -demjson3 = "^3.0.0" distro = "^1.8.0" -docker = "*" -duckduckgo-search = "^5.0.0" en-core-web-sm = {url = "https://github.com/explosion/spacy-models/releases/download/en_core_web_sm-3.7.1/en_core_web_sm-3.7.1-py3-none-any.whl"} fastapi = "^0.109.1" ftfy = "^6.1.1" -gitpython = "^3.1.32" google-api-python-client = "*" gTTS = "^2.3.1" hypercorn = "^0.14.4" @@ -47,23 +42,16 @@ numpy = "*" openai = "^1.7.2" orjson = "^3.8.10" 
Pillow = "*" -pinecone-client = "^2.2.1" -playsound = "~1.2.2" pydantic = "*" -pylatexenc = "*" -pypdf = "^3.1.0" python-docx = "*" python-dotenv = "^1.0.0" pyyaml = "^6.0" readability-lxml = "^0.8.1" -redis = "*" requests = "*" -selenium = "^4.11.2" sentry-sdk = "^1.40.4" spacy = "^3.7.4" tenacity = "^8.2.2" tiktoken = "^0.5.0" -webdriver-manager = "*" # OpenAI and Generic plugins import openapi-python-client = "^0.14.0" diff --git a/autogpts/autogpt/scripts/git_log_to_release_notes.py b/autogpts/autogpt/scripts/git_log_to_release_notes.py index 8b03dff68..ba121e406 100755 --- a/autogpts/autogpt/scripts/git_log_to_release_notes.py +++ b/autogpts/autogpt/scripts/git_log_to_release_notes.py @@ -5,10 +5,10 @@ from pathlib import Path from typing import Optional import click +from forge.llm.providers import ChatMessage, MultiProvider +from forge.llm.providers.anthropic import AnthropicModelName from git import Repo, TagReference -from autogpt.core.resource.model_providers import ChatMessage, MultiProvider -from autogpt.core.resource.model_providers.anthropic import AnthropicModelName from autogpt.core.runner.client_lib.utils import coroutine @@ -132,8 +132,7 @@ Do not mention the changes in the example when writing your release notes! 
if __name__ == "__main__": import dotenv - - from autogpt.logs.config import configure_logging + from forge.logging.config import configure_logging configure_logging(debug=True) diff --git a/autogpts/autogpt/tests/conftest.py b/autogpts/autogpt/tests/conftest.py index 64376446d..4aae58bd7 100644 --- a/autogpts/autogpt/tests/conftest.py +++ b/autogpts/autogpt/tests/conftest.py @@ -5,22 +5,21 @@ import uuid from pathlib import Path import pytest -from pytest_mock import MockerFixture - -from autogpt.agents.agent import Agent, AgentConfiguration, AgentSettings -from autogpt.app.main import _configure_llm_provider -from autogpt.config import AIProfile, Config, ConfigBuilder -from autogpt.core.resource.model_providers import ChatModelProvider -from autogpt.file_storage.local import ( +from forge.config.ai_profile import AIProfile +from forge.config.config import Config, ConfigBuilder +from forge.file_storage.local import ( FileStorage, FileStorageConfiguration, LocalFileStorage, ) -from autogpt.logs.config import configure_logging +from forge.llm.providers import ChatModelProvider +from forge.logging.config import configure_logging + +from autogpt.agents.agent import Agent, AgentConfiguration, AgentSettings +from autogpt.app.main import _configure_llm_provider pytest_plugins = [ "tests.integration.agent_factory", - "tests.integration.memory.utils", "tests.vcr", ] @@ -50,7 +49,6 @@ def storage(app_data_dir: Path) -> FileStorage: def config( tmp_project_root: Path, app_data_dir: Path, - mocker: MockerFixture, ): if not os.environ.get("OPENAI_API_KEY"): os.environ["OPENAI_API_KEY"] = "sk-dummy" diff --git a/autogpts/autogpt/tests/integration/agent_factory.py b/autogpts/autogpt/tests/integration/agent_factory.py index b51759f85..c518f1f91 100644 --- a/autogpts/autogpt/tests/integration/agent_factory.py +++ b/autogpts/autogpt/tests/integration/agent_factory.py @@ -1,22 +1,9 @@ import pytest +from forge.config.ai_profile import AIProfile +from forge.config.config import 
Config +from forge.file_storage import FileStorageBackendName, get_storage from autogpt.agents.agent import Agent, AgentConfiguration, AgentSettings -from autogpt.agents.prompt_strategies.one_shot import OneShotAgentPromptStrategy -from autogpt.config import AIProfile, Config -from autogpt.file_storage import FileStorageBackendName, get_storage -from autogpt.memory.vector import get_memory - - -@pytest.fixture -def memory_json_file(config: Config): - was_memory_backend = config.memory_backend - - config.memory_backend = "json_file" - memory = get_memory(config) - memory.clear() - yield memory - - config.memory_backend = was_memory_backend @pytest.fixture @@ -29,10 +16,6 @@ def dummy_agent(config: Config, llm_provider, memory_json_file): ], ) - agent_prompt_config = OneShotAgentPromptStrategy.default_configuration.copy( - deep=True - ) - agent_prompt_config.use_functions_api = config.openai_functions agent_settings = AgentSettings( name=Agent.default_settings.name, description=Agent.default_settings.description, @@ -42,7 +25,6 @@ def dummy_agent(config: Config, llm_provider, memory_json_file): smart_llm=config.smart_llm, use_functions_api=config.openai_functions, ), - prompt_config=agent_prompt_config, history=Agent.default_settings.history.copy(deep=True), ) diff --git a/autogpts/autogpt/tests/integration/memory/__init__.py b/autogpts/autogpt/tests/integration/memory/__init__.py deleted file mode 100644 index e69de29bb..000000000 --- a/autogpts/autogpt/tests/integration/memory/__init__.py +++ /dev/null diff --git a/autogpts/autogpt/tests/integration/test_execute_code.py b/autogpts/autogpt/tests/integration/test_execute_code.py index 8b82fb63d..3d993e6c5 100644 --- a/autogpts/autogpt/tests/integration/test_execute_code.py +++ b/autogpts/autogpt/tests/integration/test_execute_code.py @@ -4,15 +4,15 @@ import tempfile from pathlib import Path import pytest - -from autogpt.agents.agent import Agent -from autogpt.commands.execute_code import ( +from 
forge.components.code_executor import ( ALLOWLIST_CONTROL, CodeExecutorComponent, is_docker_available, we_are_running_in_a_docker_container, ) -from autogpt.utils.exceptions import InvalidArgumentError, OperationNotAllowedError +from forge.utils.exceptions import InvalidArgumentError, OperationNotAllowedError + +from autogpt.agents.agent import Agent @pytest.fixture diff --git a/autogpts/autogpt/tests/integration/test_image_gen.py b/autogpts/autogpt/tests/integration/test_image_gen.py index 6081db525..e7427e22b 100644 --- a/autogpts/autogpt/tests/integration/test_image_gen.py +++ b/autogpts/autogpt/tests/integration/test_image_gen.py @@ -4,10 +4,10 @@ from pathlib import Path from unittest.mock import patch import pytest +from forge.components.image_gen import ImageGeneratorComponent from PIL import Image from autogpt.agents.agent import Agent -from autogpt.commands.image_gen import ImageGeneratorComponent @pytest.fixture diff --git a/autogpts/autogpt/tests/integration/test_setup.py b/autogpts/autogpt/tests/integration/test_setup.py index 3c66e257f..3d8535a88 100644 --- a/autogpts/autogpt/tests/integration/test_setup.py +++ b/autogpts/autogpt/tests/integration/test_setup.py @@ -1,13 +1,14 @@ from unittest.mock import patch import pytest +from forge.config.ai_directives import AIDirectives +from forge.config.ai_profile import AIProfile +from forge.config.config import Config from autogpt.app.setup import ( apply_overrides_to_ai_settings, interactively_revise_ai_settings, ) -from autogpt.config import AIDirectives, Config -from autogpt.config.ai_profile import AIProfile @pytest.mark.asyncio diff --git a/autogpts/autogpt/tests/integration/test_web_selenium.py b/autogpts/autogpt/tests/integration/test_web_selenium.py index 085a4b568..e8198af3a 100644 --- a/autogpts/autogpt/tests/integration/test_web_selenium.py +++ b/autogpts/autogpt/tests/integration/test_web_selenium.py @@ -1,7 +1,7 @@ import pytest +from forge.components.web.selenium import BrowsingError, 
WebSeleniumComponent from autogpt.agents.agent import Agent -from autogpt.commands.web_selenium import BrowsingError, WebSeleniumComponent @pytest.fixture diff --git a/autogpts/autogpt/autogpt/llm/providers/__init__.py b/autogpts/autogpt/tests/memory/__init__.py index e69de29bb..e69de29bb 100644 --- a/autogpts/autogpt/autogpt/llm/providers/__init__.py +++ b/autogpts/autogpt/tests/memory/__init__.py diff --git a/autogpts/autogpt/tests/integration/memory/_test_json_file_memory.py b/autogpts/autogpt/tests/memory/_test_json_file_memory.py index 94bf0d1bd..f0420205a 100644 --- a/autogpts/autogpt/tests/integration/memory/_test_json_file_memory.py +++ b/autogpts/autogpt/tests/memory/_test_json_file_memory.py @@ -2,9 +2,9 @@ """Tests for JSONFileMemory class""" import orjson import pytest +from forge.config.config import Config +from forge.file_storage import FileStorage -from autogpt.config import Config -from autogpt.file_storage import FileStorage from autogpt.memory.vector import JSONFileMemory, MemoryItem diff --git a/autogpts/autogpt/tests/integration/memory/conftest.py b/autogpts/autogpt/tests/memory/conftest.py index 64ac651de..64ac651de 100644 --- a/autogpts/autogpt/tests/integration/memory/conftest.py +++ b/autogpts/autogpt/tests/memory/conftest.py diff --git a/autogpts/autogpt/tests/integration/memory/utils.py b/autogpts/autogpt/tests/memory/utils.py index aea12832f..ca97a7e3f 100644 --- a/autogpts/autogpt/tests/integration/memory/utils.py +++ b/autogpts/autogpt/tests/memory/utils.py @@ -1,11 +1,11 @@ import numpy import pytest +from forge.config.config import Config +from forge.llm.providers import OPEN_AI_EMBEDDING_MODELS from pytest_mock import MockerFixture import autogpt.memory.vector.memory_item as vector_memory_item import autogpt.memory.vector.providers.base as memory_provider_base -from autogpt.config.config import Config -from autogpt.core.resource.model_providers import OPEN_AI_EMBEDDING_MODELS from autogpt.memory.vector import get_memory from 
autogpt.memory.vector.utils import Embedding diff --git a/autogpts/autogpt/tests/unit/test_ai_profile.py b/autogpts/autogpt/tests/unit/test_ai_profile.py index a60de4d9d..6697bc738 100644 --- a/autogpts/autogpt/tests/unit/test_ai_profile.py +++ b/autogpts/autogpt/tests/unit/test_ai_profile.py @@ -1,5 +1,5 @@ -from autogpt.config.ai_profile import AIProfile -from autogpt.file_storage.base import FileStorage +from forge.config.ai_profile import AIProfile +from forge.file_storage import FileStorage """ Test cases for the AIProfile class, which handles loads the AI configuration diff --git a/autogpts/autogpt/tests/unit/test_config.py b/autogpts/autogpt/tests/unit/test_config.py index d6120dec6..f5f6aa2fe 100644 --- a/autogpts/autogpt/tests/unit/test_config.py +++ b/autogpts/autogpt/tests/unit/test_config.py @@ -8,16 +8,13 @@ from typing import Any from unittest import mock import pytest +from forge.config.config import Config, ConfigBuilder +from forge.llm.providers.schema import ChatModelInfo, ModelProviderName from openai.pagination import AsyncPage from openai.types import Model from pydantic import SecretStr from autogpt.app.configurator import GPT_3_MODEL, GPT_4_MODEL, apply_overrides_to_config -from autogpt.config import Config, ConfigBuilder -from autogpt.core.resource.model_providers.schema import ( - ChatModelInfo, - ModelProviderName, -) def test_initial_values(config: Config) -> None: @@ -147,7 +144,7 @@ def test_azure_config(config_with_azure: Config) -> None: @pytest.mark.asyncio async def test_create_config_gpt4only(config: Config) -> None: with mock.patch( - "autogpt.core.resource.model_providers.multi.MultiProvider.get_available_models" + "forge.llm.providers.multi.MultiProvider.get_available_models" ) as mock_get_models: mock_get_models.return_value = [ ChatModelInfo( @@ -167,7 +164,7 @@ async def test_create_config_gpt4only(config: Config) -> None: @pytest.mark.asyncio async def test_create_config_gpt3only(config: Config) -> None: with mock.patch( - 
"autogpt.core.resource.model_providers.multi.MultiProvider.get_available_models" + "forge.llm.providers.multi.MultiProvider.get_available_models" ) as mock_get_models: mock_get_models.return_value = [ ChatModelInfo( diff --git a/autogpts/autogpt/tests/unit/test_file_operations.py b/autogpts/autogpt/tests/unit/test_file_operations.py index 92b03daa6..5ce06ca52 100644 --- a/autogpts/autogpt/tests/unit/test_file_operations.py +++ b/autogpts/autogpt/tests/unit/test_file_operations.py @@ -2,14 +2,9 @@ import os from pathlib import Path import pytest -from pytest_mock import MockerFixture +from forge.file_storage import FileStorage -import autogpt.agents.features.agent_file_manager as file_ops from autogpt.agents.agent import Agent -from autogpt.config import Config -from autogpt.file_storage import FileStorage -from autogpt.memory.vector.memory_item import MemoryItem -from autogpt.memory.vector.utils import Embedding @pytest.fixture() @@ -17,25 +12,6 @@ def file_content(): return "This is a test file.\n" -@pytest.fixture() -def mock_MemoryItem_from_text( - mocker: MockerFixture, mock_embedding: Embedding, config: Config -): - mocker.patch.object( - file_ops.MemoryItemFactory, - "from_text", - new=lambda content, source_type, config, metadata: MemoryItem( - raw_content=content, - summary=f"Summary of content '{content}'", - chunk_summaries=[f"Summary of content '{content}'"], - chunks=[content], - e_summary=mock_embedding, - e_chunks=[mock_embedding], - metadata=metadata | {"source_type": source_type}, - ), - ) - - @pytest.fixture def file_manager_component(agent: Agent): return agent.file_manager diff --git a/autogpts/autogpt/tests/unit/test_gcs_file_storage.py b/autogpts/autogpt/tests/unit/test_gcs_file_storage.py index a9dcd0103..a4f091ef1 100644 --- a/autogpts/autogpt/tests/unit/test_gcs_file_storage.py +++ b/autogpts/autogpt/tests/unit/test_gcs_file_storage.py @@ -4,12 +4,11 @@ from pathlib import Path import pytest import pytest_asyncio +from forge.file_storage 
import GCSFileStorage, GCSFileStorageConfiguration from google.auth.exceptions import GoogleAuthError from google.cloud import storage from google.cloud.exceptions import NotFound -from autogpt.file_storage.gcs import GCSFileStorage, GCSFileStorageConfiguration - try: storage.Client() except GoogleAuthError: diff --git a/autogpts/autogpt/tests/unit/test_git_commands.py b/autogpts/autogpt/tests/unit/test_git_commands.py index bf5dd0d84..05369f151 100644 --- a/autogpts/autogpt/tests/unit/test_git_commands.py +++ b/autogpts/autogpt/tests/unit/test_git_commands.py @@ -1,11 +1,11 @@ import pytest +from forge.components.git_operations import GitOperationsComponent +from forge.file_storage.base import FileStorage +from forge.utils.exceptions import CommandExecutionError from git.exc import GitCommandError from git.repo.base import Repo from autogpt.agents.agent import Agent -from autogpt.commands.git_operations import GitOperationsComponent -from autogpt.file_storage.base import FileStorage -from autogpt.utils.exceptions import CommandExecutionError @pytest.fixture diff --git a/autogpts/autogpt/tests/unit/test_json_utils.py b/autogpts/autogpt/tests/unit/test_json.py index fdd1b0f08..62f8b9690 100644 --- a/autogpts/autogpt/tests/unit/test_json_utils.py +++ b/autogpts/autogpt/tests/unit/test_json.py @@ -1,8 +1,7 @@ import json import pytest - -from autogpt.core.utils.json_utils import json_loads +from forge.json import json_loads _JSON_FIXABLE: list[tuple[str, str]] = [ # Missing comma diff --git a/autogpts/autogpt/tests/unit/test_local_file_storage.py b/autogpts/autogpt/tests/unit/test_local_file_storage.py index 971a2e421..8f4648c4e 100644 --- a/autogpts/autogpt/tests/unit/test_local_file_storage.py +++ b/autogpts/autogpt/tests/unit/test_local_file_storage.py @@ -1,8 +1,7 @@ from pathlib import Path import pytest - -from autogpt.file_storage.local import FileStorageConfiguration, LocalFileStorage +from forge.file_storage import FileStorageConfiguration, LocalFileStorage 
_ACCESSIBLE_PATHS = [ Path("."), diff --git a/autogpts/autogpt/tests/unit/test_logs.py b/autogpts/autogpt/tests/unit/test_logs.py index 1ded61f31..fd3c342db 100644 --- a/autogpts/autogpt/tests/unit/test_logs.py +++ b/autogpts/autogpt/tests/unit/test_logs.py @@ -1,6 +1,5 @@ import pytest - -from autogpt.logs.utils import remove_color_codes +from forge.logging.utils import remove_color_codes @pytest.mark.parametrize( diff --git a/autogpts/autogpt/tests/unit/test_prompt_config.py b/autogpts/autogpt/tests/unit/test_prompt_config.py index ccadb191d..517b478c3 100644 --- a/autogpts/autogpt/tests/unit/test_prompt_config.py +++ b/autogpts/autogpt/tests/unit/test_prompt_config.py @@ -1,4 +1,4 @@ -from autogpt.config.ai_directives import AIDirectives +from forge.config.ai_directives import AIDirectives """ Test cases for the PromptConfig class, which handles loads the Prompts configuration diff --git a/autogpts/autogpt/tests/unit/test_s3_file_storage.py b/autogpts/autogpt/tests/unit/test_s3_file_storage.py index 82bd5428c..fc88cd355 100644 --- a/autogpts/autogpt/tests/unit/test_s3_file_storage.py +++ b/autogpts/autogpt/tests/unit/test_s3_file_storage.py @@ -5,8 +5,7 @@ from pathlib import Path import pytest import pytest_asyncio from botocore.exceptions import ClientError - -from autogpt.file_storage.s3 import S3FileStorage, S3FileStorageConfiguration +from forge.file_storage import S3FileStorage, S3FileStorageConfiguration if not (os.getenv("S3_ENDPOINT_URL") and os.getenv("AWS_ACCESS_KEY_ID")): pytest.skip("S3 environment variables are not set", allow_module_level=True) diff --git a/autogpts/autogpt/tests/unit/test_text_file_parsers.py b/autogpts/autogpt/tests/unit/test_text_file_parsers.py index 0dfce3083..77a4af696 100644 --- a/autogpts/autogpt/tests/unit/test_text_file_parsers.py +++ b/autogpts/autogpt/tests/unit/test_text_file_parsers.py @@ -9,8 +9,7 @@ import docx import pytest import yaml from bs4 import BeautifulSoup - -from autogpt.utils.file_operations_utils 
import decode_textual_file, is_file_binary_fn +from forge.utils.file_operations import decode_textual_file, is_file_binary_fn logger = logging.getLogger(__name__) diff --git a/autogpts/autogpt/tests/unit/test_url_validation.py b/autogpts/autogpt/tests/unit/test_url_validation.py index cfecc48dd..38116a622 100644 --- a/autogpts/autogpt/tests/unit/test_url_validation.py +++ b/autogpts/autogpt/tests/unit/test_url_validation.py @@ -1,8 +1,7 @@ import pytest +from forge.utils.url_validator import validate_url from pytest import raises -from autogpt.url_utils.validators import validate_url - @validate_url def dummy_method(url): diff --git a/autogpts/autogpt/tests/unit/test_utils.py b/autogpts/autogpt/tests/unit/test_utils.py index d4b15c131..9fce40485 100644 --- a/autogpts/autogpt/tests/unit/test_utils.py +++ b/autogpts/autogpt/tests/unit/test_utils.py @@ -5,6 +5,8 @@ from unittest.mock import patch import pytest import requests +from forge.json.parsing import extract_dict_from_json +from forge.utils.yaml_validator import validate_yaml_file from git import InvalidGitRepositoryError import autogpt.app.utils @@ -14,8 +16,6 @@ from autogpt.app.utils import ( get_latest_bulletin, set_env_config_value, ) -from autogpt.core.utils.json_utils import extract_dict_from_json -from autogpt.utils.utils import validate_yaml_file from tests.utils import skip_in_ci diff --git a/autogpts/autogpt/tests/unit/test_web_search.py b/autogpts/autogpt/tests/unit/test_web_search.py index b4ee88bcb..411999c00 100644 --- a/autogpts/autogpt/tests/unit/test_web_search.py +++ b/autogpts/autogpt/tests/unit/test_web_search.py @@ -1,11 +1,11 @@ import json import pytest +from forge.components.web.search import WebSearchComponent +from forge.utils.exceptions import ConfigurationError from googleapiclient.errors import HttpError from autogpt.agents.agent import Agent -from autogpt.commands.web_search import WebSearchComponent -from autogpt.utils.exceptions import ConfigurationError @pytest.fixture @@ -56,7 
+56,7 @@ def test_google_search( mock_ddg = mocker.Mock() mock_ddg.return_value = return_value - mocker.patch("autogpt.commands.web_search.DDGS.text", mock_ddg) + mocker.patch("forge.components.web.search.DDGS.text", mock_ddg) actual_output = web_search_component.web_search(query, num_results=num_results) for o in expected_output_parts: assert o in actual_output diff --git a/autogpts/forge/forge/actions/web/web_selenium.py b/autogpts/forge/forge/actions/web/web_selenium.py index 9e3c7494b..8de9aa34f 100644 --- a/autogpts/forge/forge/actions/web/web_selenium.py +++ b/autogpts/forge/forge/actions/web/web_selenium.py @@ -36,7 +36,7 @@ from webdriver_manager.chrome import ChromeDriverManager from webdriver_manager.firefox import GeckoDriverManager from webdriver_manager.microsoft import EdgeChromiumDriverManager as EdgeDriverManager -from forge.sdk.errors import CommandExecutionError +from forge.utils.exceptions import CommandExecutionError from ..registry import action diff --git a/autogpts/forge/forge/agent/__init__.py b/autogpts/forge/forge/agent/__init__.py new file mode 100644 index 000000000..da65968a8 --- /dev/null +++ b/autogpts/forge/forge/agent/__init__.py @@ -0,0 +1,15 @@ +from .base import AgentMeta, BaseAgent, BaseAgentConfiguration, BaseAgentSettings +from .components import ( + AgentComponent, + ComponentEndpointError, + ComponentSystemError, + EndpointPipelineError, +) +from .protocols import ( + AfterExecute, + AfterParse, + CommandProvider, + DirectiveProvider, + ExecutionFailure, + MessageProvider, +) diff --git a/autogpts/autogpt/autogpt/agents/base.py b/autogpts/forge/forge/agent/base.py index 515515701..8d37d5a8b 100644 --- a/autogpts/autogpt/autogpt/agents/base.py +++ b/autogpts/forge/forge/agent/base.py @@ -19,34 +19,31 @@ from colorama import Fore from pydantic import BaseModel, Field, validator if TYPE_CHECKING: - from autogpt.core.resource.model_providers.schema import ( - ChatModelInfo, - ) - from autogpt.models.action_history import 
ActionResult + from forge.models.action import ActionProposal, ActionResult -from autogpt.agents import protocols as _protocols -from autogpt.agents.components import ( +from forge.agent import protocols +from forge.agent.components import ( AgentComponent, ComponentEndpointError, EndpointPipelineError, ) -from autogpt.config import ConfigBuilder -from autogpt.config.ai_directives import AIDirectives -from autogpt.config.ai_profile import AIProfile -from autogpt.core.configuration import ( +from forge.config.ai_directives import AIDirectives +from forge.config.ai_profile import AIProfile +from forge.config.config import ConfigBuilder +from forge.llm.prompting.prompt import DEFAULT_TRIGGERING_PROMPT +from forge.llm.providers import ( + CHAT_MODELS, + AssistantFunctionCall, + ModelName, + OpenAIModelName, +) +from forge.llm.providers.schema import ChatModelInfo +from forge.models.config import ( Configurable, SystemConfiguration, SystemSettings, UserConfigurable, ) -from autogpt.core.resource.model_providers import ( - CHAT_MODELS, - AssistantFunctionCall, - ModelName, -) -from autogpt.core.resource.model_providers.openai import OpenAIModelName -from autogpt.models.utils import ModelWithSummary -from autogpt.prompts.prompt import DEFAULT_TRIGGERING_PROMPT logger = logging.getLogger(__name__) @@ -140,9 +137,7 @@ class AgentMeta(ABCMeta): return instance -class BaseAgentActionProposal(BaseModel): - thoughts: str | ModelWithSummary - use_tool: AssistantFunctionCall = None + class BaseAgent(Configurable[BaseAgentSettings], metaclass=AgentMeta): @@ -182,13 +177,13 @@ class BaseAgent(Configurable[BaseAgentSettings], metaclass=AgentMeta): return self.config.send_token_limit or self.llm.max_tokens * 3 // 4 @abstractmethod - async def propose_action(self) -> BaseAgentActionProposal: + async def propose_action(self) -> ActionProposal: ... 
@abstractmethod async def execute( self, - proposal: BaseAgentActionProposal, + proposal: ActionProposal, user_feedback: str = "", ) -> ActionResult: ... @@ -196,7 +191,7 @@ class BaseAgent(Configurable[BaseAgentSettings], metaclass=AgentMeta): @abstractmethod async def do_not_execute( self, - denied_proposal: BaseAgentActionProposal, + denied_proposal: ActionProposal, user_feedback: str, ) -> ActionResult: ... @@ -224,7 +219,7 @@ class BaseAgent(Configurable[BaseAgentSettings], metaclass=AgentMeta): ) -> list[T] | list[None]: method_name = protocol_method.__name__ protocol_name = protocol_method.__qualname__.split(".")[0] - protocol_class = getattr(_protocols, protocol_name) + protocol_class = getattr(protocols, protocol_name) if not issubclass(protocol_class, AgentComponent): raise TypeError(f"{repr(protocol_method)} is not a protocol method") @@ -300,7 +295,7 @@ class BaseAgent(Configurable[BaseAgentSettings], metaclass=AgentMeta): ] if self.components: - # Check if any coponent is missed (added to Agent but not to components) + # Check if any component is missing (added to Agent but not to components) for component in components: if component not in self.components: logger.warning( @@ -321,12 +316,11 @@ class BaseAgent(Configurable[BaseAgentSettings], metaclass=AgentMeta): if node in visited: return visited.add(node) - for neighbor_class in node.__class__.run_after: - # Find the instance of neighbor_class in components + for neighbor_class in node._run_after: neighbor = next( (m for m in components if isinstance(m, neighbor_class)), None ) - if neighbor: + if neighbor and neighbor not in visited: visit(neighbor) stack.append(node) diff --git a/autogpts/autogpt/autogpt/agents/components.py b/autogpts/forge/forge/agent/components.py index c6b275c16..88854dca8 100644 --- a/autogpts/autogpt/autogpt/agents/components.py +++ b/autogpts/forge/forge/agent/components.py @@ -1,9 +1,15 @@ +from __future__ import annotations + from abc import ABC -from typing import 
Callable +from typing import Callable, TypeVar + +T = TypeVar("T", bound="AgentComponent") class AgentComponent(ABC): - run_after: list[type["AgentComponent"]] = [] + """Base class for all agent components.""" + + _run_after: list[type[AgentComponent]] = [] _enabled: Callable[[], bool] | bool = True _disabled_reason: str = "" @@ -15,8 +21,17 @@ class AgentComponent(ABC): @property def disabled_reason(self) -> str: + """Return the reason this component is disabled.""" return self._disabled_reason + def run_after(self: T, *components: type[AgentComponent] | AgentComponent) -> T: + """Set the components that this component should run after.""" + for component in components: + t = component if isinstance(component, type) else type(component) + if t not in self._run_after and t is not self.__class__: + self._run_after.append(t) + return self + class ComponentEndpointError(Exception): """Error of a single protocol method on a component.""" diff --git a/autogpts/autogpt/autogpt/agents/protocols.py b/autogpts/forge/forge/agent/protocols.py index 22fab67f9..a1d9fb27c 100644 --- a/autogpts/autogpt/autogpt/agents/protocols.py +++ b/autogpts/forge/forge/agent/protocols.py @@ -1,13 +1,14 @@ from abc import abstractmethod from typing import TYPE_CHECKING, Iterator -from autogpt.agents.components import AgentComponent +from .components import AgentComponent if TYPE_CHECKING: - from autogpt.agents.base import BaseAgentActionProposal - from autogpt.core.resource.model_providers.schema import ChatMessage - from autogpt.models.action_history import ActionResult - from autogpt.models.command import Command + from forge.command.command import Command + from forge.llm.providers import ChatMessage + from forge.models.action import ActionResult + + from .base import ActionProposal class DirectiveProvider(AgentComponent): @@ -35,7 +36,7 @@ class MessageProvider(AgentComponent): class AfterParse(AgentComponent): @abstractmethod - def after_parse(self, result: "BaseAgentActionProposal") -> 
None: + def after_parse(self, result: "ActionProposal") -> None: ... diff --git a/autogpts/forge/forge/command/__init__.py b/autogpts/forge/forge/command/__init__.py new file mode 100644 index 000000000..752c91851 --- /dev/null +++ b/autogpts/forge/forge/command/__init__.py @@ -0,0 +1,3 @@ +from .command import Command, CommandOutput, CommandParameter +from .decorator import command +from .parameter import CommandParameter diff --git a/autogpts/autogpt/autogpt/models/command.py b/autogpts/forge/forge/command/command.py index e88db4a70..e187ff7d3 100644 --- a/autogpts/autogpt/autogpt/models/command.py +++ b/autogpts/forge/forge/command/command.py @@ -3,11 +3,9 @@ from __future__ import annotations import inspect from typing import Any, Callable, Generic, ParamSpec, TypeVar -from .command_parameter import CommandParameter -from .context_item import ContextItem +from .parameter import CommandParameter -CommandReturnValue = Any -CommandOutput = CommandReturnValue | tuple[CommandReturnValue, ContextItem] +CommandOutput = Any P = ParamSpec("P") CO = TypeVar("CO", bound=CommandOutput) diff --git a/autogpts/autogpt/autogpt/command_decorator.py b/autogpts/forge/forge/command/decorator.py index cf074b16c..ee047a2a7 100644 --- a/autogpts/autogpt/autogpt/command_decorator.py +++ b/autogpts/forge/forge/command/decorator.py @@ -1,9 +1,10 @@ import re from typing import Callable, Concatenate, Optional, TypeVar -from autogpt.agents.protocols import CommandProvider -from autogpt.core.utils.json_schema import JSONSchema -from autogpt.models.command import CO, Command, CommandParameter, P +from forge.agent.protocols import CommandProvider +from forge.models.json_schema import JSONSchema + +from .command import CO, Command, CommandParameter, P _CP = TypeVar("_CP", bound=CommandProvider) diff --git a/autogpts/autogpt/autogpt/models/command_parameter.py b/autogpts/forge/forge/command/parameter.py index 402e870fc..91e64a173 100644 --- 
a/autogpts/autogpt/autogpt/models/command_parameter.py +++ b/autogpts/forge/forge/command/parameter.py @@ -1,6 +1,6 @@ import dataclasses -from autogpt.core.utils.json_schema import JSONSchema +from forge.models.json_schema import JSONSchema @dataclasses.dataclass diff --git a/autogpts/autogpt/autogpt/commands/README.md b/autogpts/forge/forge/components/README.md index 5575f0bcb..c53c49e27 100644 --- a/autogpts/autogpt/autogpt/commands/README.md +++ b/autogpts/forge/forge/components/README.md @@ -12,8 +12,8 @@ You can use any valid Python variable name, what matters for the component to be Visit [Built-in Components](./built-in-components.md) to see what components are available out of the box. ```py -from autogpt.agents import Agent -from autogpt.agents.components import AgentComponent +from forge.agent import BaseAgent +from forge.agent.components import AgentComponent class HelloComponent(AgentComponent): pass @@ -22,7 +22,7 @@ class SomeComponent(AgentComponent): def __init__(self, hello_component: HelloComponent): self.hello_component = hello_component -class MyAgent(Agent): +class MyAgent(BaseAgent): def __init__(self): # These components will be automatically discovered and used self.hello_component = HelloComponent() @@ -32,21 +32,30 @@ class MyAgent(Agent): ## Ordering components -The execution order of components is important because the latter ones may depend on the results of the former ones. +The execution order of components is important because some may depend on the results of the previous ones. +**By default, components are ordered alphabetically.** -### Implicit order +### Ordering individual components -Components can be ordered implicitly by the agent; each component can set `run_after` list to specify which components should run before it. This is useful when components rely on each other or need to be executed in a specific order. Otherwise, the order of components is alphabetical. 
+You can order a single component by passing other components (or their types) to the `run_after` method. This way you can ensure that the component will be executed after the specified one. +The `run_after` method returns the component itself, so you can call it when assigning the component to a variable: ```py -# This component will run after HelloComponent -class CalculatorComponent(AgentComponent): - run_after = [HelloComponent] +class MyAgent(Agent): + def __init__(self): + self.hello_component = HelloComponent() + self.calculator_component = CalculatorComponent().run_after(self.hello_component) + # This is equivalent to passing a type: + # self.calculator_component = CalculatorComponent().run_after(HelloComponent) ``` -### Explicit order +!!! warning + Be sure not to make circular dependencies when ordering components! + +### Ordering all components -Sometimes it may be easier to order components explicitly by setting `self.components` list in the agent's `__init__` method. This way you can also ensure there's no circular dependencies and `run_after` is ignored. +You can also order all components by setting `self.components` list in the agent's `__init__` method. +This way ensures that there's no circular dependencies and any `run_after` calls are ignored. !!! warning Be sure to include all components - by setting `self.components` list, you're overriding the default behavior of discovering components automatically. Since it's usually not intended agent will inform you in the terminal if some components were skipped. 
@@ -55,7 +64,7 @@ Sometimes it may be easier to order components explicitly by setting `self.compo class MyAgent(Agent): def __init__(self): self.hello_component = HelloComponent() - self.calculator_component = CalculatorComponent(self.hello_component) + self.calculator_component = CalculatorComponent() # Explicitly set components list self.components = [self.hello_component, self.calculator_component] ``` @@ -116,8 +125,8 @@ All errors accept an optional `str` message. There are following errors ordered **Example** ```py -from autogpt.agents.components import ComponentEndpointError -from autogpt.agents.protocols import MessageProvider +from forge.agent.components import ComponentEndpointError +from forge.agent.protocols import MessageProvider # Example of raising an error class MyComponent(MessageProvider): diff --git a/autogpts/forge/forge/components/action_history/__init__.py b/autogpts/forge/forge/components/action_history/__init__.py new file mode 100644 index 000000000..dfbb0e1bf --- /dev/null +++ b/autogpts/forge/forge/components/action_history/__init__.py @@ -0,0 +1,2 @@ +from .action_history import ActionHistoryComponent +from .model import Episode, EpisodicActionHistory diff --git a/autogpts/autogpt/autogpt/components/event_history.py b/autogpts/forge/forge/components/action_history/action_history.py index fd3b5100c..d0785093a 100644 --- a/autogpts/autogpt/autogpt/components/event_history.py +++ b/autogpts/forge/forge/components/action_history/action_history.py @@ -1,29 +1,24 @@ -from typing import Callable, Generic, Iterator, Optional - -from autogpt.agents.features.watchdog import WatchdogComponent -from autogpt.agents.protocols import AfterExecute, AfterParse, MessageProvider -from autogpt.config.config import Config -from autogpt.core.resource.model_providers.schema import ChatMessage, ChatModelProvider -from autogpt.models.action_history import ( - AP, - ActionResult, - Episode, - EpisodicActionHistory, -) -from autogpt.prompts.utils import indent - 
- -class EventHistoryComponent(MessageProvider, AfterParse, AfterExecute, Generic[AP]): - """Keeps track of the event history and provides a summary of the steps.""" +from typing import TYPE_CHECKING, Callable, Generic, Iterator, Optional + +from forge.agent.protocols import AfterExecute, AfterParse, MessageProvider +from forge.llm.prompting.utils import indent +from forge.llm.providers import ChatMessage, ChatModelProvider + +if TYPE_CHECKING: + from forge.config.config import Config - run_after = [WatchdogComponent] +from .model import AP, ActionResult, Episode, EpisodicActionHistory + + +class ActionHistoryComponent(MessageProvider, AfterParse, AfterExecute, Generic[AP]): + """Keeps track of the event history and provides a summary of the steps.""" def __init__( self, event_history: EpisodicActionHistory[AP], max_tokens: int, count_tokens: Callable[[str], int], - legacy_config: Config, + legacy_config: "Config", llm_provider: ChatModelProvider, ) -> None: self.event_history = event_history diff --git a/autogpts/autogpt/autogpt/models/action_history.py b/autogpts/forge/forge/components/action_history/model.py index d433cd80d..cc24ad4b5 100644 --- a/autogpts/autogpt/autogpt/models/action_history.py +++ b/autogpts/forge/forge/components/action_history/model.py @@ -1,83 +1,21 @@ from __future__ import annotations import asyncio -from typing import TYPE_CHECKING, Any, Generic, Iterator, Literal, Optional, TypeVar +from typing import TYPE_CHECKING, Generic, Iterator, TypeVar -from pydantic import BaseModel, Field +from pydantic import Field from pydantic.generics import GenericModel -from autogpt.agents.base import BaseAgentActionProposal -from autogpt.models.utils import ModelWithSummary -from autogpt.processing.text import summarize_text -from autogpt.prompts.utils import format_numbered_list, indent +from forge.content_processing.text import summarize_text +from forge.llm.prompting.utils import format_numbered_list, indent +from forge.models.action import 
ActionProposal, ActionResult +from forge.models.utils import ModelWithSummary if TYPE_CHECKING: - from autogpt.config.config import Config - from autogpt.core.resource.model_providers import ChatModelProvider + from forge.config.config import Config + from forge.llm.providers import ChatModelProvider - -class ActionSuccessResult(BaseModel): - outputs: Any - status: Literal["success"] = "success" - - def __str__(self) -> str: - outputs = str(self.outputs).replace("```", r"\```") - multiline = "\n" in outputs - return f"```\n{self.outputs}\n```" if multiline else str(self.outputs) - - -class ErrorInfo(BaseModel): - args: tuple - message: str - exception_type: str - repr: str - - @staticmethod - def from_exception(exception: Exception) -> ErrorInfo: - return ErrorInfo( - args=exception.args, - message=getattr(exception, "message", exception.args[0]), - exception_type=exception.__class__.__name__, - repr=repr(exception), - ) - - def __str__(self): - return repr(self) - - def __repr__(self): - return self.repr - - -class ActionErrorResult(BaseModel): - reason: str - error: Optional[ErrorInfo] = None - status: Literal["error"] = "error" - - @staticmethod - def from_exception(exception: Exception) -> ActionErrorResult: - return ActionErrorResult( - reason=getattr(exception, "message", exception.args[0]), - error=ErrorInfo.from_exception(exception), - ) - - def __str__(self) -> str: - return f"Action failed: '{self.reason}'" - - -class ActionInterruptedByHuman(BaseModel): - feedback: str - status: Literal["interrupted_by_human"] = "interrupted_by_human" - - def __str__(self) -> str: - return ( - 'The user interrupted the action with the following feedback: "%s"' - % self.feedback - ) - - -ActionResult = ActionSuccessResult | ActionErrorResult | ActionInterruptedByHuman - -AP = TypeVar("AP", bound=BaseAgentActionProposal) +AP = TypeVar("AP", bound=ActionProposal) class Episode(GenericModel, Generic[AP]): diff --git a/autogpts/forge/forge/components/code_executor/__init__.py 
b/autogpts/forge/forge/components/code_executor/__init__.py new file mode 100644 index 000000000..33cf11f48 --- /dev/null +++ b/autogpts/forge/forge/components/code_executor/__init__.py @@ -0,0 +1,7 @@ +from .code_executor import ( + ALLOWLIST_CONTROL, + DENYLIST_CONTROL, + CodeExecutorComponent, + is_docker_available, + we_are_running_in_a_docker_container, +) diff --git a/autogpts/autogpt/autogpt/commands/execute_code.py b/autogpts/forge/forge/components/code_executor/code_executor.py index 515f2d60b..346806deb 100644 --- a/autogpts/autogpt/autogpt/commands/execute_code.py +++ b/autogpts/forge/forge/components/code_executor/code_executor.py @@ -10,14 +10,12 @@ import docker from docker.errors import DockerException, ImageNotFound, NotFound from docker.models.containers import Container as DockerContainer -from autogpt.agents.base import BaseAgentSettings -from autogpt.agents.protocols import CommandProvider -from autogpt.command_decorator import command -from autogpt.config import Config -from autogpt.core.utils.json_schema import JSONSchema -from autogpt.file_storage.base import FileStorage -from autogpt.models.command import Command -from autogpt.utils.exceptions import ( +from forge.agent import BaseAgentSettings, CommandProvider +from forge.command import Command, command +from forge.config.config import Config +from forge.file_storage import FileStorage +from forge.models.json_schema import JSONSchema +from forge.utils.exceptions import ( CodeExecutionError, CommandExecutionError, InvalidArgumentError, @@ -175,7 +173,7 @@ class CodeExecutorComponent(CommandProvider): if we_are_running_in_a_docker_container(): logger.debug( - "AutoGPT is running in a Docker container; " + "App is running in a Docker container; " f"executing {file_path} directly..." 
) result = subprocess.run( @@ -189,7 +187,7 @@ class CodeExecutorComponent(CommandProvider): else: raise CodeExecutionError(result.stderr) - logger.debug("AutoGPT is not running in a Docker container") + logger.debug("App is not running in a Docker container") try: assert self.state.agent_id, "Need Agent ID to attach Docker container" diff --git a/autogpts/forge/forge/components/context/__init__.py b/autogpts/forge/forge/components/context/__init__.py new file mode 100644 index 000000000..243746d52 --- /dev/null +++ b/autogpts/forge/forge/components/context/__init__.py @@ -0,0 +1,7 @@ +from .context import ContextComponent +from .context_item import ( + ContextItem, + FileContextItem, + FolderContextItem, + StaticContextItem, +) diff --git a/autogpts/autogpt/autogpt/agents/features/context.py b/autogpts/forge/forge/components/context/context.py index 6e3ec1a91..c325e1adb 100644 --- a/autogpts/autogpt/autogpt/agents/features/context.py +++ b/autogpts/forge/forge/components/context/context.py @@ -5,14 +5,14 @@ from typing import Iterator from pydantic import BaseModel, Field from typing_extensions import Annotated -from autogpt.agents.protocols import CommandProvider, MessageProvider -from autogpt.command_decorator import command -from autogpt.core.resource.model_providers import ChatMessage -from autogpt.core.utils.json_schema import JSONSchema -from autogpt.file_storage.base import FileStorage -from autogpt.models.command import Command -from autogpt.models.context_item import ContextItem, FileContextItem, FolderContextItem -from autogpt.utils.exceptions import InvalidArgumentError +from forge.agent.protocols import CommandProvider, MessageProvider +from forge.command import Command, command +from forge.file_storage.base import FileStorage +from forge.llm.providers import ChatMessage +from forge.models.json_schema import JSONSchema +from forge.utils.exceptions import InvalidArgumentError + +from .context_item import ContextItem, FileContextItem, FolderContextItem 
class AgentContext(BaseModel): diff --git a/autogpts/autogpt/autogpt/models/context_item.py b/autogpts/forge/forge/components/context/context_item.py index 50c66fc54..bd4944ab8 100644 --- a/autogpts/autogpt/autogpt/models/context_item.py +++ b/autogpts/forge/forge/components/context/context_item.py @@ -5,8 +5,8 @@ from typing import Literal, Optional from pydantic import BaseModel, Field -from autogpt.file_storage.base import FileStorage -from autogpt.utils.file_operations_utils import decode_textual_file +from forge.file_storage.base import FileStorage +from forge.utils.file_operations import decode_textual_file logger = logging.getLogger(__name__) diff --git a/autogpts/forge/forge/components/file_manager/__init__.py b/autogpts/forge/forge/components/file_manager/__init__.py new file mode 100644 index 000000000..e0a142529 --- /dev/null +++ b/autogpts/forge/forge/components/file_manager/__init__.py @@ -0,0 +1 @@ +from .file_manager import FileManagerComponent diff --git a/autogpts/autogpt/autogpt/agents/features/agent_file_manager.py b/autogpts/forge/forge/components/file_manager/file_manager.py index 1df9edfe1..21e5ded1e 100644 --- a/autogpts/autogpt/autogpt/agents/features/agent_file_manager.py +++ b/autogpts/forge/forge/components/file_manager/file_manager.py @@ -3,14 +3,12 @@ import os from pathlib import Path from typing import Iterator, Optional -from autogpt.agents.protocols import CommandProvider, DirectiveProvider -from autogpt.command_decorator import command -from autogpt.core.utils.json_schema import JSONSchema -from autogpt.file_storage.base import FileStorage -from autogpt.models.command import Command -from autogpt.utils.file_operations_utils import decode_textual_file - -from ..base import BaseAgentSettings +from forge.agent import BaseAgentSettings +from forge.agent.protocols import CommandProvider, DirectiveProvider +from forge.command import Command, command +from forge.file_storage.base import FileStorage +from forge.models.json_schema import 
JSONSchema +from forge.utils.file_operations import decode_textual_file logger = logging.getLogger(__name__) diff --git a/autogpts/forge/forge/components/git_operations/__init__.py b/autogpts/forge/forge/components/git_operations/__init__.py new file mode 100644 index 000000000..290f8e615 --- /dev/null +++ b/autogpts/forge/forge/components/git_operations/__init__.py @@ -0,0 +1 @@ +from .git_operations import GitOperationsComponent diff --git a/autogpts/autogpt/autogpt/commands/git_operations.py b/autogpts/forge/forge/components/git_operations/git_operations.py index 19d3923ba..8178e3300 100644 --- a/autogpts/autogpt/autogpt/commands/git_operations.py +++ b/autogpts/forge/forge/components/git_operations/git_operations.py @@ -3,13 +3,12 @@ from typing import Iterator from git.repo import Repo -from autogpt.agents.protocols import CommandProvider -from autogpt.command_decorator import command -from autogpt.config.config import Config -from autogpt.core.utils.json_schema import JSONSchema -from autogpt.models.command import Command -from autogpt.url_utils.validators import validate_url -from autogpt.utils.exceptions import CommandExecutionError +from forge.agent.protocols import CommandProvider +from forge.command import Command, command +from forge.config.config import Config +from forge.models.json_schema import JSONSchema +from forge.utils.exceptions import CommandExecutionError +from forge.utils.url_validator import validate_url class GitOperationsComponent(CommandProvider): diff --git a/autogpts/forge/forge/components/image_gen/__init__.py b/autogpts/forge/forge/components/image_gen/__init__.py new file mode 100644 index 000000000..00b8528d6 --- /dev/null +++ b/autogpts/forge/forge/components/image_gen/__init__.py @@ -0,0 +1 @@ +from .image_gen import ImageGeneratorComponent diff --git a/autogpts/autogpt/autogpt/commands/image_gen.py b/autogpts/forge/forge/components/image_gen/image_gen.py index d132e2fca..cf7d6ba3b 100644 --- 
a/autogpts/autogpt/autogpt/commands/image_gen.py +++ b/autogpts/forge/forge/components/image_gen/image_gen.py @@ -13,12 +13,11 @@ import requests from openai import OpenAI from PIL import Image -from autogpt.agents.protocols import CommandProvider -from autogpt.command_decorator import command -from autogpt.config.config import Config -from autogpt.core.utils.json_schema import JSONSchema -from autogpt.file_storage.base import FileStorage -from autogpt.models.command import Command +from forge.agent.protocols import CommandProvider +from forge.command import Command, command +from forge.config.config import Config +from forge.file_storage import FileStorage +from forge.models.json_schema import JSONSchema logger = logging.getLogger(__name__) diff --git a/autogpts/forge/forge/components/system/__init__.py b/autogpts/forge/forge/components/system/__init__.py new file mode 100644 index 000000000..d6e654208 --- /dev/null +++ b/autogpts/forge/forge/components/system/__init__.py @@ -0,0 +1 @@ +from .system import SystemComponent diff --git a/autogpts/autogpt/autogpt/commands/system.py b/autogpts/forge/forge/components/system/system.py index ce2640529..9b72aecfe 100644 --- a/autogpts/autogpt/autogpt/commands/system.py +++ b/autogpts/forge/forge/components/system/system.py @@ -2,15 +2,14 @@ import logging import time from typing import Iterator -from autogpt.agents.protocols import CommandProvider, DirectiveProvider, MessageProvider -from autogpt.command_decorator import command -from autogpt.config.ai_profile import AIProfile -from autogpt.config.config import Config -from autogpt.core.resource.model_providers.schema import ChatMessage -from autogpt.core.utils.json_schema import JSONSchema -from autogpt.models.command import Command -from autogpt.utils.exceptions import AgentFinished -from autogpt.utils.utils import DEFAULT_FINISH_COMMAND +from forge.agent.protocols import CommandProvider, DirectiveProvider, MessageProvider +from forge.command import Command, command 
+from forge.config.ai_profile import AIProfile +from forge.config.config import Config +from forge.llm.providers import ChatMessage +from forge.models.json_schema import JSONSchema +from forge.utils.const import FINISH_COMMAND +from forge.utils.exceptions import AgentFinished logger = logging.getLogger(__name__) @@ -39,7 +38,7 @@ class SystemComponent(DirectiveProvider, MessageProvider, CommandProvider): yield self.finish @command( - names=[DEFAULT_FINISH_COMMAND], + names=[FINISH_COMMAND], parameters={ "reason": JSONSchema( type=JSONSchema.Type.STRING, diff --git a/autogpts/forge/forge/components/user_interaction/__init__.py b/autogpts/forge/forge/components/user_interaction/__init__.py new file mode 100644 index 000000000..e354843c8 --- /dev/null +++ b/autogpts/forge/forge/components/user_interaction/__init__.py @@ -0,0 +1 @@ +from .user_interaction import UserInteractionComponent diff --git a/autogpts/autogpt/autogpt/commands/user_interaction.py b/autogpts/forge/forge/components/user_interaction/user_interaction.py index 4f6200f9e..b23bed3fe 100644 --- a/autogpts/autogpt/autogpt/commands/user_interaction.py +++ b/autogpts/forge/forge/components/user_interaction/user_interaction.py @@ -1,26 +1,25 @@ from typing import Iterator -from autogpt.agents.protocols import CommandProvider -from autogpt.app.utils import clean_input -from autogpt.command_decorator import command -from autogpt.config.config import Config -from autogpt.core.utils.json_schema import JSONSchema -from autogpt.models.command import Command -from autogpt.utils.utils import DEFAULT_ASK_COMMAND +import click + +from forge.agent.protocols import CommandProvider +from forge.command import Command, command +from forge.config.config import Config +from forge.models.json_schema import JSONSchema +from forge.utils.const import ASK_COMMAND class UserInteractionComponent(CommandProvider): """Provides commands to interact with the user.""" def __init__(self, config: Config): - self.config = config 
self._enabled = not config.noninteractive_mode def get_commands(self) -> Iterator[Command]: yield self.ask_user @command( - names=[DEFAULT_ASK_COMMAND], + names=[ASK_COMMAND], parameters={ "question": JSONSchema( type=JSONSchema.Type.STRING, @@ -33,5 +32,5 @@ class UserInteractionComponent(CommandProvider): """If you need more details or information regarding the given goals, you can ask the user for input.""" print(f"\nQ: {question}") - resp = clean_input(self.config, "A:") + resp = click.prompt("A") return f"The user's answer: '{resp}'" diff --git a/autogpts/forge/forge/components/watchdog/__init__.py b/autogpts/forge/forge/components/watchdog/__init__.py new file mode 100644 index 000000000..d7c219687 --- /dev/null +++ b/autogpts/forge/forge/components/watchdog/__init__.py @@ -0,0 +1 @@ +from .watchdog import WatchdogComponent diff --git a/autogpts/autogpt/autogpt/agents/features/watchdog.py b/autogpts/forge/forge/components/watchdog/watchdog.py index 9a623d6a5..56406b0bf 100644 --- a/autogpts/autogpt/autogpt/agents/features/watchdog.py +++ b/autogpts/forge/forge/components/watchdog/watchdog.py @@ -1,10 +1,13 @@ import logging +from typing import TYPE_CHECKING -from autogpt.agents.base import BaseAgentActionProposal, BaseAgentConfiguration -from autogpt.agents.components import ComponentSystemError -from autogpt.agents.features.context import ContextComponent -from autogpt.agents.protocols import AfterParse -from autogpt.models.action_history import EpisodicActionHistory +if TYPE_CHECKING: + from forge.agent.base import BaseAgentConfiguration + +from forge.agent.components import ComponentSystemError +from forge.agent.protocols import AfterParse +from forge.components.action_history import EpisodicActionHistory +from forge.models.action import ActionProposal logger = logging.getLogger(__name__) @@ -15,18 +18,16 @@ class WatchdogComponent(AfterParse): looping, the watchdog will switch from the FAST_LLM to the SMART_LLM and re-think. 
""" - run_after = [ContextComponent] - def __init__( self, - config: BaseAgentConfiguration, - event_history: EpisodicActionHistory[BaseAgentActionProposal], + config: "BaseAgentConfiguration", + event_history: EpisodicActionHistory[ActionProposal], ): self.config = config self.event_history = event_history self.revert_big_brain = False - def after_parse(self, result: BaseAgentActionProposal) -> None: + def after_parse(self, result: ActionProposal) -> None: if self.revert_big_brain: self.config.big_brain = False self.revert_big_brain = False diff --git a/autogpts/forge/forge/components/web/__init__.py b/autogpts/forge/forge/components/web/__init__.py new file mode 100644 index 000000000..2bcc86d10 --- /dev/null +++ b/autogpts/forge/forge/components/web/__init__.py @@ -0,0 +1,2 @@ +from .search import WebSearchComponent +from .selenium import BrowsingError, WebSeleniumComponent diff --git a/autogpts/autogpt/autogpt/commands/web_search.py b/autogpts/forge/forge/components/web/search.py index 41a204ac0..36d304239 100644 --- a/autogpts/autogpt/autogpt/commands/web_search.py +++ b/autogpts/forge/forge/components/web/search.py @@ -5,12 +5,11 @@ from typing import Iterator from duckduckgo_search import DDGS -from autogpt.agents.protocols import CommandProvider, DirectiveProvider -from autogpt.command_decorator import command -from autogpt.config.config import Config -from autogpt.core.utils.json_schema import JSONSchema -from autogpt.models.command import Command -from autogpt.utils.exceptions import ConfigurationError +from forge.agent.protocols import CommandProvider, DirectiveProvider +from forge.command import Command, command +from forge.config.config import Config +from forge.models.json_schema import JSONSchema +from forge.utils.exceptions import ConfigurationError DUCKDUCKGO_MAX_ATTEMPTS = 3 diff --git a/autogpts/autogpt/autogpt/commands/web_selenium.py b/autogpts/forge/forge/components/web/selenium.py index 6b2e788b9..b5ba87119 100644 --- 
a/autogpts/autogpt/autogpt/commands/web_selenium.py +++ b/autogpts/forge/forge/components/web/selenium.py @@ -28,19 +28,15 @@ from webdriver_manager.chrome import ChromeDriverManager from webdriver_manager.firefox import GeckoDriverManager from webdriver_manager.microsoft import EdgeChromiumDriverManager as EdgeDriverManager -from autogpt.agents.protocols import CommandProvider, DirectiveProvider -from autogpt.command_decorator import command -from autogpt.config import Config -from autogpt.core.resource.model_providers.schema import ( - ChatModelInfo, - ChatModelProvider, -) -from autogpt.core.utils.json_schema import JSONSchema -from autogpt.models.command import Command -from autogpt.processing.html import extract_hyperlinks, format_hyperlinks -from autogpt.processing.text import extract_information, summarize_text -from autogpt.url_utils.validators import validate_url -from autogpt.utils.exceptions import CommandExecutionError, TooMuchOutputError +from forge.agent.protocols import CommandProvider, DirectiveProvider +from forge.command import Command, command +from forge.config.config import Config +from forge.content_processing.html import extract_hyperlinks, format_hyperlinks +from forge.content_processing.text import extract_information, summarize_text +from forge.llm.providers.schema import ChatModelInfo, ChatModelProvider +from forge.models.json_schema import JSONSchema +from forge.utils.exceptions import CommandExecutionError, TooMuchOutputError +from forge.utils.url_validator import validate_url logger = logging.getLogger(__name__) diff --git a/autogpts/autogpt/autogpt/config/__init__.py b/autogpts/forge/forge/config/__init__.py index e0c113391..cfa66121f 100644 --- a/autogpts/autogpt/autogpt/config/__init__.py +++ b/autogpts/forge/forge/config/__init__.py @@ -1,5 +1,5 @@ """ -This module contains the configuration classes for AutoGPT. +This module contains configuration models and helpers for AutoGPT Forge. 
""" from .ai_directives import AIDirectives from .ai_profile import AIProfile diff --git a/autogpts/autogpt/autogpt/config/ai_directives.py b/autogpts/forge/forge/config/ai_directives.py index 567a55e3f..9e40e29a1 100644 --- a/autogpts/autogpt/autogpt/config/ai_directives.py +++ b/autogpts/forge/forge/config/ai_directives.py @@ -4,8 +4,7 @@ from pathlib import Path import yaml from pydantic import BaseModel, Field -from autogpt.logs.helpers import request_user_double_check -from autogpt.utils.utils import validate_yaml_file +from forge.utils.yaml_validator import validate_yaml_file logger = logging.getLogger(__name__) @@ -25,6 +24,8 @@ class AIDirectives(BaseModel): @staticmethod def from_file(prompt_settings_file: Path) -> "AIDirectives": + from forge.logging.helpers import request_user_double_check + (validated, message) = validate_yaml_file(prompt_settings_file) if not validated: logger.error(message, extra={"title": "FAILED FILE VALIDATION"}) diff --git a/autogpts/autogpt/autogpt/config/ai_profile.py b/autogpts/forge/forge/config/ai_profile.py index 3f0043c79..3f0043c79 100644 --- a/autogpts/autogpt/autogpt/config/ai_profile.py +++ b/autogpts/forge/forge/config/ai_profile.py diff --git a/autogpts/autogpt/autogpt/config/config.py b/autogpts/forge/forge/config/config.py index 11d35b673..9108ba56f 100644 --- a/autogpts/autogpt/autogpt/config/config.py +++ b/autogpts/forge/forge/config/config.py @@ -7,28 +7,21 @@ import re from pathlib import Path from typing import Any, Optional, Union +import click from colorama import Fore from pydantic import SecretStr, validator -import autogpt -from autogpt.app.utils import clean_input -from autogpt.core.configuration.schema import ( - Configurable, - SystemSettings, - UserConfigurable, -) -from autogpt.core.resource.model_providers import CHAT_MODELS, ModelName -from autogpt.core.resource.model_providers.openai import ( - OpenAICredentials, - OpenAIModelName, -) -from autogpt.file_storage import FileStorageBackendName -from 
autogpt.logs.config import LoggingConfig -from autogpt.speech import TTSConfig +import forge +from forge.file_storage import FileStorageBackendName +from forge.llm.providers import CHAT_MODELS, ModelName +from forge.llm.providers.openai import OpenAICredentials, OpenAIModelName +from forge.logging.config import LoggingConfig +from forge.models.config import Configurable, SystemSettings, UserConfigurable +from forge.speech.say import TTSConfig logger = logging.getLogger(__name__) -PROJECT_ROOT = Path(autogpt.__file__).parent.parent +PROJECT_ROOT = Path(forge.__file__).parent.parent AI_SETTINGS_FILE = Path("ai_settings.yaml") AZURE_CONFIG_FILE = Path("azure.yaml") PROMPT_SETTINGS_FILE = Path("prompt_settings.yaml") @@ -258,8 +251,10 @@ def assert_config_has_openai_api_key(config: Config) -> None: logger.info( "You can get your key from https://platform.openai.com/account/api-keys" ) - openai_api_key = clean_input( - config, "Please enter your OpenAI API key if you have it:" + openai_api_key = click.prompt( + "Please enter your OpenAI API key if you have it", + default="", + show_default=False, ) openai_api_key = openai_api_key.strip() if re.search(key_pattern, openai_api_key): diff --git a/autogpts/autogpt/autogpt/models/__init__.py b/autogpts/forge/forge/content_processing/__init__.py index e69de29bb..e69de29bb 100644 --- a/autogpts/autogpt/autogpt/models/__init__.py +++ b/autogpts/forge/forge/content_processing/__init__.py diff --git a/autogpts/autogpt/autogpt/processing/html.py b/autogpts/forge/forge/content_processing/html.py index 73c65b9c9..73c65b9c9 100644 --- a/autogpts/autogpt/autogpt/processing/html.py +++ b/autogpts/forge/forge/content_processing/html.py diff --git a/autogpts/autogpt/autogpt/processing/text.py b/autogpts/forge/forge/content_processing/text.py index 4cebbabd6..02e6392c9 100644 --- a/autogpts/autogpt/autogpt/processing/text.py +++ b/autogpts/forge/forge/content_processing/text.py @@ -2,18 +2,16 @@ import logging import math -from typing 
import Iterator, Optional, TypeVar +from typing import TYPE_CHECKING, Iterator, Optional, TypeVar import spacy -from autogpt.config import Config -from autogpt.core.prompting import ChatPrompt -from autogpt.core.resource.model_providers import ( - ChatMessage, - ChatModelProvider, - ModelTokenizer, -) -from autogpt.core.utils.json_utils import extract_list_from_json +if TYPE_CHECKING: + from forge.config.config import Config + +from forge.json.parsing import extract_list_from_json +from forge.llm.prompting import ChatPrompt +from forge.llm.providers import ChatMessage, ChatModelProvider, ModelTokenizer logger = logging.getLogger(__name__) @@ -58,7 +56,7 @@ def chunk_content( async def summarize_text( text: str, llm_provider: ChatModelProvider, - config: Config, + config: "Config", question: Optional[str] = None, instruction: Optional[str] = None, ) -> tuple[str, list[tuple[str, str]]]: @@ -91,7 +89,7 @@ async def extract_information( source_text: str, topics_of_interest: list[str], llm_provider: ChatModelProvider, - config: Config, + config: "Config", ) -> list[str]: fmt_topics_list = "\n".join(f"* {topic}." for topic in topics_of_interest) instruction = ( @@ -115,7 +113,7 @@ async def _process_text( text: str, instruction: str, llm_provider: ChatModelProvider, - config: Config, + config: "Config", output_type: type[str | list[str]] = str, ) -> tuple[str, list[tuple[str, str]]] | list[str]: """Process text using the OpenAI API for summarization or information extraction @@ -124,7 +122,7 @@ async def _process_text( text (str): The text to process. instruction (str): Additional instruction for processing. llm_provider: LLM provider to use. - config (Config): The global application config. + config ("Config"): The global application config. output_type: `str` for summaries or `list[str]` for piece-wise info extraction. 
Returns: @@ -222,7 +220,7 @@ async def _process_text( def split_text( text: str, - config: Config, + config: "Config", max_chunk_length: int, tokenizer: ModelTokenizer, with_overlap: bool = True, @@ -232,7 +230,7 @@ def split_text( Args: text (str): The text to split. - config (Config): Config object containing the Spacy model setting. + config ("Config"): "Config" object containing the Spacy model setting. max_chunk_length (int, optional): The maximum length of a chunk. tokenizer (ModelTokenizer): Tokenizer to use for determining chunk length. with_overlap (bool, optional): Whether to allow overlap between chunks. diff --git a/autogpts/autogpt/autogpt/file_storage/__init__.py b/autogpts/forge/forge/file_storage/__init__.py index 8e4116f39..9d5df553e 100644 --- a/autogpts/autogpt/autogpt/file_storage/__init__.py +++ b/autogpts/forge/forge/file_storage/__init__.py @@ -1,7 +1,10 @@ import enum from pathlib import Path -from .base import FileStorage +from .base import FileStorage, FileStorageConfiguration +from .gcs import GCSFileStorage, GCSFileStorageConfiguration +from .local import LocalFileStorage +from .s3 import S3FileStorage, S3FileStorageConfiguration class FileStorageBackendName(str, enum.Enum): @@ -35,10 +38,3 @@ def get_storage( config = GCSFileStorageConfiguration.from_env() config.root = root_path return GCSFileStorage(config) - - -__all__ = [ - "FileStorage", - "FileStorageBackendName", - "get_storage", -] diff --git a/autogpts/autogpt/autogpt/file_storage/base.py b/autogpts/forge/forge/file_storage/base.py index 62521bb4a..822c46e43 100644 --- a/autogpts/autogpt/autogpt/file_storage/base.py +++ b/autogpts/forge/forge/file_storage/base.py @@ -11,7 +11,7 @@ from io import IOBase, TextIOBase from pathlib import Path from typing import IO, Any, BinaryIO, Callable, Literal, TextIO, overload -from autogpt.core.configuration.schema import SystemConfiguration +from forge.models.config import SystemConfiguration logger = logging.getLogger(__name__) diff --git 
a/autogpts/autogpt/autogpt/file_storage/gcs.py b/autogpts/forge/forge/file_storage/gcs.py index 45545d449..f631cd81b 100644 --- a/autogpts/autogpt/autogpt/file_storage/gcs.py +++ b/autogpts/forge/forge/file_storage/gcs.py @@ -14,7 +14,7 @@ from typing import Literal from google.cloud import storage from google.cloud.exceptions import NotFound -from autogpt.core.configuration.schema import UserConfigurable +from forge.models.config import UserConfigurable from .base import FileStorage, FileStorageConfiguration diff --git a/autogpts/autogpt/autogpt/file_storage/local.py b/autogpts/forge/forge/file_storage/local.py index 3a52bd572..3a52bd572 100644 --- a/autogpts/autogpt/autogpt/file_storage/local.py +++ b/autogpts/forge/forge/file_storage/local.py diff --git a/autogpts/autogpt/autogpt/file_storage/s3.py b/autogpts/forge/forge/file_storage/s3.py index f8ac15fe7..c7cfd8baf 100644 --- a/autogpts/autogpt/autogpt/file_storage/s3.py +++ b/autogpts/forge/forge/file_storage/s3.py @@ -16,7 +16,7 @@ import boto3 import botocore.exceptions from pydantic import SecretStr -from autogpt.core.configuration.schema import UserConfigurable +from forge.models.config import UserConfigurable from .base import FileStorage, FileStorageConfiguration diff --git a/autogpts/forge/forge/json/__init__.py b/autogpts/forge/forge/json/__init__.py new file mode 100644 index 000000000..726072d03 --- /dev/null +++ b/autogpts/forge/forge/json/__init__.py @@ -0,0 +1 @@ +from .parsing import extract_dict_from_json, extract_list_from_json, json_loads diff --git a/autogpts/autogpt/autogpt/core/utils/json_utils.py b/autogpts/forge/forge/json/parsing.py index 45650427d..45650427d 100644 --- a/autogpts/autogpt/autogpt/core/utils/json_utils.py +++ b/autogpts/forge/forge/json/parsing.py diff --git a/autogpts/autogpt/autogpt/processing/__init__.py b/autogpts/forge/forge/llm/__init__.py index e69de29bb..e69de29bb 100644 --- a/autogpts/autogpt/autogpt/processing/__init__.py +++ 
b/autogpts/forge/forge/llm/__init__.py diff --git a/autogpts/autogpt/autogpt/core/prompting/__init__.py b/autogpts/forge/forge/llm/prompting/__init__.py index 305c35685..305c35685 100644 --- a/autogpts/autogpt/autogpt/core/prompting/__init__.py +++ b/autogpts/forge/forge/llm/prompting/__init__.py diff --git a/autogpts/autogpt/autogpt/core/prompting/base.py b/autogpts/forge/forge/llm/prompting/base.py index 19e315f69..0bc351786 100644 --- a/autogpts/autogpt/autogpt/core/prompting/base.py +++ b/autogpts/forge/forge/llm/prompting/base.py @@ -1,7 +1,10 @@ import abc +from typing import TYPE_CHECKING -from autogpt.core.configuration import SystemConfiguration -from autogpt.core.resource.model_providers import AssistantChatMessage +from forge.models.config import SystemConfiguration + +if TYPE_CHECKING: + from forge.llm.providers import AssistantChatMessage from .schema import ChatPrompt, LanguageModelClassification @@ -19,5 +22,5 @@ class PromptStrategy(abc.ABC): ... @abc.abstractmethod - def parse_response_content(self, response_content: AssistantChatMessage): + def parse_response_content(self, response_content: "AssistantChatMessage"): ... 
diff --git a/autogpts/forge/forge/prompts/gpt-3.5-turbo/role_selection.j2 b/autogpts/forge/forge/llm/prompting/gpt-3.5-turbo/role_selection.j2 index cfc359a12..cfc359a12 100644 --- a/autogpts/forge/forge/prompts/gpt-3.5-turbo/role_selection.j2 +++ b/autogpts/forge/forge/llm/prompting/gpt-3.5-turbo/role_selection.j2 diff --git a/autogpts/forge/forge/prompts/gpt-3.5-turbo/system-format.j2 b/autogpts/forge/forge/llm/prompting/gpt-3.5-turbo/system-format.j2 index 4141e1d29..4141e1d29 100644 --- a/autogpts/forge/forge/prompts/gpt-3.5-turbo/system-format.j2 +++ b/autogpts/forge/forge/llm/prompting/gpt-3.5-turbo/system-format.j2 diff --git a/autogpts/forge/forge/prompts/gpt-3.5-turbo/task-step.j2 b/autogpts/forge/forge/llm/prompting/gpt-3.5-turbo/task-step.j2 index 96f58e8bd..96f58e8bd 100644 --- a/autogpts/forge/forge/prompts/gpt-3.5-turbo/task-step.j2 +++ b/autogpts/forge/forge/llm/prompting/gpt-3.5-turbo/task-step.j2 diff --git a/autogpts/autogpt/autogpt/prompts/prompt.py b/autogpts/forge/forge/llm/prompting/prompt.py index c9076aa9e..c9076aa9e 100644 --- a/autogpts/autogpt/autogpt/prompts/prompt.py +++ b/autogpts/forge/forge/llm/prompting/prompt.py diff --git a/autogpts/autogpt/autogpt/core/prompting/schema.py b/autogpts/forge/forge/llm/prompting/schema.py index fcc7c6b61..7dabfd9bf 100644 --- a/autogpts/autogpt/autogpt/core/prompting/schema.py +++ b/autogpts/forge/forge/llm/prompting/schema.py @@ -2,7 +2,7 @@ import enum from pydantic import BaseModel, Field -from autogpt.core.resource.model_providers.schema import ( +from forge.llm.providers.schema import ( ChatMessage, ChatMessageDict, CompletionModelFunction, diff --git a/autogpts/forge/forge/prompts/techniques/chain-of-thought.j2 b/autogpts/forge/forge/llm/prompting/techniques/chain-of-thought.j2 index dc6357f47..dc6357f47 100644 --- a/autogpts/forge/forge/prompts/techniques/chain-of-thought.j2 +++ b/autogpts/forge/forge/llm/prompting/techniques/chain-of-thought.j2 diff --git 
a/autogpts/forge/forge/prompts/techniques/expert.j2 b/autogpts/forge/forge/llm/prompting/techniques/expert.j2 index cacc966c0..cacc966c0 100644 --- a/autogpts/forge/forge/prompts/techniques/expert.j2 +++ b/autogpts/forge/forge/llm/prompting/techniques/expert.j2 diff --git a/autogpts/forge/forge/prompts/techniques/few-shot.j2 b/autogpts/forge/forge/llm/prompting/techniques/few-shot.j2 index e65878d8c..e65878d8c 100644 --- a/autogpts/forge/forge/prompts/techniques/few-shot.j2 +++ b/autogpts/forge/forge/llm/prompting/techniques/few-shot.j2 diff --git a/autogpts/autogpt/autogpt/prompts/utils.py b/autogpts/forge/forge/llm/prompting/utils.py index f5ab9df9d..4d45dec3f 100644 --- a/autogpts/autogpt/autogpt/prompts/utils.py +++ b/autogpts/forge/forge/llm/prompting/utils.py @@ -9,3 +9,14 @@ def indent(content: str, indentation: int | str = 4) -> str: if type(indentation) is int: indentation = " " * indentation return indentation + content.replace("\n", f"\n{indentation}") # type: ignore + + +def to_numbered_list( + items: list[str], no_items_response: str = "", **template_args +) -> str: + if items: + return "\n".join( + f"{i+1}. 
{item.format(**template_args)}" for i, item in enumerate(items) + ) + else: + return no_items_response diff --git a/autogpts/autogpt/autogpt/core/resource/model_providers/__init__.py b/autogpts/forge/forge/llm/providers/__init__.py index 7fb98170e..82c4e110f 100644 --- a/autogpts/autogpt/autogpt/core/resource/model_providers/__init__.py +++ b/autogpts/forge/forge/llm/providers/__init__.py @@ -32,6 +32,7 @@ from .schema import ( ModelResponse, ModelTokenizer, ) +from .utils import function_specs_from_commands __all__ = [ "AssistantChatMessage", @@ -66,4 +67,5 @@ __all__ = [ "OpenAIModelName", "OpenAIProvider", "OpenAISettings", + "function_specs_from_commands", ] diff --git a/autogpts/autogpt/autogpt/core/resource/model_providers/anthropic.py b/autogpts/forge/forge/llm/providers/anthropic.py index 3d5967f1c..74b516451 100644 --- a/autogpts/autogpt/autogpt/core/resource/model_providers/anthropic.py +++ b/autogpts/forge/forge/llm/providers/anthropic.py @@ -10,8 +10,7 @@ import tiktoken from anthropic import APIConnectionError, APIStatusError from pydantic import SecretStr -from autogpt.core.configuration import Configurable, UserConfigurable -from autogpt.core.resource.model_providers.schema import ( +from forge.llm.providers.schema import ( AssistantChatMessage, AssistantFunctionCall, AssistantToolCall, @@ -28,6 +27,7 @@ from autogpt.core.resource.model_providers.schema import ( ModelTokenizer, ToolResultMessage, ) +from forge.models.config import Configurable, UserConfigurable from .utils import validate_tool_calls diff --git a/autogpts/autogpt/autogpt/core/resource/model_providers/multi.py b/autogpts/forge/forge/llm/providers/multi.py index f194e0256..eadb9c13b 100644 --- a/autogpts/autogpt/autogpt/core/resource/model_providers/multi.py +++ b/autogpts/forge/forge/llm/providers/multi.py @@ -5,7 +5,7 @@ from typing import Callable, Iterator, Optional, TypeVar from pydantic import ValidationError -from autogpt.core.configuration import Configurable +from 
forge.models.config import Configurable from .anthropic import ANTHROPIC_CHAT_MODELS, AnthropicModelName, AnthropicProvider from .openai import OPEN_AI_CHAT_MODELS, OpenAIModelName, OpenAIProvider diff --git a/autogpts/autogpt/autogpt/core/resource/model_providers/openai.py b/autogpts/forge/forge/llm/providers/openai.py index c3c5aabb4..7f0d55384 100644 --- a/autogpts/autogpt/autogpt/core/resource/model_providers/openai.py +++ b/autogpts/forge/forge/llm/providers/openai.py @@ -17,8 +17,8 @@ from openai.types.chat import ( ) from pydantic import SecretStr -from autogpt.core.configuration import Configurable, UserConfigurable -from autogpt.core.resource.model_providers.schema import ( +from forge.json.parsing import json_loads +from forge.llm.providers.schema import ( AssistantChatMessage, AssistantFunctionCall, AssistantToolCall, @@ -39,8 +39,8 @@ from autogpt.core.resource.model_providers.schema import ( ModelProviderSettings, ModelTokenizer, ) -from autogpt.core.utils.json_schema import JSONSchema -from autogpt.core.utils.json_utils import json_loads +from forge.models.config import Configurable, UserConfigurable +from forge.models.json_schema import JSONSchema from .utils import validate_tool_calls diff --git a/autogpts/autogpt/autogpt/core/resource/model_providers/schema.py b/autogpts/forge/forge/llm/providers/schema.py index bb2e29490..79cfbfbbe 100644 --- a/autogpts/autogpt/autogpt/core/resource/model_providers/schema.py +++ b/autogpts/forge/forge/llm/providers/schema.py @@ -18,8 +18,10 @@ from typing import ( from pydantic import BaseModel, Field, SecretStr, validator -from autogpt.core.configuration import SystemConfiguration, UserConfigurable -from autogpt.core.resource.schema import ( +from forge.logging.utils import fmt_kwargs +from forge.models.config import SystemConfiguration, UserConfigurable +from forge.models.json_schema import JSONSchema +from forge.models.providers import ( Embedding, ProviderBudget, ProviderCredentials, @@ -27,8 +29,6 @@ from 
autogpt.core.resource.schema import ( ProviderUsage, ResourceType, ) -from autogpt.core.utils.json_schema import JSONSchema -from autogpt.logs.utils import fmt_kwargs if TYPE_CHECKING: from jsonschema import ValidationError diff --git a/autogpts/autogpt/autogpt/core/resource/model_providers/utils.py b/autogpts/forge/forge/llm/providers/utils.py index 5b83b047b..aa63cb165 100644 --- a/autogpts/autogpt/autogpt/core/resource/model_providers/utils.py +++ b/autogpts/forge/forge/llm/providers/utils.py @@ -1,4 +1,7 @@ -from typing import Any +from typing import TYPE_CHECKING, Any, Iterable + +if TYPE_CHECKING: + from forge.command.command import Command from .schema import AssistantToolCall, CompletionModelFunction @@ -69,3 +72,17 @@ def validate_tool_calls( ) return errors + + +def function_specs_from_commands( + commands: Iterable["Command"], +) -> list[CompletionModelFunction]: + """Get LLM-consumable function specs for the agent's available commands.""" + return [ + CompletionModelFunction( + name=command.names[0], + description=command.description, + parameters={param.name: param.spec for param in command.parameters}, + ) + for command in commands + ] diff --git a/autogpts/forge/forge/logging/__init__.py b/autogpts/forge/forge/logging/__init__.py new file mode 100644 index 000000000..0348e043b --- /dev/null +++ b/autogpts/forge/forge/logging/__init__.py @@ -0,0 +1,11 @@ +from .config import configure_logging +from .filters import BelowLevelFilter +from .formatters import FancyConsoleFormatter +from .helpers import user_friendly_output + +__all__ = [ + "configure_logging", + "BelowLevelFilter", + "FancyConsoleFormatter", + "user_friendly_output", +] diff --git a/autogpts/autogpt/autogpt/logs/config.py b/autogpts/forge/forge/logging/config.py index 3569933a9..59d87fe5c 100644 --- a/autogpts/autogpt/autogpt/logs/config.py +++ b/autogpts/forge/forge/logging/config.py @@ -10,13 +10,13 @@ from typing import TYPE_CHECKING, Optional from openai._base_client import log as 
openai_logger -if TYPE_CHECKING: - from autogpt.speech import TTSConfig +from forge.models.config import SystemConfiguration, UserConfigurable -from autogpt.core.configuration import SystemConfiguration, UserConfigurable -from autogpt.core.runner.client_lib.logging import BelowLevelFilter +if TYPE_CHECKING: + from forge.speech import TTSConfig -from .formatters import AutoGptFormatter, StructuredLoggingFormatter +from .filters import BelowLevelFilter +from .formatters import ForgeFormatter, StructuredLoggingFormatter from .handlers import TTSHandler, TypingConsoleHandler LOG_DIR = Path(__file__).parent.parent.parent / "logs" @@ -138,7 +138,7 @@ def configure_logging( if config.log_format in (LogFormatName.DEBUG, LogFormatName.SIMPLE): console_format_template = TEXT_LOG_FORMAT_MAP[config.log_format] - console_formatter = AutoGptFormatter(console_format_template) + console_formatter = ForgeFormatter(console_format_template) else: console_formatter = StructuredLoggingFormatter() console_format_template = SIMPLE_LOG_FORMAT @@ -173,7 +173,7 @@ def configure_logging( if config.log_file_format is not None: if config.level < logging.ERROR: file_output_format_template = TEXT_LOG_FORMAT_MAP[config.log_file_format] - file_output_formatter = AutoGptFormatter( + file_output_formatter = ForgeFormatter( file_output_format_template, no_color=True ) @@ -191,9 +191,7 @@ def configure_logging( config.log_dir / ERROR_LOG_FILE, "a", "utf-8" ) error_log_handler.setLevel(logging.ERROR) - error_log_handler.setFormatter( - AutoGptFormatter(DEBUG_LOG_FORMAT, no_color=True) - ) + error_log_handler.setFormatter(ForgeFormatter(DEBUG_LOG_FORMAT, no_color=True)) log_handlers += [error_log_handler] user_friendly_output_logger.addHandler(error_log_handler) diff --git a/autogpts/autogpt/autogpt/logs/filters.py b/autogpts/forge/forge/logging/filters.py index 7a0ccd756..7a0ccd756 100644 --- a/autogpts/autogpt/autogpt/logs/filters.py +++ b/autogpts/forge/forge/logging/filters.py diff --git 
a/autogpts/autogpt/autogpt/logs/formatters.py b/autogpts/forge/forge/logging/formatters.py index a51112573..7f104da32 100644 --- a/autogpts/autogpt/autogpt/logs/formatters.py +++ b/autogpts/forge/forge/logging/formatters.py @@ -1,14 +1,56 @@ import logging -from colorama import Style +from colorama import Fore, Style from google.cloud.logging_v2.handlers import CloudLoggingFilter, StructuredLogHandler -from autogpt.core.runner.client_lib.logging import FancyConsoleFormatter - from .utils import remove_color_codes -class AutoGptFormatter(FancyConsoleFormatter): +class FancyConsoleFormatter(logging.Formatter): + """ + A custom logging formatter designed for console output. + + This formatter enhances the standard logging output with color coding. The color + coding is based on the level of the log message, making it easier to distinguish + between different types of messages in the console output. + + The color for each level is defined in the LEVEL_COLOR_MAP class attribute. + """ + + # level -> (level & text color, title color) + LEVEL_COLOR_MAP = { + logging.DEBUG: Fore.LIGHTBLACK_EX, + logging.INFO: Fore.BLUE, + logging.WARNING: Fore.YELLOW, + logging.ERROR: Fore.RED, + logging.CRITICAL: Fore.RED + Style.BRIGHT, + } + + def format(self, record: logging.LogRecord) -> str: + # Make sure `msg` is a string + if not hasattr(record, "msg"): + record.msg = "" + elif not type(record.msg) is str: + record.msg = str(record.msg) + + # Determine default color based on error level + level_color = "" + if record.levelno in self.LEVEL_COLOR_MAP: + level_color = self.LEVEL_COLOR_MAP[record.levelno] + record.levelname = f"{level_color}{record.levelname}{Style.RESET_ALL}" + + # Determine color for message + color = getattr(record, "color", level_color) + color_is_specified = hasattr(record, "color") + + # Don't color INFO messages unless the color is explicitly specified. 
+ if color and (record.levelno != logging.INFO or color_is_specified): + record.msg = f"{color}{record.msg}{Style.RESET_ALL}" + + return super().format(record) + + +class ForgeFormatter(FancyConsoleFormatter): def __init__(self, *args, no_color: bool = False, **kwargs): super().__init__(*args, **kwargs) self.no_color = no_color diff --git a/autogpts/autogpt/autogpt/logs/handlers.py b/autogpts/forge/forge/logging/handlers.py index 6d371059a..bbc605087 100644 --- a/autogpts/autogpt/autogpt/logs/handlers.py +++ b/autogpts/forge/forge/logging/handlers.py @@ -7,11 +7,11 @@ import re import time from typing import TYPE_CHECKING -from autogpt.logs.utils import remove_color_codes -from autogpt.speech import TextToSpeechProvider +from forge.logging.utils import remove_color_codes +from forge.speech import TextToSpeechProvider if TYPE_CHECKING: - from autogpt.speech import TTSConfig + from forge.speech import TTSConfig class TypingConsoleHandler(logging.StreamHandler): diff --git a/autogpts/autogpt/autogpt/logs/helpers.py b/autogpts/forge/forge/logging/helpers.py index d81f01d67..d81f01d67 100644 --- a/autogpts/autogpt/autogpt/logs/helpers.py +++ b/autogpts/forge/forge/logging/helpers.py diff --git a/autogpts/autogpt/autogpt/logs/utils.py b/autogpts/forge/forge/logging/utils.py index d9f39af30..d9f39af30 100644 --- a/autogpts/autogpt/autogpt/logs/utils.py +++ b/autogpts/forge/forge/logging/utils.py diff --git a/autogpts/forge/forge/memory/memstore_test.py b/autogpts/forge/forge/memory/memstore_test.py index 3eb24309b..200dc63d9 100644 --- a/autogpts/forge/forge/memory/memstore_test.py +++ b/autogpts/forge/forge/memory/memstore_test.py @@ -3,7 +3,7 @@ import shutil import pytest -from forge.sdk.memory.memstore import ChromaMemStore +from forge.memory.chroma_memstore import ChromaMemStore @pytest.fixture diff --git a/autogpts/forge/forge/models/action.py b/autogpts/forge/forge/models/action.py new file mode 100644 index 000000000..3eefbf688 --- /dev/null +++ 
b/autogpts/forge/forge/models/action.py @@ -0,0 +1,76 @@ +from __future__ import annotations + +from typing import Any, Literal, Optional + +from pydantic import BaseModel + +from forge.llm.providers.schema import AssistantFunctionCall + +from .utils import ModelWithSummary + + +class ActionProposal(BaseModel): + thoughts: str | ModelWithSummary + use_tool: AssistantFunctionCall = None + + +class ActionSuccessResult(BaseModel): + outputs: Any + status: Literal["success"] = "success" + + def __str__(self) -> str: + outputs = str(self.outputs).replace("```", r"\```") + multiline = "\n" in outputs + return f"```\n{self.outputs}\n```" if multiline else str(self.outputs) + + +class ErrorInfo(BaseModel): + args: tuple + message: str + exception_type: str + repr: str + + @staticmethod + def from_exception(exception: Exception) -> ErrorInfo: + return ErrorInfo( + args=exception.args, + message=getattr(exception, "message", exception.args[0]), + exception_type=exception.__class__.__name__, + repr=repr(exception), + ) + + def __str__(self): + return repr(self) + + def __repr__(self): + return self.repr + + +class ActionErrorResult(BaseModel): + reason: str + error: Optional[ErrorInfo] = None + status: Literal["error"] = "error" + + @staticmethod + def from_exception(exception: Exception) -> ActionErrorResult: + return ActionErrorResult( + reason=getattr(exception, "message", exception.args[0]), + error=ErrorInfo.from_exception(exception), + ) + + def __str__(self) -> str: + return f"Action failed: '{self.reason}'" + + +class ActionInterruptedByHuman(BaseModel): + feedback: str + status: Literal["interrupted_by_human"] = "interrupted_by_human" + + def __str__(self) -> str: + return ( + 'The user interrupted the action with the following feedback: "%s"' + % self.feedback + ) + + +ActionResult = ActionSuccessResult | ActionErrorResult | ActionInterruptedByHuman diff --git a/autogpts/autogpt/autogpt/core/configuration/schema.py b/autogpts/forge/forge/models/config.py index 
5bc95ffac..5bc95ffac 100644 --- a/autogpts/autogpt/autogpt/core/configuration/schema.py +++ b/autogpts/forge/forge/models/config.py diff --git a/autogpts/autogpt/autogpt/core/utils/json_schema.py b/autogpts/forge/forge/models/json_schema.py index ce987f5d7..ce987f5d7 100644 --- a/autogpts/autogpt/autogpt/core/utils/json_schema.py +++ b/autogpts/forge/forge/models/json_schema.py diff --git a/autogpts/autogpt/autogpt/core/resource/schema.py b/autogpts/forge/forge/models/providers.py index 0da275ee2..d422ff46f 100644 --- a/autogpts/autogpt/autogpt/core/resource/schema.py +++ b/autogpts/forge/forge/models/providers.py @@ -4,11 +4,7 @@ import math from pydantic import BaseModel, SecretBytes, SecretField, SecretStr -from autogpt.core.configuration import ( - SystemConfiguration, - SystemSettings, - UserConfigurable, -) +from forge.models.config import SystemConfiguration, SystemSettings, UserConfigurable class ResourceType(str, enum.Enum): diff --git a/autogpts/autogpt/autogpt/models/utils.py b/autogpts/forge/forge/models/utils.py index 1b4db2175..1b4db2175 100644 --- a/autogpts/autogpt/autogpt/models/utils.py +++ b/autogpts/forge/forge/models/utils.py diff --git a/autogpts/forge/forge/sdk/__init__.py b/autogpts/forge/forge/sdk/__init__.py index 9ee9f428a..c3db17e31 100644 --- a/autogpts/forge/forge/sdk/__init__.py +++ b/autogpts/forge/forge/sdk/__init__.py @@ -2,10 +2,26 @@ The Forge SDK. This is the core of the Forge. It contains the agent protocol, which is the core of the Forge. 
""" -from ..llm import chat_completion_request, create_embedding_request, transcribe_audio +from forge.utils.exceptions import ( + AccessDeniedError, + AgentException, + AgentFinished, + AgentTerminated, + CodeExecutionError, + CommandExecutionError, + ConfigurationError, + InvalidAgentResponseError, + InvalidArgumentError, + NotFoundError, + OperationNotAllowedError, + TooMuchOutputError, + UnknownCommandError, + get_detailed_traceback, + get_exception_message, +) + from .agent import Agent from .db import AgentDB, Base -from .errors import * from .forge_log import ForgeLogger from .model import ( Artifact, diff --git a/autogpts/forge/forge/sdk/agent.py b/autogpts/forge/forge/sdk/agent.py index 125167938..ca74607bb 100644 --- a/autogpts/forge/forge/sdk/agent.py +++ b/autogpts/forge/forge/sdk/agent.py @@ -9,8 +9,9 @@ from fastapi.middleware.cors import CORSMiddleware from fastapi.responses import RedirectResponse, StreamingResponse from fastapi.staticfiles import StaticFiles +from forge.utils.exceptions import NotFoundError + from .db import AgentDB -from .errors import NotFoundError from .forge_log import ForgeLogger from .middlewares import AgentMiddleware from .model import ( diff --git a/autogpts/forge/forge/sdk/db.py b/autogpts/forge/forge/sdk/db.py index ce4d22f6f..127ccdcec 100644 --- a/autogpts/forge/forge/sdk/db.py +++ b/autogpts/forge/forge/sdk/db.py @@ -21,7 +21,8 @@ from sqlalchemy import ( from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.orm import DeclarativeBase, joinedload, relationship, sessionmaker -from .errors import NotFoundError +from forge.utils.exceptions import NotFoundError + from .forge_log import ForgeLogger from .model import Artifact, Pagination, Status, Step, StepRequestBody, Task diff --git a/autogpts/forge/forge/sdk/db_test.py b/autogpts/forge/forge/sdk/db_test.py index 14330435e..05f4b8754 100644 --- a/autogpts/forge/forge/sdk/db_test.py +++ b/autogpts/forge/forge/sdk/db_test.py @@ -13,14 +13,8 @@ from forge.sdk.db import 
( convert_to_step, convert_to_task, ) -from forge.sdk.errors import NotFoundError as DataNotFoundError -from forge.sdk.model import ( - Artifact, - Status, - Step, - StepRequestBody, - Task, -) +from forge.sdk.model import Artifact, Status, Step, StepRequestBody, Task +from forge.utils.exceptions import NotFoundError as DataNotFoundError @pytest.mark.asyncio diff --git a/autogpts/forge/forge/sdk/errors.py b/autogpts/forge/forge/sdk/errors.py deleted file mode 100644 index c1bacd0c9..000000000 --- a/autogpts/forge/forge/sdk/errors.py +++ /dev/null @@ -1,60 +0,0 @@ -from typing import Optional - - -class NotFoundError(Exception): - pass - - -class AgentException(Exception): - """Base class for specific exceptions relevant in the execution of Agents""" - - message: str - - hint: Optional[str] = None - """A hint which can be passed to the LLM to reduce reoccurrence of this error""" - - def __init__(self, message: str, *args): - self.message = message - super().__init__(message, *args) - - -class ConfigurationError(AgentException): - """Error caused by invalid, incompatible or otherwise incorrect configuration""" - - -class InvalidAgentResponseError(AgentException): - """The LLM deviated from the prescribed response format""" - - -class UnknownCommandError(AgentException): - """The AI tried to use an unknown command""" - - hint = "Do not try to use this command again." 
- - -class DuplicateOperationError(AgentException): - """The proposed operation has already been executed""" - - -class CommandExecutionError(AgentException): - """An error occured when trying to execute the command""" - - -class InvalidArgumentError(CommandExecutionError): - """The command received an invalid argument""" - - -class OperationNotAllowedError(CommandExecutionError): - """The agent is not allowed to execute the proposed operation""" - - -class AccessDeniedError(CommandExecutionError): - """The operation failed because access to a required resource was denied""" - - -class CodeExecutionError(CommandExecutionError): - """The operation (an attempt to run arbitrary code) returned an error""" - - -class TooMuchOutputError(CommandExecutionError): - """The operation generated more output than what the Agent can process""" diff --git a/autogpts/forge/forge/sdk/middlewares.py b/autogpts/forge/forge/sdk/middlewares.py index 92945a95c..6a204a01e 100644 --- a/autogpts/forge/forge/sdk/middlewares.py +++ b/autogpts/forge/forge/sdk/middlewares.py @@ -1,5 +1,10 @@ +from typing import TYPE_CHECKING + from fastapi import FastAPI +if TYPE_CHECKING: + from .agent import Agent + class AgentMiddleware: """ diff --git a/autogpts/forge/forge/sdk/routes/agent_protocol.py b/autogpts/forge/forge/sdk/routes/agent_protocol.py index 4cab4be0c..1ea739ca0 100644 --- a/autogpts/forge/forge/sdk/routes/agent_protocol.py +++ b/autogpts/forge/forge/sdk/routes/agent_protocol.py @@ -28,10 +28,22 @@ from typing import Optional from fastapi import APIRouter, Query, Request, Response, UploadFile from fastapi.responses import FileResponse -from forge.sdk.errors import * from forge.sdk.forge_log import ForgeLogger -from forge.sdk.model import * -from forge.sdk.utils import get_detailed_traceback, get_exception_message +from forge.sdk.model import ( + Artifact, + Step, + StepRequestBody, + Task, + TaskArtifactsListResponse, + TaskListResponse, + TaskRequestBody, + TaskStepsListResponse, +) +from 
forge.utils.exceptions import ( + NotFoundError, + get_detailed_traceback, + get_exception_message, +) base_router = APIRouter() diff --git a/autogpts/forge/forge/sdk/utils.py b/autogpts/forge/forge/sdk/utils.py deleted file mode 100644 index 204d713fc..000000000 --- a/autogpts/forge/forge/sdk/utils.py +++ /dev/null @@ -1,39 +0,0 @@ -import inspect -import sys -import traceback - - -def get_exception_message(): - """Get current exception type and message.""" - exc_type, exc_value, _ = sys.exc_info() - exception_message = f"{exc_type.__name__}: {exc_value}" - return exception_message - - -def get_detailed_traceback(): - """Get current exception traceback with local variables.""" - _, _, exc_tb = sys.exc_info() - detailed_traceback = "Traceback (most recent call last):\n" - formatted_tb = traceback.format_tb(exc_tb) - detailed_traceback += "".join(formatted_tb) - - # Optionally add local variables to the traceback information - detailed_traceback += "\nLocal variables by frame, innermost last:\n" - while exc_tb: - frame = exc_tb.tb_frame - lineno = exc_tb.tb_lineno - function_name = frame.f_code.co_name - - # Format frame information - detailed_traceback += ( - f" Frame {function_name} in {frame.f_code.co_filename} at line {lineno}\n" - ) - - # Get local variables for the frame - local_vars = inspect.getargvalues(frame).locals - for var_name, value in local_vars.items(): - detailed_traceback += f" {var_name} = {value}\n" - - exc_tb = exc_tb.tb_next - - return detailed_traceback diff --git a/autogpts/forge/forge/speech/__init__.py b/autogpts/forge/forge/speech/__init__.py new file mode 100644 index 000000000..c5150f6ca --- /dev/null +++ b/autogpts/forge/forge/speech/__init__.py @@ -0,0 +1,2 @@ +"""This module contains the speech recognition and speech synthesis functions.""" +from .say import TextToSpeechProvider, TTSConfig diff --git a/autogpts/autogpt/autogpt/speech/base.py b/autogpts/forge/forge/speech/base.py index fd9fda60f..fd9fda60f 100644 --- 
a/autogpts/autogpt/autogpt/speech/base.py +++ b/autogpts/forge/forge/speech/base.py diff --git a/autogpts/autogpt/autogpt/speech/eleven_labs.py b/autogpts/forge/forge/speech/eleven_labs.py index 897f0fd7d..253b13bbb 100644 --- a/autogpts/autogpt/autogpt/speech/eleven_labs.py +++ b/autogpts/forge/forge/speech/eleven_labs.py @@ -7,7 +7,7 @@ import os import requests from playsound import playsound -from autogpt.core.configuration import SystemConfiguration, UserConfigurable +from forge.models.config import SystemConfiguration, UserConfigurable from .base import VoiceBase diff --git a/autogpts/autogpt/autogpt/speech/gtts.py b/autogpts/forge/forge/speech/gtts.py index 40f7bcb97..5a1f93675 100644 --- a/autogpts/autogpt/autogpt/speech/gtts.py +++ b/autogpts/forge/forge/speech/gtts.py @@ -6,7 +6,7 @@ import os import gtts from playsound import playsound -from autogpt.speech.base import VoiceBase +from .base import VoiceBase class GTTSVoice(VoiceBase): diff --git a/autogpts/autogpt/autogpt/speech/macos_tts.py b/autogpts/forge/forge/speech/macos_tts.py index 6a1dd99d5..971848bd2 100644 --- a/autogpts/autogpt/autogpt/speech/macos_tts.py +++ b/autogpts/forge/forge/speech/macos_tts.py @@ -3,7 +3,7 @@ from __future__ import annotations import subprocess -from autogpt.speech.base import VoiceBase +from .base import VoiceBase class MacOSTTS(VoiceBase): diff --git a/autogpts/autogpt/autogpt/speech/say.py b/autogpts/forge/forge/speech/say.py index 04ab3a4bc..cb0b3baa7 100644 --- a/autogpts/autogpt/autogpt/speech/say.py +++ b/autogpts/forge/forge/speech/say.py @@ -6,7 +6,7 @@ import threading from threading import Semaphore from typing import Literal, Optional -from autogpt.core.configuration.schema import SystemConfiguration, UserConfigurable +from forge.models.config import SystemConfiguration, UserConfigurable from .base import VoiceBase from .eleven_labs import ElevenLabsConfig, ElevenLabsSpeech diff --git a/autogpts/autogpt/autogpt/speech/stream_elements_speech.py 
b/autogpts/forge/forge/speech/stream_elements_speech.py index e12b29b2d..7c2cad063 100644 --- a/autogpts/autogpt/autogpt/speech/stream_elements_speech.py +++ b/autogpts/forge/forge/speech/stream_elements_speech.py @@ -6,8 +6,9 @@ import os import requests from playsound import playsound -from autogpt.core.configuration import SystemConfiguration, UserConfigurable -from autogpt.speech.base import VoiceBase +from forge.models.config import SystemConfiguration, UserConfigurable + +from .base import VoiceBase logger = logging.getLogger(__name__) @@ -17,7 +18,7 @@ class StreamElementsConfig(SystemConfiguration): class StreamElementsSpeech(VoiceBase): - """Streamelements speech module for autogpt""" + """Streamelements speech module for AutoGPT Forge""" def _setup(self, config: StreamElementsConfig) -> None: """Setup the voices, API key, etc.""" diff --git a/autogpts/forge/forge/utils/const.py b/autogpts/forge/forge/utils/const.py new file mode 100644 index 000000000..8a6a52e40 --- /dev/null +++ b/autogpts/forge/forge/utils/const.py @@ -0,0 +1,2 @@ +FINISH_COMMAND = "finish" +ASK_COMMAND = "ask_user" diff --git a/autogpts/autogpt/autogpt/utils/exceptions.py b/autogpts/forge/forge/utils/exceptions.py index 95fc044ee..207501978 100644 --- a/autogpts/autogpt/autogpt/utils/exceptions.py +++ b/autogpts/forge/forge/utils/exceptions.py @@ -1,6 +1,49 @@ +import inspect +import sys +import traceback from typing import Optional +def get_exception_message(): + """Get current exception type and message.""" + exc_type, exc_value, _ = sys.exc_info() + exception_message = f"{exc_type.__name__}: {exc_value}" + return exception_message + + +def get_detailed_traceback(): + """Get current exception traceback with local variables.""" + _, _, exc_tb = sys.exc_info() + detailed_traceback = "Traceback (most recent call last):\n" + formatted_tb = traceback.format_tb(exc_tb) + detailed_traceback += "".join(formatted_tb) + + # Optionally add local variables to the traceback information + 
detailed_traceback += "\nLocal variables by frame, innermost last:\n" + while exc_tb: + frame = exc_tb.tb_frame + lineno = exc_tb.tb_lineno + function_name = frame.f_code.co_name + + # Format frame information + detailed_traceback += ( + f" Frame {function_name} in {frame.f_code.co_filename} at line {lineno}\n" + ) + + # Get local variables for the frame + local_vars = inspect.getargvalues(frame).locals + for var_name, value in local_vars.items(): + detailed_traceback += f" {var_name} = {value}\n" + + exc_tb = exc_tb.tb_next + + return detailed_traceback + + +class NotFoundError(Exception): + pass + + class AgentException(Exception): """Base class for specific exceptions relevant in the execution of Agents""" diff --git a/autogpts/autogpt/autogpt/utils/file_operations_utils.py b/autogpts/forge/forge/utils/file_operations.py index e9dcae41b..e9dcae41b 100644 --- a/autogpts/autogpt/autogpt/utils/file_operations_utils.py +++ b/autogpts/forge/forge/utils/file_operations.py diff --git a/autogpts/autogpt/autogpt/url_utils/validators.py b/autogpts/forge/forge/utils/url_validator.py index a097804f1..a097804f1 100644 --- a/autogpts/autogpt/autogpt/url_utils/validators.py +++ b/autogpts/forge/forge/utils/url_validator.py diff --git a/autogpts/autogpt/autogpt/utils/utils.py b/autogpts/forge/forge/utils/yaml_validator.py index 725ffc688..18a7a6389 100644 --- a/autogpts/autogpt/autogpt/utils/utils.py +++ b/autogpts/forge/forge/utils/yaml_validator.py @@ -3,9 +3,6 @@ from pathlib import Path import yaml from colorama import Fore -DEFAULT_FINISH_COMMAND = "finish" -DEFAULT_ASK_COMMAND = "ask_user" - def validate_yaml_file(file: str | Path): try: diff --git a/autogpts/forge/poetry.lock b/autogpts/forge/poetry.lock index c619e7a95..fc6d69dad 100644 --- a/autogpts/forge/poetry.lock +++ b/autogpts/forge/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 
+# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "agbenchmark" @@ -168,6 +168,30 @@ files = [ frozenlist = ">=1.1.0" [[package]] +name = "anthropic" +version = "0.25.8" +description = "The official Python library for the anthropic API" +optional = false +python-versions = ">=3.7" +files = [ + {file = "anthropic-0.25.8-py3-none-any.whl", hash = "sha256:c7a0091916eb22a5e0012b725f5492779eedfcad2da8dc906082e1db7596a65c"}, + {file = "anthropic-0.25.8.tar.gz", hash = "sha256:93f6063e96d5dbeaa172edc177762f630e55b2f81595cedb760278b95a2dd03e"}, +] + +[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +httpx = ">=0.23.0,<1" +pydantic = ">=1.9.0,<3" +sniffio = "*" +tokenizers = ">=0.13.0" +typing-extensions = ">=4.7,<5" + +[package.extras] +bedrock = ["boto3 (>=1.28.57)", "botocore (>=1.31.57)"] +vertex = ["google-auth (>=2,<3)"] + +[[package]] name = "anyio" version = "4.2.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" @@ -385,6 +409,90 @@ jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] +name = "blis" +version = "0.7.11" +description = "The Blis BLAS-like linear algebra library, as a self-contained C-extension." 
+optional = false +python-versions = "*" +files = [ + {file = "blis-0.7.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cd5fba34c5775e4c440d80e4dea8acb40e2d3855b546e07c4e21fad8f972404c"}, + {file = "blis-0.7.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:31273d9086cab9c56986d478e3ed6da6752fa4cdd0f7b5e8e5db30827912d90d"}, + {file = "blis-0.7.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d06883f83d4c8de8264154f7c4a420b4af323050ed07398c1ff201c34c25c0d2"}, + {file = "blis-0.7.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee493683e3043650d4413d531e79e580d28a3c7bdd184f1b9cfa565497bda1e7"}, + {file = "blis-0.7.11-cp310-cp310-win_amd64.whl", hash = "sha256:a73945a9d635eea528bccfdfcaa59dd35bd5f82a4a40d5ca31f08f507f3a6f81"}, + {file = "blis-0.7.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1b68df4d01d62f9adaef3dad6f96418787265a6878891fc4e0fabafd6d02afba"}, + {file = "blis-0.7.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:162e60d941a8151418d558a94ee5547cb1bbeed9f26b3b6f89ec9243f111a201"}, + {file = "blis-0.7.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:686a7d0111d5ba727cd62f374748952fd6eb74701b18177f525b16209a253c01"}, + {file = "blis-0.7.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0421d6e44cda202b113a34761f9a062b53f8c2ae8e4ec8325a76e709fca93b6e"}, + {file = "blis-0.7.11-cp311-cp311-win_amd64.whl", hash = "sha256:0dc9dcb3843045b6b8b00432409fd5ee96b8344a324e031bfec7303838c41a1a"}, + {file = "blis-0.7.11-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dadf8713ea51d91444d14ad4104a5493fa7ecc401bbb5f4a203ff6448fadb113"}, + {file = "blis-0.7.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5bcdaf370f03adaf4171d6405a89fa66cb3c09399d75fc02e1230a78cd2759e4"}, + {file = "blis-0.7.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:7de19264b1d49a178bf8035406d0ae77831f3bfaa3ce02942964a81a202abb03"}, + {file = "blis-0.7.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ea55c6a4a60fcbf6a0fdce40df6e254451ce636988323a34b9c94b583fc11e5"}, + {file = "blis-0.7.11-cp312-cp312-win_amd64.whl", hash = "sha256:5a305dbfc96d202a20d0edd6edf74a406b7e1404f4fa4397d24c68454e60b1b4"}, + {file = "blis-0.7.11-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:68544a1cbc3564db7ba54d2bf8988356b8c7acd025966e8e9313561b19f0fe2e"}, + {file = "blis-0.7.11-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:075431b13b9dd7b411894d4afbd4212acf4d0f56c5a20628f4b34902e90225f1"}, + {file = "blis-0.7.11-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:324fdf62af9075831aa62b51481960e8465674b7723f977684e32af708bb7448"}, + {file = "blis-0.7.11-cp36-cp36m-win_amd64.whl", hash = "sha256:afebdb02d2dcf9059f23ce1244585d3ce7e95c02a77fd45a500e4a55b7b23583"}, + {file = "blis-0.7.11-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2e62cd14b20e960f21547fee01f3a0b2ac201034d819842865a667c969c355d1"}, + {file = "blis-0.7.11-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89b01c05a5754edc0b9a3b69be52cbee03f645b2ec69651d12216ea83b8122f0"}, + {file = "blis-0.7.11-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfee5ec52ba1e9002311d9191f7129d7b0ecdff211e88536fb24c865d102b50d"}, + {file = "blis-0.7.11-cp37-cp37m-win_amd64.whl", hash = "sha256:844b6377e3e7f3a2e92e7333cc644095386548ad5a027fdc150122703c009956"}, + {file = "blis-0.7.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6df00c24128e323174cde5d80ebe3657df39615322098ce06613845433057614"}, + {file = "blis-0.7.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:809d1da1331108935bf06e22f3cf07ef73a41a572ecd81575bdedb67defe3465"}, + {file = "blis-0.7.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:bfabd5272bbbe504702b8dfe30093653d278057656126716ff500d9c184b35a6"}, + {file = "blis-0.7.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca684f5c2f05269f17aefe7812360286e9a1cee3afb96d416485efd825dbcf19"}, + {file = "blis-0.7.11-cp38-cp38-win_amd64.whl", hash = "sha256:688a8b21d2521c2124ee8dfcbaf2c385981ccc27e313e052113d5db113e27d3b"}, + {file = "blis-0.7.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2ff7abd784033836b284ff9f4d0d7cb0737b7684daebb01a4c9fe145ffa5a31e"}, + {file = "blis-0.7.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f9caffcd14795bfe52add95a0dd8426d44e737b55fcb69e2b797816f4da0b1d2"}, + {file = "blis-0.7.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fb36989ed61233cfd48915896802ee6d3d87882190000f8cfe0cf4a3819f9a8"}, + {file = "blis-0.7.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ea09f961871f880d5dc622dce6c370e4859559f0ead897ae9b20ddafd6b07a2"}, + {file = "blis-0.7.11-cp39-cp39-win_amd64.whl", hash = "sha256:5bb38adabbb22f69f22c74bad025a010ae3b14de711bf5c715353980869d491d"}, + {file = "blis-0.7.11.tar.gz", hash = "sha256:cec6d48f75f7ac328ae1b6fbb372dde8c8a57c89559172277f66e01ff08d4d42"}, +] + +[package.dependencies] +numpy = {version = ">=1.19.0", markers = "python_version >= \"3.9\""} + +[[package]] +name = "boto3" +version = "1.34.103" +description = "The AWS SDK for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "boto3-1.34.103-py3-none-any.whl", hash = "sha256:59b6499f1bb423dd99de6566a20d0a7cf1a5476824be3a792290fd86600e8365"}, + {file = "boto3-1.34.103.tar.gz", hash = "sha256:58d097241f3895c4a4c80c9e606689c6e06d77f55f9f53a4cc02dee7e03938b9"}, +] + +[package.dependencies] +botocore = ">=1.34.103,<1.35.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.10.0,<0.11.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" +version = "1.34.103" +description = "Low-level, 
data-driven core of boto 3." +optional = false +python-versions = ">=3.8" +files = [ + {file = "botocore-1.34.103-py3-none-any.whl", hash = "sha256:0330d139f18f78d38127e65361859e24ebd6a8bcba184f903c01bb999a3fa431"}, + {file = "botocore-1.34.103.tar.gz", hash = "sha256:5f07e2c7302c0a9f469dcd08b4ddac152e9f5888b12220242c20056255010939"}, +] + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} + +[package.extras] +crt = ["awscrt (==0.20.9)"] + +[[package]] name = "bs4" version = "0.0.1" description = "Dummy package for Beautiful Soup" @@ -432,6 +540,17 @@ files = [ ] [[package]] +name = "catalogue" +version = "2.0.10" +description = "Super lightweight function registries for your library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "catalogue-2.0.10-py3-none-any.whl", hash = "sha256:58c2de0020aa90f4a2da7dfad161bf7b3b054c86a5f09fcedc0b2b740c109a9f"}, + {file = "catalogue-2.0.10.tar.gz", hash = "sha256:4f56daa940913d3f09d589c191c74e5a6d51762b3a9e37dd53b7437afd6cda15"}, +] + +[[package]] name = "certifi" version = "2023.11.17" description = "Python package for providing Mozilla's CA Bundle." @@ -725,6 +844,26 @@ click = "*" test = ["pytest"] [[package]] +name = "cloudpathlib" +version = "0.16.0" +description = "pathlib-style classes for cloud storage services." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cloudpathlib-0.16.0-py3-none-any.whl", hash = "sha256:f46267556bf91f03db52b5df7a152548596a15aabca1c8731ef32b0b25a1a6a3"}, + {file = "cloudpathlib-0.16.0.tar.gz", hash = "sha256:cdfcd35d46d529587d744154a0bdf962aca953b725c8784cd2ec478354ea63a3"}, +] + +[package.dependencies] +typing_extensions = {version = ">4", markers = "python_version < \"3.11\""} + +[package.extras] +all = ["cloudpathlib[azure]", "cloudpathlib[gs]", "cloudpathlib[s3]"] +azure = ["azure-storage-blob (>=12)"] +gs = ["google-cloud-storage"] +s3 = ["boto3"] + +[[package]] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." @@ -753,21 +892,19 @@ humanfriendly = ">=9.1" cron = ["capturer (>=2.4)"] [[package]] -name = "colorlog" -version = "6.8.0" -description = "Add colours to the output of Python's logging module." +name = "confection" +version = "0.1.4" +description = "The sweetest config system for Python" optional = false python-versions = ">=3.6" files = [ - {file = "colorlog-6.8.0-py3-none-any.whl", hash = "sha256:4ed23b05a1154294ac99f511fabe8c1d6d4364ec1f7fc989c7fb515ccc29d375"}, - {file = "colorlog-6.8.0.tar.gz", hash = "sha256:fbb6fdf9d5685f2517f388fb29bb27d54e8654dd31f58bc2a3b217e967a95ca6"}, + {file = "confection-0.1.4-py3-none-any.whl", hash = "sha256:a658818d004939069c3e2b3db74a2cb9d956a5e61a1c9ad61788e0ee09a7090f"}, + {file = "confection-0.1.4.tar.gz", hash = "sha256:e80f22fd008b5231a2e8852fac6de9e28f2276a04031d0536cff74fe4a990c8f"}, ] [package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} - -[package.extras] -development = ["black", "flake8", "mypy", "pytest", "types-colorama"] +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<3.0.0" +srsly = ">=2.4.0,<3.0.0" [[package]] name = "contourpy" @@ -873,6 +1010,48 @@ docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] tests = ["pytest", "pytest-cov", "pytest-xdist"] [[package]] +name = "cymem" 
+version = "2.0.8" +description = "Manage calls to calloc/free through Cython" +optional = false +python-versions = "*" +files = [ + {file = "cymem-2.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:77b5d3a73c41a394efd5913ab7e48512054cd2dabb9582d489535456641c7666"}, + {file = "cymem-2.0.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bd33da892fb560ba85ea14b1528c381ff474048e861accc3366c8b491035a378"}, + {file = "cymem-2.0.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29a551eda23eebd6d076b855f77a5ed14a1d1cae5946f7b3cb5de502e21b39b0"}, + {file = "cymem-2.0.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8260445652ae5ab19fff6851f32969a7b774f309162e83367dd0f69aac5dbf7"}, + {file = "cymem-2.0.8-cp310-cp310-win_amd64.whl", hash = "sha256:a63a2bef4c7e0aec7c9908bca0a503bf91ac7ec18d41dd50dc7dff5d994e4387"}, + {file = "cymem-2.0.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6b84b780d52cb2db53d4494fe0083c4c5ee1f7b5380ceaea5b824569009ee5bd"}, + {file = "cymem-2.0.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d5f83dc3cb5a39f0e32653cceb7c8ce0183d82f1162ca418356f4a8ed9e203e"}, + {file = "cymem-2.0.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ac218cf8a43a761dc6b2f14ae8d183aca2bbb85b60fe316fd6613693b2a7914"}, + {file = "cymem-2.0.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42c993589d1811ec665d37437d5677b8757f53afadd927bf8516ac8ce2d3a50c"}, + {file = "cymem-2.0.8-cp311-cp311-win_amd64.whl", hash = "sha256:ab3cf20e0eabee9b6025ceb0245dadd534a96710d43fb7a91a35e0b9e672ee44"}, + {file = "cymem-2.0.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cb51fddf1b920abb1f2742d1d385469bc7b4b8083e1cfa60255e19bc0900ccb5"}, + {file = "cymem-2.0.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9235957f8c6bc2574a6a506a1687164ad629d0b4451ded89d49ebfc61b52660c"}, + {file = 
"cymem-2.0.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2cc38930ff5409f8d61f69a01e39ecb185c175785a1c9bec13bcd3ac8a614ba"}, + {file = "cymem-2.0.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bf49e3ea2c441f7b7848d5c61b50803e8cbd49541a70bb41ad22fce76d87603"}, + {file = "cymem-2.0.8-cp312-cp312-win_amd64.whl", hash = "sha256:ecd12e3bacf3eed5486e4cd8ede3c12da66ee0e0a9d0ae046962bc2bb503acef"}, + {file = "cymem-2.0.8-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:167d8019db3b40308aabf8183fd3fbbc256323b645e0cbf2035301058c439cd0"}, + {file = "cymem-2.0.8-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17cd2c2791c8f6b52f269a756ba7463f75bf7265785388a2592623b84bb02bf8"}, + {file = "cymem-2.0.8-cp36-cp36m-win_amd64.whl", hash = "sha256:6204f0a3307bf45d109bf698ba37997ce765f21e359284328e4306c7500fcde8"}, + {file = "cymem-2.0.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b9c05db55ea338648f8e5f51dd596568c7f62c5ae32bf3fa5b1460117910ebae"}, + {file = "cymem-2.0.8-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ce641f7ba0489bd1b42a4335a36f38c8507daffc29a512681afaba94a0257d2"}, + {file = "cymem-2.0.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6b83a5972a64f62796118da79dfeed71f4e1e770b2b7455e889c909504c2358"}, + {file = "cymem-2.0.8-cp37-cp37m-win_amd64.whl", hash = "sha256:ada6eb022e4a0f4f11e6356a5d804ceaa917174e6cf33c0b3e371dbea4dd2601"}, + {file = "cymem-2.0.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1e593cd57e2e19eb50c7ddaf7e230b73c890227834425b9dadcd4a86834ef2ab"}, + {file = "cymem-2.0.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d513f0d5c6d76facdc605e42aa42c8d50bb7dedca3144ec2b47526381764deb0"}, + {file = "cymem-2.0.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e370dd54359101b125bfb191aca0542718077b4edb90ccccba1a28116640fed"}, + {file = 
"cymem-2.0.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84f8c58cde71b8fc7024883031a4eec66c0a9a4d36b7850c3065493652695156"}, + {file = "cymem-2.0.8-cp38-cp38-win_amd64.whl", hash = "sha256:6a6edddb30dd000a27987fcbc6f3c23b7fe1d74f539656952cb086288c0e4e29"}, + {file = "cymem-2.0.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b896c83c08dadafe8102a521f83b7369a9c5cc3e7768eca35875764f56703f4c"}, + {file = "cymem-2.0.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a4f8f2bfee34f6f38b206997727d29976666c89843c071a968add7d61a1e8024"}, + {file = "cymem-2.0.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7372e2820fa66fd47d3b135f3eb574ab015f90780c3a21cfd4809b54f23a4723"}, + {file = "cymem-2.0.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4e57bee56d35b90fc2cba93e75b2ce76feaca05251936e28a96cf812a1f5dda"}, + {file = "cymem-2.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ceeab3ce2a92c7f3b2d90854efb32cb203e78cb24c836a5a9a2cac221930303b"}, + {file = "cymem-2.0.8.tar.gz", hash = "sha256:8fb09d222e21dcf1c7e907dc85cf74501d4cea6c4ed4ac6c9e016f98fb59cbbf"}, +] + +[[package]] name = "decorator" version = "5.1.1" description = "Decorators for Humans" @@ -884,6 +1063,16 @@ files = [ ] [[package]] +name = "demjson3" +version = "3.0.6" +description = "encoder, decoder, and lint/validator for JSON (JavaScript Object Notation) compliant with RFC 7159" +optional = false +python-versions = "*" +files = [ + {file = "demjson3-3.0.6.tar.gz", hash = "sha256:37c83b0c6eb08d25defc88df0a2a4875d58a7809a9650bd6eee7afd8053cdbac"}, +] + +[[package]] name = "deprecated" version = "1.2.14" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." @@ -923,6 +1112,27 @@ files = [ ] [[package]] +name = "docker" +version = "7.0.0" +description = "A Python library for the Docker Engine API." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "docker-7.0.0-py3-none-any.whl", hash = "sha256:12ba681f2777a0ad28ffbcc846a69c31b4dfd9752b47eb425a274ee269c5e14b"}, + {file = "docker-7.0.0.tar.gz", hash = "sha256:323736fb92cd9418fc5e7133bc953e11a9da04f4483f828b527db553f1e7e5a3"}, +] + +[package.dependencies] +packaging = ">=14.0" +pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} +requests = ">=2.26.0" +urllib3 = ">=1.26.0" + +[package.extras] +ssh = ["paramiko (>=2.4.3)"] +websockets = ["websocket-client (>=1.3.0)"] + +[[package]] name = "duckduckgo-search" version = "5.1.0" description = "Search for words, documents, images, news, maps and text translation using the DuckDuckGo.com search engine." @@ -1221,7 +1431,7 @@ tqdm = ["tqdm"] name = "gitdb" version = "4.0.11" description = "Git Object Database" -optional = true +optional = false python-versions = ">=3.7" files = [ {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, @@ -1235,7 +1445,7 @@ smmap = ">=3.0.1,<6" name = "gitpython" version = "3.1.41" description = "GitPython is a Python library used to interact with Git repositories" -optional = true +optional = false python-versions = ">=3.7" files = [ {file = "GitPython-3.1.41-py3-none-any.whl", hash = "sha256:c36b6634d069b3f719610175020a9aed919421c87552185b085e04fbbdb10b7c"}, @@ -1271,6 +1481,24 @@ grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] +name = "google-api-python-client" +version = "2.129.0" +description = "Google API Client Library for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-api-python-client-2.129.0.tar.gz", hash = "sha256:984cc8cc8eb4923468b1926d2b8effc5b459a4dda3c845896eb87c153b28ef84"}, + {file = "google_api_python_client-2.129.0-py2.py3-none-any.whl", hash = "sha256:d50f7e2dfdbb7fc2732f6a0cba1c54d7bb676390679526c6bb628c901e43ec86"}, +] + 
+[package.dependencies] +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0.dev0" +google-auth = ">=1.32.0,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0.dev0" +google-auth-httplib2 = ">=0.2.0,<1.0.0" +httplib2 = ">=0.19.0,<1.dev0" +uritemplate = ">=3.0.1,<5" + +[[package]] name = "google-auth" version = "2.26.2" description = "Google Authentication Library" @@ -1294,6 +1522,21 @@ reauth = ["pyu2f (>=0.1.5)"] requests = ["requests (>=2.20.0,<3.0.0.dev0)"] [[package]] +name = "google-auth-httplib2" +version = "0.2.0" +description = "Google Authentication Library: httplib2 transport" +optional = false +python-versions = "*" +files = [ + {file = "google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05"}, + {file = "google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d"}, +] + +[package.dependencies] +google-auth = "*" +httplib2 = ">=0.19.0" + +[[package]] name = "google-cloud-core" version = "2.4.1" description = "Google Cloud API client core library" @@ -1618,6 +1861,20 @@ http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] [[package]] +name = "httplib2" +version = "0.22.0" +description = "A comprehensive HTTP client library." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc"}, + {file = "httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81"}, +] + +[package.dependencies] +pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0.2,<3.0.3 || >3.0.3,<4", markers = "python_version > \"3.0\""} + +[[package]] name = "httptools" version = "0.6.1" description = "A collection of framework independent HTTP protocol utils." 
@@ -1891,6 +2148,17 @@ MarkupSafe = ">=2.0" i18n = ["Babel (>=2.7)"] [[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + +[[package]] name = "jsonpickle" version = "3.0.2" description = "Python library for serializing any arbitrary object graph into JSON" @@ -1907,6 +2175,41 @@ testing = ["ecdsa", "feedparser", "gmpy2", "numpy", "pandas", "pymongo", "pytest testing-libs = ["simplejson", "ujson"] [[package]] +name = "jsonschema" +version = "4.22.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema-4.22.0-py3-none-any.whl", hash = "sha256:ff4cfd6b1367a40e7bc6411caec72effadd3db0bbe5017de188f2d6108335802"}, + {file = "jsonschema-4.22.0.tar.gz", hash = "sha256:5b22d434a45935119af990552c862e5d6d564e8f6601206b305a61fdf661a2b7"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +jsonschema-specifications = ">=2023.03.6" +referencing = ">=0.28.4" +rpds-py = ">=0.7.1" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "jsonschema-specifications" +version = "2023.12.1" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, 
+ {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, +] + +[package.dependencies] +referencing = ">=0.31.0" + +[[package]] name = "kiwisolver" version = "1.4.5" description = "A fast implementation of the Cassowary constraint solver" @@ -2046,6 +2349,42 @@ websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" adal = ["adal (>=1.0.2)"] [[package]] +name = "langcodes" +version = "3.4.0" +description = "Tools for labeling human languages with IETF language tags" +optional = false +python-versions = ">=3.8" +files = [ + {file = "langcodes-3.4.0-py3-none-any.whl", hash = "sha256:10a4cc078b8e8937d8485d3352312a0a89a3125190db9f2bb2074250eef654e9"}, + {file = "langcodes-3.4.0.tar.gz", hash = "sha256:ae5a77d1a01d0d1e91854a671890892b7ce9abb601ab7327fc5c874f899e1979"}, +] + +[package.dependencies] +language-data = ">=1.2" + +[package.extras] +build = ["build", "twine"] +test = ["pytest", "pytest-cov"] + +[[package]] +name = "language-data" +version = "1.2.0" +description = "Supplementary data about languages used by the langcodes module" +optional = false +python-versions = "*" +files = [ + {file = "language_data-1.2.0-py3-none-any.whl", hash = "sha256:77d5cab917f91ee0b2f1aa7018443e911cf8985ef734ca2ba3940770f6a3816b"}, + {file = "language_data-1.2.0.tar.gz", hash = "sha256:82a86050bbd677bfde87d97885b17566cfe75dad3ac4f5ce44b52c28f752e773"}, +] + +[package.dependencies] +marisa-trie = ">=0.7.7" + +[package.extras] +build = ["build", "twine"] +test = ["pytest", "pytest-cov"] + +[[package]] name = "litellm" version = "1.17.9" description = "Library to easily interface with LLM API providers" @@ -2165,6 +2504,109 @@ htmlsoup = ["BeautifulSoup4"] source = ["Cython (>=3.0.7)"] [[package]] +name = "marisa-trie" +version = "1.1.1" +description = "Static memory-efficient and fast Trie-like structures for Python." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "marisa_trie-1.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:68e48a547b9a1fd64c648684cd375402ba521c2c4a724756a944ef4b88c3047c"}, + {file = "marisa_trie-1.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:615d7de907919bda16e9cafc1fa74942354273c299bf07e3c0adb2420d6fad48"}, + {file = "marisa_trie-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d587001ef30960eba6d4c9b1f6b03037480c1e4b277b305b5a2957a5eebe4f09"}, + {file = "marisa_trie-1.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11765ee9c2ad162bc7f8ab9cf383a21349673034bfac9bf00d6b06e44d70a4c9"}, + {file = "marisa_trie-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5abc72a7267de6a4e3aa7463e780ddfaac442ef3a385f9e1c60e7f32c0cc34"}, + {file = "marisa_trie-1.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c70f85ab67754e2f28af6cb1f1db826b5ec735beca2fa021a79c14f9afbc6167"}, + {file = "marisa_trie-1.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5c3a3d12f9c1a4312562b03ccbbd29d0aa28bda999c4f7fa7763f011c9d3a11"}, + {file = "marisa_trie-1.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:73eec66265424a548119648a6f38b119a525a767a86dc397e001bfe70f518b91"}, + {file = "marisa_trie-1.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:93c7129f410f9f3215d01ae7737cfc9afa528264c53ba8ee9859a29f164069e0"}, + {file = "marisa_trie-1.1.1-cp310-cp310-win32.whl", hash = "sha256:fe5b7ed1768409933d4457b8bf8d2b2b1af77b7333a27bd418ea0510289d4763"}, + {file = "marisa_trie-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:9c5baad750994681ebb8a92bd577a9be31de6e6f9cd391156bf595b91f719db2"}, + {file = "marisa_trie-1.1.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bfc1a6b60bccee0f8b2edba893b9ad339e7607aee728f3bc4f75ba7d28185c7d"}, + {file = 
"marisa_trie-1.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d45329585ad3e068b7878ba929032987c6a53f85a40bd859b9a1a16324236dd6"}, + {file = "marisa_trie-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd028e97d418f092e18d451a0a42bffaa849457662d66747a03332dfff6c39d9"}, + {file = "marisa_trie-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37d423cb3a9fe4270ee2ad083d1bb62d6c4cc333dcb1197b024ee1ae7c5d6535"}, + {file = "marisa_trie-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cbcf88ddab9890a4942b52fff6c09d8b8aea59f4861b5d37e112a16a4218461"}, + {file = "marisa_trie-1.1.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4268b12a279c90450b39e062068ff4c878a6b9750d6ab52ade8285b1594b5d10"}, + {file = "marisa_trie-1.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bbfbbff3e94b3a0be44e010b093af1ce0e29c7ed081d2a020496e863333f5c11"}, + {file = "marisa_trie-1.1.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5ecc678f562dd0cfe2406f0d5447e8200691509149c979334c2d0c26420d28ac"}, + {file = "marisa_trie-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1039316fc5899eee25df9302d81380e0be9a7fa0c10231322187b6d932b55a4a"}, + {file = "marisa_trie-1.1.1-cp311-cp311-win32.whl", hash = "sha256:67fa17083d5fb6d883c91ae512f9aab093a8a73ed77eae07e963014774909e81"}, + {file = "marisa_trie-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:c3140312ecb40456490d2afe24594bfc62a5a18de5344672ce6526e4c6e79e0e"}, + {file = "marisa_trie-1.1.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:98270ed60d0906a185dca185a9ce92fb97fbb68878a6cd76bd61994725727402"}, + {file = "marisa_trie-1.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3ff16e08924f0c342a37b1b1762d8d1394c4cc3b29724e124af54edecbdbd820"}, + {file = "marisa_trie-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:e2f867376a302d4770817f8caf1b1f22ac32a2a8a49629343391640054f8f7ab"}, + {file = "marisa_trie-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2ae28c5ad4abc1e638db5b39c454a03b25e966836cb3b7edbf398b34393d5ed"}, + {file = "marisa_trie-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:597077e4687d1ab2df13a6d46e33a09e6edcb985566717fe52bcb262f592754b"}, + {file = "marisa_trie-1.1.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29414a4b49905c67b48c662f39894d7594be6e3a58b15d3e7eee3588188d5591"}, + {file = "marisa_trie-1.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:52414fd15573475c8f79f90c3b7bbc37723e54f9671ba7d0e491887bcdeac7e7"}, + {file = "marisa_trie-1.1.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5aa364e4ccda1af55784b6dd318954924870792f9fd336b941d9b2fd8a4311e0"}, + {file = "marisa_trie-1.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:86427594ee1024d092a1482c33ed857b74d55418a4385495e1e2c60de8ca7572"}, + {file = "marisa_trie-1.1.1-cp312-cp312-win32.whl", hash = "sha256:dea2583084f7d5e095676afc1cc6d342862911cd496095b636ef14ac74f14aa3"}, + {file = "marisa_trie-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:8a2af61b5c3d9151b9320020499c3609651e24dd0c6178ec8f4826c78dbd5f42"}, + {file = "marisa_trie-1.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5be36ef0f5649e47f53302dc5317445c2764870d6a0ab5317a79381ff5ddf2bb"}, + {file = "marisa_trie-1.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:298a496ac0a7d06710e1ecc4df1f22b7384ca1a46d5295eb7b4445bbd15adb92"}, + {file = "marisa_trie-1.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:883ec31db8ec790a3ce6f39988a983b2c2b49ab018ec0d5bad4a248c8171f90d"}, + {file = "marisa_trie-1.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:f839cddd130d1073a151eb13d709b4449eb4eb2a29c0f38b8e1436fd57eb4a4b"}, + {file = "marisa_trie-1.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:235a14f65fc453e6ffe1f4287d7eda832b6870f925adf9bf72a402b0417d2711"}, + {file = "marisa_trie-1.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a707aa9d0ad8fb2fcc074129652903801e5295e53c94d46fb66f46fe38ad8b19"}, + {file = "marisa_trie-1.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3fc5ba277a586a3fd97c56076d9bd84339ef8cef08f28527b2384d72f28df853"}, + {file = "marisa_trie-1.1.1-cp37-cp37m-win32.whl", hash = "sha256:6c5519ff75e6001a62404b087774b517d669122b9b8b8ecf622f21e6d990700a"}, + {file = "marisa_trie-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f9cc48c12556610d814e4b162123eee43a6048f032d3957554e664feb2f77504"}, + {file = "marisa_trie-1.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:73d7ae84293ea6986c168b0cf0d29cd3abf16cfef7375c33d423816ca0eebe48"}, + {file = "marisa_trie-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5f410c0c28ec0d411d75f56327de35df15656bdc308648312c983a15ee84023b"}, + {file = "marisa_trie-1.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b406bab536dde70b36a8e3e60d0b2f224b280281988d6b0a0c24e47bd71b2c18"}, + {file = "marisa_trie-1.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27567a8e8950ced08aa3c74da2ceeff1f433114064df15e9ed1ec981f30970af"}, + {file = "marisa_trie-1.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02578f4c709232caeb3bf404bfd6b1c49936db8899790dfe5cd21e1a72df18bb"}, + {file = "marisa_trie-1.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3edbb4373f20a5d62e33d8aad9d7f7ad40c2ccf8e41d0e2534f28c9a73d5613"}, + {file = "marisa_trie-1.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:86184796d384183da5e0068e6fb96b060fb437efc60ba264b125350e8c7f498c"}, + {file = 
"marisa_trie-1.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9992a5f0c90dfc21664d218cf016acc6d9ebeb2f97c57bb4aa4d063dcb2253b8"}, + {file = "marisa_trie-1.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dad3167eb1c8259afb183c3dddee070bc39c68857490ed61c5c90186ec380ab0"}, + {file = "marisa_trie-1.1.1-cp38-cp38-win32.whl", hash = "sha256:c0a0ae5d8b6c39f53f3711b8bcdda0fe559f52c1789438b8399ea8a81b399dff"}, + {file = "marisa_trie-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:a127e3eebfb638799cf35a8504174462cf45395825f1ae9d45a5c434490b1bcd"}, + {file = "marisa_trie-1.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:76d7fd725dd7d7621f4202306ddb3f7a90ff3d1c511de9ea2c7ffa540169a7ca"}, + {file = "marisa_trie-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4241322c9022ad0f01e6049994c4eb95f35d8f64d2d7ab55f653d9e8bf51ba0f"}, + {file = "marisa_trie-1.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8780b5a43a0cc861cafd78b9b2a9849648bb86d3cabe5e95d80350986ad7e801"}, + {file = "marisa_trie-1.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4261285399b27c36a7ff0eb13e4eebaab8dd814a9512b3cd1191552c0af799f8"}, + {file = "marisa_trie-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f451948bfbdc9627318e3210683f7b8d4533d3174d7706ee94b6008c39e80753"}, + {file = "marisa_trie-1.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53d4ef171c77d4f0fd6278a0f1dab58562faa12cac3c5c9cc4cac4ba7e378f17"}, + {file = "marisa_trie-1.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aacb972faffbc208ed7f52ed50dd6710f38175d3673861405e0e82fa12d57269"}, + {file = "marisa_trie-1.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e5603cb20eeded143c5ff035978591b71bc0bc2c6cd9c2e6dfdaacdaab76907c"}, + {file = "marisa_trie-1.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:405ece63330b113040ed5b2371ff6e026d53c9c706ca9c58baf57f322e192895"}, + 
{file = "marisa_trie-1.1.1-cp39-cp39-win32.whl", hash = "sha256:b7a853063785e382d86eadea57363a0e2f04520d6ef948be88181df9e9ee5c0d"}, + {file = "marisa_trie-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:b44bd2bfc4bf080421a9ebac5f12434b36494effaa0ca8593a3df4e77cc6620e"}, + {file = "marisa_trie-1.1.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5dba7a60d6d340fd498f2a967c0a4c3aa7c4cab6ca7655cde0289cdc7bf3f747"}, + {file = "marisa_trie-1.1.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:ad624e95f46d8fc6f82af2d372ad55ef218babc323aa14338df843d907d040cc"}, + {file = "marisa_trie-1.1.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ccf3ae61a63dec06f3cfb8521fd9c8e6391761d47a4df0164954690b7cc3fab"}, + {file = "marisa_trie-1.1.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:493956e76e2c6276d1e804ee723b23eaba30beca43fc0ddf3a093abc178af3f4"}, + {file = "marisa_trie-1.1.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5207026332ed08957a3bc1391eb9c8861a1882e1517887ef423cfd3afc30e947"}, + {file = "marisa_trie-1.1.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bae9ff4146b84ef0d51e0940e310d034d1e6a6ce1879a03a891c541dce8b26f9"}, + {file = "marisa_trie-1.1.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:059a7b7cc0c7796c068e6ab07e522791c7addf3697616b2bcb73ed1d42a761aa"}, + {file = "marisa_trie-1.1.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e69ba62cbb74d2824cd49be9c2f592b306e5107d5005f0bb3b4d62c9b6ae7246"}, + {file = "marisa_trie-1.1.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26232fe4442f89643b4206ded1be486a12fcf731d55c5e42ff86e2f2ba5e949a"}, + {file = "marisa_trie-1.1.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3fa3bd1d32faf6afdb877a1e1f65e33873d88d158a16f9e00830901519d428ca"}, + {file = "marisa_trie-1.1.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:a7e48ba7748c2090b58f911ea995b94ff590781e81d0a2e0fc8b583af4d26710"}, + {file = "marisa_trie-1.1.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:52f0d96d738831c81127377920e86fc8cb14638df1ea8f37ea392b545f9f984c"}, + {file = "marisa_trie-1.1.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:511e5d23070c166427de24742771a6040eb5c787c51145dddcc7af4106ec8b08"}, + {file = "marisa_trie-1.1.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec39c09c0bf850f01b15bbd18214a89b9730001fd1483de873f6b7dc73fb2316"}, + {file = "marisa_trie-1.1.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfe6454eb6d2a9b2bb5583b433048670f85f264e613d1f885251ce68070adad8"}, + {file = "marisa_trie-1.1.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5661d8974b4128a847deb282dbe040e5eed5b91c56ed9d207623ea4db24abc5"}, + {file = "marisa_trie-1.1.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:08aed31f8164c7ec8ba6a449e6a18f4052bafe9dcaa2dcfd0e25fee9ddd94e36"}, + {file = "marisa_trie-1.1.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:18a1440b01d87566a5c2bddd6a575180a3526ec9da5f7aa55769213153737d19"}, + {file = "marisa_trie-1.1.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7cc903512d5d7cf3a30624dde8adc5ba4312732c931746f18641e0a5762646b3"}, + {file = "marisa_trie-1.1.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c7785c04373d8d2844f6636d73c08384a587c098093a04166177fa45494d912"}, + {file = "marisa_trie-1.1.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0196e3a9ed3bfce20e32ff7d9ff1c929d0ceb8c380ae0f227e11ab819e70dc2c"}, + {file = 
"marisa_trie-1.1.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2601b320268a87a4a7accaf7c2e8fc99c568e13316903d2010eb09e0ff16b6a9"}, + {file = "marisa_trie-1.1.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cd285b97204046e5c5018fa03752d243c6423df023963b52de39d4e90bb3024a"}, + {file = "marisa_trie-1.1.1.tar.gz", hash = "sha256:363f1be2314b1f9e26b5a3de45b59fd9a0a3289bf157be61bbed770643a46f1a"}, +] + +[package.dependencies] +setuptools = "*" + +[package.extras] +test = ["hypothesis", "pytest", "readme-renderer"] + +[[package]] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." @@ -2526,6 +2968,48 @@ files = [ ] [[package]] +name = "murmurhash" +version = "1.0.10" +description = "Cython bindings for MurmurHash" +optional = false +python-versions = ">=3.6" +files = [ + {file = "murmurhash-1.0.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3e90eef568adca5e17a91f96975e9a782ace3a617bbb3f8c8c2d917096e9bfeb"}, + {file = "murmurhash-1.0.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f8ecb00cc1ab57e4b065f9fb3ea923b55160c402d959c69a0b6dbbe8bc73efc3"}, + {file = "murmurhash-1.0.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3310101004d9e2e0530c2fed30174448d998ffd1b50dcbfb7677e95db101aa4b"}, + {file = "murmurhash-1.0.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65401a6f1778676253cbf89c1f45a8a7feb7d73038e483925df7d5943c08ed9"}, + {file = "murmurhash-1.0.10-cp310-cp310-win_amd64.whl", hash = "sha256:f23f2dfc7174de2cdc5007c0771ab8376a2a3f48247f32cac4a5563e40c6adcc"}, + {file = "murmurhash-1.0.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:90ed37ee2cace9381b83d56068334f77e3e30bc521169a1f886a2a2800e965d6"}, + {file = "murmurhash-1.0.10-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:22e9926fdbec9d24ced9b0a42f0fee68c730438be3cfb00c2499fd495caec226"}, + {file = "murmurhash-1.0.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54bfbfd68baa99717239b8844600db627f336a08b1caf4df89762999f681cdd1"}, + {file = "murmurhash-1.0.10-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18b9d200a09d48ef67f6840b77c14f151f2b6c48fd69661eb75c7276ebdb146c"}, + {file = "murmurhash-1.0.10-cp311-cp311-win_amd64.whl", hash = "sha256:e5d7cfe392c0a28129226271008e61e77bf307afc24abf34f386771daa7b28b0"}, + {file = "murmurhash-1.0.10-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:96f0a070344d4802ea76a160e0d4c88b7dc10454d2426f48814482ba60b38b9e"}, + {file = "murmurhash-1.0.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9f61862060d677c84556610ac0300a0776cb13cb3155f5075ed97e80f86e55d9"}, + {file = "murmurhash-1.0.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3b6d2d877d8881a08be66d906856d05944be0faf22b9a0390338bcf45299989"}, + {file = "murmurhash-1.0.10-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f54b0031d8696fed17ed6e9628f339cdea0ba2367ca051e18ff59193f52687"}, + {file = "murmurhash-1.0.10-cp312-cp312-win_amd64.whl", hash = "sha256:97e09d675de2359e586f09de1d0de1ab39f9911edffc65c9255fb5e04f7c1f85"}, + {file = "murmurhash-1.0.10-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b64e5332932993fef598e78d633b1ba664789ab73032ed511f3dc615a631a1a"}, + {file = "murmurhash-1.0.10-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e2a38437a8497e082408aa015c6d90554b9e00c2c221fdfa79728a2d99a739e"}, + {file = "murmurhash-1.0.10-cp36-cp36m-win_amd64.whl", hash = "sha256:55f4e4f9291a53c36070330950b472d72ba7d331e4ce3ce1ab349a4f458f7bc4"}, + {file = 
"murmurhash-1.0.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:16ef9f0855952493fe08929d23865425906a8c0c40607ac8a949a378652ba6a9"}, + {file = "murmurhash-1.0.10-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cc3351ae92b89c2fcdc6e41ac6f17176dbd9b3554c96109fd0713695d8663e7"}, + {file = "murmurhash-1.0.10-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6559fef7c2e7349a42a63549067709b656d6d1580752bd76be1541d8b2d65718"}, + {file = "murmurhash-1.0.10-cp37-cp37m-win_amd64.whl", hash = "sha256:8bf49e3bb33febb7057ae3a5d284ef81243a1e55eaa62bdcd79007cddbdc0461"}, + {file = "murmurhash-1.0.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f1605fde07030516eb63d77a598dd164fb9bf217fd937dbac588fe7e47a28c40"}, + {file = "murmurhash-1.0.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4904f7e68674a64eb2b08823c72015a5e14653e0b4b109ea00c652a005a59bad"}, + {file = "murmurhash-1.0.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0438f0cb44cf1cd26251f72c1428213c4197d40a4e3f48b1efc3aea12ce18517"}, + {file = "murmurhash-1.0.10-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db1171a3f9a10571931764cdbfaa5371f4cf5c23c680639762125cb075b833a5"}, + {file = "murmurhash-1.0.10-cp38-cp38-win_amd64.whl", hash = "sha256:1c9fbcd7646ad8ba67b895f71d361d232c6765754370ecea473dd97d77afe99f"}, + {file = "murmurhash-1.0.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7024ab3498434f22f8e642ae31448322ad8228c65c8d9e5dc2d563d57c14c9b8"}, + {file = "murmurhash-1.0.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a99dedfb7f0cc5a4cd76eb409ee98d3d50eba024f934e705914f6f4d765aef2c"}, + {file = "murmurhash-1.0.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b580b8503647de5dd7972746b7613ea586270f17ac92a44872a9b1b52c36d68"}, + {file = 
"murmurhash-1.0.10-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75840212bf75eb1352c946c3cf1622dacddd6d6bdda34368237d1eb3568f23a"}, + {file = "murmurhash-1.0.10-cp39-cp39-win_amd64.whl", hash = "sha256:a4209962b9f85de397c3203ea4b3a554da01ae9fd220fdab38757d4e9eba8d1a"}, + {file = "murmurhash-1.0.10.tar.gz", hash = "sha256:5282aab1317804c6ebd6dd7f69f15ba9075aee671c44a34be2bde0f1b11ef88a"}, +] + +[[package]] name = "mypy" version = "1.8.0" description = "Optional static typing for Python" @@ -3055,7 +3539,7 @@ ptyprocess = ">=0.5" name = "pillow" version = "10.2.0" description = "Python Imaging Library (Fork)" -optional = true +optional = false python-versions = ">=3.8" files = [ {file = "pillow-10.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:7823bdd049099efa16e4246bdf15e5a13dbb18a51b68fa06d6c1d4d8b99a796e"}, @@ -3152,6 +3636,16 @@ docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx- test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] [[package]] +name = "playsound" +version = "1.2.2" +description = "Pure Python, cross platform, single function module with no dependencies for playing sounds." 
+optional = false +python-versions = "*" +files = [ + {file = "playsound-1.2.2-py2.py3-none-any.whl", hash = "sha256:1e83750a5325cbccee03d6e751ba3e78c037ac95b95a3ba1f38d0c5aca9e1a34"}, +] + +[[package]] name = "pluggy" version = "1.3.0" description = "plugin and hook calling mechanisms for python" @@ -3208,6 +3702,52 @@ pyyaml = ">=5.1" virtualenv = ">=20.10.0" [[package]] +name = "preshed" +version = "3.0.9" +description = "Cython hash table that trusts the keys are pre-hashed" +optional = false +python-versions = ">=3.6" +files = [ + {file = "preshed-3.0.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4f96ef4caf9847b2bb9868574dcbe2496f974e41c2b83d6621c24fb4c3fc57e3"}, + {file = "preshed-3.0.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a61302cf8bd30568631adcdaf9e6b21d40491bd89ba8ebf67324f98b6c2a2c05"}, + {file = "preshed-3.0.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99499e8a58f58949d3f591295a97bca4e197066049c96f5d34944dd21a497193"}, + {file = "preshed-3.0.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea6b6566997dc3acd8c6ee11a89539ac85c77275b4dcefb2dc746d11053a5af8"}, + {file = "preshed-3.0.9-cp310-cp310-win_amd64.whl", hash = "sha256:bfd523085a84b1338ff18f61538e1cfcdedc4b9e76002589a301c364d19a2e36"}, + {file = "preshed-3.0.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e7c2364da27f2875524ce1ca754dc071515a9ad26eb5def4c7e69129a13c9a59"}, + {file = "preshed-3.0.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:182138033c0730c683a6d97e567ceb8a3e83f3bff5704f300d582238dbd384b3"}, + {file = "preshed-3.0.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:345a10be3b86bcc6c0591d343a6dc2bfd86aa6838c30ced4256dfcfa836c3a64"}, + {file = "preshed-3.0.9-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:51d0192274aa061699b284f9fd08416065348edbafd64840c3889617ee1609de"}, + {file = "preshed-3.0.9-cp311-cp311-win_amd64.whl", hash = "sha256:96b857d7a62cbccc3845ac8c41fd23addf052821be4eb987f2eb0da3d8745aa1"}, + {file = "preshed-3.0.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b4fe6720012c62e6d550d6a5c1c7ad88cacef8388d186dad4bafea4140d9d198"}, + {file = "preshed-3.0.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e04f05758875be9751e483bd3c519c22b00d3b07f5a64441ec328bb9e3c03700"}, + {file = "preshed-3.0.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a55091d0e395f1fdb62ab43401bb9f8b46c7d7794d5b071813c29dc1ab22fd0"}, + {file = "preshed-3.0.9-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7de8f5138bcac7870424e09684dc3dd33c8e30e81b269f6c9ede3d8c7bb8e257"}, + {file = "preshed-3.0.9-cp312-cp312-win_amd64.whl", hash = "sha256:24229c77364628743bc29c5620c5d6607ed104f0e02ae31f8a030f99a78a5ceb"}, + {file = "preshed-3.0.9-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73b0f7ecc58095ebbc6ca26ec806008ef780190fe685ce471b550e7eef58dc2"}, + {file = "preshed-3.0.9-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cb90ecd5bec71c21d95962db1a7922364d6db2abe284a8c4b196df8bbcc871e"}, + {file = "preshed-3.0.9-cp36-cp36m-win_amd64.whl", hash = "sha256:e304a0a8c9d625b70ba850c59d4e67082a6be9c16c4517b97850a17a282ebee6"}, + {file = "preshed-3.0.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1fa6d3d5529b08296ff9b7b4da1485c080311fd8744bbf3a86019ff88007b382"}, + {file = "preshed-3.0.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef1e5173809d85edd420fc79563b286b88b4049746b797845ba672cf9435c0e7"}, + {file = "preshed-3.0.9-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7fe81eb21c7d99e8b9a802cc313b998c5f791bda592903c732b607f78a6b7dc4"}, + {file = "preshed-3.0.9-cp37-cp37m-win_amd64.whl", hash = "sha256:78590a4a952747c3766e605ce8b747741005bdb1a5aa691a18aae67b09ece0e6"}, + {file = "preshed-3.0.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3452b64d97ce630e200c415073040aa494ceec6b7038f7a2a3400cbd7858e952"}, + {file = "preshed-3.0.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ac970d97b905e9e817ec13d31befd5b07c9cfec046de73b551d11a6375834b79"}, + {file = "preshed-3.0.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eebaa96ece6641cd981491cba995b68c249e0b6877c84af74971eacf8990aa19"}, + {file = "preshed-3.0.9-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d473c5f6856e07a88d41fe00bb6c206ecf7b34c381d30de0b818ba2ebaf9406"}, + {file = "preshed-3.0.9-cp38-cp38-win_amd64.whl", hash = "sha256:0de63a560f10107a3f0a9e252cc3183b8fdedcb5f81a86938fd9f1dcf8a64adf"}, + {file = "preshed-3.0.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3a9ad9f738084e048a7c94c90f40f727217387115b2c9a95c77f0ce943879fcd"}, + {file = "preshed-3.0.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a671dfa30b67baa09391faf90408b69c8a9a7f81cb9d83d16c39a182355fbfce"}, + {file = "preshed-3.0.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23906d114fc97c17c5f8433342495d7562e96ecfd871289c2bb2ed9a9df57c3f"}, + {file = "preshed-3.0.9-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:778cf71f82cedd2719b256f3980d556d6fb56ec552334ba79b49d16e26e854a0"}, + {file = "preshed-3.0.9-cp39-cp39-win_amd64.whl", hash = "sha256:a6e579439b329eb93f32219ff27cb358b55fbb52a4862c31a915a098c8a22ac2"}, + {file = "preshed-3.0.9.tar.gz", hash = "sha256:721863c5244ffcd2651ad0928951a2c7c77b102f4e11a251ad85d37ee7621660"}, +] + +[package.dependencies] +cymem = ">=2.0.2,<2.1.0" +murmurhash = 
">=0.28.0,<1.1.0" + +[[package]] name = "prompt-toolkit" version = "3.0.43" description = "Library for building powerful interactive command lines in Python" @@ -3477,10 +4017,20 @@ plugins = ["importlib-metadata"] windows-terminal = ["colorama (>=0.4.6)"] [[package]] +name = "pylatexenc" +version = "2.10" +description = "Simple LaTeX parser providing latex-to-unicode and unicode-to-latex conversion" +optional = false +python-versions = "*" +files = [ + {file = "pylatexenc-2.10.tar.gz", hash = "sha256:3dd8fd84eb46dc30bee1e23eaab8d8fb5a7f507347b23e5f38ad9675c84f40d3"}, +] + +[[package]] name = "pyparsing" version = "3.1.1" description = "pyparsing module - Classes and methods to define and execute parsing grammars" -optional = true +optional = false python-versions = ">=3.6.8" files = [ {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"}, @@ -3491,6 +4041,24 @@ files = [ diagrams = ["jinja2", "railroad-diagrams"] [[package]] +name = "pypdf" +version = "3.17.4" +description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pypdf-3.17.4-py3-none-any.whl", hash = "sha256:6aa0f61b33779b64486de3f42835d3668badd48dac4a536aeb87da187a5eacd2"}, + {file = "pypdf-3.17.4.tar.gz", hash = "sha256:ec96e2e4fc9648ac609d19c00d41e9d606e0ae2ce5a0bbe7691426f5f157166a"}, +] + +[package.extras] +crypto = ["PyCryptodome", "cryptography"] +dev = ["black", "flit", "pip-tools", "pre-commit (<2.18.0)", "pytest-cov", "pytest-socket", "pytest-timeout", "pytest-xdist", "wheel"] +docs = ["myst_parser", "sphinx", "sphinx_rtd_theme"] +full = ["Pillow (>=8.0.0)", "PyCryptodome", "cryptography"] +image = ["Pillow (>=8.0.0)"] + +[[package]] name = "pypika" version = "0.48.9" description = "A SQL query builder API for Python" @@ -3592,6 +4160,21 @@ files = [ six = ">=1.5" [[package]] +name = "python-docx" +version = 
"1.1.2" +description = "Create, read, and update Microsoft Word .docx files." +optional = false +python-versions = ">=3.7" +files = [ + {file = "python_docx-1.1.2-py3-none-any.whl", hash = "sha256:08c20d6058916fb19853fcf080f7f42b6270d89eac9fa5f8c15f691c0017fabe"}, + {file = "python_docx-1.1.2.tar.gz", hash = "sha256:0cf1f22e95b9002addca7948e16f2cd7acdfd498047f1941ca5d293db7762efd"}, +] + +[package.dependencies] +lxml = ">=3.1.0" +typing-extensions = ">=4.9.0" + +[[package]] name = "python-dotenv" version = "1.0.0" description = "Read key-value pairs from a .env file and set them as environment variables" @@ -3647,6 +4230,29 @@ jsonpickle = ">=1.4.1" networkx = ">=1.11" [[package]] +name = "pywin32" +version = "306" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +files = [ + {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = 
"sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, +] + +[[package]] name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" @@ -3707,6 +4313,21 @@ files = [ ] [[package]] +name = "referencing" +version = "0.35.1" +description = "JSON Referencing + Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, + {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" + +[[package]] name = "regex" version = "2023.12.25" description = "Alternative regular expression module, to replace re." 
@@ -3848,6 +4469,114 @@ requests = ">=2.0.0" rsa = ["oauthlib[signedtoken] (>=3.0.0)"] [[package]] +name = "rpds-py" +version = "0.18.1" +description = "Python bindings to Rust's persistent data structures (rpds)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rpds_py-0.18.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d31dea506d718693b6b2cffc0648a8929bdc51c70a311b2770f09611caa10d53"}, + {file = "rpds_py-0.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:732672fbc449bab754e0b15356c077cc31566df874964d4801ab14f71951ea80"}, + {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a98a1f0552b5f227a3d6422dbd61bc6f30db170939bd87ed14f3c339aa6c7c9"}, + {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f1944ce16401aad1e3f7d312247b3d5de7981f634dc9dfe90da72b87d37887d"}, + {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38e14fb4e370885c4ecd734f093a2225ee52dc384b86fa55fe3f74638b2cfb09"}, + {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08d74b184f9ab6289b87b19fe6a6d1a97fbfea84b8a3e745e87a5de3029bf944"}, + {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d70129cef4a8d979caa37e7fe957202e7eee8ea02c5e16455bc9808a59c6b2f0"}, + {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ce0bb20e3a11bd04461324a6a798af34d503f8d6f1aa3d2aa8901ceaf039176d"}, + {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:81c5196a790032e0fc2464c0b4ab95f8610f96f1f2fa3d4deacce6a79852da60"}, + {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f3027be483868c99b4985fda802a57a67fdf30c5d9a50338d9db646d590198da"}, + {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:d44607f98caa2961bab4fa3c4309724b185b464cdc3ba6f3d7340bac3ec97cc1"}, + {file = "rpds_py-0.18.1-cp310-none-win32.whl", hash = "sha256:c273e795e7a0f1fddd46e1e3cb8be15634c29ae8ff31c196debb620e1edb9333"}, + {file = "rpds_py-0.18.1-cp310-none-win_amd64.whl", hash = "sha256:8352f48d511de5f973e4f2f9412736d7dea76c69faa6d36bcf885b50c758ab9a"}, + {file = "rpds_py-0.18.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6b5ff7e1d63a8281654b5e2896d7f08799378e594f09cf3674e832ecaf396ce8"}, + {file = "rpds_py-0.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8927638a4d4137a289e41d0fd631551e89fa346d6dbcfc31ad627557d03ceb6d"}, + {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:154bf5c93d79558b44e5b50cc354aa0459e518e83677791e6adb0b039b7aa6a7"}, + {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07f2139741e5deb2c5154a7b9629bc5aa48c766b643c1a6750d16f865a82c5fc"}, + {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c7672e9fba7425f79019db9945b16e308ed8bc89348c23d955c8c0540da0a07"}, + {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:489bdfe1abd0406eba6b3bb4fdc87c7fa40f1031de073d0cfb744634cc8fa261"}, + {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c20f05e8e3d4fc76875fc9cb8cf24b90a63f5a1b4c5b9273f0e8225e169b100"}, + {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:967342e045564cef76dfcf1edb700b1e20838d83b1aa02ab313e6a497cf923b8"}, + {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2cc7c1a47f3a63282ab0f422d90ddac4aa3034e39fc66a559ab93041e6505da7"}, + {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f7afbfee1157e0f9376c00bb232e80a60e59ed716e3211a80cb8506550671e6e"}, + {file = 
"rpds_py-0.18.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e6934d70dc50f9f8ea47081ceafdec09245fd9f6032669c3b45705dea096b88"}, + {file = "rpds_py-0.18.1-cp311-none-win32.whl", hash = "sha256:c69882964516dc143083d3795cb508e806b09fc3800fd0d4cddc1df6c36e76bb"}, + {file = "rpds_py-0.18.1-cp311-none-win_amd64.whl", hash = "sha256:70a838f7754483bcdc830444952fd89645569e7452e3226de4a613a4c1793fb2"}, + {file = "rpds_py-0.18.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:3dd3cd86e1db5aadd334e011eba4e29d37a104b403e8ca24dcd6703c68ca55b3"}, + {file = "rpds_py-0.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:05f3d615099bd9b13ecf2fc9cf2d839ad3f20239c678f461c753e93755d629ee"}, + {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35b2b771b13eee8729a5049c976197ff58a27a3829c018a04341bcf1ae409b2b"}, + {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ee17cd26b97d537af8f33635ef38be873073d516fd425e80559f4585a7b90c43"}, + {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b646bf655b135ccf4522ed43d6902af37d3f5dbcf0da66c769a2b3938b9d8184"}, + {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19ba472b9606c36716062c023afa2484d1e4220548751bda14f725a7de17b4f6"}, + {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e30ac5e329098903262dc5bdd7e2086e0256aa762cc8b744f9e7bf2a427d3f8"}, + {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d58ad6317d188c43750cb76e9deacf6051d0f884d87dc6518e0280438648a9ac"}, + {file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e1735502458621921cee039c47318cb90b51d532c2766593be6207eec53e5c4c"}, + {file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:f5bab211605d91db0e2995a17b5c6ee5edec1270e46223e513eaa20da20076ac"}, + {file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2fc24a329a717f9e2448f8cd1f960f9dac4e45b6224d60734edeb67499bab03a"}, + {file = "rpds_py-0.18.1-cp312-none-win32.whl", hash = "sha256:1805d5901779662d599d0e2e4159d8a82c0b05faa86ef9222bf974572286b2b6"}, + {file = "rpds_py-0.18.1-cp312-none-win_amd64.whl", hash = "sha256:720edcb916df872d80f80a1cc5ea9058300b97721efda8651efcd938a9c70a72"}, + {file = "rpds_py-0.18.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:c827576e2fa017a081346dce87d532a5310241648eb3700af9a571a6e9fc7e74"}, + {file = "rpds_py-0.18.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:aa3679e751408d75a0b4d8d26d6647b6d9326f5e35c00a7ccd82b78ef64f65f8"}, + {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0abeee75434e2ee2d142d650d1e54ac1f8b01e6e6abdde8ffd6eeac6e9c38e20"}, + {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed402d6153c5d519a0faf1bb69898e97fb31613b49da27a84a13935ea9164dfc"}, + {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:338dee44b0cef8b70fd2ef54b4e09bb1b97fc6c3a58fea5db6cc083fd9fc2724"}, + {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7750569d9526199c5b97e5a9f8d96a13300950d910cf04a861d96f4273d5b104"}, + {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:607345bd5912aacc0c5a63d45a1f73fef29e697884f7e861094e443187c02be5"}, + {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:207c82978115baa1fd8d706d720b4a4d2b0913df1c78c85ba73fe6c5804505f0"}, + {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6d1e42d2735d437e7e80bab4d78eb2e459af48c0a46e686ea35f690b93db792d"}, + {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_i686.whl", hash = 
"sha256:5463c47c08630007dc0fe99fb480ea4f34a89712410592380425a9b4e1611d8e"}, + {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:06d218939e1bf2ca50e6b0ec700ffe755e5216a8230ab3e87c059ebb4ea06afc"}, + {file = "rpds_py-0.18.1-cp38-none-win32.whl", hash = "sha256:312fe69b4fe1ffbe76520a7676b1e5ac06ddf7826d764cc10265c3b53f96dbe9"}, + {file = "rpds_py-0.18.1-cp38-none-win_amd64.whl", hash = "sha256:9437ca26784120a279f3137ee080b0e717012c42921eb07861b412340f85bae2"}, + {file = "rpds_py-0.18.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:19e515b78c3fc1039dd7da0a33c28c3154458f947f4dc198d3c72db2b6b5dc93"}, + {file = "rpds_py-0.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a7b28c5b066bca9a4eb4e2f2663012debe680f097979d880657f00e1c30875a0"}, + {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:673fdbbf668dd958eff750e500495ef3f611e2ecc209464f661bc82e9838991e"}, + {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d960de62227635d2e61068f42a6cb6aae91a7fe00fca0e3aeed17667c8a34611"}, + {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:352a88dc7892f1da66b6027af06a2e7e5d53fe05924cc2cfc56495b586a10b72"}, + {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e0ee01ad8260184db21468a6e1c37afa0529acc12c3a697ee498d3c2c4dcaf3"}, + {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4c39ad2f512b4041343ea3c7894339e4ca7839ac38ca83d68a832fc8b3748ab"}, + {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aaa71ee43a703c321906813bb252f69524f02aa05bf4eec85f0c41d5d62d0f4c"}, + {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6cd8098517c64a85e790657e7b1e509b9fe07487fd358e19431cb120f7d96338"}, + {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_i686.whl", hash = 
"sha256:4adec039b8e2928983f885c53b7cc4cda8965b62b6596501a0308d2703f8af1b"}, + {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:32b7daaa3e9389db3695964ce8e566e3413b0c43e3394c05e4b243a4cd7bef26"}, + {file = "rpds_py-0.18.1-cp39-none-win32.whl", hash = "sha256:2625f03b105328729f9450c8badda34d5243231eef6535f80064d57035738360"}, + {file = "rpds_py-0.18.1-cp39-none-win_amd64.whl", hash = "sha256:bf18932d0003c8c4d51a39f244231986ab23ee057d235a12b2684ea26a353590"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cbfbea39ba64f5e53ae2915de36f130588bba71245b418060ec3330ebf85678e"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a3d456ff2a6a4d2adcdf3c1c960a36f4fd2fec6e3b4902a42a384d17cf4e7a65"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7700936ef9d006b7ef605dc53aa364da2de5a3aa65516a1f3ce73bf82ecfc7ae"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:51584acc5916212e1bf45edd17f3a6b05fe0cbb40482d25e619f824dccb679de"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:942695a206a58d2575033ff1e42b12b2aece98d6003c6bc739fbf33d1773b12f"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b906b5f58892813e5ba5c6056d6a5ad08f358ba49f046d910ad992196ea61397"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f8e3fecca256fefc91bb6765a693d96692459d7d4c644660a9fff32e517843"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7732770412bab81c5a9f6d20aeb60ae943a9b36dcd990d876a773526468e7163"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = 
"sha256:bd1105b50ede37461c1d51b9698c4f4be6e13e69a908ab7751e3807985fc0346"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:618916f5535784960f3ecf8111581f4ad31d347c3de66d02e728de460a46303c"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:17c6d2155e2423f7e79e3bb18151c686d40db42d8645e7977442170c360194d4"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c4c4c3f878df21faf5fac86eda32671c27889e13570645a9eea0a1abdd50922"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:fab6ce90574645a0d6c58890e9bcaac8d94dff54fb51c69e5522a7358b80ab64"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:531796fb842b53f2695e94dc338929e9f9dbf473b64710c28af5a160b2a8927d"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:740884bc62a5e2bbb31e584f5d23b32320fd75d79f916f15a788d527a5e83644"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:998125738de0158f088aef3cb264a34251908dd2e5d9966774fdab7402edfab7"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2be6e9dd4111d5b31ba3b74d17da54a8319d8168890fbaea4b9e5c3de630ae5"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0cee71bc618cd93716f3c1bf56653740d2d13ddbd47673efa8bf41435a60daa"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2c3caec4ec5cd1d18e5dd6ae5194d24ed12785212a90b37f5f7f06b8bedd7139"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:27bba383e8c5231cd559affe169ca0b96ec78d39909ffd817f28b166d7ddd4d8"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = 
"sha256:a888e8bdb45916234b99da2d859566f1e8a1d2275a801bb8e4a9644e3c7e7909"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6031b25fb1b06327b43d841f33842b383beba399884f8228a6bb3df3088485ff"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48c2faaa8adfacefcbfdb5f2e2e7bdad081e5ace8d182e5f4ade971f128e6bb3"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:d85164315bd68c0806768dc6bb0429c6f95c354f87485ee3593c4f6b14def2bd"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6afd80f6c79893cfc0574956f78a0add8c76e3696f2d6a15bca2c66c415cf2d4"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa242ac1ff583e4ec7771141606aafc92b361cd90a05c30d93e343a0c2d82a89"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21be4770ff4e08698e1e8e0bce06edb6ea0626e7c8f560bc08222880aca6a6f"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c45a639e93a0c5d4b788b2613bd637468edd62f8f95ebc6fcc303d58ab3f0a8"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:910e71711d1055b2768181efa0a17537b2622afeb0424116619817007f8a2b10"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b9bb1f182a97880f6078283b3505a707057c42bf55d8fca604f70dedfdc0772a"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d54f74f40b1f7aaa595a02ff42ef38ca654b1469bef7d52867da474243cc633"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:8d2e182c9ee01135e11e9676e9a62dfad791a7a467738f06726872374a83db49"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = 
"sha256:636a15acc588f70fda1661234761f9ed9ad79ebed3f2125d44be0862708b666e"}, + {file = "rpds_py-0.18.1.tar.gz", hash = "sha256:dc48b479d540770c811fbd1eb9ba2bb66951863e448efec2e2c102625328e92f"}, +] + +[[package]] name = "rsa" version = "4.9" description = "Pure-Python RSA implementation" @@ -3862,6 +4591,23 @@ files = [ pyasn1 = ">=0.1.3" [[package]] +name = "s3transfer" +version = "0.10.1" +description = "An Amazon S3 Transfer Manager" +optional = false +python-versions = ">= 3.8" +files = [ + {file = "s3transfer-0.10.1-py3-none-any.whl", hash = "sha256:ceb252b11bcf87080fb7850a224fb6e05c8a776bab8f2b64b7f25b969464839d"}, + {file = "s3transfer-0.10.1.tar.gz", hash = "sha256:5683916b4c724f799e600f41dd9e10a9ff19871bf87623cc8f491cb4f5fa0a19"}, +] + +[package.dependencies] +botocore = ">=1.33.2,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] + +[[package]] name = "selenium" version = "4.16.0" description = "" @@ -3879,6 +4625,53 @@ trio-websocket = ">=0.9,<1.0" urllib3 = {version = ">=1.26,<3", extras = ["socks"]} [[package]] +name = "sentry-sdk" +version = "1.45.0" +description = "Python client for Sentry (https://sentry.io)" +optional = false +python-versions = "*" +files = [ + {file = "sentry-sdk-1.45.0.tar.gz", hash = "sha256:509aa9678c0512344ca886281766c2e538682f8acfa50fd8d405f8c417ad0625"}, + {file = "sentry_sdk-1.45.0-py2.py3-none-any.whl", hash = "sha256:1ce29e30240cc289a027011103a8c83885b15ef2f316a60bcc7c5300afa144f1"}, +] + +[package.dependencies] +certifi = "*" +urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} + +[package.extras] +aiohttp = ["aiohttp (>=3.5)"] +arq = ["arq (>=0.23)"] +asyncpg = ["asyncpg (>=0.23)"] +beam = ["apache-beam (>=2.12)"] +bottle = ["bottle (>=0.12.13)"] +celery = ["celery (>=3)"] +celery-redbeat = ["celery-redbeat (>=2)"] +chalice = ["chalice (>=1.16.0)"] +clickhouse-driver = ["clickhouse-driver (>=0.2.0)"] +django = ["django (>=1.8)"] +falcon = ["falcon (>=1.4)"] +fastapi = ["fastapi 
(>=0.79.0)"] +flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] +grpcio = ["grpcio (>=1.21.1)"] +httpx = ["httpx (>=0.16.0)"] +huey = ["huey (>=2)"] +loguru = ["loguru (>=0.5)"] +openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"] +opentelemetry = ["opentelemetry-distro (>=0.35b0)"] +opentelemetry-experimental = ["opentelemetry-distro (>=0.40b0,<1.0)", "opentelemetry-instrumentation-aiohttp-client (>=0.40b0,<1.0)", "opentelemetry-instrumentation-django (>=0.40b0,<1.0)", "opentelemetry-instrumentation-fastapi (>=0.40b0,<1.0)", "opentelemetry-instrumentation-flask (>=0.40b0,<1.0)", "opentelemetry-instrumentation-requests (>=0.40b0,<1.0)", "opentelemetry-instrumentation-sqlite3 (>=0.40b0,<1.0)", "opentelemetry-instrumentation-urllib (>=0.40b0,<1.0)"] +pure-eval = ["asttokens", "executing", "pure-eval"] +pymongo = ["pymongo (>=3.1)"] +pyspark = ["pyspark (>=2.4.4)"] +quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] +rq = ["rq (>=0.6)"] +sanic = ["sanic (>=0.8)"] +sqlalchemy = ["sqlalchemy (>=1.2)"] +starlette = ["starlette (>=0.19.1)"] +starlite = ["starlite (>=1.48)"] +tornado = ["tornado (>=5)"] + +[[package]] name = "setuptools" version = "69.0.3" description = "Easily download, build, install, upgrade, and uninstall Python packages" @@ -3906,10 +4699,31 @@ files = [ ] [[package]] +name = "smart-open" +version = "6.4.0" +description = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob Storage, gzip, bz2...)" +optional = false +python-versions = ">=3.6,<4.0" +files = [ + {file = "smart_open-6.4.0-py3-none-any.whl", hash = "sha256:8d3ef7e6997e8e42dd55c74166ed21e6ac70664caa32dd940b26d54a8f6b4142"}, + {file = "smart_open-6.4.0.tar.gz", hash = "sha256:be3c92c246fbe80ebce8fbacb180494a481a77fcdcb7c1aadb2ea5b9c2bee8b9"}, +] + +[package.extras] +all = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "paramiko", "requests"] +azure = ["azure-common", "azure-core", "azure-storage-blob"] +gcs = 
["google-cloud-storage (>=2.6.0)"] +http = ["requests"] +s3 = ["boto3"] +ssh = ["paramiko"] +test = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "moto[server]", "paramiko", "pytest", "pytest-rerunfailures", "requests", "responses"] +webhdfs = ["requests"] + +[[package]] name = "smmap" version = "5.0.1" description = "A pure Python implementation of a sliding window memory map manager" -optional = true +optional = false python-versions = ">=3.7" files = [ {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, @@ -3950,6 +4764,116 @@ files = [ ] [[package]] +name = "spacy" +version = "3.7.4" +description = "Industrial-strength Natural Language Processing (NLP) in Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "spacy-3.7.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0f748625192f573c07ddea5fcd324919dbfbf4f4a2f7a1fc731e6dcba7321ea1"}, + {file = "spacy-3.7.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6288dca7b3a5489b3d7ce68404bc432ca22f826c662a12af47ef7bdb264307fb"}, + {file = "spacy-3.7.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef59db99b12a72d2646be3888d87f94c59e11cd07adc2f50a8130e83f07eb1cf"}, + {file = "spacy-3.7.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f07477a4027711c22b3865e78dc9076335c03fcf318a6736159bf07e2a923125"}, + {file = "spacy-3.7.4-cp310-cp310-win_amd64.whl", hash = "sha256:787ce42a837f7edfbd4185356eea893a81b7dd75743d0047f2b9bf179775f970"}, + {file = "spacy-3.7.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e82b9da21853d4aee46811804dc7e136895f087fda25c7585172d95eb9b70833"}, + {file = "spacy-3.7.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07ffedf51899441070fb70432f8f873696f39e0e31c9ce7403101c459f8a1281"}, + {file = "spacy-3.7.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ba57bcc111eca7b086ee33a9636df775cfd4b14302f7d0ffbc11e95ac0fb3f0e"}, + {file = "spacy-3.7.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7580d1565f4d1ccbee9a18531f993a5b9b37ced96f145153dd4e98ceec607a55"}, + {file = "spacy-3.7.4-cp311-cp311-win_amd64.whl", hash = "sha256:df99c6f0085b1ec8e88beb5fd96d4371cef6fc19c202c41fc4fadc2afd55a157"}, + {file = "spacy-3.7.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b982ebab417189346acb4722637c573830d62e157ba336c3eb6c417249344be1"}, + {file = "spacy-3.7.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e7c29e152d8ea060af60da9410fa8ef038f3c9068a206905ee5c704de78f6e87"}, + {file = "spacy-3.7.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:023c9a008328f55c4717c56c4f8a28073b9961547f7d38a9405c967a52e66d59"}, + {file = "spacy-3.7.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1969d3d0fd0c811b7485438460f0ae8cfe16d46b54bcb8d1c26e70914e67e3d"}, + {file = "spacy-3.7.4-cp312-cp312-win_amd64.whl", hash = "sha256:040f7df5096c817450820eaaa426d54ed266254d16974e9a707a32f5b0f139ae"}, + {file = "spacy-3.7.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6757e8fbfd35dc0ed830296d5756f46d5b8d4b0353925dbe2f9aa33b82c5308"}, + {file = "spacy-3.7.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c500c1bad9e0488814a75077089aeef64a6b520ae8131578f266a08168106fa3"}, + {file = "spacy-3.7.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c992e2c5c0cd06c7f3e74fe8d758885117090013931c7938277d1421660bf71f"}, + {file = "spacy-3.7.4-cp37-cp37m-win_amd64.whl", hash = "sha256:2463c56ab1378f2b9a675340a2e3dfb618989d0da8cdce06429bc9b1dad4f294"}, + {file = "spacy-3.7.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b43e92edfa99f34dbb9dd30175f41158d20945e3179055d0071fee19394add96"}, + {file = "spacy-3.7.4-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:c26a81d33c93e4a8e3360d61dcce0802fb886de79f666a487ea5abbd3ce4b30b"}, + {file = "spacy-3.7.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d7910ca7a91bf423febd8a9a10ca6a4cfcb5c99abdec79df1eb7b67ea3e3c90"}, + {file = "spacy-3.7.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b16768b9e5c350b8a383a6bd84cd0481ccdf10ae6231f568598890638065f69"}, + {file = "spacy-3.7.4-cp38-cp38-win_amd64.whl", hash = "sha256:ed99fb176979b1e3cf6830161f8e881beae54e80147b05fca31d9a67cb12fbca"}, + {file = "spacy-3.7.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ca8112330982dbeef125cc5eb40e0349493055835a0ebe29028a0953a25d8522"}, + {file = "spacy-3.7.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:977f37493d7cf0b5dca155f0450d47890378703283c29919cdcc220db994a775"}, + {file = "spacy-3.7.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ad5e931c294d100ec3edb40e40f2722ef505cea16312839dd6467e81d665740"}, + {file = "spacy-3.7.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11ebf6054cd3ec3638801d7ff9b709e32fb9c15512b347b489bfe2ccb1102c9f"}, + {file = "spacy-3.7.4-cp39-cp39-win_amd64.whl", hash = "sha256:f5b930753027ac599f70bb7e77d6a2256191fe582e6f3f0cd624d88f6c279fa4"}, + {file = "spacy-3.7.4.tar.gz", hash = "sha256:525f2ced2e40761562c8cace93ef6a1e6e8c483f27bd564bc1b15f608efbe85b"}, +] + +[package.dependencies] +catalogue = ">=2.0.6,<2.1.0" +cymem = ">=2.0.2,<2.1.0" +jinja2 = "*" +langcodes = ">=3.2.0,<4.0.0" +murmurhash = ">=0.28.0,<1.1.0" +numpy = {version = ">=1.19.0", markers = "python_version >= \"3.9\""} +packaging = ">=20.0" +preshed = ">=3.0.2,<3.1.0" +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<3.0.0" +requests = ">=2.13.0,<3.0.0" +setuptools = "*" +smart-open = ">=5.2.1,<7.0.0" +spacy-legacy = ">=3.0.11,<3.1.0" +spacy-loggers = ">=1.0.0,<2.0.0" +srsly = ">=2.4.3,<3.0.0" +thinc = ">=8.2.2,<8.3.0" +tqdm = ">=4.38.0,<5.0.0" +typer = ">=0.3.0,<0.10.0" 
+wasabi = ">=0.9.1,<1.2.0" +weasel = ">=0.1.0,<0.4.0" + +[package.extras] +apple = ["thinc-apple-ops (>=0.1.0.dev0,<1.0.0)"] +cuda = ["cupy (>=5.0.0b4,<13.0.0)"] +cuda-autodetect = ["cupy-wheel (>=11.0.0,<13.0.0)"] +cuda100 = ["cupy-cuda100 (>=5.0.0b4,<13.0.0)"] +cuda101 = ["cupy-cuda101 (>=5.0.0b4,<13.0.0)"] +cuda102 = ["cupy-cuda102 (>=5.0.0b4,<13.0.0)"] +cuda110 = ["cupy-cuda110 (>=5.0.0b4,<13.0.0)"] +cuda111 = ["cupy-cuda111 (>=5.0.0b4,<13.0.0)"] +cuda112 = ["cupy-cuda112 (>=5.0.0b4,<13.0.0)"] +cuda113 = ["cupy-cuda113 (>=5.0.0b4,<13.0.0)"] +cuda114 = ["cupy-cuda114 (>=5.0.0b4,<13.0.0)"] +cuda115 = ["cupy-cuda115 (>=5.0.0b4,<13.0.0)"] +cuda116 = ["cupy-cuda116 (>=5.0.0b4,<13.0.0)"] +cuda117 = ["cupy-cuda117 (>=5.0.0b4,<13.0.0)"] +cuda11x = ["cupy-cuda11x (>=11.0.0,<13.0.0)"] +cuda12x = ["cupy-cuda12x (>=11.5.0,<13.0.0)"] +cuda80 = ["cupy-cuda80 (>=5.0.0b4,<13.0.0)"] +cuda90 = ["cupy-cuda90 (>=5.0.0b4,<13.0.0)"] +cuda91 = ["cupy-cuda91 (>=5.0.0b4,<13.0.0)"] +cuda92 = ["cupy-cuda92 (>=5.0.0b4,<13.0.0)"] +ja = ["sudachidict-core (>=20211220)", "sudachipy (>=0.5.2,!=0.6.1)"] +ko = ["natto-py (>=0.9.0)"] +lookups = ["spacy-lookups-data (>=1.0.3,<1.1.0)"] +th = ["pythainlp (>=2.0)"] +transformers = ["spacy-transformers (>=1.1.2,<1.4.0)"] + +[[package]] +name = "spacy-legacy" +version = "3.0.12" +description = "Legacy registered functions for spaCy backwards compatibility" +optional = false +python-versions = ">=3.6" +files = [ + {file = "spacy-legacy-3.0.12.tar.gz", hash = "sha256:b37d6e0c9b6e1d7ca1cf5bc7152ab64a4c4671f59c85adaf7a3fcb870357a774"}, + {file = "spacy_legacy-3.0.12-py2.py3-none-any.whl", hash = "sha256:476e3bd0d05f8c339ed60f40986c07387c0a71479245d6d0f4298dbd52cda55f"}, +] + +[[package]] +name = "spacy-loggers" +version = "1.0.5" +description = "Logging utilities for SpaCy" +optional = false +python-versions = ">=3.6" +files = [ + {file = "spacy-loggers-1.0.5.tar.gz", hash = "sha256:d60b0bdbf915a60e516cc2e653baeff946f0cfc461b452d11a4d5458c6fe5f24"}, + 
{file = "spacy_loggers-1.0.5-py3-none-any.whl", hash = "sha256:196284c9c446cc0cdb944005384270d775fdeaf4f494d8e269466cfa497ef645"}, +] + +[[package]] name = "sqlalchemy" version = "2.0.25" description = "Database Abstraction Library" @@ -4037,6 +4961,52 @@ pymysql = ["pymysql"] sqlcipher = ["sqlcipher3_binary"] [[package]] +name = "srsly" +version = "2.4.8" +description = "Modern high-performance serialization utilities for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "srsly-2.4.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:17f3bcb418bb4cf443ed3d4dcb210e491bd9c1b7b0185e6ab10b6af3271e63b2"}, + {file = "srsly-2.4.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0b070a58e21ab0e878fd949f932385abb4c53dd0acb6d3a7ee75d95d447bc609"}, + {file = "srsly-2.4.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98286d20014ed2067ad02b0be1e17c7e522255b188346e79ff266af51a54eb33"}, + {file = "srsly-2.4.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18685084e2e0cc47c25158cbbf3e44690e494ef77d6418c2aae0598c893f35b0"}, + {file = "srsly-2.4.8-cp310-cp310-win_amd64.whl", hash = "sha256:980a179cbf4eb5bc56f7507e53f76720d031bcf0cef52cd53c815720eb2fc30c"}, + {file = "srsly-2.4.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5472ed9f581e10c32e79424c996cf54c46c42237759f4224806a0cd4bb770993"}, + {file = "srsly-2.4.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:50f10afe9230072c5aad9f6636115ea99b32c102f4c61e8236d8642c73ec7a13"}, + {file = "srsly-2.4.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c994a89ba247a4d4f63ef9fdefb93aa3e1f98740e4800d5351ebd56992ac75e3"}, + {file = "srsly-2.4.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace7ed4a0c20fa54d90032be32f9c656b6d75445168da78d14fe9080a0c208ad"}, + {file = "srsly-2.4.8-cp311-cp311-win_amd64.whl", hash = 
"sha256:7a919236a090fb93081fbd1cec030f675910f3863825b34a9afbcae71f643127"}, + {file = "srsly-2.4.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7583c03d114b4478b7a357a1915305163e9eac2dfe080da900555c975cca2a11"}, + {file = "srsly-2.4.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:94ccdd2f6db824c31266aaf93e0f31c1c43b8bc531cd2b3a1d924e3c26a4f294"}, + {file = "srsly-2.4.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db72d2974f91aee652d606c7def98744ca6b899bd7dd3009fd75ebe0b5a51034"}, + {file = "srsly-2.4.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a60c905fd2c15e848ce1fc315fd34d8a9cc72c1dee022a0d8f4c62991131307"}, + {file = "srsly-2.4.8-cp312-cp312-win_amd64.whl", hash = "sha256:e0b8d5722057000694edf105b8f492e7eb2f3aa6247a5f0c9170d1e0d074151c"}, + {file = "srsly-2.4.8-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:196b4261f9d6372d1d3d16d1216b90c7e370b4141471322777b7b3c39afd1210"}, + {file = "srsly-2.4.8-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4750017e6d78590b02b12653e97edd25aefa4734281386cc27501d59b7481e4e"}, + {file = "srsly-2.4.8-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa034cd582ba9e4a120c8f19efa263fcad0f10fc481e73fb8c0d603085f941c4"}, + {file = "srsly-2.4.8-cp36-cp36m-win_amd64.whl", hash = "sha256:5a78ab9e9d177ee8731e950feb48c57380036d462b49e3fb61a67ce529ff5f60"}, + {file = "srsly-2.4.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:087e36439af517e259843df93eb34bb9e2d2881c34fa0f541589bcfbc757be97"}, + {file = "srsly-2.4.8-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad141d8a130cb085a0ed3a6638b643e2b591cb98a4591996780597a632acfe20"}, + {file = "srsly-2.4.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24d05367b2571c0d08d00459636b951e3ca2a1e9216318c157331f09c33489d3"}, + {file = "srsly-2.4.8-cp37-cp37m-win_amd64.whl", hash = 
"sha256:3fd661a1c4848deea2849b78f432a70c75d10968e902ca83c07c89c9b7050ab8"}, + {file = "srsly-2.4.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ec37233fe39af97b00bf20dc2ceda04d39b9ea19ce0ee605e16ece9785e11f65"}, + {file = "srsly-2.4.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d2fd4bc081f1d6a6063396b6d97b00d98e86d9d3a3ac2949dba574a84e148080"}, + {file = "srsly-2.4.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7347cff1eb4ef3fc335d9d4acc89588051b2df43799e5d944696ef43da79c873"}, + {file = "srsly-2.4.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a9dc1da5cc94d77056b91ba38365c72ae08556b6345bef06257c7e9eccabafe"}, + {file = "srsly-2.4.8-cp38-cp38-win_amd64.whl", hash = "sha256:dc0bf7b6f23c9ecb49ec0924dc645620276b41e160e9b283ed44ca004c060d79"}, + {file = "srsly-2.4.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ff8df21d00d73c371bead542cefef365ee87ca3a5660de292444021ff84e3b8c"}, + {file = "srsly-2.4.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ac3e340e65a9fe265105705586aa56054dc3902789fcb9a8f860a218d6c0a00"}, + {file = "srsly-2.4.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06d1733f4275eff4448e96521cc7dcd8fdabd68ba9b54ca012dcfa2690db2644"}, + {file = "srsly-2.4.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be5b751ad88fdb58fb73871d456248c88204f213aaa3c9aab49b6a1802b3fa8d"}, + {file = "srsly-2.4.8-cp39-cp39-win_amd64.whl", hash = "sha256:822a38b8cf112348f3accbc73274a94b7bf82515cb14a85ba586d126a5a72851"}, + {file = "srsly-2.4.8.tar.gz", hash = "sha256:b24d95a65009c2447e0b49cda043ac53fecf4f09e358d87a57446458f91b8a91"}, +] + +[package.dependencies] +catalogue = ">=2.0.3,<2.1.0" + +[[package]] name = "stack-data" version = "0.6.3" description = "Extract data from python stack frames and tracebacks for informative displays" @@ -4115,6 +5085,88 @@ files = [ doc = ["reno", "sphinx", "tornado (>=4.5)"] [[package]] +name = "thinc" 
+version = "8.2.3" +description = "A refreshing functional take on deep learning, compatible with your favorite libraries" +optional = false +python-versions = ">=3.6" +files = [ + {file = "thinc-8.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:27950dc8a14e1ead09dec329ad98edf1b8f7cc71ec9d5ce5f301073de9d7dadf"}, + {file = "thinc-8.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fce09571619f344983f915f5deb5b8346304b56d3a9ae1bc5ac8c5872eee0738"}, + {file = "thinc-8.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce0fb4e534c978ff4b429678ab28db2f81503549f97ed61b2b752c07c08b2083"}, + {file = "thinc-8.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:607223c178ae5fba36a3b35fa82d94a453694551bcfbe7f9ac04a01a9e87ebad"}, + {file = "thinc-8.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:53b48a6ae43b0e4054816a378163237b1d2120a49c71994682037437d64b7f84"}, + {file = "thinc-8.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9db67f460dae2e3aada1ff166394ce13c2dabb4db93d6bd79cd256f5beab9599"}, + {file = "thinc-8.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d57bdf43e0acd1406d681bf988179f677cf1b385c86f744bf314d827383ce31"}, + {file = "thinc-8.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78311a593b8bf3f03af52bf71d6b364463c598f3540ea8387c00017d2a0e0a5d"}, + {file = "thinc-8.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9489ae7fec427064a50a0c3e7c661a95251756032e31316add2c8c13f98f93c"}, + {file = "thinc-8.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:d0bf3840d434e3dbdf294643e6d54d2042d0e652abc68dee16673f28269fc456"}, + {file = "thinc-8.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bb7c64d0cb8066c47af9441cd611e89a0e2b28b85f2fffbdec791724c81e1915"}, + {file = "thinc-8.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c11ab3236e56311568f1e84099bfbeea3a4ee2434758a32982b224ddf8bad9c5"}, + {file = 
"thinc-8.2.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0a7f29ad534b6e761ee24d0c9e7402447e8ed4e772922795f77c98d88d7f99c"}, + {file = "thinc-8.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2817bde75c92f98fee747efdbebca68d16158b808401c5a922ba54a5f2619e9b"}, + {file = "thinc-8.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:a336f8cae7374d1768a52e63a5084a1208e30b8761eede113d2703e43e7839f1"}, + {file = "thinc-8.2.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:45c1a2880329eae53da1d77a4898b7fd30faad445b28fdf92c5557dbf6492ff0"}, + {file = "thinc-8.2.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c899b25442ed915bc77fa4cf07e908dea1bccab7c4b8d854cc0b261026d6a06"}, + {file = "thinc-8.2.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83a6b46d5f0accf0c2b2e5ff05b1bffd4d99721513b6d0374574009b0aab292c"}, + {file = "thinc-8.2.3-cp36-cp36m-win_amd64.whl", hash = "sha256:9a29a9ca7a5060c923866f16ba7823a4540cfd708eafa7202ee89ac029e0b78b"}, + {file = "thinc-8.2.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bd25b781faae71c52ba053157ab1865f4163be1a6485e70a007855a037ba060f"}, + {file = "thinc-8.2.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f01a7107c36c4fc60b60fdbda30d76a0ac9bc8f4f9c7f6872db62250e2f836a5"}, + {file = "thinc-8.2.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa65182424efda03be9359c3540928bf2985792f89826a76ee475c7c6b2ec64f"}, + {file = "thinc-8.2.3-cp37-cp37m-win_amd64.whl", hash = "sha256:4d448c8a870f594125cbfadc91024ce67683eae5698207101d2ea4793ab222a1"}, + {file = "thinc-8.2.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97605421b898441733fda24c6dda74a85325fbeebc808176857b0a8e6e7a9d47"}, + {file = "thinc-8.2.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8b0309d14bcfdad24b1e8bb87f8b245acfd7eb5305be466c284c788adf026ffa"}, + {file = 
"thinc-8.2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aead20abe233adade3c37daeb9d08e5429dfcada81856b1f2b1b7e4a67a671a0"}, + {file = "thinc-8.2.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:324e5d2c98f787d82d239cf33cee425e1c11e34a3c96cb3f4e1ee5661abef50c"}, + {file = "thinc-8.2.3-cp38-cp38-win_amd64.whl", hash = "sha256:45e6416e56d5101d0557e31cd06235d80fc89e9ac455ef1b444c440cb3c1ce64"}, + {file = "thinc-8.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5e6ebf63a185d7691b38655a184e30554fbe589805a802d97230eed07af8ea39"}, + {file = "thinc-8.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4d29ee871cfd0d40f4a0436e154640c0965b163b91a088a85bcd5658c1cc3ed4"}, + {file = "thinc-8.2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8709d114131680bc7c02b0c97817bd7692eda50beb7849c7908666cf15a6cfd"}, + {file = "thinc-8.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9b81e3c1e89c8ed6dff5a8440f584cda623ec77a3bd8c0ed059936405b8a7ca"}, + {file = "thinc-8.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:1df983af74952d4818703e6bac8af64fad338eaaef8b017fa05d372e3c68e577"}, + {file = "thinc-8.2.3.tar.gz", hash = "sha256:f5afc5222912a80bda8bdcec958362a2ba538d7027dc8db6154845d2859dca76"}, +] + +[package.dependencies] +blis = ">=0.7.8,<0.8.0" +catalogue = ">=2.0.4,<2.1.0" +confection = ">=0.0.1,<1.0.0" +cymem = ">=2.0.2,<2.1.0" +murmurhash = ">=1.0.2,<1.1.0" +numpy = {version = ">=1.19.0", markers = "python_version >= \"3.9\""} +packaging = ">=20.0" +preshed = ">=3.0.2,<3.1.0" +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<3.0.0" +setuptools = "*" +srsly = ">=2.4.0,<3.0.0" +wasabi = ">=0.8.1,<1.2.0" + +[package.extras] +cuda = ["cupy (>=5.0.0b4)"] +cuda-autodetect = ["cupy-wheel (>=11.0.0)"] +cuda100 = ["cupy-cuda100 (>=5.0.0b4)"] +cuda101 = ["cupy-cuda101 (>=5.0.0b4)"] +cuda102 = ["cupy-cuda102 (>=5.0.0b4)"] +cuda110 = ["cupy-cuda110 (>=5.0.0b4)"] +cuda111 
= ["cupy-cuda111 (>=5.0.0b4)"] +cuda112 = ["cupy-cuda112 (>=5.0.0b4)"] +cuda113 = ["cupy-cuda113 (>=5.0.0b4)"] +cuda114 = ["cupy-cuda114 (>=5.0.0b4)"] +cuda115 = ["cupy-cuda115 (>=5.0.0b4)"] +cuda116 = ["cupy-cuda116 (>=5.0.0b4)"] +cuda117 = ["cupy-cuda117 (>=5.0.0b4)"] +cuda11x = ["cupy-cuda11x (>=11.0.0)"] +cuda12x = ["cupy-cuda12x (>=11.5.0)"] +cuda80 = ["cupy-cuda80 (>=5.0.0b4)"] +cuda90 = ["cupy-cuda90 (>=5.0.0b4)"] +cuda91 = ["cupy-cuda91 (>=5.0.0b4)"] +cuda92 = ["cupy-cuda92 (>=5.0.0b4)"] +datasets = ["ml-datasets (>=0.2.0,<0.3.0)"] +mxnet = ["mxnet (>=1.5.1,<1.6.0)"] +tensorflow = ["tensorflow (>=2.0.0,<2.6.0)"] +torch = ["torch (>=1.6.0)"] + +[[package]] name = "tiktoken" version = "0.5.2" description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" @@ -4443,6 +5495,17 @@ files = [ ] [[package]] +name = "uritemplate" +version = "4.1.1" +description = "Implementation of RFC 6570 URI Templates" +optional = false +python-versions = ">=3.6" +files = [ + {file = "uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e"}, + {file = "uritemplate-4.1.1.tar.gz", hash = "sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0"}, +] + +[[package]] name = "urllib3" version = "1.26.18" description = "HTTP library with thread-safe connection pooling, file post, and more." 
@@ -4552,6 +5615,20 @@ docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx- test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [[package]] +name = "wasabi" +version = "1.1.2" +description = "A lightweight console printing and formatting toolkit" +optional = false +python-versions = ">=3.6" +files = [ + {file = "wasabi-1.1.2-py3-none-any.whl", hash = "sha256:0a3f933c4bf0ed3f93071132c1b87549733256d6c8de6473c5f7ed2e171b5cf9"}, + {file = "wasabi-1.1.2.tar.gz", hash = "sha256:1aaef3aceaa32edb9c91330d29d3936c0c39fdb965743549c173cb54b16c30b5"}, +] + +[package.dependencies] +colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\" and python_version >= \"3.7\""} + +[[package]] name = "watchdog" version = "3.0.0" description = "Filesystem events monitoring" @@ -4689,6 +5766,28 @@ files = [ ] [[package]] +name = "weasel" +version = "0.3.4" +description = "Weasel: A small and easy workflow system" +optional = false +python-versions = ">=3.6" +files = [ + {file = "weasel-0.3.4-py3-none-any.whl", hash = "sha256:ee48a944f051d007201c2ea1661d0c41035028c5d5a8bcb29a0b10f1100206ae"}, + {file = "weasel-0.3.4.tar.gz", hash = "sha256:eb16f92dc9f1a3ffa89c165e3a9acd28018ebb656e0da4da02c0d7d8ae3f6178"}, +] + +[package.dependencies] +cloudpathlib = ">=0.7.0,<0.17.0" +confection = ">=0.0.4,<0.2.0" +packaging = ">=20.0" +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<3.0.0" +requests = ">=2.13.0,<3.0.0" +smart-open = ">=5.2.1,<7.0.0" +srsly = ">=2.4.3,<3.0.0" +typer = ">=0.3.0,<0.10.0" +wasabi = ">=0.9.1,<1.2.0" + +[[package]] name = "webdriver-manager" version = "4.0.1" description = "Library provides the way to automatically manage drivers for different browsers" @@ -5018,4 
+6117,4 @@ benchmark = ["agbenchmark"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "ffc627b17d9f92302ebc7a92088f839cfeb85830f412ca950dd1cbf3c09860e8" +content-hash = "d88392928c5d8009bd9eafd9d2ccd8c810e63928c780cfa9d88e2ce3ff659c1f" diff --git a/autogpts/forge/prompt_settings.yaml b/autogpts/forge/prompt_settings.yaml new file mode 100644 index 000000000..40ae1f8be --- /dev/null +++ b/autogpts/forge/prompt_settings.yaml @@ -0,0 +1,15 @@ +constraints: [ + 'Exclusively use the commands listed below.', + 'You can only act proactively, and are unable to start background jobs or set up webhooks for yourself. Take this into account when planning your actions.', + 'You are unable to interact with physical objects. If this is absolutely necessary to fulfill a task or objective or to complete a step, you must ask the user to do it for you. If the user refuses this, and there is no other way to achieve your goals, you must terminate to avoid wasting time and energy.' +] +resources: [ + 'You are a Large Language Model, trained on millions of pages of text, including a lot of factual knowledge. Make use of this factual knowledge to avoid unnecessary gathering of information.' +] +best_practices: [ + 'Continuously review and analyze your actions to ensure you are performing to the best of your abilities.', + 'Constructively self-criticize your big-picture behavior constantly.', + 'Reflect on past decisions and strategies to refine your approach.', + 'Every command has a cost, so be smart and efficient. Aim to complete tasks in the least number of steps.', + 'Only make use of your information gathering abilities to find information that you don''t yet have knowledge of.' 
+] diff --git a/autogpts/forge/pyproject.toml b/autogpts/forge/pyproject.toml index bd5575304..1c8796943 100644 --- a/autogpts/forge/pyproject.toml +++ b/autogpts/forge/pyproject.toml @@ -2,32 +2,54 @@ name = "AutoGPT-Forge" version = "0.1.0" description = "" -authors = ["Craig Swift <craigswift13@gmail.com>"] +authors = ["AutoGPT <support@agpt.co>"] license = "MIT" readme = "README.md" packages = [{ include = "forge" }] [tool.poetry.dependencies] python = "^3.10" -python-dotenv = "^1.0.0" -openai = "^1.7.2" -tenacity = "^8.2.2" -sqlalchemy = "^2.0.19" +agbenchmark = { path = "../../benchmark", optional = true } +# agbenchmark = {git = "https://github.com/Significant-Gravitas/AutoGPT.git", subdirectory = "benchmark", optional = true} aiohttp = "^3.8.5" -colorlog = "^6.7.0" +anthropic = "^0.25.1" +beautifulsoup4 = "^4.12.2" +boto3 = "^1.33.6" +bs4 = "^0.0.1" +charset-normalizer = "^3.1.0" chromadb = "^0.4.10" -python-multipart = "^0.0.7" -toml = "^0.10.2" +click = "*" +colorama = "^0.4.6" +demjson3 = "^3.0.0" +docker = "*" +duckduckgo-search = "^5.0.0" +fastapi = "^0.109.1" +gitpython = "^3.1.32" +google-api-python-client = "*" +google-cloud-storage = "^2.13.0" jinja2 = "^3.1.2" -uvicorn = "^0.23.2" +jsonschema = "*" litellm = "^1.17.9" -duckduckgo-search = "^5.0.0" +openai = "^1.7.2" +Pillow = "*" +playsound = "~1.2.2" +pydantic = "*" +python-docx = "*" +python-dotenv = "^1.0.0" +python-multipart = "^0.0.7" +pylatexenc = "*" +pypdf = "^3.1.0" +pyyaml = "^6.0" +requests = "*" selenium = "^4.13.0" -bs4 = "^0.0.1" -agbenchmark = { path = "../../benchmark", optional = true } -# agbenchmark = {git = "https://github.com/Significant-Gravitas/AutoGPT.git", subdirectory = "benchmark", optional = true} +sqlalchemy = "^2.0.19" +sentry-sdk = "^1.40.4" +spacy = "^3.0.0" +tenacity = "^8.2.2" +tiktoken = "^0.5.0" +toml = "^0.10.2" +uvicorn = "^0.23.2" webdriver-manager = "^4.0.1" -google-cloud-storage = "^2.13.0" [tool.poetry.extras] benchmark = ["agbenchmark"] @@ -55,7 +77,7 @@ 
build-backend = "poetry.core.masonry.api" line-length = 88 target-version = ['py310'] include = '\.pyi?$' -packages = ["autogpt"] +packages = ["forge"] extend-exclude = '(/dist|/.venv|/venv|/build|/agent|agbenchmark/challenges)/' [tool.isort] |