diff options
Diffstat (limited to 'autogpts/autogpt/autogpt/commands/file_operations.py')
-rw-r--r-- | autogpts/autogpt/autogpt/commands/file_operations.py | 70 |
1 files changed, 26 insertions, 44 deletions
diff --git a/autogpts/autogpt/autogpt/commands/file_operations.py b/autogpts/autogpt/autogpt/commands/file_operations.py index 29935dc87..29c92b346 100644 --- a/autogpts/autogpt/autogpt/commands/file_operations.py +++ b/autogpts/autogpt/autogpt/commands/file_operations.py @@ -2,10 +2,6 @@ from __future__ import annotations -COMMAND_CATEGORY = "file_operations" -COMMAND_CATEGORY_TITLE = "File Operations" - -import contextlib import hashlib import logging import os @@ -17,11 +13,16 @@ from autogpt.agents.agent import Agent from autogpt.agents.utils.exceptions import DuplicateOperationError from autogpt.command_decorator import command from autogpt.core.utils.json_schema import JSONSchema -from autogpt.memory.vector import MemoryItem, VectorMemory +from autogpt.memory.vector import MemoryItemFactory, VectorMemory from .decorators import sanitize_path_arg +from .file_operations_utils import decode_textual_file + +COMMAND_CATEGORY = "file_operations" +COMMAND_CATEGORY_TITLE = "File Operations" + + from .file_context import open_file, open_folder # NOQA -from .file_operations_utils import read_textual_file logger = logging.getLogger(__name__) @@ -82,7 +83,7 @@ def file_operations_state(log_path: str | Path) -> dict[str, str]: return state -@sanitize_path_arg("file_path") +@sanitize_path_arg("file_path", make_relative=True) def is_duplicate_operation( operation: Operation, file_path: Path, agent: Agent, checksum: str | None = None ) -> bool: @@ -97,10 +98,6 @@ def is_duplicate_operation( Returns: True if the operation has already been performed on the file """ - # Make the file path into a relative path if possible - with contextlib.suppress(ValueError): - file_path = file_path.relative_to(agent.workspace.root) - state = file_operations_state(agent.file_manager.file_ops_log_path) if operation == "delete" and str(file_path) not in state: return True @@ -109,9 +106,12 @@ def is_duplicate_operation( return False -@sanitize_path_arg("file_path") 
+@sanitize_path_arg("file_path", make_relative=True) def log_operation( - operation: Operation, file_path: Path, agent: Agent, checksum: str | None = None + operation: Operation, + file_path: str | Path, + agent: Agent, + checksum: str | None = None, ) -> None: """Log the file operation to the file_logger.log @@ -120,10 +120,6 @@ def log_operation( file_path: The name of the file the operation was performed on checksum: The checksum of the contents to be written """ - # Make the file path into a relative path if possible - with contextlib.suppress(ValueError): - file_path = file_path.relative_to(agent.workspace.root) - log_entry = f"{operation}: {file_path}" if checksum is not None: log_entry += f" #{checksum}" @@ -144,8 +140,7 @@ def log_operation( ) }, ) -@sanitize_path_arg("filename") -def read_file(filename: Path, agent: Agent) -> str: +def read_file(filename: str | Path, agent: Agent) -> str: """Read a file and return the contents Args: @@ -154,8 +149,8 @@ def read_file(filename: Path, agent: Agent) -> str: Returns: str: The contents of the file """ - content = read_textual_file(filename, logger) - # TODO: content = agent.workspace.read_file(filename) + file = agent.workspace.open_file(filename, binary=True) + content = decode_textual_file(file, os.path.splitext(filename)[1], logger) # # TODO: invalidate/update memory when file is edited # file_memory = MemoryItem.from_text_file(content, str(filename), agent.config) @@ -182,13 +177,13 @@ def ingest_file( content = read_file(filename) # TODO: differentiate between different types of files - file_memory = MemoryItem.from_text_file(content, filename) + file_memory = MemoryItemFactory.from_text_file(content, filename) logger.debug(f"Created memory: {file_memory.dump(True)}") memory.add(file_memory) logger.info(f"Ingested {len(file_memory.e_chunks)} chunks from {filename}") except Exception as err: - logger.warn(f"Error while ingesting file '{filename}': {err}") + logger.warning(f"Error while ingesting file 
'{filename}': {err}") @command( @@ -208,8 +203,7 @@ def ingest_file( }, aliases=["create_file"], ) -@sanitize_path_arg("filename") -async def write_to_file(filename: Path, contents: str, agent: Agent) -> str: +async def write_to_file(filename: str | Path, contents: str, agent: Agent) -> str: """Write contents to a file Args: @@ -220,14 +214,14 @@ async def write_to_file(filename: Path, contents: str, agent: Agent) -> str: str: A message indicating success or failure """ checksum = text_checksum(contents) - if is_duplicate_operation("write", filename, agent, checksum): - raise DuplicateOperationError(f"File {filename.name} has already been updated.") + if is_duplicate_operation("write", Path(filename), agent, checksum): + raise DuplicateOperationError(f"File {filename} has already been updated.") - directory = os.path.dirname(filename) - os.makedirs(directory, exist_ok=True) + if directory := os.path.dirname(filename): + agent.workspace.get_path(directory).mkdir(exist_ok=True) await agent.workspace.write_file(filename, contents) log_operation("write", filename, agent, checksum) - return f"File {filename.name} has been written successfully." + return f"File {filename} has been written successfully." def append_to_file( @@ -262,8 +256,7 @@ def append_to_file( ) }, ) -@sanitize_path_arg("folder") -def list_folder(folder: Path, agent: Agent) -> list[str]: +def list_folder(folder: str | Path, agent: Agent) -> list[str]: """Lists files in a folder recursively Args: @@ -272,15 +265,4 @@ def list_folder(folder: Path, agent: Agent) -> list[str]: Returns: list[str]: A list of files found in the folder """ - found_files = [] - - for root, _, files in os.walk(folder): - for file in files: - if file.startswith("."): - continue - relative_path = os.path.relpath( - os.path.join(root, file), agent.workspace.root - ) - found_files.append(relative_path) - - return found_files + return [str(p) for p in agent.workspace.list(folder)] |