path: root/autogpt/memory/vector/providers/base.py
blob: aa233228f368f039152fe195f18241d747c3ad4c
import abc
import functools
from typing import MutableSet, Sequence

import numpy as np

from autogpt.config.config import Config
from autogpt.logs import logger

from .. import MemoryItem, MemoryItemRelevance
from ..utils import Embedding, get_embedding


class VectorMemoryProvider(MutableSet[MemoryItem]):
    @abc.abstractmethod
    def __init__(self, config: Config):
        pass

    def get(self, query: str, config: Config) -> MemoryItemRelevance | None:
        """
        Gets the memory item that is most relevant to the given query.

        Args:
            query: The query used to retrieve information.
            config: The config object.

        Returns:
            The most relevant memory, or None if the index is empty.
        """
        result = self.get_relevant(query, 1, config)
        return result[0] if result else None

    def get_relevant(
        self, query: str, k: int, config: Config
    ) -> Sequence[MemoryItemRelevance]:
        """
        Returns the top-k most relevant memories for the given query

        Args:
            query: the query to compare stored memories to
            k: the number of relevant memories to fetch
            config: The config object.

        Returns:
            list[MemoryItemRelevance] containing the top k most relevant memories
        """
        if len(self) < 1:
            return []

        logger.debug(
            f"Searching for {k} relevant memories for query '{query}'; "
            f"{len(self)} memories in index"
        )

        relevances = self.score_memories_for_relevance(query, config)
        logger.debug(f"Memory relevance scores: {[str(r) for r in relevances]}")

        # np.argsort sorts ascending; take the last k indices (highest scores) and reverse them
        top_k_indices = np.argsort([r.score for r in relevances])[-k:][::-1]

        return [relevances[i] for i in top_k_indices]

    def score_memories_for_relevance(
        self, for_query: str, config: Config
    ) -> Sequence[MemoryItemRelevance]:
        """
        Returns MemoryItemRelevance for every memory in the index.
        Implementations may override this function for performance purposes.
        """
        e_query: Embedding = get_embedding(for_query, config)
        return [m.relevance_for(for_query, e_query) for m in self]

    def get_stats(self) -> tuple[int, int]:
        """
        Returns:
            tuple (n_memories: int, n_chunks: int): the stats of the memory index
        """
        return len(self), functools.reduce(lambda t, m: t + len(m.e_chunks), self, 0)
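

# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the upstream module: a minimal provider
# backed by a plain list that implements only the MutableSet primitives and
# inherits get(), get_relevant(), and get_stats() from VectorMemoryProvider.
# The class name SimpleMemoryProvider is hypothetical; it relies only on the
# MemoryItem behaviour already used above (e_chunks, relevance_for).
class SimpleMemoryProvider(VectorMemoryProvider):
    def __init__(self, config: Config):
        self._items: list[MemoryItem] = []

    # The five abstract methods required by MutableSet:
    def __contains__(self, item: MemoryItem) -> bool:
        return item in self._items

    def __iter__(self):
        return iter(self._items)

    def __len__(self) -> int:
        return len(self._items)

    def add(self, item: MemoryItem) -> None:
        if item not in self._items:
            self._items.append(item)

    def discard(self, item: MemoryItem) -> None:
        if item in self._items:
            self._items.remove(item)


# Example usage (hypothetical):
#   memory = SimpleMemoryProvider(config)
#   memory.add(some_memory_item)
#   best = memory.get("what did I learn about X?", config)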