Merge remote-tracking branch 'upstream/master' into more_azure

This commit is contained in:
Peter Edwards
2023-04-13 08:12:29 +02:00
31 changed files with 773 additions and 158 deletions

View File

@@ -1,17 +1,23 @@
# The local (in-process JSON file) cache is a hard dependency and always available.
from memory.local import LocalCache
# List of supported memory backends
# Add a backend to this list if the import attempt is successful
supported_memory = ['local']
try:
    # Optional backend: only registered when the redis client package is installed.
    from memory.redismem import RedisMemory
    supported_memory.append('redis')
except ImportError:
    print("Redis not installed. Skipping import.")
    # Sentinel so later code can test `RedisMemory is None` instead of NameError.
    RedisMemory = None
try:
    # Optional backend: only registered when the pinecone package is installed.
    from memory.pinecone import PineconeMemory
    supported_memory.append('pinecone')
except ImportError:
    print("Pinecone not installed. Skipping import.")
    # Sentinel so later code can test `PineconeMemory is None` instead of NameError.
    PineconeMemory = None
def get_memory(cfg, init=False):
memory = None
if cfg.memory_backend == "pinecone":
@@ -35,6 +41,9 @@ def get_memory(cfg, init=False):
memory.clear()
return memory
def get_supported_memory_backends():
    """Return the names of the memory backends whose imports succeeded."""
    return supported_memory
__all__ = [
"get_memory",

View File

@@ -1,8 +1,8 @@
"""Base class for memory providers."""
import abc
from config import AbstractSingleton
from config import Config
from config import AbstractSingleton, Config
import openai
cfg = Config()
cfg = Config()

View File

@@ -28,10 +28,20 @@ class LocalCache(MemoryProviderSingleton):
def __init__(self, cfg) -> None:
    """Load (or initialize) the local JSON cache backing this memory provider.

    Args:
        cfg: configuration object; `cfg.memory_index` names the JSON file
             (``<memory_index>.json``) used to persist the cache.

    Reads the existing file if present; an empty file is seeded with ``{}``
    so it parses as an empty cache. Malformed JSON or a missing file falls
    back to a fresh in-memory `CacheContent` (with a warning in the
    missing-file case that nothing will be persisted).
    """
    self.filename = f"{cfg.memory_index}.json"
    if os.path.exists(self.filename):
        try:
            # BUG FIX: the previous code opened with 'w+b', which truncates
            # the file on open — every startup silently wiped the persisted
            # memory before reading it. Open read-only instead.
            with open(self.filename, 'rb') as f:
                file_content = f.read()
            if not file_content.strip():
                # Seed a blank/whitespace-only file with an empty JSON object
                # so it parses cleanly now and on future runs.
                file_content = b'{}'
                with open(self.filename, 'wb') as f:
                    f.write(file_content)
            loaded = orjson.loads(file_content)
            self.data = CacheContent(**loaded)
        except orjson.JSONDecodeError:
            print(f"Error: The file '{self.filename}' is not in JSON format.")
            self.data = CacheContent()
    else:
        print(f"Warning: The file '{self.filename}' does not exist. Local memory would not be saved to a file.")
        self.data = CacheContent()
def add(self, text: str):