Skip to content

Commit 592ecf0

Browse files
committed
Remove deprecated ensure_search_index_exists function
The VectorStore adapter creates the index automatically when needed, making this function unnecessary. Removed all calls to it from:
- API server startup (main.py)
- MCP server startup (mcp.py)
- CLI commands (cli.py)
- Long-term memory compaction (long_term_memory.py)

Updated the rebuild_index CLI command to use the vectorstore adapter directly. Updated tests to match the new behavior.
1 parent 412ed74 commit 592ecf0

File tree

7 files changed

+58
-144
lines changed

7 files changed

+58
-144
lines changed

agent_memory_server/cli.py

Lines changed: 23 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121
migrate_add_memory_hashes_1,
2222
migrate_add_memory_type_3,
2323
)
24-
from agent_memory_server.utils.redis import ensure_search_index_exists, get_redis_conn
24+
from agent_memory_server.utils.redis import get_redis_conn
2525

2626

2727
logger = get_logger(__name__)
@@ -46,11 +46,26 @@ def rebuild_index():
4646
"""Rebuild the search index."""
4747
import asyncio
4848

49+
from agent_memory_server.vectorstore_adapter import RedisVectorStoreAdapter
50+
from agent_memory_server.vectorstore_factory import get_vectorstore_adapter
51+
4952
configure_logging()
5053

5154
async def setup_and_run():
52-
redis = await get_redis_conn()
53-
await ensure_search_index_exists(redis, overwrite=True)
55+
# Get the vectorstore adapter
56+
adapter = await get_vectorstore_adapter()
57+
58+
# Only Redis adapter supports index rebuilding
59+
if isinstance(adapter, RedisVectorStoreAdapter):
60+
index = adapter.vectorstore.index
61+
logger.info(f"Dropping and recreating index '{index.name}'")
62+
index.create(overwrite=True)
63+
logger.info("Index rebuilt successfully")
64+
else:
65+
logger.error(
66+
"Index rebuilding is only supported for Redis vectorstore. "
67+
"Current vectorstore does not support this operation."
68+
)
5469

5570
asyncio.run(setup_and_run())
5671

@@ -200,8 +215,8 @@ def schedule_task(task_path: str, args: list[str]):
200215
sys.exit(1)
201216

202217
async def setup_and_run_task():
203-
redis = await get_redis_conn()
204-
await ensure_search_index_exists(redis)
218+
# Initialize Redis connection
219+
await get_redis_conn()
205220

206221
# Import the task function
207222
module_path, function_name = task_path.rsplit(".", 1)
@@ -269,14 +284,10 @@ async def _ensure_stream_and_group():
269284
raise
270285

271286
async def _run_worker():
272-
# Ensure Redis stream/consumer group and search index exist before starting worker
287+
# Ensure Redis stream/consumer group exists before starting worker
288+
# Index will be created automatically when needed
273289
await _ensure_stream_and_group()
274-
try:
275-
redis = await get_redis_conn()
276-
# Don't overwrite if an index already exists; just ensure it's present
277-
await ensure_search_index_exists(redis, overwrite=False)
278-
except Exception as e:
279-
logger.warning(f"Failed to ensure search index exists: {e}")
290+
await get_redis_conn()
280291
await Worker.run(
281292
docket_name=settings.docket_name,
282293
url=settings.redis_url,

agent_memory_server/long_term_memory.py

Lines changed: 3 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -49,10 +49,7 @@
4949
rerank_with_recency,
5050
update_memory_hash_if_text_changed,
5151
)
52-
from agent_memory_server.utils.redis import (
53-
ensure_search_index_exists,
54-
get_redis_conn,
55-
)
52+
from agent_memory_server.utils.redis import get_redis_conn
5653
from agent_memory_server.vectorstore_factory import get_vectorstore_adapter
5754

5855

@@ -614,18 +611,8 @@ async def compact_long_term_memories(
614611
index_name = Keys.search_index_name()
615612
logger.info(f"Using index '{index_name}' for semantic duplicate compaction.")
616613

617-
# Check if the index exists before proceeding
618-
try:
619-
await redis_client.execute_command(f"FT.INFO {index_name}")
620-
except Exception as info_e:
621-
if "unknown index name" in str(info_e).lower():
622-
logger.info(f"Search index {index_name} doesn't exist, creating it")
623-
# Ensure 'get_search_index' is called with the correct name to create it if needed
624-
await ensure_search_index_exists(redis_client, index_name=index_name)
625-
else:
626-
logger.warning(
627-
f"Error checking index '{index_name}': {info_e} - attempting to proceed."
628-
)
614+
# Index will be created automatically when we add memories if it doesn't exist
615+
# No need to check or create it explicitly
629616

630617
# Get all memories using the vector store adapter
631618
try:

agent_memory_server/main.py

Lines changed: 3 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,6 @@
1414
from agent_memory_server.logging import get_logger
1515
from agent_memory_server.utils.redis import (
1616
_redis_pool as connection_pool,
17-
ensure_search_index_exists,
1817
get_redis_conn,
1918
)
2019

@@ -75,29 +74,10 @@ async def lifespan(app: FastAPI):
7574
"Long-term memory requires OpenAI for embeddings, but OpenAI API key is not set"
7675
)
7776

78-
# Set up RediSearch index if long-term memory is enabled
77+
# Set up Redis connection if long-term memory is enabled
78+
# The VectorStore adapter will create the index automatically when needed
7979
if settings.long_term_memory:
80-
redis = await get_redis_conn()
81-
82-
# Get embedding dimensions from model config
83-
embedding_model_config = MODEL_CONFIGS.get(settings.embedding_model)
84-
vector_dimensions = (
85-
str(embedding_model_config.embedding_dimensions)
86-
if embedding_model_config
87-
else "1536"
88-
)
89-
distance_metric = "COSINE"
90-
91-
try:
92-
await ensure_search_index_exists(
93-
redis,
94-
index_name=settings.redisvl_index_name,
95-
vector_dimensions=vector_dimensions,
96-
distance_metric=distance_metric,
97-
)
98-
except Exception as e:
99-
logger.error(f"Failed to ensure RediSearch index: {e}")
100-
raise
80+
await get_redis_conn()
10181

10282
# Initialize Docket for background tasks if enabled
10383
if settings.use_docket:

agent_memory_server/mcp.py

Lines changed: 8 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -158,14 +158,11 @@ async def call_tool(self, name, arguments):
158158
return await super().call_tool(name, arguments)
159159

160160
async def run_sse_async(self):
161-
"""Ensure Redis search index exists before starting SSE server."""
162-
from agent_memory_server.utils.redis import (
163-
ensure_search_index_exists,
164-
get_redis_conn,
165-
)
161+
"""Start SSE server. Index will be created automatically when needed."""
162+
from agent_memory_server.utils.redis import get_redis_conn
166163

167-
redis = await get_redis_conn()
168-
await ensure_search_index_exists(redis)
164+
# Initialize Redis connection
165+
await get_redis_conn()
169166

170167
# Run the SSE server using our custom implementation
171168
import uvicorn
@@ -176,14 +173,11 @@ async def run_sse_async(self):
176173
).serve()
177174

178175
async def run_stdio_async(self):
179-
"""Ensure Redis search index exists before starting STDIO MCP server."""
180-
from agent_memory_server.utils.redis import (
181-
ensure_search_index_exists,
182-
get_redis_conn,
183-
)
176+
"""Start STDIO MCP server. Index will be created automatically when needed."""
177+
from agent_memory_server.utils.redis import get_redis_conn
184178

185-
redis = await get_redis_conn()
186-
await ensure_search_index_exists(redis)
179+
# Initialize Redis connection
180+
await get_redis_conn()
187181
return await super().run_stdio_async()
188182

189183

agent_memory_server/utils/redis.py

Lines changed: 0 additions & 52 deletions
Original file line numberDiff line numberDiff line change
@@ -4,17 +4,12 @@
44
from typing import Any
55

66
from redis.asyncio import Redis
7-
from redis.exceptions import ResponseError
8-
from redisvl.index import AsyncSearchIndex
97

108
from agent_memory_server.config import settings
11-
from agent_memory_server.vectorstore_adapter import RedisVectorStoreAdapter
12-
from agent_memory_server.vectorstore_factory import get_vectorstore_adapter
139

1410

1511
logger = logging.getLogger(__name__)
1612
_redis_pool: Redis | None = None
17-
_index: AsyncSearchIndex | None = None
1813

1914

2015
async def get_redis_conn(url: str = settings.redis_url, **kwargs) -> Redis:
@@ -36,53 +31,6 @@ async def get_redis_conn(url: str = settings.redis_url, **kwargs) -> Redis:
3631
return _redis_pool
3732

3833

39-
async def ensure_search_index_exists(
40-
redis: Redis,
41-
index_name: str = settings.redisvl_index_name,
42-
vector_dimensions: str = settings.redisvl_vector_dimensions,
43-
distance_metric: str = settings.redisvl_distance_metric,
44-
overwrite: bool = True,
45-
) -> None:
46-
"""
47-
Ensure that the async search index exists, create it if it doesn't.
48-
This function is deprecated and only exists for compatibility.
49-
The VectorStore adapter now handles index creation automatically.
50-
51-
Args:
52-
redis: A Redis client instance
53-
vector_dimensions: Dimensions of the embedding vectors
54-
distance_metric: Distance metric to use (default: COSINE)
55-
index_name: The name of the index
56-
"""
57-
# If this is Redis, creating the adapter will create the index.
58-
adapter = await get_vectorstore_adapter()
59-
60-
if overwrite:
61-
if isinstance(adapter, RedisVectorStoreAdapter):
62-
index = adapter.vectorstore.index
63-
if index is not None:
64-
try:
65-
index.create(overwrite=True)
66-
except ResponseError as e:
67-
# Index already exists is not an error condition
68-
error_msg = str(e)
69-
if "Index already exists" in error_msg:
70-
logger.info(
71-
f"Index '{index.name}' already exists, skipping creation"
72-
)
73-
elif "no such index" in error_msg:
74-
# Index doesn't exist yet, create it without overwrite
75-
logger.info(f"Index '{index.name}' does not exist, creating it")
76-
index.create(overwrite=False)
77-
else:
78-
raise
79-
else:
80-
logger.warning(
81-
"Overwriting the search index is only supported for RedisVectorStoreAdapter. "
82-
"Consult your vector store's documentation to learn how to recreate the index."
83-
)
84-
85-
8634
def safe_get(doc: Any, key: str, default: Any | None = None) -> Any:
8735
"""Get a value from a Document, returning a default if the key is not present.
8836

tests/test_cli.py

Lines changed: 18 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -36,21 +36,30 @@ def test_version_command(self):
3636
class TestRebuildIndex:
3737
"""Tests for the rebuild_index command."""
3838

39-
@patch("agent_memory_server.cli.ensure_search_index_exists")
40-
@patch("agent_memory_server.cli.get_redis_conn")
41-
def test_rebuild_index_command(self, mock_get_redis_conn, mock_ensure_index):
39+
@patch("agent_memory_server.vectorstore_factory.get_vectorstore_adapter")
40+
def test_rebuild_index_command(self, mock_get_adapter):
4241
"""Test rebuild_index command execution."""
43-
# Use AsyncMock which returns completed awaitables
44-
mock_redis = Mock()
45-
mock_get_redis_conn.return_value = mock_redis
46-
mock_ensure_index.return_value = None
42+
from agent_memory_server.vectorstore_adapter import RedisVectorStoreAdapter
43+
44+
# Create a mock adapter with a mock index
45+
mock_index = Mock()
46+
mock_index.name = "test_index"
47+
mock_index.create = Mock()
48+
49+
mock_vectorstore = Mock()
50+
mock_vectorstore.index = mock_index
51+
52+
mock_adapter = Mock(spec=RedisVectorStoreAdapter)
53+
mock_adapter.vectorstore = mock_vectorstore
54+
55+
mock_get_adapter.return_value = mock_adapter
4756

4857
runner = CliRunner()
4958
result = runner.invoke(rebuild_index)
5059

5160
assert result.exit_code == 0
52-
mock_get_redis_conn.assert_called_once()
53-
mock_ensure_index.assert_called_once_with(mock_redis, overwrite=True)
61+
mock_get_adapter.assert_called_once()
62+
mock_index.create.assert_called_once_with(overwrite=True)
5463

5564

5665
class TestMigrateMemories:
@@ -440,7 +449,6 @@ def test_schedule_task_argument_parsing(self):
440449
class TestTaskWorker:
441450
"""Tests for the task_worker command."""
442451

443-
@patch("agent_memory_server.cli.ensure_search_index_exists")
444452
@patch("agent_memory_server.cli.get_redis_conn")
445453
@patch("docket.Worker.run")
446454
@patch("agent_memory_server.cli.settings")
@@ -449,7 +457,6 @@ def test_task_worker_success(
449457
mock_settings,
450458
mock_worker_run,
451459
mock_get_redis_conn,
452-
mock_ensure_index,
453460
redis_url,
454461
):
455462
"""Test successful task worker start."""
@@ -460,7 +467,6 @@ def test_task_worker_success(
460467
mock_worker_run.return_value = None
461468
mock_redis = AsyncMock()
462469
mock_get_redis_conn.return_value = mock_redis
463-
mock_ensure_index.return_value = None
464470

465471
runner = CliRunner()
466472
result = runner.invoke(
@@ -481,7 +487,6 @@ def test_task_worker_docket_disabled(self, mock_settings):
481487
assert result.exit_code == 1
482488
assert "Docket is disabled in settings" in result.output
483489

484-
@patch("agent_memory_server.cli.ensure_search_index_exists")
485490
@patch("agent_memory_server.cli.get_redis_conn")
486491
@patch("docket.Worker.run")
487492
@patch("agent_memory_server.cli.settings")
@@ -490,7 +495,6 @@ def test_task_worker_default_params(
490495
mock_settings,
491496
mock_worker_run,
492497
mock_get_redis_conn,
493-
mock_ensure_index,
494498
redis_url,
495499
):
496500
"""Test task worker with default parameters."""
@@ -501,7 +505,6 @@ def test_task_worker_default_params(
501505
mock_worker_run.return_value = None
502506
mock_redis = AsyncMock()
503507
mock_get_redis_conn.return_value = mock_redis
504-
mock_ensure_index.return_value = None
505508

506509
runner = CliRunner()
507510
result = runner.invoke(task_worker)

tests/test_memory_compaction.py

Lines changed: 3 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -126,10 +126,7 @@ async def test_hash_deduplication_integration(
126126
# Clear all data to ensure clean test environment
127127
await async_redis_client.flushdb()
128128

129-
# Ensure index exists after flush
130-
from agent_memory_server.utils.redis import ensure_search_index_exists
131-
132-
await ensure_search_index_exists(async_redis_client)
129+
# Index will be created automatically when we add memories
133130

134131
# Stub merge to return first memory unchanged
135132
async def dummy_merge(memories, llm_client=None):
@@ -229,10 +226,7 @@ async def test_semantic_deduplication_integration(
229226
# Clear all data to ensure clean test environment
230227
await async_redis_client.flushdb()
231228

232-
# Ensure index exists after flush
233-
from agent_memory_server.utils.redis import ensure_search_index_exists
234-
235-
await ensure_search_index_exists(async_redis_client)
229+
# Index will be created automatically when we add memories
236230

237231
# Stub merge to return first memory
238232
async def dummy_merge(memories, llm_client=None):
@@ -308,10 +302,7 @@ async def test_full_compaction_integration(
308302
# Clear all data to ensure clean test environment
309303
await async_redis_client.flushdb()
310304

311-
# Ensure index exists after flush
312-
from agent_memory_server.utils.redis import ensure_search_index_exists
313-
314-
await ensure_search_index_exists(async_redis_client)
305+
# Index will be created automatically when we add memories
315306

316307
async def dummy_merge(memories, llm_client=None):
317308
memory = memories[0]

Comments (0)