import json import logging from contextlib import contextmanager from typing import Any, List, Optional from pydantic import BaseModel # Try to import psycopg (psycopg3) first, then fall back to psycopg2 try: from psycopg.types.json import Json from psycopg_pool import ConnectionPool PSYCOPG_VERSION = 3 logger = logging.getLogger(__name__) logger.info("Using psycopg (psycopg3) with ConnectionPool for PostgreSQL connections") except ImportError: try: from psycopg2.extras import Json, execute_values from psycopg2.pool import ThreadedConnectionPool as ConnectionPool PSYCOPG_VERSION = 2 logger = logging.getLogger(__name__) logger.info("Using psycopg2 with ThreadedConnectionPool for PostgreSQL connections") except ImportError: raise ImportError( "Neither 'psycopg' nor 'psycopg2' library is available. " "Please install one of them using 'pip install psycopg[pool]' or 'pip install psycopg2'" ) from neomem.vector_stores.base import VectorStoreBase logger = logging.getLogger(__name__) class OutputData(BaseModel): id: Optional[str] score: Optional[float] payload: Optional[dict] class PGVector(VectorStoreBase): def __init__( self, dbname, collection_name, embedding_model_dims, user, password, host, port, diskann, hnsw, minconn=1, maxconn=5, sslmode=None, connection_string=None, connection_pool=None, ): """ Initialize the PGVector database. Args: dbname (str): Database name collection_name (str): Collection name embedding_model_dims (int): Dimension of the embedding vector user (str): Database user password (str): Database password host (str, optional): Database host port (int, optional): Database port diskann (bool, optional): Use DiskANN for faster search hnsw (bool, optional): Use HNSW for faster search minconn (int): Minimum number of connections to keep in the connection pool maxconn (int): Maximum number of connections allowed in the connection pool sslmode (str, optional): SSL mode for PostgreSQL connection (e.g., 'require', 'prefer', 'disable') connection_string (str, optional): PostgreSQL connection string (overrides individual connection parameters) connection_pool (Any, optional): psycopg2 connection pool object (overrides connection string and individual parameters) """ self.collection_name = collection_name self.use_diskann = diskann self.use_hnsw = hnsw self.embedding_model_dims = embedding_model_dims self.connection_pool = None # Connection setup with priority: connection_pool > connection_string > individual parameters if connection_pool is not None: # Use provided connection pool self.connection_pool = connection_pool elif connection_string: if sslmode: # Append sslmode to connection string if provided if 'sslmode=' in connection_string: # Replace existing sslmode import re connection_string = re.sub(r'sslmode=[^ ]*', f'sslmode={sslmode}', connection_string) else: # Add sslmode to connection string connection_string = f"{connection_string} sslmode={sslmode}" else: connection_string = f"postgresql://{user}:{password}@{host}:{port}/{dbname}" if sslmode: connection_string = f"{connection_string} sslmode={sslmode}" if self.connection_pool is None: if PSYCOPG_VERSION == 3: # psycopg3 ConnectionPool self.connection_pool = ConnectionPool(conninfo=connection_string, min_size=minconn, max_size=maxconn, open=True) else: # psycopg2 ThreadedConnectionPool self.connection_pool = ConnectionPool(minconn=minconn, maxconn=maxconn, dsn=connection_string) collections = self.list_cols() if collection_name not in collections: self.create_col() @contextmanager def _get_cursor(self, commit: bool = False): """ Unified context manager to get a cursor from the appropriate pool. Auto-commits or rolls back based on exception, and returns the connection to the pool. """ if PSYCOPG_VERSION == 3: # psycopg3 auto-manages commit/rollback and pool return with self.connection_pool.connection() as conn: with conn.cursor() as cur: try: yield cur if commit: conn.commit() except Exception: conn.rollback() logger.error("Error in cursor context (psycopg3)", exc_info=True) raise else: # psycopg2 manual getconn/putconn conn = self.connection_pool.getconn() cur = conn.cursor() try: yield cur if commit: conn.commit() except Exception as exc: conn.rollback() logger.error(f"Error occurred: {exc}") raise exc finally: cur.close() self.connection_pool.putconn(conn) def create_col(self) -> None: """ Create a new collection (table in PostgreSQL). Will also initialize vector search index if specified. """ with self._get_cursor(commit=True) as cur: cur.execute("CREATE EXTENSION IF NOT EXISTS vector") cur.execute( f""" CREATE TABLE IF NOT EXISTS {self.collection_name} ( id UUID PRIMARY KEY, vector vector({self.embedding_model_dims}), payload JSONB ); """ ) if self.use_diskann and self.embedding_model_dims < 2000: cur.execute("SELECT * FROM pg_extension WHERE extname = 'vectorscale'") if cur.fetchone(): # Create DiskANN index if extension is installed for faster search cur.execute( f""" CREATE INDEX IF NOT EXISTS {self.collection_name}_diskann_idx ON {self.collection_name} USING diskann (vector); """ ) elif self.use_hnsw: cur.execute( f""" CREATE INDEX IF NOT EXISTS {self.collection_name}_hnsw_idx ON {self.collection_name} USING hnsw (vector vector_cosine_ops) """ ) def insert(self, vectors: list[list[float]], payloads=None, ids=None) -> None: logger.info(f"Inserting {len(vectors)} vectors into collection {self.collection_name}") json_payloads = [json.dumps(payload) for payload in payloads] data = [(id, vector, payload) for id, vector, payload in zip(ids, vectors, json_payloads)] if PSYCOPG_VERSION == 3: with self._get_cursor(commit=True) as cur: cur.executemany( f"INSERT INTO {self.collection_name} (id, vector, payload) VALUES (%s, %s, %s)", data, ) else: with self._get_cursor(commit=True) as cur: execute_values( cur, f"INSERT INTO {self.collection_name} (id, vector, payload) VALUES %s", data, ) def search( self, query: str, vectors: list[float], limit: Optional[int] = 5, filters: Optional[dict] = None, ) -> List[OutputData]: """ Search for similar vectors. Args: query (str): Query. vectors (List[float]): Query vector. limit (int, optional): Number of results to return. Defaults to 5. filters (Dict, optional): Filters to apply to the search. Defaults to None. Returns: list: Search results. """ filter_conditions = [] filter_params = [] if filters: for k, v in filters.items(): filter_conditions.append("payload->>%s = %s") filter_params.extend([k, str(v)]) filter_clause = "WHERE " + " AND ".join(filter_conditions) if filter_conditions else "" with self._get_cursor() as cur: cur.execute( f""" SELECT id, vector <=> %s::vector AS distance, payload FROM {self.collection_name} {filter_clause} ORDER BY distance LIMIT %s """, (vectors, *filter_params, limit), ) results = cur.fetchall() return [OutputData(id=str(r[0]), score=float(r[1]), payload=r[2]) for r in results] def delete(self, vector_id: str) -> None: """ Delete a vector by ID. Args: vector_id (str): ID of the vector to delete. """ with self._get_cursor(commit=True) as cur: cur.execute(f"DELETE FROM {self.collection_name} WHERE id = %s", (vector_id,)) def update( self, vector_id: str, vector: Optional[list[float]] = None, payload: Optional[dict] = None, ) -> None: """ Update a vector and its payload. Args: vector_id (str): ID of the vector to update. vector (List[float], optional): Updated vector. payload (Dict, optional): Updated payload. """ with self._get_cursor(commit=True) as cur: if vector: cur.execute( f"UPDATE {self.collection_name} SET vector = %s WHERE id = %s", (vector, vector_id), ) if payload: # Handle JSON serialization based on psycopg version if PSYCOPG_VERSION == 3: # psycopg3 uses psycopg.types.json.Json cur.execute( f"UPDATE {self.collection_name} SET payload = %s WHERE id = %s", (Json(payload), vector_id), ) else: # psycopg2 uses psycopg2.extras.Json cur.execute( f"UPDATE {self.collection_name} SET payload = %s WHERE id = %s", (Json(payload), vector_id), ) def get(self, vector_id: str) -> OutputData: """ Retrieve a vector by ID. Args: vector_id (str): ID of the vector to retrieve. Returns: OutputData: Retrieved vector. """ with self._get_cursor() as cur: cur.execute( f"SELECT id, vector, payload FROM {self.collection_name} WHERE id = %s", (vector_id,), ) result = cur.fetchone() if not result: return None return OutputData(id=str(result[0]), score=None, payload=result[2]) def list_cols(self) -> List[str]: """ List all collections. Returns: List[str]: List of collection names. """ with self._get_cursor() as cur: cur.execute("SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'") return [row[0] for row in cur.fetchall()] def delete_col(self) -> None: """Delete a collection.""" with self._get_cursor(commit=True) as cur: cur.execute(f"DROP TABLE IF EXISTS {self.collection_name}") def col_info(self) -> dict[str, Any]: """ Get information about a collection. Returns: Dict[str, Any]: Collection information. """ with self._get_cursor() as cur: cur.execute( f""" SELECT table_name, (SELECT COUNT(*) FROM {self.collection_name}) as row_count, (SELECT pg_size_pretty(pg_total_relation_size('{self.collection_name}'))) as total_size FROM information_schema.tables WHERE table_schema = 'public' AND table_name = %s """, (self.collection_name,), ) result = cur.fetchone() return {"name": result[0], "count": result[1], "size": result[2]} def list( self, filters: Optional[dict] = None, limit: Optional[int] = 100 ) -> List[OutputData]: """ List all vectors in a collection. Args: filters (Dict, optional): Filters to apply to the list. limit (int, optional): Number of vectors to return. Defaults to 100. Returns: List[OutputData]: List of vectors. """ filter_conditions = [] filter_params = [] if filters: for k, v in filters.items(): filter_conditions.append("payload->>%s = %s") filter_params.extend([k, str(v)]) filter_clause = "WHERE " + " AND ".join(filter_conditions) if filter_conditions else "" query = f""" SELECT id, vector, payload FROM {self.collection_name} {filter_clause} LIMIT %s """ with self._get_cursor() as cur: cur.execute(query, (*filter_params, limit)) results = cur.fetchall() return [[OutputData(id=str(r[0]), score=None, payload=r[2]) for r in results]] def __del__(self) -> None: """ Close the database connection pool when the object is deleted. """ try: # Close pool appropriately if PSYCOPG_VERSION == 3: self.connection_pool.close() else: self.connection_pool.closeall() except Exception: pass def reset(self) -> None: """Reset the index by deleting and recreating it.""" logger.warning(f"Resetting index {self.collection_name}...") self.delete_col() self.create_col()