Commit
Merge pull request #403 from aurelio-labs/feat/use-pinecone-index-list
feat: Use Pinecone library list functionality
jamescalam authored Aug 29, 2024
2 parents 5ba4f2a + f0d8829 · commit e5d59d2
Showing 1 changed file with 3 additions and 36 deletions.
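
The change swaps a hand-rolled call to Pinecone's /vectors/list REST endpoint (paginated manually via paginationToken) for the Pinecone Python client's built-in Index.list() helper, which yields batches of vector IDs and follows pagination tokens internally. Below is a minimal sketch of that SDK call on its own, assuming a recent pinecone client and an existing index; the index name, namespace, and prefix are illustrative and not part of this commit:

from pinecone import Pinecone

# Illustrative setup -- not part of this commit.
pc = Pinecone(api_key="YOUR_API_KEY")
index = pc.Index("my-index")

all_ids = []
# Index.list() is a generator: each iteration yields a batch of vector IDs,
# and the client handles pagination tokens behind the scenes.
for ids in index.list(prefix="doc#", namespace="my-namespace"):
    all_ids.extend(ids)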
39 changes: 3 additions & 36 deletions semantic_router/index/pinecone.py
@@ -6,7 +6,6 @@
 from typing import Any, Dict, List, Optional, Tuple, Union
 
 import numpy as np
-import requests
 from pydantic.v1 import BaseModel, Field
 
 from semantic_router.index.base import BaseIndex

@@ -363,39 +362,13 @@ def _get_all(self, prefix: Optional[str] = None, include_metadata: bool = False)
         if self.index is None:
             raise ValueError("Index is None, could not retrieve vector IDs.")
         all_vector_ids = []
-        next_page_token = None
-
-        if prefix:
-            prefix_str = f"?prefix={prefix}"
-        else:
-            prefix_str = ""
-
-        # Construct the request URL for listing vectors. Adjust parameters as needed.
-        list_url = f"https://{self.host}/vectors/list{prefix_str}"
-        params: Dict = {}
-        if self.namespace:
-            params["namespace"] = self.namespace
-        headers = {"Api-Key": self.api_key}
         metadata = []
 
-        while True:
-            if next_page_token:
-                params["paginationToken"] = next_page_token
-
-            # Make the request to list vectors. Adjust headers and parameters as needed.
-            response = requests.get(list_url, params=params, headers=headers)
-            response_data = response.json()
+        for ids in self.index.list(prefix=prefix, namespace=self.namespace):
+            all_vector_ids.extend(ids)
 
-            # Extract vector IDs from the response and add them to the list
-            vector_ids = [vec["id"] for vec in response_data.get("vectors", [])]
-            # check that there are vector IDs, otherwise break the loop
-            if not vector_ids:
-                break
-            all_vector_ids.extend(vector_ids)
-
-            # if we need metadata, we fetch it
             if include_metadata:
-                for id in vector_ids:
+                for id in ids:
                     res_meta = (
                         self.index.fetch(ids=[id], namespace=self.namespace)
                         if self.index

@@ -404,12 +377,6 @@ def _get_all(self, prefix: Optional[str] = None, include_metadata: bool = False)
                     metadata.extend(
                         [x["metadata"] for x in res_meta["vectors"].values()]
                     )
-            # extract metadata only
-
-            # Check if there's a next page token; if not, break the loop
-            next_page_token = response_data.get("pagination", {}).get("next")
-            if not next_page_token:
-                break
 
         return all_vector_ids, metadata

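For reference, a sketch of how _get_all reads after this commit, assembled from the added and unchanged lines in the hunks above (the signature comes from the hunk header; the else branch of the fetch expression sits in a collapsed stretch of the diff and is assumed here to be an empty dict):

    def _get_all(self, prefix: Optional[str] = None, include_metadata: bool = False):
        if self.index is None:
            raise ValueError("Index is None, could not retrieve vector IDs.")
        all_vector_ids = []
        metadata = []

        # The Pinecone client paginates internally and yields batches of IDs.
        for ids in self.index.list(prefix=prefix, namespace=self.namespace):
            all_vector_ids.extend(ids)

            if include_metadata:
                for id in ids:
                    # Fetch each record individually to collect its metadata.
                    res_meta = (
                        self.index.fetch(ids=[id], namespace=self.namespace)
                        if self.index
                        else {}  # assumed; this line is not visible in the hunks above
                    )
                    metadata.extend(
                        [x["metadata"] for x in res_meta["vectors"].values()]
                    )

        return all_vector_ids, metadata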