
Commit 6ea135e

EricLBuehler committed Apr 25, 2024
1 parent b6672dd commit 6ea135e
Showing 291 changed files with 446 additions and 995 deletions.
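Every hunk shown below makes the same formatting-only change: the trailing "magic" comma after the final `**kwargs: Any` parameter is removed, which also lets formatters collapse short signatures onto one line. As a quick illustration (hypothetical function names), the two spellings are identical at runtime:

```python
from typing import Any


# Before: a trailing comma after **kwargs keeps auto-formatters from
# collapsing the signature onto a single line.
def f_before(*args: Any, **kwargs: Any,) -> None:
    pass


# After: no trailing comma, so the signature may be collapsed.
def f_after(*args: Any, **kwargs: Any) -> None:
    pass
```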
@@ -26228,7 +26228,7 @@
"timestampEdited": null,
"callEndedTimestamp": null,
"isPinned": false,
"content": "i.e. \n```python\nindex = GPTChromaIndex(documents, chroma_collection=chroma_collection)\n```\nthen\n```python\nresponse = index.query(\"What did the author do growing up?\")\n```\n\nwe get \n```\nValueError: chroma_collection cannot be None.\n```\n\nSince we check it here:\n```\nclass GPTChromaIndexQuery(BaseGPTVectorStoreIndexQuery[ChromaIndexStruct]): \"\"\"GPTChromaIndex query.\"\"\" def __init__( self, index_struct: ChromaIndexStruct, embed_model: Optional[BaseEmbedding] = None, similarity_top_k: int = 1, chroma_collection: Optional[Any] = None, **kwargs: Any, ) -> None: \"\"\"Initialize params.\"\"\" super().__init__( index_struct=index_struct, embed_model=embed_model, similarity_top_k=similarity_top_k, **kwargs, ) import_err_msg = ( \"`chromadb` package not found, please run `pip install chromadb`\" ) try: import chromadb except ImportError: raise ValueError(import_err_msg) if chroma_collection is None: raise ValueError(\"chroma_collection cannot be None.\")\n```\n\nWhat am I missing ?",
"content": "i.e. \n```python\nindex = GPTChromaIndex(documents, chroma_collection=chroma_collection)\n```\nthen\n```python\nresponse = index.query(\"What did the author do growing up?\")\n```\n\nwe get \n```\nValueError: chroma_collection cannot be None.\n```\n\nSince we check it here:\n```\nclass GPTChromaIndexQuery(BaseGPTVectorStoreIndexQuery[ChromaIndexStruct]): \"\"\"GPTChromaIndex query.\"\"\" def __init__( self, index_struct: ChromaIndexStruct, embed_model: Optional[BaseEmbedding] = None, similarity_top_k: int = 1, chroma_collection: Optional[Any] = None, **kwargs: Any ) -> None: \"\"\"Initialize params.\"\"\" super().__init__( index_struct=index_struct, embed_model=embed_model, similarity_top_k=similarity_top_k, **kwargs, ) import_err_msg = ( \"`chromadb` package not found, please run `pip install chromadb`\" ) try: import chromadb except ImportError: raise ValueError(import_err_msg) if chroma_collection is None: raise ValueError(\"chroma_collection cannot be None.\")\n```\n\nWhat am I missing ?",
"author": {
"id": "1065787112475066409",
"name": "chroma_anton",
2 changes: 1 addition & 1 deletion docs/docs/examples/embeddings/custom_embeddings.ipynb
@@ -86,7 +86,7 @@
" self,\n",
" instructor_model_name: str = \"hkunlp/instructor-large\",\n",
" instruction: str = \"Represent a document for semantic search:\",\n",
" **kwargs: Any,\n",
" **kwargs: Any\n",
" ) -> None:\n",
" self._model = INSTRUCTOR(instructor_model_name)\n",
" self._instruction = instruction\n",
12 changes: 3 additions & 9 deletions docs/docs/examples/low_level/vector_store.ipynb
@@ -207,9 +207,7 @@
" pass\n",
"\n",
" def query(\n",
" self,\n",
" query: VectorStoreQuery,\n",
" **kwargs: Any,\n",
" self, query: VectorStoreQuery, **kwargs: Any\n",
" ) -> VectorStoreQueryResult:\n",
" \"\"\"Get nodes for response.\"\"\"\n",
" pass\n",
@@ -473,9 +471,7 @@
" \"\"\"Implements semantic/dense search.\"\"\"\n",
"\n",
" def query(\n",
" self,\n",
" query: VectorStoreQuery,\n",
" **kwargs: Any,\n",
" self, query: VectorStoreQuery, **kwargs: Any\n",
" ) -> VectorStoreQueryResult:\n",
" \"\"\"Get nodes for response.\"\"\"\n",
"\n",
@@ -568,9 +564,7 @@
" \"\"\"Implements Metadata Filtering.\"\"\"\n",
"\n",
" def query(\n",
" self,\n",
" query: VectorStoreQuery,\n",
" **kwargs: Any,\n",
" self, query: VectorStoreQuery, **kwargs: Any\n",
" ) -> VectorStoreQueryResult:\n",
" \"\"\"Get nodes for response.\"\"\"\n",
" # 1. First filter by metadata\n",
4 changes: 2 additions & 2 deletions docs/docs/module_guides/evaluating/usage_pattern.md
@@ -12,7 +12,7 @@ All of the evaluation modules in LlamaIndex implement the `BaseEvaluator` class,
query: Optional[str] = None,
contexts: Optional[Sequence[str]] = None,
response: Optional[str] = None,
- **kwargs: Any,
+ **kwargs: Any
) -> EvaluationResult:
```

@@ -23,7 +23,7 @@ def evaluate_response(
self,
query: Optional[str] = None,
response: Optional[Response] = None,
- **kwargs: Any,
+ **kwargs: Any
) -> EvaluationResult:
```
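For example, a minimal sketch of calling this `evaluate()` signature through one of the built-in evaluators; the LLM choice is an assumption:

```python
from llama_index.core.evaluation import FaithfulnessEvaluator
from llama_index.llms.openai import OpenAI

evaluator = FaithfulnessEvaluator(llm=OpenAI(model="gpt-4"))
result = evaluator.evaluate(
    query="What color is the sky?",
    contexts=["The sky is blue on a clear day."],
    response="The sky is blue.",
)
print(result.passing, result.score)
```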

2 changes: 1 addition & 1 deletion docs/docs/module_guides/models/embeddings.md
@@ -164,7 +164,7 @@ class InstructorEmbeddings(BaseEmbedding):
self,
instructor_model_name: str = "hkunlp/instructor-large",
instruction: str = "Represent the Computer Science documentation or question:",
- **kwargs: Any,
+ **kwargs: Any
) -> None:
self._model = INSTRUCTOR(instructor_model_name)
self._instruction = instruction
2 changes: 1 addition & 1 deletion llama-index-core/llama_index/core/agent/react/base.py
@@ -105,7 +105,7 @@ def from_tools(
handle_reasoning_failure_fn: Optional[
Callable[[CallbackManager, Exception], ToolOutput]
] = None,
- **kwargs: Any,
+ **kwargs: Any
) -> "ReActAgent":
"""Convenience constructor method from set of BaseTools (Optional).
4 changes: 1 addition & 3 deletions llama-index-core/llama_index/core/agent/runner/base.py
@@ -247,9 +247,7 @@ def __init__(

@staticmethod
def from_llm(
- tools: Optional[List[BaseTool]] = None,
- llm: Optional[LLM] = None,
- **kwargs: Any,
+ tools: Optional[List[BaseTool]] = None, llm: Optional[LLM] = None, **kwargs: Any
) -> "AgentRunner":
from llama_index.core.agent import ReActAgent

5 changes: 1 addition & 4 deletions llama-index-core/llama_index/core/base/embeddings/base.py
@@ -295,10 +295,7 @@ async def aget_text_embedding(self, text: str) -> Embedding:

@dispatcher.span
def get_text_embedding_batch(
- self,
- texts: List[str],
- show_progress: bool = False,
- **kwargs: Any,
+ self, texts: List[str], show_progress: bool = False, **kwargs: Any
) -> List[Embedding]:
"""Get a list of text embeddings, with batching."""
dispatch_event = dispatcher.get_dispatch_event()
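A hedged usage sketch of the batched embedding API above; the embedding backend is an assumption:

```python
from llama_index.embeddings.openai import OpenAIEmbedding

embed_model = OpenAIEmbedding()
embeddings = embed_model.get_text_embedding_batch(
    ["first text", "second text"], show_progress=True
)
print(len(embeddings), len(embeddings[0]))
```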
4 changes: 2 additions & 2 deletions llama-index-core/llama_index/core/callbacks/base_handler.py
@@ -28,7 +28,7 @@ def on_event_start(
payload: Optional[Dict[str, Any]] = None,
event_id: str = "",
parent_id: str = "",
- **kwargs: Any,
+ **kwargs: Any
) -> str:
"""Run when an event starts and return id of event."""

@@ -38,7 +38,7 @@ def on_event_end(
event_type: CBEventType,
payload: Optional[Dict[str, Any]] = None,
event_id: str = "",
- **kwargs: Any,
+ **kwargs: Any
) -> None:
"""Run when an event ends."""

2 changes: 1 addition & 1 deletion llama-index-core/llama_index/core/chat_engine/context.py
@@ -70,7 +70,7 @@ def from_defaults(
node_postprocessors: Optional[List[BaseNodePostprocessor]] = None,
context_template: Optional[str] = None,
llm: Optional[LLM] = None,
- **kwargs: Any,
+ **kwargs: Any
) -> "ContextChatEngine":
"""Initialize a ContextChatEngine from default parameters."""
llm = llm or llm_from_settings_or_context(Settings, service_context)
2 changes: 1 addition & 1 deletion llama-index-core/llama_index/core/chat_engine/simple.py
@@ -49,7 +49,7 @@ def from_defaults(
llm: Optional[LLM] = None,
# deprecated
service_context: Optional[ServiceContext] = None,
- **kwargs: Any,
+ **kwargs: Any
) -> "SimpleChatEngine":
"""Initialize a SimpleChatEngine from default parameters."""
llm = llm or llm_from_settings_or_context(Settings, service_context)
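A minimal sketch of this factory in use; the LLM choice is an assumption:

```python
from llama_index.core.chat_engine import SimpleChatEngine
from llama_index.llms.openai import OpenAI

chat_engine = SimpleChatEngine.from_defaults(llm=OpenAI(model="gpt-3.5-turbo"))
print(chat_engine.chat("Hello!"))
```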
8 changes: 4 additions & 4 deletions llama-index-core/llama_index/core/evaluation/base.py
@@ -51,7 +51,7 @@ def evaluate(
query: Optional[str] = None,
response: Optional[str] = None,
contexts: Optional[Sequence[str]] = None,
- **kwargs: Any,
+ **kwargs: Any
) -> EvaluationResult:
"""Run evaluation with query string, retrieved contexts,
and generated response string.
@@ -74,7 +74,7 @@ async def aevaluate(
query: Optional[str] = None,
response: Optional[str] = None,
contexts: Optional[Sequence[str]] = None,
- **kwargs: Any,
+ **kwargs: Any
) -> EvaluationResult:
"""Run evaluation with query string, retrieved contexts,
and generated response string.
@@ -88,7 +88,7 @@ def evaluate_response(
self,
query: Optional[str] = None,
response: Optional[Response] = None,
- **kwargs: Any,
+ **kwargs: Any
) -> EvaluationResult:
"""Run evaluation with query string and generated Response object.
@@ -109,7 +109,7 @@ async def aevaluate_response(
self,
query: Optional[str] = None,
response: Optional[Response] = None,
- **kwargs: Any,
+ **kwargs: Any
) -> EvaluationResult:
"""Run evaluation with query string and generated Response object.
@@ -125,7 +125,7 @@ async def aevaluate(
contexts: Optional[Sequence[str]] = None,
reference: Optional[str] = None,
sleep_time_in_seconds: int = 0,
- **kwargs: Any,
+ **kwargs: Any
) -> EvaluationResult:
del kwargs # Unused
del contexts # Unused
5 changes: 1 addition & 4 deletions llama-index-core/llama_index/core/evaluation/eval_utils.py
@@ -37,10 +37,7 @@ async def aget_responses(
return await asyncio_mod.gather(*tasks)


- def get_responses(
- *args: Any,
- **kwargs: Any,
- ) -> List[str]:
+ def get_responses(*args: Any, **kwargs: Any) -> List[str]:
"""Get responses.
Sync version of aget_responses.
2 changes: 1 addition & 1 deletion llama-index-core/llama_index/core/evaluation/guideline.py
@@ -94,7 +94,7 @@ async def aevaluate(
response: Optional[str] = None,
contexts: Optional[Sequence[str]] = None,
sleep_time_in_seconds: int = 0,
- **kwargs: Any,
+ **kwargs: Any
) -> EvaluationResult:
"""Evaluate whether the query and response pair passes the guidelines."""
del contexts # Unused
2 changes: 1 addition & 1 deletion llama-index-core/llama_index/core/evaluation/pairwise.py
@@ -243,7 +243,7 @@ async def aevaluate(
second_response: Optional[str] = None,
reference: Optional[str] = None,
sleep_time_in_seconds: int = 0,
- **kwargs: Any,
+ **kwargs: Any
) -> EvaluationResult:
del kwargs # Unused
del contexts # Unused
@@ -39,7 +39,7 @@ def __init__(
metrics: Sequence[BaseRetrievalMetric],
retriever: BaseRetriever,
node_postprocessors: Optional[List[BaseNodePostprocessor]] = None,
- **kwargs: Any,
+ **kwargs: Any
) -> None:
"""Init params."""
super().__init__(
@@ -89,7 +89,7 @@ def __init__(
metrics: Sequence[BaseRetrievalMetric],
retriever: BaseRetriever,
node_postprocessors: Optional[List[BaseNodePostprocessor]] = None,
- **kwargs: Any,
+ **kwargs: Any
) -> None:
"""Init params."""
super().__init__(
4 changes: 2 additions & 2 deletions llama-index-core/llama_index/core/extractors/interface.py
@@ -99,7 +99,7 @@ async def aprocess_nodes(
nodes: List[BaseNode],
excluded_embed_metadata_keys: Optional[List[str]] = None,
excluded_llm_metadata_keys: Optional[List[str]] = None,
- **kwargs: Any,
+ **kwargs: Any
) -> List[BaseNode]:
"""Post process nodes parsed from documents.
@@ -137,7 +137,7 @@ def process_nodes(
nodes: List[BaseNode],
excluded_embed_metadata_keys: Optional[List[str]] = None,
excluded_llm_metadata_keys: Optional[List[str]] = None,
- **kwargs: Any,
+ **kwargs: Any
) -> List[BaseNode]:
return asyncio.run(
self.aprocess_nodes(
@@ -57,7 +57,7 @@ def from_indices(
index_summaries: Optional[Sequence[str]] = None,
service_context: Optional[ServiceContext] = None,
storage_context: Optional[StorageContext] = None,
- **kwargs: Any,
+ **kwargs: Any
) -> "ComposableGraph": # type: ignore
"""Create composable graph using this index class as the root."""
service_context = service_context or ServiceContext.from_defaults()
@@ -52,7 +52,7 @@ def __init__(
callback_manager: Optional[CallbackManager] = None,
object_map: Optional[dict] = None,
verbose: bool = False,
- **kwargs: Any,
+ **kwargs: Any
) -> None:
self._index = index
self._choice_select_prompt = (
@@ -135,7 +135,7 @@ def __init__(
callback_manager: Optional[CallbackManager] = None,
object_map: Optional[dict] = None,
verbose: bool = False,
- **kwargs: Any,
+ **kwargs: Any
) -> None:
"""Init params."""
self._index = index
2 changes: 1 addition & 1 deletion llama-index-core/llama_index/core/indices/empty/base.py
@@ -35,7 +35,7 @@ def __init__(
index_struct: Optional[EmptyIndexStruct] = None,
# deprecated
service_context: Optional[ServiceContext] = None,
- **kwargs: Any,
+ **kwargs: Any
) -> None:
"""Initialize params."""
super().__init__(
@@ -25,7 +25,7 @@ def __init__(
index: EmptyIndex,
input_prompt: Optional[BasePromptTemplate] = None,
callback_manager: Optional[CallbackManager] = None,
- **kwargs: Any,
+ **kwargs: Any
) -> None:
"""Initialize params."""
self._index = index
@@ -30,7 +30,7 @@ def as_retriever(
retriever_mode: Union[
str, KeywordTableRetrieverMode
] = KeywordTableRetrieverMode.RAKE,
- **kwargs: Any,
+ **kwargs: Any
) -> BaseRetriever:
return super().as_retriever(retriever_mode=retriever_mode, **kwargs)
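A hedged sketch of picking the retriever mode on a RAKE keyword-table index; the `documents` list and the optional RAKE dependency are assumptions:

```python
# Assumes `documents` is already loaded and the RAKE extras are installed.
from llama_index.core.indices.keyword_table import RAKEKeywordTableIndex

index = RAKEKeywordTableIndex.from_documents(documents)
retriever = index.as_retriever(retriever_mode="rake")
nodes = retriever.retrieve("What did the author do growing up?")
```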

@@ -36,7 +36,7 @@ def as_retriever(
retriever_mode: Union[
str, KeywordTableRetrieverMode
] = KeywordTableRetrieverMode.SIMPLE,
- **kwargs: Any,
+ **kwargs: Any
) -> BaseRetriever:
return super().as_retriever(retriever_mode=retriever_mode, **kwargs)

8 changes: 2 additions & 6 deletions llama-index-core/llama_index/core/indices/loading.py
@@ -10,9 +10,7 @@


def load_index_from_storage(
- storage_context: StorageContext,
- index_id: Optional[str] = None,
- **kwargs: Any,
+ storage_context: StorageContext, index_id: Optional[str] = None, **kwargs: Any
) -> BaseIndex:
"""Load index from storage context.
@@ -83,9 +81,7 @@ def load_indices_from_storage(


def load_graph_from_storage(
- storage_context: StorageContext,
- root_id: str,
- **kwargs: Any,
+ storage_context: StorageContext, root_id: str, **kwargs: Any
) -> ComposableGraph:
"""Load composable graph from storage context.
4 changes: 2 additions & 2 deletions llama-index-core/llama_index/core/indices/managed/base.py
@@ -35,7 +35,7 @@ def __init__(
show_progress: bool = False,
# deprecated
service_context: Optional[ServiceContext] = None,
- **kwargs: Any,
+ **kwargs: Any
) -> None:
"""Initialize params."""
super().__init__(
@@ -90,7 +90,7 @@ def from_documents(
transformations: Optional[List[TransformComponent]] = None,
# deprecated
service_context: Optional[ServiceContext] = None,
- **kwargs: Any,
+ **kwargs: Any
) -> IndexType:
"""Build an index from a sequence of documents."""
raise NotImplementedError(
@@ -61,7 +61,7 @@ def __init__(
doc_ids: Optional[List[str]] = None,
sparse_top_k: Optional[int] = None,
callback_manager: Optional[CallbackManager] = None,
- **kwargs: Any,
+ **kwargs: Any
) -> None:
"""Initialize params."""
self._index = index
@@ -46,7 +46,7 @@ def __init__(
service_context: Optional[ServiceContext] = None,
schema_extract_prompt: Optional[BasePromptTemplate] = None,
output_parser: Optional[OUTPUT_PARSER_TYPE] = None,
- **kwargs: Any,
+ **kwargs: Any
) -> None:
"""Initialize params."""
self.schema_extract_prompt = (
@@ -110,10 +110,7 @@ def derive_index_from_context(
for table_name, context_str in full_context_dict.items():
doc = Document(text=context_str, metadata={"table_name": table_name})
context_docs.append(doc)
- return index_cls.from_documents(
- documents=context_docs,
- **index_kwargs,
- )
+ return index_cls.from_documents(documents=context_docs, **index_kwargs)

def query_index_for_context(
self,
[Remaining changed files not shown]

0 comments on commit 6ea135e
