diff --git a/.vscode/settings.json b/.vscode/settings.json
index 1c78d0764b0aae..7baa8bc71b52ed 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -9,8 +9,6 @@
   "python.testing.pytestArgs": ["tests"],
   "python.testing.unittestEnabled": false,
   "python.testing.pytestEnabled": true,
-  "python.analysis.extraPaths": [
-    "./llama-index-core"
-  ],
+  "python.analysis.extraPaths": ["./llama-index-core"],
   "python.analysis.autoImportCompletions": true
 }
diff --git a/docs/prepare_for_build.py b/docs/prepare_for_build.py
index 239e6bad5c86ce..1559c63f9375fa 100644
--- a/docs/prepare_for_build.py
+++ b/docs/prepare_for_build.py
@@ -139,13 +139,17 @@
                 )
                 if (
                     toc_path_name
-                    not in mkdocs["nav"][examples_idx]["Examples"][label_idx][label]
+                    not in mkdocs["nav"][examples_idx]["Examples"][label_idx][
+                        label
+                    ]
                 ):
-                    mkdocs["nav"][examples_idx]["Examples"][label_idx][label].append(
-                        toc_path_name
-                    )
+                    mkdocs["nav"][examples_idx]["Examples"][label_idx][
+                        label
+                    ].append(toc_path_name)
         if os.path.isdir(os.path.join(path_name, file_name)):
-            for root, dirs, files in os.walk(os.path.join(path_name, file_name)):
+            for root, dirs, files in os.walk(
+                os.path.join(path_name, file_name)
+            ):
                 for file in files:
                     if file.endswith(".ipynb"):
                         toc_path_name = "./" + os.path.join(
@@ -153,9 +157,9 @@
                         )
                         if (
                             toc_path_name
-                            not in mkdocs["nav"][examples_idx]["Examples"][label_idx][
-                                label
-                            ]
+                            not in mkdocs["nav"][examples_idx]["Examples"][
+                                label_idx
+                            ][label]
                         ):
                             mkdocs["nav"][examples_idx]["Examples"][label_idx][
                                 label
@@ -196,7 +200,9 @@
                 elif folder_name == "graph_stores":
                     folder_name = "storage/graph_stores"

-                full_path = os.path.join("docs/docs/api_reference", folder_name)
+                full_path = os.path.join(
+                    "docs/docs/api_reference", folder_name
+                )
                 module_name = import_path.split(".")[-1] + ".md"
                 os.makedirs(full_path, exist_ok=True)
                 with open(os.path.join(full_path, module_name), "w") as f:
@@ -212,23 +218,31 @@
                 if "storage" in folder_name:
                     label = "Storage"
                 else:
-                    label = INTEGRATION_FOLDER_TO_LABEL[import_path.split(".")[1]]
+                    label = INTEGRATION_FOLDER_TO_LABEL[
+                        import_path.split(".")[1]
+                    ]

                 label_idx = -1
-                for idx, item in enumerate(mkdocs["nav"][api_ref_idx]["API Reference"]):
+                for idx, item in enumerate(
+                    mkdocs["nav"][api_ref_idx]["API Reference"]
+                ):
                     if label in item:
                         label_idx = idx
                         break

                 if label_idx == -1:
-                    mkdocs["nav"][api_ref_idx]["API Reference"].append({label: []})
+                    mkdocs["nav"][api_ref_idx]["API Reference"].append(
+                        {label: []}
+                    )

                 toc_path_name = "./" + os.path.join(
                     "api_reference", folder_name, module_name
                 )
                 if (
                     toc_path_name
-                    not in mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][label]
+                    not in mkdocs["nav"][api_ref_idx]["API Reference"][
+                        label_idx
+                    ][label]
                 ):
                     # storage is a special case, multi-level
                     if label == "Storage":
@@ -239,18 +253,18 @@
                         existing_sub_label_idx,
                         existing_sub_label,
                     ) in enumerate(
-                        mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][
-                            label
-                        ]
+                        mkdocs["nav"][api_ref_idx]["API Reference"][
+                            label_idx
+                        ][label]
                     ):
                         if sub_label in existing_sub_label:
                             sub_label_idx = existing_sub_label_idx
                             break

                     if sub_label_idx == -1:
-                        mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][
-                            label
-                        ].append({sub_label: []})
+                        mkdocs["nav"][api_ref_idx]["API Reference"][
+                            label_idx
+                        ][label].append({sub_label: []})

                     if (
                         toc_path_name
@@ -258,17 +272,19 @@
                             label_idx
                         ][label][sub_label_idx][sub_label]
                     ):
-                        mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][
-                            label
-                        ][sub_label_idx][sub_label].append(toc_path_name)
mkdocs["nav"][api_ref_idx]["API Reference"][ + label_idx + ][label][sub_label_idx][sub_label].append( + toc_path_name + ) # sort per sub-label - mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][label][ - sub_label_idx - ][sub_label] = sorted( - mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][ - label - ][sub_label_idx][sub_label], + mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][ + label + ][sub_label_idx][sub_label] = sorted( + mkdocs["nav"][api_ref_idx]["API Reference"][ + label_idx + ][label][sub_label_idx][sub_label], key=lambda x: next(iter(x.keys())) if isinstance(x, dict) else x, @@ -279,9 +295,15 @@ ].append(toc_path_name) # maintain sorting per label - mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][label] = sorted( - mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][label], - key=lambda x: next(iter(x.keys())) if isinstance(x, dict) else x, + mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][ + label + ] = sorted( + mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][ + label + ], + key=lambda x: next(iter(x.keys())) + if isinstance(x, dict) + else x, ) # add existing api reference pages to nav @@ -308,7 +330,9 @@ label = INTEGRATION_FOLDER_TO_LABEL[root.split("/")[-1]] label_idx = -1 - for idx, item in enumerate(mkdocs["nav"][api_ref_idx]["API Reference"]): + for idx, item in enumerate( + mkdocs["nav"][api_ref_idx]["API Reference"] + ): if label in item: label_idx = idx break @@ -324,7 +348,9 @@ existing_sub_label_idx, existing_sub_label, ) in enumerate( - mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][label] + mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][ + label + ] ): if sub_label in existing_sub_label: sub_label_idx = existing_sub_label_idx @@ -337,35 +363,45 @@ if ( toc_path_name - not in mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][ - label - ][sub_label_idx][sub_label] + not in mkdocs["nav"][api_ref_idx]["API Reference"][ + label_idx + ][label][sub_label_idx][sub_label] ): - mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][label][ - sub_label_idx - ][sub_label].append(toc_path_name) + mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][ + label + ][sub_label_idx][sub_label].append(toc_path_name) # sort per sub-label mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][label][ sub_label_idx ][sub_label] = sorted( - mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][label][ - sub_label_idx - ][sub_label], - key=lambda x: next(iter(x.keys())) if isinstance(x, dict) else x, + mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][ + label + ][sub_label_idx][sub_label], + key=lambda x: next(iter(x.keys())) + if isinstance(x, dict) + else x, ) elif ( toc_path_name - not in mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][label] + not in mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][ + label + ] ): - mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][label].append( - toc_path_name - ) + mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][ + label + ].append(toc_path_name) # sort per label - mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][label] = sorted( - mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][label], - key=lambda x: next(iter(x.keys())) if isinstance(x, dict) else x, + mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][ + label + ] = sorted( + mkdocs["nav"][api_ref_idx]["API Reference"][label_idx][ + label + ], + key=lambda x: next(iter(x.keys())) + if isinstance(x, dict) + else x, ) # sort the API Reference nav section @@ -381,9 +417,9 @@ for 
     for search_path in search_paths:
         if (
             search_path
-            not in mkdocs["plugins"][i]["mkdocstrings"]["handlers"]["python"][
-                "paths"
-            ]
+            not in mkdocs["plugins"][i]["mkdocstrings"]["handlers"][
+                "python"
+            ]["paths"]
         ):
             mkdocs["plugins"][i]["mkdocstrings"]["handlers"]["python"][
                 "paths"
diff --git a/llama-index-core/llama_index/core/indices/vector_store/retrievers/auto_retriever/prompts.py b/llama-index-core/llama_index/core/indices/vector_store/retrievers/auto_retriever/prompts.py
index a943e88beca765..e195b43649ddc8 100644
--- a/llama-index-core/llama_index/core/indices/vector_store/retrievers/auto_retriever/prompts.py
+++ b/llama-index-core/llama_index/core/indices/vector_store/retrievers/auto_retriever/prompts.py
@@ -127,7 +127,11 @@
 {example_output_2.json()}
 ```

-""".replace("{", "{{").replace("}", "}}")
+""".replace(
+    "{", "{{"
+).replace(
+    "}", "}}"
+)


 SUFFIX = """
diff --git a/llama-index-core/llama_index/core/question_gen/prompts.py b/llama-index-core/llama_index/core/question_gen/prompts.py
index 4f95d7de5e089d..1c5f6f26e77a2d 100644
--- a/llama-index-core/llama_index/core/question_gen/prompts.py
+++ b/llama-index-core/llama_index/core/question_gen/prompts.py
@@ -65,7 +65,11 @@ def build_tools_text(tools: Sequence[ToolMetadata]) -> str:
 {example_output_str}
 ```

-""".replace("{", "{{").replace("}", "}}")
+""".replace(
+    "{", "{{"
+).replace(
+    "}", "}}"
+)

 SUFFIX = """\
 # Example 2
diff --git a/llama-index-integrations/indices/llama-index-indices-managed-vectara/llama_index/indices/managed/vectara/prompts.py b/llama-index-integrations/indices/llama-index-indices-managed-vectara/llama_index/indices/managed/vectara/prompts.py
index fabe62f0fed04b..1e8cc906b09d3a 100644
--- a/llama-index-integrations/indices/llama-index-indices-managed-vectara/llama_index/indices/managed/vectara/prompts.py
+++ b/llama-index-integrations/indices/llama-index-indices-managed-vectara/llama_index/indices/managed/vectara/prompts.py
@@ -126,7 +126,11 @@
 {example_output_2.json()}
 ```

-""".replace("{", "{{").replace("}", "}}")
+""".replace(
+    "{", "{{"
+).replace(
+    "}", "}}"
+)


 SUFFIX = """
diff --git a/llama-index-integrations/indices/llama-index-indices-managed-zilliz/llama_index/indices/managed/zilliz/base.py b/llama-index-integrations/indices/llama-index-indices-managed-zilliz/llama_index/indices/managed/zilliz/base.py
index 45af55766f88e4..10df554dda5e50 100644
--- a/llama-index-integrations/indices/llama-index-indices-managed-zilliz/llama_index/indices/managed/zilliz/base.py
+++ b/llama-index-integrations/indices/llama-index-indices-managed-zilliz/llama_index/indices/managed/zilliz/base.py
@@ -103,9 +103,7 @@ def __init__(
         if len(self.pipeline_ids) == 0:
             print("No available pipelines. Please create pipelines first.")
         else:
-            assert set(
-                PIPELINE_TYPES
-            ).issubset(
+            assert set(PIPELINE_TYPES).issubset(
                 set(self.pipeline_ids.keys())
             ), f"Missing pipeline(s): {set(PIPELINE_TYPES) - set(self.pipeline_ids.keys())}"

diff --git a/llama-index-integrations/llms/llama-index-llms-mistral-rs/README.md b/llama-index-integrations/llms/llama-index-llms-mistral-rs/README.md
index 9f0aebac785a84..deb5bb75384d2f 100644
--- a/llama-index-integrations/llms/llama-index-llms-mistral-rs/README.md
+++ b/llama-index-integrations/llms/llama-index-llms-mistral-rs/README.md
@@ -1,3 +1,3 @@
 # LlamaIndex Llms Integration: Mistral-Rs

-Please install the Python bindings according to the guide [here](https://github.com/EricLBuehler/mistral.rs/blob/master/mistralrs-pyo3/README.md).
\ No newline at end of file
+Please install the Python bindings according to the guide [here](https://github.com/EricLBuehler/mistral.rs/blob/master/mistralrs-pyo3/README.md).
diff --git a/llama-index-integrations/llms/llama-index-llms-mistral-rs/examples/plain.ipynb b/llama-index-integrations/llms/llama-index-llms-mistral-rs/examples/plain.ipynb
index f238a8a2cf8631..0cf859364ae438 100644
--- a/llama-index-integrations/llms/llama-index-llms-mistral-rs/examples/plain.ipynb
+++ b/llama-index-integrations/llms/llama-index-llms-mistral-rs/examples/plain.ipynb
@@ -15,7 +15,7 @@
     "documents = SimpleDirectoryReader(\"data\").load_data()\n",
     "\n",
     "# bge embedding model\n",
-    "Settings.embed_model = resolve_embed_model(\"local:BAAI/bge-small-en-v1.5\")\n"
+    "Settings.embed_model = resolve_embed_model(\"local:BAAI/bge-small-en-v1.5\")"
    ]
   },
   {
@@ -55,7 +55,7 @@
    "source": [
     "query_engine = index.as_query_engine()\n",
     "response = query_engine.query(\"How do I pronounce graphene?\")\n",
-    "print(response)\n"
+    "print(response)"
    ]
   }
  ],
diff --git a/llama-index-integrations/llms/llama-index-llms-mistral-rs/examples/streaming.ipynb b/llama-index-integrations/llms/llama-index-llms-mistral-rs/examples/streaming.ipynb
index c9daf850693736..6bcfe155db049e 100644
--- a/llama-index-integrations/llms/llama-index-llms-mistral-rs/examples/streaming.ipynb
+++ b/llama-index-integrations/llms/llama-index-llms-mistral-rs/examples/streaming.ipynb
@@ -15,7 +15,7 @@
     "documents = SimpleDirectoryReader(\"data\").load_data()\n",
     "\n",
     "# bge embedding model\n",
-    "Settings.embed_model = resolve_embed_model(\"local:BAAI/bge-small-en-v1.5\")\n"
+    "Settings.embed_model = resolve_embed_model(\"local:BAAI/bge-small-en-v1.5\")"
    ]
   },
   {
@@ -24,7 +24,6 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "\n",
     "Settings.llm = MistralRS(\n",
     "    which=Which.GGUF(\n",
     "        tok_model_id=\"mistralai/Mistral-7B-Instruct-v0.1\",\n",
@@ -58,7 +57,7 @@
     "query_engine = index.as_query_engine(streaming=True)\n",
     "response = query_engine.query(\"What are non-granular scalings?\")\n",
     "for text in response.response_gen:\n",
-    "    print(text,end=\"\")\n",
+    "    print(text, end=\"\")\n",
     "    sys.stdout.flush()"
    ]
   }
diff --git a/llama-index-integrations/llms/llama-index-llms-mistral-rs/examples/xlora_gguf.ipynb b/llama-index-integrations/llms/llama-index-llms-mistral-rs/examples/xlora_gguf.ipynb
index f5e6f804ac282f..98dc78a41e652c 100644
--- a/llama-index-integrations/llms/llama-index-llms-mistral-rs/examples/xlora_gguf.ipynb
+++ b/llama-index-integrations/llms/llama-index-llms-mistral-rs/examples/xlora_gguf.ipynb
@@ -15,7 +15,7 @@
     "documents = SimpleDirectoryReader(\"data\").load_data()\n",
     "\n",
     "# bge embedding model\n",
-    "Settings.embed_model = resolve_embed_model(\"local:BAAI/bge-small-en-v1.5\")\n"
+    "Settings.embed_model = resolve_embed_model(\"local:BAAI/bge-small-en-v1.5\")"
    ]
   },
   {
@@ -24,7 +24,6 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "\n",
     "Settings.llm = MistralRS(\n",
     "    which=Which.GGUF(\n",
     "        tok_model_id=\"mistralai/Mistral-7B-Instruct-v0.1\",\n",
@@ -57,7 +56,7 @@
    "source": [
     "query_engine = index.as_query_engine()\n",
     "response = query_engine.query(\"How do I pronounce graphene?\")\n",
-    "print(response)\n"
+    "print(response)"
    ]
   }
  ],
diff --git a/llama-index-integrations/llms/llama-index-llms-mistral-rs/llama_index/llms/mistral_rs/base.py b/llama-index-integrations/llms/llama-index-llms-mistral-rs/llama_index/llms/mistral_rs/base.py
index b895af643d746b..8624d088fa442e 100644
--- a/llama-index-integrations/llms/llama-index-llms-mistral-rs/llama_index/llms/mistral_rs/base.py
+++ b/llama-index-integrations/llms/llama-index-llms-mistral-rs/llama_index/llms/mistral_rs/base.py
@@ -18,9 +18,6 @@
 )
 from llama_index.core.llms.callbacks import llm_chat_callback, llm_completion_callback
 from llama_index.core.llms.custom import CustomLLM
-from llama_index.core.base.llms.generic_utils import (
-    completion_response_to_chat_response,
-)
 from llama_index.core.types import BaseOutputParser, PydanticProgramMode

 from mistralrs import (
diff --git a/llama-index-integrations/llms/llama-index-llms-mistral-rs/pyproject.toml b/llama-index-integrations/llms/llama-index-llms-mistral-rs/pyproject.toml
index 0dba6bdbcf2a23..cf3eebcd1b5bc7 100644
--- a/llama-index-integrations/llms/llama-index-llms-mistral-rs/pyproject.toml
+++ b/llama-index-integrations/llms/llama-index-llms-mistral-rs/pyproject.toml
@@ -1,6 +1,6 @@
 [build-system]
-requires = ["poetry-core"]
 build-backend = "poetry.core.masonry.api"
+requires = ["poetry-core"]

 [tool.codespell]
 check-filenames = true
@@ -24,13 +24,13 @@ ignore_missing_imports = true
 python_version = "3.8"

 [tool.poetry]
-name = "llama-index-llms-mistral-rs"
-version = "0.1.0"
-description = "llama-index llms mistral-rs integration"
 authors = ["EricLBuehler"]
+description = "llama-index llms mistral-rs integration"
 license = "MIT"
-readme = "README.md"
+name = "llama-index-llms-mistral-rs"
 packages = [{include = "llama_index/"}]
+readme = "README.md"
+version = "0.1.0"

 [tool.poetry.dependencies]
 python = ">=3.8.1,<4.0"
@@ -52,5 +52,5 @@ types-Deprecated = ">=0.1.0"
 types-PyYAML = "^6.0.12.12"
 types-protobuf = "^4.24.0.4"
 types-redis = "4.5.5.0"
-types-requests = "2.28.11.8" # TODO: unpin when mypy>0.991
+types-requests = "2.28.11.8"  # TODO: unpin when mypy>0.991
 types-setuptools = "67.1.0.0"
diff --git a/llama-index-integrations/readers/llama-index-readers-mondaydotcom/llama_index/readers/mondaydotcom/base.py b/llama-index-integrations/readers/llama-index-readers-mondaydotcom/llama_index/readers/mondaydotcom/base.py
index 99063711abd462..2bcbd3480e54db 100644
--- a/llama-index-integrations/readers/llama-index-readers-mondaydotcom/llama_index/readers/mondaydotcom/base.py
+++ b/llama-index-integrations/readers/llama-index-readers-mondaydotcom/llama_index/readers/mondaydotcom/base.py
@@ -48,7 +48,9 @@ def _perform_request(self, board_id) -> Dict[str, str]:
                     }
                 }
             }
-        } """ % (board_id)
+        } """ % (
+            board_id
+        )
         data = {"query": query}

         response = requests.post(url=self.api_url, json=data, headers=headers)
diff --git a/llama-index-legacy/llama_index/legacy/indices/managed/vectara/prompts.py b/llama-index-legacy/llama_index/legacy/indices/managed/vectara/prompts.py
index a5953065992218..1a4279a010470b 100644
--- a/llama-index-legacy/llama_index/legacy/indices/managed/vectara/prompts.py
+++ b/llama-index-legacy/llama_index/legacy/indices/managed/vectara/prompts.py
@@ -126,7 +126,11 @@
 {example_output_2.json()}
 ```

-""".replace("{", "{{").replace("}", "}}")
+""".replace(
+    "{", "{{"
+).replace(
+    "}", "}}"
+)


 SUFFIX = """
diff --git a/llama-index-legacy/llama_index/legacy/indices/managed/zilliz/base.py b/llama-index-legacy/llama_index/legacy/indices/managed/zilliz/base.py
index 41bd631165c0c3..e9f86a98134af4 100644
--- a/llama-index-legacy/llama_index/legacy/indices/managed/zilliz/base.py
+++ b/llama-index-legacy/llama_index/legacy/indices/managed/zilliz/base.py
@@ -99,9 +99,7 @@ def __init__(
         if len(self.pipeline_ids) == 0:
             print("No available pipelines. Please create pipelines first.")
         else:
-            assert set(
-                PIPELINE_TYPES
-            ).issubset(
+            assert set(PIPELINE_TYPES).issubset(
                 set(self.pipeline_ids.keys())
             ), f"Missing pipeline(s): {set(PIPELINE_TYPES) - set(self.pipeline_ids.keys())}"

diff --git a/llama-index-legacy/llama_index/legacy/indices/vector_store/retrievers/auto_retriever/prompts.py b/llama-index-legacy/llama_index/legacy/indices/vector_store/retrievers/auto_retriever/prompts.py
index 53f802de1a8865..0421308b18fc6a 100644
--- a/llama-index-legacy/llama_index/legacy/indices/vector_store/retrievers/auto_retriever/prompts.py
+++ b/llama-index-legacy/llama_index/legacy/indices/vector_store/retrievers/auto_retriever/prompts.py
@@ -126,7 +126,11 @@
 {example_output_2.json()}
 ```

-""".replace("{", "{{").replace("}", "}}")
+""".replace(
+    "{", "{{"
+).replace(
+    "}", "}}"
+)


 SUFFIX = """
diff --git a/llama-index-legacy/llama_index/legacy/question_gen/prompts.py b/llama-index-legacy/llama_index/legacy/question_gen/prompts.py
index 47e1c37155e1c9..05244cbe2658a0 100644
--- a/llama-index-legacy/llama_index/legacy/question_gen/prompts.py
+++ b/llama-index-legacy/llama_index/legacy/question_gen/prompts.py
@@ -65,7 +65,11 @@ def build_tools_text(tools: Sequence[ToolMetadata]) -> str:
 {example_output_str}
 ```

-""".replace("{", "{{").replace("}", "}}")
+""".replace(
+    "{", "{{"
+).replace(
+    "}", "}}"
+)

 SUFFIX = """\
 # Example 2
diff --git a/llama-index-legacy/llama_index/legacy/readers/redis/utils.py b/llama-index-legacy/llama_index/legacy/readers/redis/utils.py
index a146e6a7c7153f..bec24cbf6a3749 100644
--- a/llama-index-legacy/llama_index/legacy/readers/redis/utils.py
+++ b/llama-index-legacy/llama_index/legacy/readers/redis/utils.py
@@ -50,7 +50,9 @@ def check_redis_modules_exist(client: "RedisType") -> None:
     for module in REDIS_REQUIRED_MODULES:
         if module["name"] in installed_modules and int(
             installed_modules[module["name"]][b"ver"]
-        ) >= int(module["ver"]):  # type: ignore[call-overload]
+        ) >= int(
+            module["ver"]
+        ):  # type: ignore[call-overload]
             return
     # otherwise raise error
     error_message = (
diff --git a/llama-index-packs/llama-index-packs-searchain/llama_index/packs/searchain/base.py b/llama-index-packs/llama-index-packs-searchain/llama_index/packs/searchain/base.py
index d791ffd15db66d..f6bdf4c8ed6c54 100644
--- a/llama-index-packs/llama-index-packs-searchain/llama_index/packs/searchain/base.py
+++ b/llama-index-packs/llama-index-packs-searchain/llama_index/packs/searchain/base.py
@@ -199,7 +199,9 @@ def execute(self, data_path, start_idx):
 [Query 2]: When was First for Women started?
 [Answer 2]: 1989
 [Final Content]: Arthur’s Magazine started in 1844 [1]. First for Women started in 1989 [2]. So Arthur’s Magazine was started first. So the answer is Arthur’s Magazi
-                [Question]: {}""".format(q, q),
+                [Question]: {}""".format(
+                    q, q
+                ),
             )
         ]
         feedback_answer = "continue"