diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7f0e9e1aa4a7a..c63c8a1c236ec 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,61 @@
 # ChangeLog
 
+## [2024-05-28]
+
+### `llama-index-core` [0.10.40]
+
+- Added `PropertyGraphIndex` and other supporting abstractions. See the [full guide](https://docs.llamaindex.ai/en/latest/module_guides/indexing/lpg_index_guide/) for more details (#13747)
+- Updated `AutoPrevNextNodePostprocessor` to allow passing in a response mode and LLM (#13771)
+- Fixed type handling with return direct (#13776)
+- Corrected the method name to `_aget_retrieved_ids_and_texts` in the retrieval evaluator (#13765)
+- Fixed `QueryTransformComponent` incorrectly calling `self._query_transform` (#13756)
+- Implemented more filters for `SimpleVectorStoreIndex` (#13365)
+
+### `llama-index-embeddings-bedrock` [0.2.0]
+
+- Added support for Bedrock Titan Embeddings v2 (#13580)
+
+### `llama-index-embeddings-oci-genai` [0.1.0]
+
+- Added Oracle Cloud Infrastructure (OCI) Generative AI embeddings (#13631)
+
+### `llama-index-embeddings-huggingface` [0.2.1]
+
+- Exposed the "safe_serialization" parameter from AutoModel (#11939)
+
+### `llama-index-graph-stores-neo4j` [0.2.0]
+
+- Added `Neo4jPGStore` for property graph support (#13747)
+
+### `llama-index-indices-managed-dashscope` [0.1.1]
+
+- Added the DashScope managed index (#13378)
+
+### `llama-index-llms-oci-genai` [0.1.0]
+
+- Added Oracle Cloud Infrastructure (OCI) Generative AI LLM support (#13631)
+
+### `llama-index-readers-feishu-wiki` [0.1.1]
+
+- Fixed an undefined variable (#13768)
+
+### `llama-index-packs-secgpt` [0.1.0]
+
+- Added the SecGPT - LlamaIndex integration (#13127)
+
+### `llama-index-vector-stores-hologres` [0.1.0]
+
+- Added the Hologres vector db (#13619)
+
+### `llama-index-vector-stores-milvus` [0.1.16]
+
+- Removed FlagEmbedding as a Milvus dependency (#13767)
+- Unified collection construction regardless of the value of enable_sparse (#13773)
+
+### `llama-index-vector-stores-opensearch` [0.1.9]
+
+- Refactored to put helper methods inside the class definition (#13749)
+
 ## [2024-05-24]
 
 ### `llama-index-core` [0.10.39]
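The property graph support introduced above (`PropertyGraphIndex` in core plus `Neo4jPGStore` in the Neo4j integration) is the headline change of this release. A minimal usage sketch follows; it is an illustration rather than part of the release itself, and it assumes a reachable local Neo4j instance with the credentials shown, an `OPENAI_API_KEY` in the environment for the default LLM-based extractors, and a `./data` directory of documents. The keyword arguments mirror the linked property graph guide and the new tests later in this diff, but should be checked against the published API.

```python
# Sketch: build a property graph index backed by the new Neo4jPGStore.
# Assumptions: local Neo4j credentials below, OPENAI_API_KEY set for the
# default extractors, and a ./data folder with documents to index.
from llama_index.core import PropertyGraphIndex, SimpleDirectoryReader
from llama_index.graph_stores.neo4j import Neo4jPGStore

graph_store = Neo4jPGStore(
    username="neo4j",
    password="password",
    url="bolt://localhost:7687",
)

documents = SimpleDirectoryReader("./data").load_data()

# Entities and relations are extracted from the documents and persisted
# into the Neo4j property graph alongside the source text nodes.
index = PropertyGraphIndex.from_documents(
    documents,
    property_graph_store=graph_store,
)

retriever = index.as_retriever(include_text=True)
print(retriever.retrieve("Who works for LlamaIndex?"))
```

The `Neo4jPGStore` tests added further down in this diff exercise the same store directly through `upsert_nodes`, `upsert_relations`, `get_rel_map`, and `structured_query`.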
diff --git a/docs/docs/CHANGELOG.md b/docs/docs/CHANGELOG.md
index 7f0e9e1aa4a7a..ae8e228d8378c 100644
--- a/docs/docs/CHANGELOG.md
+++ b/docs/docs/CHANGELOG.md
@@ -1,5 +1,61 @@
 # ChangeLog
 
+## [2024-05-28]
+
+### `llama-index-core` [0.10.40]
+
+- Added `PropertyGraphIndex` and other supporting abstractions. See the [full guide](https://docs.llamaindex.ai/en/latest/module_guides/indexing/lpg_index_guide/) for more details (#13747)
+- Updated `AutoPrevNextNodePostprocessor` to allow passing in a response mode and LLM (#13771)
+- Fixed type handling with return direct (#13776)
+- Corrected the method name to `_aget_retrieved_ids_and_texts` in the retrieval evaluator (#13765)
+- Fixed `QueryTransformComponent` incorrectly calling `self._query_transform` (#13756)
+- Implemented more filters for `SimpleVectorStoreIndex` (#13365)
+
+### `llama-index-embeddings-bedrock` [0.2.0]
+
+- Added support for Bedrock Titan Embeddings v2 (#13580)
+
+### `llama-index-embeddings-oci-genai` [0.1.0]
+
+- Added Oracle Cloud Infrastructure (OCI) Generative AI embeddings (#13631)
+
+### `llama-index-embeddings-huggingface` [0.2.1]
+
+- Exposed the "safe_serialization" parameter from AutoModel (#11939)
+
+### `llama-index-graph-stores-neo4j` [0.2.0]
+
+- Added `Neo4jPGStore` for property graph support (#13747)
+
+### `llama-index-indices-managed-dashscope` [0.1.1]
+
+- Added the DashScope managed index (#13378)
+
+### `llama-index-llms-oci-genai` [0.1.0]
+
+- Added Oracle Cloud Infrastructure (OCI) Generative AI LLM support (#13631)
+
+### `llama-index-readers-feishu-wiki` [0.1.1]
+
+- Fixed an undefined variable (#13768)
+
+### `llama-index-packs-secgpt` [0.1.0]
+
+- Added the SecGPT - LlamaIndex integration (#13127)
+
+### `llama-index-vector-stores-hologres` [0.1.0]
+
+- Added the Hologres vector db (#13619)
+
+### `llama-index-vector-stores-milvus` [0.1.16]
+
+- Removed FlagEmbedding as a Milvus dependency (#13767)
+- Unified collection construction regardless of the value of enable_sparse (#13773)
+
+### `llama-index-vector-stores-opensearch` [0.1.9]
+
+- Refactored to put helper methods inside the class definition (#13749)
+
 ## [2024-05-24]
 
 ### `llama-index-core` [0.10.39]
diff --git a/docs/docs/api_reference/embeddings/oci_genai.md b/docs/docs/api_reference/embeddings/oci_genai.md
new file mode 100644
index 0000000000000..1ea78a1d30232
--- /dev/null
+++ b/docs/docs/api_reference/embeddings/oci_genai.md
@@ -0,0 +1,4 @@
+::: llama_index.embeddings.oci_genai
+    options:
+      members:
+        - OCIGenAIEmbeddings
diff --git a/docs/docs/api_reference/indices/dashscope.md b/docs/docs/api_reference/indices/dashscope.md
new file mode 100644
index 0000000000000..f4f3ffb9b7497
--- /dev/null
+++ b/docs/docs/api_reference/indices/dashscope.md
@@ -0,0 +1,4 @@
+::: llama_index.indices.managed.dashscope
+    options:
+      members:
+        - DashScopeCloudIndex
diff --git a/docs/docs/api_reference/llms/oci_genai.md b/docs/docs/api_reference/llms/oci_genai.md
new file mode 100644
index 0000000000000..83cc20a19d58e
--- /dev/null
+++ b/docs/docs/api_reference/llms/oci_genai.md
@@ -0,0 +1,4 @@
+::: llama_index.llms.oci_genai
+    options:
+      members:
+        - OCIGenAI
diff --git a/docs/docs/api_reference/node_parser/dashscope.md b/docs/docs/api_reference/node_parser/dashscope.md
new file mode 100644
index 0000000000000..63eb306018fe2
--- /dev/null
+++ b/docs/docs/api_reference/node_parser/dashscope.md
@@ -0,0 +1,4 @@
+::: llama_index.node_parser.dashscope
+    options:
+      members:
+        - DashScopeJsonNodeParser
diff --git a/docs/docs/api_reference/packs/secgpt.md b/docs/docs/api_reference/packs/secgpt.md
new file mode 100644
index 0000000000000..2da0a60c399c6
--- /dev/null
+++ b/docs/docs/api_reference/packs/secgpt.md
@@ -0,0 +1,4 @@
+::: llama_index.packs.secgpt
+    options:
+      members:
+        - SecGPTPack
diff --git a/docs/docs/api_reference/readers/dashscope.md b/docs/docs/api_reference/readers/dashscope.md
new file mode 100644
index
0000000000000..111d9451854da --- /dev/null +++ b/docs/docs/api_reference/readers/dashscope.md @@ -0,0 +1,4 @@ +::: llama_index.readers.dashscope + options: + members: + - DashScopeParse diff --git a/docs/docs/api_reference/storage/vector_store/hologres.md b/docs/docs/api_reference/storage/vector_store/hologres.md new file mode 100644 index 0000000000000..2f87712504009 --- /dev/null +++ b/docs/docs/api_reference/storage/vector_store/hologres.md @@ -0,0 +1,4 @@ +::: llama_index.vector_stores.hologres + options: + members: + - HologresVectorStore diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index cb300d0ec07f8..064ac2c150ca1 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -207,6 +207,7 @@ nav: - ./examples/embeddings/nvidia.ipynb - ./examples/embeddings/deepinfra.ipynb - ./examples/embeddings/upstage.ipynb + - ./examples/embeddings/oci_genai.ipynb - Evaluation: - ./examples/evaluation/TonicValidateEvaluators.ipynb - ./examples/evaluation/semantic_similarity_eval.ipynb @@ -328,6 +329,8 @@ nav: - ./examples/llm/unify.ipynb - ./examples/llm/ipex_llm_gpu.ipynb - ./examples/llm/deepinfra.ipynb + - ./examples/llm/oci_genai.ipynb + - ./examples/llm/nvidia_nim.ipynb - Low Level: - ./examples/low_level/oss_ingestion_retrieval.ipynb - ./examples/low_level/fusion_retriever.ipynb @@ -575,7 +578,6 @@ nav: - ./examples/vector_stores/AlibabaCloudOpenSearchIndexDemo.ipynb - ./examples/vector_stores/RelytDemo.ipynb - ./examples/vector_stores/HologresDemo.ipynb - - Component Guides: - ./module_guides/index.md - Models: @@ -755,6 +757,7 @@ nav: - ./api_reference/embeddings/mistralai.md - ./api_reference/embeddings/nomic.md - ./api_reference/embeddings/nvidia.md + - ./api_reference/embeddings/oci_genai.md - ./api_reference/embeddings/octoai.md - ./api_reference/embeddings/ollama.md - ./api_reference/embeddings/openai.md @@ -783,6 +786,7 @@ nav: - ./api_reference/evaluation/tonic_validate.md - Indexes: - ./api_reference/indices/colbert.md + - ./api_reference/indices/dashscope.md - ./api_reference/indices/document_summary.md - ./api_reference/indices/google.md - ./api_reference/indices/index.md @@ -790,6 +794,7 @@ nav: - ./api_reference/indices/knowledge_graph.md - ./api_reference/indices/llama_cloud.md - ./api_reference/indices/postgresml.md + - ./api_reference/indices/property_graph.md - ./api_reference/indices/summary.md - ./api_reference/indices/tree.md - ./api_reference/indices/vectara.md @@ -844,6 +849,7 @@ nav: - ./api_reference/llms/nvidia.md - ./api_reference/llms/nvidia_tensorrt.md - ./api_reference/llms/nvidia_triton.md + - ./api_reference/llms/oci_genai.md - ./api_reference/llms/octoai.md - ./api_reference/llms/ollama.md - ./api_reference/llms/openai.md @@ -920,6 +926,7 @@ nav: - ./api_reference/packs/resume_screener.md - ./api_reference/packs/retry_engine_weaviate.md - ./api_reference/packs/searchain.md + - ./api_reference/packs/secgpt.md - ./api_reference/packs/self_discover.md - ./api_reference/packs/self_rag.md - ./api_reference/packs/sentence_window_retriever.md @@ -959,6 +966,7 @@ nav: - ./api_reference/multi_modal_llms/openai.md - ./api_reference/multi_modal_llms/replicate.md - Node Parsers & Text Splitters: + - ./api_reference/node_parser/dashscope.md - ./api_reference/node_parsers/code.md - ./api_reference/node_parsers/hierarchical.md - ./api_reference/node_parsers/html.md @@ -1096,6 +1104,7 @@ nav: - ./api_reference/readers/couchbase.md - ./api_reference/readers/couchdb.md - ./api_reference/readers/dad_jokes.md + - ./api_reference/readers/dashscope.md - 
./api_reference/readers/dashvector.md - ./api_reference/readers/database.md - ./api_reference/readers/deeplake.md @@ -1310,6 +1319,7 @@ nav: - ./api_reference/storage/vector_store/faiss.md - ./api_reference/storage/vector_store/firestore.md - ./api_reference/storage/vector_store/google.md + - ./api_reference/storage/vector_store/hologres.md - ./api_reference/storage/vector_store/index.md - ./api_reference/storage/vector_store/jaguar.md - ./api_reference/storage/vector_store/kdbai.md @@ -1931,6 +1941,12 @@ plugins: - ../llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-azure - ../llama-index-integrations/llms/llama-index-llms-deepinfra - ../llama-index-integrations/vector_stores/llama-index-vector-stores-hologres + - ../llama-index-packs/llama-index-packs-secgpt + - ../llama-index-integrations/indices/llama-index-indices-managed-dashscope + - ../llama-index-integrations/embeddings/llama-index-embeddings-oci-genai + - ../llama-index-integrations/node_parser/llama-index-node-parser-relational-dashscope + - ../llama-index-integrations/readers/llama-index-readers-dashscope + - ../llama-index-integrations/llms/llama-index-llms-oci-genai - redirects: redirect_maps: ./api/llama_index.vector_stores.MongoDBAtlasVectorSearch.html: api_reference/storage/vector_store/mongodb.md diff --git a/docs/prepare_for_build.py b/docs/prepare_for_build.py index 1559c63f9375f..8226715d48169 100644 --- a/docs/prepare_for_build.py +++ b/docs/prepare_for_build.py @@ -79,6 +79,7 @@ "memory": "Memory", "multi_modal_llms": "Multi-Modal LLMs", "node_parsers": "Node Parsers & Text Splitters", + "node_parser": "Node Parsers & Text Splitters", "objects": "Object Stores", "output_parsers": "Output Parsers", "postprocessor": "Node Postprocessors", diff --git a/llama-index-core/llama_index/core/__init__.py b/llama-index-core/llama_index/core/__init__.py index 9d36c00ba00a6..280b611096a0f 100644 --- a/llama-index-core/llama_index/core/__init__.py +++ b/llama-index-core/llama_index/core/__init__.py @@ -1,6 +1,6 @@ """Init file of LlamaIndex.""" -__version__ = "0.10.39" +__version__ = "0.10.40" import logging from logging import NullHandler diff --git a/llama-index-core/pyproject.toml b/llama-index-core/pyproject.toml index b7ba7ee91da24..83d5fb0e8c177 100644 --- a/llama-index-core/pyproject.toml +++ b/llama-index-core/pyproject.toml @@ -43,7 +43,7 @@ name = "llama-index-core" packages = [{include = "llama_index"}] readme = "README.md" repository = "https://github.com/run-llama/llama_index" -version = "0.10.39post1" +version = "0.10.40" [tool.poetry.dependencies] SQLAlchemy = {extras = ["asyncio"], version = ">=1.4.49"} diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-huggingface/pyproject.toml b/llama-index-integrations/embeddings/llama-index-embeddings-huggingface/pyproject.toml index 76c108070ce6f..c061032756d1c 100644 --- a/llama-index-integrations/embeddings/llama-index-embeddings-huggingface/pyproject.toml +++ b/llama-index-integrations/embeddings/llama-index-embeddings-huggingface/pyproject.toml @@ -28,7 +28,7 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-embeddings-huggingface" readme = "README.md" -version = "0.2.0" +version = "0.2.1" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" diff --git a/llama-index-integrations/graph_stores/llama-index-graph-stores-neo4j/pyproject.toml b/llama-index-integrations/graph_stores/llama-index-graph-stores-neo4j/pyproject.toml index c6743c67ea17e..38c46f87928c9 100644 --- 
a/llama-index-integrations/graph_stores/llama-index-graph-stores-neo4j/pyproject.toml
+++ b/llama-index-integrations/graph_stores/llama-index-graph-stores-neo4j/pyproject.toml
@@ -28,11 +28,11 @@ exclude = ["**/BUILD"]
 license = "MIT"
 name = "llama-index-graph-stores-neo4j"
 readme = "README.md"
-version = "0.1.4"
+version = "0.2.0"
 
 [tool.poetry.dependencies]
 python = ">=3.8.1,<4.0"
-llama-index-core = "^0.10.1"
+llama-index-core = "^0.10.40"
 neo4j = "^5.16.0"
 
 [tool.poetry.group.dev.dependencies]
diff --git a/llama-index-integrations/graph_stores/llama-index-graph-stores-neo4j/tests/test_pg_stores_neo4j.py b/llama-index-integrations/graph_stores/llama-index-graph-stores-neo4j/tests/test_pg_stores_neo4j.py
new file mode 100644
index 0000000000000..0f8f11adfc92e
--- /dev/null
+++ b/llama-index-integrations/graph_stores/llama-index-graph-stores-neo4j/tests/test_pg_stores_neo4j.py
@@ -0,0 +1,108 @@
+import os
+import pytest
+
+from llama_index.graph_stores.neo4j import Neo4jPGStore
+from llama_index.core.graph_stores.types import Relation, EntityNode
+from llama_index.core.schema import TextNode
+
+neo4j_url = os.environ.get("NEO4J_TEST_URL")
+neo4j_user = os.environ.get("NEO4J_TEST_USER")
+neo4j_password = os.environ.get("NEO4J_TEST_PASSWORD")
+
+if not neo4j_url or not neo4j_user or not neo4j_password:
+    neo4j_available = False
+else:
+    neo4j_available = True
+
+
+@pytest.fixture()
+def pg_store() -> Neo4jPGStore:
+    if not neo4j_available:
+        pytest.skip("No Neo4j credentials provided")
+    pg_store = Neo4jPGStore(username=neo4j_user, password=neo4j_password, url=neo4j_url)
+    pg_store.structured_query("MATCH (n) DETACH DELETE n")
+    return pg_store
+
+
+def test_neo4j_pg_store(pg_store: Neo4jPGStore) -> None:
+    # Create two entity nodes
+    entity1 = EntityNode(label="PERSON", name="Logan", properties={"age": 28})
+    entity2 = EntityNode(label="ORGANIZATION", name="LlamaIndex")
+
+    # Create a relation
+    relation = Relation(
+        label="WORKS_FOR",
+        source_id=entity1.id,
+        target_id=entity2.id,
+        properties={"since": 2023},
+    )
+
+    pg_store.upsert_nodes([entity1, entity2])
+    pg_store.upsert_relations([relation])
+
+    source_node = TextNode(text="Logan (age 28), works for LlamaIndex since 2023.")
+    relations = [
+        Relation(
+            label="MENTIONS",
+            target_id=entity1.id,
+            source_id=source_node.node_id,
+        ),
+        Relation(
+            label="MENTIONS",
+            target_id=entity2.id,
+            source_id=source_node.node_id,
+        ),
+    ]
+
+    pg_store.upsert_llama_nodes([source_node])
+    pg_store.upsert_relations(relations)
+
+    kg_nodes = pg_store.get(ids=[entity1.id])
+    assert len(kg_nodes) == 1
+    assert kg_nodes[0].label == "PERSON"
+    assert kg_nodes[0].name == "Logan"
+
+    kg_nodes = pg_store.get(properties={"age": 28})
+    assert len(kg_nodes) == 1
+    assert kg_nodes[0].label == "PERSON"
+    assert kg_nodes[0].name == "Logan"
+
+    # get paths from a node
+    paths = pg_store.get_rel_map(kg_nodes, depth=1)
+    for path in paths:
+        assert path[0].id == entity1.id
+        assert path[2].id == entity2.id
+        assert path[1].id == relation.id
+
+    query = "match (n:`__Entity__`) return n"
+    result = pg_store.structured_query(query)
+    assert len(result) == 2
+
+    # get the original text node back
+    llama_nodes = pg_store.get_llama_nodes([source_node.node_id])
+    assert len(llama_nodes) == 1
+    assert llama_nodes[0].text == source_node.text
+
+    # Upsert a new node
+    new_node = EntityNode(
+        label="PERSON", name="Logan", properties={"age": 28, "location": "Canada"}
+    )
+    pg_store.upsert_nodes([new_node])
+    kg_nodes = pg_store.get(properties={"age": 28})
+    assert
len(kg_nodes) == 1 + assert kg_nodes[0].label == "PERSON" + assert kg_nodes[0].name == "Logan" + assert kg_nodes[0].properties["location"] == "Canada" + + # deleting + # delete our entities + pg_store.delete(ids=[entity1.id, entity2.id]) + + # delete our text nodes + pg_store.delete(ids=[source_node.node_id]) + + nodes = pg_store.get(ids=[entity1.id, entity2.id]) + assert len(nodes) == 0 + + text_nodes = pg_store.get_llama_nodes([source_node.node_id]) + assert len(text_nodes) == 0 diff --git a/llama-index-integrations/readers/llama-index-readers-dashscope/pyproject.toml b/llama-index-integrations/readers/llama-index-readers-dashscope/pyproject.toml index b875b64a79c72..e2057c5bba128 100644 --- a/llama-index-integrations/readers/llama-index-readers-dashscope/pyproject.toml +++ b/llama-index-integrations/readers/llama-index-readers-dashscope/pyproject.toml @@ -11,7 +11,7 @@ skip = "*.csv,*.html,*.json,*.jsonl,*.pdf,*.txt,*.ipynb" [tool.llamahub] contains_example = false -import_path = "llama_index.readers.dashscope.base" +import_path = "llama_index.readers.dashscope" [tool.llamahub.class_authors] DashScopeParse = "phantomgrapes" diff --git a/poetry.lock b/poetry.lock index 07180be22db37..619f9f9375d38 100644 --- a/poetry.lock +++ b/poetry.lock @@ -123,13 +123,13 @@ files = [ [[package]] name = "anyio" -version = "4.3.0" +version = "4.4.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.8" files = [ - {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, - {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, + {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, + {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, ] [package.dependencies] @@ -1508,13 +1508,13 @@ llama-index-llms-openai = ">=0.1.1,<0.2.0" [[package]] name = "llama-index-core" -version = "0.10.39" +version = "0.10.40" description = "Interface between LLMs and your data" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "llama_index_core-0.10.39-py3-none-any.whl", hash = "sha256:4b68724936f09220e417951572739f041ec2184caf9eb4e45017f4419cb98bd9"}, - {file = "llama_index_core-0.10.39.tar.gz", hash = "sha256:67b417676ff9b50d89d9379251c7de51bd68da042c47db934ed5794824eb41a5"}, + {file = "llama_index_core-0.10.40-py3-none-any.whl", hash = "sha256:c08df2a46ebf417ca0f1b68a5d797df0c053796d96c93e1cdc0456d11a89753a"}, + {file = "llama_index_core-0.10.40.tar.gz", hash = "sha256:72d30aea7a77f87484abe99a341c945021c95ea5e2adcc60199094931a07623a"}, ] [package.dependencies] @@ -2241,18 +2241,15 @@ twitter = ["twython"] [[package]] name = "nodeenv" -version = "1.8.0" +version = "1.9.0" description = "Node.js virtual environment builder" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, - {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, + {file = "nodeenv-1.9.0-py2.py3-none-any.whl", hash = 
"sha256:508ecec98f9f3330b636d4448c0f1a56fc68017c68f1e7857ebc52acf0eb879a"}, + {file = "nodeenv-1.9.0.tar.gz", hash = "sha256:07f144e90dae547bf0d4ee8da0ee42664a42a04e02ed68e06324348dafe4bdb1"}, ] -[package.dependencies] -setuptools = "*" - [[package]] name = "numpy" version = "1.24.4" @@ -2292,13 +2289,13 @@ files = [ [[package]] name = "openai" -version = "1.30.3" +version = "1.30.4" description = "The official Python library for the openai API" optional = false python-versions = ">=3.7.1" files = [ - {file = "openai-1.30.3-py3-none-any.whl", hash = "sha256:f88119c8a848998be533c71ab8aa832446fa72b7ddbc70917c3f5886dc132051"}, - {file = "openai-1.30.3.tar.gz", hash = "sha256:8e1bcdca2b96fe3636ab522fa153d88efde1b702d12ec32f1c73e9553ff93f45"}, + {file = "openai-1.30.4-py3-none-any.whl", hash = "sha256:fb2635efd270efaf9fac2e07558d7948373b940637d3ae3ab624c1a983d4f03f"}, + {file = "openai-1.30.4.tar.gz", hash = "sha256:f3488d9a1c4e0d332b019377d27d7cb4b3d6103fd5d0a416c7ceac780d1d9b88"}, ] [package.dependencies] @@ -2575,13 +2572,13 @@ virtualenv = ">=20.10.0" [[package]] name = "prompt-toolkit" -version = "3.0.43" +version = "3.0.45" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.7.0" files = [ - {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, - {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, + {file = "prompt_toolkit-3.0.45-py3-none-any.whl", hash = "sha256:a29b89160e494e3ea8622b09fa5897610b437884dcdcd054fdc1308883326c2a"}, + {file = "prompt_toolkit-3.0.45.tar.gz", hash = "sha256:07c60ee4ab7b7e90824b61afa840c8f5aad2d46b3e2e10acc33d8ecc94a49089"}, ] [package.dependencies] @@ -3232,21 +3229,6 @@ files = [ {file = "ruff-0.0.292.tar.gz", hash = "sha256:1093449e37dd1e9b813798f6ad70932b57cf614e5c2b5c51005bf67d55db33ac"}, ] -[[package]] -name = "setuptools" -version = "70.0.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, - {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] - [[package]] name = "six" version = "1.16.0" @@ -4215,13 +4197,13 @@ multidict = ">=4.0" [[package]] name = "zipp" -version = "3.18.2" +version = "3.19.0" description = "Backport of pathlib-compatible object wrapper for zip 
files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.18.2-py3-none-any.whl", hash = "sha256:dce197b859eb796242b0622af1b8beb0a722d52aa2f57133ead08edd5bf5374e"}, - {file = "zipp-3.18.2.tar.gz", hash = "sha256:6278d9ddbcfb1f1089a88fde84481528b07b0e10474e09dcfe53dad4069fa059"}, + {file = "zipp-3.19.0-py3-none-any.whl", hash = "sha256:96dc6ad62f1441bcaccef23b274ec471518daf4fbbc580341204936a5a3dddec"}, + {file = "zipp-3.19.0.tar.gz", hash = "sha256:952df858fb3164426c976d9338d3961e8e8b3758e2e059e0f754b8c4262625ee"}, ] [package.extras] @@ -4231,4 +4213,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "f049b8ec55ae89485074b2cfc4eecc5a036844d7990474dbd428d47067b4f920" +content-hash = "db57688dc0469f5df51c670b2c6b9d18059a4dc763c76292b519a8c3ea94f467" diff --git a/pyproject.toml b/pyproject.toml index f11532ae87457..5c8dbe6f3d866 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,7 +44,7 @@ name = "llama-index" packages = [{from = "_llama-index", include = "llama_index"}] readme = "README.md" repository = "https://github.com/run-llama/llama_index" -version = "0.10.39" +version = "0.10.40" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" @@ -57,7 +57,7 @@ llama-index-agent-openai = ">=0.1.4,<0.3.0" llama-index-readers-file = "^0.1.4" llama-index-readers-llama-parse = "^0.1.2" llama-index-indices-managed-llama-cloud = "^0.1.2" -llama-index-core = "^0.10.39" +llama-index-core = "^0.10.40" llama-index-multi-modal-llms-openai = "^0.1.3" llama-index-cli = "^0.1.2"