v0.10.57 (#14893)
logan-markewich committed Jul 23, 2024
1 parent a961dfb commit f39df8f
Showing 21 changed files with 231 additions and 123 deletions.
52 changes: 50 additions & 2 deletions CHANGELOG.md
@@ -1,10 +1,30 @@
# ChangeLog

## Unreleased
## [2024-07-22]

### `llama-index-core` [Unreleased]
### `llama-index-core` [v0.10.57]

- Add an optional parameter similarity_score to VectorContextRetrieve… (#14831)
- add property extraction (using property names and optional descriptions) for KGs (#14707)
- Able to attach output classes to LLMs (#14747)
- Add streaming for tool calling / structured extraction (#14759) (see the sketch after this list)
- Fix issue from removing private variables when copying/pickling (#14860)
- Fix empty array being sent to vector store in ingestion pipeline (#14859)
- optimize ingestion pipeline deduping (#14858)
- Add an optional parameter similarity_score to VectorContextRetriever (#14831)
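
The two structured-output items above (#14747, #14759) are easiest to read as code. A minimal, hypothetical sketch follows: `as_structured_llm` and `stream_structured_predict` are method names assumed from these notes rather than verified signatures, and the model string is a placeholder.

```python
# Hypothetical sketch of attaching an output class to an LLM and streaming a
# structured extraction; method names are assumed from the notes above.
from llama_index.core.bridge.pydantic import BaseModel
from llama_index.core.prompts import PromptTemplate
from llama_index.llms.openai import OpenAI


class Invoice(BaseModel):
    vendor: str
    total: float


llm = OpenAI(model="gpt-4o-mini")  # placeholder model
prompt = PromptTemplate("Extract the invoice fields from: {text}")

# Attach an output class directly to the LLM (#14747); the response is assumed
# to carry the parsed Invoice.
structured_llm = llm.as_structured_llm(Invoice)
print(structured_llm.complete("ACME Corp billed a total of $120.50"))

# Stream the structured extraction (#14759): progressively filled Invoice
# objects are yielded as the response is generated.
for partial in llm.stream_structured_predict(
    Invoice, prompt, text="ACME Corp billed a total of $120.50"
):
    print(partial)
```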

### `llama-index-llms-azure-openai` [0.1.10]

- Bugfix: AzureOpenAI may fail with custom azure_ad_token_provider (#14869)
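
A hedged sketch of the configuration the fix targets: constructing `AzureOpenAI` with a custom `azure_ad_token_provider` instead of an API key. The endpoint, deployment, and API version are placeholders, and the `azure-identity` helper is just one way to build such a provider.

```python
# Hypothetical usage of the code path fixed by #14869; all Azure resource
# values below are placeholders.
from azure.identity import DefaultAzureCredential, get_bearer_token_provider
from llama_index.llms.azure_openai import AzureOpenAI

token_provider = get_bearer_token_provider(
    DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
)

llm = AzureOpenAI(
    engine="my-gpt-4o-deployment",  # deployment name (placeholder)
    azure_endpoint="https://my-resource.openai.azure.com/",
    api_version="2024-02-01",
    use_azure_ad=True,
    azure_ad_token_provider=token_provider,
)
print(llm.complete("ping"))
```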

### `llama-index-llms-bedrock-converse` [0.1.5]

- feat: ✨ Implement async functionality in BedrockConverse (#14326)
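
Roughly, the async support is exercised as below; a sketch that assumes BedrockConverse now backs the standard async LLM methods (`acomplete`, `achat`, `astream_chat`) natively, with the model id and region as placeholders.

```python
# Hypothetical sketch of the async path added by #14326; model id and region
# are placeholders, and credentials come from the default AWS chain.
import asyncio

from llama_index.llms.bedrock_converse import BedrockConverse


async def main() -> None:
    llm = BedrockConverse(
        model="anthropic.claude-3-haiku-20240307-v1:0",
        region_name="us-east-1",
    )
    response = await llm.acomplete("Say hello in one short sentence.")
    print(response)


asyncio.run(main())
```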

### `llama-index-llms-langchain` [0.3.0]

- Make some dependencies optional (see the sketch after this list)
- bump langchain version in integration (#14879)
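
In practice, 0.3.0 imports the OpenAI/Anyscale/Fireworks helpers lazily, so only the integration actually being wrapped needs to be installed. A hedged sketch, assuming the usual `LangChainLLM` wrapper with a separately installed `llama-index-llms-openai`:

```python
# Sketch assuming llama-index-llms-openai is installed alongside the langchain
# wrapper (it is no longer pulled in automatically), e.g.:
#   pip install llama-index-llms-langchain llama-index-llms-openai langchain-openai
from langchain_openai import ChatOpenAI
from llama_index.llms.langchain import LangChainLLM

llm = LangChainLLM(llm=ChatOpenAI(model="gpt-4o-mini"))  # placeholder model
print(llm.complete("hello"))
```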

### `llama-index-llms-ollama` [0.1.6]

@@ -14,6 +34,34 @@

- align deps (#14850)

### `llama-index-readers-notion` [0.1.10]

- update notion reader to handle duplicate pages, database+page ids (#14861)
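
A heavily hedged sketch of what the fix addresses, passing overlapping page and database ids in one `load_data` call; the parameter names and token are assumptions, not verified against the updated signature.

```python
# Hypothetical usage of NotionPageReader after #14861; parameter names are
# assumptions and the integration token is a placeholder.
from llama_index.readers.notion import NotionPageReader

reader = NotionPageReader(integration_token="secret_placeholder")
documents = reader.load_data(
    page_ids=["page-id-1", "page-id-1"],  # duplicate ids should be deduplicated
    database_id="database-id-1",          # pages from the database are merged in
)
print(len(documents))
```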

### `llama-index-vector-stores-milvus` [0.1.21]

- Implements delete_nodes() and clear() for Weaviate, OpenSearch, Milvus, Postgres, and Pinecone Vector Stores (#14800)
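
The same pair of maintenance methods lands in several stores via #14800; a minimal sketch against Milvus, where the connection values are placeholders and the `node_ids` keyword is assumed.

```python
# Sketch of the maintenance hooks from #14800; the same delete_nodes()/clear()
# methods were also added to the Weaviate, OpenSearch, Postgres, and Pinecone
# stores. Connection values are placeholders.
from llama_index.vector_stores.milvus import MilvusVectorStore

vector_store = MilvusVectorStore(uri="./milvus_demo.db", dim=1536)

# Delete specific nodes by id.
vector_store.delete_nodes(node_ids=["node-id-1", "node-id-2"])

# Remove everything in the collection.
vector_store.clear()
```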

### `llama-index-vector-stores-mongodb` [0.1.8]

- MongoDB Atlas Vector Search: Enhanced Metadata Filtering (#14856)
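
A sketch of the kind of filter the enhanced metadata filtering supports; the filter classes are core llama-index constructs, while the index/retriever they would be passed to is assumed to already exist on top of `MongoDBAtlasVectorSearch`.

```python
# Sketch for #14856: build richer metadata filters and hand them to a retriever
# backed by MongoDB Atlas Vector Search. Index construction is omitted/assumed.
from llama_index.core.vector_stores import (
    FilterOperator,
    MetadataFilter,
    MetadataFilters,
)

filters = MetadataFilters(
    filters=[
        MetadataFilter(key="year", operator=FilterOperator.GTE, value=2023),
        MetadataFilter(key="category", operator=FilterOperator.EQ, value="report"),
    ]
)

# retriever = index.as_retriever(filters=filters)  # `index` assumed to exist
# nodes = retriever.retrieve("quarterly revenue")
```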

### `llama-index-vector-stores-opensearch` [0.1.13]

- Implements delete_nodes() and clear() for Weaviate, OpenSearch, Milvus, Postgres, and Pinecone Vector Stores (#14800)

### `llama-index-vector-stores-pinecone` [0.1.8]

- Implements delete_nodes() and clear() for Weaviate, OpenSearch, Milvus, Postgres, and Pinecone Vector Stores (#14800)

### `llama-index-vector-stores-postgres` [0.1.12]

- Implements delete_nodes() and clear() for Weaviate, OpenSearch, Milvus, Postgres, and Pinecone Vector Stores (#14800)

### `llama-index-vector-stores-weaviate` [1.0.2]

- Implements delete_nodes() and clear() for Weaviate, OpenSearch, Milvus, Postgres, and Pinecone Vector Stores (#14800)

## [2024-07-19]

### `llama-index-core` [0.10.56]
52 changes: 50 additions & 2 deletions docs/docs/CHANGELOG.md
2 changes: 1 addition & 1 deletion llama-index-core/llama_index/core/__init__.py
@@ -1,6 +1,6 @@
"""Init file of LlamaIndex."""

__version__ = "0.10.56"
__version__ = "0.10.57"

import logging
from logging import NullHandler
2 changes: 1 addition & 1 deletion llama-index-core/pyproject.toml
@@ -43,7 +43,7 @@ name = "llama-index-core"
packages = [{include = "llama_index"}]
readme = "README.md"
repository = "https://github.com/run-llama/llama_index"
version = "0.10.56"
version = "0.10.57"

[tool.poetry.dependencies]
SQLAlchemy = {extras = ["asyncio"], version = ">=1.4.49"}
@@ -27,11 +27,11 @@ exclude = ["**/BUILD"]
license = "MIT"
name = "llama-index-llms-anthropic"
readme = "README.md"
version = "0.1.15"
version = "0.1.16"

[tool.poetry.dependencies]
python = ">=3.8.1,<4.0"
llama-index-core = "^0.10.1"
llama-index-core = "^0.10.57"
anthropic = ">=0.26.2, <0.29.0"

[tool.poetry.group.dev.dependencies]
@@ -27,11 +27,11 @@ exclude = ["**/BUILD"]
license = "MIT"
name = "llama-index-llms-bedrock-converse"
readme = "README.md"
version = "0.1.5"
version = "0.1.6"

[tool.poetry.dependencies]
python = ">=3.8.1,<4.0"
llama-index-core = "^0.10.1"
llama-index-core = "^0.10.57"
llama-index-llms-anthropic = "^0.1.7"
boto3 = "^1.34.122"
aioboto3 = "^13.1.1"
@@ -27,11 +27,11 @@ exclude = ["**/BUILD"]
license = "MIT"
name = "llama-index-llms-deepinfra"
readme = "README.md"
version = "0.1.5"
version = "0.1.6"

[tool.poetry.dependencies]
python = ">=3.8.1,<4.0"
llama-index-core = "^0.10.1"
llama-index-core = "^0.10.57"
llama-index-llms-openai = "^0.1.1"
aiohttp = "^3.8.1"
tenacity = ">=8.1.0,<8.4.0"
@@ -28,11 +28,11 @@ exclude = ["**/BUILD"]
license = "MIT"
name = "llama-index-llms-huggingface"
readme = "README.md"
version = "0.2.4"
version = "0.2.5"

[tool.poetry.dependencies]
python = ">=3.8.1,<4.0"
llama-index-core = "^0.10.41"
llama-index-core = "^0.10.57"
huggingface-hub = "^0.23.0"
torch = "^2.1.2"
text-generation = "^0.7.0"
@@ -2,9 +2,6 @@

from llama_index.core.base.llms.types import ChatMessage, LLMMetadata, MessageRole
from llama_index.core.constants import AI21_J2_CONTEXT_WINDOW, COHERE_CONTEXT_WINDOW
from llama_index.llms.anyscale.utils import anyscale_modelname_to_contextsize
from llama_index.llms.fireworks.utils import fireworks_modelname_to_contextsize
from llama_index.llms.openai.utils import openai_modelname_to_contextsize


class LC:
@@ -99,27 +96,59 @@ def get_llm_metadata(llm: LC.BaseLanguageModel) -> LLMMetadata:
is_chat_model_ = is_chat_model(llm)

if isinstance(llm, LC.OpenAI):
try:
from llama_index.llms.openai.utils import openai_modelname_to_contextsize
except ImportError:
raise ImportError(
"Please `pip install llama-index-llms-openai` to use OpenAI models."
)

return LLMMetadata(
context_window=openai_modelname_to_contextsize(llm.model_name),
num_output=llm.max_tokens,
is_chat_model=is_chat_model_,
model_name=llm.model_name,
)
elif isinstance(llm, LC.ChatAnyscale):
try:
from llama_index.llms.anyscale.utils import (
anyscale_modelname_to_contextsize,
)
except ImportError:
raise ImportError(
"Please `pip install llama-index-llms-anyscale` to use Anyscale models."
)

return LLMMetadata(
context_window=anyscale_modelname_to_contextsize(llm.model_name),
num_output=llm.max_tokens or -1,
is_chat_model=is_chat_model_,
model_name=llm.model_name,
)
elif isinstance(llm, LC.ChatFireworks):
try:
from llama_index.llms.fireworks.utils import (
fireworks_modelname_to_contextsize,
)
except ImportError:
raise ImportError(
"Please `pip install llama-index-llms-fireworks` to use Fireworks models."
)

return LLMMetadata(
context_window=fireworks_modelname_to_contextsize(llm.model_name),
num_output=llm.max_tokens or -1,
is_chat_model=is_chat_model_,
model_name=llm.model_name,
)
elif isinstance(llm, LC.ChatOpenAI):
try:
from llama_index.llms.openai.utils import openai_modelname_to_contextsize
except ImportError:
raise ImportError(
"Please `pip install llama-index-llms-openai` to use OpenAI models."
)

return LLMMetadata(
context_window=openai_modelname_to_contextsize(llm.model_name),
num_output=llm.max_tokens or -1,
@@ -27,15 +27,12 @@ exclude = ["**/BUILD"]
license = "MIT"
name = "llama-index-llms-langchain"
readme = "README.md"
version = "0.2.0"
version = "0.3.0"

[tool.poetry.dependencies]
python = ">=3.8.1,<4.0"
llama-index-core = "^0.10.41"
llama-index-llms-anyscale = "^0.1.1"
langchain = ">=0.1.3"
llama-index-llms-openai = "^0.1.1"
llama-index-llms-fireworks = "^0.1.1"

[tool.poetry.group.dev.dependencies]
ipython = "8.10.0"
@@ -27,11 +27,11 @@ exclude = ["**/BUILD"]
license = "MIT"
name = "llama-index-llms-mistralai"
readme = "README.md"
version = "0.1.18"
version = "0.1.19"

[tool.poetry.dependencies]
python = ">=3.9,<4.0"
llama-index-core = "^0.10.39"
llama-index-core = "^0.10.57"
mistralai = ">=0.4.2"

[tool.poetry.group.dev.dependencies]
@@ -29,11 +29,11 @@ exclude = ["**/BUILD"]
license = "MIT"
name = "llama-index-llms-openai"
readme = "README.md"
version = "0.1.26"
version = "0.1.27"

[tool.poetry.dependencies]
python = ">=3.8.1,<4.0"
llama-index-core = "^0.10.24"
llama-index-core = "^0.10.57"

[tool.poetry.group.dev.dependencies]
ipython = "8.10.0"
@@ -27,11 +27,11 @@ exclude = ["**/BUILD"]
license = "MIT"
name = "llama-index-llms-text-generation-inference"
readme = "README.md"
version = "0.1.2"
version = "0.1.3"

[tool.poetry.dependencies]
python = ">=3.8.1,<4.0"
llama-index-core = "^0.10.41"
llama-index-core = "^0.10.57"
text-generation = "^0.7.0"
llama-index-utils-huggingface = "^0.1.1"

@@ -27,11 +27,11 @@ exclude = ["**/BUILD"]
license = "MIT"
name = "llama-index-llms-vertex"
readme = "README.md"
version = "0.2.1"
version = "0.2.2"

[tool.poetry.dependencies]
python = ">=3.8.1,<4.0"
llama-index-core = "^0.10.1"
llama-index-core = "^0.10.57"
google-cloud-aiplatform = "^1.39.0"
pyarrow = "^15.0.2"

@@ -27,12 +27,12 @@ exclude = ["**/BUILD"]
license = "MIT"
name = "llama-index-program-openai"
readme = "README.md"
version = "0.1.6"
version = "0.1.7"

[tool.poetry.dependencies]
python = ">=3.8.1,<4.0"
llama-index-llms-openai = ">=0.1.1"
llama-index-core = "^0.10.1"
llama-index-core = "^0.10.57"
llama-index-agent-openai = ">=0.1.1,<0.3.0"

[tool.poetry.group.dev.dependencies]
@@ -29,11 +29,11 @@ license = "MIT"
maintainers = ["Disiok"]
name = "llama-index-packs-resume-screener"
readme = "README.md"
version = "0.1.5"
version = "0.1.6"

[tool.poetry.dependencies]
python = ">=3.8.1,<4.0"
llama-index-core = "^0.10.24"
llama-index-core = "^0.10.57"
pypdf = "^4.0.1"
llama-index-readers-file = "^0.1.1"
llama-index-llms-openai = "^0.1.13"
