From 45f0427652551334e4da4d66d4666331948d39c6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20Louf?= Date: Tue, 16 Apr 2024 10:19:15 +0200 Subject: [PATCH 1/2] Make `torch` import optional --- docs/installation.md | 4 ++-- docs/reference/models/exllamav2.md | 4 ++++ docs/reference/models/mamba.md | 4 ++++ outlines/models/exllamav2.py | 10 +++++----- outlines/models/mamba.py | 10 +++++----- 5 files changed, 20 insertions(+), 12 deletions(-) diff --git a/docs/installation.md b/docs/installation.md index 12f113d36..1017b627e 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -16,8 +16,8 @@ Outlines supports OpenAI, transformers, Mamba, llama.cpp and exllama2 but **you pip install openai pip install transformers datasets accelerate torch pip install llama-cpp-python -pip install exllamav2 torch -pip install mamba_ssm torch +pip install exllamav2 transformers torch +pip install mamba_ssm transformers torch pip install vllm ``` diff --git a/docs/reference/models/exllamav2.md b/docs/reference/models/exllamav2.md index e4f9ae9d0..afe542112 100644 --- a/docs/reference/models/exllamav2.md +++ b/docs/reference/models/exllamav2.md @@ -1,3 +1,7 @@ # ExllamaV2 +```bash +pip install exllamav2 transformers torch +``` + *Coming soon* diff --git a/docs/reference/models/mamba.md b/docs/reference/models/mamba.md index 7a720516a..ac6db3682 100644 --- a/docs/reference/models/mamba.md +++ b/docs/reference/models/mamba.md @@ -1,3 +1,7 @@ # Mamba +```bash +pip install mamba_ssm transformers torch +``` + *Coming soon* diff --git a/outlines/models/exllamav2.py b/outlines/models/exllamav2.py index a3e97b6ff..0ec6ef033 100644 --- a/outlines/models/exllamav2.py +++ b/outlines/models/exllamav2.py @@ -1,11 +1,10 @@ import os from typing import TYPE_CHECKING, Optional -import torch - if TYPE_CHECKING: from exllamav2 import ExLlamaV2, ExLlamaV2Cache, ExLlamaV2Lora from transformers import PreTrainedTokenizer + import torch from .transformers import TransformerTokenizer @@ -28,8 
+27,9 @@ def __init__( self.past_seq = None self.lora = lora - def forward(self, input_ids: torch.LongTensor, *_): + def forward(self, input_ids: "torch.LongTensor", *_): """Compute a forward pass through the exl2 model.""" + import torch # Caching with past_seq reset = True @@ -74,7 +74,7 @@ def forward(self, input_ids: torch.LongTensor, *_): seq_tensor[-1:].view(1, -1), self.cache, loras=[self.lora] ) - def __call__(self, input_ids: torch.LongTensor, *_) -> torch.FloatTensor: + def __call__(self, input_ids: "torch.LongTensor", *_) -> "torch.FloatTensor": logits = self.forward(input_ids) next_token_logits = logits[..., -1, :] @@ -169,7 +169,7 @@ def exl2( from transformers import AutoTokenizer except ImportError: raise ImportError( - "The `exllamav2` library needs to be installed in order to use `exllamav2` models." + "The `exllamav2`, `transformers` and `torch` libraries need to be installed in order to use `exllamav2` models." ) # Load tokenizer diff --git a/outlines/models/mamba.py b/outlines/models/mamba.py index 1375a3811..d3dabf669 100644 --- a/outlines/models/mamba.py +++ b/outlines/models/mamba.py @@ -1,10 +1,9 @@ from typing import TYPE_CHECKING, Optional -import torch - from .transformers import TransformerTokenizer if TYPE_CHECKING: + import torch from mamba_ssm.models.mixer_seq_simple import MambaLMHeadModel from transformers import PreTrainedTokenizer @@ -22,14 +21,14 @@ def __init__( self.model = model self.tokenizer = TransformerTokenizer(tokenizer) - def forward(self, input_ids: torch.LongTensor, *_): + def forward(self, input_ids: "torch.LongTensor", *_): """Compute a forward pass through the mamba model.""" output = self.model(input_ids) next_token_logits = output.logits[..., -1, :] return next_token_logits, None - def __call__(self, input_ids: torch.LongTensor, *_) -> torch.FloatTensor: + def __call__(self, input_ids: "torch.LongTensor", *_) -> "torch.FloatTensor": return self.forward(input_ids) @@ -40,11 +39,12 @@ def mamba( tokenizer_kwargs: 
dict = {}, ): try: + import torch from mamba_ssm import MambaLMHeadModel from transformers import AutoTokenizer except ImportError: raise ImportError( - "The `mamba_ssm` library needs to be installed in order to use Mamba people." + "The `mamba_ssm`, `torch` and `transformers` libraries need to be installed in order to use Mamba models." ) if not torch.cuda.is_available(): From e7f673e0f68d87a059df827a2262507148134021 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20Louf?= Date: Tue, 16 Apr 2024 10:21:05 +0200 Subject: [PATCH 2/2] Add Outlines twitter account --- README.md | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 49428199e..f6962cdde 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,8 @@ Outlines Logo -[![Twitter][twitter-badge]][twitter] +[![.txt Twitter][dottxt-twitter-badge]][dottxt-twitter] +[![Outlines Twitter][outlines-twitter-badge]][outlines-twitter] [![Contributors][contributors-badge]][contributors] [![Downloads][downloads-badge]][pypistats] @@ -355,9 +356,11 @@ answer = outlines.generate.text(model)(prompt, max_tokens=100) [contributors]: https://github.com/outlines-dev/outlines/graphs/contributors [contributors-badge]: https://img.shields.io/github/contributors/outlines-dev/outlines?style=flat-square&logo=github&logoColor=white&color=ECEFF4 -[twitter]: https://twitter.com/dottxtai +[dottxt-twitter]: https://twitter.com/dottxtai +[outlines-twitter]: https://twitter.com/OutlinesOSS [discord]: https://discord.gg/R9DSu34mGd [discord-badge]: https://img.shields.io/discord/1182316225284554793?color=81A1C1&logo=discord&logoColor=white&style=flat-square [downloads-badge]: https://img.shields.io/pypi/dm/outlines?color=89AC6B&logo=python&logoColor=white&style=flat-square [pypistats]: https://pypistats.org/packages/outlines -[twitter-badge]: https://img.shields.io/twitter/follow/dottxtai?style=social +[dottxt-twitter-badge]: https://img.shields.io/twitter/follow/dottxtai?style=social 
+[outlines-twitter-badge]: https://img.shields.io/twitter/follow/OutlinesOSS?style=social