Merge pull request #847 from guardrails-ai/dtam/051_6x_deprecations
prompt, instructions, and msg_history deprecations; messages RAIL support
zsimjee authored Aug 2, 2024
2 parents 4872f8f + 8b4df1f commit 17dcaf8
Showing 11 changed files with 328 additions and 32 deletions.
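In practice, this commit steers callers away from the deprecated prompt, instructions, and msg_history keywords toward a single messages list, with reask_messages covering validation retries. A minimal sketch of the transitional style, assuming a caller-defined Pydantic model (the Pet model and message contents are illustrative, not from the commit):

from pydantic import BaseModel

from guardrails import Guard


class Pet(BaseModel):
    name: str
    species: str


# 0.5.x transitional style: one messages list replaces the deprecated
# prompt/instructions pair; reask_messages replaces reask_prompt and
# reask_instructions for validation retries.
guard = Guard.from_pydantic(
    output_class=Pet,
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Describe a pet as JSON."},
    ],
    reask_messages=[
        {"role": "user", "content": "Fix the validation errors and answer again."},
    ],
)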
26 changes: 26 additions & 0 deletions guardrails/actions/reask.py
@@ -8,6 +8,7 @@
from guardrails.classes.validation.validation_result import FailResult
from guardrails.prompt.instructions import Instructions
from guardrails.prompt.prompt import Prompt
from guardrails.prompt.messages import Messages
from guardrails.schema.generator import generate_example
from guardrails.schema.rail_schema import json_schema_to_rail_output
from guardrails.types.validator import ValidatorMap
@@ -294,6 +295,19 @@ def get_reask_setup_for_string(
xml_output_schema=xml_output_schema,
**prompt_params,
)
messages = None
if exec_options.reask_messages:
messages = Messages(exec_options.reask_messages)
if messages is None:
messages = Messages(
[{"role": "system", "content": "You are a helpful assistant."}]
)

messages = messages.format(
output_schema=schema_prompt_content,
xml_output_schema=xml_output_schema,
**prompt_params,
)

return output_schema, prompt, instructions
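The added block mirrors how Prompt and Instructions are already handled: wrap any user-supplied reask messages in a Messages object, fall back to a default system message, then interpolate the schema and prompt parameters. A standalone sketch of that flow, assuming Messages keeps the constructor and format signature shown in the hunk (the schema strings are illustrative):

from guardrails.prompt.messages import Messages

reask_messages = None  # stand-in for exec_options.reask_messages
if reask_messages:
    messages = Messages(reask_messages)
else:
    # Same default the hunk falls back to.
    messages = Messages(
        [{"role": "system", "content": "You are a helpful assistant."}]
    )

# Interpolate the schema and any user-supplied params, as the hunk does.
messages = messages.format(
    output_schema="<output schema text>",
    xml_output_schema="<xml schema text>",
)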

@@ -459,6 +473,18 @@ def reask_decoder(obj: ReAsk):
instructions = Instructions(instructions_const)
instructions = instructions.format(**prompt_params)

# TODO: enable this in 0.6.0
# messages = None
# if exec_options.reask_messages:
# messages = Messages(exec_options.reask_messages)
# else:
# messages = Messages(
# [
# {"role": "system", "content": instructions},
# {"role": "user", "content": prompt},
# ]
# )

return reask_schema, prompt, instructions


22 changes: 14 additions & 8 deletions guardrails/async_guard.py
@@ -92,11 +92,12 @@ def from_pydantic(
cls,
output_class: ModelOrListOfModels,
*,
- prompt: Optional[str] = None, # deprecate this too
- instructions: Optional[str] = None, # deprecate this too
+ prompt: Optional[str] = None,
+ instructions: Optional[str] = None,
num_reasks: Optional[int] = None,
- reask_prompt: Optional[str] = None, # deprecate this too
- reask_instructions: Optional[str] = None, # deprecate this too
+ reask_prompt: Optional[str] = None,
+ reask_instructions: Optional[str] = None,
+ reask_messages: Optional[List[Dict]] = None,
tracer: Optional[Tracer] = None,
name: Optional[str] = None,
description: Optional[str] = None,
@@ -108,6 +109,7 @@ def from_pydantic(
num_reasks=num_reasks,
reask_prompt=reask_prompt,
reask_instructions=reask_instructions,
reask_messages=reask_messages,
tracer=tracer,
name=name,
description=description,
@@ -123,10 +125,10 @@ def from_string(
validators: Sequence[Validator],
*,
string_description: Optional[str] = None,
- prompt: Optional[str] = None, # deprecate this too
- instructions: Optional[str] = None, # deprecate this too
- reask_prompt: Optional[str] = None, # deprecate this too
- reask_instructions: Optional[str] = None, # deprecate this too
+ prompt: Optional[str] = None,
+ instructions: Optional[str] = None,
+ reask_prompt: Optional[str] = None,
+ reask_instructions: Optional[str] = None,
num_reasks: Optional[int] = None,
tracer: Optional[Tracer] = None,
name: Optional[str] = None,
@@ -251,6 +253,10 @@ async def __exec(
"custom_reask_instructions",
self._exec_opts.reask_instructions is not None,
),
(
"custom_reask_messages",
self._exec_opts.reask_messages is not None,
),
],
is_parent=True, # It will have children
has_parent=False, # Has no parents
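AsyncGuard picks up the same reask_messages keyword, so async callers configure retries identically. A hedged sketch, assuming AsyncGuard.from_pydantic forwards reask_messages as shown and the guard is invoked LiteLLM-style (model name and contents are illustrative):

import asyncio

from pydantic import BaseModel

from guardrails import AsyncGuard


class Summary(BaseModel):
    text: str


async def main() -> None:
    guard = AsyncGuard.from_pydantic(
        output_class=Summary,
        reask_messages=[
            {"role": "user", "content": "Fix the validation errors and try again."},
        ],
    )
    # The LiteLLM-style invocation the deprecation warnings below recommend.
    result = await guard(
        model="gpt-4o-mini",
        messages=[{"role": "user", "content": "Summarize this release note."}],
    )
    print(result.validated_output)


asyncio.run(main())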
2 changes: 2 additions & 0 deletions guardrails/classes/execution/guard_execution_options.py
@@ -7,6 +7,8 @@ class GuardExecutionOptions:
prompt: Optional[str] = None
instructions: Optional[str] = None
msg_history: Optional[List[Dict]] = None
messages: Optional[List[Dict]] = None
reask_prompt: Optional[str] = None
reask_instructions: Optional[str] = None
reask_messages: Optional[List[Dict]] = None
num_reasks: Optional[int] = None
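GuardExecutionOptions now carries the new messages fields alongside the legacy trio. A minimal sketch of populating it during the transition, assuming the class supports keyword construction as its field list suggests:

from guardrails.classes.execution.guard_execution_options import (
    GuardExecutionOptions,
)

# Both generations of fields coexist in 0.5.x; 0.6.x is expected to keep
# only the messages-based pair.
opts = GuardExecutionOptions(
    messages=[{"role": "user", "content": "Initial request"}],
    reask_messages=[{"role": "user", "content": "Please correct the output."}],
)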
20 changes: 20 additions & 0 deletions guardrails/classes/history/call.py
@@ -17,6 +17,7 @@
from guardrails.constants import error_status, fail_status, not_run_status, pass_status
from guardrails.prompt.instructions import Instructions
from guardrails.prompt.prompt import Prompt
from guardrails.prompt.messages import Messages
from guardrails.classes.validation.validator_logs import ValidatorLogs
from guardrails.actions.reask import (
ReAsk,
@@ -132,6 +133,25 @@ def compiled_instructions(self) -> Optional[str]:
if instructions is not None:
return instructions.format(**prompt_params).source

@property
def reask_messages(self) -> Stack[Messages]:
"""The compiled messages used during reasks.
Does not include the initial messages.
"""
if self.iterations.length > 0:
reasks = self.iterations.copy()
initial_messages = reasks.first
reasks.remove(initial_messages) # type: ignore
return Stack(
*[
r.inputs.messages if r.inputs.messages is not None else None
for r in reasks
]
)

return Stack()

@property
def reask_instructions(self) -> Stack[str]:
"""The compiled instructions used during reasks.
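The new property gives history inspection parity with reask_prompts and reask_instructions: everything after the first iteration counts as a reask. A hedged usage sketch, assuming guard.history exposes Call objects as in current releases:

from guardrails import Guard


def print_reask_messages(guard: Guard) -> None:
    """Dump the messages sent for each reask of the most recent call."""
    last_call = guard.history.last
    if last_call is None:
        return
    for reask_msgs in last_call.reask_messages:
        # One entry per reask iteration: the Messages object sent for that
        # retry, or None if the iteration recorded no messages input.
        print(reask_msgs)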
5 changes: 5 additions & 0 deletions guardrails/classes/history/inputs.py
@@ -7,6 +7,7 @@
from guardrails.llm_providers import PromptCallableBase
from guardrails.prompt.instructions import Instructions
from guardrails.prompt.prompt import Prompt
from guardrails.prompt.messages import Messages


class Inputs(IInputs, ArbitraryModel):
@@ -52,6 +53,10 @@ class Inputs(IInputs, ArbitraryModel):
description="The message history provided by the user for chat model calls.",
default=None,
)
messages: Optional[List[Messages]] = Field(
description="The messages provided by the user for chat model calls.",
default=None,
)
prompt_params: Optional[Dict] = Field(
description="The parameters provided by the user"
"that will be formatted into the final LLM prompt.",
55 changes: 47 additions & 8 deletions guardrails/guard.py
Expand Up @@ -465,11 +465,13 @@ def from_pydantic(
cls,
output_class: ModelOrListOfModels,
*,
- prompt: Optional[str] = None, # TODO: deprecate this in 0.5.1
- instructions: Optional[str] = None, # TODO: deprecate this in 0.5.1
+ prompt: Optional[str] = None,
+ instructions: Optional[str] = None,
num_reasks: Optional[int] = None,
- reask_prompt: Optional[str] = None, # TODO: deprecate this in 0.5.1
- reask_instructions: Optional[str] = None, # TODO: deprecate this in 0.5.1
+ reask_prompt: Optional[str] = None,
+ reask_instructions: Optional[str] = None,
+ reask_messages: Optional[List[Dict]] = None,
+ messages: Optional[List[Dict]] = None,
tracer: Optional[Tracer] = None,
name: Optional[str] = None,
description: Optional[str] = None,
@@ -485,6 +487,7 @@
instructions (str, optional): Instructions for chat models. Defaults to None.
reask_prompt (str, optional): An alternative prompt to use during reasks. Defaults to None.
reask_instructions (str, optional): Alternative instructions to use during reasks. Defaults to None.
reask_messages (List[Dict], optional): A list of messages to use during reasks. Defaults to None.
num_reasks (int, optional): The max times to re-ask the LLM if validation fails. Deprecated
tracer (Tracer, optional): An OpenTelemetry tracer to use for metrics and traces. Defaults to None.
name (str, optional): A unique name for this Guard. Defaults to `gr-` + the object id.
@@ -503,6 +506,19 @@
DeprecationWarning,
)

if reask_instructions:
warnings.warn(
"reask_instructions is deprecated and will be removed in 0.6.x! "
"Please be prepared to set reask_messages instead.",
DeprecationWarning,
)
if reask_prompt:
warnings.warn(
"reask_prompt is deprecated and will be removed in 0.6.x! "
"Please be prepared to set reask_messages instead.",
DeprecationWarning,
)

# We have to set the tracer in the ContextStore before the Rail,
# and therefore the Validators, are initialized
cls._set_tracer(cls, tracer) # type: ignore
@@ -513,6 +529,8 @@
instructions=instructions,
reask_prompt=reask_prompt,
reask_instructions=reask_instructions,
reask_messages=reask_messages,
messages=messages,
)
guard = cls(
name=name,
@@ -548,10 +566,12 @@ def from_string(
validators: Sequence[Validator],
*,
string_description: Optional[str] = None,
- prompt: Optional[str] = None, # TODO: deprecate this in 0.5.1
- instructions: Optional[str] = None, # TODO: deprecate this in 0.5.1
- reask_prompt: Optional[str] = None, # TODO: deprecate this in 0.5.1
- reask_instructions: Optional[str] = None, # TODO: deprecate this in 0.5.1
+ prompt: Optional[str] = None,
+ instructions: Optional[str] = None,
+ reask_prompt: Optional[str] = None,
+ reask_instructions: Optional[str] = None,
+ reask_messages: Optional[List[Dict]] = None,
+ messages: Optional[List[Dict]] = None,
num_reasks: Optional[int] = None,
tracer: Optional[Tracer] = None,
name: Optional[str] = None,
@@ -566,11 +586,24 @@
instructions (str, optional): Instructions for chat models. Defaults to None.
reask_prompt (str, optional): An alternative prompt to use during reasks. Defaults to None.
reask_instructions (str, optional): Alternative instructions to use during reasks. Defaults to None.
reask_messages (List[Dict], optional): A list of messages to use during reasks. Defaults to None.
num_reasks (int, optional): The max times to re-ask the LLM if validation fails. Deprecated
tracer (Tracer, optional): An OpenTelemetry tracer to use for metrics and traces. Defaults to None.
name (str, optional): A unique name for this Guard. Defaults to `gr-` + the object id.
description (str, optional): A description for this Guard. Defaults to None.
""" # noqa
if reask_instructions:
warnings.warn(
"reask_instructions is deprecated and will be removed in 0.6.x! "
"Please be prepared to set reask_messages instead.",
DeprecationWarning,
)
if reask_prompt:
warnings.warn(
"reask_prompt is deprecated and will be removed in 0.6.x! "
"Please be prepared to set reask_messages instead.",
DeprecationWarning,
)

if num_reasks:
warnings.warn(
@@ -594,6 +627,8 @@
instructions=instructions,
reask_prompt=reask_prompt,
reask_instructions=reask_instructions,
reask_messages=reask_messages,
messages=messages,
)
guard = cast(
Guard[str],
@@ -696,6 +731,10 @@ def __exec(
"custom_reask_instructions",
self._exec_opts.reask_instructions is not None,
),
(
"custom_reask_messages",
self._exec_opts.reask_messages is not None,
),
],
is_parent=True, # It will have children
has_parent=False, # Has no parents
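Because the legacy keywords now emit DeprecationWarning, callers can verify a migration is complete by promoting the warning to an error in tests. A small sketch, assuming from_string tolerates an empty validator list (contents are illustrative):

import warnings

from guardrails import Guard

# Promote the new DeprecationWarning to an error to prove a migration
# no longer touches the legacy keywords.
with warnings.catch_warnings():
    warnings.simplefilter("error", DeprecationWarning)
    Guard.from_string(
        validators=[],
        reask_messages=[{"role": "user", "content": "Try again."}],
    )  # passing reask_prompt here would now raise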
39 changes: 38 additions & 1 deletion guardrails/llm_providers.py
@@ -13,6 +13,8 @@
cast,
)

import warnings

from guardrails_api_client.models import LLMResource
from pydantic import BaseModel

@@ -141,6 +143,12 @@ def _invoke_llm(
*args,
**kwargs,
) -> LLMResponse:
warnings.warn(
"This callable is deprecated in favor of passing "
"no callable and the model argument, which utilizes LiteLLM; "
"for example guard(model='gpt-4o', messages=[...], ...)",
DeprecationWarning,
)
if "api_key" in kwargs:
api_key = kwargs.pop("api_key")
else:
@@ -199,6 +207,12 @@ def _invoke_llm(
If `base_model` is passed, the chat engine will be used as a function
on the base model.
"""
warnings.warn(
"This callable is deprecated in favor of passing "
"no callable and the model argument, which utilizes LiteLLM; "
"for example guard(model='gpt-4o', messages=[...], ...)",
DeprecationWarning,
)
if msg_history is None and text is None:
raise PromptCallableException(
"You must pass in either `text` or `msg_history` to `guard.__call__`."
@@ -310,6 +324,12 @@ def _invoke_llm(
)
```
""" # noqa
warnings.warn(
"This callable is deprecated in favor of passing "
"no callable and the model argument, which utilizes LiteLLM; "
"for example guard(model='command-r', messages=[...], ...)",
DeprecationWarning,
)

trace_input_messages = chat_prompt(prompt, kwargs.get("instructions"))
if "instructions" in kwargs:
@@ -394,6 +414,12 @@ def _invoke_llm(
...
```
"""
warnings.warn(
"This callable is deprecated in favor of passing "
"no callable and the model argument, which utilizes LiteLLM; "
"for example guard(model='claude-3-opus-20240229', messages=[...], ...)",
DeprecationWarning,
)
try:
import anthropic
except ImportError:
@@ -925,6 +951,12 @@ async def invoke_llm(
*args,
**kwargs,
):
warnings.warn(
"This callable is deprecated in favor of passing "
"no callable and the model argument, which utilizes LiteLLM; "
"for example guard(model='gpt-4o', messages=[...], ...)",
DeprecationWarning,
)
if "api_key" in kwargs:
api_key = kwargs.pop("api_key")
else:
@@ -976,7 +1008,12 @@ async def invoke_llm(
If `base_model` is passed, the chat engine will be used as a function
on the base model.
"""

warnings.warn(
"This callable is deprecated in favor of passing "
"no callable and the model argument, which utilizes LiteLLM; "
"for example guard(model='gpt-4o', messages=[...], ...)",
DeprecationWarning,
)
if msg_history is None and text is None:
raise PromptCallableException(
"You must pass in either `text` or `msg_history` to `guard.__call__`."
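All of the provider-specific callables now point at the same replacement: drop the callable and let LiteLLM route on the model string. A hedged sketch of the preferred invocation (model name and message contents are illustrative):

from guardrails import Guard

guard = Guard()

# Deprecated path: a provider callable such as openai.chat.completions.create
# passed as the first argument now emits DeprecationWarning.

# Preferred path: no callable; LiteLLM resolves the provider from `model`.
result = guard(
    model="gpt-4o",
    messages=[{"role": "user", "content": "Hello"}],
)
print(result.validated_output)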