Commit 99e454d: add explanations
Signed-off-by: Yotam-Perlitz <y.perlitz@ibm.com>
perlitz committed Sep 24, 2024
1 parent e1a9637 commit 99e454d
Showing 1 changed file with 24 additions and 7 deletions.
examples/run_generic_inference_engine.py (31 changes: 24 additions & 7 deletions)
@@ -1,11 +1,21 @@
-from unitxt import get_logger, produce
-from unitxt.inference import GenericInferenceEngine
+from unitxt import get_logger, produce  # Import necessary functions from unitxt
+from unitxt.inference import GenericInferenceEngine  # Import the inference engine class
 
 if __name__ == "__main__":
+    # Create an instance of the GenericInferenceEngine with a default engine.
+    # This means if no engine is specified during inference, it will default to this one.
     generic_engine_with_default = GenericInferenceEngine(
         default="engines.ibm_gen_ai.llama_3_70b_instruct"
     )
+
+    # Define the recipe for data processing and model selection.
+    # - card: Specifies the underlying data (from cards.almost_evil).
+    # - template: Selects the specific template within the card (from templates.qa.open.simple).
+    # - demos_pool_size and num_demos: Control the number of demonstration examples used (set to 0 here).
     recipe = "card=cards.almost_evil,template=templates.qa.open.simple,demos_pool_size=0,num_demos=0"
+
+    # Create a list of instances (data points) for inference.
+    # Each instance has a "question" and its corresponding "answers".
     instances = [
         {
             "question": "How many days there are in a week, answer only with numerals",
@@ -16,19 +26,26 @@
"answers": ["2"],
},
]

# Process the instances using the defined recipe.
# This likely formats the data according to the chosen card and template.
dataset = produce(instances, recipe)

# now, trying without a default, make sure you have something like
# export UNITXT_INFERENCE_ENGINE="engines.ibm_gen_ai.llama_3_70b_instruct"
# in your ~/.bashrc
# Perform inference on the processed dataset using the engine with the default model.
predictions = generic_engine_with_default.infer(dataset)
get_logger().info(predictions)
get_logger().info(predictions) # Log the predictions

# The following code block demonstrates how to use the GenericInferenceEngine without specifying a
# default engine. It expects the engine to be defined in the UNITXT_INFERENCE_ENGINE environment variable.
try:
# Attempt to create an instance without a default engine.
generic_engine_without_default = GenericInferenceEngine()

# Perform inference (will use the engine specified in the environment variable).
predictions = generic_engine_without_default.infer(dataset)
get_logger().info(predictions)
get_logger().info(predictions) # Log the predictions
except:
# Handle the case where the environment variable is not set.
get_logger().error(
"GenericInferenceEngine could not be initialized without a default since "
"UNITXT_INFERENCE_ENGINE environmental variable is not set."
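Note for readers following along: the comment lines removed in this commit pointed out that, when GenericInferenceEngine is created without a default, the engine name has to come from the UNITXT_INFERENCE_ENGINE environment variable (for example via an export line in ~/.bashrc). Below is a minimal sketch of the same setup done from within Python, assuming the variable is read when the engine is constructed; the catalog entry shown is simply the one used as the default earlier in this example, not a requirement.

    import os

    # Assumed setup: point the engine at a catalog entry before constructing it
    # without a default. Any inference engine available in your catalog would work here.
    os.environ["UNITXT_INFERENCE_ENGINE"] = "engines.ibm_gen_ai.llama_3_70b_instruct"

    from unitxt.inference import GenericInferenceEngine

    engine = GenericInferenceEngine()  # resolves the engine from the environment variable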
