from fi.opt.optimizers import RandomSearchOptimizer
from fi.opt.generators import LiteLLMGenerator
from fi.opt.datamappers import BasicDataMapper
from fi.opt.base.evaluator import Evaluator
# Example: optimize a summarization prompt with random search.
#
# Pipeline: a generator produces summaries from a prompt template, an
# evaluator scores them, a data mapper wires dataset fields to the
# template/evaluator keys, and the optimizer searches prompt variations.

# a. Define the generator with the initial prompt to be optimized.
# The "{article}" placeholder is filled from the dataset via the data mapper.
initial_generator = LiteLLMGenerator(
    model="gpt-4o-mini",
    prompt_template="Summarize this: {article}",
)

# b. Set up the evaluator to score prompt performance.
evaluator = Evaluator(
    eval_template="summary_quality",  # a built-in template for summarization
    eval_model_name="turing_flash",   # the model that performs the evaluation
)

# c. Set up the data mapper to link dataset fields:
# dataset "input" feeds the "{article}" slot; "output" receives the generation.
data_mapper = BasicDataMapper(
    key_map={"input": "article", "output": "generated_output"},
)

# d. Provide the dataset to optimize against.
# NOTE(review): the original snippet referenced `dataset` without defining it,
# which raises NameError at runtime. The rows below are placeholders — the
# exact row schema expected by BasicDataMapper should be confirmed against
# the library docs; replace with your real data.
dataset = [
    {"input": "The quick brown fox jumps over the lazy dog."},
    {"input": "Python is a widely used programming language."},
]

# e. Initialize the Random Search optimizer.
optimizer = RandomSearchOptimizer(
    generator=initial_generator,
    teacher_model="gpt-4o",  # a powerful model to generate prompt ideas
    num_variations=5,        # generate 5 different versions of our prompt
)

# f. Run the optimization!
result = optimizer.optimize(
    evaluator=evaluator,
    data_mapper=data_mapper,
    dataset=dataset,
)