from vllm import LLM, SamplingParams
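# Load the Vicuna-7B v1.5 model.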
llm = LLM(model="lmsys/vicuna-7b-v1.5")
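# Sample prompts to generate completions for.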
prompts = [
    "Hello, my name is",
    "The president of the United States is",
    "The capital of France is",
    "The future of AI is",
    "John F. Kennedy International Airport is",
    "In a galaxy far, far away, the Jedi fought against",
    "When life gives you lemons, make",
    "Once upon a time in a land filled with magic,",
    "Behind the mountains, there lies a hidden",
    "Exploring the depths of the ocean, scientists discovered",
    "Walking through the enchanted forest, I stumbled upon",
    "Amidst the bustling city streets, a lone street performer played",
    "As the sun set over the horizon, the sky turned into shades of",
    "Lost in a world of books, I found myself",
    "With a backpack full of dreams, I embarked on a journey to",
    "The sound of laughter echoed through the park as children",
    "Underneath the starry night sky, two lovers sat on a bench and",
    "Beneath the mask, a superhero grappled with",
    "In the laboratory, a team of scientists worked tirelessly to unlock",
    "As the first snowflake fell, a sense of wonder filled",
    "In the heart of the rainforest, a rare species of bird with vibrant feathers",
]
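# Use greedy sampling (temperature=0) for deterministic outputs.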
sampling_params = SamplingParams(temperature=0)
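# Generate completions for all prompts.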
outputs = llm.generate(prompts, sampling_params)
# Print the outputs.
for output in outputs:
    prompt = output.prompt
    generated_text = output.outputs[0].text
    print(f"Prompt: {prompt!r}, Generated text: {generated_text!r}")