from superduper import model
from superduper.components.graph import Graph, input_node
# Prompt template for retrieval-augmented generation (RAG): filled in by
# build_prompt with the retrieved context snippets and the user's question.
prompt_template = (
    "Use the following context snippets (the snippets are not ordered). "
    "Answer the question based on this context.\n"
    "{context}\n\n"
    "Here's the question: {query}"
)
@model
def build_prompt(query, docs):
    """Assemble the RAG prompt from the user query and retrieved documents.

    Joins the ``"text"`` field of each retrieved document with blank lines
    to form the context section, then substitutes context and query into
    ``prompt_template``.
    """
    context = "\n\n".join(doc["text"] for doc in docs)
    return prompt_template.format(context=context, query=query)
# Wire the RAG pipeline as a lazy computation graph:
# query -> vector search -> prompt construction -> LLM answer.
in_ = input_node('query')  # graph entry point; exposes an input named 'query'
# NOTE(review): `vector_search_model` and `llm` are not defined in this chunk —
# presumably created earlier in the file/notebook; confirm.
vector_search_results = vector_search_model(query=in_)
prompt = build_prompt(query=in_, docs=vector_search_results)
answer = llm(prompt)
# Materialize the lazy node chain into a runnable graph component.
context_llm = answer.to_graph("context_llm")
# NOTE(review): `query` here looks like a concrete question string defined
# elsewhere (distinct from the graph input above) — confirm against caller.
context_llm.predict(query)