async def llama_index_generate_base(
    row: Dict,
    llm: BaseLLM,
    messages: List[ChatMessage],
) -> Dict:
    """Generate a query from the row's ground-truth passages and store it in ``row["query"]``.

    Flattens ``row["retrieval_gt_contents"]`` (a list of lists of passages) into a
    numbered context block, appends it as a user message after the given prompt
    ``messages``, and asks ``llm`` for a generated question.

    :param row: QA row; must contain ``retrieval_gt_contents``. Mutated in place:
        the LLM output is written to ``row["query"]``.
    :param llm: LlamaIndex LLM used via its async ``achat`` interface.
    :param messages: Prompt prefix (e.g. system/few-shot messages). Not mutated —
        the user message is appended to a fresh copy.
    :return: The same ``row`` dict, with ``row["query"]`` set.
    """
    flattened = itertools.chain.from_iterable(row["retrieval_gt_contents"])
    # Number passages from 1 so the prompt reads "1. ...", "2. ...", etc.
    numbered = [f"{idx}. {passage}" for idx, passage in enumerate(flattened, start=1)]
    context_str = "\n".join(numbered)
    user_prompt = f"Text:\n{context_str}\n\nGenerated Question from the Text:\n"
    chat_messages = list(messages)
    chat_messages.append(ChatMessage(role=MessageRole.USER, content=user_prompt))
    chat_response: ChatResponse = await llm.achat(messages=chat_messages)
    row["query"] = chat_response.message.content
    return row
async def two_hop_incremental(
    row: Dict,
    llm: BaseLLM,
    lang: str = "en",
) -> Dict:
    """Generate a two-hop question from the row's first two ground-truth passages.

    Builds a two-document prompt from ``row["retrieval_gt_contents"]``, sends it to
    ``llm`` after the language-specific ``two_hop_incremental`` prompt template, and
    stores the extracted question in ``row["query"]``.

    :param row: QA row; must contain ``retrieval_gt_contents`` with at least two
        sampled passage groups. Mutated in place: ``row["query"]`` is set.
    :param llm: LlamaIndex LLM used via its async ``achat`` interface.
    :param lang: Language key into ``QUERY_GEN_PROMPT["two_hop_incremental"]``.
    :return: The same ``row`` dict, with ``row["query"]`` set.
    :raises AssertionError: If fewer than two passage groups were sampled.
    """
    passages = row["retrieval_gt_contents"]
    assert (
        len(passages) >= 2
    ), "You have to sample at least two passages for making two-hop questions."
    context_str = f"Document 1: {passages[0][0]}\nDocument 2: {passages[1][0]}"
    user_prompt = f"{context_str}\n\nGenerated two-hop Question from two Documents:\n"
    user_message = ChatMessage(role=MessageRole.USER, content=user_prompt)
    # Build a fresh message list instead of appending to the shared template:
    # QUERY_GEN_PROMPT[...][lang] is module-level state, and mutating it would leak
    # this call's user message into every subsequent call (llama_index_generate_base
    # copies its prompt list for the same reason).
    new_messages = [*QUERY_GEN_PROMPT["two_hop_incremental"][lang], user_message]
    chat_response: ChatResponse = await llm.achat(messages=new_messages)
    response = chat_response.message.content
    # Presumably the model prefixes its output with a label like "Answer:"; keep the
    # text after the last colon. NOTE(review): this also truncates questions that
    # themselves contain a colon — confirm against the prompt template's format.
    row["query"] = response.split(":")[-1].strip()
    return row