class MockEmbeddingRandom(MockEmbedding):
    """Mock embedding that produces random vectors.

    Each call to ``_get_vector`` returns ``self.embed_dim`` uniform
    floats in ``[0, 1)`` — useful for exercising a pipeline without
    calling a real embedding model.
    """

    def _get_vector(self) -> List[float]:
        # One uniform [0, 1) sample per embedding dimension.
        return [random() for _ in range(self.embed_dim)]
# Registry of embedding-model factories, keyed by the name used in config.
# Each value is a LazyInit so the underlying model is only constructed on
# first use.
embedding_models = {
    # llama index
    "openai": LazyInit(
        OpenAIEmbedding
    ),  # default model is OpenAIEmbeddingModelType.TEXT_EMBED_ADA_002
    "openai_embed_3_large": LazyInit(
        OpenAIEmbedding, model_name=OpenAIEmbeddingModelType.TEXT_EMBED_3_LARGE
    ),
    "openai_embed_3_small": LazyInit(
        OpenAIEmbedding, model_name=OpenAIEmbeddingModelType.TEXT_EMBED_3_SMALL
    ),
    # you can use your own model in this way.
    "huggingface_baai_bge_small": LazyInit(
        HuggingFaceEmbedding, model_name="BAAI/bge-small-en-v1.5"
    ),
    "huggingface_cointegrated_rubert_tiny2": LazyInit(
        HuggingFaceEmbedding, model_name="cointegrated/rubert-tiny2"
    ),
    "huggingface_all_mpnet_base_v2": LazyInit(
        HuggingFaceEmbedding,
        model_name="sentence-transformers/all-mpnet-base-v2",
        max_length=512,
    ),
    "huggingface_bge_m3": LazyInit(HuggingFaceEmbedding, model_name="BAAI/bge-m3"),
    "mock": LazyInit(MockEmbeddingRandom, embed_dim=768),
    # langchain
    "openai_langchain": LazyInit(OpenAIEmbeddings),
}

# Registry of LLM generator classes, keyed by the name used in config.
# Values are the classes themselves (not instances), instantiated by callers.
generator_models = {
    "openai": OpenAI,
    "huggingfacellm": HuggingFaceLLM,
    "openailike": OpenAILike,
    "ollama": Ollama,
    "mock": MockLLM,
}

# Log-line format rendered by RichHandler; rich_tracebacks gives pretty
# exception output.
rich_format = "[%(filename)s:%(lineno)s] >> %(message)s"
logging.basicConfig(
    level="INFO", format=rich_format, handlers=[RichHandler(rich_tracebacks=True)]
)
logger = logging.getLogger("AutoRAG")