class MockEmbeddingRandom(MockEmbedding):
    """Mock embedding model that produces random vectors.

    Overrides the parent's vector generator so every embedding call
    yields a fresh vector of ``self.embed_dim`` uniform random floats.
    Useful for tests/benchmarks where real embeddings are unnecessary.
    """

    def _get_vector(self) -> List[float]:
        # One uniform [0, 1) float per dimension of the configured embedding size.
        vector = []
        for _ in range(self.embed_dim):
            vector.append(random())
        return vector
# Route uncaught exceptions through the project-wide handler.
sys.excepthook = handle_exception

# Registry of lazily-constructed embedding models, keyed by config name.
# LazyInit defers instantiation until the model is actually requested.
embedding_models = {
    # llama index
    "openai": LazyInit(
        OpenAIEmbedding
    ),  # default model is OpenAIEmbeddingModelType.TEXT_EMBED_ADA_002
    "openai_embed_3_large": LazyInit(
        OpenAIEmbedding, model_name=OpenAIEmbeddingModelType.TEXT_EMBED_3_LARGE
    ),
    "openai_embed_3_small": LazyInit(
        OpenAIEmbedding, model_name=OpenAIEmbeddingModelType.TEXT_EMBED_3_SMALL
    ),
    "mock": LazyInit(MockEmbeddingRandom, embed_dim=768),
    # langchain
    "openai_langchain": LazyInit(OpenAIEmbeddings),
}

try:
    # you can use your own model in this way.
    from llama_index.embeddings.huggingface import HuggingFaceEmbedding

    embedding_models["huggingface_baai_bge_small"] = LazyInit(
        HuggingFaceEmbedding, model_name="BAAI/bge-small-en-v1.5"
    )
    embedding_models["huggingface_cointegrated_rubert_tiny2"] = LazyInit(
        HuggingFaceEmbedding, model_name="cointegrated/rubert-tiny2"
    )
    embedding_models["huggingface_all_mpnet_base_v2"] = LazyInit(
        HuggingFaceEmbedding,
        model_name="sentence-transformers/all-mpnet-base-v2",
        max_length=512,
    )
    embedding_models["huggingface_bge_m3"] = LazyInit(
        HuggingFaceEmbedding, model_name="BAAI/bge-m3"
    )
except ImportError:
    # Local (GPU) extras not installed; HuggingFace embeddings stay unregistered.
    # Fixed: the original concatenated message was missing a separating space
    # ("AutoRAG.To use ...").
    logger.info(
        "You are using API version of AutoRAG. "
        "To use local version, run pip install 'AutoRAG[gpu]'"
    )
# Registry of generator (LLM) classes, keyed by config name.
# Unlike embedding_models, these are classes, not LazyInit wrappers.
generator_models = {
    "openai": OpenAI,
    "openailike": OpenAILike,
    "mock": MockLLM,
    "bedrock": AutoRAGBedrock,
}

try:
    from llama_index.llms.huggingface import HuggingFaceLLM
    from llama_index.llms.ollama import Ollama

    generator_models["huggingfacellm"] = HuggingFaceLLM
    generator_models["ollama"] = Ollama
except ImportError:
    # Local (GPU) extras not installed; local LLM backends stay unregistered.
    # Fixed: the original concatenated message was missing a separating space
    # ("AutoRAG.To use ...").
    logger.info(
        "You are using API version of AutoRAG. "
        "To use local version, run pip install 'AutoRAG[gpu]'"
    )

try:
    import transformers

    # Silence transformers' verbose warnings when the local stack is present.
    transformers.logging.set_verbosity_error()
except ImportError:
    logger.info(
        "You are using API version of AutoRAG. "
        "To use local version, run pip install 'AutoRAG[gpu]'"
    )