```python
elif os.path.isfile(os.path.join(SLAVE_LOCAL_PATH, "index.faiss")):
    # A slave copy of the index exists: promote it to the master path and load it.
    shutil.copytree(SLAVE_LOCAL_PATH, MASTER_LOCAL_PATH)
    vector_store = do_load_exist_vector_store(MASTER_LOCAL_PATH, embed_model)
else:
    # No index on disk at all: build a fresh one and persist it.
    vector_store = new_vector_store(embed_model=embed_model)
    vector_store.save_local(MASTER_LOCAL_PATH)
```
- `from_texts` method: computes vectors for the given texts with the embedding model, initializes the FAISS database, and returns the FAISS wrapper object.
- `save_local` method: saves the FAISS index, the docstore, and the index_to_docstore_id mapping to the given folder path.
- `load_local` method: loads the FAISS index, the docstore, and the index_to_docstore_id mapping from the given folder path, then returns the FAISS wrapper object.
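A minimal round-trip sketch of the three methods (the import paths assume an older LangChain release, matching the rest of this text; newer `langchain-community` versions also require `allow_dangerous_deserialization=True` on `load_local`):

```python
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.vectorstores import FAISS

embeddings = HuggingFaceEmbeddings()

# from_texts: embed the texts and build the FAISS wrapper in one call
db = FAISS.from_texts(["hello world", "goodbye world"], embeddings)

# save_local: persist index, docstore, and id mapping to a folder
db.save_local("./demo_index")

# load_local: restore the wrapper from the same folder
db2 = FAISS.load_local("./demo_index", embeddings)
```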
```python
def save_local(self, folder_path: str, index_name: str = "index") -> None:
    """Save FAISS index, docstore, and index_to_docstore_id to disk.

    Args:
        folder_path: folder path to save index, docstore,
            and index_to_docstore_id to.
        index_name: for saving with a specific index file name
    """
```
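For reference, the on-disk layout that `save_local` produces with the default `index_name`:

```python
db.save_local("./opdf_index")
# The folder now contains two files:
#   ./opdf_index/index.faiss  - the serialized FAISS index
#   ./opdf_index/index.pkl    - pickled docstore and index_to_docstore_id
```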
```python
def vectorize(embeddings_model) -> 'FAISS':
    docs = load_doc()
    db = FAISS.from_documents(docs, embeddings_model)
    db.save_local("./opdf_index")
    return db

# attempts to load vectorstore from disk
def load_db() -> 'FAISS':
    embeddings_model = HuggingFaceEmbeddings()
    try:
        db = FAISS.load_local("./opdf_index", embeddings_model)
    except Exception as e:
        ...
```
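The `except` branch is truncated above; a plausible completion, assuming the intent is to rebuild the index whenever loading from disk fails, would be:

```python
def load_db() -> 'FAISS':
    embeddings_model = HuggingFaceEmbeddings()
    try:
        db = FAISS.load_local("./opdf_index", embeddings_model)
    except Exception as e:
        # Assumed fallback: report the failure and rebuild the index
        print(f"load failed ({e}); rebuilding index")
        db = vectorize(embeddings_model)
    return db
```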
The previously unconditional save and cache refresh are now skipped when the `not_refresh_vs_cache` flag is set:

```python
if not kwargs.get("not_refresh_vs_cache"):
    vector_store.save_local(self.vs_path)
    refresh_vs_cache(self.kb_name)
return True
```
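A hedged sketch of why the flag is useful (the `kb` object, `add_doc`, and `save_vector_store` are hypothetical stand-ins for the surrounding knowledge-base API): during a bulk import, deferring the per-document save and cache refresh to a single final call avoids redundant disk writes.

```python
# Hypothetical bulk-import loop: defer persistence until the end.
for doc in docs:
    kb.add_doc(doc, not_refresh_vs_cache=True)  # skip per-doc save/refresh
kb.save_vector_store()  # hypothetical final call that saves and refreshes once
```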
```python
db_num += 1
db.save_local(index)
return db
```

For large data volumes, the method inserts the data in slices and uses a merge step: two dbs are created and then merged into one complete vector store. Let's verify it below:

```python
all_list = db.docstore._dict
print(len(all_list))
```

The final count shows that everything was stored.
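As a sketch of the slicing-plus-merge idea (the `texts` corpus and slice size are illustrative; `merge_from` is the LangChain FAISS call that folds one wrapper into another):

```python
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.vectorstores import FAISS

embeddings = HuggingFaceEmbeddings()
texts = [f"document {i}" for i in range(5000)]                    # illustrative corpus
slices = [texts[i:i + 1000] for i in range(0, len(texts), 1000)]  # insert in slices

db = FAISS.from_texts(slices[0], embeddings)
for part in slices[1:]:
    partial = FAISS.from_texts(part, embeddings)
    db.merge_from(partial)        # merge each partial index into the main db

print(len(db.docstore._dict))     # should equal len(texts)
```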
db.save_local("faiss_index") new_db=FAISS.load_local("faiss_index",embeddings) docs=new_db.similarity_search(query) docs[0] Document(page_content='Tonight. I call on the Senate to: Pass the Freedom to Vote Act. Pass the John Lewis Voting Rights Act. And while you’re at it, pass...