import os

from langchain.chains import RetrievalQAWithSourcesChain
from langchain.document_loaders import PyPDFLoader
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.llms import OpenAI
from langchain.vectorstores import Milvus

# Read the OpenAI API key from the environment rather than hard-coding it
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")

# Module-level QA chain, initialised by do_work()
chain = None


def do_work():
    global chain
    # Load the resume PDF (Wang Tianlong, algorithm engineer) and split it
    # into chunks with the loader's default text splitter
    loader = PyPDFLoader("./王天龙-算法工程师-研究生.pdf")
    docs = loader.load_and_split()
    embeddings = OpenAIEmbeddings()

    # Embed the chunks with OpenAI embeddings and store the vectors in Milvus
    vector_db = Milvus.from_documents(
        docs,
        embeddings,
        connection_args={
            "host": "ljxwtl.cn", "port": "19530"
        }
    )

    # Build a map_reduce QA chain that answers from the Milvus retriever
    # and reports which source documents it used
    chain = RetrievalQAWithSourcesChain.from_chain_type(
        OpenAI(temperature=0, openai_api_key=OPENAI_API_KEY),
        chain_type="map_reduce",
        retriever=vector_db.as_retriever()
    )


def __test_do_work():
    do_work()


def query(question):
    global chain

    # Run the QA chain; return_only_outputs=True keeps only the chain's
    # outputs (the answer and its sources) instead of echoing the inputs
    response = chain(
        inputs={"question": question},
        return_only_outputs=True
    )
    print(response)


def __test_query():
    query("王天龙的简介")        # "A brief introduction of Wang Tianlong"
    query("王天龙的工作经验")    # "Wang Tianlong's work experience"


if __name__ == '__main__':
    __test_do_work()
    __test_query()
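
As written, do_work() re-embeds the PDF and inserts the vectors into Milvus again on every run. Below is a minimal sketch of one way to avoid that, assuming the same langchain API version as above: chunk the pages explicitly with CharacterTextSplitter and write them into a named collection that later runs can reconnect to. The collection name rag_resume_demo and the chunk sizes are illustrative assumptions, not part of the original code, and the Milvus constructor arguments may differ slightly between langchain releases.

from langchain.document_loaders import PyPDFLoader
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.text_splitter import CharacterTextSplitter
from langchain.vectorstores import Milvus

MILVUS_CONN = {"host": "ljxwtl.cn", "port": "19530"}
COLLECTION = "rag_resume_demo"  # hypothetical collection name


def build_vector_db():
    # First run: load the PDF page by page and chunk it explicitly
    pages = PyPDFLoader("./王天龙-算法工程师-研究生.pdf").load()
    splitter = CharacterTextSplitter(chunk_size=800, chunk_overlap=80)  # illustrative sizes
    docs = splitter.split_documents(pages)

    # Embed and store the chunks in a named Milvus collection
    return Milvus.from_documents(
        docs,
        OpenAIEmbeddings(),
        collection_name=COLLECTION,
        connection_args=MILVUS_CONN,
    )


def reuse_vector_db():
    # Later runs: reconnect to the existing collection without re-embedding
    return Milvus(
        embedding_function=OpenAIEmbeddings(),
        collection_name=COLLECTION,
        connection_args=MILVUS_CONN,
    )

do_work() could then call reuse_vector_db() when the collection already exists and fall back to build_vector_db() only on the first ingestion.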
