from langchain_community.llms import Ollama
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.documents import Document
from langchain_core.messages import HumanMessage, AIMessage
from langchain_text_splitters import RecursiveCharacterTextSplitter
from langchain_community.embeddings import OllamaEmbeddings
from langchain_community.vectorstores import FAISS
from langchain.chains import create_history_aware_retriever, create_retrieval_chain
from langchain.chains.combine_documents import create_stuff_documents_chain

llm = Ollama(model="llama2")

# Add MessagesPlaceholder so the prompt can carry the chat history.
prompt = ChatPromptTemplate.from_messages([
    ("system", """Answer the user's questions based on the below context:

<context>
{context}
</context>"""),
    MessagesPlaceholder(variable_name="chat_history"),
    ("user", "{input}"),
])
document_chain = create_stuff_documents_chain(llm, prompt)

docs = [Document(page_content="langsmith can let you visualize test results")]
text_splitter = RecursiveCharacterTextSplitter()
documents = text_splitter.split_documents(docs)

embeddings = OllamaEmbeddings()
vector = FAISS.from_documents(documents, embeddings)

# -- Start changing --
retriever = vector.as_retriever()
retriever_chain = create_history_aware_retriever(llm, retriever, prompt)
retrieval_chain = create_retrieval_chain(retriever_chain, document_chain)

chat_history = [
    HumanMessage(content="Can LangSmith help test my LLM applications?"),
    AIMessage(content="Yes!"),
]
response = retrieval_chain.invoke({
    "chat_history": chat_history,
    "input": "Tell me how",
    # The same prompt is reused by the history-aware retriever above, and it contains a
    # {context} placeholder, so an (empty) "context" value has to be supplied here.
    "context": "",
})
print(response)
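If the extra "context": "" entry feels like a workaround, an alternative (a sketch following the pattern in the LangChain quickstart, not part of the code above) is to give the history-aware retriever its own prompt that only rewrites the conversation into a search query. It reuses llm, retriever, document_chain and chat_history from the block above; with it, invoke() only needs chat_history and input.

from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain.chains import create_history_aware_retriever, create_retrieval_chain

# A prompt used only to turn the conversation into a standalone search query;
# it has no {context} placeholder, so no dummy context value is needed at invoke time.
retriever_prompt = ChatPromptTemplate.from_messages([
    MessagesPlaceholder(variable_name="chat_history"),
    ("user", "{input}"),
    ("user", "Given the above conversation, generate a search query to look up "
             "information relevant to the conversation."),
])

# llm, retriever and document_chain are the objects defined in the block above.
retriever_chain = create_history_aware_retriever(llm, retriever, retriever_prompt)
retrieval_chain = create_retrieval_chain(retriever_chain, document_chain)

response = retrieval_chain.invoke({
    "chat_history": chat_history,
    "input": "Tell me how",
})
print(response["answer"])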
from langchain_community.llms import Ollama
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.documents import Document
from langchain_text_splitters import RecursiveCharacterTextSplitter
from langchain_community.embeddings import OllamaEmbeddings
from langchain_community.vectorstores import FAISS
from langchain.chains import create_retrieval_chain
from langchain.chains.combine_documents import create_stuff_documents_chain

llm = Ollama(model="llama2")
prompt = ChatPromptTemplate.from_template("""Answer the following question based only on the provided context:

<context>
{context}
</context>

Question: {input}""")
document_chain = create_stuff_documents_chain(llm, prompt)

# Split the documents, embed them, and index them in a FAISS vector store.
docs = [Document(page_content="langsmith can let you visualize test results")]
text_splitter = RecursiveCharacterTextSplitter()
documents = text_splitter.split_documents(docs)

embeddings = OllamaEmbeddings()
vector = FAISS.from_documents(documents, embeddings)

# The retrieval chain fetches relevant documents and feeds them to the document chain.
retriever = vector.as_retriever()
retrieval_chain = create_retrieval_chain(retriever, document_chain)

response = retrieval_chain.invoke({"input": "how can langsmith help with testing?"})
print(response["answer"])
from langchain_community.llms import Ollama
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser

llm = Ollama(model="llama2")
output_parser = StrOutputParser()
prompt = ChatPromptTemplate.from_messages([
    ("system", "You are world class technical documentation writer."),
    ("user", "{input}"),
])

chain = prompt | llm | output_parser
print(chain.invoke({"input": "how can langsmith help with testing?"}))
Adding context: custom document content
from langchain_community.llms import Ollama
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.documents import Document
from langchain.chains.combine_documents import create_stuff_documents_chain

llm = Ollama(model="llama2")
prompt = ChatPromptTemplate.from_template("""Answer the following question based only on the provided context:

<context>
{context}
</context>

Question: {input}""")
document_chain = create_stuff_documents_chain(llm, prompt)

# Pass the documents in directly as the context.
docs = [Document(page_content="langsmith can let you visualize test results")]
document_chain.invoke({
    "input": "how can langsmith help with testing?",
    "context": docs,
})
Adding context: fetching document content from a web page
The code below reads the content of a web page into docs, and can replace the docs = [Document(page_content="langsmith can let you visualize test results")] part of the previous section. It fetches the page over the internet; I haven't fully worked out the details myself.
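As a rough sketch of that idea (the loader choice and the URL here are my own assumptions, following the usual LangChain quickstart pattern), loading a page with WebBaseLoader looks like this; it needs the beautifulsoup4 package installed and network access.

# Minimal sketch: load a web page into docs with WebBaseLoader
# (assumes `pip install beautifulsoup4` and internet access; the URL is just an example).
from langchain_community.document_loaders import WebBaseLoader

loader = WebBaseLoader("https://docs.smith.langchain.com")
docs = loader.load()  # a list of Document objects, ready for the text splitter above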