{"id":1483,"date":"2024-05-16T03:35:47","date_gmt":"2024-05-16T03:35:47","guid":{"rendered":"https:\/\/www.nicekj.com\/?p=1483"},"modified":"2024-05-16T03:36:19","modified_gmt":"2024-05-16T03:36:19","slug":"goujianninde-ai-bokezhushouyi","status":"publish","type":"post","link":"https:\/\/www.nicekj.com\/goujianninde-ai-bokezhushouyi.html","title":{"rendered":"\u6784\u5efa\u60a8\u7684 AI \u64ad\u5ba2\u52a9\u624b[\u8bd1]"},"content":{"rendered":"<h3 data-id=\"heading-0\">\u628a\u542c\u64ad\u5ba2\u7684\u6570\u5c0f\u65f6\u8f6c\u6362\u6210\u5373\u65f6\u7684\u6df1\u523b\u89c1\u89e3<\/h3>\n<p>\u4f5c\u4e3a\u4e00\u4e2a\u5bf9\u5065\u5eb7\u6216\u5546\u4e1a\u4e3b\u9898\u64ad\u5ba2\u60c5\u6709\u72ec\u949f\u7684\u7231\u597d\u8005\uff0c\u6211\u5e38\u5e38\u611f\u5230\u6709\u592a\u591a\u7684\u8282\u76ee\u8981\u542c\uff0c\u800c\u65f6\u95f4\u5374\u8fdc\u8fdc\u4e0d\u591f\u3002\u50cf Huberman Lab \u8fd9\u6837\u7684\u64ad\u5ba2\uff0c\u4e00\u671f\u8282\u76ee\u901a\u5e38\u80fd\u957f\u8fbe\u4e09\u56db\u4e2a\u5c0f\u65f6\u3002\u95ee\u9898\u4e0d\u4ec5\u662f\u627e\u51fa\u65f6\u95f4\u6765\u542c\uff0c\u66f4\u591a\u65f6\u5019\uff0c\u6211\u5bf9\u8c08\u8bdd\u4e2d\u7684\u67d0\u4e9b\u7279\u5b9a\u90e8\u5206\u611f\u5174\u8da3\u3002\u6b63\u56e0\u5982\u6b64\uff0c\u6211\u5229\u7528 OpenAI \u6a21\u578b\u3001LangChain \u548c Streamlit \u5f00\u53d1\u4e86\u4e00\u4e2a\u7b80\u6d01\u7684\u5e94\u7528\u7a0b\u5e8f\u3002\u5b83\u80fd\u8ba9\u6211\u7c98\u8d34\u4e00\u4e2a YouTube \u64ad\u5ba2\u94fe\u63a5\uff08\u6bd4\u5982 Huberman Lab \u6216 The Diary of a CEO\uff09\uff0c\u8fc5\u901f\u4e3a\u6211\u63d0\u4f9b\u8282\u76ee\u6458\u8981\u3002\u4e4b\u540e\uff0c\u6211\u8fd8\u53ef\u4ee5\u9488\u5bf9\u6027\u5730\u63d0\u95ee\uff0c\u5f97\u5230\u7acb\u523b\u7684\u7b54\u6848\u3002\u8fd9\u6837\u4e00\u6765\uff0c\u6211\u5c31\u80fd\u76f4\u63a5\u83b7\u53d6\u6211\u6240\u9700\u7684\u4fe1\u606f\uff0c\u65e0\u9700\u89c2\u770b\u6574\u671f\u8282\u76ee\u3002<\/p>\n<p>\n<figure class=\"wp-block-image size-large\"><img decoding=\"async\" 
src=\"https:\/\/www.nicekj.com\/wp-content\/uploads\/replace\/ede5a3399577e65473f9ec162088fb59.png\" alt=\"image.png\" \/><\/figure>\n<\/p>\n<h3 data-id=\"heading-1\">\u8ba4\u8bc6 RAG<\/h3>\n<p>\u5728\u6df1\u5165\u7814\u7a76\u4ee3\u7801\u4e4b\u524d\uff0c\u8ba9\u6211\u4eec\u5148\u7b80\u5355\u4e86\u89e3\u4e00\u4e0b RAG\u3002\u5b83\u662f\u4e00\u79cd\u7ed3\u5408\u4e86\u50cf GPT-4 \u8fd9\u7c7b\u6a21\u578b\u7684\u6587\u672c\u751f\u6210\u80fd\u529b\u548c\u4fe1\u606f\u68c0\u7d22\u529f\u80fd\u7684\u65b9\u6cd5\uff0c\u80fd\u591f\u63d0\u4f9b\u7cbe\u786e\u3001\u4e0e\u4e0a\u4e0b\u6587\u76f8\u5173\u7684\u4fe1\u606f\u3002\u5927\u8bed\u8a00\u6a21\u578b\u6709\u65f6\u5019\u53ef\u80fd\u65e0\u6cd5\u63a5\u89e6\u5230\u6211\u4eec\u9700\u8981\u7684\u7279\u5b9a\u6570\u636e\u3002RAG \u5141\u8bb8\u6211\u4eec\u8f93\u5165\u76f8\u5173\u6570\u636e\uff08\u6bd4\u5982\u64ad\u5ba2\u7684\u6587\u5b57\u8bb0\u5f55\uff09\uff0c\u8fd9\u6837\u6a21\u578b\u5c31\u80fd\u591f\u7ed9\u51fa\u5207\u9898\u7684\u7b54\u6848\u3002<\/p>\n<p>\u5176\u8fc7\u7a0b\u5f88\u7b80\u5355\uff1a\u6211\u4eec\u8f7d\u5165\u6570\u636e\uff08\u6bd4\u5982\u64ad\u5ba2\u7684\u6587\u5b57\u8bb0\u5f55\uff09\uff0c\u5c06\u5176\u5206\u5272\u6210\u5c0f\u7247\u6bb5\uff0c\u8fdb\u884c\u5d4c\u5165\u5904\u7406\uff0c\u7136\u540e\u5b58\u50a8\u5728\u5411\u91cf\u6570\u636e\u5e93\u4e2d\u3002<\/p>\n<p>\n<figure class=\"wp-block-image size-large\"><img decoding=\"async\" src=\"https:\/\/p9-juejin.byteimg.com\/tos-cn-i-k3u1fbpfcp\/57f21301243148caa61ed915b368eda6~tplv-k3u1fbpfcp-jj-mark:3024:0:0:0:q75.awebp#?w=1400&amp;h=704&amp;s=255849&amp;e=png&amp;b=161f35\" alt=\"image.png\" 
\/><\/figure>\n<\/p>\n<p>\u5f53\u6211\u4eec\u63d0\u51fa\u67e5\u8be2\u65f6\uff0c\u6a21\u578b\u4f1a\u5c06\u6211\u4eec\u7684\u95ee\u9898\u4e0e\u8fd9\u4e9b\u6570\u636e\u7247\u6bb5\u8fdb\u884c\u5339\u914d\u3002\u6311\u9009\u51fa\u6700\u76f8\u5173\u7684\u6587\u672c\uff0c\u5c06\u5176\u8fde\u540c\u95ee\u9898\u4e00\u8d77\u8f93\u5165\u5927\u8bed\u8a00\u6a21\u578b\uff0c\u4ece\u800c\u751f\u6210\u51c6\u786e\u7684\u7b54\u6848\u3002\u7b80\u800c\u8a00\u4e4b\uff0cRAG \u901a\u8fc7\u68c0\u7d22\u76f8\u5173\u4fe1\u606f\u5e76\u5229\u7528\u8fd9\u4e9b\u4fe1\u606f\u6765\u56de\u7b54\u6211\u4eec\u7684\u95ee\u9898\u3002<\/p>\n<p>\n<figure class=\"wp-block-image size-large\"><img decoding=\"async\" src=\"https:\/\/www.nicekj.com\/wp-content\/uploads\/replace\/bf58c0b86ddd209aea89e9cbd89d1e5e.png\" alt=\"image.png\" \/><\/figure>\n\n\u603b\u7684\u6765\u8bf4\uff0c\u8fd9\u5c31\u662f RAG \u7684\u5de5\u4f5c\u539f\u7406\u3002\u5b83\u627e\u5230\u4e0e\u4f60\u7684\u67e5\u8be2\u76f8\u5173\u7684\u4fe1\u606f\u7247\u6bb5\uff0c\u7136\u540e\u7ed3\u5408\u8fd9\u4e9b\u4fe1\u606f\u751f\u6210\u7b54\u6848\u3002<\/p>\n<h3 data-id=\"heading-2\">\u5e94\u7528\u7a0b\u5e8f\u7684\u5de5\u4f5c\u539f\u7406<\/h3>\n<p>\u8fd9\u6b3e\u5e94\u7528\u7a0b\u5e8f\u975e\u5e38\u5bb9\u6613\u4e0a\u624b\uff1a\u53ea\u9700\u5c06 YouTube \u64ad\u5ba2\u7684\u94fe\u63a5\u7c98\u8d34\u5230 Streamlit \u754c\u9762\uff0c\u8f93\u5165\u4f60\u7684 OpenAI \u5bc6\u94a5\uff0c\u8f6c\u77ac\u95f4\uff0c\u4f60\u5c31\u80fd\u5f97\u5230\u4e00\u4e2a\u64ad\u5ba2\u6458\u8981\u3002\u63a5\u4e0b\u6765\uff0c\u4f60\u53ef\u4ee5\u63d0\u51fa\u5177\u4f53\u7684\u95ee\u9898\uff0c\u6bd4\u5982\u201c\u63a8\u8350\u4e86\u54ea\u4e9b\u4e66\uff1f\u201d\u6216\u8005\u201c\u63d0\u5230\u4e86\u54ea\u4e9b\u6700\u4f73\u7761\u7720\u5efa\u8bae\uff1f\u201d\u3002<\/p>\n<p>\u4ee3\u7801\u4e3b\u8981\u5206\u4e3a\u4e09\u4e2a\u90e8\u5206\uff1a\u4ece YouTube \u83b7\u53d6\u6570\u636e\u3001\u751f\u6210\u6458\u8981\u4e0e\u56de\u7b54\u4ee5\u53ca Streamlit 
\u524d\u7aef\u663e\u793a\u3002\u8fd8\u6709\u4e00\u4e2a\u53ef\u9009\u7684\u7b2c\u56db\u90e8\u5206\uff0c\u7528\u4e8e\u66f4\u9ad8\u7ea7\u7684 RAG \u5e94\u7528\uff0c\u4e0d\u8fc7\u57fa\u7840\u7248\u7684\u529f\u80fd\u5df2\u7ecf\u975e\u5e38\u51fa\u8272\u4e86\u3002\u8ba9\u6211\u4eec\u6765\u6df1\u5165\u4e86\u89e3\u4ee3\u7801\u5427\uff01<\/p>\n<h2 data-id=\"heading-3\">\u7b2c\u4e00\u90e8\u5206\uff1a\u4e0e YouTube \u7684\u4e92\u52a8<\/h2>\n<p>\u9996\u5148\uff0c\u6211\u4eec\u7684\u5e94\u7528\u4f1a\u8fde\u63a5\u5230 YouTube\uff0c\u63d0\u53d6\u89c6\u9891\u7684\u6807\u9898\u548c\u5b57\u5e55 \u2014\u2014 \u8fd9\u662f\u7406\u89e3\u64ad\u5ba2\u5185\u5bb9\u7684\u5173\u952e\u6240\u5728\u3002<\/p>\n<p><strong>\u529f\u80fd 1\uff1a\u83b7\u53d6\u89c6\u9891\u6807\u9898<\/strong><\/p>\n<p>\u8fd9\u4e2a\u529f\u80fd\u5229\u7528\u89c6\u9891\u7684 URL \u6765\u83b7\u53d6 YouTube \u89c6\u9891\u7684\u6807\u9898\u3002\u5b83\u53d1\u9001\u7f51\u7edc\u8bf7\u6c42\uff0c\u89e3\u6790 HTML \u4ee5\u627e\u5230\u6807\u9898\u6807\u7b7e\uff0c\u5e76\u8fd4\u56de\u6807\u9898\u3002\u8fd9\u662f\u6211\u4eec\u4e86\u89e3\u6bcf\u4e2a\u89c6\u9891\u5185\u5bb9\u7684\u8d77\u70b9\u3002<\/p>\n<pre><\/div><div class=\"code-block-extension-headerRight\"><span class=\"code-block-extension-lang\">python<\/span><div class=\"code-block-extension-copyCodeBtn\">\u590d\u5236\u4ee3\u7801<\/div><\/div><\/div><code class=\"hljs language-python code-block-extension-codeShowNum\" lang=\"python\"><span class=\"code-block-extension-codeLine\" data-line-num=\"1\"><span class=\"hljs-keyword\">import<\/span> requests<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"2\"><span class=\"hljs-keyword\">from<\/span> bs4 <span class=\"hljs-keyword\">import<\/span> BeautifulSoup<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"3\"><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"4\"><span class=\"hljs-keyword\">def<\/span> <span class=\"hljs-title 
function_\">get_youtube_video_title<\/span>(<span class=\"hljs-params\">video_url<\/span>):<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"5\">    response = requests.get(video_url)<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"6\">    soup = BeautifulSoup(response.content, <span class=\"hljs-string\">'html.parser'<\/span>)<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"7\">    title = soup.find(<span class=\"hljs-string\">'meta'<\/span>, <span class=\"hljs-built_in\">property<\/span>=<span class=\"hljs-string\">'og:title'<\/span>)<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"8\">    <span class=\"hljs-keyword\">return<\/span> title[<span class=\"hljs-string\">'content'<\/span>] <span class=\"hljs-keyword\">if<\/span> title <span class=\"hljs-keyword\">else<\/span> <span class=\"hljs-string\">\"Title not found\"<\/span><\/span>\n<\/code><\/pre>\n<p><strong>\u529f\u80fd 2\uff1a\u63d0\u53d6\u5b57\u5e55<\/strong><\/p>\n<p>\u7d27\u63a5\u7740\uff0c\u6211\u4eec\u4f1a\u83b7\u53d6\u89c6\u9891\u7684\u5b57\u5e55\u3002\u6211\u4eec\u4f7f\u7528\u83b7\u53d6\u6807\u9898\u7684\u529f\u80fd\uff0c\u5e76\u501f\u52a9 LangChain \u4e2d\u7684 YoutubeLoader \u6765\u52a0\u8f7d\u5b57\u5e55\u3002\u8fd9\u4e9b\u6587\u5b57\u8bb0\u5f55\u5c06\u88ab\u8f93\u5165\u6211\u4eec\u7684 RAG \u7cfb\u7edf\uff0c\u4ee5\u751f\u6210\u76f8\u5173\u7684\u56de\u7b54\u3002<\/p>\n<pre><\/div><div class=\"code-block-extension-headerRight\"><span class=\"code-block-extension-lang\">ini<\/span><div class=\"code-block-extension-copyCodeBtn\">\u590d\u5236\u4ee3\u7801<\/div><\/div><\/div><code class=\"hljs language-ini code-block-extension-codeShowNum\" lang=\"ini\"><span class=\"code-block-extension-codeLine\" data-line-num=\"1\">from langchain.document_loaders import YoutubeLoader<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"2\">from langchain.schema import Document<\/span>\n<span 
class=\"code-block-extension-codeLine\" data-line-num=\"3\"><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"4\">def fetch_youtube_captions(video_url):<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"5\">    <span class=\"hljs-attr\">title<\/span> = get_youtube_video_title(video_url)<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"6\">    <span class=\"hljs-attr\">loader<\/span> = YoutubeLoader.from_youtube_url(video_url)<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"7\">    <span class=\"hljs-attr\">docs<\/span> = loader.load()<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"8\">    if docs and len(docs) &gt; 0:<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"9\">        <span class=\"hljs-attr\">intro_sentence<\/span> = <span class=\"hljs-string\">\"This is the title of the video\/transcription\/conversation: \"<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"10\">        <span class=\"hljs-attr\">title_content<\/span> = intro_sentence + title<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"11\">        docs<span class=\"hljs-section\">[0]<\/span> = Document(<span class=\"hljs-attr\">page_content<\/span>=title_content + <span class=\"hljs-string\">\"nn\"<\/span> + docs[<span class=\"hljs-number\">0<\/span>].page_content)<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"12\">    return docs<\/span>\n<\/code><\/pre>\n<h2 data-id=\"heading-4\">\u7b2c\u4e8c\u90e8\u5206\uff1a\u6570\u636e\u5904\u7406\u4e0e AI \u6574\u5408<\/h2>\n<h3 
data-id=\"heading-5\">\u6838\u5fc3\u5e94\u7528\u903b\u8f91\uff1a\u5c06\u5bf9\u8bdd\u8f6c\u6362\u4e3a\u6570\u636e<\/h3>\n<p>\u8fd9\u91cc\u662f\u201c\u53d8\u9b54\u672f\u201d\u7684\u5730\u65b9\u3002\u6211\u4eec\u5c06\u64ad\u5ba2\u5185\u5bb9\u5206\u89e3\u6210\u6613\u4e8e\u6d88\u5316\u7684\u5c0f\u5757\uff0c\u8f6c\u6362\u6210\u9002\u5408 AI \u5904\u7406\u7684\u683c\u5f0f\uff0c\u5e76\u8fdb\u884c\u5b58\u50a8\uff0c\u4ee5\u4fbf\u6211\u4eec\u968f\u65f6\u68c0\u7d22\u548c\u751f\u6210\u6458\u8981\u3002<\/p>\n<p><strong>\u73af\u5883\u642d\u5efa\u548c\u5168\u5c40\u53d8\u91cf\u8bbe\u7f6e<\/strong><\/p>\n<p>\u6211\u4eec\u9996\u5148\u642d\u5efa\u7f16\u7a0b\u73af\u5883\u5e76\u5b9a\u4e49\u4e00\u4e9b\u5168\u5c40\u53d8\u91cf\u3002\u5176\u4e2d\u5305\u62ec\u521d\u59cb\u5316\u4e00\u4e2a\u6570\u636e\u5e93\u548c\u5bf9\u8bdd\u5185\u5bb9\u7684\u5b58\u50a8\u7cfb\u7edf\u3002<\/p>\n<pre><\/div><div class=\"code-block-extension-headerRight\"><span class=\"code-block-extension-lang\">python<\/span><div class=\"code-block-extension-copyCodeBtn\">\u590d\u5236\u4ee3\u7801<\/div><\/div><\/div><code class=\"hljs language-python code-block-extension-codeShowNum\" lang=\"python\"><span class=\"code-block-extension-codeLine\" data-line-num=\"1\"><span class=\"hljs-keyword\">from<\/span> langchain.text_splitter <span class=\"hljs-keyword\">import<\/span> RecursiveCharacterTextSplitter<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"2\"><span class=\"hljs-keyword\">from<\/span> langchain.embeddings.openai <span class=\"hljs-keyword\">import<\/span> OpenAIEmbeddings<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"3\"><span class=\"hljs-keyword\">from<\/span> langchain.vectorstores <span class=\"hljs-keyword\">import<\/span> Chroma<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"4\"><span class=\"hljs-keyword\">from<\/span> sklearn.cluster <span class=\"hljs-keyword\">import<\/span> KMeans<\/span>\n<span class=\"code-block-extension-codeLine\" 
data-line-num=\"5\"><span class=\"hljs-keyword\">from<\/span> langchain.chat_models <span class=\"hljs-keyword\">import<\/span> ChatOpenAI<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"6\"><span class=\"hljs-keyword\">from<\/span> langchain.chains.summarize <span class=\"hljs-keyword\">import<\/span> load_summarize_chain<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"7\"><span class=\"hljs-keyword\">from<\/span> langchain.schema <span class=\"hljs-keyword\">import<\/span> Document<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"8\"><span class=\"hljs-keyword\">import<\/span> numpy <span class=\"hljs-keyword\">as<\/span> np<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"9\"><span class=\"hljs-keyword\">from<\/span> langchain.chains <span class=\"hljs-keyword\">import<\/span> ConversationalRetrievalChain<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"10\"><span class=\"hljs-keyword\">import<\/span> logging<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"11\"><span class=\"hljs-keyword\">from<\/span> langchain.prompts <span class=\"hljs-keyword\">import<\/span> PromptTemplate<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"12\"><span class=\"hljs-keyword\">from<\/span> langchain.memory <span class=\"hljs-keyword\">import<\/span> ConversationBufferMemory<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"13\"><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"14\"><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"15\">logging.basicConfig(level=logging.INFO) <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"16\"><span class=\"hljs-comment\"># Initialize global variables<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"17\">global_chromadb = <span 
class=\"hljs-literal\">None<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"18\">global_documents = <span class=\"hljs-literal\">None<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"19\">global_short_documents = <span class=\"hljs-literal\">None<\/span><\/span>\n<\/code><\/pre>\n<p><strong>\u6570\u636e\u7ba1\u7406\u51fd\u6570<\/strong><\/p>\n<p>\u8fd9\u4e9b\u51fd\u6570\u5bf9\u4e8e\u7ba1\u7406\u5e94\u7528\u6570\u636e\u81f3\u5173\u91cd\u8981\u3002reset_globals \u7528\u4e8e\u91cd\u7f6e\u5168\u5c40\u53d8\u91cf\uff0cinit_chromadb \u5219\u7528\u4e8e\u7528\u5904\u7406\u8fc7\u7684\u6570\u636e\u521d\u59cb\u5316\u6570\u636e\u5e93\u3002<\/p>\n<pre><\/div><div class=\"code-block-extension-headerRight\"><span class=\"code-block-extension-lang\">ini<\/span><div class=\"code-block-extension-copyCodeBtn\">\u590d\u5236\u4ee3\u7801<\/div><\/div><\/div><code class=\"hljs language-ini code-block-extension-codeShowNum\" lang=\"ini\"><span class=\"code-block-extension-codeLine\" data-line-num=\"1\"><span class=\"hljs-comment\"># Initialize the memory outside the function so it persists across different calls<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"2\"><span class=\"hljs-attr\">conversation_memory<\/span> = ConversationBufferMemory(<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"3\">    <span class=\"hljs-attr\">memory_key<\/span>=<span class=\"hljs-string\">\"chat_history\"<\/span>,<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"4\">    <span class=\"hljs-attr\">max_len<\/span>=<span class=\"hljs-number\">50<\/span>,<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"5\">    <span class=\"hljs-attr\">input_key<\/span>=<span class=\"hljs-string\">\"question\"<\/span>,<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"6\">    <span class=\"hljs-attr\">output_key<\/span>=<span 
class=\"hljs-string\">\"answer\"<\/span>,<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"7\">    <span class=\"hljs-attr\">return_messages<\/span>=<span class=\"hljs-literal\">True<\/span>,<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"8\">)<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"9\"><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"10\"><span class=\"hljs-comment\"># Function to reset global variables<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"11\">def reset_globals():<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"12\">    global global_chromadb, global_documents, global_short_documents<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"13\">    <span class=\"hljs-attr\">global_chromadb<\/span> = None<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"14\">    <span class=\"hljs-attr\">global_documents<\/span> = None<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"15\">    <span class=\"hljs-attr\">global_short_documents<\/span> = None<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"16\">    <span class=\"hljs-comment\"># Reset the conversation memory<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"17\">    if conversation_memory:<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"18\">        conversation_memory.clear()<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"19\"><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"20\">def init_chromadb(openai_api_key):<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"21\">    global global_chromadb, global_short_documents<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"22\">    if global_chromadb is None and global_short_documents is not 
None:<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"23\">        <span class=\"hljs-attr\">global_chromadb<\/span> = Chroma.from_documents(documents=global_short_documents, embedding=OpenAIEmbeddings(openai_api_key=openai_api_key))<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"24\">        <\/span>\n<\/code><\/pre>\n<p><strong>\u5904\u7406\u5b57\u5e55\u5e76\u751f\u6210\u6458\u8981<\/strong><\/p>\n<p>process_and_cluster_captions: \u8fd9\u91cc\uff0c\u6211\u4eec\u5bf9 YouTube \u5b57\u5e55\u8fdb\u884c\u52a0\u5de5\uff0c\u4e3a\u5206\u6790\u548c\u751f\u6210\u56de\u7b54\u505a\u597d\u51c6\u5907\u3002\u5177\u4f53\u6b65\u9aa4\u5305\u62ec\uff1a<\/p>\n<ol>\n<li>\n<p><strong>\u521d\u6b65\u6570\u636e\u68c0\u67e5\uff1a<\/strong> \u6211\u4eec\u5148\u68c0\u67e5\u5b57\u5e55\u7684\u683c\u5f0f\uff0c\u786e\u4fdd\u5b83\u4eec\u9002\u5408\u540e\u7eed\u5904\u7406\u3002<\/p>\n<\/li>\n<li>\n<p><strong>\u5207\u5206\u5b57\u5e55\uff1a<\/strong> \u5b57\u5e55\u88ab\u5206\u5272\u6210\u66f4\u5c0f\u7684\u6bb5\u843d\uff0c\u5206\u522b\u7528\u4e8e\u5236\u4f5c\u6458\u8981\u548c\u56de\u7b54\u95ee\u9898\u3002\u8fd9\u6837\u7684\u5207\u5206\u5bf9\u9ad8\u6548\u5b8c\u6210\u5177\u4f53\u4efb\u52a1\u975e\u5e38\u5173\u952e\u3002<\/p>\n<\/li>\n<li>\n<p><strong>\u805a\u7c7b\u5f52\u7eb3\u76f8\u5173\u5185\u5bb9\uff1a<\/strong> \u6211\u4eec\u8fd0\u7528 KMeans \u7b97\u6cd5\u5bf9\u5236\u4f5c\u6458\u8981\u7684\u6bb5\u843d\u8fdb\u884c\u805a\u7c7b\u3002\u8fd9\u6837\u505a\u53ef\u4ee5\u7b5b\u9009\u51fa\u91cd\u590d\u5185\u5bb9\uff0c\u4ec5\u4fdd\u7559\u64ad\u5ba2\u4e2d\u6700\u6709\u4ee3\u8868\u6027\u7684\u90e8\u5206\u3002\u4ece\u6bcf\u4e2a\u7c7b\u522b\u4e2d\u9009\u53d6\u4e00\u4e2a\u6bb5\u843d\uff0c\u6211\u4eec\u786e\u4fdd AI \u80fd\u63a5\u6536\u5230\u65e2\u591a\u6837\u5316\u53c8\u7cbe\u70bc\u7684\u4fe1\u606f\uff0c\u975e\u5e38\u9002\u5408\u521b\u5efa\u6709\u610f\u4e49\u7684\u6458\u8981\u3002<\/p>\n<\/li>\n<li>\n<p><strong>\u5168\u5c40\u5b58\u50a8\uff1a<\/strong> 
\u5206\u5272\u597d\u7684\u5b57\u5e55\u88ab\u5168\u5c40\u5b58\u50a8\uff0c\u65b9\u4fbf\u4e4b\u540e\u5236\u4f5c\u6458\u8981\u548c\u56de\u7b54\u95ee\u9898\u65f6\u968f\u65f6\u8bbf\u95ee\u3002<\/p>\n<p>def process_and_cluster_captions(captions, openai_api_key, num_clusters=12):\nglobal global_documents, global_short_documents\nlogging.info(&#8220;\u6b63\u5728\u5904\u7406\u548c\u805a\u7c7b\u5b57\u5e55&#8221;)<\/p>\n<pre><\/div><div class=\"code-block-extension-headerRight\"><span class=\"code-block-extension-lang\">ini<\/span><div class=\"code-block-extension-copyCodeBtn\">\u590d\u5236\u4ee3\u7801<\/div><\/div><\/div><code class=\"hljs language-ini code-block-extension-codeShowNum\" lang=\"ini\"><span class=\"code-block-extension-codeLine\" data-line-num=\"1\"><span class=\"hljs-comment\"># \u8bb0\u5f55\u5b57\u5e55\u5f00\u5934\u7684500\u4e2a\u5b57\u7b26\uff0c\u68c0\u67e5\u5176\u683c\u5f0f<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"2\">logging.info(f\"\u63a5\u6536\u5230\u7684\u5b57\u5e55\uff08\u524d500\u4e2a\u5b57\u7b26\uff09: {captions<span class=\"hljs-section\">[0]<\/span>.page_content<span class=\"hljs-section\">[:500]<\/span>}\")<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"3\"><span class=\"hljs-attr\">caption_content<\/span> = captions[<span class=\"hljs-number\">0<\/span>].page_content<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"4\"><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"5\"><span class=\"hljs-comment\"># \u786e\u8ba4\u5b57\u5e55\u662f\u5b57\u7b26\u4e32\u683c\u5f0f\uff0c\u4ee5\u4fbf\u5904\u7406<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"6\">if not isinstance(caption_content, str):<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"7\">    logging.error(\"\u5b57\u5e55\u683c\u5f0f\u4e0e\u9884\u671f\u4e0d\u7b26\")<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"8\">    
return <span class=\"hljs-section\">[]<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"9\"><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"10\"><span class=\"hljs-comment\"># \u4e3a\u6458\u8981\u521b\u5efa\u8f83\u957f\u7684\u6587\u672c\u5757<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"11\"><span class=\"hljs-attr\">summary_splitter<\/span> = RecursiveCharacterTextSplitter(chunk_size=<span class=\"hljs-number\">1000<\/span>, chunk_overlap=<span class=\"hljs-number\">0<\/span>, separators=[<span class=\"hljs-string\">\"nn\"<\/span>, <span class=\"hljs-string\">\"n\"<\/span>, <span class=\"hljs-string\">\" \"<\/span>, <span class=\"hljs-string\">\"\"<\/span>])<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"12\"><span class=\"hljs-attr\">summary_docs<\/span> = summary_splitter.create_documents([caption_content])<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"13\"><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"14\"><span class=\"hljs-comment\"># \u4e3a\u95ee\u7b54\u521b\u5efa\u8f83\u77ed\u7684\u6587\u672c\u5757<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"15\"><span class=\"hljs-attr\">qa_splitter<\/span> = RecursiveCharacterTextSplitter(chunk_size=<span class=\"hljs-number\">200<\/span>, chunk_overlap=<span class=\"hljs-number\">0<\/span>, separators=[<span class=\"hljs-string\">\"nn\"<\/span>, <span class=\"hljs-string\">\"n\"<\/span>, <span class=\"hljs-string\">\" \"<\/span>, <span class=\"hljs-string\">\"\"<\/span>])<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"16\"><span class=\"hljs-attr\">qa_docs<\/span> = qa_splitter.create_documents([caption_content])<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"17\"><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"18\"><span class=\"hljs-comment\"># 
\u5904\u7406\u7528\u4e8e\u6458\u8981\u7684\u6587\u672c<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"19\"><span class=\"hljs-attr\">summary_embeddings<\/span> = OpenAIEmbeddings(openai_api_key=openai_api_key).embed_documents([x.page_content for x in summary_docs])<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"20\"><span class=\"hljs-attr\">kmeans<\/span> = KMeans(n_clusters=num_clusters, random_state=<span class=\"hljs-number\">42<\/span>).fit(summary_embeddings)<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"21\"><span class=\"hljs-attr\">closest_indices<\/span> = [np.argmin(np.linalg.norm(summary_embeddings - center, axis=<span class=\"hljs-number\">1<\/span>)) for center in kmeans.cluster_centers_]<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"22\"><span class=\"hljs-attr\">representative_docs<\/span> = [summary_docs[i] for i in closest_indices]<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"23\"><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"24\"><span class=\"hljs-comment\"># \u5168\u5c40\u5b58\u50a8\u6587\u6863<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"25\"><span class=\"hljs-attr\">global_documents<\/span> = summary_docs  <span class=\"hljs-comment\"># \u5b58\u50a8\u7528\u4e8e\u5236\u4f5c\u6458\u8981\u7684\u6587\u6863<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"26\"><span class=\"hljs-attr\">global_short_documents<\/span> = qa_docs  <span class=\"hljs-comment\"># \u5b58\u50a8\u7528\u4e8e\u95ee\u7b54\u7684\u6587\u6863<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"27\"><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"28\">init_chromadb(openai_api_key)  <span class=\"hljs-comment\"># 
\u5229\u7528\u8f83\u957f\u7684\u6587\u672c\u5757\u521d\u59cb\u5316\u6570\u636e\u5e93<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"29\">return representative_docs<\/span>\n<\/code><\/pre>\n<\/li>\n<\/ol>\n<p>generate_summary: \u8be5\u51fd\u6570\u4f7f\u7528\u5904\u7406\u8fc7\u7684\u5b57\u5e55\uff0c\u501f\u52a9 OpenAI \u7684 AI \u6a21\u578b\u751f\u6210\u89c6\u9891\u7684\u6e05\u6670\u4e14\u7b80\u6d01\u7684\u6458\u8981\u3002\u5177\u4f53\u5982\u4e0b\uff1a<\/p>\n<ol>\n<li>\n<p><strong>\u6574\u5408\u5173\u952e\u6bb5\u843d\uff1a<\/strong> \u6211\u4eec\u5c06\u805a\u7c7b\u8fc7\u7a0b\u4e2d\u6311\u9009\u51fa\u7684\u6bb5\u843d\u7ed3\u5408\u8d77\u6765\uff0c\u5f62\u6210\u8fde\u8d2f\u7684\u53d9\u8ff0\u3002\u8fd9\u786e\u4fdd\u4e86\u6458\u8981\u80fd\u591f\u8986\u76d6\u64ad\u5ba2\u7684\u4e0d\u540c\u5c42\u9762\u3002<\/p>\n<\/li>\n<li>\n<p><strong>AI \u5bfc\u5411\u7684\u6458\u8981\u5236\u4f5c\uff1a<\/strong> \u5229\u7528\u5b9a\u5236\u7684\u63d0\u793a\u8bed\uff0c\u6211\u4eec\u6307\u5bfc AI \u5236\u4f5c\u4e00\u4e2a\u7b80\u660e\u4e14\u5305\u542b\u4e30\u5bcc\u4fe1\u606f\u7684\u6458\u8981\u3002\u8fd9\u4e00\u6b65\u9aa4\u5bf9\u4e8e\u5f15\u5bfc AI \u805a\u7126\u4e8e\u603b\u4f53\u4e3b\u9898\u548c\u91cd\u70b9\u5185\u5bb9\u3001\u907f\u514d\u65e0\u5173\u7ec6\u8282\u975e\u5e38\u5173\u952e\u3002<\/p>\n<\/li>\n<li>\n<p><strong>\u6267\u884c\u6458\u8981\u94fe\uff1a<\/strong> AI \u5904\u7406\u8fd9\u4e9b\u7ed3\u5408\u5728\u4e00\u8d77\u7684\u6587\u672c\uff0c\u751f\u6210\u6458\u8981\uff0c\u8ba9\u7528\u6237\u8fc5\u901f\u628a\u63e1\u64ad\u5ba2\u7684\u6838\u5fc3\u5185\u5bb9\u3002<\/p>\n<\/li>\n<\/ol>\n<p>\u8fd9\u79cd\u65b9\u6cd5\u786e\u4fdd\u6240\u751f\u6210\u7684\u6458\u8981\u65e2\u5168\u9762\u53c8\u7cbe\u51c6\uff0c\u6355\u6349\u4e86\u64ad\u5ba2\u7684\u7cbe\u534e\u6240\u5728\u3002<\/p>\n<pre><\/div><div class=\"code-block-extension-headerRight\"><span class=\"code-block-extension-lang\">ini<\/span><div 
class=\"code-block-extension-copyCodeBtn\">\u590d\u5236\u4ee3\u7801<\/div><\/div><\/div><code class=\"hljs language-ini code-block-extension-codeShowNum\" lang=\"ini\"><span class=\"code-block-extension-codeLine\" data-line-num=\"1\">def generate_summary(representative_docs, openai_api_key, model_name):<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"2\">    logging.info(\"\u6b63\u5728\u751f\u6210\u6458\u8981\")<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"3\">    <span class=\"hljs-attr\">llm4<\/span> = ChatOpenAI(model_name=model_name, temperature=<span class=\"hljs-number\">0.2<\/span>, openai_api_key=openai_api_key)<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"4\"><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"5\">    <span class=\"hljs-comment\"># \u6c47\u603b\u6587\u672c\u4ee5\u51c6\u5907\u751f\u6210\u6458\u8981<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"6\">    <span class=\"hljs-attr\">summary_text<\/span> = <span class=\"hljs-string\">\"n\"<\/span>.join([doc.page_content for doc in representative_docs])<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"7\"><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"8\">    <span class=\"hljs-attr\">summary_prompt_template<\/span> = PromptTemplate(<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"9\">        <span class=\"hljs-attr\">template<\/span>=(<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"10\">            \"\u6839\u636e\u4e0b\u9762\u63d0\u4f9b\u7684\u6587\u672c\uff0c\u521b\u5efa\u4e00\u4e2a\u64ad\u5ba2\u5bf9\u8bdd\u7684\u7b80\u6d01\u6458\u8981\u3002\u6587\u672c\u5305\u542b\u4ece\u5bf9\u8bdd\u4e0d\u540c\u90e8\u5206\u6311\u9009\u51fa\u7684\u3001\u5177\u6709\u4ee3\u8868\u6027\u7684\u8282\u9009\u3002\"<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"11\">            
\"\u4f60\u7684\u4efb\u52a1\u662f\u5c06\u8fd9\u4e9b\u8282\u9009\u7efc\u5408\u6210\u4e00\u4e2a\u8fde\u8d2f\u4e14\u7b80\u6d01\u7684\u6458\u8981\u3002\u4e13\u6ce8\u4e8e\u64ad\u5ba2\u4e2d\u8ba8\u8bba\u7684\u603b\u4f53\u4e3b\u9898\u548c\u4e3b\u8981\u89c2\u70b9\u3002\"<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"12\">            \"\u6458\u8981\u5e94\u8be5\u6e05\u6670\u5b8c\u6574\u5730\u4f20\u8fbe\u5bf9\u8bdd\u7684\u5173\u952e\u8bdd\u9898\u548c\u6d1e\u89c1\uff0c\u540c\u65f6\u7701\u7565\u4efb\u4f55\u4e0d\u5fc5\u8981\u7684\u7ec6\u8282\u3002\u5b83\u5e94\u8be5\u662f\u5f15\u4eba\u5165\u80dc\u4e14\u6613\u4e8e\u9605\u8bfb\u7684\uff0c\u7406\u60f3\u60c5\u51b5\u4e0b\u4e3a\u4e00\u5230\u4e24\u6bb5\u3002\u5c3d\u53ef\u80fd\u4fdd\u6301\u7b80\u77ed\"<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"13\">            \"nn\u9009\u5b9a\u7684\u64ad\u5ba2\u8282\u9009\uff1an{text}nn\u6458\u8981\uff1a\"<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"14\">        ),<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"15\">        <span class=\"hljs-attr\">input_variables<\/span>=[<span class=\"hljs-string\">\"text\"<\/span>]<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"16\">    )<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"17\">    <span class=\"hljs-comment\"># \u52a0\u8f7d\u6458\u8981\u94fe<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"18\">    <span class=\"hljs-attr\">summarize_chain<\/span> = load_summarize_chain(llm=llm4, chain_type=<span class=\"hljs-string\">\"stuff\"<\/span>, prompt=summary_prompt_template)<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"19\"><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"20\">    <span class=\"hljs-comment\"># \u6267\u884c\u6458\u8981\u94fe<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"21\">    <span 
class=\"hljs-attr\">summary<\/span> = summarize_chain.run([Document(page_content=summary_text)])<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"22\"><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"23\">    logging.info(\"\u6458\u8981\u751f\u6210\u5b8c\u6210\")<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"24\">    return summary<\/span>\n<\/code><\/pre>\n<p><strong>\u89e3\u7b54\u7528\u6237\u95ee\u9898<\/strong><\/p>\n<p>answer_question: \u8fd9\u662f\u6211\u4eec RAG\uff08\u68c0\u7d22\u589e\u5f3a\u751f\u6210\uff09\u7cfb\u7edf\u7684\u6838\u5fc3\u529f\u80fd\u3002\u6b64\u529f\u80fd\u5229\u7528\u5df2\u5904\u7406\u7684\u6570\u636e\u56de\u7b54\u7528\u6237\u7684\u7279\u5b9a\u95ee\u9898\uff0c\u5145\u5206\u5c55\u73b0\u4e86\u7ed3\u5408\u68c0\u7d22\u7684\u751f\u6210\u6280\u672f\u7684\u80fd\u529b\u3002<\/p>\n<ol>\n<li>\n<p><strong>\u6570\u636e\u5e93\u521d\u59cb\u5316\uff1a<\/strong> \u5b83\u9996\u5148\u786e\u8ba4\u5b58\u50a8\u5df2\u5904\u7406\u6bb5\u843d\u7684\u6570\u636e\u5e93\u662f\u5426\u51c6\u5907\u5c31\u7eea\u3002\u5982\u679c\u5c1a\u672a\u521d\u59cb\u5316\uff0c\u7cfb\u7edf\u5c06\u914d\u7f6e\u6570\u636e\u5e93\uff0c\u786e\u4fdd\u80fd\u591f\u9ad8\u6548\u5730\u8fdb\u884c\u4fe1\u606f\u68c0\u7d22\u3002<\/p>\n<\/li>\n<li>\n<p><strong>\u4fe1\u606f\u68c0\u7d22\u4e0e\u56de\u7b54\u751f\u6210\uff1a<\/strong> \u63a5\u7740\uff0c\u7cfb\u7edf\u5c06\u7528\u6237\u7684\u63d0\u95ee\u4e0e\u4ece\u6570\u636e\u5e93\u4e2d\u68c0\u7d22\u5230\u7684\u76f8\u5173\u4fe1\u606f\u7ed3\u5408\uff0c\u8fdb\u884c\u667a\u80fd\u5339\u914d\u3002\u8fd9\u79cd\u7cbe\u51c6\u68c0\u7d22\u786e\u4fdd\u4e86 AI \u63d0\u4f9b\u7684\u7b54\u6848\u65e2\u51c6\u786e\u53c8\u5207\u9898\u3002<\/p>\n<\/li>\n<li>\n<p><strong>\u751f\u6210\u5177\u6709\u60c5\u5883\u611f\u77e5\u7684\u7b54\u6848\uff1a<\/strong> AI 
\u5728\u638c\u63e1\u4e86\u95ee\u9898\u548c\u6700\u5339\u914d\u7684\u4e0a\u4e0b\u6587\u540e\uff0c\u4f1a\u751f\u6210\u4e00\u4e2a\u7b80\u660e\u76f4\u63a5\u7684\u7b54\u6848\u6765\u56de\u5e94\u7528\u6237\u7684\u95ee\u9898\u3002<\/p>\n<\/li>\n<\/ol>\n<p>\u901a\u8fc7\u8fd9\u4e00\u8fc7\u7a0b\uff0c\u7528\u6237\u80fd\u5f97\u5230\u9488\u5bf9\u6027\u7684\u3001\u4fe1\u606f\u4e30\u5bcc\u7684\u7b54\u590d\uff0c\u8fd9\u5927\u5927\u63d0\u5347\u4e86\u4ed6\u4eec\u4e0e\u64ad\u5ba2\u5185\u5bb9\u7684\u4e92\u52a8\u4f53\u9a8c\u3002<\/p>\n<pre><\/div><div class=\"code-block-extension-headerRight\"><span class=\"code-block-extension-lang\">ini<\/span><div class=\"code-block-extension-copyCodeBtn\">\u590d\u5236\u4ee3\u7801<\/div><\/div><\/div><code class=\"hljs language-ini code-block-extension-codeShowNum\" lang=\"ini\"><span class=\"code-block-extension-codeLine\" data-line-num=\"1\">def answer_question(question, openai_api_key, model_name):<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"2\">        <span class=\"hljs-attr\">llm4<\/span> = ChatOpenAI(model_name=model_name, temperature=<span class=\"hljs-number\">0<\/span>, openai_api_key=openai_api_key)<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"3\">        global global_chromadb, global_short_documents<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"4\">    <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"5\">        if global_chromadb is None and global_short_documents is not None:<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"6\">            init_chromadb(openai_api_key, <span class=\"hljs-attr\">documents<\/span>=global_short_documents)<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"7\">        <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"8\">        logging.info(f\"Answering question: {question}\")<\/span>\n<span class=\"code-block-extension-codeLine\" 
data-line-num=\"9\">        <span class=\"hljs-attr\">chatTemplate<\/span> = <span class=\"hljs-string\">\"\"\"<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"10\">        You are an AI assistant tasked with answering questions based on context from a podcast conversation. Use the provided context and relevant chat messages to answer. If unsure, say so. Keep your answer to three sentences or less, focusing on the most relevant information.<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"11\">        Chat Messages (if relevant): {chat_history}<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"12\">        Question: {question} <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"13\">        Context from Podcast: {context} <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"14\">        Answer:\"\"\"<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"15\">        <span class=\"hljs-attr\">QA_CHAIN_PROMPT<\/span> = PromptTemplate(input_variables=[<span class=\"hljs-string\">\"context\"<\/span>, <span class=\"hljs-string\">\"question\"<\/span>, <span class=\"hljs-string\">\"chat_history\"<\/span>],template=chatTemplate)<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"16\">        <span class=\"hljs-attr\">qa_chain<\/span> = ConversationalRetrievalChain.from_llm(<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"17\">            <span class=\"hljs-attr\">llm<\/span>=llm4, <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"18\">            <span class=\"hljs-attr\">chain_type<\/span>=<span class=\"hljs-string\">\"stuff\"<\/span>, <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"19\">            <span class=\"hljs-attr\">retriever<\/span>=global_chromadb.as_retriever(search_type=<span class=\"hljs-string\">\"mmr\"<\/span>, search_kwargs={<span 
class=\"hljs-string\">\"k\"<\/span>:<span class=\"hljs-number\">12<\/span>}),<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"20\">            <span class=\"hljs-attr\">memory<\/span>=conversation_memory,<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"21\">            <span class=\"hljs-comment\">#return_source_documents=True,<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"22\">            <span class=\"hljs-attr\">combine_docs_chain_kwargs<\/span>={<span class=\"hljs-string\">'prompt'<\/span>: QA_CHAIN_PROMPT},<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"23\">        )<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"24\">        <span class=\"hljs-comment\"># Log the current chat history<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"25\">        <span class=\"hljs-attr\">current_chat_history<\/span> = conversation_memory.load_memory_variables({})<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"26\">        logging.info(f\"Current Chat History: {current_chat_history}\")<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"27\">        <span class=\"hljs-attr\">response<\/span> = qa_chain({<span class=\"hljs-string\">\"question\"<\/span>: question}) <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"28\">        logging.info(f\"this is the result: {response}\")<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"29\">        <span class=\"hljs-attr\">output<\/span> = response[<span class=\"hljs-string\">'answer'<\/span>]    <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"30\">    <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"31\">        return output<\/span>\n<\/code><\/pre>\n<h2 data-id=\"heading-6\">\u7b2c 3 \u90e8\u5206\uff1aStreamlit \u7528\u6237\u754c\u9762<\/h2>\n<h3 
data-id=\"heading-7\">\u6fc0\u6d3b\u5e94\u7528\u7684\u7075\u9b42<\/h3>\n<p>\u6700\u7ec8\uff0c\u6211\u4eec\u91c7\u7528 Streamlit \u6765\u6253\u9020\u4e00\u4e2a\u4e92\u52a8\u6027\u5f3a\u7684\u7f51\u7edc\u5e94\u7528\uff0c\u5b83\u6574\u5408\u4e86\u6240\u6709\u7684\u529f\u80fd\u3002\u7528\u6237\u53ef\u4ee5\u8f93\u5165 YouTube \u89c6\u9891\u94fe\u63a5\uff0c\u63d0\u51fa\u95ee\u9898\uff0c\u5e76\u83b7\u5f97 AI \u751f\u6210\u7684\u6458\u8981\u548c\u7b54\u6848\u3002\u4ee5\u4e0b\u4ee3\u7801\u662f\u6211\u8bbe\u8ba1\u7684\u754c\u9762\u7248\u672c\uff0c\u4f60\u53ef\u4ee5\u76f4\u63a5\u91c7\u7528\u6216\u6839\u636e\u4e2a\u4eba\u559c\u597d\u8fdb\u884c\u8c03\u6574\u3002<\/p>\n<pre><\/div><div class=\"code-block-extension-headerRight\"><span class=\"code-block-extension-lang\">python<\/span><div class=\"code-block-extension-copyCodeBtn\">\u590d\u5236\u4ee3\u7801<\/div><\/div><\/div><code class=\"hljs language-python code-block-extension-codeShowNum\" lang=\"python\"><span class=\"code-block-extension-codeLine\" data-line-num=\"1\"><span class=\"hljs-keyword\">import<\/span> streamlit <span class=\"hljs-keyword\">as<\/span> st<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"2\">    <span class=\"hljs-keyword\">from<\/span> youtuber <span class=\"hljs-keyword\">import<\/span> fetch_youtube_captions<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"3\">    <span class=\"hljs-keyword\">from<\/span> agent <span class=\"hljs-keyword\">import<\/span> process_and_cluster_captions, generate_summary, answer_question, reset_globals<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"4\">    <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"5\">    <span class=\"hljs-comment\"># Set Streamlit page configuration with custom tab title<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"6\">    st.set_page_config(page_title=<span class=\"hljs-string\">\"\ud83c\udfc4GPTpod\"<\/span>, 
page_icon=<span class=\"hljs-string\">\"\ud83c\udfc4\"<\/span>, layout=<span class=\"hljs-string\">\"wide\"<\/span>)<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"7\">    <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"8\">    <span class=\"hljs-keyword\">def<\/span> <span class=\"hljs-title function_\">user_query<\/span>(<span class=\"hljs-params\">question, openai_api_key, model_name<\/span>):<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"9\">        <span class=\"hljs-string\">\"\"\"Process and display the query response.\"\"\"<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"10\">        <span class=\"hljs-comment\"># Add the user's question to the conversation<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"11\">        st.session_state.conversation.append((<span class=\"hljs-string\">f\"<span class=\"hljs-subst\">{question}<\/span>\"<\/span>, <span class=\"hljs-string\">\"user-message\"<\/span>))<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"12\">    <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"13\">        <span class=\"hljs-comment\"># Check if this query has been processed before<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"14\">        <span class=\"hljs-keyword\">if<\/span> question <span class=\"hljs-keyword\">not<\/span> <span class=\"hljs-keyword\">in<\/span> st.session_state.processed_questions:<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"15\">            <span class=\"hljs-comment\"># Process the query<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"16\">            answer = answer_question(question, openai_api_key, model_name)<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"17\">            <span class=\"hljs-keyword\">if<\/span> <span 
class=\"hljs-built_in\">isinstance<\/span>(answer, <span class=\"hljs-built_in\">str<\/span>):<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"18\">                st.session_state.conversation.append((<span class=\"hljs-string\">f\"<span class=\"hljs-subst\">{answer}<\/span>\"<\/span>, <span class=\"hljs-string\">\"grimoire-message\"<\/span>))<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"19\">            <span class=\"hljs-keyword\">else<\/span>:<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"20\">                st.session_state.conversation.append((<span class=\"hljs-string\">\"Could not find a proper answer.\"<\/span>, <span class=\"hljs-string\">\"grimoire-message\"<\/span>))<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"21\">            <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"22\">            st.rerun()<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"23\">    <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"24\">            <span class=\"hljs-comment\"># Mark this question as processed<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"25\">            st.session_state.processed_questions.add(question)<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"26\">    <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"27\">    <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"28\">    <span class=\"hljs-comment\"># Initialize session state<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"29\">    <span class=\"hljs-keyword\">if<\/span> <span class=\"hljs-string\">'conversation'<\/span> <span class=\"hljs-keyword\">not<\/span> <span class=\"hljs-keyword\">in<\/span> st.session_state:<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"30\">        
st.session_state.conversation = []<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"31\">        st.session_state.asked_questions = <span class=\"hljs-built_in\">set<\/span>()<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"32\">        st.session_state.processed_questions = <span class=\"hljs-built_in\">set<\/span>()<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"33\">    <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"34\">    <span class=\"hljs-comment\"># Sidebar for input and operations<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"35\">    <span class=\"hljs-keyword\">with<\/span> st.sidebar:<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"36\">        st.title(<span class=\"hljs-string\">\"GPT Podcast Surfer\ud83c\udf0a\ud83c\udfc4\ud83c\udffc\"<\/span>)<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"37\">        st.image(<span class=\"hljs-string\">\"img.png\"<\/span>) <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"38\">    <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"39\">        <span class=\"hljs-comment\"># Expandable Instructions<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"40\">        <span class=\"hljs-keyword\">with<\/span> st.expander(<span class=\"hljs-string\">\"\ud83d\udd0d How to use:\"<\/span>, expanded=<span class=\"hljs-literal\">False<\/span>):<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"41\">            st.markdown(<span class=\"hljs-string\">\"\"\"<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"42\">                - \ud83d\udd10 **Enter your OpenAI API Key.**<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"43\">                - \ud83d\udcfa **Paste a YouTube URL.**<\/span>\n<span 
class=\"code-block-extension-codeLine\" data-line-num=\"44\">                - \ud83c\udfc3\u200d\u2642\ufe0f **Click 'Run it' to process.**<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"45\">                - \ud83d\udd75\ufe0f\u200d\u2642\ufe0f **Ask questions in the chat.**<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"46\">            \"\"\")<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"47\">    <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"48\">        <span class=\"hljs-comment\"># Model selection in the sidebar<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"49\">        model_choice = st.sidebar.selectbox(<span class=\"hljs-string\">\"Choose Model:\"<\/span>, <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"50\">                                            (<span class=\"hljs-string\">\"GPT-4 Turbo\"<\/span>, <span class=\"hljs-string\">\"GPT-3.5 Turbo\"<\/span>), <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"51\">                                            index=<span class=\"hljs-number\">0<\/span>)  <span class=\"hljs-comment\"># Default to GPT-4 Turbo<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"52\">    <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"53\">        <span class=\"hljs-comment\"># Map friendly names to actual model names<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"54\">        model_name_mapping = {<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"55\">            <span class=\"hljs-string\">\"GPT-4 Turbo\"<\/span>: <span class=\"hljs-string\">\"gpt-4-1106-preview\"<\/span>,<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"56\">            <span class=\"hljs-string\">\"GPT-3.5 Turbo\"<\/span>: <span 
class=\"hljs-string\">\"gpt-3.5-turbo\"<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"57\">        }<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"58\">    <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"59\">        selected_model = model_name_mapping[model_choice]<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"60\">        st.session_state[<span class=\"hljs-string\">'selected_model'<\/span>] = model_name_mapping[model_choice]<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"61\">    <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"62\">    <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"63\">        <span class=\"hljs-comment\"># Input for OpenAI API Key<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"64\">        openai_api_key = st.text_input(<span class=\"hljs-string\">\"Enter your OpenAI API Key:\"<\/span>, <span class=\"hljs-built_in\">type<\/span>=<span class=\"hljs-string\">\"password\"<\/span>)<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"65\">    <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"66\">        <span class=\"hljs-comment\"># Save the API key in session state if it's entered<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"67\">        <span class=\"hljs-keyword\">if<\/span> openai_api_key:<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"68\">            st.session_state[<span class=\"hljs-string\">'openai_api_key'<\/span>] = openai_api_key<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"69\">    <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"70\">        youtube_url = st.text_input(<span class=\"hljs-string\">\"Enter YouTube URL:\"<\/span>)<\/span>\n<span 
class=\"code-block-extension-codeLine\" data-line-num=\"71\">    <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"72\">        <span class=\"hljs-comment\"># Button to trigger processing<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"73\">        <span class=\"hljs-keyword\">if<\/span> st.button(<span class=\"hljs-string\">\"\ud83d\ude80Run it\"<\/span>):<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"74\">            <span class=\"hljs-keyword\">if<\/span> openai_api_key:<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"75\">                <span class=\"hljs-keyword\">if<\/span> youtube_url <span class=\"hljs-keyword\">and<\/span> <span class=\"hljs-string\">'processed_data'<\/span> <span class=\"hljs-keyword\">not<\/span> <span class=\"hljs-keyword\">in<\/span> st.session_state:<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"76\">                    reset_globals()<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"77\">                    <span class=\"hljs-keyword\">with<\/span> st.spinner(<span class=\"hljs-string\">'\ud83d\udc69\u200d\ud83c\udf73 GPT is cooking up your podcast... 
hang tight for a few secs\ud83c\udf73'<\/span>):<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"78\">                        captions = fetch_youtube_captions(youtube_url)<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"79\">                        <span class=\"hljs-keyword\">if<\/span> captions:<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"80\">                            representative_docs = process_and_cluster_captions(captions, st.session_state[<span class=\"hljs-string\">'openai_api_key'<\/span>])<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"81\">                            summary = generate_summary(representative_docs, st.session_state[<span class=\"hljs-string\">'openai_api_key'<\/span>], selected_model)<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"82\">                            st.session_state.processed_data = (representative_docs, summary)<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"83\">                            <span class=\"hljs-keyword\">if<\/span> <span class=\"hljs-string\">'summary_displayed'<\/span> <span class=\"hljs-keyword\">not<\/span> <span class=\"hljs-keyword\">in<\/span> st.session_state:<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"84\">                                st.session_state.conversation.append((<span class=\"hljs-string\">f\"Here's a rundown of the conversation: <span class=\"hljs-subst\">{summary}<\/span>\"<\/span>, <span class=\"hljs-string\">\"summary-message\"<\/span>))<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"85\">                                guiding_message = <span class=\"hljs-string\">\"Feel free to ask me anything else about it! 
:)\"<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"86\">                                st.session_state.conversation.append((guiding_message, <span class=\"hljs-string\">\"grimoire-message\"<\/span>))<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"87\">                                st.session_state[<span class=\"hljs-string\">'summary_displayed'<\/span>] = <span class=\"hljs-literal\">True<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"88\">                        <span class=\"hljs-keyword\">else<\/span>:<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"89\">                            st.error(<span class=\"hljs-string\">\"Failed to fetch captions.\"<\/span>)<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"90\">            <span class=\"hljs-keyword\">else<\/span>:<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"91\">                st.warning(<span class=\"hljs-string\">\"Please add the OpenAI API key first.\"<\/span>)<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"92\">    <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"93\">    <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"94\">    <span class=\"hljs-comment\"># Main app logic<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"95\">    <span class=\"hljs-keyword\">for<\/span> message, css_class <span class=\"hljs-keyword\">in<\/span> st.session_state.conversation:<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"96\">        role = <span class=\"hljs-string\">\"assistant\"<\/span> <span class=\"hljs-keyword\">if<\/span> css_class <span class=\"hljs-keyword\">in<\/span> [<span class=\"hljs-string\">\"grimoire-message\"<\/span>, <span class=\"hljs-string\">\"summary-message\"<\/span>, <span 
class=\"hljs-string\">\"suggestion-message\"<\/span>] <span class=\"hljs-keyword\">else<\/span> <span class=\"hljs-string\">\"user\"<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"97\">        <span class=\"hljs-keyword\">with<\/span> st.chat_message(role):<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"98\">            st.markdown(message)<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"99\">    <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"100\">    <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"101\">    <span class=\"hljs-comment\"># Chat input field<\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"102\">    <span class=\"hljs-keyword\">if<\/span> prompt := st.chat_input(<span class=\"hljs-string\">\"Ask me anything about the podcast...\"<\/span>):<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"103\">        user_query(prompt, st.session_state.get(<span class=\"hljs-string\">'openai_api_key'<\/span>, <span class=\"hljs-string\">''<\/span>), st.session_state.get(<span class=\"hljs-string\">'selected_model'<\/span>, <span class=\"hljs-string\">'gpt-4-1106-preview'<\/span>))<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"104\"><\/span>\n<\/code><\/pre>\n<p>\u8fd9\u662f\u7528\u6237\u754c\u9762\u7684\u6837\u5b50\uff1a<\/p>\n<p>\n<figure class=\"wp-block-image size-large\"><img decoding=\"async\" src=\"https:\/\/www.nicekj.com\/wp-content\/uploads\/replace\/2a70d74924d3ca6af40aa17f9b9c00d3.png\" alt=\"image.png\" \/><\/figure>\n<\/p>\n<h2 data-id=\"heading-8\">\u7b2c 4 \u90e8\u5206\uff1a\u589e\u5f3a\u7ed3\u679c\u7684\u53ef\u9009 RAG-Fusion \u6280\u672f<\/h2>\n<p><strong>RAG-Fusion \u6280\u672f\u7b80\u4ecb<\/strong><\/p>\n<p>RAG-Fusion 
\u662f\u6211\u4e3a\u4e86\u63d0\u5347\u7cfb\u7edf\u800c\u52a0\u5165\u7684\u4e00\u4e2a\u8bd5\u9a8c\u6027\u529f\u80fd\u3002\u5c3d\u7ba1\u8fd9\u4e0d\u662f\u5fc5\u987b\u7684\uff0c\u4f46\u5b83\u80fd\u5728\u589e\u52a0\u5c11\u91cf\u4ee3\u7801\u548c\u7a0d\u5fae\u727a\u7272\u901f\u5ea6\u7684\u57fa\u7840\u4e0a\uff0c\u63d0\u4f9b\u66f4\u4f18\u7684\u7ed3\u679c\u3002\u5176\u6838\u5fc3\u601d\u60f3\u5728\u4e8e\u63d0\u9ad8 AI \u7406\u89e3\u95ee\u9898\u548c\u63d0\u9ad8\u56de\u7b54\u51c6\u786e\u5ea6\u7684\u80fd\u529b\u3002<\/p>\n<h3 data-id=\"heading-9\"><strong>\u4e3a\u4ec0\u4e48\u9009\u62e9 RAG-Fusion\uff1f<\/strong><\/h3>\n<ul>\n<li>\n<p><strong>\u5f25\u8865\u4e0d\u8db3\uff1a<\/strong> \u5b83\u901a\u8fc7\u751f\u6210\u548c\u91cd\u65b0\u8bc4\u4f30\u7528\u6237\u95ee\u9898\u7684\u591a\u4e2a\u7248\u672c\uff0c\u5f25\u8865\u4e86\u6807\u51c6 RAG \u7684\u67d0\u4e9b\u5c40\u9650\uff0c\u786e\u4fdd\u66f4\u5e7f\u6cdb\u548c\u7cbe\u51c6\u7684\u641c\u7d22\u3002<\/p>\n<\/li>\n<li>\n<p><strong>\u66f4\u4f18\u7684\u641c\u7d22\u7ed3\u679c\uff1a<\/strong> \u5b83\u7ed3\u5408\u4e86\u4e92\u60e0\u6392\u540d\u878d\u5408\u6280\u672f\u548c\u81ea\u5b9a\u4e49\u5411\u91cf\u8bc4\u5206\u65b9\u6cd5\uff0c\u5f97\u5230\u66f4\u5168\u9762\u548c\u7cbe\u786e\u7684\u7b54\u6848\u3002<\/p>\n<\/li>\n<\/ul>\n<p>RAG-Fusion \u529b\u56fe\u4e0d\u53ea\u662f\u89e3\u8bfb\u7528\u6237\u6240\u95ee\u7684\u95ee\u9898\uff0c\u8fd8\u8981\u6d1e\u6089\u7528\u6237\u771f\u6b63\u610f\u56fe\u63d0\u95ee\u7684\u5185\u5bb9\uff0c\u6df1\u5165\u6316\u6398\u90a3\u4e9b\u5e38\u88ab\u5ffd\u7565\u7684\u6df1\u5c42\u89c1\u89e3\u3002<\/p>\n<h3 data-id=\"heading-10\">RAG-Fusion \u7684\u5de5\u4f5c\u6d41\u7a0b<\/h3>\n<ol>\n<li>\n<p><strong>\u8f6c\u6362\u67e5\u8be2\uff1a<\/strong> 
\u6211\u4eec\u9996\u5148\u7528\u8bed\u8a00\u6a21\u578b\u5c06\u539f\u59cb\u7528\u6237\u67e5\u8be2\u8f6c\u6362\u6210\u51e0\u4e2a\u76f8\u4f3c\u4f46\u5404\u6709\u4e0d\u540c\u7684\u95ee\u9898\u3002\u8fd9\u79cd\u591a\u89d2\u5ea6\u7684\u65b9\u6cd5\u5bf9\u4e8e\u5168\u9762\u641c\u7d22\u975e\u5e38\u5173\u952e\u3002<\/p>\n<\/li>\n<li>\n<p><strong>\u589e\u5f3a\u7684\u5411\u91cf\u641c\u7d22\uff1a<\/strong> \u8fd9\u4e9b\u65b0\u751f\u6210\u7684\u67e5\u8be2\u5c06\u8fdb\u884c\u5411\u91cf\u641c\u7d22\uff0c\u805a\u96c6\u591a\u5143\u5316\u7684\u7ed3\u679c\u3002<\/p>\n<\/li>\n<li>\n<p><strong>\u667a\u80fd\u91cd\u6392\uff1a<\/strong> \u901a\u8fc7\u4e92\u60e0\u6392\u540d\u878d\u5408\uff0c\u6211\u4eec\u5bf9\u6240\u6709\u8fd9\u4e9b\u7ed3\u679c\u8fdb\u884c\u91cd\u7ec4\uff0c\u628a\u6700\u76f8\u5173\u7684\u7ed3\u679c\u653e\u5728\u9996\u4f4d\u3002<\/p>\n<\/li>\n<li>\n<p><strong>\u5b9a\u5236\u6700\u7ec8\u8f93\u51fa\uff1a<\/strong> \u7ed3\u5408\u9876\u5c16\u7ed3\u679c\u548c\u65b0\u751f\u6210\u7684\u67e5\u8be2\uff0c\u5f15\u5bfc\u8bed\u8a00\u6a21\u578b\u751f\u6210\u4e00\u4e2a\u57fa\u4e8e\u66f4\u5e7f\u6cdb\u4e0a\u4e0b\u6587\u7684\u56de\u7b54\u3002<\/p>\n<\/li>\n<\/ol>\n<h3 data-id=\"heading-11\">RAG-Fusion \u7684\u6838\u5fc3\u529f\u80fd<\/h3>\n<ul>\n<li>\n<p><strong>reciprocal_rank_fusion\uff1a<\/strong> \u8fd9\u4e2a\u529f\u80fd\u4f1a\u6839\u636e\u76f8\u5173\u6027\u5206\u6570\u91cd\u65b0\u6392\u5e8f\u641c\u7d22\u7ed3\u679c\uff0c\u786e\u4fdd\u4f18\u5148\u8003\u8651\u6700\u4f73\u7b54\u6848\u3002<\/p>\n<\/li>\n<li>\n<p><strong>generate_multiple_queries\uff1a<\/strong> \u5b83\u80fd\u751f\u6210\u521d\u59cb\u67e5\u8be2\u7684\u591a\u4e2a\u53d8\u79cd\uff0c\u4ece\u800c\u6269\u5927\u641c\u7d22\u7684\u8303\u56f4\u3002<\/p>\n<\/li>\n<li>\n<p><strong>answer_question\uff1a<\/strong> 
\u8fd9\u662f\u5c06\u6240\u6709\u90e8\u5206\u878d\u5408\u5728\u4e00\u8d77\u7684\u529f\u80fd\u6240\u5728\u3002\u9996\u5148\uff0c\u5b83\u751f\u6210\u591a\u4e2a\u67e5\u8be2\uff0c\u5bf9\u6bcf\u4e00\u4e2a\u67e5\u8be2\u68c0\u7d22\u76f8\u5e94\u7684\u6587\u6863\uff0c\u5e76\u8fdb\u884c\u4e92\u60e0\u6392\u540d\u878d\u5408\u3002\u7136\u540e\u5b83\u5229\u7528\u8fd9\u4e9b\u7cbe\u70bc\u540e\u7684\u7ed3\u679c\u548c\u81ea\u5b9a\u4e49\u6570\u636e\u5e93\uff0c\u5f15\u5bfc AI \u5236\u5b9a\u51fa\u66f4\u52a0\u4e30\u5bcc\u548c\u7cbe\u786e\u7684\u56de\u7b54\u3002<\/p>\n<\/li>\n<\/ul>\n\n<pre><\/div><div class=\"code-block-extension-headerRight\"><span class=\"code-block-extension-lang\">ini<\/span><div class=\"code-block-extension-copyCodeBtn\">\u590d\u5236\u4ee3\u7801<\/div><\/div><\/div><code class=\"hljs language-ini code-block-extension-codeShowNum\" lang=\"ini\"><span class=\"code-block-extension-codeLine\" data-line-num=\"1\">def reciprocal_rank_fusion(results: list<span class=\"hljs-section\">[list]<\/span>, <span class=\"hljs-attr\">k<\/span>=<span class=\"hljs-number\">60<\/span>):  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"2\"><span class=\"hljs-attr\">fused_scores<\/span> = {}  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"3\">for docs in results:  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"4\"><span class=\"hljs-comment\"># the docs are returned in sorted order of relevance  <\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"5\">for rank, doc in enumerate(docs):  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"6\"><span class=\"hljs-attr\">doc_str<\/span> = dumps(doc)  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"7\">logging.info(f\"Serialized Document: {doc_str}\")  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"8\">if doc_str not in fused_scores:  <\/span>\n<span 
class=\"code-block-extension-codeLine\" data-line-num=\"9\">fused_scores<span class=\"hljs-section\">[doc_str]<\/span> = 0  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"10\"><span class=\"hljs-attr\">previous_score<\/span> = fused_scores[doc_str]  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"11\">fused_scores<span class=\"hljs-section\">[doc_str]<\/span> += 1 \/ (rank + k)  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"12\">  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"13\"><span class=\"hljs-attr\">reranked_results<\/span> = [  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"14\">loads(doc)  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"15\">for doc, score in sorted(fused_scores.items(), key=lambda x: x[<span class=\"hljs-number\">1<\/span>], reverse=<span class=\"hljs-literal\">True<\/span>)  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"16\">]  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"17\">logging.info(f\"Reciprocal Rank Fusion applied. 
Reranked Results: {reranked_results<span class=\"hljs-section\">[:10]<\/span>}\") <span class=\"hljs-comment\"># Log top 10 results  <\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"18\">return reranked_results  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"19\">  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"20\">  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"21\">def generate_multiple_queries(question, llm):  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"22\"><span class=\"hljs-attr\">prompt<\/span> = PromptTemplate(  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"23\"><span class=\"hljs-attr\">input_variables<\/span>=[<span class=\"hljs-string\">\"question\"<\/span>],  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"24\"><span class=\"hljs-attr\">template<\/span>=<span class=\"hljs-string\">\"\"\"You are an AI language model assistant. Your task is to OUTPUT 4  <\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"25\">different versions of the given user question to retrieve relevant documents from a vector  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"26\">database. By generating multiple perspectives on the user question, your goal is to help  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"27\">the user overcome some of the limitations of the distance-based similarity search.  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"28\">Provide these alternative questions separated by newlines.  
<\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"29\">Original question: {question}\"\"\",  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"30\">)  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"31\"><span class=\"hljs-comment\"># Create a chain with the language model and the prompt  <\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"32\"><span class=\"hljs-attr\">llm_chain<\/span> = LLMChain(llm=llm, prompt=prompt, output_parser=StrOutputParser())  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"33\">  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"34\"><span class=\"hljs-comment\"># Run the chain  <\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"35\"><span class=\"hljs-attr\">response<\/span> = llm_chain.run({<span class=\"hljs-string\">\"question\"<\/span>: question})  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"36\"><span class=\"hljs-attr\">queries<\/span> = response.split(<span class=\"hljs-string\">\"\\n\"<\/span>)  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"37\">  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"38\">logging.info(f\"Generated Queries: {queries}\")  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"39\">return queries  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"40\">  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"41\">  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"42\">def answer_question(question, openai_api_key, model_name):  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"43\"><span class=\"hljs-attr\">llm4<\/span> = ChatOpenAI(model_name=model_name, temperature=<span class=\"hljs-number\">0.1<\/span>, openai_api_key=openai_api_key)  <\/span>\n<span 
class=\"code-block-extension-codeLine\" data-line-num=\"44\">global global_chromadb, global_short_documents  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"45\">  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"46\">if global_chromadb is None and global_short_documents is not None:  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"47\">init_chromadb(openai_api_key, <span class=\"hljs-attr\">documents<\/span>=global_short_documents)  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"48\">  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"49\">logging.info(f\"Answering question: {question}\")  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"50\"><span class=\"hljs-comment\"># Generate multiple queries  <\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"51\"><span class=\"hljs-attr\">queries<\/span> = generate_multiple_queries(question, llm4)  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"52\">  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"53\"><span class=\"hljs-comment\"># Retrieve documents for each query  <\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"54\"><span class=\"hljs-attr\">results<\/span> = []  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"55\">for query in queries:  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"56\"><span class=\"hljs-attr\">retrieved_docs_with_scores<\/span> = global_chromadb.similarity_search_with_score(query, k=<span class=\"hljs-number\">8<\/span>)  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"57\"><span class=\"hljs-comment\"># Log the number of documents retrieved for each query and the first 3 docs  <\/span><\/span>\n<span class=\"code-block-extension-codeLine\" 
data-line-num=\"58\">logging.info(f\"Retrieved {len(retrieved_docs_with_scores)} documents for query '{query}': {retrieved_docs_with_scores<span class=\"hljs-section\">[:3]<\/span>}\")  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"59\">results.append(retrieved_docs_with_scores)  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"60\">  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"61\"><span class=\"hljs-comment\"># Apply reciprocal rank fusion  <\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"62\"><span class=\"hljs-attr\">reranked_results<\/span> = reciprocal_rank_fusion(results)  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"63\">logging.info(f\"Number of reranked documents: {len(reranked_results)}\")  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"64\">  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"65\"><span class=\"hljs-comment\">#extract the Document object only  <\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"66\"><span class=\"hljs-attr\">reranked_documents<\/span> = [doc for doc, _ in reranked_results]  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"67\">  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"68\"><span class=\"hljs-comment\"># Create a new Chroma instance with reranked results  <\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"69\"><span class=\"hljs-attr\">custom_chromadb<\/span> = Chroma.from_documents(documents=reranked_documents, embedding=OpenAIEmbeddings(openai_api_key=openai_api_key))  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"70\">  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"71\"><span class=\"hljs-attr\">chatTemplate<\/span> = <span class=\"hljs-string\">\"\"\"  <\/span><\/span>\n<span 
class=\"code-block-extension-codeLine\" data-line-num=\"72\">You are an AI assistant tasked with answering questions based on context from a podcast conversation.  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"73\">Use the provided context and relevant chat messages to answer. If unsure, say so. Keep your answer to four sentences or less, focusing on the most relevant information.  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"74\">Chat Messages (if relevant): {chat_history}  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"75\">Question: {question}  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"76\">Context from Podcast: {context}  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"77\">Answer:\"\"\"  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"78\"><span class=\"hljs-attr\">QA_CHAIN_PROMPT<\/span> = PromptTemplate(input_variables=[<span class=\"hljs-string\">\"context\"<\/span>, <span class=\"hljs-string\">\"question\"<\/span>, <span class=\"hljs-string\">\"chat_history\"<\/span>],template=chatTemplate)  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"79\"><span class=\"hljs-attr\">qa_chain<\/span> = ConversationalRetrievalChain.from_llm(  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"80\"><span class=\"hljs-attr\">llm<\/span>=llm4,  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"81\"><span class=\"hljs-attr\">chain_type<\/span>=<span class=\"hljs-string\">\"stuff\"<\/span>,  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"82\"><span class=\"hljs-attr\">retriever<\/span>=custom_chromadb.as_retriever(search_type=<span class=\"hljs-string\">\"similarity\"<\/span>, search_kwargs={<span class=\"hljs-string\">\"k\"<\/span>:<span class=\"hljs-number\">10<\/span>}),  <\/span>\n<span class=\"code-block-extension-codeLine\" 
data-line-num=\"83\"><span class=\"hljs-attr\">memory<\/span>=conversation_memory,  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"84\"><span class=\"hljs-attr\">return_source_documents<\/span>=<span class=\"hljs-literal\">True<\/span>,  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"85\"><span class=\"hljs-attr\">combine_docs_chain_kwargs<\/span>={<span class=\"hljs-string\">'prompt'<\/span>: QA_CHAIN_PROMPT},  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"86\">)  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"87\"><span class=\"hljs-comment\"># Log the current chat history  <\/span><\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"88\"><span class=\"hljs-attr\">current_chat_history<\/span> = conversation_memory.load_memory_variables({})  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"89\">logging.info(f\"Current Chat History: {current_chat_history}\")  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"90\"><span class=\"hljs-attr\">response<\/span> = qa_chain({<span class=\"hljs-string\">\"question\"<\/span>: question})  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"91\">logging.info(f\"Final response: {response}\")  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"92\"><span class=\"hljs-attr\">output<\/span> = response[<span class=\"hljs-string\">'answer'<\/span>]  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"93\">  <\/span>\n<span class=\"code-block-extension-codeLine\" data-line-num=\"94\">return 
output<\/span>\n<\/code><\/pre>\n<p>\u603b\u7ed3\n\u81f3\u6b64\uff0c\u6211\u4eec\u5b8c\u6210\u4e86\uff01\u4ee5\u4e0a\u662f\u4e00\u4efd\u8be6\u7ec6\u6307\u5357\uff0c\u6559\u4f60\u5982\u4f55\u6253\u9020\u4e00\u4e2a\u4e92\u52a8\u6027\u5f3a\u3001\u667a\u80fd\u5316\u7684\u64ad\u5ba2\u5e94\u7528\u3002\u6b22\u8fce\u5728\u8fd9\u91cc\u8bd5\u7528\uff0c\u5e76\u7559\u4e0b\u60a8\u5b9d\u8d35\u7684\u610f\u89c1\u3002<\/p>\n<p>\u611f\u8c22\u60a8\u7684\u966a\u4f34\uff0c\u795d\u60a8\u7f16\u7a0b\u6109\u5feb\uff01:)<\/p>","protected":false},"excerpt":{"rendered":"<p>\u628a\u542c\u64ad\u5ba2\u7684\u6570\u5c0f\u65f6\u8f6c\u6362\u6210\u5373\u65f6\u7684\u6df1\u523b\u89c1\u89e3 \u4f5c\u4e3a\u4e00\u4e2a\u5bf9\u5065\u5eb7\u6216\u5546\u4e1a\u4e3b\u9898\u64ad\u5ba2\u60c5\u6709\u72ec\u949f\u7684\u7231\u597d\u8005\uff0c\u6211\u5e38\u5e38\u611f\u5230\u6709\u592a\u591a\u7684\u8282\u76ee\u8981\u542c\uff0c\u800c\u65f6\u95f4\u5374\u8fdc\u8fdc\u4e0d\u591f\u3002\u50cf Huberman Lab \u8fd9\u6837\u7684\u64ad\u5ba2\uff0c\u4e00\u671f\u8282\u76ee\u901a\u5e38\u80fd\u957f\u8fbe\u4e09\u56db\u4e2a\u5c0f\u65f6<\/p>\n","protected":false},"author":1,"featured_media":9051,"comment_status":"open","ping_status":"open","sticky":false,"template":"","format":"standard","meta":{"_acf_changed":false,"rank_math_title":"","rank_math_description":"","rank_math_focus_keyword":"","views":"","footnotes":""},"categories":[3],"tags":[127,128,129,136,126],"collection":[],"class_list":["post-1483","post","type-post","status-publish","format-standard","has-post-thumbnail","hentry","category-fenlei2","tag-ai","tag-128","tag-129","tag-136","tag-gpt"],"acf":[],"_links":{"self":[{"href":"https:\/\/www.nicekj.com\/nicekj2024\/wp\/v2\/posts\/1483","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/www.nicekj.com\/nicekj2024\/wp\/v2\/posts"}],"about":[{"href":"https:\/\/www.nicekj.com\/nicekj2024\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"https:\/\/www.nicekj.com\/nicekj2024\/wp\/v2\/users\/1"}],"replies":[{"
embeddable":true,"href":"https:\/\/www.nicekj.com\/nicekj2024\/wp\/v2\/comments?post=1483"}],"version-history":[{"count":0,"href":"https:\/\/www.nicekj.com\/nicekj2024\/wp\/v2\/posts\/1483\/revisions"}],"wp:featuredmedia":[{"embeddable":true,"href":"https:\/\/www.nicekj.com\/nicekj2024\/wp\/v2\/media\/9051"}],"wp:attachment":[{"href":"https:\/\/www.nicekj.com\/nicekj2024\/wp\/v2\/media?parent=1483"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"https:\/\/www.nicekj.com\/nicekj2024\/wp\/v2\/categories?post=1483"},{"taxonomy":"post_tag","embeddable":true,"href":"https:\/\/www.nicekj.com\/nicekj2024\/wp\/v2\/tags?post=1483"},{"taxonomy":"collection","embeddable":true,"href":"https:\/\/www.nicekj.com\/nicekj2024\/wp\/v2\/collection?post=1483"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}