Tejasw1 committed on
Commit 2f9b8b7 · 1 Parent(s): 7f35abf

Upload folder using huggingface_hub

Files changed (3)
  1. langchain_qwen.ipynb +11 -37
  2. langchain_retreival.ipynb +1 -1
  3. report.pdf +0 -0
langchain_qwen.ipynb CHANGED
@@ -65,7 +65,7 @@
   "from langchain.chat_models import ChatOpenAI\n",
   "from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler\n",
   "\n",
-  "ChatOpenAI(openai_api_base='http://20.124.240.6:8080/v1',\n",
+  "llm = ChatOpenAI(openai_api_base='http://20.124.240.6:8080/v1',\n",
   " openai_api_key='none', callbacks=[StreamingStdOutCallbackHandler()], streaming=True,)\n"
   ]
  },
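Note: this hunk binds the ChatOpenAI client to `llm` instead of discarding it, so later cells (notably the FlareChain cell below) can reuse the handle. A minimal sketch of the client pointed at the self-hosted OpenAI-compatible endpoint from the diff; the prompt string is purely illustrative:

    from langchain.chat_models import ChatOpenAI
    from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
    from langchain.schema import HumanMessage

    # Point the OpenAI-compatible client at the self-hosted endpoint from the diff;
    # the API key is a placeholder because the server does not validate it.
    llm = ChatOpenAI(
        openai_api_base='http://20.124.240.6:8080/v1',
        openai_api_key='none',
        streaming=True,
        callbacks=[StreamingStdOutCallbackHandler()],
    )

    # Illustrative call: tokens stream to stdout via the callback handler.
    print(llm([HumanMessage(content="Who are you?")]).content)
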
@@ -137,7 +137,6 @@
   " retriever,\n",
   " \"search_legal_sections\",\n",
   " \"Searches and returns documents regarding Indian law. Accept query as a string. For example: 'Section 298 of Indian Penal Code'.\",\n",
-  " \n",
   ")\n",
   "tools = [tool]\n"
   ]
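Note: this hunk only drops a stray blank string from the argument list of the tool-building call. The enclosing call is not visible in the hunk; in the langchain 0.0.x line it would typically be create_retriever_tool, so the sketch below is an assumption about the surrounding cell:

    # Sketch only: the enclosing call is not shown in this hunk; it is assumed
    # to be create_retriever_tool from langchain's agent toolkits.
    from langchain.agents.agent_toolkits import create_retriever_tool

    tool = create_retriever_tool(
        retriever,  # assumed: a retriever built earlier in the notebook
        "search_legal_sections",
        "Searches and returns documents regarding Indian law. "
        "Accept query as a string. For example: 'Section 298 of Indian Penal Code'.",
    )
    tools = [tool]
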
@@ -246,31 +245,24 @@
   },
   {
   "cell_type": "code",
-  "execution_count": 9,
+  "execution_count": null,
   "metadata": {},
-  "outputs": [
-  {
-  "ename": "NameError",
-  "evalue": "name 'retriever' is not defined",
-  "output_type": "error",
-  "traceback": [ ... ANSI-escaped traceback: NameError raised at the `retriever=retriever` argument of FlareChain.from_llm(...) ... ]
-  }
-  ],
+  "outputs": [],
   "source": [
   "# We set this so we can see what exactly is going on\n",
   "from langchain.chains import FlareChain\n",
   "import langchain\n",
+  "import os\n",
+  "\n",
+  "os.environ['OPENAI_API_KEY'] = 'none'\n",
+  "os.environ['OPENAI_API_BASE'] = 'http://20.124.240.6:8080/v1'\n",
+  "# os.environ['OPEN']\n",
   "\n",
   "langchain.verbose = True\n",
   "\n",
   "\n",
   "flare = FlareChain.from_llm(\n",
-  " llm_n,\n",
+  " llm,\n",
   " retriever=retriever,\n",
   " max_generation_len=164,\n",
   " min_prob=0.3,\n",
@@ -354,27 +346,9 @@
   },
   {
   "cell_type": "code",
-  "execution_count": 7,
+  "execution_count": null,
   "metadata": {},
-  "outputs": [
-  {
-  "name": "stdout",
-  "output_type": "stream",
-  "text": [
-  "I am a large language model created by Alibaba Cloud. I am called QianWen."
-  ]
-  },
-  {
-  "data": {
-  "text/plain": [
-  "AIMessageChunk(content='I am a large language model created by Alibaba Cloud. I am called QianWen.')"
-  ]
-  },
-  "execution_count": 7,
-  "metadata": {},
-  "output_type": "execute_result"
-  }
-  ],
+  "outputs": [],
   "source": [
   "# from langchain.chat_models import ChatOpenAI\n",
   "# from langchain.document_loaders import TextLoader\n",
 
langchain_retreival.ipynb CHANGED
@@ -11,7 +11,7 @@
   },
   "outputs": [],
   "source": [
-  "pip -q install -U langchain huggingface_hub tiktoken PyPDF2 pypdf sentence_transformers together FlagEmbedding faiss-gpu openai text-generation"
+  "pip -q install -U langchain huggingface_hub tiktoken PyPDF2 pypdf sentence_transformers together FlagEmbedding faiss-gpu openai text-generation pymupdf"
   ]
  },
  {
 
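Note: the only change here adds pymupdf to the install line, most plausibly because the notebook loads PDFs through langchain's PyMuPDFLoader, which needs the fitz module shipped by pymupdf. A minimal sketch under that assumption; report.pdf is the file added in this commit:

    # Assumption: the extra dependency is for the pymupdf-backed loader.
    from langchain.document_loaders import PyMuPDFLoader

    docs = PyMuPDFLoader("report.pdf").load()  # one Document per page, with metadata
    print(len(docs), docs[0].metadata)
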
report.pdf ADDED
Binary file (51.9 kB).