Commit c38054e
Parent(s): 22956f8

requirement.txt

- app.py +4 -4
- requirements.txt +4 -3
app.py
CHANGED
@@ -2,19 +2,19 @@ import gradio as gr
 import os
 import pinecone
 import time
-from torch import cuda
+# from torch import cuda
 from langchain.embeddings.huggingface import HuggingFaceEmbeddings
 # import PyPDF2
 # import re
 from langchain.vectorstores import Pinecone
 
 embed_model_id = 'sentence-transformers/all-MiniLM-L6-v2'
-device = f'cuda:{cuda.current_device()}' if cuda.is_available() else 'cpu'
+# device = f'cuda:{cuda.current_device()}' if cuda.is_available() else 'cpu'
 
 embed_model = HuggingFaceEmbeddings(
     model_name=embed_model_id,
-    model_kwargs={'device': device},
-    encode_kwargs={'device': device, 'batch_size': 32}
+    # model_kwargs={'device': device},
+    # encode_kwargs={'device': device, 'batch_size': 32}
 )
 
 # get API key from app.pinecone.io and environment from console
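The change comments out the hard torch/CUDA dependency, so HuggingFaceEmbeddings falls back to its default (CPU) device. As a minimal sketch, not part of this commit, the same lines could instead be kept with an optional CUDA path, assuming torch may still arrive as a transitive dependency of sentence-transformers:

# Hedged sketch (not part of this commit): keep the CUDA path optional by
# falling back to CPU when torch is unavailable or no GPU is visible.
from langchain.embeddings.huggingface import HuggingFaceEmbeddings

embed_model_id = 'sentence-transformers/all-MiniLM-L6-v2'

try:
    from torch import cuda
    device = f'cuda:{cuda.current_device()}' if cuda.is_available() else 'cpu'
except ImportError:
    device = 'cpu'  # torch is commented out of requirements.txt on this Space

embed_model = HuggingFaceEmbeddings(
    model_name=embed_model_id,
    model_kwargs={'device': device},
    encode_kwargs={'device': device, 'batch_size': 32},
)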
requirements.txt
CHANGED
@@ -1,4 +1,5 @@
 gradio
-pinecone
-
-
+pinecone-client==2.2.2
+sentence-transformers==2.2.2
+# torch
+langchain==0.0.240
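The pins replace the bare pinecone package with pinecone-client==2.2.2, the pre-3.x client that app.py's `import pinecone` expects. A minimal sketch of how that client version is typically initialised, matching the "get API key from app.pinecone.io and environment from console" comment in app.py; the environment-variable and index names below are placeholders, not taken from this repo:

# Hedged sketch: initialising pinecone-client 2.2.2 (pre-3.x API).
# PINECONE_API_KEY / PINECONE_ENVIRONMENT / "my-index" are assumed names.
import os
import pinecone

pinecone.init(
    api_key=os.environ.get("PINECONE_API_KEY"),          # from app.pinecone.io
    environment=os.environ.get("PINECONE_ENVIRONMENT"),  # from the console
)
index = pinecone.Index("my-index")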