new code

src/pdfchatbot.py  (+7 −1)
@@ -78,6 +78,12 @@ class PDFChatBot:
     def create_organic_response(self, history, query):
         self.get_organic_context(query)
         tokenizer = AutoTokenizer.from_pretrained("meta-llama/Meta-Llama-3-8B-Instruct")
+        pipe = pipeline(
+            "text-generation",
+            model="meta-llama/Meta-Llama-3-8B-Instruct",
+            model_kwargs={"torch_dtype": torch.bfloat16},
+            device="cuda",
+        )
         messages = [
             {"role": "system", "content": "From the the contained given below, answer the question of user \n " + self.current_context},
             {"role": "user", "content": query},
@@ -89,7 +95,7 @@ class PDFChatBot:
             add_generation_prompt=True
         )
         temp = 0.1
-        outputs =
+        outputs = pipe(
             prompt,
             max_new_tokens=1024,
             do_sample=True,