黄腾 aopstudio committed on
Commit
c260733
·
1 Parent(s): 591d80e

fix error when adding Bedrock LLM (#1952)

Browse files

### What problem does this PR solve?

Fixes #1942: resolves the error raised when adding a Bedrock LLM (the Bedrock Converse API requires each message's `content` to be a list of content blocks, e.g. `[{"text": ...}]`, rather than a plain string).

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)

---------

Co-authored-by: Zhedong Cen <[email protected]>

Files changed (1) hide show
  1. rag/llm/chat_model.py +8 -1
rag/llm/chat_model.py CHANGED
@@ -678,6 +678,10 @@ class BedrockChat(Base):
678
  if "top_p" in gen_conf:
679
  gen_conf["topP"] = gen_conf["top_p"]
680
  _ = gen_conf.pop("top_p")
 
 
 
 
681
 
682
  try:
683
  # Send the message to the model, using a basic inference configuration.
@@ -707,7 +711,10 @@ class BedrockChat(Base):
707
  if "top_p" in gen_conf:
708
  gen_conf["topP"] = gen_conf["top_p"]
709
  _ = gen_conf.pop("top_p")
710
-
 
 
 
711
  if self.model_name.split('.')[0] == 'ai21':
712
  try:
713
  response = self.client.converse(
 
678
  if "top_p" in gen_conf:
679
  gen_conf["topP"] = gen_conf["top_p"]
680
  _ = gen_conf.pop("top_p")
681
+ for item in history:
682
+ if not isinstance(item["content"],list) and not isinstance(item["content"],tuple):
683
+ item["content"] = [{"text":item["content"]}]
684
+
685
 
686
  try:
687
  # Send the message to the model, using a basic inference configuration.
 
711
  if "top_p" in gen_conf:
712
  gen_conf["topP"] = gen_conf["top_p"]
713
  _ = gen_conf.pop("top_p")
714
+ for item in history:
715
+ if not isinstance(item["content"],list) and not isinstance(item["content"],tuple):
716
+ item["content"] = [{"text":item["content"]}]
717
+
718
  if self.model_name.split('.')[0] == 'ai21':
719
  try:
720
  response = self.client.converse(