import time
import copy
import requests
import openai
from abc import ABC, abstractmethod


class AzureVisionClient:
    """Client for an Azure OpenAI multimodal (GPT-V) chat completion deployment."""

    def __init__(self, ak, max_retries=3):
        self.client = openai.AzureOpenAI(
            azure_endpoint="https://search-va.byteintl.net/gpt/openapi/online/multimodal/crawl/",
            api_version="2023-09-01-preview",
            api_key=ak
        )
        self.max_retries = max_retries
        self.model_name = 'gptv'
        # Near-zero temperature keeps the output close to deterministic.
        self.temperature = 0.000000001
        self.max_tokens = 3000

    def request(self, query, messages=None, img_data=None, vid_data=None):
        """Send `query`, optionally with a base64 image (`img_data`) or a list of
        base64-encoded video frames (`vid_data`); returns (answer, messages)."""
        if messages is None:
            messages = []

        content = {
            "type": "text",
            "text": query
        }
        if img_data:
            # Attach a single base64-encoded JPEG image.
            content = [
                content,
                {
                    "type": "image_url",
                    "image_url": {
                        "url": f"data:image/jpeg;base64,{img_data}"
                    }
                }
            ]
        elif vid_data:
            # Attach a video as a list of base64-encoded JPEG frames.
            content = [
                content,
                *map(lambda x: {
                    "type": "image_url",
                    "image_url": {
                        "url": f"data:image/jpeg;base64,{x}"
                    }
                }, vid_data)
            ]
        else:
            content = [content]

        messages.append({
            'role': 'user',
            'content': content
        })

        completion = None
        num_cur_retry = 0
        while num_cur_retry < self.max_retries:
            try:
                completion = self.client.chat.completions.create(
                    model=self.model_name,
                    max_tokens=self.max_tokens,
                    temperature=self.temperature,
                    messages=messages
                )
                break
            except Exception as e:
                num_cur_retry += 1
                # Only rate-limit errors (HTTP 429) are retried; any other error aborts.
                if 'Error code: 429' not in str(e):
                    completion = None
                    print(e)
                    break
                if num_cur_retry % 20 == 1:
                    print('retry times:', num_cur_retry, e)
                time.sleep(5)

        if completion is None:
            raise RuntimeError('AzureVisionClient.request: no completion returned after retries')

        resp = completion.choices[0].message.content
        messages.append({
            "role": "assistant",
            "content": [{
                "type": "text",
                "text": resp
            }]
        })

        return resp, messages
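

# A minimal usage sketch, not part of the original module: it assumes a valid
# Azure API key and a local JPEG at `image_path`, and shows how an image is
# base64-encoded before being handed to AzureVisionClient.request(). All names
# and prompts below are illustrative.
def _example_vision_usage(ak, image_path='frame.jpg'):
    import base64

    # Encode the image as base64 so it can be embedded in a data: URL.
    with open(image_path, 'rb') as f:
        img_b64 = base64.b64encode(f.read()).decode('utf-8')

    client = AzureVisionClient(ak=ak)
    # First turn asks about the image; the second turn reuses the returned history.
    answer, history = client.request('Describe this image.', img_data=img_b64)
    follow_up, history = client.request('What stands out the most?', messages=history)
    return answer, follow_up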


class BaseAPIWrapper(ABC):
    @abstractmethod
    def get_completion(self, user_prompt, system_prompt=None):
        pass


class GPTAPIWrapper(BaseAPIWrapper):
    """Client for a text-only Azure OpenAI GPT-4 chat completion deployment."""

    def __init__(self, ak, max_retries=1000):
        self.client = openai.AzureOpenAI(
            azure_endpoint="https://search-us.byteintl.net/gpt/openapi/online/v2/crawl",
            api_version="2023-06-01-preview",
            api_key=ak
        )
        self.max_retries = max_retries
        self.model_name = 'gpt-4-32k-0613'
        # Near-zero temperature keeps the output close to deterministic.
        self.temperature = 0.000000001
        self.max_tokens = 3000

    def request(self, system_content, usr_question, previous_msg=None, last_answer=None):
        """Send one user turn; when `previous_msg` and `last_answer` are given,
        they are replayed so the model sees the full conversation history."""
        if previous_msg is None:
            msgs = [
                {"role": "system", "content": f"{system_content}"},
                {"role": "user", "content": f"{usr_question}"}
            ]
        else:
            msgs = copy.deepcopy(previous_msg)
            msgs += [
                {"role": "assistant", "content": last_answer},
                {"role": "user", "content": usr_question}
            ]
        response = self.client.chat.completions.create(
            messages=msgs,
            temperature=self.temperature,
            max_tokens=self.max_tokens,
            model=self.model_name,
        )
        resp = response.choices[0].message.content

        return resp, msgs

    def get_completion(self, user_prompt=None, system_prompt=None, previous_msgs=None, last_answer=None):
        gpt_cv_nlp = '[]'
        msgs = previous_msgs if previous_msgs is not None else []
        max_try = self.max_retries

        while max_try > 0:
            try:
                gpt_cv_nlp, msgs = self.request(system_prompt, user_prompt, previous_msgs, last_answer)
                break
            except Exception as e:
                # Retry on any failure until the attempt budget is used up.
                print("fail ", max_try, e)
                time.sleep(1)
                max_try -= 1

        return gpt_cv_nlp, msgs
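

# A minimal usage sketch, not part of the original module: it assumes a valid
# Azure API key and shows a two-turn exchange where the earlier messages and
# answer are fed back in via `previous_msgs` / `last_answer`. Prompts are illustrative.
def _example_gpt_usage(ak):
    gpt = GPTAPIWrapper(ak=ak)
    answer, msgs = gpt.get_completion(
        user_prompt='List three common uses of video captions.',
        system_prompt='You are a concise assistant.'
    )
    # Second turn: replay the prior messages plus the last answer for context.
    follow_up, msgs = gpt.get_completion(
        user_prompt='Expand on the first use.',
        previous_msgs=msgs,
        last_answer=answer
    )
    return answer, follow_up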