莘权 马 committed
Commit 0a341c0 · Parent(s): 65f7c11

feat: + error handler
app.py CHANGED
@@ -5,17 +5,15 @@ from __future__ import annotations
 import asyncio
 import contextlib
 import pathlib
+import re
 import shutil
 import traceback
 import uuid
 from collections import deque
 from functools import partial
-from json import JSONDecodeError
 from typing import Dict
 
 import fire
-import httpx
-import openai
 import tenacity
 import uvicorn
 from fastapi import FastAPI, Request
@@ -25,6 +23,7 @@ from loguru import logger
 from metagpt.config import CONFIG
 from metagpt.logs import set_llm_stream_logfunc
 from metagpt.schema import Message
+from metagpt.utils.common import any_to_name, any_to_str
 from openai import OpenAI
 
 from data_model import (
@@ -154,32 +153,42 @@ class Service:
             while isinstance(original_exception, tenacity.RetryError):
                 original_exception = original_exception.last_attempt.exception()
 
-
-
-
-
-
-
-
-
-
-
-
-                answer = f"{original_exception}. {original_exception.request}"
-                title = "httpx ReadTimeout"
-                think_act_prompt = cls.create_error_think_act_prompt(tc_id, title, title, answer)
-                return think_act_prompt.prompt + "\n\n"
-            elif isinstance(original_exception, JSONDecodeError):
-                answer = str(original_exception)
-                title = "MetaGPT Error"
-                description = "LLM return result parsing error"
-                think_act_prompt = cls.create_error_think_act_prompt(tc_id, title, description, answer)
-                return think_act_prompt.prompt + "\n\n"
-            else:
-                return cls.handle_unexpected_error(tc_id, error)
+            name = any_to_str(original_exception)
+            if re.match(r"^openai\.", name):
+                return cls._handle_openai_error(tc_id, original_exception)
+
+            if re.match(r"^httpx\.", name):
+                return cls._handle_httpx_error(tc_id, original_exception)
+
+            if re.match(r"^json\.", name):
+                return cls._handle_json_error(tc_id, original_exception)
+
+            return cls.handle_unexpected_error(tc_id, error)
         except Exception:
             return cls.handle_unexpected_error(tc_id, error)
 
+    @classmethod
+    def _handle_openai_error(cls, tc_id, original_exception):
+        answer = original_exception.message
+        title = f"OpenAI {any_to_name(original_exception)}"
+        think_act_prompt = cls.create_error_think_act_prompt(tc_id, title, title, answer)
+        return think_act_prompt.prompt + "\n\n"
+
+    @classmethod
+    def _handle_httpx_error(cls, tc_id, original_exception):
+        answer = f"{original_exception}. {original_exception.request}"
+        title = f"httpx {any_to_name(original_exception)}"
+        think_act_prompt = cls.create_error_think_act_prompt(tc_id, title, title, answer)
+        return think_act_prompt.prompt + "\n\n"
+
+    @classmethod
+    def _handle_json_error(cls, tc_id, original_exception):
+        answer = str(original_exception)
+        title = "MetaGPT Action Node Error"
+        description = f"LLM response parse error. {any_to_str(original_exception)}: {original_exception}"
+        think_act_prompt = cls.create_error_think_act_prompt(tc_id, title, description, answer)
+        return think_act_prompt.prompt + "\n\n"
+
     @classmethod
     def handle_unexpected_error(cls, tc_id, error):
         description = str(error)