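"""
Shared runtime settings for the RAG components: configuration paths, storage and
search backend configs (Elasticsearch, Azure, S3, MinIO, Redis), logger setup,
and task-queue constants.
"""
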
import logging
import os

from api.utils import get_base_config, decrypt_database_config
from api.utils.file_utils import get_project_base_directory
from api.utils.log_utils import LoggerFactory, getLogger

# Configuration directory and the default log file name for subprocesses.
RAG_CONF_PATH = os.path.join(get_project_base_directory(), "conf")
SUBPROCESS_STD_LOG_NAME = "std.log"

# Storage and search backend settings loaded from the service configuration.
ES = get_base_config("es", {})
AZURE = get_base_config("azure", {})
S3 = get_base_config("s3", {})
MINIO = decrypt_database_config(name="minio")
try:
    REDIS = decrypt_database_config(name="redis")
except Exception:
    # Redis is optional; fall back to an empty config when it is not set up.
    REDIS = {}

# Hard cap on the size of a single uploaded document, in bytes (default 128 MiB).
DOC_MAXIMUM_SIZE = int(os.environ.get("MAX_CONTENT_LENGTH", 128 * 1024 * 1024))
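# For example, exporting MAX_CONTENT_LENGTH=268435456 before starting the
# service raises the limit to 256 MiB.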

# Write all RAG log files under <project>/logs/rag.
LoggerFactory.set_directory(
    os.path.join(
        get_project_base_directory(),
        "logs",
        "rag"))
LoggerFactory.LEVEL = 30  # logging.WARNING

es_logger = getLogger("es")
minio_logger = getLogger("minio")
s3_logger = getLogger("s3")
azure_logger = getLogger("azure")
cron_logger = getLogger("cron_logger")
chunk_logger = getLogger("chunk_logger")
database_logger = getLogger("database")

# Apply a single timestamped format to every handler of the loggers above and
# raise their level to INFO.
formatter = logging.Formatter("%(asctime)-15s %(levelname)-8s (%(process)d) %(message)s")
for logger in [es_logger, minio_logger, s3_logger, azure_logger, cron_logger, chunk_logger, database_logger]:
    logger.setLevel(logging.INFO)
    for handler in logger.handlers:
        handler.setFormatter(formatter)

# Identifiers and limits for the server task queue and its consumer group.
SVR_QUEUE_NAME = "rag_flow_svr_queue"
SVR_QUEUE_RETENTION = 60 * 60
SVR_QUEUE_MAX_LEN = 1024
SVR_CONSUMER_NAME = "rag_flow_svr_consumer"
SVR_CONSUMER_GROUP_NAME = "rag_flow_svr_consumer_group"
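
# Illustrative sketch only (not called by the service itself): one way the queue
# constants above could be consumed as a Redis stream with redis-py. The host,
# port, and helper name are assumptions made for this example; the project's
# real queue wiring lives elsewhere and may differ.
def _example_consume_one_task(host="localhost", port=6379):
    import redis  # imported lazily so this sketch adds no import-time dependency

    conn = redis.Redis(host=host, port=port)
    try:
        # Create the consumer group on the stream if it does not exist yet.
        conn.xgroup_create(SVR_QUEUE_NAME, SVR_CONSUMER_GROUP_NAME, id="$", mkstream=True)
    except redis.exceptions.ResponseError:
        pass  # group already exists
    # Block for up to one second waiting for a single task entry.
    return conn.xreadgroup(SVR_CONSUMER_GROUP_NAME, SVR_CONSUMER_NAME,
                           {SVR_QUEUE_NAME: ">"}, count=1, block=1000)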