Print configs when starting up the RAGFlow server (#3414)
### What problem does this PR solve?
Print the configs during the RAGFlow startup phase.
### Type of change
- [x] New Feature (non-breaking change which adds functionality)
```
2024-11-14 21:27:53,090 INFO 962231 Current configs, from /home/weilongma/Documents/development/ragflow/conf/service_conf.yaml:
2024-11-14 21:27:53,090 INFO 962231 ragflow: {'host': '0.0.0.0', 'http_port': 9380}
2024-11-14 21:27:53,090 INFO 962231 mysql: {'name': 'rag_flow', 'user': 'root', 'password': 'infini_rag_flow', 'host': 'mysql', 'port': 5455, 'max_connections': 100, 'stale_timeout': 30}
2024-11-14 21:27:53,090 INFO 962231 minio: {'user': 'rag_flow', 'password': 'infini_rag_flow', 'host': 'minio:9000'}
2024-11-14 21:27:53,090 INFO 962231 es: {'hosts': 'http://es01:1200', 'username': 'elastic', 'password': 'infini_rag_flow'}
2024-11-14 21:27:53,090 INFO 962231 redis: {'db': 1, 'password': 'infini_rag_flow', 'host': 'redis:6379'}
```
Signed-off-by: jinhai <[email protected]>
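
For reviewers who want to see the new output without booting the whole server, the helpers this PR adds to `api/utils/__init__.py` can be exercised directly. A minimal sketch, assuming the repository root is on `PYTHONPATH`, the project's dependencies are installed, and `conf/service_conf.yaml` is present (the `logging.basicConfig` call is only for this snippet, not the server's `initRootLogger` setup):

```python
import logging

# Plain console logging so show_configs() output is visible in a REPL.
logging.basicConfig(level=logging.INFO)

# Importing api.utils runs CONFIGS = read_config(), which loads
# conf/service_conf.yaml (plus conf/local.service_conf.yaml if it exists).
from api.utils import show_configs, get_base_config

show_configs()                      # one INFO line per top-level section, as in the log above
print(get_base_config("ragflow"))   # e.g. {'host': '0.0.0.0', 'http_port': 9380}
```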
- api/constants.py +3 -1
- api/ragflow_server.py +6 -4
- api/utils/__init__.py +29 -21
- api/validation.py +2 -2
```diff
--- a/api/constants.py
+++ b/api/constants.py
@@ -15,4 +15,6 @@
 
 NAME_LENGTH_LIMIT = 2 ** 10
 
-IMG_BASE64_PREFIX = 'data:image/png;base64,'
+IMG_BASE64_PREFIX = 'data:image/png;base64,'
+
+SERVICE_CONF = "service_conf.yaml"
```
```diff
--- a/api/ragflow_server.py
+++ b/api/ragflow_server.py
@@ -17,6 +17,7 @@
 import logging
 import inspect
 from api.utils.log_utils import initRootLogger
+
 initRootLogger(inspect.getfile(inspect.currentframe()))
 for module in ["pdfminer"]:
     module_logger = logging.getLogger(module)
@@ -45,6 +46,7 @@ from api import utils
 from api.db.db_models import init_database_tables as init_web_db
 from api.db.init_data import init_web_data
 from api.versions import get_ragflow_version
+from api.utils import show_configs
 
 
 def update_progress():
@@ -71,6 +73,7 @@ if __name__ == '__main__':
     logging.info(
         f'project base: {utils.file_utils.get_project_base_directory()}'
     )
+    show_configs()
 
     # init db
     init_web_db()
@@ -80,7 +83,7 @@ if __name__ == '__main__':
 
     parser = argparse.ArgumentParser()
     parser.add_argument(
-        "--version", default=False, help="…
+        "--version", default=False, help="RAGFlow version", action="store_true"
     )
     parser.add_argument(
         "--debug", default=False, help="debug mode", action="store_true"
@@ -97,9 +100,8 @@ if __name__ == '__main__':
     RuntimeConfig.init_env()
     RuntimeConfig.init_config(JOB_SERVER_HOST=HOST, HTTP_PORT=HTTP_PORT)
 
-    …
-    …
-    thr.submit(update_progress)
+    thread = ThreadPoolExecutor(max_workers=1)
+    thread.submit(update_progress)
 
     # start http server
     try:
```
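
The last hunk above replaces the bare `thr.submit(update_progress)` with an executor created right before the submit. A minimal standalone sketch of that pattern, with a stand-in task body rather than RAGFlow's real `update_progress`:

```python
import logging
import time
from concurrent.futures import ThreadPoolExecutor

logging.basicConfig(level=logging.INFO)


def update_progress():
    # Stand-in for RAGFlow's progress updater (the real one loops for the
    # lifetime of the server); here it runs a few iterations and returns.
    for _ in range(3):
        logging.info("updating task progress ...")
        time.sleep(1)


# One dedicated worker; submit() returns immediately, so the main thread can
# carry on with HTTP server startup while the task runs in the background.
thread = ThreadPoolExecutor(max_workers=1)
thread.submit(update_progress)

logging.info("main thread continues with server startup ...")
thread.shutdown(wait=True)  # only for this demo; the server keeps running instead
```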
```diff
--- a/api/utils/__init__.py
+++ b/api/utils/__init__.py
@@ -23,54 +23,61 @@ import socket
 import time
 import uuid
 import requests
+import logging
 from enum import Enum, IntEnum
 import importlib
 from Cryptodome.PublicKey import RSA
 from Cryptodome.Cipher import PKCS1_v1_5 as Cipher_pkcs1_v1_5
-…
 from filelock import FileLock
+from api.constants import SERVICE_CONF
 
 from . import file_utils
 
-SERVICE_CONF = "service_conf.yaml"
-…
 
 def conf_realpath(conf_name):
     conf_path = f"conf/{conf_name}"
     return os.path.join(file_utils.get_project_base_directory(), conf_path)
 
 
-def …
+def read_config(conf_name=SERVICE_CONF):
     local_config = {}
     local_path = conf_realpath(f'local.{conf_name}')
-    if default is None:
-        default = os.environ.get(key.upper())
 
+    # load local config file
     if os.path.exists(local_path):
         local_config = file_utils.load_yaml_conf(local_path)
         if not isinstance(local_config, dict):
             raise ValueError(f'Invalid config file: "{local_path}".')
 
-…
-…
+    global_config_path = conf_realpath(conf_name)
+    global_config = file_utils.load_yaml_conf(global_config_path)
 
-…
-…
+    if not isinstance(global_config, dict):
+        raise ValueError(f'Invalid config file: "{global_config_path}".')
 
-…
-…
+    global_config.update(local_config)
+    return global_config
 
-    config.update(local_config)
-    return config.get(key, default) if key is not None else config
 
+CONFIGS = read_config()
 
-…
-…
+
+def show_configs():
+    logging.info(f"Current configs, from {conf_realpath(SERVICE_CONF)}:")
+    for k, v in CONFIGS.items():
+        logging.info(f"{k}: {v}")
 
 
-…
-…
-…
+def get_base_config(key, default=None):
+    if key is None:
+        return None
+    if default is None:
+        default = os.environ.get(key.upper())
+    return CONFIGS.get(key, default)
+
+
+use_deserialize_safe_module = get_base_config(
+    'use_deserialize_safe_module', False)
 
 
 class BaseType:
@@ -98,6 +105,7 @@ class BaseType:
             data = obj
             return {"type": obj.__class__.__name__,
                     "data": data, "module": module}
+
         return _dict(self)
 
 
@@ -342,8 +350,8 @@ def download_img(url):
         return ""
     response = requests.get(url)
     return "data:" + \
-…
-…
+           response.headers.get('Content-Type', 'image/jpg') + ";" + \
+           "base64," + base64.b64encode(response.content).decode("utf-8")
 
 
 def delta_seconds(date_string: str):
```
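
Two behaviours of the new helpers are worth calling out: `read_config()` overlays `local.service_conf.yaml` on top of `service_conf.yaml` with a plain `dict.update`, so a local file replaces whole top-level sections rather than deep-merging them, and `get_base_config()` falls back to an upper-cased environment variable when no explicit default is given. A small self-contained sketch of just that logic, with plain dicts and made-up values standing in for the parsed YAML files:

```python
import os

# Stand-ins for file_utils.load_yaml_conf(...) results; values are illustrative.
global_config = {
    "ragflow": {"host": "0.0.0.0", "http_port": 9380},
    "es": {"hosts": "http://es01:1200", "username": "elastic"},
}
local_config = {"es": {"hosts": "http://localhost:1200"}}

# Same merge as read_config(): top-level keys from the local file win outright,
# so the merged "es" section no longer carries "username".
global_config.update(local_config)
CONFIGS = global_config


def get_base_config(key, default=None):
    # Same lookup order as the PR: value from CONFIGS, else the caller's
    # default, else the KEY environment variable (e.g. ES for key "es").
    if key is None:
        return None
    if default is None:
        default = os.environ.get(key.upper())
    return CONFIGS.get(key, default)


print(get_base_config("es"))         # {'hosts': 'http://localhost:1200'}
print(get_base_config("redis", {}))  # {} because no "redis" section was defined here
```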
```diff
--- a/api/validation.py
+++ b/api/validation.py
@@ -43,7 +43,7 @@ def download_nltk_data():
     try:
         from multiprocessing import Pool
         pool = Pool(processes=1)
-        …
-        binary = …
+        thread = pool.apply_async(download_nltk_data)
+        binary = thread.get(timeout=60)
     except Exception as e:
         print('\x1b[6;37;41m WARNING \x1b[0m' + "Downloading NLTK data failure.", flush=True)
```
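
The `api/validation.py` hunk bounds the NLTK download with a worker process and a 60-second timeout: `apply_async` returns an `AsyncResult`, `get(timeout=60)` raises if the work has not finished in time, and the surrounding `try/except` turns that into the warning line. A standalone sketch of the same pattern, with a placeholder task instead of the real `download_nltk_data`:

```python
import time
from multiprocessing import Pool


def slow_download():
    # Placeholder for a download that might hang.
    time.sleep(2)
    return True


if __name__ == "__main__":
    try:
        pool = Pool(processes=1)
        result = pool.apply_async(slow_download)
        binary = result.get(timeout=60)  # multiprocessing.TimeoutError if it overruns
        print("download finished:", binary)
    except Exception as e:
        print("WARNING: downloading data failed or timed out:", e)
```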