add system_prompt param to process_query

charry
2025-11-21 16:59:35 +08:00
parent cb61a88751
commit 4a7cfb1cee


@@ -310,6 +310,7 @@ class MMLM_Agent:
temperature: float = 0.7,
top_p: float = 0.95,
stop: Optional[List[str]] = None,
system_prompt: Optional[str] = None,
is_use_chat_history: bool = False,
is_use_rag: bool = False,
is_save_history: bool = False):
@@ -370,6 +371,14 @@ class MMLM_Agent:
)
logger.debug(f"生成提示: {final_conversation_prompt[:200]}...") # 只显示前200字符
# # 5. Whether to use a system_prompt
# is_use_system_prompt = False
# if is_use_system_prompt:
#     system_prompt = ""
# else:
#     system_prompt = None
## 6. Call the VLM to generate the answer
# output = self.llm(
# prompt=conversation_prompt,
@@ -385,6 +394,7 @@ class MMLM_Agent:
max_tokens=max_tokens,
temperature=temperature,
top_p=top_p,
system_prompt=system_prompt,
stop=stop)
output = self.model_mag.models_interface.multimodal_inference(request=multi_modal_request)
else:
@@ -392,6 +402,7 @@ class MMLM_Agent:
max_tokens=max_tokens,
temperature=temperature,
top_p=top_p,
system_prompt=system_prompt,
stop=stop)
output = self.model_mag.models_interface.text_inference(request=text_request)
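
A minimal usage sketch of the new parameter, assuming an MMLM_Agent instance and assuming the query text is the first positional argument of process_query (the constructor arguments and that parameter are not visible in this diff). When system_prompt is left as None, both the multimodal and text branches simply forward it to the request, so existing callers should behave as before.

agent = MMLM_Agent()  # hypothetical construction; real constructor arguments not shown in this diff

answer = agent.process_query(
    "Summarize the attached image.",  # assumed query parameter, not visible above
    max_tokens=512,
    temperature=0.7,
    top_p=0.95,
    system_prompt="You are a concise assistant.",  # new in this commit
    is_use_chat_history=False,
    is_use_rag=False,
    is_save_history=False,
)
print(answer)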