From fa78e5b5e03e40431e6c4404c6ecb00b8de81be3 Mon Sep 17 00:00:00 2001
From: shidong <shidong@jhsoft.cc>
Date: Mon, 14 Jul 2025 07:36:08 +0800
Subject: [PATCH] #2025/7/14 #Optimize the code that fetches the latest data #Optimize the warning prompt wording
---
qwen_thread.py | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/qwen_thread.py b/qwen_thread.py
index 9c348b4..aeb16d9 100644
--- a/qwen_thread.py
+++ b/qwen_thread.py
@@ -41,6 +41,7 @@
torch_dtype=torch.float16
).eval()
+ model = model.to(f"cuda:{config.get('cuda')}")
self.model_pool.append(model)
# Shared processor (thread-safe)
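
For readers outside the repository: the added line above pins each model replica to the GPU index stored in the service config instead of leaving it on the default device. A minimal sketch of the surrounding pool-building loop, assuming hypothetical names (config keys "model_path" and "cuda", a num_replicas count, and AutoModelForVision2Seq standing in for the actual Qwen model class):

    import torch
    from transformers import AutoModelForVision2Seq  # assumed class; the project likely uses a Qwen-specific one

    def build_model_pool(config, num_replicas=2):
        """Sketch of the pool setup this hunk modifies (names are assumptions)."""
        pool = []
        for _ in range(num_replicas):
            model = AutoModelForVision2Seq.from_pretrained(
                config.get("model_path"),
                torch_dtype=torch.float16,
            ).eval()
            # The patched line: move the replica onto the configured GPU.
            model = model.to(f"cuda:{config.get('cuda')}")
            pool.append(model)
        return pool
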
@@ -172,7 +173,7 @@
return_tensors="pt",
)
inputs = inputs.to(model.device)
- with torch.inference_mode():
+ with torch.inference_mode(), torch.cuda.amp.autocast():
outputs = model.generate(**inputs,max_new_tokens=200)
generated_ids = outputs[:, len(inputs.input_ids[0]):]
image_text = self.processor.batch_decode(
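
The change above combines torch.inference_mode() with CUDA autocast so generation runs without autograd overhead and in mixed precision. A self-contained sketch of that pattern, with model, processor, and inputs treated as placeholders rather than the project's actual objects:

    import torch

    def generate_caption(model, processor, inputs, max_new_tokens=200):
        inputs = inputs.to(model.device)
        # inference_mode() disables gradient tracking; autocast() lets eligible
        # CUDA ops run in half precision, which speeds up generate() on GPU.
        with torch.inference_mode(), torch.cuda.amp.autocast():
            outputs = model.generate(**inputs, max_new_tokens=max_new_tokens)
        # Drop the prompt tokens and decode only the newly generated ids.
        generated_ids = outputs[:, len(inputs.input_ids[0]):]
        return processor.batch_decode(generated_ids, skip_special_tokens=True)

Newer PyTorch releases prefer torch.amp.autocast("cuda") over the older torch.cuda.amp.autocast() spelling used here, though both work the same way in this pattern.
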
@@ -219,7 +220,7 @@
def image_rule_chat(self, image_des,rule_text, ragurl, rag_mode,max_tokens):
try:
content = (
- f"鍥剧墖鎻忚堪鍐呭涓猴細\n{image_des}\n瑙勫垯鍐呭锛歕n{rule_text}銆俓n璇烽獙璇佸浘鐗囨弿杩颁腑鏄惁鏈夌鍚堣鍒欑殑鍐呭锛屼笉杩涜鎺ㄧ悊鍜宼hink銆傝繑鍥炵粨鏋滄牸寮忎负[xxx绗﹀悎鐨勮鍒檌d]锛屽鏋滄病鏈夎繑鍥瀃]")
+ f"鍥剧墖鎻忚堪鍐呭涓猴細\n{image_des}\n瑙勫垯鍐呭锛歕n{rule_text}銆俓n璇烽獙璇佸浘鐗囨弿杩颁腑鏄惁鏈変笉绗﹀悎瑙勫垯鐨勫唴瀹癸紝涓嶈繘琛屾帹鐞嗗拰think銆傝繑鍥炵粨鏋滄牸寮忎负[xxx绗﹀悎鐨勮鍒檌d]锛屽鏋滄病鏈夎繑鍥瀃]")
#self.logger.info(len(content))
search_data = {
"prompt": "",
--
Gitblit v1.8.0