From a6038cc55d58da0e417284f83758885a8ea23bc7 Mon Sep 17 00:00:00 2001
From: "majiahui@haimaqingfan.com"
Date: Fri, 18 Jul 2025 17:02:17 +0800
Subject: [PATCH] Update detection model
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 chatgpt_detector_model_predict.py | 13 +++++++++++--
 1 file changed, 11 insertions(+), 2 deletions(-)

diff --git a/chatgpt_detector_model_predict.py b/chatgpt_detector_model_predict.py
index 888ed7b..66789f6 100644
--- a/chatgpt_detector_model_predict.py
+++ b/chatgpt_detector_model_predict.py
@@ -20,6 +20,7 @@ import uuid
 import time
 import json
 import docx2txt
+from datetime import datetime
 
 pool = redis.ConnectionPool(host='localhost', port=63179, max_connections=100, db=12, password="zhicheng123*")
 
@@ -32,7 +33,8 @@ batch_size = 32
 # model_name = "AIGC_detector_zhv2"
 # model_name = "drop_aigc_model_2"
 # model_name = "drop_aigc_model_3"
-model_name = "/home/majiahui/project/models-llm/aigc_check_10"
+# model_name = "/home/majiahui/project/models-llm/aigc_check_10"
+model_name = "/home/majiahui/project/models-llm/weipu_aigc_512_3"
 
 tokenizer = AutoTokenizer.from_pretrained(model_name)
 
@@ -208,7 +210,14 @@ def classify():  # call the model, cap the batch size
         }
 
     return_text = {"resilt": resilt, "probabilities": None, "status_code": 200}
-    load_result_path = "./new_data_logs/{}.json".format(queue_uuid)
+
+    # add the date to the result-log path
+    date_str = datetime.now().strftime("%Y-%m-%d")
+    dir_path = "./new_data_logs/{}/".format(date_str)
+    # check for and create the directory (if it does not exist)
+    os.makedirs(dir_path, exist_ok=True)
+    load_result_path = dir_path + '{}.json'.format(id_)
+    # load_result_path = "./new_data_logs/{}.json".format(query_id)
 
     print("query_id: ", queue_uuid)
     print("load_result_path: ", load_result_path)
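
The last hunk switches the result logs from a flat ./new_data_logs/ folder to per-day subdirectories created on demand. Below is a minimal, self-contained sketch of that layout under the same assumptions the patch makes (one JSON file per request id under ./new_data_logs/<YYYY-MM-DD>/); the save_result helper, its parameters, and the sample payload are illustrative and not part of the patch.

```python
# Sketch of the date-partitioned result-log layout introduced by the patch.
# Assumption: results land in ./new_data_logs/<YYYY-MM-DD>/<request_id>.json;
# "save_result" is a hypothetical helper, not code from the repository.
import json
import os
import uuid
from datetime import datetime


def save_result(request_id: str, payload: dict, root: str = "./new_data_logs") -> str:
    """Write one result JSON under a per-day directory and return its path."""
    date_str = datetime.now().strftime("%Y-%m-%d")
    dir_path = os.path.join(root, date_str)
    os.makedirs(dir_path, exist_ok=True)  # create the day folder on first write
    path = os.path.join(dir_path, "{}.json".format(request_id))
    with open(path, "w", encoding="utf-8") as f:
        json.dump(payload, f, ensure_ascii=False)
    return path


if __name__ == "__main__":
    # Example payload mirrors the shape used in the patched endpoint.
    demo = {"resilt": [], "probabilities": None, "status_code": 200}
    print(save_result(str(uuid.uuid4()), demo))
```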