From 48fb7a00951f91bdc707e5dd2d196e5bccb752c3 Mon Sep 17 00:00:00 2001
From: Administrator <admin@example.com>
Date: Wed, 18 Jun 2025 18:41:30 +0800
Subject: [PATCH] Exception protection

---
 l2_test.py |   91 +++++++++++++++++++++++++++++++++++++++++++--
 1 files changed, 87 insertions(+), 4 deletions(-)

diff --git a/l2_test.py b/l2_test.py
index 07d0a4a..d555401 100644
--- a/l2_test.py
+++ b/l2_test.py
@@ -2,6 +2,7 @@
 import json
 import logging
 import multiprocessing
+import queue
 import socketserver
 import threading
 import time
@@ -10,11 +11,16 @@
 import psutil
 import requests
 
+from api import low_suction_data_pusher
 from code_attribute import global_data_loader
 from huaxin_client import l2_client_test, l1_subscript_codes_manager
-from log_module.log import logger_local_huaxin_l2_transaction_big_order, logger_system
+from log_module.log import logger_local_huaxin_l2_transaction_big_order, logger_system, \
+    logger_local_huaxin_l2_transaction_accurate_big_order
 from third_data.custom_block_in_money_manager import CodeInMoneyManager, BlockInMoneyRankManager
-from utils import tool
+from third_data.history_k_data_manager import HistoryKDataManager
+from third_data.history_k_data_util import HistoryKDatasUtils
+from trade.buy_radical.block_special_codes_manager import BlockSpecialCodesManager
+from utils import tool, middle_api_protocol, global_util
 import urllib.parse as urlparse
 from urllib.parse import parse_qs
 
@@ -88,7 +94,70 @@
         logger_system.error(f"绔彛鏈嶅姟鍣細{port} 鍚姩澶辫触")
 
 
+def __run_upload_big_order_task(_queue: queue.Queue):
+    # Run the big-order upload task
+    while True:
+        try:
+            datas = []
+            while not _queue.empty():
+                datas.append(_queue.get())
+            if datas:
+                # Upload the collected data to the server
+                requests.post("http://192.168.84.71:12881/upload_deal_big_orders", json.dumps(datas))
+        except:
+            pass
+        finally:
+            time.sleep(1)
+
+
+def __get_special_codes():
+    """
+    Get the special codes, for which big orders above 3,000,000 (300w) need to be subscribed.
+    @return: set of codes
+    """
+    try:
+        zylt_volume_map = global_util.zylt_volume_map
+        codes = set()
+        last_trade_day = HistoryKDatasUtils.get_latest_trading_date(1)[0]
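+        # Scan all codes with known free-float volume and keep those that pass the filters below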
+        for code in zylt_volume_map:
+            volume = zylt_volume_map.get(code)
+            # Fetch yesterday's K-line bar for the code
+            k_bars = HistoryKDataManager().get_history_bars(code, last_trade_day)
+            if k_bars and 10e8 <= k_bars[0]["close"] * volume * tool.get_limit_up_rate(code) <= 300e8:
+                # 鑷敱娴侀�氬競鍊煎湪10浜�-300浜夸互涓�
+                limit_up_price = round(tool.get_limit_up_rate(code) * k_bars[0]["close"], 2)
+                if limit_up_price > k_bars[0]["high"] or True:
+                    # Today's limit-up price breaks through yesterday's high ("or True" currently bypasses this check)
+                    codes.add(code)
+        # Get the high-recognizability (辨识度) stocks
+        special_codes = BlockSpecialCodesManager().get_origin_code_blocks_dict().keys()
+        if special_codes:
+            codes |= set(special_codes)
+        return codes
+    except Exception as e:
+        logger_system.exception(e)
+        return set()
+
+
+def __save_accurate_big_order(big_accurate_order_queue):
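+    # Drain accurate big orders from the queue, push them via low_suction_data_pusher, and log each entry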
+    while True:
+        try:
+            datas = []
+            while not big_accurate_order_queue.empty():
+                data = big_accurate_order_queue.get()
+                datas.append(data)
+            if datas:
+                low_suction_data_pusher.push_big_order(datas)
+                for data in datas:
+                    logger_local_huaxin_l2_transaction_accurate_big_order.info(f"{data}")
+        except:
+            pass
+        finally:
+            # Mirror __run_upload_big_order_task: sleep to avoid a busy loop when the queue is empty
+            time.sleep(1)
+
+
 def run():
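+    # Codes whose big orders (above 3,000,000) need accurate subscription; see __get_special_codes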
+    special_codes = __get_special_codes()
     codes_sh, codes_sz = l1_subscript_codes_manager.get_codes()
     codes = [x.decode() for x in codes_sh]
     codes.extend([x.decode() for x in codes_sz])
@@ -97,19 +166,29 @@
     cpu_count = 16
     page_size = int(len(codes) / cpu_count) + 1
 
-    big_order_queue = multiprocessing.Queue()
+    big_order_queue = multiprocessing.Queue(maxsize=1024)
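+    # Accurate big-order queue, filled by the l2_client_test worker processes and drained by __save_accurate_big_order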
+    big_accurate_order_queue = multiprocessing.Queue(maxsize=1024)
+    # Big-order upload queue
+    big_order_upload_queue = queue.Queue(maxsize=1024)
 
     for i in range(cpu_count):
         process = multiprocessing.Process(target=l2_client_test.run,
-                                          args=(codes[i * page_size:(i + 1) * page_size], big_order_queue,))
+                                          args=(
+                                              codes[i * page_size:(i + 1) * page_size], big_order_queue,
+                                              big_accurate_order_queue, special_codes,))
 
         process.start()
         # Pin the process to a dedicated CPU core
         psutil.Process(process.pid).cpu_affinity([i])
+    threading.Thread(target=__run_upload_big_order_task, args=(big_order_upload_queue,), daemon=True).start()
+    threading.Thread(target=__save_accurate_big_order, args=(big_accurate_order_queue,), daemon=True).start()
+
     while True:
         try:
             data = big_order_queue.get()
             CodeInMoneyManager().add_data(data)
+            # Add the data to the upload queue
+            big_order_upload_queue.put_nowait(data)
             logger_local_huaxin_l2_transaction_big_order.info(f"{data}")
         except:
             pass
@@ -150,6 +229,10 @@
             __upload_data("jingxuan_rank", json.dumps(fins))
             __upload_data("jingxuan_rank_out", json.dumps(fouts))
             __upload_codes_in_money()
+            try:
+                low_suction_data_pusher.push_block_in(in_list)
+            except:
+                pass
         except Exception as e:
             logging.exception(e)
         finally:

--
Gitblit v1.8.0