From 34958e138d670ecd921615e34ae1b56d3a99f910 Mon Sep 17 00:00:00 2001
From: admin <admin@example.com>
Date: Mon, 25 Aug 2025 18:29:55 +0800
Subject: [PATCH] 1. Write the accumulated filter data to local storage and pin down the time cutoff point. 2. More detailed logging.

---
 data_server.py | 278 +++++++++++++++++++++++++++++++++++++++++++++++-------
 1 file changed, 239 insertions(+), 39 deletions(-)

diff --git a/data_server.py b/data_server.py
index 6d846e7..3f491c8 100644
--- a/data_server.py
+++ b/data_server.py
@@ -5,18 +5,26 @@
 import json
 import logging
 import socketserver
+import threading
+import time
 from http.server import BaseHTTPRequestHandler
 import urllib.parse as urlparse
 
 import psutil
 
+import constant
 from db import redis_manager_delegate as redis_manager, mysql_data_delegate as mysql_data
 from db.redis_manager_delegate import RedisUtils
-from log_module import log_export
-from log_module.log import hx_logger_l2_transaction, logger_debug, logger_request_api
-from strategy import data_cache
-from strategy.trade_setting import TradeSetting
-from trade import huaxin_trade_api, huaxin_trade_data_update
+from log_module import log_export, async_log_util
+from log_module.log import hx_logger_l2_transaction, logger_debug, logger_request_api, logger_system, \
+    logger_kpl_forbidden_plates
+from strategy import data_cache, all_K_line, local_data_management, plate_strength_analysis
+from strategy.forbidden_plates_manager import ForbiddenPlatesManager
+from strategy.kpl_data_manager import KPLMarketsSiftPlateLogManager, KPLMarketStockHeatLogManager
+from strategy.local_data_management import KBarsManager
+from strategy.trade_setting import TradeSetting, BuyMoneyPerCodeManager, OpeningQuantityManager
+from strategy.trading_dates_manager import TradingDatesManager
+from trade import huaxin_trade_api, huaxin_trade_data_update, middle_api_protocol
 from trade.huaxin_trade_record_manager import DelegateRecordManager, DealRecordManager, MoneyManager, PositionManager
 from utils import tool, huaxin_util, socket_util
@@ -94,33 +102,24 @@
             fdatas = []
             for code in codes:
                 data = data_cache.latest_code_market_info_dict.get(code)
-                logger_debug.info(f"L1 quote interface: {code}-{data}")
+                # logger_debug.info(f"L1 quote interface: {code}-{data}")
                 if data:
                     fdatas.append(data)
             response_data = json.dumps({"code": 0, "data": fdatas})
         elif url.path == "/get_buy_money":
             # Amount to spend per buy order
-            money = data_cache.BUY_MONEY_PER_CODE
+            money = BuyMoneyPerCodeManager().get_money()
             response_data = json.dumps({"code": 0, "data": {"money": money}})
         elif url.path == "/get_trade_settings":
             fdata = {"running": TradeSetting().get_running(), "auto_sell": TradeSetting().get_auto_sell(),
                      "auto_buy": TradeSetting().get_auto_buy()}
             response_data = json.dumps({"code": 0, "data": fdata})
-        elif url.path == "/set_trade_settings":
-            running = params_dict.get("running")
-            auto_sell = params_dict.get("auto_sell")
-            auto_buy = params_dict.get("auto_buy")
-            if running is not None:
-                TradeSetting().set_running(int(running))
-            if auto_sell is not None:
-                TradeSetting().set_auto_sell(int(auto_sell))
-            if auto_buy is not None:
-                TradeSetting().set_auto_buy(int(auto_buy))
-            response_data = json.dumps({"code": 0, "data": {}})
-        elif url.path == "/get_env":
+        elif url.path == "/get_env":
+            request_id = params_dict.get("request_id")
+            use_time_list = []
             try:
+                __start_time = time.time()
                 fdata = {}
                 # try:
                 #     date = HistoryKDatasUtils.get_trading_dates(tool.date_sub(tool.get_now_date_str(), 10),
@@ -143,6 +142,7 @@
                     fdata["redis"] = 1
                 except:
                     fdata["redis"] = 0
+                use_time_list.append(("verify redis", time.time() - __start_time))
 
                 try:
                     # Verify mysql
@@ -150,12 +150,14 @@
                     fdata["mysql"] = 1
                 except:
                     fdata["mysql"] = 0
+                use_time_list.append(("verify mysql", time.time() - __start_time))
 
                 try:
                     # redis async task count
                     fdata["redis_async_task_count"] = redis_manager.RedisUtils.get_async_task_count()
                 except:
                     pass
+                use_time_list.append(("verify async task count", time.time() - __start_time))
 
                 # Get the trading channel
                 try:
@@ -164,30 +166,112 @@
                 except Exception as e:
                     logger_debug.exception(e)
                     fdata["trade_channel_access"] = 0
+                use_time_list.append(("verify trading channel", time.time() - __start_time))
 
                 # Get CPU and memory usage
                 memory_info = psutil.virtual_memory()
                 cpu_percent = psutil.cpu_percent(interval=1)
                 fdata["device"] = {"cpu": cpu_percent, "memery": memory_info.percent}
+
+                # K-line data
+                if KBarsManager().get_now_day() != KBarsManager().day:
+                    # The date changed, reload the data
+                    KBarsManager().load_data()
+                count = len(
+                    data_cache.all_stocks_all_K_line_property_dict) if data_cache.all_stocks_all_K_line_property_dict else 0
+                fdata["k_line_count"] = count
+
+                # Plates of each code
+                count = len(data_cache.all_stocks_plate_dict) if data_cache.all_stocks_plate_dict else 0
+                fdata["all_stocks_plate_count"] = count
+
+                # L1 data
+                # L1 data format: (d["securityID"], d["preClosePrice"], d['lastPrice'], d['totalVolumeTrade'],
+                #                  d['totalValueTrade'], d['buy'], d['sell'], d['dataTimeStamp'])
+                now_time_str = tool.get_now_time_str()
+                if "09:25:00" <= now_time_str < "09:30:00":
+                    now_time_str = "09:25:00"
+                try:
+                    count = len([k for k, v in data_cache.current_l1_dict.items() if
+                                 tool.trade_time_sub(now_time_str, huaxin_util.convert_time(v[7])) < 10])
+                except Exception as e:
+                    logger_debug.exception(e)
+                    count = -1
+                fdata["l1_count"] = count
+
+                # L2 data
+                if data_cache.latest_l2_transaction_info_dict:
+                    min_time = '09:24:55' if tool.get_now_time_str() <= '09:30:00' else tool.trade_time_add_second(
+                        tool.get_now_time_str(), -6)
+                    count = len([k for k, v in data_cache.latest_l2_transaction_info_dict.items() if v[1] > min_time])
+                else:
+                    count = 0
+                fdata["l2_count"] = count
+
+                # Latest trading calendar
+                trading_dates = TradingDatesManager().get_trading_dates()
+                if trading_dates:
+                    fdata["trading_date"] = (len(trading_dates), trading_dates[-1])
+                else:
+                    fdata["trading_date"] = (0, 'N/A')
+
+                # Target code count
+                stocks_info = data_cache.DataCache().get_all_stocks_info()
+                if not stocks_info:
+                    stocks_info = ([], tool.get_now_date_str())
+                fdata["all_stocks"] = (len(stocks_info[0]), stocks_info[1])
+
+                # KPL data
+                kpl_info = f"strength-{len(data_cache.market_sift_plate_stock_dict)}, limit_up-{len(data_cache.limit_up_block_names)}"
+                fdata["kpl_info"] = kpl_info
+
+                use_time_list.append(("get device resource usage", time.time() - __start_time))
                 # Get the trading channel
                 result = {"code": 0, "data": fdata, "msg": ""}
                 # print("OnGetEnvInfo success")
                 response_data = json.dumps(result)
             except Exception as e:
                 response_data = json.dumps({"code": 1, "msg": str(e)})
-        elif url.path == "/get_kpl_stock_of_markets_plate":
+                logger_debug.error(f"get_env exception: {request_id}")
+                logger_debug.exception(e)
+            finally:
+                if use_time_list and use_time_list[-1][1] > 10:
+                    logger_debug.warning(f"get_env took more than 10s ({request_id}): {use_time_list}")
+        # Plate strength data
+        elif url.path == "/load_kpl_market_sift_plate":
+            # Load the data
+            KPLMarketsSiftPlateLogManager().load_data()
+            response_data = json.dumps({"code": 0, "msg": "No content"})
+        elif url.path == "/get_kpl_market_sift_plate":
+            # Get KPL inflow plate details
+            print("==========get_kpl_market_sift_plate==========")
+            try:
+                time_str = params_dict.get("time")
+                if not time_str:
+                    time_str = tool.get_now_time_str()
+                fdatas = KPLMarketsSiftPlateLogManager().get_filter_log_datas()
+                response_data = json.dumps({"code": 1, "msg": "No content"})
+                for i in range(len(fdatas) - 1, -1, -1):
+                    if fdatas[i][0] <= time_str:
+                        response_data = json.dumps({"code": 0, "data": fdatas[i]})
+                        break
+            except Exception as e:
+                logging.exception(e)
+                response_data = json.dumps({"code": 1, "msg": str(e)})
+
+        # Individual stock heat data
+        elif url.path == "/load_kpl_market_stock_heat":
+            # Load the data
+            KPLMarketStockHeatLogManager().load_data()
+            response_data = json.dumps({"code": 0, "msg": "No content"})
+        elif url.path == "/get_kpl_market_stock_heat":
             # Get KPL inflow plate details
             print("==========get_kpl_stock_of_markets_plate==========")
             try:
                 time_str = params_dict.get("time")
                 if not time_str:
                     time_str = tool.get_now_time_str()
-                datas = log_export.load_stock_of_markets_plate()
-                fdatas = []
-                for data in datas:
-                    # (occur_time, [net inflow plates], {"plate": (code, name, change_pct)})
-                    fdatas.append((data[0], [x[1] for x in data[1][0]],
-                                   {p: [(xx[0], xx[1], xx[6]) for xx in data[1][1][p]] for p in data[1][1]}))
+                fdatas = KPLMarketStockHeatLogManager().get_filter_log_datas()
                 response_data = json.dumps({"code": 1, "msg": "No content"})
                 for i in range(len(fdatas) - 1, -1, -1):
                     if fdatas[i][0] <= time_str:
@@ -207,6 +291,78 @@
                 # (seconds since 09:15:00, time, strength)
                 fdatas.append((tool.trade_time_sub(data[0], "09:15:00"), data[0], data[1]))
             response_data = json.dumps({"code": 0, "data": fdatas})
+        elif url.path == "/get_place_order_records":
+            datas = data_cache.purchased_stocks_details_list
+            response_data = json.dumps({"code": 0, "data": datas})
+        elif url.path == "/get_forbidden_plates":
+            datas = ForbiddenPlatesManager().list_plates()
+            # human: manually configured plates, fixed: built-in fixed plates
+            response_data = json.dumps(
+                {"code": 0, "data": {"human": list(datas), "fixed": constant.BLACK_CONCEPT_VAGUE_PLATE_LIST}})
+        elif url.path == "/add_forbidden_plate":
+            plate = params_dict.get("plate")
+            logger_kpl_forbidden_plates.info(f"add-{plate}")
+            ForbiddenPlatesManager().add_plate(plate)
+            response_data = json.dumps({"code": 0, "data": {}})
+        elif url.path == "/remove_forbidden_plate":
+            plate = params_dict.get("plate")
+            logger_kpl_forbidden_plates.info(f"remove-{plate}")
+            ForbiddenPlatesManager().remove_plate(plate)
+            response_data = json.dumps({"code": 0, "data": {}})
+        elif url.path == "/get_market_sift_plate_stock_dict":
+            # KPL sift plate inflow
+            data = data_cache.market_sift_plates
+            response_data = json.dumps({"code": 0, "data": data})
+        elif url.path == "/get_limit_up_block_info":
+            # List of limit-up plates
+            limit_up_block_info_list = list(data_cache.limit_up_block_info)
+            data = limit_up_block_info_list
+            response_data = json.dumps({"code": 0, "data": data})
+        elif url.path == "/update_k_lines":
+            # Rebuild K-line data in the background
+            def update_k_lines():
+                all_K_line.all_stocks_all_k_line_dict_write()
+                local_data_management.read_local_K_line_data()
+
+            threading.Thread(target=update_k_lines, daemon=True).start()
+            response_data = json.dumps({"code": 0, "msg": f"Target stock count: {len(data_cache.DataCache().filtered_stocks)}"})
+        elif url.path == "/update_all_stocks_plate":
+            def update_all_stocks_plate():
+                plate_strength_analysis.get_all_stocks_plate_dict(data_cache.DataCache().filtered_stocks)
+                local_data_management.read_local_all_stocks_plate_data()
+
+            threading.Thread(target=update_all_stocks_plate, daemon=True).start()
+            response_data = json.dumps({"code": 0, "msg": f"Target stock count: {len(data_cache.DataCache().filtered_stocks)}"})
+        elif url.path == "/update_target_codes":
+            # Update target codes
+            def update_target_codes():
+                data_cache.target_codes_manager.update_today_codes_info()
+                data_cache.target_codes_manager.load_data()
+
+            threading.Thread(target=update_target_codes, daemon=True).start()
+            response_data = json.dumps({"code": 0, "msg": f"Target stock count: {len(data_cache.DataCache().filtered_stocks)}"})
+        elif url.path == "/update_trading_dates":
+            # Update the trading calendar
+            try:
+                dates = TradingDatesManager().update_trading_dates()
+                TradingDatesManager().load_data()
+                response_data = json.dumps({"code": 0, "data": dates})
+            except Exception as e:
+                logger_debug.exception(e)
+                response_data = json.dumps(
+                    {"code": 1, "msg": str(e)})
+        elif url.path == "/get_opening_quantity":
+            quantity = OpeningQuantityManager().get_quantity()
+            response_data = json.dumps({"code": 0, "data": {"quantity": quantity}})
+        elif url.path == "/set_opening_quantity":
+            quantity = params_dict.get("quantity")
+            OpeningQuantityManager().set_quantity(quantity)
+            response_data = json.dumps({"code": 0, "data": {}})
+        elif url.path == "/test_push_msg":
+            middle_api_protocol.push(
+                middle_api_protocol.load_push_msg({"type": "update_position", "data": {"time": tool.get_now_time_str()}}))
+            response_data = json.dumps({"code": 0, "data": {}})
+        # OpeningQuantityManager
 
         self.send_response(200)
         # Response data sent back to the requesting client
@@ -239,13 +395,31 @@
             print("Received POST request:", str(path))
             url = urlparse.urlparse(path)
             if url.path == "/trade_callback":
-                # Receive KPL data
-                body = self.__parse_request()
-                if type(body) != str:
-                    huaxin_trade_api.add_trade_callback_data(json.dumps(body))
-                else:
-                    huaxin_trade_api.add_trade_callback_data(body)
+                if constant.IS_SIMULATED_TRADE:
+                    # Receive KPL data
+                    body = self.__parse_request()
+                    if type(body) != str:
+                        huaxin_trade_api.add_trade_callback_data(json.dumps(body))
+                    else:
+                        huaxin_trade_api.add_trade_callback_data(body)
                 result_str = json.dumps({"code": 0})
+            elif url.path == "/set_trade_settings":
+                params = self.__parse_request()
+                if not self.__is_sign_right(params):
+                    result_str = json.dumps({"code": 1001, "msg": "Signature error"})
+                    return
+                logger_debug.info(f"set_trade_settings: {params}")
+                running = params.get("running")
+                auto_sell = params.get("auto_sell")
+                auto_buy = params.get("auto_buy")
+                if running is not None:
+                    TradeSetting().set_running(int(running))
+                if auto_sell is not None:
+                    TradeSetting().set_auto_sell(int(auto_sell))
+                if auto_buy is not None:
+                    TradeSetting().set_auto_buy(int(auto_buy))
+                result_str = json.dumps({"code": 0, "data": {}})
+
             elif url.path == "/buy":
                 # Signature verification
                 params = self.__parse_request()
@@ -290,6 +464,11 @@
                         raise Exception("No L1 data available")
                     pre_price = data[1]
                     current_price = data[2] if data[2] else data[5][0][0]
+                    # Latest deal price
+                    latest_deal_price_info = data_cache.latest_l2_transaction_info_dict.get(code)
+                    if latest_deal_price_info:
+                        current_price = round(float(latest_deal_price_info[0]), 2)
+                        async_log_util.info(logger_debug, f"Sell at deal price: {code}-{latest_deal_price_info}")
                     price = tool.get_buy_min_price(current_price)
                     price = max(price, tool.get_limit_down_price(code, pre_price))
                 else:
@@ -313,7 +492,9 @@
                     result_str = json.dumps({"code": 1, "msg": "No amount provided"})
                     return
                 money = int(money)
-                data_cache.BUY_MONEY_PER_CODE = money
+
+                logger_debug.info(f"Set buy amount per position: {money}")
+                BuyMoneyPerCodeManager().set_money(money)
                 result_str = json.dumps({"code": 0})
 
             elif url.path == "/set_limit_up_sell":
@@ -350,11 +531,34 @@
                 orderSysID = params.get("orderSysID")  # System order ID
                 result = huaxin_trade_api.cancel_order(direction, code, orderSysID, blocking=True)
                 result_str = json.dumps(result)
+            elif url.path == "/account_charge":
+                # Account top-up
+                params = self.__parse_request()
+                # Signature verification
+                if not self.__is_sign_right(params):
+                    result_str = json.dumps({"code": 1001, "msg": "Signature error"})
+                    return
+                # Top up the account
+                print("Account top-up", params)
+                money = params.get("money")
+                money = round(float(money), 2)
+                result = huaxin_trade_api.charge(money)
+                result_str = json.dumps(result)
 
             elif url.path == "/upload_deal_big_orders":
                 # Big deal order upload
                 datas = self.rfile.read(int(self.headers['content-length']))
                 _str = str(datas, encoding="gbk")
-                hx_logger_l2_transaction.info(_str)
+                datas = json.loads(_str)
+                for d in datas:
+                    if d[1] != 0:
+                        continue
+                    code, data = d[0], d[2]
+                    if code not in data_cache.big_order_deal_dict:
+                        data_cache.big_order_deal_dict[code] = []
+                    data_cache.big_order_deal_dict[code].append(d)
+                    # Count of buy big orders for the code
+                    len(data_cache.big_order_deal_dict.get(code, []))
+                async_log_util.info(hx_logger_l2_transaction, _str)  # Log it
                 result_str = json.dumps({"code": 0})
         except Exception as e:
@@ -392,8 +596,4 @@
         print("HTTP server is at: http://%s:%d/" % (addr, port))
         httpd.serve_forever()
     except Exception as e:
-        pass
-
-
-if __name__ == "__main__":
-    run()
+        logger_system.exception(e)
-- 
Gitblit v1.8.0
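
Example client (illustrative only, not part of the commit): a minimal sketch of how the GET routes touched by this patch could be exercised. The base URL below is an assumed placeholder (the patch does not show where the server binds), and the signed POST routes such as /set_trade_settings additionally need whatever fields __is_sign_right() validates, which this patch does not define, so only unsigned GET routes are shown.

# illustrative_client.py -- host/port are assumptions, adjust to the real deployment
import json
import urllib.parse
import urllib.request

BASE_URL = "http://127.0.0.1:9004"  # assumed bind address; not specified in this patch


def get(path, params=None):
    """Send a GET request to the data server and decode its JSON body."""
    query = "?" + urllib.parse.urlencode(params) if params else ""
    with urllib.request.urlopen(BASE_URL + path + query, timeout=30) as resp:
        return json.loads(resp.read().decode("utf-8"))


if __name__ == "__main__":
    # /get_env now accepts a request_id and times each check server-side.
    env = get("/get_env", {"request_id": "demo-1"})
    print(env.get("code"), sorted(env.get("data", {}).keys()))

    # New opening-quantity getter added by this patch.
    print(get("/get_opening_quantity"))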