import http
import json
import logging
import socketserver
import threading
import time
import concurrent.futures
from http.server import BaseHTTPRequestHandler
from urllib.parse import parse_qs

import dask

from log_module.log import logger_system, logger_debug, logger_kpl_limit_up
from log_module import log, log_analyse, log_export
from utils import global_util, tool, data_export_util, init_data_util
from code_attribute import gpcode_manager
from code_attribute.gpcode_manager import CodePrePriceManager
from l2 import code_price_manager, l2_data_util, l2_data_manager_new, cancel_buy_strategy, transaction_progress
from l2.cancel_buy_strategy import HourCancelBigNumComputer
from output import code_info_output, limit_up_data_filter, output_util, kp_client_msg_manager
from output.limit_up_data_filter import IgnoreCodeManager
from third_data import kpl_util, kpl_data_manager, kpl_api, block_info
from trade import bidding_money_manager, trade_manager, l2_trade_util, trade_record_log_util
from trade.l2_trade_util import BlackListCodeManager

# Disable the log output of http.server
logger = logging.getLogger("http.server")
logger.setLevel(logging.CRITICAL)

| | | |
| | | class DataServer(BaseHTTPRequestHandler): |
| | |
| | | __industry_cache_dict = {} |
| | | __latest_limit_up_codes_set = set() |
| | | __data_process_thread_pool = concurrent.futures.ThreadPoolExecutor(max_workers=10) |
| | | # 代码的涨幅 |
| | | __code_limit_rate_dict = {} |
| | | |
| | | # 禁用日志输出 |
| | | def log_message(self, format, *args): |
| | | pass |
| | | |
    def __get_limit_up_list(self):
        # Count the number of limit-up codes so far, grouped by limit-up reason

        total_datas = KPLLimitUpDataRecordManager.total_datas
        # Sorted by limit-up time
        total_datas = list(total_datas)
        current_reason_codes_dict = KPLLimitUpDataRecordManager.get_current_reason_codes_dict()

        # Aggregate the limit-up reasons
        limit_up_reason_dict = {}

        limit_up_reason_statistic_info = [
            (k, len(limit_up_reason_dict[k]), limit_up_reason_want_count_dict.get(k), limit_up_reason_dict[k][0][5])
            for k in limit_up_reason_dict]
        limit_up_reason_statistic_info.sort(key=lambda x: int(x[3]))
        limit_up_reason_statistic_info.sort(
            key=lambda x: len(current_reason_codes_dict[x[0]]) if x[0] in current_reason_codes_dict else 0)
        limit_up_reason_statistic_info.reverse()

        codes_set = set([d[3] for d in total_datas])
        # Determine leader rank, whether still limit-up, whether the board broke open, and load scores
        rank_dict = limit_up_data_filter.get_limit_up_time_rank_dict(total_datas)
        limit_up_dict, limit_up_codes, open_limit_up_codes = limit_up_data_filter.get_limit_up_info(codes_set)
        score_dict = limit_up_data_filter.get_codes_scores_dict(codes_set)
        score_dict = {}
        fresult = []
        ignore_codes = self.__IgnoreCodeManager.list_ignore_codes("1")

            # -- data preparation begins --
            codes_set = set([d[0] for d in temps])
            limit_up_dict, limit_up_codes, open_limit_up_codes = limit_up_data_filter.get_limit_up_info(codes_set)
            score_dict = limit_up_data_filter.get_codes_scores_dict(codes_set)
            score_dict = {}
            want_codes = gpcode_manager.WantBuyCodesManager().list_code_cache()
            black_codes = BlackListCodeManager().list_codes()
            total_datas = KPLLimitUpDataRecordManager.total_datas

            ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
            code = ps_dict['code']
            name = ps_dict.get('name')
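            # Illustrative note (values below are hypothetical, not from the project):
            # parse_qs keeps every parameter as a list, and the dict comprehension above
            # takes only the first value of each parameter, e.g.
            #   parse_qs("code=000001&name=PAB") -> {"code": ["000001"], "name": ["PAB"]}
            #   ps_dict                          -> {"code": "000001", "name": "PAB"}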
            try:
                data = code_info_output.get_output_params(code, self.__jingxuan_cache_dict, self.__industry_cache_dict)
                if data["code_name"].find("None") > -1 and name:
                    data["code_name"] = f"{name} {code}"

                self.__history_plates_dict[code] = (time.time(), data["kpl_code_info"]["code_records"])
                if "plate" in data["kpl_code_info"]:
                    self.__blocks_dict[code] = (time.time(), data["kpl_code_info"]["plate"])

                response_data = json.dumps({"code": 0, "data": data})
                print("get_score_info 耗时:", time.time() - start_time)
            except Exception as e:
                logger_debug.exception(e)
                logging.exception(e)

            # Get the score info
            pass
        elif url.path == "/get_kpl_block_info":
            start_time = time.time()
            ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
            code = ps_dict['code']
            try:
                data = code_info_output.get_kpl_block_info(code)
                response_data = json.dumps({"code": 0, "data": data})
                print("get_kpl_block_info 耗时:", time.time() - start_time)
            except Exception as e:
                logger_debug.exception(e)
                logging.exception(e)

| | | elif url.path == "/get_l2_datas": |
| | | try: |
| | | # 获取L2的数据 |
| | | ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()]) |
| | | code = ps_dict['code'] |
| | | datas = data_export_util.get_l2_datas(code) |
| | | code_name = gpcode_manager.get_code_name(code) |
| | | response_data = json.dumps({"code": 0, "data": {"code": code, "code_name": code_name, "data": datas}}) |
| | | except Exception as e: |
| | | logger_debug.exception(e) |
| | | elif url.path == "/get_trade_progress": |
| | | # 获取交易进度 |
| | | ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()]) |
| | | code = ps_dict['code'] |
| | | trade_progress, is_default = transaction_progress.TradeBuyQueue().get_traded_index(code) |
| | | response_data = json.dumps( |
| | | {"code": 0, "data": {"trade_progress": trade_progress, "is_default": is_default}}) |
| | | elif url.path == "/get_l_cancel_datas": |
| | | # 最新的l撤数据 |
| | | ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()]) |
| | | code = ps_dict['code'] |
| | | buy_single_index = ps_dict.get('buy_single_index') |
| | | if buy_single_index is not None: |
| | | buy_single_index = int(buy_single_index) |
| | | records = code_info_output.load_trade_record_cancel_watch_indexes(code) |
| | | # 获取最新的L上与L下 |
| | | records.reverse() |
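            # Assumed record layout, inferred from the comparisons below (not documented in
            # this fragment): r = (cancel_type, buy_single_index, watch_indexes). After
            # reverse() the records are scanned newest-first and the first match per cancel
            # type wins.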
            up_indexes = []
            down_indexes = []
            for r in records:
                if buy_single_index and buy_single_index != r[1]:
                    continue
                if r[0] == trade_record_log_util.CancelWatchIndexesInfo.CANCEL_TYPE_L_UP:
                    up_indexes = r[2]
                    break
            for r in records:
                if buy_single_index and buy_single_index != r[1]:
                    continue
                if r[0] == trade_record_log_util.CancelWatchIndexesInfo.CANCEL_TYPE_L_DOWN:
                    down_indexes = r[2]
                    break

            response_data = json.dumps(
                {"code": 0, "data": {"up": up_indexes, "down": down_indexes}})
| | | elif url.path == "/get_h_cancel_datas": |
| | | # 最新的H撤数据 |
| | | ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()]) |
| | | code = ps_dict['code'] |
| | | buy_single_index = ps_dict.get('buy_single_index') |
| | | records = code_info_output.load_trade_record_cancel_watch_indexes(code, |
| | | trade_record_log_util.CancelWatchIndexesInfo.CANCEL_TYPE_H) |
| | | # 获取最新的L上与L下 |
| | | records.reverse() |
| | | indexes = [] |
| | | for r in records: |
| | | if buy_single_index and buy_single_index != r[1]: |
| | | continue |
| | | indexes = r[2] |
| | | break |
| | | response_data = json.dumps( |
| | | {"code": 0, "data": indexes}) |
| | | elif url.path == "/kpl/get_limit_up_list": |
| | | response_data = self.__get_limit_up_list() |
| | | |
| | |
| | | elif url.path == "/kpl/get_plate_codes": |
| | | # 获取涨停原因下面的代码 |
| | | ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()]) |
| | | plate = ps_dict["plate"] |
| | | plate = kpl_util.filter_block(ps_dict["plate"]) |
| | | # 获取板块下的代码 |
| | | # 统计目前为止的代码涨停数量(分涨停原因) |
| | | now_limit_up_codes_info = self.__kplDataManager.get_data(KPLDataType.LIMIT_UP) |
| | |
| | | |
| | | codes_info = [] |
| | | for d in record_limit_up_datas: |
| | | if d[2] != plate: |
| | | if kpl_util.filter_block(d[2]) != plate: |
| | | continue |
| | | # 代码,名称,涨停时间,是否炸板,是否想买,是否已经下过单 |
| | | codes_info.append( |
| | | [d[3], d[4], tool.to_time_str(int(d[5])), 1 if d[3] not in now_limit_up_codes else 0, 0, 0]) |
| | | [d[3], d[4], tool.to_time_str(int(d[5])), 1 if d[3] not in now_limit_up_codes else 0, 0, 0, d[12], |
| | | output_util.money_desc(d[13]), 1]) |
| | | |
| | | for d in record_limit_up_datas: |
| | | if kpl_util.filter_block(d[2]) == plate: |
| | | continue |
| | | if plate not in [kpl_util.filter_block(k) for k in d[6].split("、")]: |
| | | continue |
| | | # 代码,名称,涨停时间,是否炸板,是否想买,是否已经下过单 |
| | | codes_info.append( |
| | | [d[3], d[4], tool.to_time_str(int(d[5])), 1 if d[3] not in now_limit_up_codes else 0, 0, 0, d[12], |
| | | output_util.money_desc(d[13]), 0]) |
| | | |
| | | codes_info.sort(key=lambda x: x[2]) |
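            # Note (assuming tool.to_time_str yields an "HH:MM:SS" string): sorting x[2]
            # lexicographically is the same as sorting chronologically, so the earliest
            # limit-ups come first.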
            # Check whether the codes are in the want-to-buy list
            want_codes = gpcode_manager.WantBuyCodesManager().list_code_cache()

            response_data = json.dumps({"code": 1, "msg": "请上传code"})

| | | elif url.path == "/get_last_trade_day_reasons": |
| | | # 获取上个交易日的相同涨停原因的代码信息 |
| | | ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()]) |
| | | code = ps_dict["code"] |
| | | day = HistoryKDatasUtils.get_previous_trading_date(tool.get_now_date_str()) |
| | | # 获取涨停数据 |
| | | |
| | | # 获取代码的原因 |
| | | reasons = kpl_data_manager.KPLLimitUpDataRecordManager.list_by_code(code, day) |
| | | if reasons: |
| | | reasons = list(reasons) |
| | | reasons.sort(key=lambda x: x[9]) |
| | | reason = reasons[-1][2] |
| | | datas = self.__kplDataManager.get_from_file(kpl_util.KPLDataType.LIMIT_UP, day) |
| | | # (代码,名称,首次涨停时间,最近涨停时间,几板,涨停原因,板块,实际流通,主力净额,涨停原因代码,涨停原因代码数量) |
| | | result_list = [] |
| | | if datas: |
| | | for d in datas: |
| | | if d[5] == reason and d[0] != code: |
| | | # (代码,名称) |
| | | result_list.append((d[0], d[1])) |
| | | response_data = json.dumps({"code": 0, "data": {"reason": reason, "data": result_list}}) |
| | | else: |
| | | response_data = json.dumps({"code": 1, "msg": "昨日未涨停"}) |
            # Compute the average price change percentage
            def get_limit_rate_list(codes):
                if not codes:
                    return []
                need_request_codes = set()
                if tool.trade_time_sub(tool.get_now_time_str(), "09:30:00") < 0:
                    need_request_codes |= set(codes)
                else:
                    now_time = time.time()
                    for c in codes:
                        if c not in self.__code_limit_rate_dict:
                            need_request_codes.add(c)
                        elif now_time - self.__code_limit_rate_dict[c][1] > 60:
                            need_request_codes.add(c)
                if need_request_codes:
                    _limit_rate_list = HistoryKDatasUtils.get_codes_limit_rate(list(need_request_codes))
                    for d in _limit_rate_list:
                        self.__code_limit_rate_dict[d[0]] = (d[1], time.time())
                return [(c_, self.__code_limit_rate_dict[c_][0]) for c_ in codes]
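            # Usage sketch (hypothetical codes): get_limit_rate_list(["000001", "000333"])
            # returns [(code, rate), ...]. Before 09:30 every code is re-requested; afterwards
            # a cached rate from __code_limit_rate_dict is reused unless it is missing or
            # older than 60 seconds.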

            try:
                raise Exception("接口暂停使用")
                # Get codes from the previous trading day that share the same limit-up reason
                ps_dict = dict([(k, v[0]) for k, v in parse_qs(url.query).items()])
                code = ps_dict["code"]
                # Get yesterday's limit-up data
                day = HistoryKDatasUtils.get_previous_trading_date_cache(tool.get_now_date_str())

                limit_up_records = kpl_data_manager.KPLLimitUpDataRecordManager.list_all_cache(day)
                reasons = []
                for d in limit_up_records:
                    if d[3] == code:
                        reasons.append(d)
                # Get the code's limit-up reason
                if reasons:
                    reasons = list(reasons)
                    reasons.sort(key=lambda x: x[9])
                    reason = reasons[-1][2]
                    # Get the limit-up data
                    datas = self.__kplDataManager.get_from_file_cache(kpl_util.KPLDataType.LIMIT_UP, day)
                    # (code, name, first limit-up time, latest limit-up time, board count, limit-up reason, plate,
                    #  actual free float, main-force net inflow, limit-up reason code, count of codes for that reason)
                    yesterday_result_list = []
                    percent_rate = 0
                    if datas:
                        yesterday_codes = set()
                        for d in datas:
                            if d[5] == reason:
                                yesterday_codes.add(d[0])
                        # Get the price change percentages
                        limit_rate_list = get_limit_rate_list(yesterday_codes)
                        limit_rate_dict = {}
                        if limit_rate_list:
                            total_rate = 0
                            for d in limit_rate_list:
                                limit_rate_dict[d[0]] = d[1]
                                total_rate += d[1]
                            percent_rate = round(total_rate / len(limit_rate_list), 2)

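                            # Worked example with hypothetical rates: for 5.0, 10.0 and 9.97
                            # the average is round(24.97 / 3, 2) == 8.32.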
                        for d in datas:
                            if d[5] == reason:
                                yesterday_codes.add(d[0])
                                if d[0] != code:
                                    # (code, name, price change percentage)
                                    yesterday_result_list.append((d[0], d[1], limit_rate_dict.get(d[0])))

                    current_limit_up_list = kpl_data_manager.KPLLimitUpDataRecordManager.latest_origin_datas
                    current_result_list = []
                    if current_limit_up_list:
                        for c in current_limit_up_list:
                            if c[5] == reason and c[0] != code:
                                current_result_list.append((c[0], c[1]))
                    response_data = json.dumps({"code": 0, "data": {"reason": reason, "reason_rate": percent_rate,
                                                                    "data": {"yesterday": yesterday_result_list,
                                                                             "current": current_result_list}}})
                else:
                    response_data = json.dumps({"code": 1, "msg": "昨日未涨停"})
            except Exception as e:
                logger_debug.exception(e)
                raise e

        elif url.path == "/pull_kp_client_msg":
            # Pull the latest client messages

            result_str = self.__process_kpl_data(params)
            self.__send_response(result_str)

    def __process_kpl_data(self, data_origin):
        def do_limit_up(result_list_):
            if result_list_:
                # Save the limit-up time

                    code = d[0]
                    limit_up_reasons[code] = d[5]
                    codes_set.add(code)
                    if tool.is_shsz_code(code):
                        limit_up_time = time.strftime("%H:%M:%S", time.localtime(d[2]))
                        code_price_manager.Buy1PriceManager().set_limit_up_time(code, limit_up_time)
                add_codes = codes_set - self.__latest_limit_up_codes_set

                        limit_up_reason_code_dict[b].add(code)
                cancel_buy_strategy.LCancelRateManager.set_block_limit_up_count(limit_up_reason_code_dict)
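                # Assumption (built in code not shown in this fragment): limit_up_reason_code_dict
                # maps a limit-up reason/block name to the set of codes currently limit-up for
                # that reason, which is what LCancelRateManager.set_block_limit_up_count consumes.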

                if add_codes:
                    for code in add_codes:
                        # Decide by the limit-up reason whether the code can be bought
                        if tool.is_shsz_code(code):
                            try:
                                # Check whether an order has been placed
                                trade_state = trade_manager.CodesTradeStateManager().get_trade_state(code)

                                current_limit_up_datas = []
                                if not limit_up_record_datas:
                                    limit_up_record_datas = []
                                if CodePlateKeyBuyManager.is_need_cancel(code, limit_up_reasons.get(code),
                                                                         current_limit_up_datas,
                                                                         limit_up_record_datas,
                                                                         yesterday_current_limit_up_codes,
                                                                         before_blocks_dict):
                                    pass
                                    # TODO Temporarily commented out for testing
                                    l2_data_manager_new.L2TradeDataProcessor.cancel_buy(code,
                                                                                        f"涨停原因({limit_up_reasons.get(code)})不是老大撤单",
                                                                                        "板块撤")
                                # Buy only the absolute leader
                                # Must not cancel mid-way
                                # if CodePlateKeyBuyManager.is_need_cancel(code, limit_up_reasons.get(code),
                                #                                          current_limit_up_datas,
                                #                                          limit_up_record_datas,
                                #                                          yesterday_current_limit_up_codes,
                                #                                          before_blocks_dict):
                                #     l2_data_manager_new.L2TradeDataProcessor.cancel_buy(code,
                                #                                                         f"涨停原因({limit_up_reasons.get(code)})不是老大撤单",
                                #                                                         "板块撤")
                            except Exception as e:
                                logger_debug.exception(e)
            kpl_data_manager.KPLLimitUpDataRecordManager.save_record(tool.get_now_date_str(), result_list_)
            self.__kplDataManager.save_data(type_, result_list_)

        # Strip the word "概念" (concept) from block names
        data = data_origin
        type_ = data["type"]
        print("开盘啦type:", type_)
        if type_ == KPLDataType.BIDDING.value:

            self.__kplDataManager.save_data(type_, result_list)
        elif type_ == KPLDataType.LIMIT_UP.value:
            result_list = kpl_util.parseLimitUpData(data["data"])
            self.__data_process_thread_pool.submit(lambda: do_limit_up(result_list))
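            # Note: submit() runs do_limit_up on the 10-worker pool; any exception raised
            # inside it is stored on the (unretrieved) Future rather than propagated here,
            # so do_limit_up has to do its own logging.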
            # Log the limit-up data
            logger_kpl_limit_up.info(result_list)
        elif type_ == KPLDataType.OPEN_LIMIT_UP.value:
            result_list = kpl_util.parseDaBanData(data["data"], kpl_util.DABAN_TYPE_OPEN_LIMIT_UP)
            if result_list:


    handler = DataServer
    # httpd = socketserver.TCPServer((addr, port), handler)
    try:
        httpd = ThreadedHTTPServer((addr, port), handler)
        print("HTTP server is at: http://%s:%d/" % (addr, port))
        httpd.serve_forever()
    except Exception as e:
        logger_system.exception(e)
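    # Assumption (defined elsewhere in this module, not in this fragment): ThreadedHTTPServer
    # is the usual ThreadingMixIn + HTTPServer combination, e.g.
    #   class ThreadedHTTPServer(socketserver.ThreadingMixIn, http.server.HTTPServer):
    #       """Handle each request in its own thread."""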


if __name__ == "__main__":
    run("0.0.0.0", 9004)
    # Note: run() blocks in serve_forever(), so the ad-hoc test code below only runs when the
    # run(...) call above is commented out.
    code = "002676"
    buy_single_index = 716
    records = code_info_output.load_trade_record_cancel_watch_indexes(code)
    # Get the latest L-up and L-down watch indexes
    records.reverse()
    up_indexes = []
    down_indexes = []
    for r in records:
        if buy_single_index and buy_single_index != r[1]:
            continue
        if r[0] == trade_record_log_util.CancelWatchIndexesInfo.CANCEL_TYPE_L_UP:
            up_indexes = r[2]
            break
    for r in records:
        if buy_single_index and buy_single_index != r[1]:
            continue
        if r[0] == trade_record_log_util.CancelWatchIndexesInfo.CANCEL_TYPE_L_DOWN:
            down_indexes = r[2]
            break

    response_data = json.dumps(
        {"code": 0, "data": {"up": up_indexes, "down": down_indexes}})