import copy
import json
import logging
import os
import threading
import time

import requests

import constant
from db.redis_manager_delegate import RedisUtils
from log_module import async_log_util
from utils import tool

# KaiPanLa (开盘啦) historical limit-up data management
from db import mysql_data_delegate as mysql_data, redis_manager_delegate as redis_manager
from log_module.log import logger_kpl_limit_up_reason_change, logger_debug, logger_kpl_limit_up
from third_data import kpl_util, kpl_api
from third_data.code_plate_key_manager import LimitUpCodesPlateKeyManager, CodesHisReasonAndBlocksManager

# Saves the limit-up reason associated with each stock code
from third_data.kpl_util import KPLPlatManager, KPLDataType


class KPLCodeLimitUpReasonManager:
    ...

    def __get_redis(self):
        return self.__redisManager.getRedis()

    def save_reason(self, code, reason):
        # Cache the code's limit-up reason in Redis with an expiry
        RedisUtils.setex(self.__get_redis(), f"kpl_limitup_reason-{code}", tool.get_expire(), reason)

    def list_all(self):
        # Return {code: reason} for every cached limit-up reason
        keys = RedisUtils.keys(self.__get_redis(), "kpl_limitup_reason-*")
        dict_ = {}
        for k in keys:
            val = RedisUtils.get(self.__get_redis(), k)
            dict_[k.split("-")[1]] = val
        return dict_
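
# Illustrative usage (kept as a comment, not executed at import time): how the reason
# cache above is typically written and then read back. The stock code and reason
# strings are made-up placeholders.
#
#   reason_manager = KPLCodeLimitUpReasonManager()
#   reason_manager.save_reason("000001", "银行")   # stored under key kpl_limitup_reason-000001
#   print(reason_manager.list_all())               # e.g. {"000001": "银行", ...}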


class KPLLimitUpDataRecordManager:  # NOTE: class name assumed; the original header is not shown in this excerpt
    __kplPlatManager = KPLPlatManager()
    __LimitUpCodesPlateKeyManager = LimitUpCodesPlateKeyManager()
    __CodesPlateKeysManager = CodesHisReasonAndBlocksManager()
    __current_code_reasons_dict = {}
    __current_reason_codes_dict = {}
    __records_cache = {}

    @classmethod
    def __load_hist_and_blocks(cls, code):
        ...
        if records:
            cls.latest_origin_datas = records
            cls.__LimitUpCodesPlateKeyManager.set_today_limit_up([(r[0], r[5]) for r in records])

            code_reasons_dict = {}
            reason_codes_dict = {}
            for d in records:
                # limit-up reason plus the recommended reasons
                bs = {d[5]}
                if d[6]:
                    bs |= set(d[6].split("、"))
                code_reasons_dict[d[0]] = bs
                for b in bs:
                    if b not in reason_codes_dict:
                        reason_codes_dict[b] = set()
                    reason_codes_dict[b].add(d[0])
            cls.__current_code_reasons_dict = code_reasons_dict
            cls.__current_reason_codes_dict = reason_codes_dict

        # Limit-up data records
        mysqldb = mysql_data.Mysqldb()
        ...

    @classmethod
    def list_all(cls, day):
        mysqldb = mysql_data.Mysqldb()
        return mysqldb.select_all(f"select * from kpl_limit_up_record where _day='{day}'")

    @classmethod
    def list_all_cache(cls, day):
        if day in cls.__records_cache:
            return cls.__records_cache[day]
        fdata = cls.list_all(day)
        if fdata:
            cls.__records_cache[day] = fdata
        return fdata

    @staticmethod
    def list_by_code(code, day):
        mysqldb = mysql_data.Mysqldb()
        wheres = []
        for b in constant.KPL_INVALID_BLOCKS:
            wheres.append(f"hb.`_hot_block_name` != '{b}'")
        wheres = " and ".join(wheres)
        sql = f"SELECT GROUP_CONCAT(_hot_block_name) FROM (SELECT hb.`_hot_block_name`,hb.`_day` FROM `kpl_limit_up_record` hb WHERE hb.`_code`='{code}' AND {wheres} ORDER BY hb.`_day` DESC LIMIT 2) a GROUP BY a._day ORDER BY a._day DESC LIMIT 1"
        mysqldb = mysql_data.Mysqldb()
        return mysqldb.select_one(sql)

    # Get the most recent blocks for a code; returns [(block, date)]
    @classmethod
    def get_latest_infos(cls, code, count, contains_today=True):
        wheres = []
        for b in constant.KPL_INVALID_BLOCKS:
            wheres.append(f"hb.`_hot_block_name` != '{b}'")
        wheres = " and ".join(wheres)
        # Only use data from the last 180 days
        min_day = tool.date_sub(tool.get_now_date_str(), 180)
        sql = f"SELECT GROUP_CONCAT(_hot_block_name),`_day`,_blocks FROM (SELECT hb.`_hot_block_name`,hb.`_day`,hb._blocks FROM `kpl_limit_up_record` hb WHERE hb.`_code`='{code}' and {wheres} and hb.`_day` > '{min_day}' ORDER BY hb.`_day` DESC LIMIT 10) a GROUP BY a._day ORDER BY a._day DESC LIMIT {count}"
        mysqldb = mysql_data.Mysqldb()
        results = mysqldb.select_all(sql)
        if results and not contains_today and results[0][1] == tool.get_now_date_str():
            # Drop today's row when the caller excludes today
            results = results[1:]
        return results

    @classmethod
    def get_latest_blocks_set(cls, code):
        results = cls.get_latest_infos(code, 2, False)
        bs = set([b[0] for b in results])
        return bs

    @classmethod
    def get_current_blocks(cls, code):
        return cls.__current_code_reasons_dict.get(code)

    @classmethod
    def get_current_codes_by_block(cls, block):
        return cls.__current_reason_codes_dict.get(block)

    @classmethod
    def get_current_reason_codes_dict(cls):
        return copy.deepcopy(cls.__current_reason_codes_dict)

    @classmethod
    def get_current_reasons(cls):
        if cls.__current_reason_codes_dict:
            return cls.__current_reason_codes_dict.keys()
        return set()

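# Illustrative usage (comment only): once the day's limit-up records have been loaded,
# the in-memory reason/block lookups above can be queried as below. The class name is
# the assumed one from the header above; codes and block names are placeholders.
#
#   blocks = KPLLimitUpDataRecordManager.get_current_blocks("000001")        # e.g. {"银行"}
#   codes = KPLLimitUpDataRecordManager.get_current_codes_by_block("银行")    # e.g. {"000001", ...}
#   reasons = KPLLimitUpDataRecordManager.get_current_reason_codes_dict()     # deep copy, safe to mutate
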

class KPLDataManager:
    __latest_datas = {}
    kpl_data_update_info = {}
    __file_content_cache = {}

    @classmethod
    def __save_in_file(cls, key, datas):
        name = f"{tool.get_now_date_str()}_{key}.log"
        path = f"{constant.CACHE_PATH}/{name}"
        with open(path, 'w') as f:
            f.write(json.dumps(datas))

    @classmethod
    def __get_from_file(cls, key, day=tool.get_now_date_str()):
        # note: the default for day is evaluated once, at import time
        name = f"{day}_{key}.log"
        path = f"{constant.CACHE_PATH}/{name}"
        if not os.path.exists(path):
            return None
        with open(path, 'r') as f:
            lines = f.readlines()
            if lines:
                return json.loads(lines[0])
        return None

    @classmethod
    def get_from_file(cls, type, day):
        name = f"{day}_{type.value}.log"
        path = f"{constant.CACHE_PATH}/{name}"
        if not os.path.exists(path):
            return None
        with open(path, 'r') as f:
            lines = f.readlines()
            if lines:
                return json.loads(lines[0])
        return None

    @classmethod
    def get_from_file_cache(cls, type, day):
        key = f"{type}-{day}"
        if key in cls.__file_content_cache:
            return cls.__file_content_cache.get(key)
        fdata = cls.get_from_file(type, day)
        if fdata:
            cls.__file_content_cache[key] = fdata
        return fdata

    # Get the data of the most recent days, returned in descending date order
    @classmethod
    def get_latest_from_file(cls, type, count):
        files = os.listdir(constant.CACHE_PATH)
        file_name_list = []
        for f in files:
            ...

        return fresults

    @classmethod
    def save_data(cls, type, datas):
        cls.kpl_data_update_info[type] = (tool.get_now_time_str(), len(datas))
        cls.__latest_datas[type] = datas
        cls.__save_in_file(type, datas)

    @classmethod
    def get_data(cls, type):
        type = type.value
        if type in cls.__latest_datas:
            return cls.__latest_datas[type]
        result = cls.__get_from_file(type)
        if result is not None:
            cls.__latest_datas[type] = result
        return result

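# Illustrative usage (comment only): the intended round trip through KPLDataManager.
# Crawled data is pushed in with save_data() and read back with get_data(); the
# KPLDataType member and the datas variable are placeholder examples.
#
#   KPLDataManager.save_data(KPLDataType.LIMIT_UP.value, datas)   # also written to CACHE_PATH as <day>_<type>.log
#   latest = KPLDataManager.get_data(KPLDataType.LIMIT_UP)        # falls back to the day's cache file if needed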

# Cache of the loaded limit-up records, keyed by the current day
__limit_up_list_records_dict = {}


def get_current_limit_up_data_records(count):
    day = tool.get_now_date_str()
    if day in __limit_up_list_records_dict:
        datas = __limit_up_list_records_dict[day]
    else:
        logger_debug.info("从文件中获取前几天的实时涨停数据")
        datas = KPLDataManager().get_latest_from_file(KPLDataType.LIMIT_UP, count + 2)
        if datas:
            # Cache the data
            __limit_up_list_records_dict[day] = datas
    ...
    return fresults


def get_yesterday_limit_up_codes():
    yesterday_limit_up_data_records = get_yesterday_current_limit_up_records()
    yesterday_codes = set([x[0] for x in yesterday_limit_up_data_records])
    return yesterday_codes


def get_yesterday_current_limit_up_records():
    yesterday_limit_up_data_records = get_current_limit_up_data_records(1)[0][1]
    return yesterday_limit_up_data_records


# Limit-up reasons for the most recent days (cached per day)
__latest_current_limit_up_records = {}


def get_latest_current_limit_up_records():
    day = tool.get_now_date_str()
    if day not in __latest_current_limit_up_records:
        fdatas = get_current_limit_up_data_records(15)
        __latest_current_limit_up_records[day] = fdatas
    return __latest_current_limit_up_records.get(day)

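# Illustrative sketch (comment only): the record helpers above return data grouped by
# day, which is why callers index into [0][1] for the records of the first day group.
# The shapes shown here are inferred from how the results are consumed in this module,
# not from the elided implementation.
#
#   fdatas = get_latest_current_limit_up_records()   # roughly [(day, records), ...]
#   codes = {r[0] for r in fdatas[0][1]}             # stock codes from the first day group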

# Run the data-pull tasks
def run_pull_task():
    def __upload_data(type, datas):
        root_data = {
            "type": type,
            "data": datas
        }
        requests.post("http://127.0.0.1:9004/upload_kpl_data", json.dumps(root_data))

    def get_limit_up():
        while True:
            if tool.is_trade_time() and int(tool.get_now_time_str().replace(':', '')) > int("092530"):
                try:
                    results = kpl_api.getLimitUpInfoNew()
                    result = json.loads(results)
                    start_time = time.time()
                    __upload_data("limit_up", result)
                except Exception as e:
                    logging.exception(e)
            time.sleep(3)

    def get_bidding_money():
        # Upload the call-auction (bidding) data
        while True:
            if int("092600") < int(tool.get_now_time_str().replace(":", "")) < int("092700"):
                try:
                    results = kpl_api.daBanList(kpl_api.DABAN_TYPE_BIDDING)
                    result = json.loads(results)
                    __upload_data("biddings", result)
                except Exception as e:
                    pass
            time.sleep(3)

    def get_market_industry():
        while True:
            if tool.is_trade_time():
                try:
                    results = kpl_api.getMarketIndustryRealRankingInfo()
                    result = json.loads(results)
                    __upload_data("industry_rank", result)
                except:
                    pass
            time.sleep(3)

    def get_market_jingxuan():
        while True:
            if tool.is_trade_time():
                try:
                    results = kpl_api.getMarketJingXuanRealRankingInfo()
                    result = json.loads(results)
                    __upload_data("jingxuan_rank", result)
                except:
                    pass
            time.sleep(3)

    threading.Thread(target=get_limit_up, daemon=True).start()
    # threading.Thread(target=get_bidding_money, daemon=True).start()
    # threading.Thread(target=get_market_industry, daemon=True).start()
    # threading.Thread(target=get_market_jingxuan, daemon=True).start()

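# Illustrative note (comment only): run_pull_task() only spawns daemon threads, so a
# standalone runner must keep the main thread alive itself, for example:
#
#   run_pull_task()
#   while True:
#       time.sleep(10)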

if __name__ == "__main__":
    get_current_limit_up_data_records(2)
    print(get_latest_current_limit_up_records())
    input()