import json
import os
import threading
import time

import requests

import constant
from db.redis_manager import RedisUtils
from log_module.log import logger_kpl_limit_up_reason_change
from utils import tool

# KaiPanLa (开盘啦) historical limit-up data management
from db import mysql_data as mysql_data, redis_manager as redis_manager
from third_data import kpl_util, kpl_api
from third_data.code_plate_key_manager import LimitUpCodesPlateKeyManager, CodesHisReasonAndBlocksManager

# Persists the limit-up reason recorded for each stock code
from third_data.kpl_util import KPLPlatManager, KPLDataType


class KPLCodeLimitUpReasonManager:
    __redisManager = redis_manager.RedisManager(3)

    def __get_redis(self):
        return self.__redisManager.getRedis()

    def save_reason(self, code, reason):
        RedisUtils.setex(self.__get_redis(), f"kpl_limitup_reason-{code}", tool.get_expire(), reason)

    def list_all(self):
        keys = RedisUtils.keys(self.__get_redis(), "kpl_limitup_reason-*")
        dict_ = {}
        for k in keys:
            val = RedisUtils.get(self.__get_redis(), k)
            dict_[k.split("-")[1]] = val
        return dict_

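
# Illustrative usage sketch for KPLCodeLimitUpReasonManager (not executed here; assumes the
# Redis instance configured via redis_manager.RedisManager(3) is reachable; code and reason
# values are hypothetical):
#
#     reason_manager = KPLCodeLimitUpReasonManager()
#     reason_manager.save_reason("000001", "银行")    # cache the limit-up reason for a code
#     print(reason_manager.list_all())                # -> {"000001": "银行", ...}
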
class KPLLimitUpDataRecordManager:
    total_datas = None
    latest_datas = {}
    latest_origin_datas = []
    __kplPlatManager = KPLPlatManager()
    __LimitUpCodesPlateKeyManager = LimitUpCodesPlateKeyManager()
    __CodesPlateKeysManager = CodesHisReasonAndBlocksManager()

    @classmethod
    def __load_hist_and_blocks(cls, code):
        # A new record was added: load the code's historical limit-up reasons and plates
        his_reasons = cls.get_latest_infos(code, 10, False)
        his_reasons = set([r[0] for r in his_reasons])
        cls.__CodesPlateKeysManager.set_history_limit_up_reason(code, his_reasons)
        try:
            if not cls.__CodesPlateKeysManager.get_blocks(code):
                results = kpl_api.getStockIDPlate(code)
                bs = [r[1] for r in results]
                cls.__CodesPlateKeysManager.set_blocks(code, bs)
        except Exception as e:
            # Plate lookup is best-effort; ignore lookup failures
            pass

    @classmethod
    def save_record(cls, day, records):
        # Collect the plates (blocks) each code belongs to
        code_block_dict = {}
        for data in records:
            blocks = set(data[5].split("、"))
            code = data[0]
            for b in blocks:
                if not code_block_dict.get(code):
                    code_block_dict[code] = set()
                code_block_dict[code].add(b)
        # Record today's limit-up data
        if records:
            cls.latest_origin_datas = records
            cls.__LimitUpCodesPlateKeyManager.set_today_limit_up([(r[0], r[5]) for r in records])

        # Persist the limit-up records
        mysqldb = mysql_data.Mysqldb()
        # Collect the limit-up reasons and their concept codes
        plats = {}
        for d in records:
            plats[d[5]] = d[9]
        for p in plats:
            cls.__kplPlatManager.save_plat(plats[p], p)

        for d in records:
            # Record layout: (code, name, first limit-up time, latest limit-up time, board height,
            # limit-up reason, plates, actual free float, main-force net inflow, reason code, reason code count)
            code = d[0]
            _id = f"{day}_{code}_{d[5]}"

            result = mysqldb.select_one("select * from kpl_limit_up_record where _id='{}'".format(_id))
            if not result:
                mysqldb.execute(
                    f"insert into kpl_limit_up_record(_id,_day,_hot_block_name,_code,_code_name,_limit_up_time,_blocks,_latest_limit_up_time,_update_time,_create_time,_hot_block_code_count,_limit_up_high_info,_zylt_val) values('{_id}','{day}','{d[5]}','{d[0]}','{d[1]}','{d[2]}','{d[6]}','{d[3]}',now(),now(),{d[10]},'{d[4]}',{d[7]})")
                cls.__load_hist_and_blocks(code)
            else:
                if _id in cls.latest_datas and json.dumps(cls.latest_datas.get(_id)) != json.dumps(d):
                    mysqldb.execute(
                        f"update kpl_limit_up_record set _latest_limit_up_time='{d[3]}',_limit_up_time='{d[2]}',_hot_block_code_count={d[10]},_limit_up_high_info='{d[4]}' ,_update_time=now() where _id='{_id}'")
                    cls.latest_datas[_id] = d
            cls.latest_datas[_id] = d

            # Fetch the plates previously stored for this code and delete rows whose plate is no longer correct
            old_datas = KPLLimitUpDataRecordManager.list_by_code(code, day)
            if old_datas:
                for dd in old_datas:
                    if dd[2] not in code_block_dict[code]:
                        mysqldb.execute(f"delete from kpl_limit_up_record where _id='{dd[0]}'")
                        logger_kpl_limit_up_reason_change.info(f"code-{dd[3]}:{dd[2]}-{code_block_dict[code]}")
                        # The hot plate has changed
                        mysqldb.execute(
                            f"update kpl_limit_up_record set _hot_block_change = '{dd[2]}' where _day='{dd[1]}' and _code='{code}'")

                        cls.__LimitUpCodesPlateKeyManager.set_today_limit_up_reason_change(code, dd[2],
                                                                                           code_block_dict[code])

                        if dd[0] in cls.latest_datas:
                            cls.latest_datas.pop(dd[0])
        cls.total_datas = KPLLimitUpDataRecordManager.list_all(tool.get_now_date_str())

    @classmethod
    def load_total_datas(cls):
        cls.total_datas = KPLLimitUpDataRecordManager.list_all(tool.get_now_date_str())
        cls.__LimitUpCodesPlateKeyManager.set_today_total_limit_up([(r[3], r[2]) for r in cls.total_datas])
        for d in cls.total_datas:
            cls.__load_hist_and_blocks(d[3])

    @staticmethod
    def list_all(day):
        mysqldb = mysql_data.Mysqldb()
        return mysqldb.select_all(f"select * from kpl_limit_up_record where _day='{day}'")

    @staticmethod
    def list_by_code(code, day):
        mysqldb = mysql_data.Mysqldb()
        return mysqldb.select_all(f"select * from kpl_limit_up_record where _code='{code}' and _day='{day}'")

    @staticmethod
    def list_by_block(block_name, day):
        mysqldb = mysql_data.Mysqldb()
        return mysqldb.select_all(
            f"select * from kpl_limit_up_record where _hot_block_name='{block_name}' and _day='{day}'")

    @staticmethod
    def list_blocks_with_day(days):
        mysqldb = mysql_data.Mysqldb()
        sql = "select _hot_block_name,_day from kpl_limit_up_record where "
        wheres = []
        for day in days:
            wheres.append(f"_day = '{day}'")
        sql += " or ".join(wheres)
        sql += " group by _hot_block_name,_day"

        results = mysqldb.select_all(sql)
        return results

    @staticmethod
    def get_latest_blocks(code):
        wheres = []
        for b in constant.KPL_INVALID_BLOCKS:
            wheres.append(f"hb.`_hot_block_name` != '{b}'")
        wheres = " and ".join(wheres)
        sql = f"SELECT GROUP_CONCAT(_hot_block_name) FROM (SELECT hb.`_hot_block_name`,hb.`_day` FROM `kpl_limit_up_record` hb WHERE hb.`_code`='{code}' AND {wheres} ORDER BY hb.`_day` DESC LIMIT 2) a GROUP BY a._day ORDER BY a._day DESC LIMIT 1"
        mysqldb = mysql_data.Mysqldb()
        return mysqldb.select_one(sql)

    # Get the code's most recent plates; returns rows of (grouped plate names, day, blocks)
    @classmethod
    def get_latest_infos(cls, code, count, contains_today=True):
        wheres = []
        for b in constant.KPL_INVALID_BLOCKS:
            wheres.append(f"hb.`_hot_block_name` != '{b}'")
        wheres = " and ".join(wheres)
        # Only look at data from the last 180 days
        min_day = tool.date_sub(tool.get_now_date_str(), 180)
        sql = f"SELECT GROUP_CONCAT(_hot_block_name),`_day`,_blocks FROM (SELECT hb.`_hot_block_name`,hb.`_day`,hb._blocks FROM `kpl_limit_up_record` hb WHERE hb.`_code`='{code}' and {wheres} and hb.`_day` > '{min_day}' ORDER BY hb.`_day` DESC LIMIT 10) a GROUP BY a._day ORDER BY a._day DESC LIMIT {count}"
        mysqldb = mysql_data.Mysqldb()
        results = mysqldb.select_all(sql)
        if results and not contains_today and results[0][1] == tool.get_now_date_str():
            return results[1:]
        return results

    @classmethod
    def get_latest_blocks_set(cls, code):
        results = cls.get_latest_infos(code, 2, False)
        bs = set([b[0] for b in results])
        return bs

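
# Illustrative usage sketch for KPLLimitUpDataRecordManager (not executed here; assumes the
# kpl_limit_up_record table exists and mysql_data.Mysqldb() can connect; the code used below
# is hypothetical). The tuples passed to save_record follow the layout documented inside it.
#
#     KPLLimitUpDataRecordManager.load_total_datas()            # cache today's rows in total_datas
#     rows = KPLLimitUpDataRecordManager.list_by_code("000001", tool.get_now_date_str())
#     plates = KPLLimitUpDataRecordManager.get_latest_blocks_set("000001")
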
class KPLDataManager:
    __latest_datas = {}
    kpl_data_update_info = {}

    @classmethod
    def __save_in_file(cls, key, datas):
        name = f"{tool.get_now_date_str()}_{key}.log"
        path = f"{constant.CACHE_PATH}/{name}"
        with open(path, 'w') as f:
            f.write(json.dumps(datas))

    @classmethod
    def __get_from_file(cls, key, day=None):
        # Resolve the date at call time; a default of tool.get_now_date_str() would be evaluated
        # only once at import time and go stale in a long-running process
        if day is None:
            day = tool.get_now_date_str()
        name = f"{day}_{key}.log"
        path = f"{constant.CACHE_PATH}/{name}"
        if not os.path.exists(path):
            return None
        with open(path, 'r') as f:
            lines = f.readlines()
            if lines:
                return json.loads(lines[0])
        return None

    @classmethod
    def get_from_file(cls, type, day):
        name = f"{day}_{type.value}.log"
        path = f"{constant.CACHE_PATH}/{name}"
        if not os.path.exists(path):
            return None
        with open(path, 'r') as f:
            lines = f.readlines()
            if lines:
                return json.loads(lines[0])
        return None

    # Get the data of the last few days, returned in descending date order
    @classmethod
    def get_latest_from_file(cls, type, count):
        files = os.listdir(constant.CACHE_PATH)
        file_name_list = []
        for f in files:
            # Cache file names look like "<YYYY-MM-DD>_<type>.log": 10-char date prefix plus type suffix
            if f[10:] == f"_{type.value}.log":
                file_name_list.append((f.split("_")[0], f))

        file_name_list.sort(key=lambda x: x[0], reverse=True)
        file_name_list = file_name_list[:count]
        fresults = []
        for file in file_name_list:
            path = f"{constant.CACHE_PATH}/{file[1]}"
            if not os.path.exists(path):
                continue
            with open(path, 'r') as f:
                lines = f.readlines()
                if lines:
                    fresults.append((file[0], json.loads(lines[0])))

        return fresults

    @classmethod
    def save_data(cls, type, datas):
        cls.kpl_data_update_info[type] = (tool.get_now_time_str(), len(datas))
        cls.__latest_datas[type] = datas
        cls.__save_in_file(type, datas)

    @classmethod
    def get_data(cls, type):
        type = type.value
        if type in cls.__latest_datas:
            return cls.__latest_datas[type]
        result = cls.__get_from_file(type)
        if result is not None:
            cls.__latest_datas[type] = result
        return result

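
# Illustrative usage sketch for KPLDataManager (not executed here). Judging from the file-name
# round trip between __save_in_file and get_from_file, save_data appears to expect the
# KPLDataType *value* string, while get_data/get_latest_from_file take the enum itself:
#
#     KPLDataManager.save_data(KPLDataType.LIMIT_UP.value, datas)          # cache + write under CACHE_PATH
#     latest = KPLDataManager.get_data(KPLDataType.LIMIT_UP)               # memory first, then today's file
#     recent = KPLDataManager.get_latest_from_file(KPLDataType.LIMIT_UP, 5)  # -> [(day, datas), ...]
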
def load_history_limit_up():
    for file_name in os.listdir(f"{constant.get_path_prefix()}/kpl/his"):
        if file_name.find("HisDaBanList_1.log") < 0:
            continue
        day = file_name[:10]
        with open(f"{constant.get_path_prefix()}/kpl/his/{file_name}", 'r', encoding="utf-16") as f:
            lines = f.readlines()
            line = lines[0]
            result = json.loads(line)
            list_ = kpl_util.parseDaBanData(result, kpl_util.DABAN_TYPE_LIMIT_UP)
            # KPLLimitUpDataRecordManager.save_record(day, list_)
            for r in list_:
                print(r[-1], r[5])
                KPLPlatManager().save_plat(r[-1], r[5])

            # print(day, list_)


# Historical limit-up lists, cached per day
__limit_up_list_records_dict = {}


# Get the recent days' real-time limit-up data
# Return format: [(day, datas), ...]
def get_current_limit_up_data_records(count):
    fresults = []
    day = tool.get_now_date_str()
    datas = []
    if day in __limit_up_list_records_dict:
        datas = __limit_up_list_records_dict[day]
    else:
        datas = KPLDataManager().get_latest_from_file(KPLDataType.LIMIT_UP, 10)
        if datas:
            # Cache the data for the rest of the day
            __limit_up_list_records_dict[day] = datas
    for i in range(len(datas)):
        # Skip today's data; only previous days are returned
        if datas[i][0] == day:
            continue
        fresults.append(datas[i])
        if len(fresults) >= count:
            break
    return fresults


def get_yesterday_limit_up_codes():
    yesterday_limit_up_data_records = get_current_limit_up_data_records(1)[0][1]
    yesterday_codes = set([x[0] for x in yesterday_limit_up_data_records])
    return yesterday_codes

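
# Illustrative usage sketch (not executed here; relies on cached KPLDataType.LIMIT_UP files
# being present under constant.CACHE_PATH):
#
#     records = get_current_limit_up_data_records(3)   # -> [(day, datas), ...], newest first, excluding today
#     codes = get_yesterday_limit_up_codes()           # -> set of stock codes that hit limit-up yesterday
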
# Run the data-pulling task
def run_pull_task():
    def __upload_data(type, datas):
        root_data = {
            "type": type,
            "data": datas
        }
        requests.post("http://127.0.0.1:9004/upload_kpl_data", json.dumps(root_data))

    def get_limit_up():
        while True:
            if tool.is_trade_time():
                try:
                    results = kpl_api.getLimitUpInfo()
                    result = json.loads(results)
                    __upload_data("limit_up", result)
                except Exception as e:
                    pass
            time.sleep(3)

    def get_bidding_money():
        # Upload call-auction (bidding) data
        while True:
            if int("092600") < int(tool.get_now_time_str().replace(":", "")) < int("092700"):
                try:
                    results = kpl_api.daBanList(kpl_api.DABAN_TYPE_BIDDING)
                    result = json.loads(results)
                    __upload_data("biddings", result)
                except Exception as e:
                    pass
            time.sleep(3)

    def get_market_industry():
        while True:
            if tool.is_trade_time():
                try:
                    results = kpl_api.getMarketIndustryRealRankingInfo()
                    result = json.loads(results)
                    __upload_data("industry_rank", result)
                except:
                    pass
            time.sleep(3)

    def get_market_jingxuan():
        while True:
            if tool.is_trade_time():
                try:
                    results = kpl_api.getMarketJingXuanRealRankingInfo()
                    result = json.loads(results)
                    __upload_data("jingxuan_rank", result)
                except:
                    pass
            time.sleep(3)

    threading.Thread(target=get_limit_up, daemon=True).start()
    # threading.Thread(target=get_bidding_money, daemon=True).start()
    threading.Thread(target=get_market_industry, daemon=True).start()
    threading.Thread(target=get_market_jingxuan, daemon=True).start()

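
# Illustrative launch sketch (not executed here): run_pull_task() only spawns daemon threads,
# so the caller must keep the main thread alive. The import path below is hypothetical and
# depends on where this module lives in the project:
#
#     from third_data import kpl_data_manager   # hypothetical module path
#     kpl_data_manager.run_pull_task()
#     while True:
#         time.sleep(60)
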
if __name__ == "__main__":
    # run_pull_task()
    # input()
    results = kpl_api.daBanList(kpl_api.DABAN_TYPE_LIMIT_UP)
    results = json.loads(results)
    results = results["list"]
    for result in results:
        print(result)