import dask
import json

import log
from code_attribute import gpcode_manager
from log import logger_request_debug
from log_module import log_analyse, log_export

KPLLimitUpDataRecordManager.load_total_datas()
total_datas = KPLLimitUpDataRecordManager.total_datas

current_datas_results = hosting_api_util.common_request({"ctype": "get_kpl_limit_up_datas"})
if isinstance(current_datas_results, str):
    current_datas_results = json.loads(current_datas_results)
current_datas = current_datas_results.get("data")  # KPLLimitUpDataRecordManager.latest_origin_datas
current_block_codes = {}
for c in current_datas:
    if c[5] not in current_block_codes:
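
# The loop body above is elided in this excerpt. A minimal sketch of a
# plausible completion, assuming only what the excerpt shows (index 5 of a
# row is the block code); keeping the whole row is a hypothetical choice:
#
#     for c in current_datas:
#         if c[5] not in current_block_codes:
#             current_block_codes[c[5]] = c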

def do_GET(self):
    path = self.path
    url = urlparse.urlparse(path)
    thread_id = random.randint(0, 1000000)
    logger_request_debug.info(f"GET request started ({thread_id}): {url.path}")
    log.request_info("DATA_SERVER_GET", f"GET request started: {url.path}")
    try:
        if url.path == "/kpl/get_limit_up_list":
            response_data = self.__get_limit_up_list()

        result = hosting_api_util.get_from_data_server(url.path, ps_dict)
        self.__send_response(result)
    finally:
        logger_request_debug.info(f"GET request finished ({thread_id}): {url.path}")

        log.request_info("DATA_SERVER_GET", "GET request finished")

def do_POST(self):
    thread_id = random.randint(0, 1000000)
    path = self.path
    url = urlparse.urlparse(path)
    logger_request_debug.info(f"POST request started ({thread_id}): {url.path}")
    log.request_info("DATA_SERVER_POST", f"POST request started: {url.path}")
    try:
        if url.path == "/upload_kpl_data":
            # Receive KaiPanLa (KPL) data

            result_str = self.__process_kpl_data(params)
            self.__send_response(result_str)
    finally:
        logger_request_debug.info(f"POST request finished ({thread_id}): {url.path}")
        log.request_info("DATA_SERVER_POST", "POST request finished")

def __process_kpl_data(self, data):
    # Strip the "概念" ("concept") suffix from every string via a JSON round
    # trip; ensure_ascii=False keeps Chinese characters literal so the
    # replacement can actually match (the default would escape them to \uXXXX).
    data = json.loads(json.dumps(data, ensure_ascii=False).replace("概念", ""))
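
# A small self-contained illustration of the round trip above (the sample
# data is hypothetical):
#
#     import json
#     raw = {"blocks": [["880710", "锂电池概念"]]}
#     cleaned = json.loads(json.dumps(raw, ensure_ascii=False).replace("概念", ""))
#     # cleaned == {"blocks": [["880710", "锂电池"]]}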

"""
Logging
"""
import queue
import sys
import time

from loguru import logger
import constant
from utils import tool


class MyLogger:

logger_profile = __mylogger.get_logger("profile")

logger_request_debug = __mylogger.get_logger("request_debug")

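# The MyLogger body (and the __mylogger instance it backs) is elided above.
# A minimal sketch of what get_logger could look like with loguru; the file
# paths and the bind/filter scheme are assumptions, not the project's actual
# implementation:
#
#     class MyLogger:
#         def __init__(self):
#             self.__loggers = {}
#
#         def get_logger(self, name):
#             if name not in self.__loggers:
#                 logger.add(f"logs/{name}.log",
#                            filter=lambda r, n=name: r["extra"].get("name") == n)
#                 self.__loggers[name] = logger.bind(name=name)
#             return self.__loggers[name]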

class AsyncLogManager:
    # Shared queue of pending log records: (logger, timestamp, method, args)
    __log_queue = queue.Queue()

    def __add_log(self, logger, method, *args):
        self.__log_queue.put_nowait((logger, time.time(), method, args))

    def debug(self, logger, *args):
        self.__add_log(logger, "debug", *args)

    def info(self, logger, *args):
        self.__add_log(logger, "info", *args)

    def warning(self, logger, *args):
        self.__add_log(logger, "warning", *args)

    def error(self, logger, *args):
        self.__add_log(logger, "error", *args)

    def exception(self, logger, *args):
        self.__add_log(logger, "exception", *args)

    # Drain the queue and write the buffered records synchronously
    def run_sync(self):
        while True:
            try:
                val = self.__log_queue.get()
                time_s = val[1]
                cmd = val[2]
                method = getattr(val[0], cmd)
                d = list(val[3])
                # Prefix the first argument with the enqueue time, down to
                # milliseconds, so a record carries when it was produced
                # rather than when it was written.
                d[0] = f"[{tool.to_time_str(int(time_s))}.{str(time_s).split('.')[1][:3]}] " + d[0]
                d = tuple(d)
                method(*d)
            except Exception:
                # Never let a malformed record kill the drain loop.
                pass


async_log_util = AsyncLogManager()

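# Typical usage (a sketch, mirroring how the entry point below starts it):
# one daemon thread drains the queue, any thread can log without blocking
# on file I/O:
#
#     threading.Thread(target=async_log_util.run_sync, daemon=True).start()
#     async_log_util.info(logger_request_debug, "server started")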

def request_info(type_name, content, thread_id=None):
    if not thread_id:
        thread_id = tool.get_thread_id()
    async_log_util.info(logger_request_debug, f"【{thread_id}】【{type_name}】 {content}")


import threading

import constant
import data_server
import log
import middle_api_server
import middle_server

# The creation of the first thread is elided in this excerpt; judging by the
# imports it presumably serves middle_api_server.
t1.start()
t1 = threading.Thread(target=lambda: data_server.run("0.0.0.0", constant.DATA_SERVER_PORT), daemon=True)
t1.start()
t1 = threading.Thread(target=lambda: log.async_log_util.run_sync(), daemon=True)
t1.start()
middle_server.run()
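
# async_log_util only writes once run_sync is running, so anything the
# servers log before this point just sits in the queue. A sketch of an
# ordering that drains from the start (not the original order):
#
#     threading.Thread(target=log.async_log_util.run_sync, daemon=True).start()
#     # ... then start the data/middle servers ...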
| | |
| | | import time |
| | | |
| | | import constant |
| | | import log |
| | | import socket_manager |
| | | import trade_manager |
| | | from db import mysql_data, redis_manager |
| | |
| | | # print("收到数据------", f"{data_str[:20]}......{data_str[-20:]}") |
| | | data_json = json.loads(data_str) |
| | | type_ = data_json['type'] |
| | | thread_id = random.randint(0, 1000000) |
| | | try: |
| | | logger_request_debug.info(f"middle_api_server 请求开始({thread_id}):{type_}") |
| | | log.request_info("middle_api_server", f"请求开始:{type_}") |
| | | if type(type_) == int: |
| | | # 处理数字型TYPE |
| | | return_str = self.process_num_type(sk, type_, data_str) |
| | |
| | | result = hosting_api_util.common_request(params) |
| | | return_str = json.dumps(result) |
| | | finally: |
| | | logger_request_debug.info(f"middle_api_server 请求结束({thread_id}):{type}") |
| | | log.request_info("middle_api_server", f"请求结束:{type_}") |
| | | break |
| | | # sk.close() |
| | | except Exception as e: |

import time

import constant
import log
import socket_manager
from db import mysql_data
from db.redis_manager import RedisUtils, RedisManager

    {"code": 100, "msg": "JSON parsing failed"}).encode(
        encoding='utf-8')))
continue
thread_id = random.randint(0, 1000000)
logger_request_debug.info(f"middle_server request started ({thread_id}): {data_json.get('type')}")
type_ = data_json["type"]
log.request_info("middle_server", f"request started: {type_}")
try:
    if data_json["type"] == 'register':
        client_type = data_json["data"]["client_type"]

    sk.sendall(socket_util.load_header(result_str.encode(encoding='utf-8')))
    pass
finally:
    logger_request_debug.info(f"middle_server request finished ({thread_id}): {data_json.get('type')}")
    log.request_info("middle_server", "request finished")

else:
    # Disconnect
    break
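
# socket_util.load_header is not shown in this excerpt. A minimal sketch of a
# length-prefix framing helper like it (the 4-byte big-endian length header is
# an assumption, not necessarily the project's actual wire format):
#
#     import struct
#
#     def load_header(payload: bytes) -> bytes:
#         return struct.pack(">I", len(payload)) + payload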

"""
Common utilities
"""
import ctypes
import decimal
import random
import threading
import time
import time as t
import datetime

    if code in cache_dict:
        return True, cache_dict.get(code)
    return False, None


def get_thread_id():
    try:
        if constant.is_windows():
            return threading.current_thread().ident
        else:
            # 186 is the gettid syscall number on x86-64 Linux
            thread_id = ctypes.CDLL('libc.so.6').syscall(186)
            return thread_id
    except Exception:
        pass
    return None
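
# A quick usage sketch (assumes Linux x86-64 or Windows; each worker should
# report a distinct thread id):
if __name__ == "__main__":
    def _worker():
        print("thread id:", get_thread_id())

    _threads = [threading.Thread(target=_worker) for _ in range(3)]
    for _t in _threads:
        _t.start()
    for _t in _threads:
        _t.join()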