Explorar el Código

更新提交使用版

Daniel hace 9 meses
padre
commit
6eea18cc27

+ 13 - 0
.idea/dataSources.xml

@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+  <component name="DataSourceManagerImpl" format="xml" multifile-model="true">
+    <data-source source="LOCAL" name="@localhost" uuid="2ee82303-47d8-4121-993c-602fa77a41a2">
+      <driver-ref>mysql.8</driver-ref>
+      <synchronize>true</synchronize>
+      <remarks>stock</remarks>
+      <jdbc-driver>com.mysql.cj.jdbc.Driver</jdbc-driver>
+      <jdbc-url>jdbc:mysql://localhost:3307</jdbc-url>
+      <working-dir>$ProjectFileDir$</working-dir>
+    </data-source>
+  </component>
+</project>

+ 3 - 17
.idea/deployment.xml

@@ -1,30 +1,16 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <project version="4">
-  <component name="PublishConfigData" autoUpload="Always" serverName="root@localhost:8722 password (3)" remoteFilesAllowedToDisappearOnAutoupload="false" confirmBeforeUploading="false">
+  <component name="PublishConfigData" serverName="root@localhost:8722 password" remoteFilesAllowedToDisappearOnAutoupload="false" confirmBeforeUploading="false">
     <option name="confirmBeforeUploading" value="false" />
     <serverData>
       <paths name="root@localhost:8722 password">
         <serverdata>
           <mappings>
-            <mapping deploy="/tmp/pycharm_project_662" local="$PROJECT_DIR$" />
-          </mappings>
-        </serverdata>
-      </paths>
-      <paths name="root@localhost:8722 password (2)">
-        <serverdata>
-          <mappings>
-            <mapping deploy="/tmp/pycharm_project_952" local="$PROJECT_DIR$" />
-          </mappings>
-        </serverdata>
-      </paths>
-      <paths name="root@localhost:8722 password (3)">
-        <serverdata>
-          <mappings>
-            <mapping deploy="/Git/tmp" local="$PROJECT_DIR$/../backtest" />
+            <mapping deploy="/tmp/pycharm_project_858" local="$PROJECT_DIR$" />
+            <mapping deploy="/tmp/pycharm_project_585" local="$PROJECT_DIR$/../backtest" />
           </mappings>
         </serverdata>
       </paths>
     </serverData>
-    <option name="myAutoUpload" value="ALWAYS" />
   </component>
 </project>

+ 6 - 1
.idea/misc.xml

@@ -1,6 +1,11 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <project version="4">
-  <component name="ProjectRootManager" version="2" project-jdk-name="Remote Python 3.10.11 (sftp://root@localhost:8722/conda/envs/rapids/bin/python3.10)" project-jdk-type="Python SDK" />
+  <component name="Black">
+    <option name="executionMode" value="BINARY" />
+    <option name="pathToExecutable" value="C:\Users\Daniel\anaconda3\Scripts\black.exe" />
+    <option name="sdkName" value="openAI" />
+  </component>
+  <component name="ProjectRootManager" version="2" project-jdk-name="Quant" project-jdk-type="Python SDK" />
   <component name="PyPackaging">
     <option name="earlyReleasesAsUpgrades" value="true" />
   </component>

+ 1 - 0
.idea/modules.xml

@@ -4,6 +4,7 @@
     <modules>
       <module fileurl="file://$PROJECT_DIR$/../AI/.idea/AI.iml" filepath="$PROJECT_DIR$/../AI/.idea/AI.iml" />
       <module fileurl="file://$PROJECT_DIR$/../backtest/.idea/backtest.iml" filepath="$PROJECT_DIR$/../backtest/.idea/backtest.iml" />
+      <module fileurl="file://$USER_HOME$/PycharmProjects/openAI/.idea/openAI.iml" filepath="$USER_HOME$/PycharmProjects/openAI/.idea/openAI.iml" />
       <module fileurl="file://$USER_HOME$/PycharmProjects/pythonProject/.idea/pythonProject.iml" filepath="$USER_HOME$/PycharmProjects/pythonProject/.idea/pythonProject.iml" />
       <module fileurl="file://$USER_HOME$/Library/CloudStorage/OneDrive-个人/个人/python_stocks/quantify01/.idea/quantify01.iml" filepath="$USER_HOME$/Library/CloudStorage/OneDrive-个人/个人/python_stocks/quantify01/.idea/quantify01.iml" />
       <module fileurl="file://$PROJECT_DIR$/.idea/stock.iml" filepath="$PROJECT_DIR$/.idea/stock.iml" />

+ 1 - 0
.idea/stock.iml

@@ -9,5 +9,6 @@
     <orderEntry type="module" module-name="pythonProject" />
     <orderEntry type="module" module-name="AI" />
     <orderEntry type="module" module-name="backtest" />
+    <orderEntry type="module" module-name="openAI" />
   </component>
 </module>

+ 177 - 0
QMT/111.py

@@ -0,0 +1,177 @@
+import multiprocessing as mp
+from concurrent import futures
+import concurrent.futures
+import logging
+from sqlalchemy import create_engine, text
+import pandas as pd
+from myindicator import myind
+from pandas.testing import assert_frame_equal
+from multiprocessing import freeze_support, Value, Lock
+import logging
+from datetime import datetime as dt
+import traceback
+import dask.dataframe as dd
+from dask import delayed, compute
+from concurrent.futures import ThreadPoolExecutor
+import threading
+from itertools import islice
+
+pd.set_option('display.max_columns', None)  # 设置显示最大列
+
+
+def error(msg, *args):
+    return mp.get_logger().error(msg, *args)
+
+
+class LogExceptions(object):
+    def __init__(self, callable):
+        self.__callable = callable
+        return
+
+    def __call__(self, *args, **kwargs):
+        try:
+            result = self.__callable(*args, **kwargs)
+
+        except Exception as e:
+            # Here we add some debugging help. If multiprocessing's
+            # debugging is on, it will arrange to log the traceback
+            error(traceback.format_exc())
+            # Re-raise the original exception so the Pool worker can
+            # clean up
+            raise
+
+        # It was fine, give a normal answer
+        return result
+
+    pass
+
+
+def err_call_back(err):
+    print(f'问题在这里~ error:{str(err)}')
+    traceback.print_exc()
+
+
+def chunked_iterable(iterable, size):
+    """将可迭代对象分割为指定大小的块"""
+    it = iter(iterable)
+    while True:
+        chunk = tuple(islice(it, size))
+        if not chunk:
+            return
+        yield chunk
+
+def assert_frame_equal(df, u):
+    # print(f'{u}开始')
+    data_temp = df
+    st = dt.now()
+    
+    myind.get_macd_data(df)
+    df_temp, trading_signals= myind.get_hlfx(df)
+    try:
+        df_temp_2, trading_signals_2 = myind.get_hlfx_optimization(df)
+        # myind.get_ddfx(df, data_temp, u)
+        # print(f'get_ddfx', u, u, u)
+        # df_temp, t_signals = myind.get_hlfx(df)
+    except BaseException as e:
+        print('err', e)
+    # print('df_temp', df_temp)
+    # print('df_temp_2', df_temp_2)
+    try:
+        print('tttttt', df_temp.equals(df_temp_2))
+    except BaseException as e:
+        print('err', e)
+    return df_temp
+
+
+def t(df, u):
+    st = dt.now()
+    t = pd.DataFrame()
+    for i in range(len(df)):
+        t = pd.concat([t.copy(), df.loc[i].to_frame().T], axis=0)
+    t.loc['HL'] = 1
+    print(f'{u}完成,{dt.now() - st}')
+
+
+def ts(df, u):
+    print(f'{u}开始')
+    st = dt.now()
+    t_list = []  # 创建一个空列表用于保存每次连接的DataFrame
+    for i in range(len(df)):
+        t_list.append(df.loc[i].to_frame().T)
+    t = dd.concat(t_list, axis=0)  # 一次性执行连接操作
+    t = t.assign(HL=1)  # 在Dask DataFrame中添加一列HL并赋值为1
+    result = compute(t)  # 执行计算
+    # print(result)
+    print(f'{u}完成,{dt.now() - st}')
+
+'''
+# 主函数
+if __name__ == '__main__':
+    freeze_support()
+    logger = mp.log_to_stderr()
+    logger.setLevel(logging.DEBUG)
+
+
+    engine = create_engine(
+        'mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_whole?charset=utf8')
+
+    df = pd.read_sql_table('000001.SZ_1d', con=engine)
+
+
+
+    # pool = futures.ProcessPoolExecutor(max_workers=24)
+    # pool.map(assert_frame_equal(df, range(5000)))
+    # with concurrent.futures.ProcessPoolExecutor(max_workers=24) as executor:
+    #     for i in range(5000):
+    #         # executor.submit(LogExceptions(assert_frame_equal), df, i)
+    #         executor.submit(assert_frame_equal, df, i)
+    #         print(i)
+    pool = mp.Pool(24)
+    for j in range(5000):
+    #     pool.apply_async(LogExceptions(assert_frame_equal), args=(df, j))
+    #     pool.apply_async(func=assert_frame_equal, args=(df, j), error_callback=err_call_back)
+    #     pool.apply_async(func=t, args=(df, j))
+        pool.apply_async(func=ts, args=(df, j))
+    #     # pool.map_async(func=assert_frame_equal, iterable=[df], chunksize=1)
+    #     print(j)
+    pool.close()
+    pool.join()
+'''
+
+
+if __name__ == '__main__':
+    mp.freeze_support()
+    logger = mp.log_to_stderr()
+    # logger.setLevel(logging.DEBUG)
+    
+    engine = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_whole?charset=utf8')
+
+    df = pd.read_sql_table('000001.SZ_1d', con=engine)
+    # print(df)
+    # a, b= assert_frame_equal(df, 1)
+    # print(a,b)
+    # exit()
+    df_dict = {f'{i:06}.SZ_1d': df.copy() for i in range(1, 11)}
+    print(len(df_dict))
+    # exit()
+    
+    async_results = []
+    
+    # 设置每一轮的任务数
+    CHUNK_SIZE = 50  # 您可以根据需要进行调整
+    
+    for chunk in chunked_iterable(df_dict.items(), CHUNK_SIZE):
+        print(f'chunk:{chunk[0][0]}-{chunk[-1][0]}')
+        with mp.Pool(processes=min(CHUNK_SIZE, len(chunk), 24)) as pool:  # 使用最小值确保不会超出任务数或超过24核心
+            for stock, df_stock in chunk:
+                async_result = pool.apply_async(func=assert_frame_equal, args=(df_stock, stock))
+                async_results.append(async_result)
+            
+            pool.close()
+            pool.join()
+    exit()
+
+    # 在主进程中统一处理结果
+    for res in async_results:
+        print(res.get())
+

+ 47 - 0
QMT/1m_real_time.py

@@ -0,0 +1,47 @@
+# coding:utf-8
+from datetime import datetime as dt
+import os
+import pandas as pd
+from xtquant.xttrader import XtQuantTrader, XtQuantTraderCallback
+from xtquant.xttype import StockAccount
+from xtquant import xtdata, xtconstant
+import time
+from sqlalchemy import create_engine, text
+from jqdatasdk import *
+import pymysql
+import multiprocessing as mp
+import math
+import psutil
+import datetime
+from apscheduler.schedulers.blocking import BlockingScheduler
+import sys
+
+
+df = pd.DataFrame(columns=['time', 'open', 'close', 'high', 'low', 'volume', 'amount'])
+
+def avg_price(df):
+        print('avg_price')
+        daily_avg_price = []
+        for i in range(1, len(df) + 1):
+            daily_avg_price.append(round(sum(df['amount'][:i]) / sum(df['volume'][:i]) / 100, 2))
+        df['daily_avg_price'] = daily_avg_price
+        return df
+
+def over_avg(datas):
+    global df
+    for stock_code in datas:
+        df_temp = pd.DataFrame(datas[stock_code])
+        df_temp['time'] = df_temp['time'].apply(lambda x: dt.fromtimestamp(x / 1000.0))
+        df = pd.concat([df, df_temp], axis=0, ignore_index=True)
+        # df = avg_price(df)
+        # df = df[['time', 'open', 'close', 'high', 'low', 'volume', 'amount', 'daily_avg_price']]
+        print(df)
+        # print(stock_code, datas[stock_code])
+
+
+if __name__ == '__main__':
+    print('start')
+    t = dt.now().strftime('%Y%m%d')
+    # stocks = xtdata.get_stock_list_in_sector('沪深A股')
+    xtdata.subscribe_quote('000001.SH', period='1m', start_time=t, callback=over_avg)
+    xtdata.run()

+ 237 - 0
QMT/222.py

@@ -0,0 +1,237 @@
+# coding:utf-8
+import pandas as pd
+import pymysql
+from sqlalchemy import create_engine, text
+import threading
+from datetime import datetime as dt
+import datetime
+from jqdatasdk.technical_analysis import *
+from xtquant import xtdata, xtconstant
+from xtquant.xttype import StockAccount
+from xtquant.xttrader import XtQuantTrader, XtQuantTraderCallback
+import time
+import math
+import multiprocessing as mp
+import os
+import psutil
+import traceback
+from apscheduler.schedulers.blocking import BlockingScheduler
+import sys
+
+pd.set_option('display.max_columns', None)  # 设置显示最大列
+# 全局变量 stocks_dict
+stocks_dict = {}
+stocks = xtdata.get_stock_list_in_sector('沪深A股')
+for stock in stocks:
+    stocks_dict[stock] = []
+print(stocks_dict)
+
+
+class MyXtQuantTraderCallback(XtQuantTraderCallback):
+    def on_disconnected(self):
+        """
+        连接断开
+        :return:
+        """
+        print(datetime.datetime.now(), '连接断开回调')
+
+    def on_stock_order(self, order):
+        """
+        委托回报推送
+        :param order: XtOrder对象
+        :return:
+        """
+        print(datetime.datetime.now(), '委托回调', order.order_remark)
+
+    def on_stock_trade(self, trade):
+        """
+        成交变动推送
+        :param trade: XtTrade对象
+        :return:
+        """
+        print(datetime.datetime.now(), '成交回调', trade.order_remark)
+
+    def on_order_error(self, order_error):
+        """
+        委托失败推送
+        :param order_error:XtOrderError 对象
+        :return:
+        """
+        # print("on order_error callback")
+        # print(order_error.order_id, order_error.error_id, order_error.error_msg)
+        print(f"委托报错回调 {order_error.order_remark} {order_error.error_msg}")
+
+    def on_cancel_error(self, cancel_error):
+        """
+        撤单失败推送
+        :param cancel_error: XtCancelError 对象
+        :return:
+        """
+        print(datetime.datetime.now(), sys._getframe().f_code.co_name)
+
+    def on_order_stock_async_response(self, response):
+        """
+        异步下单回报推送
+        :param response: XtOrderResponse 对象
+        :return:
+        """
+        print(f"异步委托回调 {response.order_remark}")
+
+    def on_cancel_order_stock_async_response(self, response):
+        """
+        :param response: XtCancelOrderResponse 对象
+        :return:
+        """
+        print(datetime.datetime.now(), sys._getframe().f_code.co_name)
+
+    def on_account_status(self, status):
+        """
+        :param response: XtAccountStatus 对象
+        :return:
+        """
+        print(datetime.datetime.now(), sys._getframe().f_code.co_name)
+
+
+def err_call_back(err):
+    print(f'问题在这里~ error:{str(err)}')
+    traceback.print_exc()
+
+
+def run(seq):
+    mor = datetime.datetime.strptime(
+        str(dt.now().date()) + '11:30', '%Y-%m-%d%H:%M')
+    afternoon = datetime.datetime.strptime(
+        str(dt.now().date()) + '15:00', '%Y-%m-%d%H:%M')
+    mor_1 = datetime.datetime.strptime(
+        str(dt.now().date()) + '11:10', '%Y-%m-%d%H:%M')
+    """阻塞线程接收行情回调"""
+    import time
+    client = xtdata.get_client()
+    while True:
+        now_date = dt.now()
+        if not client.is_connected():
+            xtdata.unsubscribe_quote(seq)
+            raise Exception('行情服务连接断开')
+        # if mor < dt.now() < mor_1:
+        #     xtdata.unsubscribe_quote(seq)
+        #     print(f'现在时间:{dt.now()},已休市')
+        #     sys.exit()
+        #     break
+        #     return 0
+        elif dt.now() > afternoon:
+            xtdata.unsubscribe_quote(seq)
+            print(f'现在时间:{dt.now()},已收盘')
+            sys.exit()
+            break
+
+    return
+
+
+def speed(stock_list, datas):
+    global stocks_dict
+    print('..............................',stocks_dict)
+    for i in range(len(stock_list)):
+        stock = stock_list[i]
+        try:
+            stocks_dict[stock].append(datas[stock]['lastPrice'])
+
+        except BaseException as e:
+            print(e)
+        print(stock, stocks_dict[stock], datas[stock]['lastPrice'])
+        # print(datas[i]['lastPrice'], stocks_dict[i])
+    '''    
+    for stock in stock_list:
+        print(stock, datas[stock]['lastPrice'])
+        print(stocks_dict[stock])
+        try:
+            stocks_dict[stock].append(datas[stock]['lastPrice'])
+        except BaseException as e:
+            print(e)
+        print(stock, stocks_dict[stock], datas[stock]['lastPrice'])
+    
+    '''
+
+
+
+
+def prepare(datas):
+    for k, v in datas.items():
+        # print(k)
+        stocks_dict[k].append(v['lastPrice'])
+        print(k, stocks_dict[k], v['lastPrice'])
+        # stocks_dict[stock] = datas[stock]['lastPrice']
+
+        # 将数据添加至全局变量 stocks_dict[stock] key为股票代码,值为最新价
+        # print(stocks_dict[stock])
+    '''
+    stock_list = list(datas.keys())
+    if len(datas.keys()) >= 12:
+        cpu_count = 12
+    else:
+        cpu_count = len(datas.keys())
+    step = math.ceil(len(stock_list) / cpu_count)
+
+    to_list = []
+    for i in range(0, len(stock_list), step):
+        to_list.append([x for x in stock_list[i:i + step]])
+
+    pool = mp.Pool(processes=int(cpu_count/2))
+    for m in range(len(to_list)):
+        pool.apply_async(func=speed,
+                         args=(to_list[m], datas), error_callback=err_call_back)
+    pool.close()
+    pool.join()
+    '''
+
+
+
+
+def bridge():
+    # 全局变量 stocks_dict
+    global stocks
+    print(f'bridge is {os.getpid()}, now is {dt.now()},开盘了')
+
+    # stocks_dict = dict.fromkeys(stocks, [])
+    # print(stocks_dict)
+    seq = xtdata.subscribe_whole_quote(stocks, callback=prepare)
+    # 建立一个以stocks中值为键,值为空列表的字典
+
+    # print(f'stocks_dict is {stocks_dict}')
+    run(seq)
+
+
+if __name__ == '__main__':
+    print(f'总进程pid:{os.getpid()}')
+    mp.freeze_support()
+    pus = psutil.Process()
+    # pus.cpu_affinity([0, 1, 2, 3, 4, 5, 6, 7])
+
+    path = r'c:\\qmt\\userdata_mini'
+    # 生成session id 整数类型 同时运行的策略不能重复
+    session_id = int(time.time())
+    xt_trader = XtQuantTrader(path, session_id)
+    # 创建资金账号为 800068 的证券账号对象
+    acc = StockAccount('920000207040', 'SECURITY')
+    # 创建交易回调类对象,并声明接收回调
+    callback = MyXtQuantTraderCallback()
+    xt_trader.register_callback(callback)
+    # 启动交易线程
+    xt_trader.start()
+    # 建立交易连接,返回0表示连接成功
+    connect_result = xt_trader.connect()
+    print('建立交易连接,返回0表示连接成功', connect_result)
+    # 对交易回调进行订阅,订阅后可以收到交易主推,返回0表示订阅成功
+    subscribe_result = xt_trader.subscribe(acc)
+    print('对交易回调进行订阅,订阅后可以收到交易主推,返回0表示订阅成功', subscribe_result)
+
+    bridge()
+
+    scheduler = BlockingScheduler()
+    scheduler.add_job(func=bridge, trigger='cron', day_of_week='0-4', hour='09', minute='25',
+                      timezone="Asia/Shanghai", max_instances=5)
+    # # scheduler.add_job(func=job_func, trigger='cron', day_of_week='0-4', hour='13', minute='00',
+    # #                   timezone="Asia/Shanghai")
+    try:
+        scheduler.start()
+    except (KeyboardInterrupt, SystemExit):
+        pass

+ 26 - 11
QMT/230504_real_time.py

@@ -17,15 +17,17 @@ from apscheduler.schedulers.blocking import BlockingScheduler
 import sys
 
 # 指定客户端所在路径
-path = r'c:\\qmt\\userdata_mini'
+# path = r'c:\\qmt\\userdata_mini'
+path = 'C:\\方正证券FQT交易客户端\\userdata_mini'
 # 创建资金账号为 800068 的证券账号对象
-acc = StockAccount('920000207040', 'SECURITY')
+# acc = StockAccount('920000207040', 'SECURITY')
+acc = StockAccount('2162003533')
 # 生成session id 整数类型 同时运行的策略不能重复
 session_id = 123456
 xt_trader = None
 engine_stock = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_whole?charset=utf8',
                              pool_size=5000, pool_recycle=50, max_overflow=-1)
-auth('18616891214', 'Ea?*7f68nD.dafcW34d!')
+# auth('18616891214', 'Ea?*7f68nD.dafcW34d!')
 
 
 class MyXtQuantTraderCallback(XtQuantTraderCallback):
@@ -179,7 +181,10 @@ def ma_judge(data, list_judge, rate, results):
         MA5_1 = ma_1(stock, 5)
         # print(i, current_price, open_price, MA5, MA10, MA20, MA5_1)
         # 入交易池标准:阳线\大于MA5\MA5向上\MA20<MA10\离120线有距离
-        if (current_price > open_price) & (current_price > MA5) & (MA5 > MA5_1) & (current_price < MA5 * 1.05) \
+        # if (current_price > open_price) & (current_price > MA5) & (MA5 > MA5_1*1.0098) & (current_price < MA5 * 1.05) \
+        #         & (current_price > MA120 or current_price < MA120 * rate) & (MA5 < MA10*0.99):
+        if (current_price > open_price) & (current_price > MA5) & (
+                current_price < MA5 * 1.05) \
                 & (current_price > MA120 or current_price < MA120 * rate):
             if his_vol(stock, -1) > his_vol(stock, -2):
                 results.append(stock.replace('SH', 'XSHG').replace('SZ', 'XSHE'))
@@ -229,8 +234,13 @@ def sell_trader(data):
         engine_stock.dispose()
 
 
+def over_avg(stock):
+    pass
+
+
+
 def buy_trader(data):
-    # print('买入函数:', dt.now(), f'接受到{len(data.keys())}个个股')
+    print('买入函数:', dt.now(), f'接受到{len(data.keys())}个个股')
     results = mp.Manager().list()
     mp_list = []
     engine_hlfx_pool = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/hlfx_pool?charset=utf8',
@@ -240,19 +250,22 @@ def buy_trader(data):
         stock_pool = pd.read_sql_query(
             text('select value from `%s` order by `index` desc limit 10' % '1d'), engine_hlfx_pool.connect())
         stock_pool = stock_pool.iloc[0, 0].split(",")
+        print(stock_pool)
+        print('---------')
         stock_pool.sort()
-        print('stock_pool', len(stock_pool))
+        # print('stock_pool', len(stock_pool))
 
     except BaseException as e:
         print(e)
 
     if len(stock_pool) != 0:
+        print(set(data.keys()))
         list_judge = list(set(data.keys()) & set(stock_pool))
         print(f'本轮有{len(data.keys())}条个股信息,而list_judge有:{len(list_judge)}')
     else:
         print(f'stock_pool为{len(stock_pool)}个')
     step = math.ceil(len(list_judge) / 4)
-    rate = 0.8
+    rate = 0.9
     if len(list_judge) != 0:
         print(f'list_judge:{list_judge}')
         for i in range(0, len(list_judge), step):
@@ -320,6 +333,8 @@ def buy_trader(data):
                 current_price = data[stock]['lastPrice']
                 current_high = data[stock]['high']
                 if stock not in positions_dict:
+                    overavg = over_avg(stock)
+
                     if len(positions_dict) < max_pos and current_price > 9 \
                             and current_price > (current_high * 0.98):
                         if 5000 > cash:
@@ -337,7 +352,7 @@ def buy_trader(data):
                 else:
                     print(f'{stock}已持仓!')
     engine_hlfx_pool.dispose()
-    print('一轮结束了,现在时间是:', dt.now())
+    # print('一轮结束了,现在时间是:', dt.now())
 
 
 def trader(data):
@@ -345,7 +360,7 @@ def trader(data):
     # print(f'xt_trader = {xt_trader},{session_id}')
     # print(len(xt_trader.query_stock_positions(acc)))
     # 卖出判断
-    sell_trader(data)
+    # sell_trader(data)
     # 买入条件
     buy_trader(data)
 
@@ -406,9 +421,9 @@ if __name__ == '__main__':
     mp.freeze_support()
     # print('cpu_count =', mp.cpu_count())
     pus = psutil.Process()
-    pus.cpu_affinity([12, 13, 14, 15])
+    # pus.cpu_affinity([10, 11, 12, 13, 14, 15])
 
-    # job_func()
+    job_func()
 
     scheduler = BlockingScheduler()
     scheduler.add_job(func=job_func, trigger='cron', day_of_week='0-4', hour='09', minute='40',

+ 182 - 59
QMT/230715_get_indicators.py

@@ -1,4 +1,6 @@
 # coding:utf-8
+import time
+
 from datetime import datetime as dt
 import socket
 import pandas as pd
@@ -8,6 +10,7 @@ from jqdatasdk import *
 import pymysql
 import multiprocessing as mp
 from multiprocessing import freeze_support
+import concurrent.futures
 import math
 import talib as ta
 import os
@@ -16,6 +19,9 @@ import random
 import logging
 from myindicator import myind
 import psutil
+from tqdm import tqdm
+from itertools import islice
+from func_timeout import func_set_timeout, FunctionTimedOut
 from apscheduler.schedulers.blocking import BlockingScheduler
 
 # 显示最大行与列
@@ -27,11 +33,15 @@ logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(
 
 # 创建连接池
 engine = create_engine(
-    'mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_whole?charset=utf8')
+    'mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_whole?charset=utf8', pool_recycle=3600, pool_size=100,
+    max_overflow=20)
 engine_tech = create_engine(
-    'mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_tech?charset=utf8')
-engine_tech2 = create_engine(
-    'mysql+pymysql://root:r6kEwqWU9!v3@localhost:3308/qmt_stocks_tech?charset=utf8')
+    'mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_tech?charset=utf8', pool_size=100, pool_recycle=3600,
+    max_overflow=20)
+
+
+# engine_tech2 = create_engine(
+#     'mysql+pymysql://root:r6kEwqWU9!v3@localhost:3308/qmt_stocks_tech?charset=utf8', pool_size=100, max_overflow=20)
 
 
 def err_call_back(err):
@@ -39,60 +49,69 @@ def err_call_back(err):
     traceback.print_exc()
 
 
-def tech_anal(stock, fre, hlfx_pool, hlfx_pool_daily, err_list):
+def tech_anal(stock, df_stock, fre, hlfx_pool, hlfx_pool_daily, err_list):
+    import pandas as pd
     t_signals = 0
     global engine
     global engine_tech
-    global engine_tech2
+    # global engine_tech2
 
     try:
-        con_engine = engine.connect()
-        con_engine_tech = engine_tech.connect()
-        con_engine_tech2 = engine_tech2.connect()
+        # con_engine = engine.connect()
+        # con_engine_tech = engine_tech.connect()
+        # con_engine_tech2 = engine_tech2.connect()
         try:
-            table_name = f'{stock}_{fre}'
+            # table_name = f'{stock}_{fre}'
             # 从engine中读取table_name表存入df
-            df = pd.read_sql_table(table_name, con=engine)
+            # df = pd.read_sql_table(table_name, con=engine)
+            table_name = stock
+            df = df_stock
             df.dropna(axis=0, how='any')
         except BaseException as e:
             print(f"{stock}读取有问题")
             traceback.print_exc()
-            err_list.append(stock)
+            err_list.append(stock[0:9])
         else:
             if len(df) != 0:
                 # 计算技术指标
+                print(f'{stock}开始计算技术指标')
                 try:
                     myind.get_macd_data(df)
-                    df_temp, t_signals = myind.get_hlfx(df)
                     myind.get_ris(df)
                     myind.get_bias(df)
                     myind.get_wilr(df)
+                    df = df.round(2)
+                    df_temp, t_signals = myind.get_hlfx(df)
                     df = pd.merge(df, df_temp, on='time', how='left')
                     df['HL'].fillna(value='-', inplace=True)
                     df = df.reset_index(drop=True)
+                    df = df.replace([np.inf, -np.inf], np.nan)
+                    df = df.round(2)
                 except BaseException as e:
                     print(f'{stock}计算有问题', e)
                 else:
+                    # 存入数据库
                     try:
-                        df = df.replace([np.inf, -np.inf], np.nan)
-                        df.to_sql('%s_1d' % stock, con=engine_tech, index=False, if_exists='replace')
-                        df.to_sql('%s_1d' % stock, con=engine_tech2, index=False, if_exists='replace')
+                        # pass
+                        df.to_sql('%s' % stock, con=engine_tech, index=False, if_exists='replace')
+                        # df.to_sql('%s_1d' % stock, con=engine_tech2, index=False, if_exists='replace')
                     except BaseException as e:
                         print(f'{stock}存储有问题', e)
                         traceback.print_exc()
-                        err_list.append(stock)
+                        err_list.append(stock[0:9])
             else:
-                err_list.append(stock)
+                err_list.append(stock[0:9])
                 print(f'{stock}数据为空')
         finally:
             if stock in hlfx_pool and t_signals == 2:
                 hlfx_pool.remove(stock)
             elif stock not in hlfx_pool and t_signals == 1:
-                hlfx_pool.append(stock)
-                hlfx_pool_daily.append(stock)
-            con_engine.close()
-            con_engine_tech.close()
-            con_engine_tech2.close()
+                hlfx_pool.append(stock[0:9])
+                hlfx_pool_daily.append(stock[0:9])
+            # con_engine.close()
+            # con_engine_tech.close()
+            print(f'{stock}计算完成!')
+            # con_engine_tech2.close()
             # print(f"{stock}, {T_signals}, '\n', {df_temp.head(20)}")
             # print(f'{stock}计算完成!')
 
@@ -102,7 +121,42 @@ def tech_anal(stock, fre, hlfx_pool, hlfx_pool_daily, err_list):
 
     engine.dispose()
     engine_tech.dispose()
-    engine_tech2.dispose()
+    # engine_tech2.dispose()
+
+
+def query_database(table_name):
+    engine = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_whole?charset=utf8')
+    df = pd.read_sql_table(table_name, engine)
+    engine.dispose()
+    return df
+
+
+def get_stock_data():
+    while True:
+        try:
+            db = pymysql.connect(host='localhost',
+                                 user='root',
+                                 port=3307,
+                                 password='r6kEwqWU9!v3',
+                                 database='qmt_stocks_whole')
+            cursor = db.cursor()
+            cursor.execute("show tables like '%%%s%%' " % '1d')
+            table_list = [tuple[0] for tuple in cursor.fetchall()]
+            table_list = table_list
+            cursor.close()
+            db.close()
+            print(f'开始数据库读取')
+            with concurrent.futures.ProcessPoolExecutor(max_workers=24) as executor:
+                # 使用executor.map方法实现多进程并行查询数据库,得到每个表的数据,并存储在一个字典中
+                data_dict = {table_name: df for table_name, df in
+                             tqdm(zip(table_list, executor.map(query_database, table_list)))}
+            print(f'数据库读取完成')
+            break
+        except BaseException as e:
+            print(f'数据库读取错误{e}')
+            time.sleep(30)
+            continue
+    return data_dict
 
 
 # 分割列表
@@ -120,35 +174,104 @@ def split_list(lst, num_parts):
     return partitions
 
 
+def chunked_iterable(iterable, size):
+    """将可迭代对象分割为指定大小的块"""
+    it = iter(iterable)
+    while True:
+        chunk = tuple(islice(it, size))
+        if not chunk:
+            return
+        yield chunk
+
+
 # 多进程实现技术指标计算
 def ind():
     # 记录开始时间
     start_time = dt.now()
     fre = '1d'
-    num_cpus = mp.cpu_count()
-    print(f"{socket.gethostname()}共有{num_cpus}个核心\n{start_time.strftime('%Y-%m-%d %H:%M:%S')}开始计算{fre}技术指标")
-    # 连接数据库 获取股票列表
-    conn_engine_hlfx_pool = create_engine(
-        'mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/hlfx_pool?charset=utf8')
-    con_engine_hlfx_pool = conn_engine_hlfx_pool.connect()
-
-    # stocks = xtdata.get_stock_list_in_sector('沪深A股')
-    stocks = pd.read_sql_query(
-        text("select securities from %s" % 'stocks_list'), con=con_engine_hlfx_pool).iloc[-1, 0].split(",")
-    con_engine_hlfx_pool.close()
-    print(f'股票列表长度为{len(stocks)}')
+    if socket.gethostname() == 'DESKTOP-PC':
+        num_cpus = mp.cpu_count()
+    else:
+        num_cpus = mp.cpu_count()
+
+    print(
+        f"{socket.gethostname()}共有{num_cpus}个核心\n{start_time.strftime('%Y-%m-%d %H:%M:%S')}开始计算{fre}技术指标")
+    while True:
+        try:
+            # 连接数据库 获取股票列表
+            conn_engine_hlfx_pool = create_engine(
+                'mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/hlfx_pool?charset=utf8')
+            con_engine_hlfx_pool = conn_engine_hlfx_pool.connect()
+
+            # stocks = xtdata.get_stock_list_in_sector('沪深A股')
+            stocks = pd.read_sql_query(
+                text("select securities from %s" % 'stocks_list'), con=con_engine_hlfx_pool).iloc[-1, 0].split(",")
+            con_engine_hlfx_pool.close()
+            conn_engine_hlfx_pool.dispose()
+
+        except BaseException as e:
+            print(f'股票列表读取错误{e}')
+            continue
+        else:
+            print(f'股票列表长度为{len(stocks)}')
+            break
     err_list, hlfx_pool, hlfx_pool_daily = mp.Manager().list(), mp.Manager().list(), mp.Manager().list()  # 定义共享列表
 
     # 多进程执行tech_anal方法
-    pool = mp.Pool(processes=num_cpus)
     # 保存AsyncResult对象的列表
     async_results = []
-    for stock in stocks:
-        async_result = pool.apply_async(tech_anal, args=(stock, fre, hlfx_pool, hlfx_pool_daily, err_list,),
-                                        error_callback=err_call_back)
-        async_results.append(async_result)
-    pool.close()
-    pool.join()
+    # m = 0
+    # with concurrent.futures.ProcessPoolExecutor(max_workers=num_cpus) as executor:
+    #     for stock in tqdm(stocks):
+    #         executor.submit(tech_anal, stock, fre, hlfx_pool, hlfx_pool_daily, err_list)
+    #         m += 1
+    # print(m)
+
+    # 获取数据
+    stock_data_dict = get_stock_data()
+
+    # 设置每一轮的任务数
+    CHUNK_SIZE = 200  # 您可以根据需要进行调整
+    timeout = 120
+    max_retries = 3
+
+    for chunk in chunked_iterable(stock_data_dict.items(), CHUNK_SIZE):
+        retries = 0
+        while True:
+            print(f'chunk:{chunk[0][0]}-{chunk[-1][0]}')
+            with mp.Pool(processes=min(CHUNK_SIZE, len(chunk), num_cpus)) as pool:  # 使用最小值确保不会超出任务数或超过24核心
+                for stock, df_stock in chunk:
+                    print('**************', stock)
+                    async_result = pool.apply_async(func=tech_anal, args=(stock, df_stock, fre, hlfx_pool, hlfx_pool_daily,
+                                                                          err_list), error_callback=err_call_back)
+                    async_results.append(async_result)
+                try:
+                    for async_result in async_results:
+                        result = async_result.get(timeout=timeout)
+                except mp.TimeoutError:
+                    retries += 1
+                    print(f"Timeout occurred in pool. Retry {retries}/{max_retries}...")
+                    continue
+                except FunctionTimedOut:
+                    retries += 1
+                    print(f"Timeout occurred in worker. Retry {retries}/{max_retries}...")
+                    continue
+                except Exception as e:
+                    print(f"Error occurred: {e}")
+                    break
+                else:
+                    pool.close()
+                    pool.join()
+                    break
+
+    # with mp.Pool(processes=1) as pool:
+    #     for stock, df_stock in tqdm(stock_data_dict.items()):
+    #         # print(stock, df_stock.shape)
+    #         async_result = pool.apply_async(tech_anal, args=(stock, df_stock, fre, hlfx_pool, hlfx_pool_daily, err_list),
+    #                                         error_callback=err_call_back)
+    #         async_results.append(async_result)
+    #     pool.close()
+    #     pool.join()
 
     # 统计返回为 None 的结果数量
     none_count = 0
@@ -159,7 +282,7 @@ def ind():
             none_count += 1
 
     print(
-        f"共计算{none_count}/{i+1},\n当日信号:{len(hlfx_pool_daily)},\n持续检测为:{len(hlfx_pool)}, \n错误列表:{err_list}")
+        f"共计算{none_count}/{len(async_results)},\n当日信号:{len(hlfx_pool_daily)},\n持续检测为:{len(hlfx_pool)}, \n错误列表:{err_list}")
 
     # 将list转换为字符串
     results_list = ','.join(set(hlfx_pool))
@@ -173,34 +296,34 @@ def ind():
                               password='r6kEwqWU9!v3',
                               database='hlfx_pool')
 
-    db_pool2 = pymysql.connect(host='localhost',
-                               user='root',
-                               port=3308,
-                               password='r6kEwqWU9!v3',
-                               database='hlfx_pool')
+    # db_pool2 = pymysql.connect(host='localhost',
+    #                            user='root',b
+    #                            port=3308,
+    #                            password='r6kEwqWU9!v3',
+    #                            database='hlfx_pool')
 
     # 将list插入数据库
     cursor = db_pool.cursor()
-    cursor2 = db_pool2.cursor()
+    # cursor2 = db_pool2.cursor()
     sql = "INSERT INTO %s (date,value) VALUES('%s','%s')" % (fre, dt.now().strftime('%Y-%m-%d %H:%M:%S'), results_list)
     sql2 = "INSERT INTO daily_%s (date,value) VALUES('%s','%s')" % (fre, dt.now().strftime('%Y-%m-%d %H:%M:%S'),
                                                                     results_list_daily)
     try:
         cursor.execute(sql)
         cursor.execute(sql2)
-        cursor2.execute(sql)
-        cursor2.execute(sql2)
+        # cursor2.execute(sql)
+        # cursor2.execute(sql2)
         db_pool.commit()
-        db_pool2.commit()
+        # db_pool2.commit()
     except Exception as e:
         print(f'1d存入有问题', e)
         # db_pool.rollback()
     finally:
+        print(f"results_list_daily:{results_list_daily}")
         cursor.close()
         db_pool.close()
-        cursor2.close()
-        db_pool2.close()
-
+        # cursor2.close()
+        # db_pool2.close()
 
     # 记录结束时间
     end_time = dt.now()
@@ -209,10 +332,10 @@ def ind():
 
 if __name__ == '__main__':
     logger = mp.log_to_stderr()
-    # logger.setLevel(logging.DEBUG)
+    logger.setLevel(logging.DEBUG)
     freeze_support()
-    # 创建一个0-23的列表,用于设置cpu亲和度
-    cpu_list = list(range(24))
+    # 创建一个0-17的列表,用于设置cpu亲和度
+    cpu_list = list(range(23))
     pus = psutil.Process()
     pus.cpu_affinity(cpu_list)
 

+ 106 - 0
QMT/404.py

@@ -0,0 +1,106 @@
+from xtquant import xtdata, xtconstant
+from xtquant.xttype import StockAccount
+from xtquant.xttrader import XtQuantTrader, XtQuantTraderCallback
+import pandas as pd
+import datetime
+from datetime import datetime as dt
+import sys
+
+pd.set_option('display.max_columns', None)
+
+
+class MyXtQuantTraderCallback(XtQuantTraderCallback):
+    def on_disconnected(self):
+        """
+        连接断开
+        :return:
+        """
+        print(datetime.datetime.now(), '连接断开回调')
+
+    def on_stock_order(self, order):
+        """
+        委托回报推送
+        :param order: XtOrder对象
+        :return:
+        """
+        print(datetime.datetime.now(), '委托回调', order.order_remark)
+
+    def on_stock_trade(self, trade):
+        """
+        成交变动推送
+        :param trade: XtTrade对象
+        :return:
+        """
+        print(datetime.datetime.now(), '成交回调', trade.order_remark)
+
+    def on_order_error(self, order_error):
+        """
+        委托失败推送
+        :param order_error:XtOrderError 对象
+        :return:
+        """
+        # print("on order_error callback")
+        # print(order_error.order_id, order_error.error_id, order_error.error_msg)
+        print(f"委托报错回调 {order_error.order_remark} {order_error.error_msg}")
+
+    def on_cancel_error(self, cancel_error):
+        """
+        撤单失败推送
+        :param cancel_error: XtCancelError 对象
+        :return:
+        """
+        print(datetime.datetime.now(), sys._getframe().f_code.co_name)
+
+    def on_order_stock_async_response(self, response):
+        """
+        异步下单回报推送
+        :param response: XtOrderResponse 对象
+        :return:
+        """
+        print(f"异步委托回调 {response.order_remark}")
+
+    def on_cancel_order_stock_async_response(self, response):
+        """
+        :param response: XtCancelOrderResponse 对象
+        :return:
+        """
+        print(datetime.datetime.now(), sys._getframe().f_code.co_name)
+
+    def on_account_status(self, status):
+        """
+        :param response: XtAccountStatus 对象
+        :return:
+        """
+        print(datetime.datetime.now(), sys._getframe().f_code.co_name)
+
+
+def p(data):
+    print(data)
+
+
+if __name__ == '__main__':
+    hszs = xtdata.get_stock_list_in_sector('中金所')
+    print(hszs)
+    print(dt.now().strftime('%Y%m%d'))
+
+    # print(xtdata.get_sector_list())
+
+    field = ['time', 'open', 'close', 'high', 'low', 'volume', 'amount']
+    stock_list = hszs
+    stock = '000001.SH'
+
+    xtdata.download_history_data2(stock_list=['000001.SZ'], period='1m', start_time='20230801', end_time='')
+    df = xtdata.get_market_data(field, ['000001.SZ'], '1m', start_time='20230804', end_time='', count=-1,
+                                dividend_type='front')
+    df = pd.concat([df[i].T for i in ['time', 'open', 'high', 'low', 'close', 'volume',
+                                      'amount']], axis=1)
+    df.columns = ['time', 'open_front', 'high_front', 'low_front', 'close_front', 'volume_front',
+                  'amount_front']
+    df['time'] = df['time'].apply(lambda x: dt.fromtimestamp(x / 1000.0))
+    daily_avg_price = []
+    print(len(df))
+    for i in range(1, len(df) + 1):
+        daily_avg_price.append(round(sum(df['amount_front'][:i]) / sum(df['volume_front'][:i]) / 100, 2))
+    df['daily_avg_price'] = daily_avg_price
+    print(df)
+    # xtdata.subscribe_whole_quote(['000001.SZ'], callback=p)

+ 61 - 37
QMT/download_data_whole.py

@@ -1,4 +1,5 @@
 from xtquant import xtdata
+from xtquant import xtdatacenter as xtdc
 from datetime import datetime as dt
 import pandas as pd
 import math
@@ -10,18 +11,25 @@ from apscheduler.schedulers.blocking import BlockingScheduler
 import traceback
 import psutil
 import pymysql
+from tqdm import tqdm
+import logging
+
+xtdata.connect('', port=58611, remember_if_success=True)
 
 pd.set_option('display.max_columns', None)  # 设置显示最大行
 
 # path = 'C:\\qmt\\userdata_mini'
-path = '\\DANIEL-NUC\\qmt\\userdata_mini'
+# path = '\\DANIEL-NUC\\qmt\\userdata_mini'
+path = 'C:\\方正证券FQT交易客户端\\userdata_mini'
 
 field = ['time', 'open', 'close', 'high', 'low', 'volume', 'amount']
 # 创建共享计数器
 count = mp.Value('i', 0)
 
-eng_w = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_whole?charset=utf8',)
-eng_w2 = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3308/qmt_stocks_whole?charset=utf8',)
+eng_w = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_whole?charset=utf8', )
+
+
+# eng_w2 = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3308/qmt_stocks_whole?charset=utf8')
 
 
 def err_call_back(err):
@@ -30,16 +38,19 @@ def err_call_back(err):
 
 
 def to_sql(stock):
-    global eng_w, eng_w2
+    global eng_w
 
     # 后复权数据
-    data_back = xtdata.get_market_data(field, [stock], '1d', end_time='', count=-1, dividend_type='back')
+    data_back = xtdata.get_market_data(field, [stock], '1d', count=-1, dividend_type='back')
     df_back = pd.concat([data_back[i].loc[stock].T for i in ['time', 'open', 'high', 'low', 'close', 'volume',
                                                              'amount']], axis=1)
+
     df_back.columns = ['time', 'open_back', 'high_back', 'low_back', 'close_back', 'volume_back', 'amount_back']
     df_back['time'] = df_back['time'].apply(lambda x: dt.fromtimestamp(x / 1000.0))
     df_back.reset_index(drop=True, inplace=True)
 
+
+
     # 前复权数据
     data_front = xtdata.get_market_data(field, [stock], '1d', end_time='', count=-1, dividend_type='front')
     df_front = pd.concat([data_front[i].loc[stock].T for i in ['time', 'open', 'high', 'low', 'close', 'volume',
@@ -48,34 +59,39 @@ def to_sql(stock):
                         'amount_front']
     df_front['time'] = df_front['time'].apply(lambda x: dt.fromtimestamp(x / 1000.0))
     df = pd.merge_asof(df_back, df_front, 'time')
+    df = df.round(2)
     # print(df)
     try:
         # eng_w.connect().execute(text("truncate table `%s_1d`" % stock))
         df.to_sql('%s_1d' % stock, con=eng_w, index=False, if_exists='replace', chunksize=20000)
-        df.to_sql('%s_1d' % stock, con=eng_w2, index=False, if_exists='replace', chunksize=20000)
-        with count.get_lock():
-            count.value += 1
+    # df.to_sql('%s_1d' % stock, con=eng_w2, index=False, if_exists='replace', chunksize=20000)
     except BaseException as e:
         print(stock, e)
         pass
     finally:
+        print(f'{stock}入库完成!')
         eng_w.dispose()
-        eng_w2.dispose()
+    # eng_w2.dispose()
+
+
+def on_progress(data):
+    print(data)
+    return
 
 
 def download_data():
     global count
     stock_list = xtdata.get_stock_list_in_sector('沪深A股')
     '''
-    # 连接数据库 获取股票列表
-    conn_engine_hlfx_pool = create_engine(
-        'mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/hlfx_pool?charset=utf8')
+	# 连接数据库 获取股票列表
+	conn_engine_hlfx_pool = create_engine(
+		'mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/hlfx_pool?charset=utf8')
 
-    con_engine_hlfx_pool = conn_engine_hlfx_pool.connect()
+	con_engine_hlfx_pool = conn_engine_hlfx_pool.connect()
 
-    stock_list = pd.read_sql_query(
-        text("select securities from %s" % 'stocks_list'), con=con_engine_hlfx_pool).iloc[-1, 0].split(",")
-    '''
+	stock_list = pd.read_sql_query(
+		text("select securities from %s" % 'stocks_list'), con=con_engine_hlfx_pool).iloc[-1, 0].split(",")
+	'''
 
     results_list = ','.join(set(stock_list))
     print(f'今日个股列表为{len(stock_list)}')
@@ -84,29 +100,34 @@ def download_data():
                               port=3307,
                               password='r6kEwqWU9!v3',
                               database='hlfx_pool')
-    db_pool2 = pymysql.connect(host='localhost',
-                               user='root',
-                               port=3308,
-                               password='r6kEwqWU9!v3',
-                               database='hlfx_pool')
+    # db_pool2 = pymysql.connect(host='localhost',
+    #                            user='root',
+    #                            port=3308,
+    #                            password='r6kEwqWU9!v3',
+    #                            database='hlfx_pool')
     cursor_pool = db_pool.cursor()
-    cursor_pool2 = db_pool2.cursor()
+    # cursor_pool2 = db_pool2.cursor()
     sql = "INSERT INTO %s (date,securities) VALUES('%s','%s')" % (
         'stocks_list', dt.now().strftime('%Y-%m-%d %H:%M:%S'), results_list)
     cursor_pool.execute(sql)
-    cursor_pool2.execute(sql)
-    db_pool.commit()
-    db_pool2.commit()
+    # cursor_pool2.execute(sql)
+    try:
+        db_pool.commit()
+    except BaseException as e:
+        print(e)
+    # db_pool.rollback()
+    # db_pool2.commit()
 
     print(dt.now().strftime('%Y-%m-%d %H:%M:%S'), '开始下载!')
-    xtdata.download_history_data2(stock_list=stock_list, period='1d', start_time='', end_time='')
+    xtdata.download_history_data2(stock_list=stock_list, period='1d', start_time='', end_time='',callback=on_progress)
     print(dt.now().strftime('%Y-%m-%d %H:%M:%S'), '下载完成,准备入库!')
 
     async_results = []
-    pool = mp.Pool(processes=mp.cpu_count())
-    for stock in stock_list:
-        async_result = pool.apply_async(func=to_sql, args=(stock, ), error_callback=err_call_back)
+    pool = mp.Pool(processes=8)
+    for stock in tqdm(stock_list, desc='入库进度'):
+        async_result = pool.apply_async(func=to_sql, args=(stock,), error_callback=err_call_back)
         async_results.append(async_result)
+    print(f'记录循环{len(async_results)}次!')
     pool.close()
     pool.join()
 
@@ -121,17 +142,20 @@ def download_data():
 
 
 if __name__ == '__main__':
+    logger = mp.log_to_stderr()
+    logger.setLevel(logging.DEBUG)
     freeze_support()
     field = ['time', 'open', 'close', 'high', 'low', 'volume', 'amount']
-    cpu_count = mp.cpu_count()
+    cpu_list = list(range(24))
     pus = psutil.Process()
+    pus.cpu_affinity(cpu_list)
 
     download_data()
 
-    # scheduler = BlockingScheduler()
-    # scheduler.add_job(func=download_data, trigger='cron', day_of_week='0-4', hour='20', minute='05',
-    #                   timezone="Asia/Shanghai", max_instances=10)
-    # try:
-    #     scheduler.start()
-    # except (KeyboardInterrupt, SystemExit):
-    #     pass
+# scheduler = BlockingScheduler()
+# scheduler.add_job(func=download_data, trigger='cron', day_of_week='0-4', hour='20', minute='05',
+#                   timezone="Asia/Shanghai", max_instances=10)
+# try:
+#     scheduler.start()
+# except (KeyboardInterrupt, SystemExit):
+#     pass

+ 97 - 0
QMT/download_futures.py

@@ -0,0 +1,97 @@
+from xtquant import xtdata
+from datetime import datetime as dt
+import pandas as pd
+import math
+from sqlalchemy import create_engine, text
+import multiprocessing as mp
+from multiprocessing import freeze_support
+import os
+from apscheduler.schedulers.blocking import BlockingScheduler
+import traceback
+import psutil
+import pymysql
+from tqdm import tqdm
+import logging
+
+pd.set_option('display.max_columns', None)  # 设置显示最大行
+
+field = ['time', 'open', 'close', 'high', 'low', 'volume', 'amount']
+# 创建共享计数器
+count = mp.Value('i', 0)
+
+eng_w = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qihuo?charset=utf8',)
+
+x = 'jmJQ00.DF'
+
+def err_call_back(err):
+    print(f'问题在这里~ error:{str(err)}')
+    traceback.print_exc()
+
+
+def to_sql():
+    global eng_w
+
+    data = xtdata.get_market_data([], [x], '1h', end_time='', count=-1)
+    print(data)
+    df = pd.concat([data[i].loc[x].T for i in ['time', 'open', 'high', 'low', 'close', 'volume',
+                                                             'amount']], axis=1)
+    df.columns = ['time', 'open_back', 'high_back', 'low_back', 'close_back', 'volume', 'amount']
+    df['time'] = df['time'].apply(lambda x: dt.fromtimestamp(x / 1000.0))
+    df.reset_index(drop=True, inplace=True)
+    print(df)
+
+
+    try:
+        df.to_sql('%s_5m' % x, con=eng_w, index=False, if_exists='replace', chunksize=20000)
+
+    except BaseException as e:
+        print( e)
+        pass
+    finally:
+        print(f'入库完成!')
+        eng_w.dispose()
+        # eng_w2.dispose()
+
+
+def download_data():
+    print(dt.now().strftime('%Y-%m-%d %H:%M:%S'), '开始下载!')
+    xtdata.download_history_data(x, '5m', '', '')
+    print(dt.now().strftime('%Y-%m-%d %H:%M:%S'), '下载完成,准备入库!')
+    to_sql()
+    # async_results = []
+    # pool = mp.Pool(processes=24)
+    # for stock in tqdm(stock_list, desc='入库进度'):
+    #     async_result = pool.apply_async(func=to_sql, args=(stock, ), error_callback=err_call_back)
+    #     async_results.append(async_result)
+    # print(f'记录循环{len(async_results)}次!')
+    # pool.close()
+    # pool.join()
+
+    # 统计返回为 None 的结果数量
+    # none_count = 0
+    # for i, result_async in enumerate(async_results):
+    #     _ = result_async.get()  # 获取任务的结果
+    #     if _ is None:
+    #         none_count += 1
+
+    # print(f"{dt.now().strftime('%Y-%m-%d %H:%M:%S')}\n今日数据{len(async_results)}下载完毕,入库{none_count}条!")
+
+
+if __name__ == '__main__':
+    logger = mp.log_to_stderr()
+    logger.setLevel(logging.DEBUG)
+    freeze_support()
+    field = ['time', 'open', 'close', 'high', 'low', 'volume', 'amount']
+    cpu_list = list(range(16))
+    pus = psutil.Process()
+    pus.cpu_affinity(cpu_list)
+
+    download_data()
+
+    # scheduler = BlockingScheduler()
+    # scheduler.add_job(func=download_data, trigger='cron', day_of_week='0-4', hour='20', minute='05',
+    #                   timezone="Asia/Shanghai", max_instances=10)
+    # try:
+    #     scheduler.start()
+    # except (KeyboardInterrupt, SystemExit):
+    #     pass

+ 105 - 0
QMT/get_futures.py

@@ -0,0 +1,105 @@
+# coding:utf-8
+from datetime import datetime as dt
+import os
+import pandas as pd
+import pymysql
+from xtquant.xttrader import XtQuantTrader, XtQuantTraderCallback
+from xtquant.xttype import StockAccount
+from xtquant import xtdata, xtconstant
+from sqlalchemy import create_engine
+
+pd.set_option('display.max_columns', None) # 设置显示最大行
+
+df = pd.DataFrame(columns=['time', 'open', 'close', 'high', 'low', 'volume', 'amount'])
+field = ['time', 'open', 'close', 'high', 'low', 'volume', 'amount']
+eng_w = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qihuo?charset=utf8',)
+
+
+def avg_price(df):
+    print('avg_price')
+    daily_avg_price = []
+    for i in range(1, len(df) + 1):
+        daily_avg_price.append(round(sum(df['amount'][:i]) / sum(df['volume'][:i]) / 10, 2))
+    df['daily_avg_price'] = daily_avg_price
+    return df
+
+
+def over_avg(datas):
+    global df
+    for stock_code in datas:
+        df_temp = pd.DataFrame(datas[stock_code])
+        df_temp['time'] = df_temp['time'].apply(lambda x: dt.fromtimestamp(x / 1000.0))
+        df = pd.concat([df, df_temp], axis=0, ignore_index=True)
+        df = avg_price(df)
+        df = df[['time', 'open', 'close', 'high', 'low', 'volume', 'amount', 'daily_avg_price']]
+        print(df)
+
+
+if __name__ == '__main__':
+    print('start')
+    xtdata.subscribe_quote('rb00.SF', '1d', '', '')
+    xtdata.download_history_data('rb00.SF', '1d', '', '')
+
+    data = xtdata.get_market_data_ex([], ['rb00.SF'], period='1d')
+    print(type(data['rb00.SF']))
+    exit()
+    db_pool = pymysql.connect(host='localhost',
+                              user='root',
+                              port=3307,
+                              password='r6kEwqWU9!v3',
+                              database='qihuo')
+    cursor_pool = db_pool.cursor()
+    data['rb00.SF'].to_sql('rb00_1d' , con=eng_w, index=False, if_exists='replace', chunksize=20000)
+
+    exit()
+
+    fts = xtdata.get_stock_list_in_sector('中金所主力合约')
+    print(fts, len(fts))
+
+    fts_list = ['IF00.CFFEX', 'IH00.CFFEX', 'IC00.CFFEX', 'IO00.CFFEX']
+    # t = dt.now().strftime('%Y%m%d')
+    s = 'IF00.CFFEX'
+    new_s ='IF00.IF'
+
+    data = xtdata.get_market_data_ex([], ['IF00.IF', 'RB01.SHFE'], period='1d', end_time='', count=-1)
+    df = data['IF00.IF'][['open', 'close', 'high', 'low', 'volume', 'amount']].copy()
+    print('df', df)
+    df2 = data['RB01.SHFE'][['open', 'close', 'high', 'low', 'volume', 'amount']].copy()
+    print('df2', df2)
+    df2['daily_point'] = df2['close'] - df2['open']
+    # 计算每日波动点数,开盘-收盘
+    df['daily_point'] = df['close'] - df['open']
+    # 选取需要的字段
+    print(df.describe())
+
+    # 描述统计信息
+    describe_stats = df['daily_point'].describe()
+
+    # 正数的平均值
+    positive_mean = df[df['daily_point'] > 0]['daily_point'].mean()
+
+    # 负数的平均值
+    negative_mean = df[df['daily_point'] < 0]['daily_point'].mean()
+
+    # 绝对值的平均值
+    absolute_mean = df['daily_point'].abs().mean()
+
+    # 分位数分布
+    percentiles = df['daily_point'].quantile([0.25, 0.5, 0.75])
+
+    # 统计正数的数量
+    positive_count = len(df[df['daily_point'] > 0])
+
+    # 统计负数的数量
+    negative_count = len(df[df['daily_point'] < 0])
+
+    print("统计信息:\n", describe_stats)
+    print("正数的平均值:", positive_mean)
+    print("负数的平均值:", negative_mean)
+    print("绝对值的平均值:", absolute_mean)
+    print("25%、50%、75% 分布:\n", percentiles)
+    print("正数的数量:", positive_count)
+    print("负数的数量:", negative_count)
+
+
+

+ 40 - 0
QMT/kzz.py

@@ -0,0 +1,40 @@
+# coding:utf-8
+from datetime import datetime as dt
+import os
+import pandas as pd
+from xtquant.xttrader import XtQuantTrader, XtQuantTraderCallback
+from xtquant.xttype import StockAccount
+from xtquant import xtdata, xtconstant
+
+
+df = pd.DataFrame(columns=['time', 'open', 'close', 'high', 'low', 'volume', 'amount'])
+def avg_price(df):
+    print('avg_price')
+    daily_avg_price = []
+    for i in range(1, len(df) + 1):
+        daily_avg_price.append(round(sum(df['amount'][:i]) / sum(df['volume'][:i]) / 10, 2))
+    df['daily_avg_price'] = daily_avg_price
+    return df
+
+
+def over_avg(datas):
+    global df
+    for stock_code in datas:
+        df_temp = pd.DataFrame(datas[stock_code])
+        df_temp['time'] = df_temp['time'].apply(lambda x: dt.fromtimestamp(x / 1000.0))
+        df = pd.concat([df, df_temp], axis=0, ignore_index=True)
+        df = avg_price(df)
+        df = df[['time', 'open', 'close', 'high', 'low', 'volume', 'amount', 'daily_avg_price']]
+        print(stock_code, '\n', df)
+
+
+if __name__ == '__main__':
+    print('start')
+    kzz = xtdata.get_stock_list_in_sector('沪深转债')
+    print(kzz, len(kzz))
+    # exit()
+    t = dt.now().strftime('%Y%m%d')
+    # stocks = xtdata.get_stock_list_in_sector('沪深A股')
+    # xtdata.subscribe_whole_quote(kzz, callback=over_avg)
+    xtdata.subscribe_quote('128130.SZ', period='1m', start_time=t, count=-1, callback=over_avg)
+    xtdata.run()

+ 473 - 0
QMT/myindicator.py

@@ -0,0 +1,473 @@
+from time import sleep
+
+import os
+
+import pandas as pd
+import talib as ta
+import numpy as np
+from datetime import datetime as dt
+
+
+class myind:
+    def __init__(self, name, value):
+        self.name = name
+        self.value = value
+
+    def __repr__(self):
+        return self.name
+
+    def __call__(self, x):
+        return self.value(x)
+
+    def myself_kdj(df):
+        low_list = df['low_back'].rolling(9, min_periods=9).min()
+        low_list.fillna(value=df['low_back'].expanding().min(), inplace=True)
+        high_list = df['high_back'].rolling(9, min_periods=9).max()
+        high_list.fillna(value=df['high_back'].expanding().max(), inplace=True)
+        rsv = (df['close_back'] - low_list) / (high_list - low_list) * 100
+        df['k'] = pd.DataFrame(rsv).ewm(com=2).mean()
+        df['d'] = df['k'].ewm(com=2).mean()
+        df['j'] = 3 * df['k'] - 2 * df['d']
+        return df
+
+    # macd指标
+    def get_macd_data(data, short=0, long1=0, mid=0):
+        if short == 0:
+            short = 12
+        if long1 == 0:
+            long1 = 26
+        if mid == 0:
+            mid = 9
+        data['sema'] = pd.Series(data['close_back']).ewm(span=short).mean()
+        data['lema'] = pd.Series(data['close_back']).ewm(span=long1).mean()
+        data.fillna(0, inplace=True)
+        data['dif'] = data['sema'] - data['lema']
+        data['dea'] = pd.Series(data['dif']).ewm(span=mid).mean()
+        data['macd'] = 2 * (data['dif'] - data['dea'])
+        data.fillna(0, inplace=True)
+        return data[['dif', 'dea', 'macd']]
+
+    # rsi指标
+    def get_ris(data):
+        data["rsi_6"] = ta.RSI(data['close_back'], timeperiod=6)
+        data["rsi_12"] = ta.RSI(data['close_back'], timeperiod=12)
+        data["rsi_24"] = ta.RSI(data['close_back'], timeperiod=24)
+        # return data
+
+    def get_bias(data):
+        # 计算方法:
+        # bias指标
+        # N期BIAS=(当日收盘价-N期平均收盘价)/N期平均收盘价*100%
+        data['bias_6'] = (data['close_back'] - data['close_back'].rolling(6, min_periods=1).mean()) / \
+                         data['close_back'].rolling(6, min_periods=1).mean() * 100
+        data['bias_12'] = (data['close_back'] - data['close_back'].rolling(12, min_periods=1).mean()) / \
+                          data['close_back'].rolling(12, min_periods=1).mean() * 100
+        data['bias_24'] = (data['close_back'] - data['close_back'].rolling(24, min_periods=1).mean()) / \
+                          data['close_back'].rolling(24, min_periods=1).mean() * 100
+        data['bias_6'] = round(data['bias_6'], 2)
+        data['bias_12'] = round(data['bias_12'], 2)
+        data['bias_24'] = round(data['bias_24'], 2)
+
+    def get_wilr(data):
+        # 威廉指标
+        # 建议用talib库的WILLR方法,亲测有用
+        data['willr'] = ta.WILLR(data['high_back'], data['low_back'], data['close_back'], timeperiod=14)
+
+    def get_hlfx(data):
+        import pandas as pd
+        trading_signals = 0
+        data_temp = data[['time', 'open_back', 'close_back', 'high_back', 'low_back', 'dif', 'dea', 'macd']]
+        data_temp.columns = ['time', 'open', 'close', 'high', 'low', 'dif', 'dea', 'macd']
+        df_day = pd.DataFrame(columns=['time', 'open', 'close', 'high', 'low'])
+        # df_day = pd.DataFrame()
+        # 先处理去包含
+        for i in data_temp.index:
+            if i == 0 or i == 1:
+                df_day = pd.concat([df_day.copy(), data_temp.loc[i].to_frame().T], ignore_index=True)
+            # 不包含
+            elif (df_day.iloc[-1, 3] > data_temp.loc[i, 'high']
+                  and df_day.iloc[-1, 4] > data_temp.loc[i, 'low']) \
+                    or (df_day.iloc[-1, 3] < data_temp.loc[i, 'high']
+                        and df_day.iloc[-1, 4] < data_temp.loc[i, 'low']):
+                df_day = pd.concat([df_day.copy(), data_temp.loc[i].to_frame().T], ignore_index=True)
+
+            # 包含
+            else:
+                # 左高,下降
+                if df_day.iloc[-2, 3] > df_day.iloc[-1, 3]:
+                    df_day.iloc[-1, 3] = min(df_day.iloc[-1, 3], data_temp.loc[i, 'high'])
+                    df_day.iloc[-1, 4] = min(df_day.iloc[-1, 4], data_temp.loc[i, 'low'])
+
+                else:
+                    # 右高,上升
+                    df_day.iloc[-1, 3] = max(df_day.iloc[-1, 3], data_temp.loc[i, 'high'])
+                    df_day.iloc[-1, 4] = max(df_day.iloc[-1, 4], data_temp.loc[i, 'low'])
+
+        # print('111', df_day, data_temp)
+
+        df_day['HL'] = np.nan
+        if len(df_day.index) > 2:
+            # 寻找顶底分型
+            for x in range(2, len(df_day.index)):
+                m = x - 1
+                # 底
+                # 符合底分型形态,且第2、3根k线是阳线
+                if ((df_day.loc[x, 'high'] > df_day.loc[x - 1, 'high']) and
+                        (df_day.loc[x - 2, 'high'] > df_day.loc[x - 1, 'high'])):
+                    # and df_day.loc[x, 'close'] > df_day.loc[x, 'open'] and \
+                    #     df_day.loc[x - 1, 'close'] > df_day.loc[x - 1, 'open']:
+
+                    df_day.loc[x, 'HL'] = 'L*'
+
+                    while m:
+                        if df_day.loc[m, 'HL'] in ['H', 'HH', 'H*']:
+                            if (x - m) > 3:
+                                # 成笔——>L
+                                df_day.loc[x, 'HL'] = 'L'
+                                # 产生信号,进入hlfx_pool
+                                if x == len(df_day.index) - 1:
+                                    trading_signals = 1
+                            else:
+                                # 不成笔 次级别中枢,保持L* 修订原H为H*
+                                df_day.loc[m, 'HL'] = 'H*'
+                            break
+
+                        elif df_day.loc[m, 'HL'] in ['L', 'LL', 'L*']:
+                            if df_day.loc[m - 1, 'low'] > df_day.loc[x - 1, 'low']:
+                                # 前一个为底更高,且中间不存在更低的底
+                                df_day.loc[x, 'HL'] = 'L'
+                                df_day.loc[m, 'HL'] = '-'
+
+                                # 产生信号,进入hlfx_pool
+                                if x == len(df_day.index) - 1:
+                                    trading_signals = 1
+
+                                # 获得MACD,判断MACD判断背驰
+                                x_macd_dif, x_macd_dea, x_macd_macd = data_temp.loc[x, 'dif'], data_temp.loc[x, 'dea'], \
+                                    data_temp.loc[x, 'macd']
+                                m_macd_dif, m_macd_dea, m_macd_macd = data_temp.loc[m, 'dif'], data_temp.loc[m, 'dea'], \
+                                    data_temp.loc[m, 'macd']
+
+                                # MACD底背驰
+                                if m_macd_dif < x_macd_dif:
+                                    # 次级别背驰底->LL
+                                    df_day.loc[x, 'HL'] = 'LL'
+                                break
+                            else:
+                                # 前底更低,本底无效
+                                df_day.loc[x, 'HL'] = '-'
+                                break
+                        m = m - 1
+                        if m == 0:
+                            df_day.loc[x, 'HL'] = 'L'
+
+                # 顶
+                elif ((df_day.loc[x, 'high'] < df_day.loc[x - 1, 'high']) and (
+                        df_day.loc[x - 2, 'high'] < df_day.loc[x - 1, 'high'])):
+
+                    df_day.loc[x, 'HL'] = 'H*'
+                    while m:
+                        if df_day.loc[m, 'HL'] in ['L', 'LL', 'L*']:
+                            if x - m > 3:
+                                # 成笔->H
+                                df_day.loc[x, 'HL'] = 'H'
+                                # 产生信号,进入hlfx_pool
+                                if x == len(df_day.index) - 1:
+                                    trading_signals = 2
+                            else:
+                                # 不成笔 次级别中枢,保持H* 修订原L为L*
+                                df_day.loc[m, 'HL'] = 'L*'
+                            break
+
+                        elif df_day.loc[m, 'HL'] in ['H', 'HH', 'H*']:
+                            if df_day.loc[x - 1, 'high'] > df_day.loc[m - 1, 'high']:
+                                # 前一个为顶,且中间存在不包含 or 更高的顶
+                                df_day.loc[x, 'HL'] = 'H'
+                                df_day.loc[m, 'HL'] = '-'
+                                # 产生信号,进入hlfx_pool
+                                if x == len(df_day.index) - 1:
+                                    trading_signals = 2
+
+                                # 获得MACD,判断MACD判断背驰
+                                x_macd_dif, x_macd_dea, x_macd_macd = data_temp.loc[x, 'dif'], data_temp.loc[x, 'dea'], \
+                                    data_temp.loc[x, 'macd']
+                                m_macd_dif, m_macd_dea, m_macd_macd = data_temp.loc[m, 'dif'], data_temp.loc[m, 'dea'], \
+                                    data_temp.loc[m, 'macd']
+
+                                # MACD顶背驰
+                                if x_macd_dif < m_macd_dif:
+                                    # 次级别背驰底->HH
+                                    df_day.loc[x, 'HL'] = 'HH'
+                                break
+                            else:
+                                # 前顶更高,本顶无效
+                                df_day.loc[x, 'HL'] = '-'
+                                break
+                        m = m - 1
+                        if m == 0:
+                            df_day.loc[x, 'HL'] = 'H'
+
+                else:
+                    df_day.loc[x, 'HL'] = '-'
+        df_temp = df_day[['time', 'HL']]
+
+        return df_temp, trading_signals
+
+    # Optimized variant of get_hlfx: merge "contained" K-lines first, then
+    # label top/bottom fractals (Chan-theory style) on the merged series.
+    def get_hlfx_optimization(data):
+        """Merge contained K-lines and mark top/bottom fractals.
+
+        Parameters: data -- DataFrame holding back-adjusted OHLC columns
+        ('open_back', 'close_back', 'high_back', 'low_back') plus MACD
+        columns ('dif', 'dea', 'macd') and 'time'.
+        Returns: (df_temp, trading_signals) where df_temp has columns
+        ['time', 'HL'] with fractal labels, and trading_signals is
+        0 (none), 1 (bottom confirmed on the last bar) or 2 (top
+        confirmed on the last bar).
+        """
+        # print(os.getpid(),  'start', len(data))
+
+
+        trading_signals = 0
+        data_temp = data[['time', 'open_back', 'close_back', 'high_back', 'low_back', 'dif', 'dea', 'macd']]
+        data_temp.columns = ['time', 'open', 'close', 'high', 'low', 'dif', 'dea', 'macd']
+
+        merged_data = []
+        prev_kline = data_temp.iloc[0].copy()
+        merged_data.append(prev_kline)
+        # df_day = pd.DataFrame(columns=['time', 'open', 'close', 'high', 'low', 'volume', 'money', 'HL'])
+        # df_day = pd.DataFrame(columns=['time', 'high', 'low', 'HL'])
+        df_day = pd.DataFrame()
+        for i in range(1, len(data_temp)):
+            current_kline = data_temp.iloc[i].copy()
+
+            # Compare against the most recent merged bar.  This is a Series
+            # reference, so the in-place edits below update the list entry.
+            last_merged_kline = merged_data[-1]
+
+            # NOTE(review): the loop starts at i=1, so "i == 0 or i == 1"
+            # forces the first iteration to merge unconditionally regardless
+            # of containment -- confirm this is intended.
+            if i == 0 or i == 1 or \
+                (last_merged_kline['high'] >= current_kline['high'] and last_merged_kline['low'] <= current_kline['low']) or \
+           (last_merged_kline['high'] <= current_kline['high'] and last_merged_kline['low'] >= current_kline['low']):
+            # Containment detected: merge following the previous bar's direction.
+                if last_merged_kline['close'] > last_merged_kline['open']:  # previous merged bar was bullish: keep the higher extremes
+                    last_merged_kline['high'] = max(last_merged_kline['high'], current_kline['high'])
+                    last_merged_kline['low'] = max(last_merged_kline['low'], current_kline['low'])
+                else:  # previous merged bar was bearish: keep the lower extremes
+                    last_merged_kline['high'] = min(last_merged_kline['high'], current_kline['high'])
+                    last_merged_kline['low'] = min(last_merged_kline['low'], current_kline['low'])
+            else:
+                # No containment: start a new merged bar.
+                merged_data.append(current_kline)
+        df_day = pd.DataFrame(merged_data).reset_index(drop=True)
+
+
+        # Top/bottom fractal labelling on the merged bars.
+        df_day['HL'] = np.nan
+        try:
+            if len(df_day.index) > 2:
+            # scan every merged bar (from the third onwards) for a fractal
+                for x in range(2, len(df_day.index)):
+                    m = x - 1
+                    # bottom fractal
+                    # NOTE(review): this "bottom" branch compares 'high' values
+                    # (middle bar's high below both neighbours) -- verify that
+                    # 'low' was not intended here.
+                    if ((df_day.loc[x, 'high'] > df_day.loc[x - 1, 'high']) and
+                            (df_day.loc[x - 2, 'high'] > df_day.loc[x - 1, 'high'])):
+                        # and df_day.loc[x, 'close'] > df_day.loc[x, 'open'] and \
+                        #     df_day.loc[x - 1, 'close'] > df_day.loc[x - 1, 'open']:
+
+                        df_day.loc[x, 'HL'] = 'L*'
+
+                        while m:
+                            if df_day.loc[m, 'HL'] in ['H', 'HH', 'H*']:
+                                if (x - m) > 3:
+                                    # enough bars between fractals: valid stroke -> L
+                                    df_day.loc[x, 'HL'] = 'L'
+                                    # signal: newest bar is a confirmed bottom
+                                    if x == len(df_day.index) - 1:
+                                        trading_signals = 1
+                                else:
+                                    # not a stroke: lower-level pivot, keep L* and demote the prior H to H*
+                                    df_day.loc[m, 'HL'] = 'H*'
+                                break
+
+                            elif df_day.loc[m, 'HL'] in ['L', 'LL', 'L*']:
+                                if df_day.loc[m - 1, 'low'] > df_day.loc[x - 1, 'low']:
+                                    # previous bottom is higher and no lower bottom in between: this one supersedes it
+                                    df_day.loc[x, 'HL'] = 'L'
+                                    df_day.loc[m, 'HL'] = '-'
+
+                                    # signal: newest bar is a confirmed bottom
+                                    if x == len(df_day.index) - 1:
+                                        trading_signals = 1
+
+                                    # Fetch MACD values to test for divergence.
+                                    # NOTE(review): x/m are positions in the
+                                    # merged df_day but index the pre-merge
+                                    # data_temp -- rows may not correspond.
+                                    x_macd_dif, x_macd_dea, x_macd_macd = data_temp.loc[x, 'dif'], data_temp.loc[x, 'dea'], \
+                                        data_temp.loc[x, 'macd']
+                                    m_macd_dif, m_macd_dea, m_macd_macd = data_temp.loc[m, 'dif'], data_temp.loc[m, 'dea'], \
+                                        data_temp.loc[m, 'macd']
+
+                                    # MACD bottom divergence
+                                    if m_macd_dif < x_macd_dif:
+                                        # divergent bottom -> LL
+                                        df_day.loc[x, 'HL'] = 'LL'
+                                    break
+                                else:
+                                    # previous bottom is lower: this bottom is invalid
+                                    df_day.loc[x, 'HL'] = '-'
+                                    break
+                            m = m - 1
+                            if m == 0:
+                                # reached the start with no prior fractal: accept as L
+                                df_day.loc[x, 'HL'] = 'L'
+
+                    # top fractal
+                    elif ((df_day.loc[x, 'high'] < df_day.loc[x - 1, 'high']) and (
+                            df_day.loc[x - 2, 'high'] < df_day.loc[x - 1, 'high'])):
+
+                        df_day.loc[x, 'HL'] = 'H*'
+                        while m:
+                            if df_day.loc[m, 'HL'] in ['L', 'LL', 'L*']:
+                                if x - m > 3:
+                                    # valid stroke -> H
+                                    df_day.loc[x, 'HL'] = 'H'
+                                    # signal: newest bar is a confirmed top
+                                    if x == len(df_day.index) - 1:
+                                        trading_signals = 2
+                                else:
+                                    # not a stroke: keep H* and demote the prior L to L*
+                                    df_day.loc[m, 'HL'] = 'L*'
+                                break
+
+                            elif df_day.loc[m, 'HL'] in ['H', 'HH', 'H*']:
+                                if df_day.loc[x - 1, 'high'] > df_day.loc[m - 1, 'high']:
+                                    # previous top is lower: this one supersedes it
+                                    df_day.loc[x, 'HL'] = 'H'
+                                    df_day.loc[m, 'HL'] = '-'
+                                    # signal: newest bar is a confirmed top
+                                    if x == len(df_day.index) - 1:
+                                        trading_signals = 2
+
+                                    # Fetch MACD values to test for divergence.
+                                    x_macd_dif, x_macd_dea, x_macd_macd = data_temp.loc[x, 'dif'], data_temp.loc[x, 'dea'], \
+                                        data_temp.loc[x, 'macd']
+                                    m_macd_dif, m_macd_dea, m_macd_macd = data_temp.loc[m, 'dif'], data_temp.loc[m, 'dea'], \
+                                        data_temp.loc[m, 'macd']
+
+                                    # MACD top divergence
+                                    if x_macd_dif < m_macd_dif:
+                                        # divergent top -> HH
+                                        df_day.loc[x, 'HL'] = 'HH'
+                                    break
+                                else:
+                                    # previous top is higher: this top is invalid
+                                    df_day.loc[x, 'HL'] = '-'
+                                    break
+                            m = m - 1
+                            if m == 0:
+                                df_day.loc[x, 'HL'] = 'H'
+
+                    else:
+                        df_day.loc[x, 'HL'] = '-'
+        except BaseException as e:
+            # NOTE(review): BaseException also swallows KeyboardInterrupt /
+            # SystemExit -- consider narrowing to Exception.
+            print('errrrr', e)
+
+        df_temp = df_day[['time', 'HL']]
+        return df_temp, trading_signals
+
+
+    def get_ddfx(data, data_temp, u):
+        """Label top/bottom fractals on an already-merged K-line frame.
+
+        Same fractal-scanning logic as the hlfx helpers, but operates on a
+        pre-merged frame ('data'), reads MACD values from 'data_temp', and
+        prints the result tagged with 'u' instead of returning anything.
+        NOTE(review): 'trading_signals' is assigned below but never
+        initialised, returned, or read -- confirm whether this function is
+        a debug helper or should return (data, trading_signals).
+        """
+        df_day = data
+        if len(df_day.index) > 2:
+            # scan every bar (from the third onwards) for a fractal
+            for x in range(2, len(df_day.index)):
+                m = x - 1
+                # bottom fractal
+                # NOTE(review): compares 'high' values for a bottom -- verify
+                # that 'low' was not intended here.
+                if ((df_day.loc[x, 'high'] > df_day.loc[x - 1, 'high']) and
+                        (df_day.loc[x - 2, 'high'] > df_day.loc[x - 1, 'high'])):
+                    # and df_day.loc[x, 'close'] > df_day.loc[x, 'open'] and \
+                    #     df_day.loc[x - 1, 'close'] > df_day.loc[x - 1, 'open']:
+
+                    df_day.loc[x, 'HL'] = 'L*'
+
+                    while m:
+                        if df_day.loc[m, 'HL'] in ['H', 'HH', 'H*']:
+                            if (x - m) > 3:
+                                # enough bars between fractals: valid stroke -> L
+                                df_day.loc[x, 'HL'] = 'L'
+                                # signal: newest bar is a confirmed bottom
+                                if x == len(df_day.index) - 1:
+                                    trading_signals = 1
+                            else:
+                                # not a stroke: keep L* and demote the prior H to H*
+                                df_day.loc[m, 'HL'] = 'H*'
+                            break
+
+                        elif df_day.loc[m, 'HL'] in ['L', 'LL', 'L*']:
+                            if df_day.loc[m - 1, 'low'] > df_day.loc[x - 1, 'low']:
+                                # previous bottom is higher: this one supersedes it
+                                df_day.loc[x, 'HL'] = 'L'
+                                df_day.loc[m, 'HL'] = '-'
+
+                                # signal: newest bar is a confirmed bottom
+                                if x == len(df_day.index) - 1:
+                                    trading_signals = 1
+
+                                # Fetch MACD values to test for divergence.
+                                x_macd_dif, x_macd_dea, x_macd_macd = data_temp.loc[x, 'dif'], data_temp.loc[x, 'dea'], \
+                                    data_temp.loc[x, 'macd']
+                                m_macd_dif, m_macd_dea, m_macd_macd = data_temp.loc[m, 'dif'], data_temp.loc[m, 'dea'], \
+                                    data_temp.loc[m, 'macd']
+
+                                # MACD bottom divergence
+                                if m_macd_dif < x_macd_dif:
+                                    # divergent bottom -> LL
+                                    df_day.loc[x, 'HL'] = 'LL'
+                                break
+                            else:
+                                # previous bottom is lower: this bottom is invalid
+                                df_day.loc[x, 'HL'] = '-'
+                                break
+                        m = m - 1
+                        if m == 0:
+                            # reached the start with no prior fractal: accept as L
+                            df_day.loc[x, 'HL'] = 'L'
+
+                # top fractal
+                elif ((df_day.loc[x, 'high'] < df_day.loc[x - 1, 'high']) and (
+                        df_day.loc[x - 2, 'high'] < df_day.loc[x - 1, 'high'])):
+
+                    df_day.loc[x, 'HL'] = 'H*'
+                    while m:
+                        if df_day.loc[m, 'HL'] in ['L', 'LL', 'L*']:
+                            if x - m > 3:
+                                # valid stroke -> H
+                                df_day.loc[x, 'HL'] = 'H'
+                                # signal: newest bar is a confirmed top
+                                if x == len(df_day.index) - 1:
+                                    trading_signals = 2
+                            else:
+                                # not a stroke: keep H* and demote the prior L to L*
+                                df_day.loc[m, 'HL'] = 'L*'
+                            break
+
+                        elif df_day.loc[m, 'HL'] in ['H', 'HH', 'H*']:
+                            if df_day.loc[x - 1, 'high'] > df_day.loc[m - 1, 'high']:
+                                # previous top is lower: this one supersedes it
+                                df_day.loc[x, 'HL'] = 'H'
+                                df_day.loc[m, 'HL'] = '-'
+                                # signal: newest bar is a confirmed top
+                                if x == len(df_day.index) - 1:
+                                    trading_signals = 2
+
+                                # Fetch MACD values to test for divergence.
+                                x_macd_dif, x_macd_dea, x_macd_macd = data_temp.loc[x, 'dif'], data_temp.loc[x, 'dea'], \
+                                    data_temp.loc[x, 'macd']
+                                m_macd_dif, m_macd_dea, m_macd_macd = data_temp.loc[m, 'dif'], data_temp.loc[m, 'dea'], \
+                                    data_temp.loc[m, 'macd']
+
+                                # MACD top divergence
+                                if x_macd_dif < m_macd_dif:
+                                    # divergent top -> HH
+                                    df_day.loc[x, 'HL'] = 'HH'
+                                break
+                            else:
+                                # previous top is higher: this top is invalid
+                                df_day.loc[x, 'HL'] = '-'
+                                break
+                        m = m - 1
+                        if m == 0:
+                            df_day.loc[x, 'HL'] = 'H'
+
+                else:
+                    df_day.loc[x, 'HL'] = '-'
+        # Rebinding the local name 'data' has no effect on the caller;
+        # mutations above already happened in place on the passed frame.
+        data = df_day
+        print('44444444444444444', u)
+        print(data)

+ 92 - 0
QMT/qihuo_get_indicators.py

@@ -0,0 +1,92 @@
+# coding:utf-8
+import time
+
+from datetime import datetime as dt
+import socket
+import pandas as pd
+import numpy as np
+from sqlalchemy import create_engine, text
+from jqdatasdk import *
+import pymysql
+import multiprocessing as mp
+from multiprocessing import freeze_support
+import concurrent.futures
+import math
+import talib as ta
+import os
+import traceback
+import random
+import logging
+from myindicator import myind
+import psutil
+from tqdm import tqdm
+from itertools import islice
+from func_timeout import func_set_timeout, FunctionTimedOut
+from apscheduler.schedulers.blocking import BlockingScheduler
+
+# 显示最大行与列
+pd.set_option('display.max_rows', None)
+pd.set_option('display.max_columns', None)
+
+def tech_anal(table_list):
+    """Compute technical indicators for each table in table_list and
+    persist the enriched frame into the qihuo_tech schema.
+
+    For every table: read it from 'qihuo', append MACD/RSI/BIAS/WILR and
+    HL fractal columns via myind helpers, then replace the same-named
+    table in 'qihuo_tech'.
+    """
+    # Source ('qihuo') and destination ('qihuo_tech') DB connections.
+    engine = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qihuo')
+    engine_tech = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qihuo_tech')
+    for i in table_list:
+        df = pd.read_sql_table(i, con=engine)
+        print(i)
+        try:
+            # myind helpers append indicator columns to df in place.
+            myind.get_macd_data(df)
+            myind.get_ris(df)
+            myind.get_bias(df)
+            myind.get_wilr(df)
+            df = df.round(2)
+            df_temp, t_signals = myind.get_hlfx(df)
+            df = pd.merge(df, df_temp, on='time', how='left')
+            df['HL'].fillna(value='-', inplace=True)
+            df = df.reset_index(drop=True)
+            # Normalise infinities produced by indicator math to NaN.
+            df = df.replace([np.inf, -np.inf], np.nan)
+            df = df.round(2)
+        except BaseException as e:
+            # NOTE(review): broad catch -- a failed table is skipped silently.
+            print(f'{i}计算有问题', e)
+        else:
+            # Persist to the tech schema, replacing the whole table each run.
+            try:
+                # pass
+                df.to_sql('%s' % i, con=engine_tech, index=False, if_exists='replace')
+                # df.to_sql('%s_1d' % stock, con=engine_tech2, index=False, if_exists='replace')
+            except BaseException as e:
+                print(f'{i}存储有问题', e)
+                traceback.print_exc()
+    # NOTE(review): exit() terminates the whole interpreter when the loop
+    # finishes -- confirm this is intended rather than a plain return.
+    exit()
+
+
+# Read the names of all futures tables from the qihuo database.
+def read_sql():
+    """Return the list of table names in 'qihuo' matching '%00.%'.
+
+    NOTE(review): the 'break' in the finally clause executes even after the
+    'continue' in the except clause (finally always wins), so the loop
+    exits after the FIRST failed attempt and 'table_list' would then be
+    unbound, raising NameError at the return -- the retry logic never
+    actually retries. Verify and restructure (e.g. break only on success).
+    """
+    while True:
+        try:
+            db = pymysql.connect(host='localhost',
+                                 user='root',
+                                 port=3307,
+                                 password='r6kEwqWU9!v3',
+                                 database='qihuo')
+            cursor = db.cursor()
+            # '%%%s%%' renders as '%00.%' -- tables whose name contains '00.'
+            cursor.execute("show tables like '%%%s%%' " % '00.')
+            table_list = [tuple[0] for tuple in cursor.fetchall()]
+            cursor.close()
+            db.close()
+        except Exception as e:
+            print(e)
+            time.sleep(5)
+            continue
+        finally:
+            break
+    return table_list
+
+
+if __name__ == '__main__':
+    # Entry point: fetch the futures table names, then run the indicator
+    # pipeline over all of them.
+    print('start')
+    table_list = read_sql()
+    print('已读取期货表')
+    tech_anal(table_list)
+

+ 1 - 1
QMT/qmt_get_indicators_3308.py

@@ -304,7 +304,7 @@ def ind():
     # mp.log_to_stderr()
 
     sttime = dt.now()
-    num_cpus = mp.cpu_count()
+    num_cpus = int(mp.cpu_count()-6)
 
     engine_hlfx_pool = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/hlfx_pool?charset=utf8')
 

+ 4 - 4
QMT/qmt_real_hlfx.py

@@ -136,14 +136,14 @@ def hlfx(stock_list, data):
 
     # 获得hlfx_pool池子
     engine_hlfx_pool = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/hlfx_pool?charset=utf8',
-                                     pool_size=100, pool_recycle=3600, max_overflow=50, pool_timeout=60)
+                                     pool_size=100, pool_recycle=60, max_overflow=50, pool_timeout=60)
     results = []
     results.extend(pd.read_sql_query(text(
         'select value from `%s` order by `index` desc limit 10' % fre), engine_hlfx_pool.connect()).iloc[0, 0].split(","))
     # print(f'本次hlfx_pool有{len(results)}个个股')
 
     engine_stock = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_tech?charset=utf8',
-                                 pool_size=100, pool_recycle=3600, max_overflow=50, pool_timeout=60)
+                                 pool_size=100, pool_recycle=60, max_overflow=50, pool_timeout=60)
 
     for qmt_stock in stock_list:
         # 读取qmt_stocks_whole表-前复权-信息
@@ -357,8 +357,8 @@ if __name__ == '__main__':
     subscribe_result = xt_trader.subscribe(acc)
     print('对交易回调进行订阅,订阅后可以收到交易主推,返回0表示订阅成功', subscribe_result)
 
-    # job_func()
-    #
+    job_func()
+
     scheduler = BlockingScheduler()
     scheduler.add_job(func=job_func, trigger='cron', day_of_week='0-4', hour='09', minute='25',
                       timezone="Asia/Shanghai", max_instances=5)

+ 2 - 2
QMT/real_time.py

@@ -348,7 +348,7 @@ def buy_trader(data):
 
 
 def trader(data):
-    sell_trader(data)
+    # sell_trader(data)
     # 买入条件
     buy_trader(data)
 
@@ -376,7 +376,7 @@ if __name__ == '__main__':
     mp.freeze_support()
     print('cpu_count =', mp.cpu_count())
     pus = psutil.Process()
-    pus.cpu_affinity([12, 13, 14, 15, 16, 17])
+    pus.cpu_affinity([10, 11, 12, 13, 14, 15])
 
     # job_func()
 

+ 16 - 3
QMT/sell_real_time.py

@@ -23,6 +23,7 @@ acc = StockAccount('920000207040', 'SECURITY')
 # 生成session id 整数类型 同时运行的策略不能重复
 session_id = 123456
 xt_trader = None
+order_list = []
 engine_stock = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_whole?charset=utf8',
                              pool_size=5000, pool_recycle=50, max_overflow=-1)
 
@@ -192,6 +193,15 @@ def sell_trader(data):
     print(
         f'目前持仓总数为:{len([positions[x].stock_code for x in range(0, len(positions)) if positions[x].volume != 0])}')
 
+    # orders = xt_trader.query_stock_orders(acc, False)
+    # print(orders)
+    # if orders is not None:
+    #     for order_id in order_list:
+    #         cancel_result = xt_trader.cancel_order_stock(acc, order_id)
+    #         print(f'{order_id}撤单结果:{cancel_result}')
+    # else:
+    #     print(f'今日没有委托单!')
+
     for stock, can_use_volume in positions_dict.items():
         # if stock in data and can_use_volume != 0:
         if stock in data:
@@ -217,12 +227,14 @@ def sell_trader(data):
                                                  xtconstant.LATEST_PRICE, 0, 'MA5策略', '低于MA5趋势向下')
                 print('价格:', current_price, open_price, MA5, MA5_1, '低于MA5趋势向下')
                 print(order_id, stock, can_use_volume)
+                order_list.append(order_id)
             elif current_price > MA5 * 1.07:
                 print('盈利乖离率超7%!!!!!!', stock, current_price)
                 order_id = xt_trader.order_stock(acc, stock, xtconstant.STOCK_SELL, can_use_volume,
                                                  xtconstant.LATEST_PRICE, 0, 'MA5策略', '盈利乖离率超7%')
                 print('价格:', current_price, open_price, MA5, MA5_1, '盈利乖离率超7%')
                 print(order_id, stock, can_use_volume)
+                order_list.append(order_id)
         else:
             # print(f'本轮没有持仓股票信息!')
             pass
@@ -285,12 +297,13 @@ if __name__ == '__main__':
     mp.freeze_support()
     # print('cpu_count =', mp.cpu_count())
     pus = psutil.Process()
-    pus.cpu_affinity([16, 17, 18, 19])
+    # pus.cpu_affinity([16, 17, 18, 19])
+    print('sell real time start at', dt.now())
 
-    # job_func()
+    job_func()
 
     scheduler = BlockingScheduler()
-    scheduler.add_job(func=job_func, trigger='cron', day_of_week='0-4', hour='09', minute='30',
+    scheduler.add_job(func=job_func, trigger='cron', day_of_week='0-4', hour='09', minute='40',
                       timezone="Asia/Shanghai", max_instances=5)
     # scheduler.add_job(func=job_func, trigger='cron', day_of_week='0-4', hour='12', minute='35',
     #                   timezone="Asia/Shanghai")

+ 7 - 5
backtrader/230508_bt.py

@@ -152,8 +152,8 @@ class TestStrategy(bt.Strategy):
                 while True:
                     if (self.hl[m] == 2 or self.hl[m] == 1) and self.macd[m] > self.macd[-1] \
                             and 0.99 * self.sma10[m] < self.sma5[m] < 1.01 * self.sma10[m] \
-                            and 0.99 * self.sma10[m-1] < self.sma5[m-1] < 1.01 * self.sma10[m-1] \
-                            and 0.99 * self.sma10[m-2] < self.sma5[m-2] < 1.01 * self.sma10[m-2] \
+                            and 0.99 * self.sma10[m - 1] < self.sma5[m - 1] < 1.01 * self.sma10[m - 1] \
+                            and 0.99 * self.sma10[m - 2] < self.sma5[m - 2] < 1.01 * self.sma10[m - 2] \
                             and self.dataclose[0] > self.sma5[0] \
                             and self.dataclose[-1] > self.dataopen[-1] \
                             and self.volume[-1] > self.volume[-2] \
@@ -191,7 +191,8 @@ def to_df(lt):
     df.sort_values(by=['周期', '波动率', 'MA5斜率'], ascending=True, inplace=True)
     df = df.reset_index(drop=True)
     if platform.node() == 'DanieldeMBP.lan':
-        df.to_csv(f"/Users/daniel/Documents/策略/策略穷举-均线粘连后底分型{dt.now().strftime('%Y%m%d%H%m%S')}.csv", index=True,
+        df.to_csv(f"/Users/daniel/Documents/策略/策略穷举-均线粘连后底分型{dt.now().strftime('%Y%m%d%H%m%S')}.csv",
+                  index=True,
                   encoding='utf_8_sig', mode='w')
     else:
         df.to_csv(f"C:\Daniel\策略\策略穷举底分型_均线缠绕_只买一次{dt.now().strftime('%Y%m%d%H%m%S')}.csv", index=True,
@@ -280,14 +281,13 @@ def backtrader(list_date, table_list, result, result_change, result_change_fall,
                           np.nansum(result_change_fall), np.nanmean(result_change_fall),
                           np.nanmin(result_change_fall), np.nanmax(result_change_fall),
                           len(result_change) / len(result_change_fall)])
-        to_df(list_date)
+        # to_df(list_date)
         endtime = dt.now()
         print(f'{num}天波动率为{Volatility}%MA5斜率为{rate},myPID is {os.getpid()}.本轮耗时为{endtime - sttime}')
     else:
         print('阿欧', len(result), len(result_change), len(result_change_fall), num, Volatility, rate, err_list)
         list_date.append([num, Volatility, rate, 0, len(result) / len(table_list), len(result),
                           len(result), len(result), len(result), len(result), len(result), len(result), 0])
-    to_df(list_date)
     # cerebro.plot()
 
 
@@ -346,6 +346,8 @@ if __name__ == '__main__':
     pool.close()
     pool.join()
 
+    to_df(list_date)
+
     edtime = dt.now()
     print('总耗时:', edtime - starttime)
     # df.to_csv(r'C:\Users\Daniel\Documents\策略穷举2.csv', index=True)

+ 230 - 141
backtrader/230723 _bt.py

@@ -1,3 +1,5 @@
+import time
+
 import os
 import traceback
 import numpy as np
@@ -10,19 +12,29 @@ import datetime
 import math
 from datetime import datetime as dt
 import multiprocessing as mp
-from multiprocessing import Pool, Lock, Value
+from multiprocessing import Pool, Lock, Value, freeze_support
+import concurrent.futures
+import functools
+
 from backtrader.feeds import PandasData
 import platform
 import psutil
 import logging
 
-
-
-engine = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_tech?charset=utf8',
-                       max_overflow=-1)
 lock = Lock()
 counter = Value('i', 0)
 
+
+# engine = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_tech?charset=utf8',
+                       # pool_size=5000, max_overflow=200)
+
+# db_pool = pymysql.connect(host='localhost',
+#                           user='root',
+#                           port=3307,
+#                           password='r6kEwqWU9!v3',
+#                           database='qmt_stocks_tech')
+
+
 class MyPandasData(PandasData):
     lines = ('hl', 'dif', 'dea', 'macd', 'rsi_6', 'rsi_12', 'rsi_24',)
     params = (('hl', 7),
@@ -132,47 +144,36 @@ class TestStrategy(bt.Strategy):
         # self.log('OPERATION PROFIT, GROSS %.2f, NET %.2f' % (trade.pnl, trade.pnlcomm))
 
     def next(self):
-        # print(self.num,self.Volatility)
-        # Simply log the closing price of the series from the reference
-        # self.sma20[-2] < self.sma20[-1] < self.sma20[0] and self.sma10[-2] < self.sma10[-1] < self.sma10[0]
-        # and (self.sma5[-1] < self.sma10[-1])
-        # and (self.net_pct_l[0] > 10) and (self.net_pct_xl[0] > 3)  \
-        # and (self.net_amount_main[-1] > 0) and (self.net_amount_main[0] > 0)
-
-        if len(self) > self.params.num:
-            vola = self.params.Volatility / 100
-            rate = self.params.rate / 100
-            lowest = np.min(self.low.get(size=self.params.num))
-            highest = np.max(self.high.get(size=self.params.num))
-
-            # > self.sma5[-1]
-            # and (((lowest * (1 - vola)) < self.low[-2] < (lowest * (1 + vola))) or (
-            #         (lowest * (1 - vola)) < self.low[-1] < (lowest * (1 + vola)))) \
-            if self.hl[-1] == 2 or self.hl[-1] == 1:
-                m = -2
-                self.order = self.buy()
-                self.pos_price = self.low[-1]
-
-                while True:
-                    if (self.hl[m] == 2 or self.hl[m] == 1) and self.macd[m] > self.macd[-1] \
-                            and self.dataclose[0] > self.sma5[0] \
-                            and self.dataclose[-1] > self.dataopen[-1] \
-                            and (self.sma10[-2] - self.sma5[-2]) < (self.sma10[-1] - self.sma5[-1]) \
-                            and self.low[-2] < self.sma5[-2] * (1 - rate) \
-                            and self.sma5[-1] < self.sma10[-1] < self.sma20[-1] < self.sma20[-2] < self.sma20[-3] \
-                            and lowest * (1 - vola) < self.low[-1] < lowest * (1 + vola):
-                        self.order = self.buy()
-                        self.pos_price = self.low[-1]
-                        break
-                    m -= 1
-                    if m + len(self) == 2:
-                        break
-
-            # elif (self.hl[0] == 5 or self.dataclose[0] < self.sma5[0]):
-            elif self.dataclose[0] < self.sma5[0] or self.sma5[0] < self.sma5[-1] \
-                    or self.dataclose[0] < self.pos_price or self.high[0] > self.sma5[0] * (1 + vola):
-                self.order = self.close()
-                self.pos_price = 0
+        # if len(self) > self.params.num:
+        vola = self.params.Volatility / 100
+        rate = self.params.rate / 100
+        lowest = np.min(self.low.get(size=self.params.num))
+        highest = np.max(self.high.get(size=self.params.num))
+
+        if self.hl[-1] == 2 or self.hl[-1] == 1:
+            m = -2
+            # self.order = self.buy()
+            # self.pos_price = self.low[-1]
+            while True:
+                if (self.hl[m] == 2 or self.hl[m] == 1) and self.macd[m] > self.macd[-1] \
+                        and self.dataclose[0] > self.sma5[0] \
+                        and self.dataclose[-1] > self.dataopen[-1] \
+                        and (self.sma10[-2] - self.sma5[-2]) < (self.sma10[-1] - self.sma5[-1]) \
+                        and self.low[-2] < self.sma5[-2] * (1 - rate) \
+                        and self.sma5[-1] < self.sma10[-1] < self.sma20[-1] < self.sma20[-2] < self.sma20[-3] \
+                        and lowest * (1 - vola) < self.low[-1] < lowest * (1 + vola):
+                    self.order = self.buy()
+                    self.pos_price = self.low[-1]
+                    break
+                m -= 1
+                if m + len(self) == 2:
+                    break
+
+        # elif (self.hl[0] == 5 or self.dataclose[0] < self.sma5[0]):
+        elif self.dataclose[0] < self.sma5[0] or self.sma5[0] < self.sma5[-1] \
+                or self.dataclose[0] < self.pos_price or self.high[0] > self.sma5[0] * (1 + vola):
+            self.order = self.close()
+            self.pos_price = 0
 
     def stop(self):
         # pass
@@ -184,11 +185,11 @@ def err_call_back(err):
     traceback.format_exc(err)
 
 
-def to_df(lt):
+def to_df(df):
     print('开始存数据')
-    df = pd.DataFrame(list(lt),
-                      columns=['周期', '波动率', 'MA5斜率', '盈利个数', '盈利比例', '总盈利', '平均盈利', '最大盈利',
-                               '最小盈利', '总亏损', '平均亏损', '最大亏损', '最小亏损', '盈亏对比'])
+    # df = pd.DataFrame(list(lt),
+    #                   columns=['周期', '波动率', 'MA5斜率', '盈利个数', '盈利比例', '总盈利', '平均盈利', '最大盈利',
+    #                            '最小盈利', '总亏损', '平均亏损', '最大亏损', '最小亏损', '盈亏对比'])
     df.sort_values(by=['周期', '波动率', 'MA5斜率'], ascending=True, inplace=True)
     df = df.reset_index(drop=True)
     if platform.node() == 'DanieldeMBP.lan':
@@ -196,65 +197,91 @@ def to_df(lt):
                   index=True,
                   encoding='utf_8_sig', mode='w')
     else:
-        df.to_csv(f"C:\Daniel\策略\策略穷举底分型_均线缠绕_只买一次{dt.now().strftime('%Y%m%d%H%m%S')}.csv", index=True,
+        df.to_csv(f"C:\策略结果\策略穷举底分型_均线缠绕_只买一次{dt.now().strftime('%Y%m%d%H%m%S')}.csv", index=True,
                   encoding='utf_8_sig', mode='w')
     print(f'结果:, \n, {df}')
 
 
-def backtrader(table_list, stock, result, result_change, result_change_fall, num, Volatility, rate, err_list):
-    global engine, counter, lock
-    conn = engine.connect()
-    stk_df = pd.read_sql_table(stock, conn)
-    stk_df.time = pd.to_datetime(stk_df.time)
-    # stk_df = stk_df[stk_df['HL'] != '-']
+def backtrader(stock, result, result_change, result_change_fall, num, Volatility, rate, err_list):
+    # global engine
+    # global db_pool
+    global lock
+    sttime = dt.now()
+    engine = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_tech?charset=utf8',
+                           pool_size=10, max_overflow=20)
     try:
-        stk_df['HL'] = stk_df['HL'].map({'L': 1,
-                                         'LL': 2,
-                                         'L*': 3,
-                                         'H': 4,
-                                         'HH': 5,
-                                         'H*': 6,
-                                         '-': 7})
-    except BaseException:
-        print(f'{stock}数据不全,不做测试')
-    else:
+        # cursor = db_pool.cursor()
+        # sql_query = f"select * from `{stock}`"
+        # stk_df = pd.read_sql_query(sql_query, engine)
+        conn = engine.connect()
+        # with engine.connect() as conn:
+        stk_df = pd.read_sql_table(stock, conn)
+        stk_df.time = pd.to_datetime(stk_df.time)
         conn.close()
+        engine.dispose()
+        # stk_df = stk
+    except BaseException as e:
+        print(f'{stock}读取有问题', e)
+    else:
+        pass
+
+    try:
+        # stk_df = stk_df[stk_df['HL'] != '-']
+        try:
+            stk_df['HL'] = stk_df['HL'].map({'L': 1,
+                                             'LL': 2,
+                                             'L*': 3,
+                                             'H': 4,
+                                             'HH': 5,
+                                             'H*': 6,
+                                             '-': 7})
+        except BaseException:
+            print(f'{stock}数据不全,不做测试')
+    finally:
+        # print(f'{stock}读取通过')
+        pass
+
+    try:
         if len(stk_df) > 60:
-            cerebro = bt.Cerebro()
-            cerebro.addstrategy(TestStrategy, num=num, Volatility=Volatility, rate=rate)
-            cerebro.addsizer(bt.sizers.FixedSize, stake=10000)
-            data = MyPandasData(dataname=stk_df,
-                                fromdate=datetime.datetime(2017, 1, 1),
-                                todate=datetime.datetime(2022, 10, 30),
-                                datetime='time',
-                                open='open_back',
-                                close='close_back',
-                                high='high_back',
-                                low='low_back',
-                                volume='volume_back',
-                                hl='HL',
-                                dif='dif',
-                                dea='dea',
-                                macd='macd',
-                                rsi_6='rsi_6',
-                                rsi_12='rsi_12',
-                                rsi_24='rsi_24',
-                                )
-            # print('取值完成')
-            cerebro.adddata(data, name=stock)
-            cerebro.broker.setcash(100000.0)
-            cerebro.broker.setcommission(0.005)
-            cerebro.addanalyzer(bt.analyzers.PyFolio)
-            # 策略执行前的资金
-            # print('启动资金: %.2f' % cerebro.broker.getvalue())
             try:
+                cerebro = bt.Cerebro()
+                cerebro.addstrategy(TestStrategy, num=num, Volatility=Volatility, rate=rate)
+                cerebro.addsizer(bt.sizers.FixedSize, stake=10000)
+                data = MyPandasData(dataname=stk_df,
+                                    fromdate=datetime.datetime(2017, 1, 1),
+                                    todate=datetime.datetime(2022, 10, 30),
+                                    datetime='time',
+                                    open='open_back',
+                                    close='close_back',
+                                    high='high_back',
+                                    low='low_back',
+                                    volume='volume_back',
+                                    hl='HL',
+                                    dif='dif',
+                                    dea='dea',
+                                    macd='macd',
+                                    rsi_6='rsi_6',
+                                    rsi_12='rsi_12',
+                                    rsi_24='rsi_24',
+                                    )
+                # print('取值完成')
+                cerebro.adddata(data, name=stock)
+                cerebro.broker.setcash(100000.0)
+                cerebro.broker.setcommission(0.005)
+                cerebro.addanalyzer(bt.analyzers.PyFolio)
+                # 策略执行前的资金
+                # print('启动资金: %.2f' % cerebro.broker.getvalue())
+
                 # 策略执行
                 cerebro.run()
-            except IndexError as e:
+            except BaseException as e:
+                lock.acquire()
                 err_list.append(stock)
+                lock.release()
                 # print(f'{num}天波动率为{Volatility}%MA5斜率为{rate}的{stock}错误')
-                # print(e)
+                print(stock, 'cerebro错误', e)
             else:
+                lock.acquire()
                 if cerebro.broker.getvalue() > 100000.0:
                     result_change.append(cerebro.broker.getvalue() - 100000)
                     result.append(stock)
@@ -262,18 +289,29 @@ def backtrader(table_list, stock, result, result_change, result_change_fall, num
                     # print(result)
                 elif cerebro.broker.getvalue() <= 100000.0:
                     result_change_fall.append(cerebro.broker.getvalue() - 100000)
-                    # print('aaaaaaaaaaa')
-                    # print(result_change_fall)
-                # print('最终资金: %.2f' % cerebro.broker.getvalue())
-            finally:
-                with lock:
-                    counter.value += 1
-                logging.info('执行完成:(%d / %d) 进程号: %d --------------- %s', counter.value, len(table_list), os.getpid(), stock)
-
-        # print(f'已计算{counter.value}/{len(table_list)}只股票')
-    # print(f'已计算{(len(result) + len(result_change_fall)+len(err_list))}/{len(table_list)}只股票')
-    '''
-        if len(result) * len(result_change) * len(result_change_fall) != 0:
+                lock.release()
+        else:
+            lock.acquire()
+            err_list.append(stock)
+            lock.release()
+
+            # print('aaaaaaaaaaa')
+            # print(result_change_fall)
+            # print('最终资金: %.2f' % cerebro.broker.getvalue())
+            # finally:
+            #     with lock:
+            #         counter.value += 1
+            #     logging.info('执行完成:(%d / %d) 进程号: %d --------------- %s', counter.value, len(table_list), os.getpid(), stock)
+
+            # print(f'已计算{counter.value}/{len(table_list)}只股票')
+    except BaseException as e:
+        print(f'{stock}backtrader问题', e)
+    finally:
+        print(f'{stock}通过')
+        # print(f'已计算{(len(result) + len(result_change_fall)+len(err_list))}/{len(table_list)}只股票')
+
+    
+    if len(result) * len(result_change) * len(result_change_fall) != 0:
         print(f'以{num}内最低值波动{Volatility}为支撑、MA5斜率为{rate}%,结果状态为:')
         print('正盈利的个股为:', len(result), '成功率为:', len(result) / len(table_list))
         print(
@@ -294,7 +332,7 @@ def backtrader(table_list, stock, result, result_change, result_change_fall, num
         print('阿欧', len(result), len(result_change), len(result_change_fall), num, Volatility, rate, err_list)
         list_date.append([num, Volatility, rate, 0, len(result) / len(table_list), len(result),
                           len(result), len(result), len(result), len(result), len(result), len(result), 0])
-    '''
+
     # list_date.append([num, Volatility, rate, len(result), len(result) / len(table_list), np.nansum(result_change),
     #                   np.nanmean(result_change), np.nanmax(result_change), np.min(result_change),
     #                   np.nansum(result_change_fall), np.nanmean(result_change_fall),
@@ -306,12 +344,16 @@ def backtrader(table_list, stock, result, result_change, result_change_fall, num
 # df = pd.DataFrame(
 #     columns=['周期', '波动率', 'MA5斜率', '盈利个数', '盈利比例', '总盈利', '平均盈利', '最大盈利', '最小盈利', '总亏损',
 #              '平均亏损', '最大亏损', '最小亏损'])
+#
 if __name__ == '__main__':
+    freeze_support()
     logger = mp.log_to_stderr()
     logger.setLevel(logging.INFO)
     starttime = dt.now()
+
+
     print(starttime)
-    pus = psutil.Process()
+    # pus = psutil.Process()
 
     fre = '1d'
     db = pymysql.connect(host='localhost',
@@ -322,41 +364,63 @@ if __name__ == '__main__':
     cursor = db.cursor()
     cursor.execute("show tables like '%%%s%%' " % fre)
     table_list = [tuple[0] for tuple in cursor.fetchall()]
+    cursor.close()
+    db.close()
+
     # print(table_list)
     # table_list = table_list[0:500]
     print(f'计算个股数为:{len(table_list)}')
 
     list_date = []
-    thread_list = []
-    pool = mp.Pool(processes=mp.cpu_count())
-    # pool = mp.Pool(processes=8)
+    pddate = pd.DataFrame(columns=['周期', '波动率', 'MA5斜率', '盈利个数', '盈利比例', '总盈利',
+                                   '平均盈利', '最大盈利', '最小盈利', '总亏损', '平均亏损',
+                                   '最大亏损',
+                                   '最小亏损', '盈亏对比'])
+
+    engine = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_tech?charset=utf8',
+                           pool_size=10, max_overflow=20)
+
+    stk_df = pd.read_sql_table(table_list[0], engine)
+    engine.dispose()
+    print(stk_df)
+
     for num in range(60, 80, 20):
-        for Volatility in range(7, 8, 1):
-            for rate in range(3, 4, 1):
-                # step = math.ceil(len(table_list) / mp.cpu_count())
+        for Volatility in range(7, 12, 1):
+            for rate in range(3, 13, 1):
+                stattime = dt.now().strftime('%Y-%m-%d %H:%M:%S')
+                print(stattime)
+                # pool = mp.Pool()
                 result = mp.Manager().list()
                 result_change = mp.Manager().list()
                 result_change_fall = mp.Manager().list()
                 err_list = mp.Manager().list()
-                print(f'{num}天波动率为{Volatility}%MA5斜率为{rate}')
-                # for i in range(0, len(table_list), step):
-                stattime = dt.now()
+
+                print(os.getpid())
+                print(num, Volatility, rate, result, result_change, result_change_fall, err_list)
+
                 # 保存AsyncResult对象的列表
                 async_results = []
-                # thd = threading.local()
-                # print(i)
-                # p = mp.Process(target=backtrader, args=(df, table_list, result, result_change, result_change_fall,
-                #                                         num, Volatility, rate, err_list))
-                # thread_list.append(p)
-                for stock in table_list:
-                    async_result = pool.apply_async(func=backtrader,
-                                                    args=(table_list, stock, result, result_change, result_change_fall,
-                                                          num, Volatility, rate, err_list,),
-                                                    error_callback=err_call_back)
-                    async_results.append(async_result)
-                # p.start()
-                pool.close()
-                pool.join()
+                partial_func_list = []
+
+                m = 0
+                try:
+                    pool = mp.Pool(processes=8)
+                    for stock in table_list:
+                        async_result = pool.apply_async(func=backtrader,
+                                                        args=(
+                                                              stock, result, result_change,
+                                                              result_change_fall,
+                                                              num, Volatility, rate, err_list,),
+                                                        error_callback=err_call_back)
+                        m += 1
+                        async_results.append(async_result)
+                            # p.start()
+                        pool.close()
+                        time.sleep(1)
+                        pool.join()
+                except BaseException as e:
+                    print(f'进程池报错{e}')
+                print(f'共有{m}只股票')
 
                 # 统计返回为 None 的结果数量
                 none_count = 0
@@ -364,17 +428,42 @@ if __name__ == '__main__':
                     _ = result_async.get()  # 获取任务的结果
                     if _ is None:
                         none_count += 1
+                print(f'{num}天波动率为{Volatility}%MA5斜率为{rate}')
+                print(f"正确计算的有{none_count},错误的有{len(err_list)},共计算{len(async_results)}只股票")
+                '''
+                list_date = [num, Volatility, rate, len(result), len(result) / len(table_list),
+                             np.nansum(result_change),
+                             np.nanmean(result_change), np.nanmax(result_change), np.min(result_change),
+                             np.nansum(result_change_fall), np.nanmean(result_change_fall),
+                             np.nanmin(result_change_fall), np.nanmax(result_change_fall),
+                             len(result_change) / len(result_change_fall)]
+                ld = pd.Series(list_date, index=['周期', '波动率', 'MA5斜率', '盈利个数', '盈利比例', '总盈利',
+                                                 '平均盈利', '最大盈利', '最小盈利', '总亏损', '平均亏损',
+                                                 '最大亏损', '最小亏损', '盈亏对比'])
+                pddate = pd.concat([pddate, ld.to_frame().T], ignore_index=True)
                 print(f'计算总数={len(result) + len(result_change_fall)}\n计数为:{none_count}')
-                list_date.append(
-                    [num, Volatility, rate, len(result), len(result) / len(table_list), np.nansum(result_change),
-                     np.nanmean(result_change), np.nanmax(result_change), np.min(result_change),
-                     np.nansum(result_change_fall), np.nanmean(result_change_fall),
-                     np.nanmin(result_change_fall), np.nanmax(result_change_fall),
-                     len(result_change) / len(result_change_fall)])
-                print(list_date)
 
-    # to_df(list_date)
+                print(pddate)
+                to_df(pddate)
+                
+                # time.sleep(1)
+                '''
 
+    # to_df(list_date)
+    print(pddate)
+    to_df(pddate)
     edtime = dt.now()
     print('总耗时:', edtime - starttime)
+
+    # with concurrent.futures.ProcessPoolExecutor() as executor:
+    #     for stock_code in table_list:
+    #         partial_func = functools.partial(backtrader, table_list, stock_code, result, result_change,
+    #                                          result_change_fall, num, Volatility, rate, err_list)
+    #         partial_func_list.append(partial_func)
+    #         executor.submit(partial_func)
+    # executor.submit(backtrader, table_list, stock_code, result, result_change,
+    #                 result_change_fall, num, Volatility, rate, err_list,)
+
+    # print(pool)
+
     # df.to_csv(r'C:\Users\Daniel\Documents\策略穷举2.csv', index=True)

+ 361 - 0
backtrader/230815_bt.py

@@ -0,0 +1,361 @@
+# coding:utf-8
+import time
+from multiprocessing import freeze_support, Value, Lock
+import backtrader as bt
+from backtrader.feeds import PandasData
+import backtrader.indicators as btind
+from sqlalchemy import create_engine, text
+import pymysql
+from tqdm import tqdm
+import concurrent.futures
+import numpy as np
+import pandas as pd
+import platform
+import datetime
+from datetime import datetime as dt
+from itertools import product
+import psutil
+import logging
+import multiprocessing as mp
+from itertools import islice
+
+from func_timeout import func_set_timeout, FunctionTimedOut
+from functools import partial
+
+
+
+class MyPandasData(PandasData):
+    # Extends backtrader's PandasData feed with extra indicator lines read
+    # from the prepared DataFrame: the fractal marker (hl), the MACD parts
+    # (dif/dea/macd) and three RSI periods.
+    lines = ('hl', 'dif', 'dea', 'macd', 'rsi_6', 'rsi_12', 'rsi_24',)
+    # Default column positions for the extra lines; the instantiation site
+    # overrides these with explicit column names (hl='HL', dif='dif', ...).
+    params = (('hl', 7),
+              ('dif', 8),
+              ('dea', 9),
+              ('macd', 10),
+              ('rsi_6', 11),
+              ('rsi_12', 12),
+              ('rsi_24', 13),
+              )
+
+
+
+class TestStrategy(bt.Strategy):
+    """Bottom-fractal buy strategy driven by the parameter sweep.
+
+    Buys when a low fractal (hl code 1 or 2) appears near the lowest low of
+    the last `num` bars while the short MAs are converging below a falling
+    MA20; exits when the close drops under MA5, MA5 turns down, the close
+    falls below the entry low, or the high overshoots MA5 by the band.
+    """
+    # Sweep-tunable parameters (overridden via cerebro.addstrategy kwargs).
+    params = (
+        ("num", 3),          # lookback window, in bars
+        ('Volatility', 0),   # volatility band, in percent
+        ('rate', 3),  # MA5 slope threshold, in percent (note: trailing comma required)
+    )
+
+    def log(self, txt, dt=None):
+        # Record a strategy log line; actual printing is disabled.
+        dt = dt or self.datas[0].datetime.date(0)
+        # print('%s, %s' % (dt.isoformat(), txt))
+
+    def __init__(self):
+        # Cache the data-feed lines once and build the moving averages.
+        try:
+            self.pos_price = 0
+            self.dataclose = self.datas[0].close
+            self.dataopen = self.datas[0].open
+            self.high = self.datas[0].high
+            self.low = self.datas[0].low
+            self.volume = self.datas[0].volume
+            self.hl = self.datas[0].hl
+            self.dif = self.datas[0].dif
+            self.dea = self.datas[0].dea
+            self.macd = self.datas[0].macd
+            self.rsi_6 = self.datas[0].rsi_6
+            self.rsi_12 = self.datas[0].rsi_12
+            self.rsi_24 = self.datas[0].rsi_24
+            self.sma5 = btind.MovingAverageSimple(self.datas[0].close, period=5)
+            self.sma10 = btind.MovingAverageSimple(self.datas[0].close, period=10)
+            self.sma20 = btind.MovingAverageSimple(self.datas[0].close, period=20)
+            self.sma60 = btind.MovingAverageSimple(self.datas[0].close, period=60)
+        except BaseException as e:
+            # NOTE(review): swallowing BaseException hides setup errors;
+            # consider letting exceptions propagate.
+            print(f'初始化错误{e}')
+
+    def notify_order(self, order):
+        """
+        Handle order status notifications.
+
+        Arguments:
+            order {object} -- the order whose status changed
+        """
+        if order.status in [order.Submitted, order.Accepted]:
+            # Nothing to do while the order is merely submitted/accepted.
+            return
+
+        # Check whether the order completed.
+        if order.status in [order.Completed]:
+            if order.isbuy():
+                self.buyprice = order.executed.price
+                self.buycomm = order.executed.comm
+            self.bar_executed = len(self)
+
+        # Order was cancelled / rejected (e.g. insufficient funds).
+        elif order.status in [order.Canceled, order.Margin, order.Rejected]:
+            pass
+            # self.log('Order Canceled/Margin/Rejected')
+
+        # Done processing; clear the pending-order reference.
+        self.order = None
+
+    def notify_trade(self, trade):
+        """
+        Handle trade (round-trip) notifications.
+
+        Arguments:
+            trade {object} -- the trade whose status changed
+        """
+        if not trade.isclosed:
+            return
+
+        # Gross / net P&L logging is disabled.
+        # self.log('OPERATION PROFIT, GROSS %.2f, NET %.2f' % (trade.pnl, trade.pnlcomm))
+
+    def next(self):
+        # Only trade once enough bars exist for the lookback window.
+        if len(self) > self.params.num:
+            vola = self.params.Volatility / 100
+            rate = self.params.rate / 100
+            lowest = np.min(self.low.get(size=self.params.num))
+            # NOTE(review): `highest` is computed but never used below.
+            highest = np.max(self.high.get(size=self.params.num))
+
+            # hl codes 1/2 mark low fractals (see the HL mapping in the
+            # feed-preparation step).
+            if self.hl[-1] == 2 or self.hl[-1] == 1:
+                m = -2
+                # self.order = self.buy()
+                # self.pos_price = self.low[-1]
+                # Scan backwards for an earlier low fractal that confirms
+                # the entry conditions.
+                while True:
+                    if (self.hl[m] == 2 or self.hl[m] == 1) and self.macd[m] > self.macd[-1] \
+                            and self.dataclose[0] > self.sma5[0] \
+                            and self.dataclose[-1] > self.dataopen[-1] \
+                            and (self.sma10[-2] - self.sma5[-2]) < (self.sma10[-1] - self.sma5[-1]) \
+                            and self.low[-2] < self.sma5[-2] * (1 - rate) \
+                            and self.sma5[-1] < self.sma10[-1] < self.sma20[-1] < self.sma20[-2] < self.sma20[-3] \
+                            and lowest * (1 - vola) < self.low[-1] < lowest * (1 + vola):
+                        self.order = self.buy()
+                        self.pos_price = self.low[-1]
+                        break
+                    m -= 1
+                    # Stop once the backward scan reaches the series start.
+                    if m + len(self) == 2:
+                        break
+
+            # elif (self.hl[0] == 5 or self.dataclose[0] < self.sma5[0]):
+            # Exit conditions: close under MA5, MA5 turning down, close below
+            # entry low, or high overshooting MA5 by the volatility band.
+            elif self.dataclose[0] < self.sma5[0] or self.sma5[0] < self.sma5[-1] \
+                    or self.dataclose[0] < self.pos_price or self.high[0] > self.sma5[0] * (1 + vola):
+                self.order = self.close()
+                self.pos_price = 0
+
+    def stop(self):
+        # pass
+        # Final portfolio value at the end of the run (log printing disabled).
+        self.log(u'(MA趋势交易效果) Ending Value %.2f' % (self.broker.getvalue()))
+
+
+def to_df(df):
+    """Sort the sweep-result DataFrame and write it to a timestamped CSV.
+
+    The output path depends on the host (macOS dev machine vs. Windows box).
+    NOTE(review): the strftime format '%Y%m%d%H%m%S' repeats the month where
+    minutes ('%M') were almost certainly intended — both branches affected.
+    """
+    print('开始存数据')
+    df.sort_values(by=['周期', '波动率', 'MA5斜率'], ascending=True, inplace=True)
+    df = df.reset_index(drop=True)
+    if platform.node() == 'DanieldeMBP.lan':
+        df.to_csv(f"/Users/daniel/Documents/策略/策略穷举-均线粘连后底分型{dt.now().strftime('%Y%m%d%H%m%S')}.csv",
+                  index=True,
+                  encoding='utf_8_sig', mode='w')
+    else:
+        # NOTE(review): the Windows path is not a raw string; it works only
+        # because '\策' is not a recognized escape — confirm and prefer r"...".
+        df.to_csv(f"C:\策略结果\策略穷举底分型_均线缠绕_只买一次{dt.now().strftime('%Y%m%d%H%m%S')}.csv", index=True,
+                  encoding='utf_8_sig', mode='w')
+    print(f'结果:, \n, {df}')
+
+
+def chunked_iterable(iterable, size):
+    """Yield successive tuples of at most `size` items from `iterable`."""
+    it = iter(iterable)
+    while True:
+        chunk = tuple(islice(it, size))
+        if not chunk:
+            return
+        yield chunk
+
+
+def query_database(table_name):
+    # Read one full table into a DataFrame. A fresh engine is created per
+    # call — presumably because this runs inside worker processes (confirm);
+    # the engine is never disposed, so connection cleanup relies on GC.
+    # NOTE(review): database credentials are hard-coded; move to config/env.
+    engine = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_tech?charset=utf8')
+    df = pd.read_sql_table(table_name, engine)
+    return df
+
+
+def get_stock_data():
+    """Load every daily ('1d') table from MySQL into {table_name: DataFrame}.
+
+    Retries the entire read on any failure and returns only after a fully
+    successful pass. NOTE(review): a persistent error (e.g. bad credentials)
+    makes this loop forever.
+    """
+    while True:
+        try:
+            db = pymysql.connect(host='localhost',
+                                 user='root',
+                                 port=3307,
+                                 password='r6kEwqWU9!v3',
+                                 database='qmt_stocks_tech')
+            cursor = db.cursor()
+            cursor.execute("show tables like '%%%s%%' " % '1d')
+            # NOTE(review): the loop variable shadows the builtin `tuple`.
+            table_list = [tuple[0] for tuple in cursor.fetchall()]
+            # table_list = table_list[0: 10]
+            cursor.close()
+            db.close()
+            print(f'开始数据库读取')
+            with concurrent.futures.ProcessPoolExecutor(max_workers=16) as executor:
+                # Query all tables in parallel worker processes and collect
+                # the per-table DataFrames into a dict keyed by table name.
+                data_dict = {table_name: df for table_name, df in
+                             tqdm(zip(table_list, executor.map(query_database, table_list)))}
+            print(f'数据库读取完成')
+            break
+        except BaseException as e:
+            print(f'数据库读取错误{e}')
+            continue
+    return data_dict
+
+
+def backtrader_test(stock_data, stock_name, num, vot, rate):
+    """Backtest one stock for one (num, vot, rate) parameter combination.
+
+    Returns the profit/loss versus the fixed 100,000 starting cash, or NaN
+    when the run raised. Mutates `stock_data` in place (time parsing, HL
+    mapping) — safe only if each worker receives its own pickled copy;
+    confirm against the caller.
+    """
+    # print(f'开始回测{stock_name}')
+    try:
+        cerebro = bt.Cerebro()
+        stock_data.time = pd.to_datetime(stock_data.time)
+        # Encode the fractal labels as the numeric codes TestStrategy reads.
+        stock_data['HL'] = stock_data['HL'].map({'L': 1,
+                                                 'LL': 2,
+                                                 'L*': 3,
+                                                 'H': 4,
+                                                 'HH': 5,
+                                                 'H*': 6,
+                                                 '-': 7})
+        cerebro.addstrategy(TestStrategy, num=num, Volatility=vot, rate=rate)
+        data = MyPandasData(dataname=stock_data,
+                            fromdate=datetime.datetime(2017, 1, 1),
+                            todate=datetime.datetime(2022, 10, 30),
+                            datetime='time',
+                            open='open_back',
+                            close='close_back',
+                            high='high_back',
+                            low='low_back',
+                            volume='volume_back',
+                            hl='HL',
+                            dif='dif',
+                            dea='dea',
+                            macd='macd',
+                            rsi_6='rsi_6',
+                            rsi_12='rsi_12',
+                            rsi_24='rsi_24',
+                            )
+        cerebro.adddata(data)
+        # NOTE(review): TestStrategy is added a SECOND time here with default
+        # parameters, so every run executes two strategy instances and the
+        # broker result mixes both — the duplicate addstrategy looks like a
+        # bug; this line should be removed.
+        cerebro.addstrategy(TestStrategy)
+        cerebro.broker.setcash(100000.0)
+        cerebro.addsizer(bt.sizers.FixedSize, stake=10000)
+        cerebro.broker.setcommission(commission=0.001)
+        cerebro.run()
+    except  BaseException as e:
+        print(f'{stock_name}回测错误{e}')
+        return np.nan
+    # print(cerebro.broker.getvalue() - 100000.0)
+    # print(stock_name)
+    else:
+        return cerebro.broker.getvalue() - 100000.0
+
+
+def bbt(stock_data_dict, num, Volatility, rate, err_list):
+    """Fan out backtrader_test over every stock for one parameter combo.
+
+    Returns the per-stock P&L list. NOTE(review): `err_list`, CHUNK_SIZE,
+    timeout and max_retries are never used; `results` is only bound inside
+    the try, so a pool failure makes the prints below raise NameError; and
+    `max_tasks_per_child` requires Python 3.11+.
+    """
+    async_results = []
+    try:
+        # Per-round task sizing (currently unused).
+        CHUNK_SIZE = 200  # adjust as needed
+        timeout = 120
+        max_retries = 3
+        with concurrent.futures.ProcessPoolExecutor(max_workers=16, max_tasks_per_child=20000) as inner_executor:
+            # Map the backtests across worker processes for this combo.
+            results = [result for result in tqdm(
+                inner_executor.map(backtrader_test, stock_data_dict.values(), stock_data_dict.keys(), [num] * len(stock_data_dict),
+                                   [Volatility] * len(stock_data_dict), [rate] * len(stock_data_dict)), desc='单轮计算进度')]
+
+
+    except BaseException as e:
+        print(f'计算错误{e}')
+
+    # print(f'{num},{Volatility},{rate}计算完成,共计算{len(async_results)}个股票')
+    # outputs = [result.get() for result in async_results]
+    print(f'{num},{Volatility},{rate}计算完成,共计算{len(results)}个股票')
+    print(f'计算结果{results}')
+    return results
+
+
+def tdf(tt, num, Volatility, rate):
+    """Summarize one round's P&L list `tt` into a stats Series.
+
+    NOTE(review): raises ZeroDivisionError when every entry is NaN (empty
+    filtered_result) and ValueError from max()/min() when `tt` is empty;
+    NaN entries in `tt` can also make max(tt)/min(tt) unreliable.
+    """
+    num_nan = np.isnan(tt).sum()  # Count NaN values
+    print(f'num_nan={num_nan}')
+
+    filtered_result = [r for r in tt if not np.isnan(r)]  # Filter out NaN values
+    print(f'filtered_result={filtered_result}')
+
+    # Calculate statistics (NaN compares False against 0, so the r > 0 /
+    # r < 0 list comprehensions implicitly skip NaN entries).
+    num_profits = len([r for r in tt if r > 0])
+    num_losses = len([r for r in tt if r < 0])
+    profit_ratio = num_profits / (len(filtered_result))
+    total_profit = sum([r for r in tt if r > 0])
+    avg_profit = total_profit / num_profits if num_profits else 0
+    max_profit = max(tt)
+    min_profit = min([r for r in tt if r > 0]) if num_profits else 0
+    total_loss = sum([r for r in tt if r < 0])
+    avg_loss = total_loss / num_losses if num_losses else 0
+    max_loss = min(tt)
+    min_loss = max([r for r in tt if r < 0]) if num_losses else 0
+    # Append the results into the DataFrame
+    result_dict = {'周期': num, '波动率': Volatility, 'MA5斜率': rate, '盈利个数': num_profits,
+                   '盈利比例': profit_ratio, '总盈利': total_profit, '平均盈利': avg_profit,
+                   '最大盈利': max_profit, '最小盈利': min_profit, '总亏损': total_loss,
+                   '平均亏损': avg_loss, '最大亏损': max_loss, '最小亏损': min_loss, '未计算个股数': num_nan}
+    df_t = pd.Series(result_dict)
+    return df_t
+
+
+if __name__ == '__main__':
+    # Entry point: sweep (num, Volatility, rate) combinations, backtest every
+    # stock per combo in a process pool, and save the summarized results.
+    logger = mp.log_to_stderr()
+    logger.setLevel(logging.DEBUG)
+    # cpu_list = list(range(23))
+    # pus = psutil.Process()
+    # pus.cpu_affinity(cpu_list)
+    start_time = dt.now()
+
+    # Parameter values to sweep.
+    nums = range(60, 120, 20)
+    Volatilitys = range(5, 13, 1)
+    rates = range(3, 8, 1)
+    # All parameter combinations.
+    all_combinations = list(product(nums, Volatilitys, rates))
+    print(f'共需计算{len(all_combinations)}次')
+
+    # Load every stock table up-front (one pass, reused for all combos).
+    stock_data_dict = get_stock_data()
+    results = []
+
+    df = pd.DataFrame(
+        columns=['周期', '波动率', 'MA5斜率', '盈利个数', '盈利比例', '总盈利', '平均盈利', '最大盈利', '最小盈利',
+                 '总亏损',
+                 '平均亏损', '最大亏损', '最小亏损', '未计算个股数'])
+
+
+    err_list = []
+
+    # Per-round task sizing (currently unused).
+    CHUNK_SIZE = 200  # adjust as needed
+    timeout = 120
+    max_retries = 3
+    with concurrent.futures.ProcessPoolExecutor(max_workers=24) as inner_executor:
+        for num, Volatility, rate in tqdm(all_combinations, desc='计算进度'):
+            # Retry the current combo until its map() completes.
+            while True:
+                try:
+                    # Map the backtests across worker processes for this combo.
+                    res = [result for result in tqdm(
+                        inner_executor.map(backtrader_test, stock_data_dict.values(), stock_data_dict.keys(),
+                                           [num] * len(stock_data_dict),
+                                           [Volatility] * len(stock_data_dict), [rate] * len(stock_data_dict)),
+                        desc='单轮计算进度')]
+
+                except BaseException as e:
+                    print(f'计算错误{e}')
+                    # NOTE(review): rebinding `inner_executor` does not affect
+                    # the `with` statement — the original executor is the one
+                    # shut down at exit, and this replacement (with a smaller
+                    # worker count) is never shut down explicitly.
+                    inner_executor = concurrent.futures.ProcessPoolExecutor(max_workers=16)
+                else:
+                    # Success: record and summarize this combo's results.
+                    results.append(res)
+                    df_t = tdf(res, num, Volatility, rate)
+                    df = pd.concat([df, df_t.to_frame().T], ignore_index=True)
+                    break
+            time.sleep(1)
+            print(f'{num},{Volatility},{rate}计算完成,共计算{len(res)}个股票')
+            print(df)
+        print('循环结束')
+        to_df(df)
+        print(f'计算完成,共耗时{dt.now() - start_time}秒')
+

+ 361 - 0
backtrader/230822_bt.py

@@ -0,0 +1,361 @@
+# coding:utf-8
+import time
+from multiprocessing import freeze_support, Value, Lock
+import backtrader as bt
+from backtrader.feeds import PandasData
+import backtrader.indicators as btind
+from sqlalchemy import create_engine, text
+import pymysql
+from tqdm import tqdm
+import concurrent.futures
+import numpy as np
+import pandas as pd
+import platform
+import datetime
+from datetime import datetime as dt
+from itertools import product
+import psutil
+import logging
+import multiprocessing as mp
+from itertools import islice
+
+from func_timeout import func_set_timeout, FunctionTimedOut
+from functools import partial
+
+
+
+class MyPandasData(PandasData):
+    # PandasData feed extended with the indicator columns produced upstream:
+    # fractal marker (hl), MACD components (dif/dea/macd) and three RSIs.
+    lines = ('hl', 'dif', 'dea', 'macd', 'rsi_6', 'rsi_12', 'rsi_24',)
+    # Default column indices; overridden with column names at construction.
+    params = (('hl', 7),
+              ('dif', 8),
+              ('dea', 9),
+              ('macd', 10),
+              ('rsi_6', 11),
+              ('rsi_12', 12),
+              ('rsi_24', 13),
+              )
+
+
+
+class TestStrategy(bt.Strategy):
+    """Bottom-fractal buy strategy (variant used by this script's sweep).
+
+    Entry requires a recent low fractal (hl code 1 or 2) near the lowest low
+    of the last `num` bars, with the earlier fractal's MACD BELOW the latest
+    and the MA10-MA5 gap NARROWING — note these comparisons are the opposite
+    direction of the sibling strategy variant in this repository.
+    """
+    # Sweep-tunable parameters (overridden via cerebro.addstrategy kwargs).
+    params = (
+        ("num", 3),          # lookback window, in bars
+        ('Volatility', 0),   # volatility band, in percent
+        ('rate', 3),  # MA5 slope threshold, in percent (note: trailing comma required)
+    )
+
+    def log(self, txt, dt=None):
+        # Record a strategy log line; actual printing is disabled.
+        dt = dt or self.datas[0].datetime.date(0)
+        # print('%s, %s' % (dt.isoformat(), txt))
+
+    def __init__(self):
+        # Cache the data-feed lines once and build the moving averages.
+        try:
+            self.pos_price = 0
+            self.dataclose = self.datas[0].close
+            self.dataopen = self.datas[0].open
+            self.high = self.datas[0].high
+            self.low = self.datas[0].low
+            self.volume = self.datas[0].volume
+            self.hl = self.datas[0].hl
+            self.dif = self.datas[0].dif
+            self.dea = self.datas[0].dea
+            self.macd = self.datas[0].macd
+            self.rsi_6 = self.datas[0].rsi_6
+            self.rsi_12 = self.datas[0].rsi_12
+            self.rsi_24 = self.datas[0].rsi_24
+            self.sma5 = btind.MovingAverageSimple(self.datas[0].close, period=5)
+            self.sma10 = btind.MovingAverageSimple(self.datas[0].close, period=10)
+            self.sma20 = btind.MovingAverageSimple(self.datas[0].close, period=20)
+            self.sma60 = btind.MovingAverageSimple(self.datas[0].close, period=60)
+        except BaseException as e:
+            # NOTE(review): swallowing BaseException hides setup errors.
+            print(f'初始化错误{e}')
+
+    def notify_order(self, order):
+        """
+        Handle order status notifications.
+
+        Arguments:
+            order {object} -- the order whose status changed
+        """
+        if order.status in [order.Submitted, order.Accepted]:
+            # Nothing to do while the order is merely submitted/accepted.
+            return
+
+        # Check whether the order completed.
+        if order.status in [order.Completed]:
+            if order.isbuy():
+                self.buyprice = order.executed.price
+                self.buycomm = order.executed.comm
+            self.bar_executed = len(self)
+
+        # Order was cancelled / rejected (e.g. insufficient funds).
+        elif order.status in [order.Canceled, order.Margin, order.Rejected]:
+            pass
+            # self.log('Order Canceled/Margin/Rejected')
+
+        # Done processing; clear the pending-order reference.
+        self.order = None
+
+    def notify_trade(self, trade):
+        """
+        Handle trade (round-trip) notifications.
+
+        Arguments:
+            trade {object} -- the trade whose status changed
+        """
+        if not trade.isclosed:
+            return
+
+        # Gross / net P&L logging is disabled.
+        # self.log('OPERATION PROFIT, GROSS %.2f, NET %.2f' % (trade.pnl, trade.pnlcomm))
+
+    def next(self):
+        # Only trade once enough bars exist for the lookback window.
+        if len(self) > self.params.num:
+            vola = self.params.Volatility / 100
+            rate = self.params.rate / 100
+            lowest = np.min(self.low.get(size=self.params.num))
+            # NOTE(review): `highest` is computed but never used below.
+            highest = np.max(self.high.get(size=self.params.num))
+
+            # hl codes 1/2 mark low fractals (see the HL mapping in the
+            # feed-preparation step).
+            if self.hl[-1] == 2 or self.hl[-1] == 1:
+                m = -2
+                # self.order = self.buy()
+                # self.pos_price = self.low[-1]
+                # Scan backwards for an earlier low fractal confirming entry.
+                while True:
+                    if (self.hl[m] == 2 or self.hl[m] == 1) and self.macd[m] < self.macd[-1] \
+                            and self.dataclose[0] > self.sma5[0] \
+                            and self.dataclose[-1] > self.dataopen[-1] \
+                            and (self.sma10[-2] - self.sma5[-2]) > (self.sma10[-1] - self.sma5[-1]) \
+                            and self.low[-2] < self.sma5[-2] * (1 - rate) \
+                            and self.sma5[-1] < self.sma10[-1] < self.sma20[-1] < self.sma20[-2] < self.sma20[-3] \
+                            and lowest * (1 - vola) < self.low[-1] < lowest * (1 + vola):
+                        self.order = self.buy()
+                        self.pos_price = self.low[-1]
+                        break
+                    m -= 1
+                    # Stop once the backward scan reaches the series start.
+                    if m + len(self) == 2:
+                        break
+
+            # elif (self.hl[0] == 5 or self.dataclose[0] < self.sma5[0]):
+            # Exit: close under MA5, MA5 turning down, close below entry low,
+            # or high overshooting MA5 by the volatility band.
+            elif self.dataclose[0] < self.sma5[0] or self.sma5[0] < self.sma5[-1] \
+                    or self.dataclose[0] < self.pos_price or self.high[0] > self.sma5[0] * (1 + vola):
+                self.order = self.close()
+                self.pos_price = 0
+
+    def stop(self):
+        # pass
+        # Final portfolio value at the end of the run (log printing disabled).
+        self.log(u'(MA趋势交易效果) Ending Value %.2f' % (self.broker.getvalue()))
+
+
+def to_df(df):
+    """Sort the sweep results and write them to a host-dependent, timestamped
+    CSV file.
+
+    NOTE(review): '%Y%m%d%H%m%S' uses the month code twice — '%M' (minutes)
+    was almost certainly intended in both branches.
+    """
+    print('开始存数据')
+    df.sort_values(by=['周期', '波动率', 'MA5斜率'], ascending=True, inplace=True)
+    df = df.reset_index(drop=True)
+    if platform.node() == 'DanieldeMBP.lan':
+        df.to_csv(f"/Users/daniel/Documents/策略/策略穷举-均线粘连后底分型{dt.now().strftime('%Y%m%d%H%m%S')}.csv",
+                  index=True,
+                  encoding='utf_8_sig', mode='w')
+    else:
+        # NOTE(review): non-raw Windows path relies on '\策' not being an
+        # escape sequence — prefer a raw string.
+        df.to_csv(f"C:\策略结果\策略穷举底分型_均线缠绕_只买一次{dt.now().strftime('%Y%m%d%H%m%S')}.csv", index=True,
+                  encoding='utf_8_sig', mode='w')
+    print(f'结果:, \n, {df}')
+
+
+def chunked_iterable(iterable, size):
+    """Generator yielding tuples of up to `size` consecutive items."""
+    it = iter(iterable)
+    while True:
+        chunk = tuple(islice(it, size))
+        if not chunk:
+            return
+        yield chunk
+
+
+def query_database(table_name):
+    # Load one table into a DataFrame; creates a throwaway engine per call
+    # (never disposed — cleanup relies on GC). NOTE(review): hard-coded
+    # credentials should be moved to config/env.
+    engine = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_tech?charset=utf8')
+    df = pd.read_sql_table(table_name, engine)
+    return df
+
+
+def get_stock_data():
+    """Read all '1d' tables in parallel into {table_name: DataFrame}.
+
+    NOTE(review): retries unconditionally on any failure, so a persistent
+    error loops forever.
+    """
+    while True:
+        try:
+            db = pymysql.connect(host='localhost',
+                                 user='root',
+                                 port=3307,
+                                 password='r6kEwqWU9!v3',
+                                 database='qmt_stocks_tech')
+            cursor = db.cursor()
+            cursor.execute("show tables like '%%%s%%' " % '1d')
+            # NOTE(review): the loop variable shadows the builtin `tuple`.
+            table_list = [tuple[0] for tuple in cursor.fetchall()]
+            # table_list = table_list[0: 10]
+            cursor.close()
+            db.close()
+            print(f'开始数据库读取')
+            with concurrent.futures.ProcessPoolExecutor(max_workers=16) as executor:
+                # Fetch each table in a worker process; collect into a dict.
+                data_dict = {table_name: df for table_name, df in
+                             tqdm(zip(table_list, executor.map(query_database, table_list)))}
+            print(f'数据库读取完成')
+            break
+        except BaseException as e:
+            print(f'数据库读取错误{e}')
+            continue
+    return data_dict
+
+
+def backtrader_test(stock_data, stock_name, num, vot, rate):
+    """Backtest one stock for one parameter combo; return P&L vs. 100,000
+    starting cash, or NaN on failure.
+
+    Mutates `stock_data` in place (datetime parsing, HL mapping) — assumes
+    each worker operates on its own pickled copy; confirm against the caller.
+    """
+    # print(f'开始回测{stock_name}')
+    try:
+        cerebro = bt.Cerebro()
+        stock_data.time = pd.to_datetime(stock_data.time)
+        # Encode the fractal labels as the numeric codes TestStrategy reads.
+        stock_data['HL'] = stock_data['HL'].map({'L': 1,
+                                                 'LL': 2,
+                                                 'L*': 3,
+                                                 'H': 4,
+                                                 'HH': 5,
+                                                 'H*': 6,
+                                                 '-': 7})
+        cerebro.addstrategy(TestStrategy, num=num, Volatility=vot, rate=rate)
+        data = MyPandasData(dataname=stock_data,
+                            fromdate=datetime.datetime(2017, 1, 1),
+                            todate=datetime.datetime(2022, 10, 30),
+                            datetime='time',
+                            open='open_back',
+                            close='close_back',
+                            high='high_back',
+                            low='low_back',
+                            volume='volume_back',
+                            hl='HL',
+                            dif='dif',
+                            dea='dea',
+                            macd='macd',
+                            rsi_6='rsi_6',
+                            rsi_12='rsi_12',
+                            rsi_24='rsi_24',
+                            )
+        cerebro.adddata(data)
+        # NOTE(review): duplicate addstrategy — TestStrategy is registered a
+        # second time with default parameters, so two strategy instances run
+        # per backtest and skew the broker result. Remove this line.
+        cerebro.addstrategy(TestStrategy)
+        cerebro.broker.setcash(100000.0)
+        cerebro.addsizer(bt.sizers.FixedSize, stake=10000)
+        cerebro.broker.setcommission(commission=0.001)
+        cerebro.run()
+    except  BaseException as e:
+        print(f'{stock_name}回测错误{e}')
+        return np.nan
+    # print(cerebro.broker.getvalue() - 100000.0)
+    # print(stock_name)
+    else:
+        return cerebro.broker.getvalue() - 100000.0
+
+
def bbt(stock_data_dict, num, Volatility, rate, err_list):
    """Backtest every stock in *stock_data_dict* with one parameter set.

    Spawns a fresh process pool and maps ``backtrader_test`` over all
    stocks for the (num, Volatility, rate) combination.

    Args:
        stock_data_dict: mapping of stock name -> indicator DataFrame.
        num / Volatility / rate: strategy parameters for this round.
        err_list: kept for interface compatibility (currently unused).

    Returns:
        List of per-stock profits (``np.nan`` for failed stocks), or an
        empty list when the pool itself fails.
    """
    # Defined up-front: it is referenced after the try block, so a pool
    # failure no longer raises NameError.
    results = []
    total = len(stock_data_dict)
    try:
        with concurrent.futures.ProcessPoolExecutor(max_workers=16,
                                                    max_tasks_per_child=20000) as inner_executor:
            # Parallel map over all stocks for this parameter combination.
            results = list(tqdm(
                inner_executor.map(backtrader_test,
                                   stock_data_dict.values(),
                                   stock_data_dict.keys(),
                                   [num] * total,
                                   [Volatility] * total,
                                   [rate] * total),
                desc='单轮计算进度'))
    except Exception as e:
        print(f'计算错误{e}')

    print(f'{num},{Volatility},{rate}计算完成,共计算{len(results)}个股票')
    print(f'计算结果{results}')
    return results
+
+
def tdf(tt, num, Volatility, rate):
    """Summarize one parameter combination's per-stock profit list.

    Args:
        tt: list of per-stock profits; ``np.nan`` marks failed backtests.
        num / Volatility / rate: the parameter combination, echoed into
            the result row for later sorting.

    Returns:
        ``pd.Series`` with Chinese-labelled statistics: profit/loss
        counts, totals, averages, extremes and the NaN count.
    """
    num_nan = int(np.isnan(tt).sum())  # stocks that failed to backtest
    print(f'num_nan={num_nan}')

    valid = [r for r in tt if not np.isnan(r)]  # NaN-free results
    print(f'filtered_result={valid}')

    profits = [r for r in valid if r > 0]
    losses = [r for r in valid if r < 0]
    num_profits = len(profits)
    num_losses = len(losses)
    # Guard the all-NaN case: previously this raised ZeroDivisionError.
    profit_ratio = num_profits / len(valid) if valid else 0
    total_profit = sum(profits)
    avg_profit = total_profit / num_profits if num_profits else 0
    # Use the filtered list: max()/min() over raw data could return NaN.
    max_profit = max(valid) if valid else 0
    min_profit = min(profits) if num_profits else 0
    total_loss = sum(losses)
    avg_loss = total_loss / num_losses if num_losses else 0
    max_loss = min(valid) if valid else 0
    min_loss = max(losses) if num_losses else 0
    result_dict = {'周期': num, '波动率': Volatility, 'MA5斜率': rate, '盈利个数': num_profits,
                   '盈利比例': profit_ratio, '总盈利': total_profit, '平均盈利': avg_profit,
                   '最大盈利': max_profit, '最小盈利': min_profit, '总亏损': total_loss,
                   '平均亏损': avg_loss, '最大亏损': max_loss, '最小亏损': min_loss, '未计算个股数': num_nan}
    return pd.Series(result_dict)
+
+
if __name__ == '__main__':
    # Mirror multiprocessing's internal logging to stderr to debug
    # worker lifecycle problems.
    logger = mp.log_to_stderr()
    logger.setLevel(logging.DEBUG)
    # cpu_list = list(range(23))
    # pus = psutil.Process()
    # pus.cpu_affinity(cpu_list)
    start_time = dt.now()

    # Parameter grid to exhaust.
    nums = range(60, 120, 20)
    Volatilitys = range(5, 13, 1)
    rates = range(3, 8, 1)
    # All (num, Volatility, rate) combinations.
    all_combinations = list(product(nums, Volatilitys, rates))
    print(f'共需计算{len(all_combinations)}次')

    # Load every stock's indicator table into memory once, up front.
    stock_data_dict = get_stock_data()
    results = []

    df = pd.DataFrame(
        columns=['周期', '波动率', 'MA5斜率', '盈利个数', '盈利比例', '总盈利', '平均盈利', '最大盈利', '最小盈利',
                 '总亏损',
                 '平均亏损', '最大亏损', '最小亏损', '未计算个股数'])


    err_list = []

    # Per-round tuning knobs (not read by the loop below).
    CHUNK_SIZE = 200  # adjust as needed
    timeout = 120
    max_retries = 3
    with concurrent.futures.ProcessPoolExecutor(max_workers=24) as inner_executor:
        for num, Volatility, rate in tqdm(all_combinations, desc='计算进度'):
            # Retry the whole round until it succeeds; on failure a fresh
            # pool replaces the (possibly broken) one.
            while True:
                try:
                    # Parallel map: one backtrader_test call per stock.
                    res = [result for result in tqdm(
                        inner_executor.map(backtrader_test, stock_data_dict.values(), stock_data_dict.keys(),
                                           [num] * len(stock_data_dict),
                                           [Volatility] * len(stock_data_dict), [rate] * len(stock_data_dict)),
                        desc='单轮计算进度')]

                except BaseException as e:
                    print(f'计算错误{e}')
                    # NOTE(review): the broken pool is not shut down before
                    # being replaced, and the surrounding `with` still refers
                    # to the original executor — confirm this is intended.
                    inner_executor = concurrent.futures.ProcessPoolExecutor(max_workers=16)
                else:
                    results.append(res)
                    # Fold this round's profits into one summary row.
                    df_t = tdf(res, num, Volatility, rate)
                    df = pd.concat([df, df_t.to_frame().T], ignore_index=True)
                    break
            time.sleep(1)
            print(f'{num},{Volatility},{rate}计算完成,共计算{len(res)}个股票')
            print(df)
        print('循环结束')
        to_df(df)
        print(f'计算完成,共耗时{dt.now() - start_time}秒')
+

+ 360 - 0
backtrader/230823_bt.py

@@ -0,0 +1,360 @@
+# coding:utf-8
+import time
+from multiprocessing import freeze_support, Value, Lock
+import backtrader as bt
+from backtrader.feeds import PandasData
+import backtrader.indicators as btind
+from sqlalchemy import create_engine, text
+import pymysql
+from tqdm import tqdm
+import concurrent.futures
+import numpy as np
+import pandas as pd
+import platform
+import datetime
+from datetime import datetime as dt
+from itertools import product
+import psutil
+import logging
+import multiprocessing as mp
+from itertools import islice
+
+from func_timeout import func_set_timeout, FunctionTimedOut
+from functools import partial
+
+
+
class MyPandasData(PandasData):
    """PandasData feed extended with fractal ('hl') and indicator lines.

    The extra lines mirror columns produced upstream (HL fractal codes,
    MACD dif/dea/macd, RSI 6/12/24).  The integers in ``params`` are
    presumably positional column indices used when no column names are
    given — TODO confirm against the backtrader PandasData docs; callers
    in this file always pass explicit column-name kwargs instead.
    """
    lines = ('hl', 'dif', 'dea', 'macd', 'rsi_6', 'rsi_12', 'rsi_24',)
    params = (('hl', 7),
              ('dif', 8),
              ('dea', 9),
              ('macd', 10),
              ('rsi_6', 11),
              ('rsi_12', 12),
              ('rsi_24', 13),
              )
+
+
+
class TestStrategy(bt.Strategy):
    """Bottom-fractal entry strategy with MACD divergence and MA filters.

    Buys when the previous bar is a bottom fractal (HL code 1 or 2, i.e.
    'L'/'LL' per the mapping in backtrader_test) and an earlier bottom
    fractal with lower MACD plus several moving-average conditions is
    found; exits on MA5 breakdown or an overshoot above the MA5 band.
    """
    params = (
        ("num", 3),          # lookback window for the recent low/high
        ('Volatility', 0),   # volatility band, in percent
        ('rate', 3),         # required MA5 slope, in percent (note: trailing comma required!)
    )

    def log(self, txt, dt=None):
        # Strategy execution log; printing is currently disabled.
        dt = dt or self.datas[0].datetime.date(0)
        # print('%s, %s' % (dt.isoformat(), txt))

    def __init__(self):
        try:
            self.pos_price = 0  # low of the bar that triggered the last entry
            # Shortcuts to the data feed's lines.
            self.dataclose = self.datas[0].close
            self.dataopen = self.datas[0].open
            self.high = self.datas[0].high
            self.low = self.datas[0].low
            self.volume = self.datas[0].volume
            self.hl = self.datas[0].hl
            self.dif = self.datas[0].dif
            self.dea = self.datas[0].dea
            self.macd = self.datas[0].macd
            self.rsi_6 = self.datas[0].rsi_6
            self.rsi_12 = self.datas[0].rsi_12
            self.rsi_24 = self.datas[0].rsi_24
            # Simple moving averages computed by backtrader itself.
            self.sma5 = btind.MovingAverageSimple(self.datas[0].close, period=5)
            self.sma10 = btind.MovingAverageSimple(self.datas[0].close, period=10)
            self.sma20 = btind.MovingAverageSimple(self.datas[0].close, period=20)
            self.sma60 = btind.MovingAverageSimple(self.datas[0].close, period=60)
        except BaseException as e:
            print(f'初始化错误{e}')

    def notify_order(self, order):
        """
        Order status handling.

        Arguments:
            order {object} -- order status
        """
        if order.status in [order.Submitted, order.Accepted]:
            # Order already in flight: nothing to do yet.
            return

        # Check whether the order completed.
        if order.status in [order.Completed]:
            if order.isbuy():
                self.buyprice = order.executed.price
                self.buycomm = order.executed.comm
            self.bar_executed = len(self)

        # Order rejected, e.g. for lack of funds.
        elif order.status in [order.Canceled, order.Margin, order.Rejected]:
            pass
            # self.log('Order Canceled/Margin/Rejected')

        # Order handling finished; clear the reference.
        self.order = None

    def notify_trade(self, trade):
        """
        Trade result handling.

        Arguments:
            trade {object} -- trade status
        """
        if not trade.isclosed:
            return

        # Show gross and net profit of the closed trade.
        # self.log('OPERATION PROFIT, GROSS %.2f, NET %.2f' % (trade.pnl, trade.pnlcomm))

    def next(self):
        # Only trade once enough bars exist for the lookback window.
        if len(self) > self.params.num:
            vola = self.params.Volatility / 100
            rate = self.params.rate / 100
            lowest = np.min(self.low.get(size=self.params.num))
            highest = np.max(self.high.get(size=self.params.num))

            # Previous bar is a bottom fractal ('LL'=2 or 'L'=1).
            if self.hl[-1] == 2 or self.hl[-1] == 1:
                m = -2
                # self.order = self.buy()
                # self.pos_price = self.low[-1]
                # Scan backwards for an earlier bottom fractal whose MACD is
                # below the current one (bullish divergence), with the MA5/10
                # gap narrowing, MA5 rising by at least `rate`, the short MAs
                # stacked below a falling MA20, and the previous low inside
                # the `vola` band around the recent lowest low.
                while True:
                    if (self.hl[m] == 2 or self.hl[m] == 1) and self.macd[m] < self.macd[-1] \
                            and (self.sma10[-2] - self.sma5[-2]) > (self.sma10[-1] - self.sma5[-1]) \
                            and self.sma5[0] > self.sma5[-1] * (1 + rate) \
                            and self.sma5[-1] < self.sma10[-1] < self.sma20[-1] < self.sma20[-2] < self.sma20[-3] \
                            and lowest * (1 - vola) < self.low[-1] < lowest * (1 + vola):
                        self.order = self.buy()
                        self.pos_price = self.low[-1]
                        break
                    m -= 1
                    # Stop when the scan reaches the start of the series.
                    if m + len(self) == 2:
                        break

            # elif (self.hl[0] == 5 or self.dataclose[0] < self.sma5[0]):
            # Exit: close under MA5, MA5 turning down, close under entry low,
            # or high overshooting the MA5 volatility band.
            elif self.dataclose[0] < self.sma5[0] or self.sma5[0] < self.sma5[-1] \
                    or self.dataclose[0] < self.pos_price or self.high[0] > self.sma5[0] * (1 + vola):
                self.order = self.close()
                self.pos_price = 0

    def stop(self):
        # pass
        # Log the final portfolio value when the run ends.
        self.log(u'(MA趋势交易效果) Ending Value %.2f' % (self.broker.getvalue()))
+
+
def to_df(df):
    """Sort the parameter-sweep summary and persist it to CSV.

    The destination depends on the host: the author's MacBook writes to
    the Documents folder, anything else is assumed to be the Windows
    backtest box.  The filename carries a timestamp so runs don't
    overwrite each other.
    """
    print('开始存数据')
    df.sort_values(by=['周期', '波动率', 'MA5斜率'], ascending=True, inplace=True)
    df = df.reset_index(drop=True)
    # %H%M%S: minutes are %M — the original '%Y%m%d%H%m%S' repeated the
    # month code, so two runs in the same hour produced the same filename.
    stamp = dt.now().strftime('%Y%m%d%H%M%S')
    if platform.node() == 'DanieldeMBP.lan':
        df.to_csv(f"/Users/daniel/Documents/策略/策略穷举-均线粘连后底分型{stamp}.csv",
                  index=True,
                  encoding='utf_8_sig', mode='w')
    else:
        # Raw f-string keeps the backslashes literal without invalid-escape warnings.
        df.to_csv(rf"C:\策略结果\策略穷举底分型_背驰_Ma5乖离{stamp}.csv", index=True,
                  encoding='utf_8_sig', mode='w')
    print(f'结果:, \n, {df}')
+
+
def chunked_iterable(iterable, size):
    """Yield successive tuples of at most *size* items from *iterable*."""
    iterator = iter(iterable)
    for head in iterator:
        # Re-attach the element just pulled, then take the rest of the chunk.
        yield (head,) + tuple(islice(iterator, size - 1))
+
+
def query_database(table_name):
    """Read one full table from the local MySQL instance into a DataFrame.

    A fresh engine per call keeps the function safe to run inside
    process-pool workers (no shared connections across forks).
    """
    url = 'mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_tech?charset=utf8'
    return pd.read_sql_table(table_name, create_engine(url))
+
+
def get_stock_data():
    """Load every daily ('1d') table from qmt_stocks_tech into memory.

    Lists the matching tables via pymysql, then reads them in parallel
    with a 16-worker process pool.  Retries the whole sequence forever on
    failure (best-effort startup loading), with a short backoff.

    Returns:
        dict mapping table name -> DataFrame.
    """
    while True:
        try:
            db = pymysql.connect(host='localhost',
                                 user='root',
                                 port=3307,
                                 password='r6kEwqWU9!v3',
                                 database='qmt_stocks_tech')
            cursor = db.cursor()
            cursor.execute("show tables like '%%%s%%' " % '1d')
            # Each fetched row is a 1-tuple holding the table name
            # (loop variable renamed: `tuple` shadowed the builtin).
            table_list = [row[0] for row in cursor.fetchall()]
            # table_list = table_list[0: 10]
            cursor.close()
            db.close()
            print('开始数据库读取')
            with concurrent.futures.ProcessPoolExecutor(max_workers=16) as executor:
                # Parallel read: one worker per table, results keyed by table name.
                data_dict = {table_name: df for table_name, df in
                             tqdm(zip(table_list, executor.map(query_database, table_list)))}
            print('数据库读取完成')
            break
        except Exception as e:
            print(f'数据库读取错误{e}')
            time.sleep(1)  # brief backoff instead of hammering the server
            continue
    return data_dict
+
+
def backtrader_test(stock_data, stock_name, num, vot, rate):
    """Run one single-stock backtest and return its net profit.

    Args:
        stock_data: DataFrame with a 'time' column, '*_back' OHLCV columns,
            'HL' fractal labels and the MACD/RSI indicator columns.
        stock_name: stock/table identifier, used only in error reporting.
        num: lookback window length passed to the strategy.
        vot: volatility parameter (percent, strategy param ``Volatility``).
        rate: MA5 slope parameter (percent).

    Returns:
        Final broker value minus the 100000.0 starting cash, or ``np.nan``
        when the backtest raises, so callers can count failed stocks.
    """
    try:
        cerebro = bt.Cerebro()
        stock_data.time = pd.to_datetime(stock_data.time)
        # Encode fractal labels as the numeric codes the strategy compares against.
        stock_data['HL'] = stock_data['HL'].map({'L': 1,
                                                 'LL': 2,
                                                 'L*': 3,
                                                 'H': 4,
                                                 'HH': 5,
                                                 'H*': 6,
                                                 '-': 7})
        # Register the strategy exactly once, with the parameters under test.
        # (Previously TestStrategy was added a second time with default
        # parameters after adddata(), so every signal traded twice.)
        cerebro.addstrategy(TestStrategy, num=num, Volatility=vot, rate=rate)
        data = MyPandasData(dataname=stock_data,
                            fromdate=datetime.datetime(2017, 1, 1),
                            todate=datetime.datetime(2022, 10, 30),
                            datetime='time',
                            open='open_back',
                            close='close_back',
                            high='high_back',
                            low='low_back',
                            volume='volume_back',
                            hl='HL',
                            dif='dif',
                            dea='dea',
                            macd='macd',
                            rsi_6='rsi_6',
                            rsi_12='rsi_12',
                            rsi_24='rsi_24',
                            )
        cerebro.adddata(data)
        cerebro.broker.setcash(100000.0)
        cerebro.addsizer(bt.sizers.FixedSize, stake=10000)
        cerebro.broker.setcommission(commission=0.001)
        cerebro.run()
    except Exception as e:
        # Keep the worker pool alive: report and mark this stock as not computed.
        print(f'{stock_name}回测错误{e}')
        return np.nan
    else:
        return cerebro.broker.getvalue() - 100000.0
+
+
def bbt(stock_data_dict, num, Volatility, rate, err_list):
    """Backtest every stock in *stock_data_dict* with one parameter set.

    Spawns a fresh process pool and maps ``backtrader_test`` over all
    stocks for the (num, Volatility, rate) combination.

    Args:
        stock_data_dict: mapping of stock name -> indicator DataFrame.
        num / Volatility / rate: strategy parameters for this round.
        err_list: kept for interface compatibility (currently unused).

    Returns:
        List of per-stock profits (``np.nan`` for failed stocks), or an
        empty list when the pool itself fails.
    """
    # Defined up-front: it is referenced after the try block, so a pool
    # failure no longer raises NameError.
    results = []
    total = len(stock_data_dict)
    try:
        with concurrent.futures.ProcessPoolExecutor(max_workers=16,
                                                    max_tasks_per_child=20000) as inner_executor:
            # Parallel map over all stocks for this parameter combination.
            results = list(tqdm(
                inner_executor.map(backtrader_test,
                                   stock_data_dict.values(),
                                   stock_data_dict.keys(),
                                   [num] * total,
                                   [Volatility] * total,
                                   [rate] * total),
                desc='单轮计算进度'))
    except Exception as e:
        print(f'计算错误{e}')

    print(f'{num},{Volatility},{rate}计算完成,共计算{len(results)}个股票')
    print(f'计算结果{results}')
    return results
+
+
def tdf(tt, num, Volatility, rate):
    """Summarize one parameter combination's per-stock profit list.

    Args:
        tt: list of per-stock profits; ``np.nan`` marks failed backtests.
        num / Volatility / rate: the parameter combination, echoed into
            the result row for later sorting.

    Returns:
        ``pd.Series`` with Chinese-labelled statistics: profit/loss
        counts, totals, averages, extremes and the NaN count.
    """
    num_nan = int(np.isnan(tt).sum())  # stocks that failed to backtest
    print(f'num_nan={num_nan}')

    valid = [r for r in tt if not np.isnan(r)]  # NaN-free results
    print(f'filtered_result={valid}')

    profits = [r for r in valid if r > 0]
    losses = [r for r in valid if r < 0]
    num_profits = len(profits)
    num_losses = len(losses)
    # Guard the all-NaN case: previously this raised ZeroDivisionError.
    profit_ratio = num_profits / len(valid) if valid else 0
    total_profit = sum(profits)
    avg_profit = total_profit / num_profits if num_profits else 0
    # Use the filtered list: max()/min() over raw data could return NaN.
    max_profit = max(valid) if valid else 0
    min_profit = min(profits) if num_profits else 0
    total_loss = sum(losses)
    avg_loss = total_loss / num_losses if num_losses else 0
    max_loss = min(valid) if valid else 0
    min_loss = max(losses) if num_losses else 0
    result_dict = {'周期': num, '波动率': Volatility, 'MA5斜率': rate, '盈利个数': num_profits,
                   '盈利比例': profit_ratio, '总盈利': total_profit, '平均盈利': avg_profit,
                   '最大盈利': max_profit, '最小盈利': min_profit, '总亏损': total_loss,
                   '平均亏损': avg_loss, '最大亏损': max_loss, '最小亏损': min_loss, '未计算个股数': num_nan}
    return pd.Series(result_dict)
+
+
if __name__ == '__main__':
    # Mirror multiprocessing's internal logging to stderr to debug
    # worker lifecycle problems.
    logger = mp.log_to_stderr()
    logger.setLevel(logging.DEBUG)
    # Pin this process (and its forked workers) to CPUs 8-23, presumably
    # to keep the first cores free for the rest of the box — confirm.
    cpu_list = list(range(8, 24))
    print(cpu_list)
    pus = psutil.Process()
    pus.cpu_affinity(cpu_list)
    start_time = dt.now()

    # Parameter grid to exhaust.
    nums = range(60, 120, 20)
    Volatilitys = range(5, 13, 1)
    rates = range(1, 10, 1)
    # All (num, Volatility, rate) combinations.
    all_combinations = list(product(nums, Volatilitys, rates))
    print(f'共需计算{len(all_combinations)}次')

    # Load every stock's indicator table into memory once, up front.
    stock_data_dict = get_stock_data()
    results = []

    df = pd.DataFrame(
        columns=['周期', '波动率', 'MA5斜率', '盈利个数', '盈利比例', '总盈利', '平均盈利', '最大盈利', '最小盈利',
                 '总亏损',
                 '平均亏损', '最大亏损', '最小亏损', '未计算个股数'])


    err_list = []

    # Per-round tuning knobs (not read by the loop below).
    CHUNK_SIZE = 200  # adjust as needed
    timeout = 120
    max_retries = 3
    with concurrent.futures.ProcessPoolExecutor(max_workers=20) as inner_executor:
        for num, Volatility, rate in tqdm(all_combinations, desc='计算进度'):
            # Retry the whole round until it succeeds; on failure a fresh
            # pool replaces the (possibly broken) one.
            while True:
                try:
                    # Parallel map: one backtrader_test call per stock.
                    res = [result for result in tqdm(
                        inner_executor.map(backtrader_test, stock_data_dict.values(), stock_data_dict.keys(),
                                           [num] * len(stock_data_dict),
                                           [Volatility] * len(stock_data_dict), [rate] * len(stock_data_dict)),
                        desc='单轮计算进度')]

                except BaseException as e:
                    print(f'计算错误{e}')
                    # NOTE(review): the broken pool is not shut down before
                    # being replaced, and the surrounding `with` still refers
                    # to the original executor — confirm this is intended.
                    inner_executor = concurrent.futures.ProcessPoolExecutor(max_workers=20)
                else:
                    results.append(res)
                    # Fold this round's profits into one summary row.
                    df_t = tdf(res, num, Volatility, rate)
                    df = pd.concat([df, df_t.to_frame().T], ignore_index=True)
                    break
            time.sleep(1)
            print(f'{num},{Volatility},{rate}计算完成,共计算{len(res)}个股票')
            print(df)
        print('循环结束')
        to_df(df)
        print(f'计算完成,共耗时{dt.now() - start_time}秒')
+

+ 359 - 0
backtrader/230912_bt.py

@@ -0,0 +1,359 @@
+# coding:utf-8
+import time
+from multiprocessing import freeze_support, Value, Lock
+import backtrader as bt
+from backtrader.feeds import PandasData
+import backtrader.indicators as btind
+from sqlalchemy import create_engine, text
+import pymysql
+from tqdm import tqdm
+import concurrent.futures
+import numpy as np
+import pandas as pd
+import platform
+import datetime
+from datetime import datetime as dt
+from itertools import product
+import psutil
+import logging
+import multiprocessing as mp
+from itertools import islice
+
+from func_timeout import func_set_timeout, FunctionTimedOut
+from functools import partial
+
+
+
class MyPandasData(PandasData):
    """PandasData feed extended with fractal ('hl') and indicator lines.

    The extra lines mirror columns produced upstream (HL fractal codes,
    MACD dif/dea/macd, RSI 6/12/24).  The integers in ``params`` are
    presumably positional column indices used when no column names are
    given — TODO confirm against the backtrader PandasData docs; callers
    in this file always pass explicit column-name kwargs instead.
    """
    lines = ('hl', 'dif', 'dea', 'macd', 'rsi_6', 'rsi_12', 'rsi_24',)
    params = (('hl', 7),
              ('dif', 8),
              ('dea', 9),
              ('macd', 10),
              ('rsi_6', 11),
              ('rsi_12', 12),
              ('rsi_24', 13),
              )
+
+
+
class TestStrategy(bt.Strategy):
    """Bottom-fractal entry strategy with MACD divergence confirmation.

    Buys when the previous bar is a bottom fractal (HL code 1 or 2, i.e.
    'L'/'LL' per the mapping in backtrader_test) and an earlier, higher
    bottom fractal with lower MACD is found while the current candle is
    bullish and MA5 rises; exits on MA5 breakdown or band overshoot.
    """
    params = (
        ("num", 3),          # lookback window for the recent low/high
        ('Volatility', 0),   # volatility band, in percent
        ('rate', 3),         # required MA5 slope, in percent (note: trailing comma required!)
    )

    def log(self, txt, dt=None):
        # Strategy execution log; printing is currently disabled.
        dt = dt or self.datas[0].datetime.date(0)
        # print('%s, %s' % (dt.isoformat(), txt))

    def __init__(self):
        try:
            self.pos_price = 0  # low of the bar that triggered the last entry
            # Shortcuts to the data feed's lines.
            self.dataclose = self.datas[0].close
            self.dataopen = self.datas[0].open
            self.high = self.datas[0].high
            self.low = self.datas[0].low
            self.volume = self.datas[0].volume
            self.hl = self.datas[0].hl
            self.dif = self.datas[0].dif
            self.dea = self.datas[0].dea
            self.macd = self.datas[0].macd
            self.rsi_6 = self.datas[0].rsi_6
            self.rsi_12 = self.datas[0].rsi_12
            self.rsi_24 = self.datas[0].rsi_24
            # Simple moving averages computed by backtrader itself.
            self.sma5 = btind.MovingAverageSimple(self.datas[0].close, period=5)
            self.sma10 = btind.MovingAverageSimple(self.datas[0].close, period=10)
            self.sma20 = btind.MovingAverageSimple(self.datas[0].close, period=20)
            self.sma60 = btind.MovingAverageSimple(self.datas[0].close, period=60)
        except BaseException as e:
            print(f'初始化错误{e}')

    def notify_order(self, order):
        """
        Order status handling.

        Arguments:
            order {object} -- order status
        """
        if order.status in [order.Submitted, order.Accepted]:
            # Order already in flight: nothing to do yet.
            return

        # Check whether the order completed.
        if order.status in [order.Completed]:
            if order.isbuy():
                self.buyprice = order.executed.price
                self.buycomm = order.executed.comm
            self.bar_executed = len(self)

        # Order rejected, e.g. for lack of funds.
        elif order.status in [order.Canceled, order.Margin, order.Rejected]:
            pass
            # self.log('Order Canceled/Margin/Rejected')

        # Order handling finished; clear the reference.
        self.order = None

    def notify_trade(self, trade):
        """
        Trade result handling.

        Arguments:
            trade {object} -- trade status
        """
        if not trade.isclosed:
            return

        # Show gross and net profit of the closed trade.
        # self.log('OPERATION PROFIT, GROSS %.2f, NET %.2f' % (trade.pnl, trade.pnlcomm))

    def next(self):
        # Only trade once enough bars exist for the lookback window.
        if len(self) > self.params.num:
            vola = self.params.Volatility / 100
            rate = self.params.rate / 100
            lowest = np.min(self.low.get(size=self.params.num))
            highest = np.max(self.high.get(size=self.params.num))

            # Previous bar is a bottom fractal ('LL'=2 or 'L'=1).
            if self.hl[-1] == 2 or self.hl[-1] == 1:
                m = -2
                # self.order = self.buy()
                # self.pos_price = self.low[-1]
                # Scan backwards for an earlier bottom fractal that closed
                # higher than the current one while its MACD was lower
                # (bullish divergence), requiring a bullish current candle
                # and MA5 rising by at least `rate`.
                while True:
                    if ((self.hl[m] == 2 or self.hl[m] == 1) and self.dataclose[m] > self.dataclose[-1]
                            and self.macd[m] < self.macd[-1]
                            and self.dataclose[0] > self.dataopen[0]
                            and self.sma5[0] > self.sma5[-1]*(1+rate)):
                        self.order = self.buy()
                        self.pos_price = self.low[-1]
                        break
                    m -= 1
                    # Stop when the scan reaches the start of the series.
                    if m + len(self) == 2:
                        break

            # elif (self.hl[0] == 5 or self.dataclose[0] < self.sma5[0]):
            # Exit: close under MA5, MA5 turning down, close under entry low,
            # or high overshooting the MA5 volatility band.
            elif self.dataclose[0] < self.sma5[0] or self.sma5[0] < self.sma5[-1] \
                    or self.dataclose[0] < self.pos_price or self.high[0] > self.sma5[0] * (1 + vola):
                self.order = self.close()
                self.pos_price = 0

    def stop(self):
        # pass
        # Log the final portfolio value when the run ends.
        self.log(u'(MA趋势交易效果) Ending Value %.2f' % (self.broker.getvalue()))
+
+
def to_df(df):
    """Sort the parameter-sweep summary and persist it to CSV.

    The destination depends on the host: the author's MacBook writes to
    the Documents folder, anything else is assumed to be the Windows
    backtest box.  The filename carries a timestamp so runs don't
    overwrite each other.
    """
    print('开始存数据')
    df.sort_values(by=['周期', '波动率', 'MA5斜率'], ascending=True, inplace=True)
    df = df.reset_index(drop=True)
    # %H%M%S: minutes are %M — the original '%Y%m%d%H%m%S' repeated the
    # month code, so two runs in the same hour produced the same filename.
    stamp = dt.now().strftime('%Y%m%d%H%M%S')
    if platform.node() == 'DanieldeMBP.lan':
        df.to_csv(f"/Users/daniel/Documents/策略/策略穷举-均线粘连后底分型{stamp}.csv",
                  index=True,
                  encoding='utf_8_sig', mode='w')
    else:
        # Raw f-string keeps the backslashes literal without invalid-escape warnings.
        df.to_csv(rf"C:\策略结果\策略穷举底分型_背驰_Ma5乖离{stamp}.csv", index=True,
                  encoding='utf_8_sig', mode='w')
    print(f'结果:, \n, {df}')
+
+
def chunked_iterable(iterable, size):
    """Yield successive tuples of at most *size* items from *iterable*."""
    iterator = iter(iterable)
    for head in iterator:
        # Re-attach the element just pulled, then take the rest of the chunk.
        yield (head,) + tuple(islice(iterator, size - 1))
+
+
def query_database(table_name):
    """Read one full table from the local MySQL instance into a DataFrame.

    A fresh engine per call keeps the function safe to run inside
    process-pool workers (no shared connections across forks).
    """
    url = 'mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_tech?charset=utf8'
    return pd.read_sql_table(table_name, create_engine(url))
+
+
def get_stock_data():
    """Load every daily ('1d') table from qmt_stocks_tech into memory.

    Lists the matching tables via pymysql, then reads them in parallel
    with a 16-worker process pool.  Retries the whole sequence forever on
    failure (best-effort startup loading), with a short backoff.

    Returns:
        dict mapping table name -> DataFrame.
    """
    while True:
        try:
            db = pymysql.connect(host='localhost',
                                 user='root',
                                 port=3307,
                                 password='r6kEwqWU9!v3',
                                 database='qmt_stocks_tech')
            cursor = db.cursor()
            cursor.execute("show tables like '%%%s%%' " % '1d')
            # Each fetched row is a 1-tuple holding the table name
            # (loop variable renamed: `tuple` shadowed the builtin).
            table_list = [row[0] for row in cursor.fetchall()]
            # table_list = table_list[0: 10]
            cursor.close()
            db.close()
            print('开始数据库读取')
            with concurrent.futures.ProcessPoolExecutor(max_workers=16) as executor:
                # Parallel read: one worker per table, results keyed by table name.
                data_dict = {table_name: df for table_name, df in
                             tqdm(zip(table_list, executor.map(query_database, table_list)))}
            print('数据库读取完成')
            break
        except Exception as e:
            print(f'数据库读取错误{e}')
            time.sleep(1)  # brief backoff instead of hammering the server
            continue
    return data_dict
+
+
def backtrader_test(stock_data, stock_name, num, vot, rate):
    """Run one single-stock backtest and return its net profit.

    Args:
        stock_data: DataFrame with a 'time' column, '*_back' OHLCV columns,
            'HL' fractal labels and the MACD/RSI indicator columns.
        stock_name: stock/table identifier, used only in error reporting.
        num: lookback window length passed to the strategy.
        vot: volatility parameter (percent, strategy param ``Volatility``).
        rate: MA5 slope parameter (percent).

    Returns:
        Final broker value minus the 100000.0 starting cash, or ``np.nan``
        when the backtest raises, so callers can count failed stocks.
    """
    try:
        cerebro = bt.Cerebro()
        stock_data.time = pd.to_datetime(stock_data.time)
        # Encode fractal labels as the numeric codes the strategy compares against.
        stock_data['HL'] = stock_data['HL'].map({'L': 1,
                                                 'LL': 2,
                                                 'L*': 3,
                                                 'H': 4,
                                                 'HH': 5,
                                                 'H*': 6,
                                                 '-': 7})
        # Register the strategy exactly once, with the parameters under test.
        # (Previously TestStrategy was added a second time with default
        # parameters after adddata(), so every signal traded twice.)
        cerebro.addstrategy(TestStrategy, num=num, Volatility=vot, rate=rate)
        data = MyPandasData(dataname=stock_data,
                            fromdate=datetime.datetime(2017, 1, 1),
                            todate=datetime.datetime(2022, 10, 30),
                            datetime='time',
                            open='open_back',
                            close='close_back',
                            high='high_back',
                            low='low_back',
                            volume='volume_back',
                            hl='HL',
                            dif='dif',
                            dea='dea',
                            macd='macd',
                            rsi_6='rsi_6',
                            rsi_12='rsi_12',
                            rsi_24='rsi_24',
                            )
        cerebro.adddata(data)
        cerebro.broker.setcash(100000.0)
        cerebro.addsizer(bt.sizers.FixedSize, stake=10000)
        cerebro.broker.setcommission(commission=0.001)
        cerebro.run()
    except Exception as e:
        # Keep the worker pool alive: report and mark this stock as not computed.
        print(f'{stock_name}回测错误{e}')
        return np.nan
    else:
        return cerebro.broker.getvalue() - 100000.0
+
+
def bbt(stock_data_dict, num, Volatility, rate, err_list):
    """Backtest every stock in *stock_data_dict* with one parameter set.

    Spawns a fresh process pool and maps ``backtrader_test`` over all
    stocks for the (num, Volatility, rate) combination.

    Args:
        stock_data_dict: mapping of stock name -> indicator DataFrame.
        num / Volatility / rate: strategy parameters for this round.
        err_list: kept for interface compatibility (currently unused).

    Returns:
        List of per-stock profits (``np.nan`` for failed stocks), or an
        empty list when the pool itself fails.
    """
    # Defined up-front: it is referenced after the try block, so a pool
    # failure no longer raises NameError.
    results = []
    total = len(stock_data_dict)
    try:
        with concurrent.futures.ProcessPoolExecutor(max_workers=16,
                                                    max_tasks_per_child=20000) as inner_executor:
            # Parallel map over all stocks for this parameter combination.
            results = list(tqdm(
                inner_executor.map(backtrader_test,
                                   stock_data_dict.values(),
                                   stock_data_dict.keys(),
                                   [num] * total,
                                   [Volatility] * total,
                                   [rate] * total),
                desc='单轮计算进度'))
    except Exception as e:
        print(f'计算错误{e}')

    print(f'{num},{Volatility},{rate}计算完成,共计算{len(results)}个股票')
    print(f'计算结果{results}')
    return results
+
+
def tdf(tt, num, Volatility, rate):
    """Summarize one parameter combination's per-stock profit list.

    Args:
        tt: list of per-stock profits; ``np.nan`` marks failed backtests.
        num / Volatility / rate: the parameter combination, echoed into
            the result row for later sorting.

    Returns:
        ``pd.Series`` with Chinese-labelled statistics: profit/loss
        counts, totals, averages, extremes and the NaN count.
    """
    num_nan = int(np.isnan(tt).sum())  # stocks that failed to backtest
    print(f'num_nan={num_nan}')

    valid = [r for r in tt if not np.isnan(r)]  # NaN-free results
    print(f'filtered_result={valid}')

    profits = [r for r in valid if r > 0]
    losses = [r for r in valid if r < 0]
    num_profits = len(profits)
    num_losses = len(losses)
    # Guard the all-NaN case: previously this raised ZeroDivisionError.
    profit_ratio = num_profits / len(valid) if valid else 0
    total_profit = sum(profits)
    avg_profit = total_profit / num_profits if num_profits else 0
    # Use the filtered list: max()/min() over raw data could return NaN.
    max_profit = max(valid) if valid else 0
    min_profit = min(profits) if num_profits else 0
    total_loss = sum(losses)
    avg_loss = total_loss / num_losses if num_losses else 0
    max_loss = min(valid) if valid else 0
    min_loss = max(losses) if num_losses else 0
    result_dict = {'周期': num, '波动率': Volatility, 'MA5斜率': rate, '盈利个数': num_profits,
                   '盈利比例': profit_ratio, '总盈利': total_profit, '平均盈利': avg_profit,
                   '最大盈利': max_profit, '最小盈利': min_profit, '总亏损': total_loss,
                   '平均亏损': avg_loss, '最大亏损': max_loss, '最小亏损': min_loss, '未计算个股数': num_nan}
    return pd.Series(result_dict)
+
+
if __name__ == '__main__':
    # Mirror multiprocessing's internal logging to stderr to debug
    # worker lifecycle problems.
    logger = mp.log_to_stderr()
    logger.setLevel(logging.DEBUG)
    # Pin this process (and its forked workers) to CPUs 8-23, presumably
    # to keep the first cores free for the rest of the box — confirm.
    cpu_list = list(range(8, 24))
    print(cpu_list)
    pus = psutil.Process()
    pus.cpu_affinity(cpu_list)
    start_time = dt.now()

    # Parameter grid to exhaust.
    nums = range(60, 120, 20)
    Volatilitys = range(5, 13, 1)
    rates = range(1, 10, 1)
    # All (num, Volatility, rate) combinations.
    all_combinations = list(product(nums, Volatilitys, rates))
    print(f'共需计算{len(all_combinations)}次')

    # Load every stock's indicator table into memory once, up front.
    stock_data_dict = get_stock_data()
    results = []

    df = pd.DataFrame(
        columns=['周期', '波动率', 'MA5斜率', '盈利个数', '盈利比例', '总盈利', '平均盈利', '最大盈利', '最小盈利',
                 '总亏损',
                 '平均亏损', '最大亏损', '最小亏损', '未计算个股数'])


    err_list = []

    # Per-round tuning knobs (not read by the loop below).
    CHUNK_SIZE = 200  # adjust as needed
    timeout = 120
    max_retries = 3
    with concurrent.futures.ProcessPoolExecutor(max_workers=16) as inner_executor:
        for num, Volatility, rate in tqdm(all_combinations, desc='计算进度'):
            # Retry the whole round until it succeeds; on failure a fresh
            # pool replaces the (possibly broken) one.
            while True:
                try:
                    # Parallel map: one backtrader_test call per stock.
                    res = [result for result in tqdm(
                        inner_executor.map(backtrader_test, stock_data_dict.values(), stock_data_dict.keys(),
                                           [num] * len(stock_data_dict),
                                           [Volatility] * len(stock_data_dict), [rate] * len(stock_data_dict)),
                        desc='单轮计算进度')]

                except BaseException as e:
                    print(f'计算错误{e}')
                    # NOTE(review): the broken pool is not shut down before
                    # being replaced, and the surrounding `with` still refers
                    # to the original executor — confirm this is intended.
                    inner_executor = concurrent.futures.ProcessPoolExecutor(max_workers=20)
                else:
                    results.append(res)
                    # Fold this round's profits into one summary row.
                    df_t = tdf(res, num, Volatility, rate)
                    df = pd.concat([df, df_t.to_frame().T], ignore_index=True)
                    break
            time.sleep(1)
            print(f'{num},{Volatility},{rate}计算完成,共计算{len(res)}个股票')
            print(df)
        print('循环结束')
        to_df(df)
        print(f'计算完成,共耗时{dt.now() - start_time}秒')
+

+ 316 - 0
backtrader/230919_bt.py

@@ -0,0 +1,316 @@
+# coding:utf-8
+import time
+from multiprocessing import freeze_support, Value, Lock
+import backtrader as bt
+from backtrader.feeds import PandasData
+import backtrader.indicators as btind
+from sqlalchemy import create_engine, text
+import pymysql
+from tqdm import tqdm
+import concurrent.futures
+import numpy as np
+import pandas as pd
+import platform
+import datetime
+from datetime import datetime as dt
+from itertools import product
+import psutil
+import logging
+import multiprocessing as mp
+from itertools import islice
+
+from func_timeout import func_set_timeout, FunctionTimedOut
+from functools import partial
+
+
+class MyPandasData(PandasData):
+    # Extends backtrader's PandasData feed with extra columns: the HL
+    # high/low marker plus MACD (dif/dea/macd) and RSI (6/12/24) lines.
+    lines = ('hl', 'dif', 'dea', 'macd', 'rsi_6', 'rsi_12', 'rsi_24',)
+    # Default column index of each extra line in the source DataFrame.
+    params = (('hl', 7),
+              ('dif', 8),
+              ('dea', 9),
+              ('macd', 10),
+              ('rsi_6', 11),
+              ('rsi_12', 12),
+              ('rsi_24', 13),
+              )
+
+
+class TestStrategy(bt.Strategy):
+    """MA5-deviation strategy: buy when the low dips `rate`% below a
+    declining SMA5 stack; close on recovery/overshoot conditions."""
+    params = (
+        ("num", 3),
+        ('Volatility', 0),
+        ('rate', 3),  # note: the trailing comma is required!
+    )
+
+    def log(self, txt, dt=None):
+        # Record the strategy's execution log (printing currently disabled).
+        dt = dt or self.datas[0].datetime.date(0)
+        # print('%s, %s' % (dt.isoformat(), txt))
+
+    def __init__(self):
+        # Cache line references and build the SMA indicators used in next().
+        try:
+            self.pos_price = 0
+            self.dataclose = self.datas[0].close
+            self.dataopen = self.datas[0].open
+            self.high = self.datas[0].high
+            self.low = self.datas[0].low
+            self.volume = self.datas[0].volume
+            self.hl = self.datas[0].hl
+            self.dif = self.datas[0].dif
+            self.dea = self.datas[0].dea
+            self.macd = self.datas[0].macd
+            self.rsi_6 = self.datas[0].rsi_6
+            self.rsi_12 = self.datas[0].rsi_12
+            self.rsi_24 = self.datas[0].rsi_24
+            self.sma5 = btind.MovingAverageSimple(self.datas[0].close, period=5)
+            self.sma10 = btind.MovingAverageSimple(self.datas[0].close, period=10)
+            self.sma20 = btind.MovingAverageSimple(self.datas[0].close, period=20)
+            self.sma60 = btind.MovingAverageSimple(self.datas[0].close, period=60)
+        except BaseException as e:
+            # NOTE(review): BaseException also swallows KeyboardInterrupt/SystemExit.
+            print(f'初始化错误{e}')
+
+    def notify_order(self, order):
+        """
+        Handle order status notifications.
+
+        Arguments:
+            order {object} -- the order whose status changed
+        """
+        if order.status in [order.Submitted, order.Accepted]:
+            # Nothing to do while the order is only submitted/accepted.
+            return
+
+        # Check whether the order completed.
+        if order.status in [order.Completed]:
+            if order.isbuy():
+                self.buyprice = order.executed.price
+                self.buycomm = order.executed.comm
+            self.bar_executed = len(self)
+
+        # The order was rejected, e.g. for lack of funds.
+        elif order.status in [order.Canceled, order.Margin, order.Rejected]:
+            pass
+            # self.log('Order Canceled/Margin/Rejected')
+
+        # Status handled; clear the pending order reference.
+        self.order = None
+
+    def notify_trade(self, trade):
+        """
+        Handle trade result notifications.
+
+        Arguments:
+            trade {object} -- the trade whose status changed
+        """
+        if not trade.isclosed:
+            return
+
+        # Would log the trade's gross and net profit.
+        # self.log('OPERATION PROFIT, GROSS %.2f, NET %.2f' % (trade.pnl, trade.pnlcomm))
+
+    def next(self):
+        # Percent parameters are given as integers; scale to fractions.
+        rate = self.params.rate / 100
+        vola = self.params.Volatility / 100
+        # Buy: low pierced rate% below SMA5 while SMA5 is falling and the
+        # MA stack shows SMA5 < SMA10 < SMA20 two bars back.
+        if self.low[0] < self.sma5[0] * (1 - rate) and self.sma5[0] < self.sma5[-1] < self.sma5[-2] < self.sma10[-2] < \
+                self.sma20[-2]:
+            self.order = self.buy()
+            self.pos_price = self.low[-1]
+
+
+        # elif (self.hl[0] == 5 or self.dataclose[0] < self.sma5[0]):
+        # Exit on intraday fallback below vola, close under SMA5, stop below
+        # pos_price, or overshoot above SMA5 * (1 + rate).
+        elif ((self.high[0] < self.sma5[0] and self.dataclose[0] < (self.high[0] * (1 - vola))) or
+              (self.high[0] > self.sma5[0] > self.dataclose[0]) or self.dataclose[0] < self.pos_price) \
+                or (self.dataclose[0] < self.sma5[0]) or (self.dataclose[0] > self.sma5[0] * (1 + rate)):
+            self.order = self.close()
+            self.pos_price = 0
+
+    def stop(self):
+        # pass
+        self.log(u'(MA趋势交易效果) Ending Value %.2f' % (self.broker.getvalue()))
+
+
+def to_df(df):
+    """Sort the sweep results and save them to CSV (path depends on host)."""
+    print('开始存数据')
+    df.sort_values(by=['MA5乖离率', '当日回落'], ascending=True, inplace=True)
+    df = df.reset_index(drop=True)
+    if platform.node() == 'DanieldeMBP.lan':
+        # NOTE(review): '%Y%m%d%H%m%S' repeats %m (month); minutes are
+        # presumably intended ('%M') — confirm before relying on filenames.
+        df.to_csv(f"/Users/daniel/Documents/策略/Ma5乖离7买入{dt.now().strftime('%Y%m%d%H%m%S')}.csv",
+                  index=True,
+                  encoding='utf_8_sig', mode='w')
+    else:
+        # NOTE(review): unescaped backslashes in the Windows path; a raw
+        # string or pathlib would be safer.
+        df.to_csv(f"C:\策略结果\Ma5乖离7买入{dt.now().strftime('%Y%m%d%H%m%S')}.csv", index=True,
+                  encoding='utf_8_sig', mode='w')
+    print(f'结果:, \n, {df}')
+
+
+def chunked_iterable(iterable, size):
+    """Yield successive tuples of at most `size` items from `iterable`."""
+    it = iter(iterable)
+    while True:
+        chunk = tuple(islice(it, size))
+        if not chunk:
+            return
+        yield chunk
+
+
+def query_database(table_name):
+    """Read one whole MySQL table into a DataFrame (runs in worker processes).
+
+    NOTE(review): credentials are hard-coded; a new engine is created per call.
+    """
+    engine = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_tech?charset=utf8')
+    df = pd.read_sql_table(table_name, engine)
+    return df
+
+
+def get_stock_data():
+    """Load every '1d' table from qmt_stocks_tech into {table_name: DataFrame}.
+
+    Retries forever on any error (BaseException), so a persistent DB failure
+    loops indefinitely — NOTE(review): consider bounding the retries.
+    """
+    while True:
+        try:
+            db = pymysql.connect(host='localhost',
+                                 user='root',
+                                 port=3307,
+                                 password='r6kEwqWU9!v3',
+                                 database='qmt_stocks_tech')
+            cursor = db.cursor()
+            # Builds: show tables like '%1d%'
+            cursor.execute("show tables like '%%%s%%' " % '1d')
+            table_list = [tuple[0] for tuple in cursor.fetchall()]
+            # table_list = table_list[0: 10]
+            cursor.close()
+            db.close()
+            print(f'开始数据库读取')
+            with concurrent.futures.ProcessPoolExecutor(max_workers=16) as executor:
+                # Query the tables in parallel and collect each table's frame.
+                data_dict = {table_name: df for table_name, df in
+                             tqdm(zip(table_list, executor.map(query_database, table_list)))}
+            print(f'数据库读取完成')
+            break
+        except BaseException as e:
+            print(f'数据库读取错误{e}')
+            continue
+    return data_dict
+
+
+def backtrader_test(stock_data, stock_name, vot, rate):
+    """Backtest one stock with the given Volatility/rate params.
+
+    Returns the P&L relative to the 100k starting cash, or NaN on error.
+    """
+    # print(f'开始回测{stock_name}')
+    try:
+        cerebro = bt.Cerebro()
+        stock_data.time = pd.to_datetime(stock_data.time)
+        # Map the textual HL marker to the integer codes used by the strategy.
+        stock_data['HL'] = stock_data['HL'].map({'L': 1,
+                                                 'LL': 2,
+                                                 'L*': 3,
+                                                 'H': 4,
+                                                 'HH': 5,
+                                                 'H*': 6,
+                                                 '-': 7})
+        cerebro.addstrategy(TestStrategy, Volatility=vot, rate=rate)
+        data = MyPandasData(dataname=stock_data,
+                            fromdate=datetime.datetime(2017, 1, 1),
+                            todate=datetime.datetime(2022, 10, 30),
+                            datetime='time',
+                            open='open_back',
+                            close='close_back',
+                            high='high_back',
+                            low='low_back',
+                            volume='volume_back',
+                            hl='HL',
+                            dif='dif',
+                            dea='dea',
+                            macd='macd',
+                            rsi_6='rsi_6',
+                            rsi_12='rsi_12',
+                            rsi_24='rsi_24',
+                            )
+        cerebro.adddata(data)
+        # NOTE(review): TestStrategy is added a second time here, with default
+        # params — the strategy runs twice per backtest; likely unintended.
+        cerebro.addstrategy(TestStrategy)
+        cerebro.broker.setcash(100000.0)
+        cerebro.addsizer(bt.sizers.FixedSize, stake=10000)
+        cerebro.broker.setcommission(commission=0.001)
+        cerebro.run()
+    except  BaseException as e:
+        print(f'{stock_name}回测错误{e}')
+        return np.nan
+    # print(cerebro.broker.getvalue() - 100000.0)
+    # print(stock_name)
+    else:
+        return cerebro.broker.getvalue() - 100000.0
+
+
+def tdf(tt, rate, Volatility):
+    """Summarize one parameter combo's per-stock P&L list into a Series.
+
+    NOTE(review): profit_ratio raises ZeroDivisionError if every result is
+    NaN, and max()/min() over a list containing NaN can itself yield NaN.
+    """
+    num_nan = np.isnan(tt).sum()  # Count NaN values
+    print(f'num_nan={num_nan}')
+
+    filtered_result = [r for r in tt if not np.isnan(r)]  # Filter out NaN values
+    print(f'filtered_result={filtered_result}')
+
+    # Calculate statistics
+    num_profits = len([r for r in tt if r > 0])
+    num_losses = len([r for r in tt if r < 0])
+    profit_ratio = num_profits / (len(filtered_result))
+    total_profit = sum([r for r in tt if r > 0])
+    avg_profit = total_profit / num_profits if num_profits else 0
+    max_profit = max(tt)
+    min_profit = min([r for r in tt if r > 0]) if num_profits else 0
+    total_loss = sum([r for r in tt if r < 0])
+    avg_loss = total_loss / num_losses if num_losses else 0
+    max_loss = min(tt)
+    min_loss = max([r for r in tt if r < 0]) if num_losses else 0
+    # Append the results into the DataFrame
+    result_dict = {'MA5乖离率': rate, '当日回落': Volatility, '盈利个数': num_profits,
+                   '盈利比例': profit_ratio, '总盈利': total_profit, '平均盈利': avg_profit,
+                   '最大盈利': max_profit, '最小盈利': min_profit, '总亏损': total_loss,
+                   '平均亏损': avg_loss, '最大亏损': max_loss, '最小亏损': min_loss, '未计算个股数': num_nan}
+    df_t = pd.Series(result_dict)
+    return df_t
+
+
+if __name__ == '__main__':
+    # Enable multiprocessing debug logging and pin the process to CPUs 0-22.
+    logger = mp.log_to_stderr()
+    logger.setLevel(logging.DEBUG)
+    cpu_list = list(range(0, 23))
+    print(cpu_list)
+    pus = psutil.Process()
+    pus.cpu_affinity(cpu_list)
+    start_time = dt.now()
+
+    # Parameter values to sweep.
+    Volatilitys = range(1, 10, 1)  # intraday pullback (percent)
+    rates = range(3, 20, 1)  # MA5 deviation rate (percent)
+    # Build all parameter combinations.
+    all_combinations = list(product(Volatilitys, rates))
+    print(f'共需计算{len(all_combinations)}次')
+
+    # Fetch the per-stock data once, up front.
+    stock_data_dict = get_stock_data()
+    results = []
+
+    df = pd.DataFrame(
+        columns=['MA5乖离率', '当日回落', '盈利个数', '盈利比例', '总盈利', '平均盈利', '最大盈利', '最小盈利',
+                 '总亏损',
+                 '平均亏损', '最大亏损', '最小亏损', '未计算个股数'])
+
+    err_list = []
+
+    # Tasks per round.
+    # NOTE(review): CHUNK_SIZE, timeout, max_retries and err_list are unused.
+    CHUNK_SIZE = 200  # adjust as needed
+    timeout = 120
+    max_retries = 3
+    with concurrent.futures.ProcessPoolExecutor(max_workers=24) as inner_executor:
+        for Volatility, rate in tqdm(all_combinations, desc='计算进度'):
+            while True:
+                try:
+                    # Run all stocks for this parameter combo in parallel.
+                    res = [result for result in tqdm(
+                        inner_executor.map(backtrader_test, stock_data_dict.values(), stock_data_dict.keys(),
+                                           [Volatility] * len(stock_data_dict), [rate] * len(stock_data_dict)),
+                        desc='单轮计算进度')]
+
+                except BaseException as e:
+                    print(f'计算错误{e}')
+                    # NOTE(review): rebinding inner_executor means the
+                    # replacement pool is never shut down by the `with`
+                    # block, and the worker count silently drops 24 -> 20.
+                    inner_executor = concurrent.futures.ProcessPoolExecutor(max_workers=20)
+                else:
+                    results.append(res)
+                    df_t = tdf(res, rate, Volatility)
+                    df = pd.concat([df, df_t.to_frame().T], ignore_index=True)
+                    break
+            # time.sleep(1)
+            print(f'{rate}计算完成,共计算{len(res)}个股票')
+            print(df)
+        print('循环结束')
+        to_df(df)
+        print(f'计算完成,共耗时{dt.now() - start_time}秒')

+ 311 - 0
backtrader/230930_bt.py

@@ -0,0 +1,311 @@
+# coding:utf-8
+import time
+from multiprocessing import freeze_support, Value, Lock
+import backtrader as bt
+from backtrader.feeds import PandasData
+import backtrader.indicators as btind
+from sqlalchemy import create_engine, text
+import pymysql
+from tqdm import tqdm
+import concurrent.futures
+import numpy as np
+import pandas as pd
+import platform
+import datetime
+from datetime import datetime as dt
+from itertools import product
+import psutil
+import logging
+import multiprocessing as mp
+from itertools import islice
+
+from func_timeout import func_set_timeout, FunctionTimedOut
+from functools import partial
+
+
+class MyPandasData(PandasData):
+    # Extends backtrader's PandasData feed with extra columns: the HL
+    # high/low marker plus MACD (dif/dea/macd) and RSI (6/12/24) lines.
+    lines = ('hl', 'dif', 'dea', 'macd', 'rsi_6', 'rsi_12', 'rsi_24',)
+    # Default column index of each extra line in the source DataFrame.
+    params = (('hl', 7),
+              ('dif', 8),
+              ('dea', 9),
+              ('macd', 10),
+              ('rsi_6', 11),
+              ('rsi_12', 12),
+              ('rsi_24', 13),
+              )
+
+
+class TestStrategy(bt.Strategy):
+    """Volume/HL strategy: buy on rising volume above SMA5/SMA20 with a
+    low-type HL marker; close on a high-type marker or close under SMA5."""
+    params = (
+        ("num", 3),
+        ('Volatility', 0),
+        ('rate', 3),  # note: the trailing comma is required!
+    )
+
+    def log(self, txt, dt=None):
+        # Record the strategy's execution log (printing currently disabled).
+        dt = dt or self.datas[0].datetime.date(0)
+        # print('%s, %s' % (dt.isoformat(), txt))
+
+    def __init__(self):
+        # Cache line references and build the SMA indicators used in next().
+        try:
+            self.pos_price = 0
+            self.dataclose = self.datas[0].close
+            self.dataopen = self.datas[0].open
+            self.high = self.datas[0].high
+            self.low = self.datas[0].low
+            self.volume = self.datas[0].volume
+            self.hl = self.datas[0].hl
+            self.dif = self.datas[0].dif
+            self.dea = self.datas[0].dea
+            self.macd = self.datas[0].macd
+            self.rsi_6 = self.datas[0].rsi_6
+            self.rsi_12 = self.datas[0].rsi_12
+            self.rsi_24 = self.datas[0].rsi_24
+            self.sma5 = btind.MovingAverageSimple(self.datas[0].close, period=5)
+            self.sma10 = btind.MovingAverageSimple(self.datas[0].close, period=10)
+            self.sma20 = btind.MovingAverageSimple(self.datas[0].close, period=20)
+            self.sma60 = btind.MovingAverageSimple(self.datas[0].close, period=60)
+            # self.sma_vol = btind.MovingAverageSimple(self.datas[0].close, period=Volatility)
+        except BaseException as e:
+            # NOTE(review): BaseException also swallows KeyboardInterrupt/SystemExit.
+            print(f'初始化错误{e}')
+
+    def notify_order(self, order):
+        """
+        Handle order status notifications.
+
+        Arguments:
+            order {object} -- the order whose status changed
+        """
+        if order.status in [order.Submitted, order.Accepted]:
+            # Nothing to do while the order is only submitted/accepted.
+            return
+
+        # Check whether the order completed.
+        if order.status in [order.Completed]:
+            if order.isbuy():
+                self.buyprice = order.executed.price
+                self.buycomm = order.executed.comm
+            self.bar_executed = len(self)
+
+        # The order was rejected, e.g. for lack of funds.
+        elif order.status in [order.Canceled, order.Margin, order.Rejected]:
+            pass
+            # self.log('Order Canceled/Margin/Rejected')
+
+        # Status handled; clear the pending order reference.
+        self.order = None
+
+    def notify_trade(self, trade):
+        """
+        Handle trade result notifications.
+
+        Arguments:
+            trade {object} -- the trade whose status changed
+        """
+        if not trade.isclosed:
+            return
+
+        # Would log the trade's gross and net profit.
+        # self.log('OPERATION PROFIT, GROSS %.2f, NET %.2f' % (trade.pnl, trade.pnlcomm))
+
+    def next(self):
+        # Buy: volume up vs. previous bar, close above SMA5 and SMA20, and
+        # HL marker is one of the low codes (L/LL/L*).
+        if self.volume[-1] < self.volume[0] and self.sma5[0] < self.dataclose[0]\
+                and self.dataclose[0] > self.sma20[0] \
+                and (self.hl[0] == 1 or self.hl[0] == 2 or self.hl[0] == 3):
+            self.order = self.buy()
+            self.pos_price = self.dataclose[0]
+
+        # Exit: HH marker (code 5) or close falls under SMA5.
+        elif (self.hl[0] == 5 or self.dataclose[0] < self.sma5[0]):
+            self.order = self.close()
+            self.pos_price = 0
+
+    def stop(self):
+        # pass
+        self.log(u'(MA趋势交易效果) Ending Value %.2f' % (self.broker.getvalue()))
+
+def to_df(df):
+    """Sort the sweep results and save them to CSV (path depends on host)."""
+    print('开始存数据')
+    # NOTE(review): sorts by 'MA5乖离率'/'当日回落', but this script's tdf()
+    # emits a '基准均线' key instead — the sort columns stay NaN; verify.
+    df.sort_values(by=['MA5乖离率', '当日回落'], ascending=True, inplace=True)
+    df = df.reset_index(drop=True)
+    if platform.node() == 'DanieldeMBP.lan':
+        # NOTE(review): '%Y%m%d%H%m%S' repeats %m (month); '%M' (minutes)
+        # is presumably intended — confirm.
+        df.to_csv(f"/Users/daniel/Documents/策略/Ma5乖离7买入{dt.now().strftime('%Y%m%d%H%m%S')}.csv",
+                  index=True,
+                  encoding='utf_8_sig', mode='w')
+    else:
+        df.to_csv(f"C:\策略结果\Ma5乖离7买入{dt.now().strftime('%Y%m%d%H%m%S')}.csv", index=True,
+                  encoding='utf_8_sig', mode='w')
+    print(f'结果:, \n, {df}')
+
+
+def chunked_iterable(iterable, size):
+    """Yield successive tuples of at most `size` items from `iterable`."""
+    it = iter(iterable)
+    while True:
+        chunk = tuple(islice(it, size))
+        if not chunk:
+            return
+        yield chunk
+
+
+def query_database(table_name):
+    """Read one whole MySQL table into a DataFrame (runs in worker processes).
+
+    NOTE(review): credentials are hard-coded; a new engine is created per call.
+    """
+    engine = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_tech?charset=utf8')
+    df = pd.read_sql_table(table_name, engine)
+    return df
+
+
+def get_stock_data():
+    """Load every '1d' table from qmt_stocks_tech into {table_name: DataFrame}.
+
+    Retries forever on any error (BaseException), so a persistent DB failure
+    loops indefinitely — NOTE(review): consider bounding the retries.
+    """
+    while True:
+        try:
+            db = pymysql.connect(host='localhost',
+                                 user='root',
+                                 port=3307,
+                                 password='r6kEwqWU9!v3',
+                                 database='qmt_stocks_tech')
+            cursor = db.cursor()
+            # Builds: show tables like '%1d%'
+            cursor.execute("show tables like '%%%s%%' " % '1d')
+            table_list = [tuple[0] for tuple in cursor.fetchall()]
+            # table_list = table_list[0: 10]
+            cursor.close()
+            db.close()
+            print(f'开始数据库读取')
+            with concurrent.futures.ProcessPoolExecutor(max_workers=16) as executor:
+                # Query the tables in parallel and collect each table's frame.
+                data_dict = {table_name: df for table_name, df in
+                             tqdm(zip(table_list, executor.map(query_database, table_list)))}
+            print(f'数据库读取完成')
+            break
+        except BaseException as e:
+            print(f'数据库读取错误{e}')
+            continue
+    return data_dict
+
+
+def backtrader_test(stock_data, stock_name, vot):
+    """Backtest one stock with the given Volatility parameter.
+
+    Returns the P&L relative to the 100k starting cash, or NaN on error.
+    """
+    # print(f'开始回测{stock_name}')
+    try:
+        cerebro = bt.Cerebro()
+        stock_data.time = pd.to_datetime(stock_data.time)
+        # Map the textual HL marker to the integer codes used by the strategy.
+        stock_data['HL'] = stock_data['HL'].map({'L': 1,
+                                                 'LL': 2,
+                                                 'L*': 3,
+                                                 'H': 4,
+                                                 'HH': 5,
+                                                 'H*': 6,
+                                                 '-': 7})
+        cerebro.addstrategy(TestStrategy, Volatility=vot)
+        data = MyPandasData(dataname=stock_data,
+                            fromdate=datetime.datetime(2017, 1, 1),
+                            todate=datetime.datetime(2022, 10, 30),
+                            datetime='time',
+                            open='open_back',
+                            close='close_back',
+                            high='high_back',
+                            low='low_back',
+                            volume='volume_back',
+                            hl='HL',
+                            dif='dif',
+                            dea='dea',
+                            macd='macd',
+                            rsi_6='rsi_6',
+                            rsi_12='rsi_12',
+                            rsi_24='rsi_24',
+                            )
+        cerebro.adddata(data)
+        # NOTE(review): TestStrategy is added a second time here, with default
+        # params — the strategy runs twice per backtest; likely unintended.
+        cerebro.addstrategy(TestStrategy)
+        cerebro.broker.setcash(100000.0)
+        cerebro.addsizer(bt.sizers.FixedSize, stake=10000)
+        cerebro.broker.setcommission(commission=0.001)
+        cerebro.run()
+    except BaseException as e:
+        print(f'{stock_name}回测错误{e}')
+        return np.nan
+    # print(cerebro.broker.getvalue() - 100000.0)
+    # print(stock_name)
+    else:
+        return cerebro.broker.getvalue() - 100000.0
+
+
+def tdf(tt, Volatility):
+    """Summarize one parameter value's per-stock P&L list into a Series.
+
+    NOTE(review): profit_ratio raises ZeroDivisionError if every result is
+    NaN, and max()/min() over a list containing NaN can itself yield NaN.
+    """
+    num_nan = np.isnan(tt).sum()  # Count NaN values
+    print(f'num_nan={num_nan}')
+
+    filtered_result = [r for r in tt if not np.isnan(r)]  # Filter out NaN values
+    print(f'filtered_result={filtered_result}')
+
+    # Calculate statistics
+    num_profits = len([r for r in tt if r > 0])
+    num_losses = len([r for r in tt if r < 0])
+    profit_ratio = num_profits / (len(filtered_result))
+    total_profit = sum([r for r in tt if r > 0])
+    avg_profit = total_profit / num_profits if num_profits else 0
+    max_profit = max(tt)
+    min_profit = min([r for r in tt if r > 0]) if num_profits else 0
+    total_loss = sum([r for r in tt if r < 0])
+    avg_loss = total_loss / num_losses if num_losses else 0
+    max_loss = min(tt)
+    min_loss = max([r for r in tt if r < 0]) if num_losses else 0
+    # Append the results into the DataFrame
+    # NOTE(review): the '基准均线' key does not match the DataFrame columns
+    # declared in __main__ ('MA5乖离率'/'当日回落') — concat adds a new column.
+    result_dict = {'基准均线': Volatility, '盈利个数': num_profits,
+                   '盈利比例': profit_ratio, '总盈利': total_profit, '平均盈利': avg_profit,
+                   '最大盈利': max_profit, '最小盈利': min_profit, '总亏损': total_loss,
+                   '平均亏损': avg_loss, '最大亏损': max_loss, '最小亏损': min_loss, '未计算个股数': num_nan}
+    df_t = pd.Series(result_dict)
+    return df_t
+
+
+if __name__ == '__main__':
+    # Enable multiprocessing debug logging and pin the process to CPUs 0-22.
+    logger = mp.log_to_stderr()
+    logger.setLevel(logging.DEBUG)
+    cpu_list = list(range(0, 23))
+    print(cpu_list)
+    pus = psutil.Process()
+    pus.cpu_affinity(cpu_list)
+    start_time = dt.now()
+
+    # Parameter values to sweep.
+    Volatility = range(5, 500, 5)  # intraday pullback
+    # rates = range(3, 20, 1)  # MA deviation rate
+    # Build all parameter combinations.
+    # NOTE(review): product() over a single range yields 1-tuples, so the
+    # loop variable below is a tuple, not an int — confirm this is intended.
+    all_combinations = list(product(Volatility))
+    print(f'共需计算{len(all_combinations)}次')
+
+    # Fetch the per-stock data once, up front.
+    stock_data_dict = get_stock_data()
+    results = []
+
+    df = pd.DataFrame(
+        columns=['MA5乖离率', '当日回落', '盈利个数', '盈利比例', '总盈利', '平均盈利', '最大盈利', '最小盈利',
+                 '总亏损',
+                 '平均亏损', '最大亏损', '最小亏损', '未计算个股数'])
+
+    err_list = []
+
+    # Tasks per round.
+    # NOTE(review): CHUNK_SIZE, timeout, max_retries and err_list are unused.
+    CHUNK_SIZE = 200  # adjust as needed
+    timeout = 120
+    max_retries = 3
+    with concurrent.futures.ProcessPoolExecutor(max_workers=24) as inner_executor:
+        for Volatility in tqdm(all_combinations, desc='计算进度'):
+            while True:
+                try:
+                    # Run all stocks for this parameter value in parallel.
+                    res = [result for result in tqdm(
+                        inner_executor.map(backtrader_test, stock_data_dict.values(), stock_data_dict.keys(),
+                                           [Volatility] * len(stock_data_dict)),
+                        desc='单轮计算进度')]
+
+                except BaseException as e:
+                    print(f'计算错误{e}')
+                    # NOTE(review): rebinding inner_executor means the
+                    # replacement pool is never shut down by the `with` block.
+                    inner_executor = concurrent.futures.ProcessPoolExecutor(max_workers=24)
+                else:
+                    results.append(res)
+                    df_t = tdf(res, Volatility)
+                    df = pd.concat([df, df_t.to_frame().T], ignore_index=True)
+                    break
+            # time.sleep(1)
+            print(f'{Volatility}计算完成,共计算{len(res)}个股票')
+            print(df)
+        print('循环结束')
+        to_df(df)
+        print(f'计算完成,共耗时{dt.now() - start_time}秒')

+ 325 - 0
backtrader/231216_bt.py

@@ -0,0 +1,325 @@
+# coding:utf-8
+import time
+from multiprocessing import freeze_support, Value, Lock
+import backtrader as bt
+from backtrader.feeds import PandasData
+import backtrader.indicators as btind
+from sqlalchemy import create_engine, text
+import pymysql
+from tqdm import tqdm
+import concurrent.futures
+import numpy as np
+import pandas as pd
+import platform
+import datetime
+from datetime import datetime as dt
+from itertools import product
+import psutil
+import logging
+import multiprocessing as mp
+from itertools import islice
+
+from func_timeout import func_set_timeout, FunctionTimedOut
+from functools import partial
+
+
+class MyPandasData(PandasData):
+    # Extends backtrader's PandasData feed with extra columns: the HL
+    # high/low marker plus MACD (dif/dea/macd) and RSI (6/12/24) lines.
+    lines = ('hl', 'dif', 'dea', 'macd', 'rsi_6', 'rsi_12', 'rsi_24',)
+    # Default column index of each extra line in the source DataFrame.
+    params = (('hl', 7),
+              ('dif', 8),
+              ('dea', 9),
+              ('macd', 10),
+              ('rsi_6', 11),
+              ('rsi_12', 12),
+              ('rsi_24', 13),
+              )
+
+
+class TestStrategy(bt.Strategy):
+    """MA-alignment strategy: buy when SMA5>SMA10>SMA20 with SMA20 rising
+    and the short/long MA deviations within tolerance; close under SMA5."""
+    params = (
+        ("num", 3),
+        ('Volatility', 0),
+        ('rate', 1),  # note: the trailing comma is required!
+    )
+
+    def log(self, txt, dt=None):
+        # Record the strategy's execution log (printing currently disabled).
+        dt = dt or self.datas[0].datetime.date(0)
+        # print('%s, %s' % (dt.isoformat(), txt))
+
+    def __init__(self):
+        # Cache line references and build the SMA indicators used in next().
+        try:
+            self.pos_price = 0
+            self.dataclose = self.datas[0].close
+            self.dataopen = self.datas[0].open
+            self.high = self.datas[0].high
+            self.low = self.datas[0].low
+            self.volume = self.datas[0].volume
+            self.hl = self.datas[0].hl
+            self.dif = self.datas[0].dif
+            self.dea = self.datas[0].dea
+            self.macd = self.datas[0].macd
+            self.rsi_6 = self.datas[0].rsi_6
+            self.rsi_12 = self.datas[0].rsi_12
+            self.rsi_24 = self.datas[0].rsi_24
+            self.sma5 = btind.MovingAverageSimple(self.datas[0].close, period=5)
+            self.sma10 = btind.MovingAverageSimple(self.datas[0].close, period=10)
+            self.sma20 = btind.MovingAverageSimple(self.datas[0].close, period=20)
+            self.sma30 = btind.MovingAverageSimple(self.datas[0].close, period=30)
+            self.sma40 = btind.MovingAverageSimple(self.datas[0].close, period=40)
+            self.sma50 = btind.MovingAverageSimple(self.datas[0].close, period=50)
+            self.sma60 = btind.MovingAverageSimple(self.datas[0].close, period=60)
+            self.sma70 = btind.MovingAverageSimple(self.datas[0].close, period=70)
+            self.sma80 = btind.MovingAverageSimple(self.datas[0].close, period=80)
+            self.sma90 = btind.MovingAverageSimple(self.datas[0].close, period=90)
+            self.sma100 = btind.MovingAverageSimple(self.datas[0].close, period=100)
+            self.sma110 = btind.MovingAverageSimple(self.datas[0].close, period=110)
+            self.sma120 = btind.MovingAverageSimple(self.datas[0].close, period=120)
+            # self.sma_vol = btind.MovingAverageSimple(self.datas[0].close, period=self.params.Volatility)
+        except BaseException as e:
+            # NOTE(review): BaseException also swallows KeyboardInterrupt/SystemExit.
+            print(f'初始化错误{e}')
+
+    def notify_order(self, order):
+        """
+        Handle order status notifications.
+
+        Arguments:
+            order {object} -- the order whose status changed
+        """
+        if order.status in [order.Submitted, order.Accepted]:
+            # Nothing to do while the order is only submitted/accepted.
+            return
+
+        # Check whether the order completed.
+        if order.status in [order.Completed]:
+            if order.isbuy():
+                self.buyprice = order.executed.price
+                self.buycomm = order.executed.comm
+            self.bar_executed = len(self)
+
+        # The order was rejected, e.g. for lack of funds.
+        elif order.status in [order.Canceled, order.Margin, order.Rejected]:
+            pass
+            # self.log('Order Canceled/Margin/Rejected')
+
+        # Status handled; clear the pending order reference.
+        self.order = None
+
+    def notify_trade(self, trade):
+        """
+        Handle trade result notifications.
+
+        Arguments:
+            trade {object} -- the trade whose status changed
+        """
+        if not trade.isclosed:
+            return
+
+        # Would log the trade's gross and net profit.
+        # self.log('OPERATION PROFIT, GROSS %.2f, NET %.2f' % (trade.pnl, trade.pnlcomm))
+
+    def next(self):
+        # Percent parameters are given as integers; scale to fractions.
+        vot = self.params.Volatility / 100
+        rate = self.params.rate / 100
+        # Buy: bullish MA stack, red candle (open > close), rising SMA20,
+        # and SMA5/SMA20, SMA10/SMA20 ratios within the vot/rate bands.
+        if self.sma5[0] > self.sma10[0] > self.sma20[0] \
+                and self.dataopen[0] > self.dataclose[0] and self.sma20[0] > self.sma20[-1] > self.sma20[-2] \
+                and (1 + vot) > self.sma5[0] / self.sma20[0] > (1 - vot) and (1 + rate) > self.sma10[0] / self.sma20[
+            0] > (1 - rate):
+            self.order = self.buy()
+            self.pos_price = self.dataclose[0]
+
+        # Exit when close falls under SMA5 (the HH-marker clause is redundant
+        # with the second condition).
+        elif (self.hl[0] == 5 and self.dataclose[0] < self.sma5[0]) or (self.dataclose[0] < self.sma5[0]):
+            self.order = self.close()
+            self.pos_price = 0
+
+    def stop(self):
+        # pass
+        self.log(u'(MA趋势交易效果) Ending Value %.2f' % (self.broker.getvalue()))
+
+
+def to_df(df):
+    """Sort the sweep results and save them to CSV (path depends on host)."""
+    print('开始存数据')
+    df.sort_values(by=['短期乖离率', '长期乖离率'], ascending=True, inplace=True)
+    df = df.reset_index(drop=True)
+    if platform.node() == 'DanieldeMBP.lan':
+        # NOTE(review): '%Y%m%d%H%m%S' repeats %m (month); '%M' (minutes)
+        # is presumably intended — confirm.
+        df.to_csv(f"/Users/daniel/Documents/策略/Ma5乖离7买入{dt.now().strftime('%Y%m%d%H%m%S')}.csv",
+                  index=True,
+                  encoding='utf_8_sig', mode='w')
+    else:
+        df.to_csv(f"C:\策略结果\乖离率{dt.now().strftime('%Y%m%d%H%m%S')}.csv", index=True,
+                  encoding='utf_8_sig', mode='w')
+    print(f'结果:, \n, {df}')
+
+ 
+def chunked_iterable(iterable, size):
+    """Yield successive tuples of at most `size` items from `iterable`."""
+    it = iter(iterable)
+    while True:
+        chunk = tuple(islice(it, size))
+        if not chunk:
+            return
+        yield chunk
+
+
+def query_database(table_name):
+    """Read one whole MySQL table into a DataFrame (runs in worker processes).
+
+    NOTE(review): credentials are hard-coded; a new engine is created per call.
+    """
+    engine = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_tech?charset=utf8')
+    df = pd.read_sql_table(table_name, engine)
+    return df
+
+
+def get_stock_data():
+    """Load every '1d' table from qmt_stocks_tech into {table_name: DataFrame}.
+
+    Retries forever on any error (BaseException), so a persistent DB failure
+    loops indefinitely — NOTE(review): consider bounding the retries.
+    """
+    while True:
+        try:
+            db = pymysql.connect(host='localhost',
+                                 user='root',
+                                 port=3307,
+                                 password='r6kEwqWU9!v3',
+                                 database='qmt_stocks_tech')
+            cursor = db.cursor()
+            # Builds: show tables like '%1d%'
+            cursor.execute("show tables like '%%%s%%' " % '1d')
+            table_list = [tuple[0] for tuple in cursor.fetchall()]
+            # table_list = table_list[0: 10]
+            cursor.close()
+            db.close()
+            print(f'开始数据库读取')
+            with concurrent.futures.ProcessPoolExecutor(max_workers=20) as executor:
+                # Query the tables in parallel and collect each table's frame.
+                data_dict = {table_name: df for table_name, df in
+                             tqdm(zip(table_list, executor.map(query_database, table_list)))}
+            print(f'数据库读取完成')
+            break
+        except BaseException as e:
+            print(f'数据库读取错误{e}')
+            continue
+    return data_dict
+
+
+def backtrader_test(stock_data, stock_name,  Volatility, rate):
+    """Backtest one stock with the given Volatility/rate params.
+
+    Returns the P&L relative to the 100k starting cash, or NaN on error.
+    """
+    # print(f'开始回测{stock_name}')
+    try:
+        cerebro = bt.Cerebro()
+        stock_data.time = pd.to_datetime(stock_data.time)
+        # Map the textual HL marker to the integer codes used by the strategy.
+        stock_data['HL'] = stock_data['HL'].map({'L': 1,
+                                                 'LL': 2,
+                                                 'L*': 3,
+                                                 'H': 4,
+                                                 'HH': 5,
+                                                 'H*': 6,
+                                                 '-': 7})
+        cerebro.addstrategy(TestStrategy, Volatility=Volatility, rate=rate)
+        data = MyPandasData(dataname=stock_data,
+                            fromdate=datetime.datetime(2017, 1, 1),
+                            todate=datetime.datetime(2023, 10, 30),
+                            datetime='time',
+                            open='open_back',
+                            close='close_back',
+                            high='high_back',
+                            low='low_back',
+                            volume='volume_back',
+                            hl='HL',
+                            dif='dif',
+                            dea='dea',
+                            macd='macd',
+                            rsi_6='rsi_6',
+                            rsi_12='rsi_12',
+                            rsi_24='rsi_24',
+                            )
+        cerebro.adddata(data)
+        # NOTE(review): TestStrategy is added a second time here, with default
+        # params — the strategy runs twice per backtest; likely unintended.
+        cerebro.addstrategy(TestStrategy)
+        cerebro.broker.setcash(100000.0)
+        cerebro.addsizer(bt.sizers.FixedSize, stake=10000)
+        cerebro.broker.setcommission(commission=0.001)
+        cerebro.run()
+    except BaseException as e:
+        print(f'{stock_name}回测错误{e}')
+        return np.nan
+    # print(cerebro.broker.getvalue() - 100000.0)
+    # print(stock_name)
+    else:
+        return cerebro.broker.getvalue() - 100000.0
+
+
+def tdf(tt, Volatility, rate):
+    """Summarize one parameter combo's per-stock P&L list into a Series.
+
+    NOTE(review): profit_ratio raises ZeroDivisionError if every result is
+    NaN, and max()/min() over a list containing NaN can itself yield NaN.
+    """
+    num_nan = np.isnan(tt).sum()  # Count NaN values
+    print(f'num_nan={num_nan}')
+
+    filtered_result = [r for r in tt if not np.isnan(r)]  # Filter out NaN values
+    print(f'filtered_result={filtered_result}')
+
+    # Calculate statistics
+    num_profits = len([r for r in tt if r > 0])
+    num_losses = len([r for r in tt if r < 0])
+    profit_ratio = num_profits / (len(filtered_result))
+    total_profit = sum([r for r in tt if r > 0])
+    avg_profit = total_profit / num_profits if num_profits else 0
+    max_profit = max(tt)
+    min_profit = min([r for r in tt if r > 0]) if num_profits else 0
+    total_loss = sum([r for r in tt if r < 0])
+    avg_loss = total_loss / num_losses if num_losses else 0
+    max_loss = min(tt)
+    min_loss = max([r for r in tt if r < 0]) if num_losses else 0
+    # Append the results into the DataFrame
+    result_dict = {'短期乖离率': Volatility, '长期乖离率': rate, '盈利个数': num_profits,
+                   '盈利比例': profit_ratio, '总盈利': total_profit, '平均盈利': avg_profit,
+                   '最大盈利': max_profit, '最小盈利': min_profit, '总亏损': total_loss,
+                   '平均亏损': avg_loss, '最大亏损': max_loss, '最小亏损': min_loss, '未计算个股数': num_nan}
+    df_t = pd.Series(result_dict)
+    return df_t
+
+
+if __name__ == '__main__':
+    # Enable multiprocessing debug logging and pin the process to CPUs 0-23.
+    logger = mp.log_to_stderr()
+    logger.setLevel(logging.DEBUG)
+    cpu_list = list(range(0, 24))
+    print(cpu_list)
+    pus = psutil.Process()
+    pus.cpu_affinity(cpu_list)
+    start_time = dt.now()
+
+    # Parameter values to sweep.
+    Volatility = range(1, 61, 1)  # short-term deviation rate
+    rates = range(1, 121, 1)  # long-term deviation rate
+    # Build all parameter combinations (60 * 120 = 7200 rounds).
+    all_combinations = list(product(Volatility, rates))
+    print(f'共需计算{len(all_combinations)}次')
+
+    # Fetch the per-stock data once, up front.
+    stock_data_dict = get_stock_data()
+    results = []
+
+    df = pd.DataFrame(
+        columns=['短期乖离率', '长期乖离率',  '盈利个数', '盈利比例', '总盈利', '平均盈利', '最大盈利', '最小盈利',
+                 '总亏损',
+                 '平均亏损', '最大亏损', '最小亏损', '未计算个股数'])
+
+    err_list = []
+
+    # Tasks per round.
+    # NOTE(review): CHUNK_SIZE, timeout, max_retries and err_list are unused.
+    CHUNK_SIZE = 200  # adjust as needed
+    timeout = 120
+    max_retries = 3
+    with concurrent.futures.ProcessPoolExecutor(max_workers=18) as inner_executor:
+        for Volatility, rate in tqdm(all_combinations, desc='计算进度'):
+            while True:
+                try:
+                    # Run all stocks for this parameter combo in parallel.
+                    res = [result for result in tqdm(
+                        inner_executor.map(backtrader_test, stock_data_dict.values(), stock_data_dict.keys(),
+                                            [Volatility] * len(stock_data_dict), [rate] * len(stock_data_dict)),
+                        desc='单轮计算进度')]
+
+                except BaseException as e:
+                    print(f'计算错误{e}')
+                    # NOTE(review): rebinding inner_executor means the
+                    # replacement pool is never shut down by the `with` block.
+                    inner_executor = concurrent.futures.ProcessPoolExecutor(max_workers=18)
+                else:
+                    results.append(res)
+                    df_t = tdf(res, Volatility, rate)
+                    df = pd.concat([df, df_t.to_frame().T], ignore_index=True)
+                    break
+            # time.sleep(1)
+            print(f'{Volatility}计算完成,共计算{len(res)}个股票')
+            # NOTE(review): to_df rewrites the full CSV on every round; with
+            # 7200 combinations this is expensive — confirm it is intentional.
+            to_df(df)
+            print(df)
+        print('循环结束')
+        to_df(df)
+        print(f'计算完成,共耗时{dt.now() - start_time}秒')

+ 316 - 0
backtrader/23918_bt.py

@@ -0,0 +1,316 @@
+# coding:utf-8
+import time
+from multiprocessing import freeze_support, Value, Lock
+import backtrader as bt
+from backtrader.feeds import PandasData
+import backtrader.indicators as btind
+from sqlalchemy import create_engine, text
+import pymysql
+from tqdm import tqdm
+import concurrent.futures
+import numpy as np
+import pandas as pd
+import platform
+import datetime
+from datetime import datetime as dt
+from itertools import product
+import psutil
+import logging
+import multiprocessing as mp
+from itertools import islice
+
+from func_timeout import func_set_timeout, FunctionTimedOut
+from functools import partial
+
+
class MyPandasData(PandasData):
    """PandasData feed extended with custom technical-indicator lines.

    Adds seven extra lines (HL pivot marker, MACD trio, three RSI periods)
    on top of the standard OHLCV lines; the integers in ``params`` are the
    default DataFrame column indices backtrader uses for each extra line.
    """
    lines = ('hl', 'dif', 'dea', 'macd', 'rsi_6', 'rsi_12', 'rsi_24',)
    params = (('hl', 7),
              ('dif', 8),
              ('dea', 9),
              ('macd', 10),
              ('rsi_6', 11),
              ('rsi_12', 12),
              ('rsi_24', 13),
              )
+
+
class TestStrategy(bt.Strategy):
    """MA5-pullback strategy: buy a dip below a falling MA5, exit on a set of
    strength/weakness rules.  Parameters are percentages (divided by 100 in
    ``next``)."""
    params = (
        ("num", 3),
        ('Volatility', 0),
        ('rate', 3),  # trailing comma required so params stays a tuple of tuples
    )

    def log(self, txt, dt=None):
        # Strategy execution log; the print is commented out, so this is a no-op.
        dt = dt or self.datas[0].datetime.date(0)
        # print('%s, %s' % (dt.isoformat(), txt))

    def __init__(self):
        try:
            # Last tracked entry price; 0 means "no tracked position".
            self.pos_price = 0
            # Aliases for the feed's standard and custom indicator lines.
            self.dataclose = self.datas[0].close
            self.dataopen = self.datas[0].open
            self.high = self.datas[0].high
            self.low = self.datas[0].low
            self.volume = self.datas[0].volume
            self.hl = self.datas[0].hl
            self.dif = self.datas[0].dif
            self.dea = self.datas[0].dea
            self.macd = self.datas[0].macd
            self.rsi_6 = self.datas[0].rsi_6
            self.rsi_12 = self.datas[0].rsi_12
            self.rsi_24 = self.datas[0].rsi_24
            # Simple moving averages over the close line.
            self.sma5 = btind.MovingAverageSimple(self.datas[0].close, period=5)
            self.sma10 = btind.MovingAverageSimple(self.datas[0].close, period=10)
            self.sma20 = btind.MovingAverageSimple(self.datas[0].close, period=20)
            self.sma60 = btind.MovingAverageSimple(self.datas[0].close, period=60)
        except BaseException as e:
            print(f'初始化错误{e}')

    def notify_order(self, order):
        """Order status handler.

        Arguments:
            order {object} -- order status object from backtrader
        """
        if order.status in [order.Submitted, order.Accepted]:
            # Nothing to do while the order is merely submitted/accepted.
            return

        # Record execution details once the order completes.
        if order.status in [order.Completed]:
            if order.isbuy():
                self.buyprice = order.executed.price
                self.buycomm = order.executed.comm
            self.bar_executed = len(self)

        # Order rejected, e.g. for lack of cash.
        elif order.status in [order.Canceled, order.Margin, order.Rejected]:
            pass
            # self.log('Order Canceled/Margin/Rejected')

        # Order handled; clear the reference.
        self.order = None

    def notify_trade(self, trade):
        """Trade result handler.

        Arguments:
            trade {object} -- trade status object from backtrader
        """
        if not trade.isclosed:
            return

        # Gross/net profit logging (disabled).
        # self.log('OPERATION PROFIT, GROSS %.2f, NET %.2f' % (trade.pnl, trade.pnlcomm))

    def next(self):
        # Convert the percent parameters to fractions.
        rate = self.params.rate / 100
        vola = self.params.Volatility / 100
        # Entry: today's low dips 'rate' below MA5 while MA5 has been falling
        # for two bars and sat below MA10 < MA20 two bars ago.
        if self.low[0] < self.sma5[0] * (1 - rate) and self.sma5[0] < self.sma5[-1] < self.sma5[-2] < self.sma10[-2] < \
                self.sma20[-2]:
            self.order = self.buy()
            self.pos_price = self.low[-1]


        # elif (self.hl[0] == 5 or self.dataclose[0] < self.sma5[0]):
        # Exit: intraday reversal of more than 'vola' below MA5, close back
        # under MA5, close under the tracked entry price, or close stretched
        # more than 'rate' above MA5 (take profit).
        elif ((self.high[0] < self.sma5[0] and self.dataclose[0] < (self.high[0] * (1 - vola))) or
              (self.high[0] > self.sma5[0] > self.dataclose[0]) or self.dataclose[0] < self.pos_price) \
                or (self.dataclose[0] < self.sma5[0]) or (self.dataclose[0] > self.sma5[0] * (1 + rate)):
            self.order = self.close()
            self.pos_price = 0

    def stop(self):
        # pass
        self.log(u'(MA趋势交易效果) Ending Value %.2f' % (self.broker.getvalue()))
+
+
def to_df(df):
    """Sort the parameter-scan results and persist them as a UTF-8 CSV.

    Sorts ascending by the two scan parameters, then writes to a
    platform-specific path stamped with the current time.

    Args:
        df: result DataFrame containing at least the columns
            'MA5乖离率' and '当日回落'.
    """
    print('开始存数据')
    df.sort_values(by=['MA5乖离率', '当日回落'], ascending=True, inplace=True)
    df = df.reset_index(drop=True)
    # Bug fix: '%Y%m%d%H%m%S' repeated the month (%m) where the minutes (%M)
    # were intended, so files written within the same hour collided.
    stamp = dt.now().strftime('%Y%m%d%H%M%S')
    if platform.node() == 'DanieldeMBP.lan':
        df.to_csv(f"/Users/daniel/Documents/策略/Ma5乖离7买入{stamp}.csv",
                  index=True,
                  encoding='utf_8_sig', mode='w')
    else:
        # Raw string: keeps the Windows backslashes from being treated as
        # (invalid) escape sequences; the path value is unchanged.
        df.to_csv(rf"C:\策略结果\Ma5乖离7买入{stamp}.csv", index=True,
                  encoding='utf_8_sig', mode='w')
    print(f'结果:, \n, {df}')
+
+
def chunked_iterable(iterable, size):
    """Split *iterable* into successive tuples of at most *size* items.

    The final chunk may be shorter; an exhausted iterable yields nothing.
    """
    it = iter(iterable)
    chunk = tuple(islice(it, size))
    while chunk:
        yield chunk
        chunk = tuple(islice(it, size))
+
+
def query_database(table_name):
    """Load one table from the qmt_stocks_tech MySQL database into a DataFrame."""
    # A fresh engine per call: this function runs inside worker processes,
    # and engines cannot be shared across process boundaries.
    connection_url = 'mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_tech?charset=utf8'
    return pd.read_sql_table(table_name, create_engine(connection_url))
+
+
def get_stock_data():
    """Read every table whose name contains '1d' into {table_name: DataFrame}.

    Lists matching tables from qmt_stocks_tech, then loads them in parallel
    with a process pool.  Retries the whole procedure on any error — the DB
    is treated as only transiently unavailable.

    Returns:
        dict[str, pandas.DataFrame]: one entry per matching table.
    """
    while True:
        try:
            db = pymysql.connect(host='localhost',
                                 user='root',
                                 port=3307,
                                 password='r6kEwqWU9!v3',
                                 database='qmt_stocks_tech')
            cursor = db.cursor()
            cursor.execute("show tables like '%%%s%%' " % '1d')
            # 'row' instead of 'tuple': the original name shadowed the builtin.
            table_list = [row[0] for row in cursor.fetchall()]
            # table_list = table_list[0: 10]
            cursor.close()
            db.close()
            print('开始数据库读取')
            with concurrent.futures.ProcessPoolExecutor(max_workers=16) as executor:
                # Query every table in parallel; zip keeps names aligned with results.
                data_dict = {table_name: df for table_name, df in
                             tqdm(zip(table_list, executor.map(query_database, table_list)))}
            print('数据库读取完成')
            break
        except BaseException as e:
            # Broad catch is deliberate best-effort: retry until the DB responds.
            print(f'数据库读取错误{e}')
            continue
    return data_dict
+
+
def backtrader_test(stock_data, stock_name, vot, rate):
    """Backtest TestStrategy on one stock and return profit/loss vs 100k cash.

    Args:
        stock_data: per-stock DataFrame with time/open_back/.../HL/dif/dea/
            macd/rsi_* columns.
        stock_name: stock/table identifier, used only for error reporting.
        vot: Volatility parameter (percent) forwarded to the strategy.
        rate: rate parameter (percent) forwarded to the strategy.

    Returns:
        float: final broker value minus the 100000.0 starting cash, or
        numpy.nan when the backtest raised.
    """
    # print(f'开始回测{stock_name}')
    try:
        cerebro = bt.Cerebro()
        stock_data.time = pd.to_datetime(stock_data.time)
        # Encode the textual HL pivot marker as the numeric line the feed needs.
        stock_data['HL'] = stock_data['HL'].map({'L': 1,
                                                 'LL': 2,
                                                 'L*': 3,
                                                 'H': 4,
                                                 'HH': 5,
                                                 'H*': 6,
                                                 '-': 7})
        cerebro.addstrategy(TestStrategy, Volatility=vot, rate=rate)
        data = MyPandasData(dataname=stock_data,
                            fromdate=datetime.datetime(2017, 1, 1),
                            todate=datetime.datetime(2022, 10, 30),
                            datetime='time',
                            open='open_back',
                            close='close_back',
                            high='high_back',
                            low='low_back',
                            volume='volume_back',
                            hl='HL',
                            dif='dif',
                            dea='dea',
                            macd='macd',
                            rsi_6='rsi_6',
                            rsi_12='rsi_12',
                            rsi_24='rsi_24',
                            )
        cerebro.adddata(data)
        # Bug fix: the strategy used to be added a second time here with
        # default parameters, so every run traded two strategy instances and
        # the result no longer reflected the (vot, rate) combination under test.
        cerebro.broker.setcash(100000.0)
        cerebro.addsizer(bt.sizers.FixedSize, stake=10000)
        cerebro.broker.setcommission(commission=0.001)
        cerebro.run()
    except BaseException as e:
        print(f'{stock_name}回测错误{e}')
        return np.nan
    # print(cerebro.broker.getvalue() - 100000.0)
    # print(stock_name)
    else:
        return cerebro.broker.getvalue() - 100000.0
+
+
def tdf(tt, rate, Volatility):
    """Aggregate one parameter combination's per-stock results into a Series.

    Args:
        tt: list of per-stock profit/loss floats; NaN marks stocks whose
            backtest failed.
        rate: MA5 bias parameter of this run ('MA5乖离率').
        Volatility: intraday pullback parameter of this run ('当日回落').

    Returns:
        pandas.Series with counts/sums/extremes of profits and losses.
    """
    num_nan = np.isnan(tt).sum()  # number of failed backtests
    print(f'num_nan={num_nan}')

    filtered_result = [r for r in tt if not np.isnan(r)]  # drop failed runs
    print(f'filtered_result={filtered_result}')

    # Statistics over the valid results only.  Bug fixes: max()/min() over
    # the raw list could return NaN when a NaN was present, and an all-NaN
    # round divided by zero; both now fall back to 0.
    num_profits = len([r for r in filtered_result if r > 0])
    num_losses = len([r for r in filtered_result if r < 0])
    profit_ratio = num_profits / len(filtered_result) if filtered_result else 0
    total_profit = sum(r for r in filtered_result if r > 0)
    avg_profit = total_profit / num_profits if num_profits else 0
    max_profit = max(filtered_result) if filtered_result else 0
    min_profit = min(r for r in filtered_result if r > 0) if num_profits else 0
    total_loss = sum(r for r in filtered_result if r < 0)
    avg_loss = total_loss / num_losses if num_losses else 0
    # '最大亏损' is the most negative value, '最小亏损' the least negative.
    max_loss = min(filtered_result) if filtered_result else 0
    min_loss = max(r for r in filtered_result if r < 0) if num_losses else 0
    # Collect the results into the row consumed by the scan loop.
    result_dict = {'MA5乖离率': rate, '当日回落': Volatility, '盈利个数': num_profits,
                   '盈利比例': profit_ratio, '总盈利': total_profit, '平均盈利': avg_profit,
                   '最大盈利': max_profit, '最小盈利': min_profit, '总亏损': total_loss,
                   '平均亏损': avg_loss, '最大亏损': max_loss, '最小亏损': min_loss, '未计算个股数': num_nan}
    df_t = pd.Series(result_dict)
    return df_t
+
+
if __name__ == '__main__':
    # Route multiprocessing logs to stderr while debugging worker behaviour.
    logger = mp.log_to_stderr()
    logger.setLevel(logging.DEBUG)
    # Pin the parent process to CPUs 0-22.
    # NOTE(review): range(0, 23) is 23 CPUs, not 24 — confirm intended.
    cpu_list = list(range(0, 23))
    print(cpu_list)
    pus = psutil.Process()
    pus.cpu_affinity(cpu_list)
    start_time = dt.now()

    # Parameter values to grid-search.
    Volatilitys = range(1, 10, 1)  # intraday pullback (当日回撤)
    rates = range(3, 20, 1)  # MA5 bias (乖离率)
    # All parameter combinations.
    all_combinations = list(product(Volatilitys, rates))
    print(f'共需计算{len(all_combinations)}次')

    # Load all stock tables once; workers receive them per task.
    stock_data_dict = get_stock_data()
    results = []

    df = pd.DataFrame(
        columns=['MA5乖离率', '当日回落', '盈利个数', '盈利比例', '总盈利', '平均盈利', '最大盈利', '最小盈利',
                 '总亏损',
                 '平均亏损', '最大亏损', '最小亏损', '未计算个股数'])

    err_list = []

    # Per-round task sizing / retry knobs (not used by the loop below).
    CHUNK_SIZE = 200  # adjust as needed
    timeout = 120
    max_retries = 3
    with concurrent.futures.ProcessPoolExecutor(max_workers=24) as inner_executor:
        for Volatility, rate in tqdm(all_combinations, desc='计算进度'):
            while True:
                try:
                    # Fan out one backtest per stock for this (Volatility, rate).
                    res = [result for result in tqdm(
                        inner_executor.map(backtrader_test, stock_data_dict.values(), stock_data_dict.keys(),
                                           [Volatility] * len(stock_data_dict), [rate] * len(stock_data_dict)),
                        desc='单轮计算进度')]

                except BaseException as e:
                    print(f'计算错误{e}')
                    # NOTE(review): rebinding inner_executor inside the `with`
                    # leaks the broken pool, and 20 workers here vs 24 above
                    # looks unintentional — confirm.
                    inner_executor = concurrent.futures.ProcessPoolExecutor(max_workers=20)
                else:
                    results.append(res)
                    # Aggregate this round into one summary row.
                    df_t = tdf(res, rate, Volatility)
                    df = pd.concat([df, df_t.to_frame().T], ignore_index=True)
                    break
            # time.sleep(1)
            print(f'{rate}计算完成,共计算{len(res)}个股票')
            print(df)
        print('循环结束')
        to_df(df)
        print(f'计算完成,共耗时{dt.now() - start_time}秒')

+ 324 - 0
backtrader/240212_bt.py

@@ -0,0 +1,324 @@
+# coding:utf-8
+import time
+from multiprocessing import freeze_support, Value, Lock
+import backtrader as bt
+from backtrader.feeds import PandasData
+import backtrader.indicators as btind
+from sqlalchemy import create_engine, text
+import pymysql
+from tqdm import tqdm
+import concurrent.futures
+import numpy as np
+import pandas as pd
+import platform
+import datetime
+from datetime import datetime as dt
+from itertools import product
+import psutil
+import logging
+import multiprocessing as mp
+from itertools import islice
+
+from func_timeout import func_set_timeout, FunctionTimedOut
+from functools import partial
+
+
class MyPandasData(PandasData):
    """PandasData feed extended with custom technical-indicator lines.

    Adds seven extra lines (HL pivot marker, MACD trio, three RSI periods);
    the integers in ``params`` are the default DataFrame column indices.
    """
    lines = ('hl', 'dif', 'dea', 'macd', 'rsi_6', 'rsi_12', 'rsi_24',)
    params = (('hl', 7),
              ('dif', 8),
              ('dea', 9),
              ('macd', 10),
              ('rsi_6', 11),
              ('rsi_12', 12),
              ('rsi_24', 13),
              )
+
+
class TestStrategy(bt.Strategy):
    """MA-convergence strategy: buy when MA5/MA10/MA20 are bullish-aligned,
    MA20 is rising, and MA5/MA10 sit within the Volatility/rate bands of
    MA20; exit when the close falls back under MA5.  Parameters are
    percentages (divided by 100 in ``next``)."""
    params = (
        ("num", 3),
        ('Volatility', 0),
        ('rate', 1),  # trailing comma required so params stays a tuple of tuples
    )

    def log(self, txt, dt=None):
        # Strategy execution log; the print is commented out, so this is a no-op.
        dt = dt or self.datas[0].datetime.date(0)
        # print('%s, %s' % (dt.isoformat(), txt))

    def __init__(self):
        try:
            # Last tracked entry price; 0 means "no tracked position".
            self.pos_price = 0
            # Aliases for the feed's standard and custom indicator lines.
            self.dataclose = self.datas[0].close
            self.dataopen = self.datas[0].open
            self.high = self.datas[0].high
            self.low = self.datas[0].low
            self.volume = self.datas[0].volume
            self.hl = self.datas[0].hl
            self.dif = self.datas[0].dif
            self.dea = self.datas[0].dea
            self.macd = self.datas[0].macd
            self.rsi_6 = self.datas[0].rsi_6
            self.rsi_12 = self.datas[0].rsi_12
            self.rsi_24 = self.datas[0].rsi_24
            # Simple moving averages from 5 to 120 bars (only 5/10/20 are
            # used by next(); the rest are available for experimentation).
            self.sma5 = btind.MovingAverageSimple(self.datas[0].close, period=5)
            self.sma10 = btind.MovingAverageSimple(self.datas[0].close, period=10)
            self.sma20 = btind.MovingAverageSimple(self.datas[0].close, period=20)
            self.sma30 = btind.MovingAverageSimple(self.datas[0].close, period=30)
            self.sma40 = btind.MovingAverageSimple(self.datas[0].close, period=40)
            self.sma50 = btind.MovingAverageSimple(self.datas[0].close, period=50)
            self.sma60 = btind.MovingAverageSimple(self.datas[0].close, period=60)
            self.sma70 = btind.MovingAverageSimple(self.datas[0].close, period=70)
            self.sma80 = btind.MovingAverageSimple(self.datas[0].close, period=80)
            self.sma90 = btind.MovingAverageSimple(self.datas[0].close, period=90)
            self.sma100 = btind.MovingAverageSimple(self.datas[0].close, period=100)
            self.sma110 = btind.MovingAverageSimple(self.datas[0].close, period=110)
            self.sma120 = btind.MovingAverageSimple(self.datas[0].close, period=120)
            # self.sma_vol = btind.MovingAverageSimple(self.datas[0].close, period=self.params.Volatility)
        except BaseException as e:
            print(f'初始化错误{e}')

    def notify_order(self, order):
        """Order status handler.

        Arguments:
            order {object} -- order status object from backtrader
        """
        if order.status in [order.Submitted, order.Accepted]:
            # Nothing to do while the order is merely submitted/accepted.
            return

        # Record execution details once the order completes.
        if order.status in [order.Completed]:
            if order.isbuy():
                self.buyprice = order.executed.price
                self.buycomm = order.executed.comm
            self.bar_executed = len(self)

        # Order rejected, e.g. for lack of cash.
        elif order.status in [order.Canceled, order.Margin, order.Rejected]:
            pass
            # self.log('Order Canceled/Margin/Rejected')

        # Order handled; clear the reference.
        self.order = None

    def notify_trade(self, trade):
        """Trade result handler.

        Arguments:
            trade {object} -- trade status object from backtrader
        """
        if not trade.isclosed:
            return

        # Gross/net profit logging (disabled).
        # self.log('OPERATION PROFIT, GROSS %.2f, NET %.2f' % (trade.pnl, trade.pnlcomm))

    def next(self):
        # Convert the percent parameters to fractions.
        vot = self.params.Volatility / 100
        rate = self.params.rate / 100
        # Entry: bullish MA alignment, a red candle (open > close), rising
        # MA20, and MA5/MA10 within the vot/rate bands around MA20.
        if self.sma5[0] > self.sma10[0] > self.sma20[0] \
                and self.dataopen[0] > self.dataclose[0] and self.sma20[0] > self.sma20[-1] > self.sma20[-2] \
                and (1 + vot) > self.sma5[0] / self.sma20[0] > (1 - vot) and (1 + rate) > self.sma10[0] / self.sma20[
            0] > (1 - rate):
            self.order = self.buy()
            self.pos_price = self.dataclose[0]

        # Exit on a close back under MA5.
        # NOTE(review): the first clause is redundant — the second
        # (close < MA5) already covers it; confirm the hl==5 test was meant
        # to gate something stricter.
        elif (self.hl[0] == 5 and self.dataclose[0] < self.sma5[0]) or (self.dataclose[0] < self.sma5[0]):
            self.order = self.close()
            self.pos_price = 0

    def stop(self):
        # pass
        self.log(u'(MA趋势交易效果) Ending Value %.2f' % (self.broker.getvalue()))
+
+
def to_df(df):
    """Sort the parameter-scan results and persist them as a UTF-8 CSV.

    Args:
        df: result DataFrame containing at least the columns
            '短期乖离率' and '长期乖离率' (built by the scan loop below).
    """
    print('开始存数据')
    # Bug fix: this script's result frame uses '短期乖离率'/'长期乖离率'
    # (see the DataFrame built in __main__), not 'MA5乖离率'/'当日回落';
    # sorting by the old keys raised KeyError before anything was saved.
    df.sort_values(by=['短期乖离率', '长期乖离率'], ascending=True, inplace=True)
    df = df.reset_index(drop=True)
    # Bug fix: '%Y%m%d%H%m%S' repeated the month (%m) where the minutes (%M)
    # were intended, so files written within the same hour collided.
    stamp = dt.now().strftime('%Y%m%d%H%M%S')
    if platform.node() == 'DanieldeMBP.lan':
        df.to_csv(f"/Users/daniel/Documents/策略/Ma5乖离7买入{stamp}.csv",
                  index=True,
                  encoding='utf_8_sig', mode='w')
    else:
        # Raw string keeps the Windows backslashes literal; value unchanged.
        df.to_csv(rf"C:\策略结果\均线发散{stamp}.csv", index=True,
                  encoding='utf_8_sig', mode='w')
    print(f'结果:, \n, {df}')
+
+
def chunked_iterable(iterable, size):
    """Yield consecutive tuples of at most *size* elements from *iterable*.

    The final chunk may be shorter; nothing is yielded once exhausted.
    """
    source = iter(iterable)
    while True:
        batch = tuple(islice(source, size))
        if batch:
            yield batch
        else:
            return
+
+
def query_database(table_name):
    """Read one qmt_stocks_tech table into a DataFrame (runs in worker processes)."""
    url = 'mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_tech?charset=utf8'
    engine = create_engine(url)
    return pd.read_sql_table(table_name, engine)
+
+
def get_stock_data():
    """Read every table whose name contains '1d' into {table_name: DataFrame}.

    Lists matching tables from qmt_stocks_tech, then loads them in parallel
    with a process pool.  Retries the whole procedure on any error — the DB
    is treated as only transiently unavailable.

    Returns:
        dict[str, pandas.DataFrame]: one entry per matching table.
    """
    while True:
        try:
            db = pymysql.connect(host='localhost',
                                 user='root',
                                 port=3307,
                                 password='r6kEwqWU9!v3',
                                 database='qmt_stocks_tech')
            cursor = db.cursor()
            cursor.execute("show tables like '%%%s%%' " % '1d')
            # 'row' instead of 'tuple': the original name shadowed the builtin.
            table_list = [row[0] for row in cursor.fetchall()]
            # table_list = table_list[0: 10]
            cursor.close()
            db.close()
            print('开始数据库读取')
            with concurrent.futures.ProcessPoolExecutor(max_workers=20) as executor:
                # Query every table in parallel; zip keeps names aligned with results.
                data_dict = {table_name: df for table_name, df in
                             tqdm(zip(table_list, executor.map(query_database, table_list)))}
            print('数据库读取完成')
            break
        except BaseException as e:
            # Broad catch is deliberate best-effort: retry until the DB responds.
            print(f'数据库读取错误{e}')
            continue
    return data_dict
+
+
def backtrader_test(stock_data, stock_name, Volatility, rate):
    """Backtest TestStrategy on one stock and return profit/loss vs 100k cash.

    Args:
        stock_data: per-stock DataFrame with time/open_back/.../HL/dif/dea/
            macd/rsi_* columns.
        stock_name: stock/table identifier, used only for error reporting.
        Volatility: short-term bias parameter (percent) for the strategy.
        rate: long-term bias parameter (percent) for the strategy.

    Returns:
        float: final broker value minus the 100000.0 starting cash, or
        numpy.nan when the backtest raised.
    """
    # print(f'开始回测{stock_name}')
    try:
        cerebro = bt.Cerebro()
        stock_data.time = pd.to_datetime(stock_data.time)
        # Encode the textual HL pivot marker as the numeric line the feed needs.
        stock_data['HL'] = stock_data['HL'].map({'L': 1,
                                                 'LL': 2,
                                                 'L*': 3,
                                                 'H': 4,
                                                 'HH': 5,
                                                 'H*': 6,
                                                 '-': 7})
        cerebro.addstrategy(TestStrategy, Volatility=Volatility, rate=rate)
        data = MyPandasData(dataname=stock_data,
                            fromdate=datetime.datetime(2017, 1, 1),
                            todate=datetime.datetime(2023, 10, 30),
                            datetime='time',
                            open='open_back',
                            close='close_back',
                            high='high_back',
                            low='low_back',
                            volume='volume_back',
                            hl='HL',
                            dif='dif',
                            dea='dea',
                            macd='macd',
                            rsi_6='rsi_6',
                            rsi_12='rsi_12',
                            rsi_24='rsi_24',
                            )
        cerebro.adddata(data)
        # Bug fix: the strategy used to be added a second time here with
        # default parameters, so every run traded two strategy instances and
        # the result no longer reflected the parameters under test.
        cerebro.broker.setcash(100000.0)
        cerebro.addsizer(bt.sizers.FixedSize, stake=10000)
        cerebro.broker.setcommission(commission=0.001)
        cerebro.run()
    except BaseException as e:
        print(f'{stock_name}回测错误{e}')
        return np.nan
    # print(cerebro.broker.getvalue() - 100000.0)
    # print(stock_name)
    else:
        return cerebro.broker.getvalue() - 100000.0
+
+
def tdf(tt, Volatility, rate):
    """Aggregate one parameter combination's per-stock results into a Series.

    Args:
        tt: list of per-stock profit/loss floats; NaN marks stocks whose
            backtest failed.
        Volatility: short-term bias parameter of this run ('短期乖离率').
        rate: long-term bias parameter of this run ('长期乖离率').

    Returns:
        pandas.Series with counts/sums/extremes of profits and losses.
    """
    num_nan = np.isnan(tt).sum()  # number of failed backtests
    print(f'num_nan={num_nan}')

    filtered_result = [r for r in tt if not np.isnan(r)]  # drop failed runs
    print(f'filtered_result={filtered_result}')

    # Statistics over the valid results only.  Bug fixes: max()/min() over
    # the raw list could return NaN when a NaN was present, and an all-NaN
    # round divided by zero; both now fall back to 0.
    num_profits = len([r for r in filtered_result if r > 0])
    num_losses = len([r for r in filtered_result if r < 0])
    profit_ratio = num_profits / len(filtered_result) if filtered_result else 0
    total_profit = sum(r for r in filtered_result if r > 0)
    avg_profit = total_profit / num_profits if num_profits else 0
    max_profit = max(filtered_result) if filtered_result else 0
    min_profit = min(r for r in filtered_result if r > 0) if num_profits else 0
    total_loss = sum(r for r in filtered_result if r < 0)
    avg_loss = total_loss / num_losses if num_losses else 0
    # '最大亏损' is the most negative value, '最小亏损' the least negative.
    max_loss = min(filtered_result) if filtered_result else 0
    min_loss = max(r for r in filtered_result if r < 0) if num_losses else 0
    # Collect the results into the row consumed by the scan loop.
    result_dict = {'短期乖离率': Volatility, '长期乖离率': rate, '盈利个数': num_profits,
                   '盈利比例': profit_ratio, '总盈利': total_profit, '平均盈利': avg_profit,
                   '最大盈利': max_profit, '最小盈利': min_profit, '总亏损': total_loss,
                   '平均亏损': avg_loss, '最大亏损': max_loss, '最小亏损': min_loss, '未计算个股数': num_nan}
    df_t = pd.Series(result_dict)
    return df_t
+
+
if __name__ == '__main__':
    # Route multiprocessing logs to stderr while debugging worker behaviour.
    logger = mp.log_to_stderr()
    logger.setLevel(logging.DEBUG)
    # Pin the parent process to CPUs 8-23, leaving 0-7 for other work.
    cpu_list = list(range(8, 24))
    print(cpu_list)
    pus = psutil.Process()
    pus.cpu_affinity(cpu_list)
    start_time = dt.now()

    # Parameter values to grid-search.
    # NOTE(review): this name is immediately shadowed by the loop variable
    # 'Volatility' below — rename to 'Volatilitys' for clarity.
    Volatility = range(1, 31, 1)  # short-term bias (短期乖离率)
    rates = range(1, 121, 1)  # long-term bias (长期乖离率)
    # All parameter combinations.
    all_combinations = list(product(Volatility, rates))
    print(f'共需计算{len(all_combinations)}次')

    # Load all stock tables once; workers receive them per task.
    stock_data_dict = get_stock_data()
    results = []

    df = pd.DataFrame(
        columns=['短期乖离率', '长期乖离率',  '盈利个数', '盈利比例', '总盈利', '平均盈利', '最大盈利', '最小盈利',
                 '总亏损',
                 '平均亏损', '最大亏损', '最小亏损', '未计算个股数'])

    err_list = []

    # Per-round task sizing / retry knobs (not used by the loop below).
    CHUNK_SIZE = 200  # adjust as needed
    timeout = 120
    max_retries = 3
    with concurrent.futures.ProcessPoolExecutor(max_workers=18) as inner_executor:
        for Volatility, rate in tqdm(all_combinations, desc='计算进度'):
            while True:
                try:
                    # Fan out one backtest per stock for this (Volatility, rate).
                    res = [result for result in tqdm(
                        inner_executor.map(backtrader_test, stock_data_dict.values(), stock_data_dict.keys(),
                                            [Volatility] * len(stock_data_dict), [rate] * len(stock_data_dict)),
                        desc='单轮计算进度')]

                except BaseException as e:
                    print(f'计算错误{e}')
                    # NOTE(review): rebinding inner_executor inside the `with`
                    # leaks the broken pool, and 20 workers here vs 18 above
                    # looks unintentional — confirm.
                    inner_executor = concurrent.futures.ProcessPoolExecutor(max_workers=20)
                else:
                    results.append(res)
                    # Aggregate this round into one summary row.
                    df_t = tdf(res, Volatility, rate)
                    df = pd.concat([df, df_t.to_frame().T], ignore_index=True)
                    break
            # time.sleep(1)
            print(f'{Volatility}计算完成,共计算{len(res)}个股票')
            print(df)
        print('循环结束')
        to_df(df)
        print(f'计算完成,共耗时{dt.now() - start_time}秒')

+ 273 - 0
backtrader/333.py

@@ -0,0 +1,273 @@
+from multiprocessing import freeze_support, Value, Lock
+import backtrader as bt
+from backtrader.feeds import PandasData
+import backtrader.indicators as btind
+from sqlalchemy import create_engine, text
+import pymysql
+from tqdm import tqdm
+import concurrent.futures
+import pandas as pd
+import matplotlib
+import datetime
+from datetime import datetime as dt
+from itertools import product
+import psutil
+import logging
+import multiprocessing as mp
+from itertools import islice
+
+
class MyPandasData(PandasData):
    """PandasData feed extended with custom technical-indicator lines.

    Adds seven extra lines (HL pivot marker, MACD trio, three RSI periods);
    the integers in ``params`` are the default DataFrame column indices.
    The string block below is an alternative (money-flow) line set kept for
    reference and currently inert.
    """
    lines = ('hl', 'dif', 'dea', 'macd', 'rsi_6', 'rsi_12', 'rsi_24',)
    params = (('hl', 7),
              ('dif', 8),
              ('dea', 9),
              ('macd', 10),
              ('rsi_6', 11),
              ('rsi_12', 12),
              ('rsi_24', 13),
              )
    '''
    lines = ('change_pct', 'net_amount_main', 'net_pct_main', 'net_amount_xl', 'net_pct_xl', 'net_amount_l', 'net_pct_l'
             , 'net_amount_m', 'net_pct_m', 'net_amount_s', 'net_pct_s',)
    params = (('change_pct', 7),
              ('net_amount_main', 8),
              ('net_pct_main', 9),
              ('net_amount_xl', 10),
              ('net_pct_xl', 11),
              ('net_amount_l', 12),
              ('net_pct_l', 13),
              ('net_amount_m', 14),
              ('net_pct_m', 15),
              ('net_amount_s', 16),
              ('net_pct_s', 17),
              )
    '''
+
+
class TestStrategy(bt.Strategy):
    """Demo strategy: buy after two consecutive down closes (never sells)."""
    def log(self, txt, dt=None):
        # Strategy execution log; the print is commented out, so this is a no-op.
        dt = dt or self.datas[0].datetime.date(0)
        # print('%s, %s' % (dt.isoformat(), txt))

    def __init__(self):
        # Keep a reference to the close line.
        self.dataclose = self.datas[0].close

    def next(self):
        # Log the close (no-op while printing is disabled).
        self.log('Close, %.2f' % self.dataclose[0])
        # Today's close below yesterday's close...
        if self.dataclose[0] < self.dataclose[-1]:
            # ...and yesterday's close below the day before's:
            if self.dataclose[-1] < self.dataclose[-2]:
                # buy on the two-day decline
                self.log('买入, %.2f' % self.dataclose[0])
                self.buy()
+
def t():
    """Smoke-test helper: print a fixed marker string."""
    marker = 'tttt'
    print(marker)
+
+
def chunked_iterable(iterable, size):
    """Break *iterable* into successive tuples of at most *size* items."""
    stream = iter(iterable)
    piece = tuple(islice(stream, size))
    while piece:
        yield piece
        piece = tuple(islice(stream, size))
+
+
def query_database(table_name):
    """Fetch one qmt_stocks_tech table as a DataFrame (one engine per call,
    since this runs inside worker processes)."""
    dsn = 'mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_tech?charset=utf8'
    return pd.read_sql_table(table_name, create_engine(dsn))
+
+
def get_stock_data():
    """Read every table whose name contains '1d' into {table_name: DataFrame}.

    Lists matching tables from qmt_stocks_tech, then loads them in parallel
    with a process pool.  Retries the whole procedure on any error — the DB
    is treated as only transiently unavailable.

    Returns:
        dict[str, pandas.DataFrame]: one entry per matching table.
    """
    while True:
        try:
            db = pymysql.connect(host='localhost',
                                 user='root',
                                 port=3307,
                                 password='r6kEwqWU9!v3',
                                 database='qmt_stocks_tech')
            cursor = db.cursor()
            cursor.execute("show tables like '%%%s%%' " % '1d')
            # 'row' instead of 'tuple': the original name shadowed the builtin.
            table_list = [row[0] for row in cursor.fetchall()]
            # table_list = table_list[0: 10]
            cursor.close()
            db.close()
            print('开始数据库读取')
            with concurrent.futures.ProcessPoolExecutor(max_workers=24) as executor:
                # Query every table in parallel; zip keeps names aligned with results.
                data_dict = {table_name: df for table_name, df in
                             tqdm(zip(table_list, executor.map(query_database, table_list)))}
            print('数据库读取完成')
            break
        except BaseException as e:
            # Broad catch is deliberate best-effort: retry until the DB responds.
            print(f'数据库读取错误{e}')
            continue
    return data_dict
+
+
def backtrader_test(stock_data, stock_name, num, vot, rate):
    """Backtest TestStrategy on one stock; return final value minus 100k cash.

    Args:
        stock_data: per-stock DataFrame with time/open_back/.../HL columns.
        stock_name: stock identifier.
        num, vot, rate: scan parameters.
            NOTE(review): none of the three are used — TestStrategy here
            takes no params and stock_name is never referenced; confirm.

    Returns:
        float: broker value after the run minus the 100000.0 starting cash.
        NOTE(review): unlike the sibling scripts, errors are not caught here
        and propagate to the caller (bbt collects them via apply_async).
    """
    cerebro = bt.Cerebro()
    stock_data.time = pd.to_datetime(stock_data.time)
    # Encode the textual HL pivot marker as the numeric line the feed needs.
    stock_data['HL'] = stock_data['HL'].map({'L': 1,
                                             'LL': 2,
                                             'L*': 3,
                                             'H': 4,
                                             'HH': 5,
                                             'H*': 6,
                                             '-': 7})
    data = MyPandasData(dataname=stock_data,
                        fromdate=datetime.datetime(2017, 1, 1),
                        todate=datetime.datetime(2022, 10, 30),
                        datetime='time',
                        open='open_back',
                        close='close_back',
                        high='high_back',
                        low='low_back',
                        volume='volume_back',
                        hl='HL',
                        dif='dif',
                        dea='dea',
                        macd='macd',
                        rsi_6='rsi_6',
                        rsi_12='rsi_12',
                        rsi_24='rsi_24',
                        )
    cerebro.adddata(data)
    cerebro.addstrategy(TestStrategy)
    cerebro.broker.setcash(100000.0)
    cerebro.addsizer(bt.sizers.FixedSize, stake=100)
    cerebro.broker.setcommission(commission=0.001)
    cerebro.run()
    return cerebro.broker.getvalue() - 100000.0
+
+
def bbt(stock_data_dict, num, Volatility, rate):
    """Run backtrader_test over every stock in chunks of multiprocessing pools.

    Splits the stock dict into CHUNK_SIZE pieces and, for each chunk, spins
    up a fresh mp.Pool and dispatches one backtest per stock via apply_async.

    Returns:
        list: one backtest result per submitted task (order of submission).

    NOTE(review): on error the except branch sets ``results = True`` — a
    local that is never read or returned — and execution falls through to
    ``result.get()``, which re-raises any worker exception.  The __main__
    block below checks ``if results is True``, which this function can never
    produce; the error path needs rework.
    """
    # while True:
    #     exception_flag = False
    async_results = []
    try:
        # Number of tasks per round.
        CHUNK_SIZE = 200  # adjust as needed

        for chunk in tqdm(chunked_iterable(stock_data_dict.items(), CHUNK_SIZE)):
            print(f'chunk:{chunk[0][0]}-{chunk[-1][0]}')
            # min() keeps the pool no larger than the task count or 24 cores.
            with mp.Pool(processes=min(CHUNK_SIZE, len(chunk), 24)) as pool:
                for stock, df_stock in chunk:
                    async_result = pool.apply_async(func=backtrader_test, args=(df_stock, stock, num, Volatility, rate))
                    async_results.append(async_result)
                pool.close()
                pool.join()
        # with concurrent.futures.ProcessPoolExecutor(max_workers=18) as inner_executor:
        #     print(f'开始计算{num},{Volatility},{rate}')
        #     # Parallel variant via executor.map, kept for reference:
        #     results = [result for result in
        #             inner_executor.map(backtrader_test, stock_data_dict.values(), stock_data_dict.keys(),
        #                                 [num] * len(stock_data_dict),
        #                                 [Volatility] * len(stock_data_dict), [rate] * len(stock_data_dict),
        #                                 timeout=1200)]
            # except concurrent.futures.TimeoutError as e:
            #     print(f'计算超时{e}')
            #     results = []
            #     exception_flag = True
    except BaseException as e:
        print(f'计算错误{e}')
        results = True

    # Gather the per-task results; .get() re-raises worker exceptions.
    outputs = [result.get() for result in async_results]
    print(outputs)
    return outputs
+
+
if __name__ == '__main__':
    # Route multiprocessing logs to stderr while debugging worker behaviour.
    logger = mp.log_to_stderr()
    logger.setLevel(logging.DEBUG)
    # Pin the parent process to CPUs 0-23.
    cpu_list = list(range(24))
    pus = psutil.Process()
    pus.cpu_affinity(cpu_list)

    # Parameter values to grid-search (currently a single combination).
    nums = range(60, 80, 20)
    Volatilitys = range(5, 6, 1)
    rates = range(3, 4, 1)
    # All parameter combinations.
    all_combinations = list(product(nums, Volatilitys, rates))
    print(f'共需计算{len(all_combinations)}次')

    # Load all stock tables once; workers receive them per task.
    stock_data_dict = get_stock_data()
    results = []

    # Grab the first value of stock_data_dict, i.e. the first DataFrame.
    # stock_data = next(iter(stock_data_dict.values()))
    # print(stock_data)

    for num, Volatility, rate in tqdm(all_combinations, desc='计算进度'):
        result = bbt(stock_data_dict, num, Volatility, rate)
        results.append(result)
        print(results, len(results), len(results[0]))

    df = pd.DataFrame(
        columns=['周期', '波动率', 'MA5斜率', '盈利个数', '盈利比例', '总盈利', '平均盈利', '最大盈利', '最小盈利', '总亏损',
                '平均亏损', '最大亏损', '最小亏损'])
    # Summarize each round's per-stock results into one row.
    # NOTE(review): num/Volatility/rate below are leftovers from the loop
    # above — every row gets the LAST combination's parameters, which is
    # wrong once all_combinations has more than one entry.
    for tt in results:
        num_profits = len([r for r in tt if r > 0])
        num_losses = len([r for r in tt if r < 0])
        profit_ratio = num_profits / len(stock_data_dict)
        total_profit = sum([r for r in tt if r > 0])
        avg_profit = total_profit / num_profits if num_profits else 0
        max_profit = max(tt)
        min_profit = min([r for r in tt if r > 0]) if num_profits else 0
        total_loss = sum([r for r in tt if r < 0])
        avg_loss = total_loss / num_losses if num_losses else 0
        max_loss = min(tt)
        min_loss = max([r for r in tt if r < 0]) if num_losses else 0
        # Append the results into the DataFrame
        result_dict = {'周期': num, '波动率': Volatility, 'MA5斜率': rate, '盈利个数': num_profits,
                        '盈利比例': profit_ratio, '总盈利': total_profit, '平均盈利': avg_profit,
                        '最大盈利': max_profit, '最小盈利': min_profit, '总亏损': total_loss,
                        '平均亏损': avg_loss, '最大亏损': max_loss, '最小亏损': min_loss}
        df_t = pd.Series(result_dict)
        print(df_t)
        df = pd.concat([df, df_t.to_frame().T], ignore_index=True)

    print(df)
    exit()
    # NOTE(review): everything below is unreachable (after exit()) and stale:
    # `bbt` never returns True so the `results is True` branch is dead, the
    # while-loop has no terminating success path, and backtrader_test is
    # called with one argument (and an undefined `stock_data`) instead of five.
    num = 60
    Volatility = 5
    rate = 3
    i = 0
    st = dt.now()
    while True:
        i += 1
        try:
            results = bbt(stock_data_dict, num, Volatility, rate)
        except BaseException as e:
            print(f'计算错误{e}')
            break
        print(results)
        if results is True:
            print(f'计算错误,重新计算')
            continue
        else:
            print(f'第{i}次计算完成,耗时{dt.now() - st}')
    print(f'计算结果为{len(results)}')
    print(results)
    print(f'全部计算完成,共{len(results)}次')
    exit()

    getvalue = backtrader_test(stock_data)

    if getvalue > 100000:
        print('盈利')
    else:
        print('亏损')
    # Plot the result
    # cerebro.plot()

+ 313 - 0
backtrader/futures_231116.py

@@ -0,0 +1,313 @@
+# coding:utf-8
+import time
+from multiprocessing import freeze_support, Value, Lock
+import backtrader as bt
+from backtrader.feeds import PandasData
+import backtrader.indicators as btind
+from sqlalchemy import create_engine, text
+import pymysql
+from tqdm import tqdm
+import concurrent.futures
+import numpy as np
+import pandas as pd
+import platform
+import datetime
+from datetime import datetime as dt
+from itertools import product
+import psutil
+import logging
+import multiprocessing as mp
+from itertools import islice
+
+from func_timeout import func_set_timeout, FunctionTimedOut
+from functools import partial
+
+
class MyPandasData(PandasData):
    """backtrader PandasData feed extended with the extra columns produced
    upstream: the HL swing label, MACD parts (dif/dea/macd) and three RSI
    windows (6/12/24)."""
    lines = ('hl', 'dif', 'dea', 'macd', 'rsi_6', 'rsi_12', 'rsi_24',)
    # (line name, value) pairs — presumably the column positions of each
    # extra line in the source DataFrame; note the caller also passes the
    # columns by name (hl='HL', dif='dif', ...), which overrides these.
    params = (('hl', 7),
              ('dif', 8),
              ('dea', 9),
              ('macd', 10),
              ('rsi_6', 11),
              ('rsi_12', 12),
              ('rsi_24', 13),
              )
+
+
class TestStrategy(bt.Strategy):
    """RSI / HL-swing-label strategy driven by the parameter sweep in __main__."""

    # Volatility and rate are overridden per run via cerebro.addstrategy(...).
    params = (
        ("num", 3),
        ('Volatility', 0),
        ('rate', 3),  # keep the trailing comma so params stays a tuple
    )

    def log(self, txt, dt=None):
        # Log a strategy event (actual printing is currently disabled).
        dt = dt or self.datas[0].datetime.date(0)
        # print('%s, %s' % (dt.isoformat(), txt))

    def __init__(self):
        # Cache references to every data line and pre-build the SMAs; any
        # failure is only printed so one bad feed cannot kill the whole sweep.
        try:
            self.pos_price = 0
            self.dataclose = self.datas[0].close
            self.dataopen = self.datas[0].open
            self.high = self.datas[0].high
            self.low = self.datas[0].low
            self.volume = self.datas[0].volume
            self.hl = self.datas[0].hl
            self.dif = self.datas[0].dif
            self.dea = self.datas[0].dea
            self.macd = self.datas[0].macd
            self.rsi_6 = self.datas[0].rsi_6
            self.rsi_12 = self.datas[0].rsi_12
            self.rsi_24 = self.datas[0].rsi_24
            self.sma5 = btind.MovingAverageSimple(self.datas[0].close, period=5)
            self.sma10 = btind.MovingAverageSimple(self.datas[0].close, period=10)
            self.sma20 = btind.MovingAverageSimple(self.datas[0].close, period=20)
            self.sma60 = btind.MovingAverageSimple(self.datas[0].close, period=60)
            # self.sma_vol = btind.MovingAverageSimple(self.datas[0].close, period=Volatility)
        except BaseException as e:
            print(f'初始化错误{e}')

    def notify_order(self, order):
        """
        Order status callback.

        Arguments:
            order {object} -- the order whose status changed
        """
        if order.status in [order.Submitted, order.Accepted]:
            # Nothing to do while the order is merely submitted/accepted.
            return

        # Filled: remember price/commission and the bar it executed on.
        if order.status in [order.Completed]:
            if order.isbuy():
                self.buyprice = order.executed.price
                self.buycomm = order.executed.comm
            self.bar_executed = len(self)

        # Rejected for lack of funds/margin etc. — deliberately ignored.
        elif order.status in [order.Canceled, order.Margin, order.Rejected]:
            pass
            # self.log('Order Canceled/Margin/Rejected')

        # Done processing: clear the pending-order slot.
        self.order = None

    def notify_trade(self, trade):
        """
        Trade result callback.

        Arguments:
            trade {object} -- the trade whose status changed
        """
        if not trade.isclosed:
            return

        # Gross/net profit logging is disabled.
        # self.log('OPERATION PROFIT, GROSS %.2f, NET %.2f' % (trade.pnl, trade.pnlcomm))

    def next(self):
        # NOTE(review): __main__ sweeps Volatility as the RSI *lower* band and
        # rate as the *upper* band, yet the buy test below compares rsi_6 with
        # `rate` and the exit test with `vola` — these thresholds look swapped;
        # confirm the intent before relying on sweep results.
        vola = self.params.Volatility
        rate = self.params.rate
        # HL codes 1/2/3 are the low-pivot labels ('L', 'LL', 'L*') per the
        # mapping applied in backtrader_test.
        if (self.hl[0] == 1 or self.hl[0] == 2 or self.hl[0] == 3) and self.rsi_6 < rate:
            self.order = self.buy()
            self.pos_price = self.dataclose[0]

        # HL code 5 is 'HH'; also exit when the close drops below the 5-bar SMA.
        elif (self.hl[0] == 5 or self.dataclose[0] < self.sma5[0]) and self.rsi_6 > vola:
            self.order = self.close()
            self.pos_price = 0

    def stop(self):
        # pass
        self.log(u'(MA趋势交易效果) Ending Value %.2f' % (self.broker.getvalue()))
+
def to_df(df):
    """Sort the parameter-sweep results and persist them to CSV.

    The destination depends on the host: the macOS dev machine writes under
    ~/Documents, anything else writes to the Windows results folder.

    Parameters
    ----------
    df : pd.DataFrame
        Sweep results with 'rsi低轨'/'rsi高轨' columns (sorted in place).
    """
    print('开始存数据')
    df.sort_values(by=['rsi低轨', 'rsi高轨'], ascending=True, inplace=True)
    df = df.reset_index(drop=True)
    # BUG FIX: the timestamp format used '%H%m%S' — %m is the month; minutes
    # are %M. Both branches produced file names with the month where the
    # minute should be, so runs within the same hour could collide.
    if platform.node() == 'DanieldeMBP.lan':
        df.to_csv(f"/Users/daniel/Documents/策略/Ma5乖离7买入{dt.now().strftime('%Y%m%d%H%M%S')}.csv",
                  index=True,
                  encoding='utf_8_sig', mode='w')
    else:
        df.to_csv(f"C:\策略结果\期货rsi{dt.now().strftime('%Y%m%d%H%M%S')}.csv", index=True,
                  encoding='utf_8_sig', mode='w')
    print(f'结果:, \n, {df}')
+
+
def chunked_iterable(iterable, size):
    """Yield consecutive tuples of at most *size* items taken from *iterable*.

    The final chunk may be shorter than *size*; an exhausted (or empty)
    iterable yields nothing.
    """
    source = iter(iterable)
    chunk = tuple(islice(source, size))
    while chunk:
        yield chunk
        chunk = tuple(islice(source, size))
+
+
def query_database(table_name):
    """Read the given table from the local qihuo_tech MySQL instance into a DataFrame."""
    url = 'mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qihuo_tech?charset=utf8'
    return pd.read_sql_table(table_name, create_engine(url))
+
+
def get_stock_data():
    """Load every futures table whose name contains '00.' from MySQL.

    Lists matching tables over a raw pymysql connection, then reads each
    table in parallel worker processes via query_database. Any failure
    (connection drop, read error) restarts the whole procedure — this is a
    deliberate retry-forever loop.

    Returns
    -------
    dict[str, pd.DataFrame]
        Mapping of table name to its full contents.
    """
    while True:
        try:
            db = pymysql.connect(host='localhost',
                                 user='root',
                                 port=3307,
                                 password='r6kEwqWU9!v3',
                                 database='qihuo_tech')
            cursor = db.cursor()
            # %% escapes the literal SQL wildcard around the '00.' marker.
            cursor.execute("show tables like '%%%s%%' " % '00.')
            # Fix: the row variable used to shadow the builtin `tuple`.
            table_list = [row[0] for row in cursor.fetchall()]
            # table_list = table_list[0: 10]
            cursor.close()
            db.close()
            print(f'开始数据库读取')
            with concurrent.futures.ProcessPoolExecutor(max_workers=16) as executor:
                # Query every table in a worker process, keeping the
                # table-name -> DataFrame association.
                data_dict = {table_name: df for table_name, df in
                             tqdm(zip(table_list, executor.map(query_database, table_list)))}
            print(f'数据库读取完成')
            break
        except BaseException as e:
            print(f'数据库读取错误{e}')
            continue
    return data_dict
+
+
def backtrader_test(stock_data, stock_name, vot, rate):
    """Run one backtrader backtest for a single instrument.

    Parameters
    ----------
    stock_data : pd.DataFrame
        Bars with open/close/high/low back-adjusted columns plus HL (string
        swing labels), dif/dea/macd and rsi_6/12/24; `time` parseable as
        datetimes.
    stock_name : str
        Table name, used only for log messages.
    vot, rate : int
        Strategy parameters (RSI lower / upper band for this sweep point).

    Returns
    -------
    float
        Final broker value minus the initial 1,000,000 cash, or NaN when the
        backtest raised (callers count NaNs as "not computed").
    """
    print(f'开始回测{stock_name}')
    try:
        cerebro = bt.Cerebro()
        stock_data.time = pd.to_datetime(stock_data.time)
        # Encode the HL swing labels as the integers TestStrategy.next expects.
        stock_data['HL'] = stock_data['HL'].map({'L': 1,
                                                 'LL': 2,
                                                 'L*': 3,
                                                 'H': 4,
                                                 'HH': 5,
                                                 'H*': 6,
                                                 '-': 7})
        cerebro.addstrategy(TestStrategy, Volatility=vot, rate=rate)
        data = MyPandasData(dataname=stock_data,
                            fromdate=datetime.datetime(2017, 1, 1),
                            todate=datetime.datetime(2022, 10, 30),
                            datetime='time',
                            open='open_back',
                            close='close_back',
                            high='high_back',
                            low='low_back',
                            volume='volume',
                            hl='HL',
                            dif='dif',
                            dea='dea',
                            macd='macd',
                            rsi_6='rsi_6',
                            rsi_12='rsi_12',
                            rsi_24='rsi_24',
                            )
        cerebro.adddata(data)
        # BUG FIX: a second cerebro.addstrategy(TestStrategy) call here added
        # the strategy AGAIN with default params (Volatility=0, rate=3), so
        # every run traded two strategy instances on the same broker and the
        # swept vot/rate values were diluted by the default one.
        cerebro.broker.setcash(1000000.0)
        cerebro.addsizer(bt.sizers.FixedSize, stake=1000)
        cerebro.broker.setcommission(commission=0.001)
        cerebro.run()
    except BaseException as e:
        print(f'{stock_name}回测错误{e}')
        return np.nan
    else:
        return cerebro.broker.getvalue() - 1000000.0
+
+
def tdf(tt, Volatility, rate):
    """Aggregate one parameter pair's per-instrument P&L list into a stats row.

    Parameters
    ----------
    tt : list[float]
        Per-instrument profit/loss values; NaN marks instruments whose
        backtest failed and is excluded from every statistic.
    Volatility, rate : int
        RSI lower / upper band used for this run (echoed into the row).

    Returns
    -------
    pd.Series
        Row keyed by the Chinese column names of the results DataFrame.
    """
    num_nan = np.isnan(tt).sum()  # instruments whose backtest failed
    print(f'num_nan={num_nan,Volatility,rate}')

    filtered_result = [r for r in tt if not np.isnan(r)]  # NaN-free values
    print(f'filtered_result={filtered_result}')

    # BUG FIX: statistics were previously computed over the raw list `tt`,
    # so max()/min() could return NaN (NaN comparisons are undefined) and
    # an all-NaN round raised ZeroDivisionError. Everything below now uses
    # the filtered list, with explicit empty-list guards.
    profits = [r for r in filtered_result if r > 0]
    losses = [r for r in filtered_result if r < 0]
    num_profits = len(profits)
    num_losses = len(losses)
    profit_ratio = num_profits / len(filtered_result) if filtered_result else 0
    total_profit = sum(profits)
    avg_profit = total_profit / num_profits if num_profits else 0
    max_profit = max(filtered_result) if filtered_result else 0
    min_profit = min(profits) if num_profits else 0
    total_loss = sum(losses)
    avg_loss = total_loss / num_losses if num_losses else 0
    max_loss = min(filtered_result) if filtered_result else 0
    min_loss = max(losses) if num_losses else 0
    # Pack the statistics into the row the caller concatenates onto df.
    result_dict = {'rsi低轨': Volatility, 'rsi高轨': rate, '盈利个数': num_profits,
                   '盈利比例': profit_ratio, '总盈利': total_profit, '平均盈利': avg_profit,
                   '最大盈利': max_profit, '最小盈利': min_profit, '总亏损': total_loss,
                   '平均亏损': avg_loss, '最大亏损': max_loss, '最小亏损': min_loss, '未计算个股数': num_nan}
    df_t = pd.Series(result_dict)
    return df_t
+
+
if __name__ == '__main__':
    # Optional multiprocessing debug logging (disabled).
    # logger = mp.log_to_stderr()
    # logger.setLevel(logging.DEBUG)
    # Pin this process (and its children) to CPUs 0-22.
    cpu_list = list(range(0, 23))
    # print(cpu_list)
    pus = psutil.Process()
    pus.cpu_affinity(cpu_list)
    start_time = dt.now()

    # Parameter grid to sweep exhaustively.
    Volatility = range(0, 35, 2)  # RSI lower band
    rates = range(65, 100, 2)  # RSI upper band
    # All (lower, upper) band combinations.
    all_combinations = list(product(Volatility, rates))
    print(f'共需计算{len(all_combinations)}次')

    # Load every instrument's table from MySQL (retries forever on failure).
    futures_list = get_stock_data()
    print(futures_list.keys())

    results = []

    df = pd.DataFrame(
        columns=['rsi低轨', 'rsi高轨', '盈利个数', '盈利比例', '总盈利', '平均盈利', '最大盈利', '最小盈利',
                 '总亏损', '平均亏损', '最大亏损', '最小亏损', '未计算个股数'])

    err_list = []

    # NOTE(review): CHUNK_SIZE, timeout, max_retries and err_list are never
    # used below — confirm they are leftovers before deleting.
    CHUNK_SIZE = 200  # tune as needed
    timeout = 120
    max_retries = 3
    with concurrent.futures.ProcessPoolExecutor(max_workers=8) as inner_executor:
        # NOTE(review): these loop variables shadow the Volatility/rates
        # range objects defined above.
        for Volatility, rates in tqdm(all_combinations, desc='计算进度'):
            print(f'开始计算{Volatility,rates}')
            while True:
                try:
                    # Fan the per-instrument backtests for this parameter
                    # pair out over the process pool.
                    res = [result for result in tqdm(
                        inner_executor.map(backtrader_test, futures_list.values(), futures_list.keys(),
                                           [Volatility] * len(futures_list), [rates] * len(futures_list)),
                        desc='单轮计算进度')]

                except BaseException as e:
                    print(f'计算错误{e}')
                    # NOTE(review): on failure a fresh pool is created with
                    # max_workers=24 (vs. 8 above) and the broken pool is
                    # never shut down — confirm this asymmetry is intended.
                    inner_executor = concurrent.futures.ProcessPoolExecutor(max_workers=24)
                else:
                    # Success: fold this round's stats into the results frame.
                    results.append(res)
                    df_t = tdf(res, Volatility, rates)
                    df = pd.concat([df, df_t.to_frame().T], ignore_index=True)
                    break
            # time.sleep(1)
            print(f'{Volatility,rates}计算完成,共计算{len(res)}个股票')
            print(df)
        print('循环结束')
        to_df(df)
        print(f'计算完成,共耗时{dt.now() - start_time}秒')

+ 41 - 0
cudf/201.py

@@ -0,0 +1,41 @@
+import cudf
+import multiprocessing as mp
+from sqlalchemy import create_engine
+from datetime import datetime as dt
+import pandas as pd
+import logging
+
def t(df, u):
    """Micro-benchmark: rebuild *df* one row at a time with cudf.concat,
    then write a sentinel HL flag one past the last copied row.

    Parameters
    ----------
    df : cudf.DataFrame
        Frame to copy row by row (intentionally slow — this measures concat).
    u : int
        Worker id, used only in the completion printout.
    """
    st = dt.now()
    # Fix: the accumulator used to be named `t`, shadowing this function.
    acc = cudf.DataFrame()
    # Fix: pre-bind the index so the label write below cannot raise
    # NameError when df is empty (the loop body never runs) or when concat
    # fails on the first iteration.
    idx = -1
    try:
        for idx in range(len(df)):
            acc = cudf.concat([acc, df.iloc[idx].to_frame().T], axis=0)
    except BaseException as e:
        print(e)
    acc.loc[idx + 1, 'HL'] = 1
    print(f'{u}完成,{dt.now() - st}')
+
if __name__ == '__main__':
    # mp.freeze_support()
    # Echo multiprocessing internals to stderr for debugging.
    logger = mp.log_to_stderr()
    logger.setLevel(logging.DEBUG)
    engine = create_engine(
        'mysql+pymysql://root:r6kEwqWU9!v3@172.18.0.4:3306/qmt_stocks_whole?charset=utf8')

    # engine = create_engine(
    #     'mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qmt_stocks_whole?charset=utf8')

    # Load one stock's daily bars via pandas, then move the frame to the GPU.
    df = pd.read_sql_table('000001.SZ_1d', con=engine.connect())
    df = cudf.from_pandas(df)
    print(df.shape)
    # df_cudf = cudf.from_pandas(df)
    # NOTE(review): a cuDF frame is handed to worker processes below; GPU
    # objects generally do not pickle across process boundaries — confirm
    # this experiment is expected to work (errors are only printed).
    pool = mp.Pool(2)
    try:
        for j in range(1):
            pool.apply_async(func=t, args=(df, j))
        pool.close()
        pool.join()
    except BaseException as e:
        print(e)
+

+ 14 - 0
cudf/tee.py

@@ -0,0 +1,14 @@
import cudf
import numpy as np
import pandas as pd


# Benchmark script: build a two-column frame of one billion random ints on
# the CPU (pandas) and then on the GPU (cuDF), to compare construction cost.
# NOTE(review): each int64 column of 1e9 values is ~8 GB, so the two frames
# need tens of GB of RAM/VRAM — confirm the host can actually hold this.
pandas_df = pd.DataFrame({'a': np.random.randint(0, 1000000000, size=1000000000),
                          'b': np.random.randint(0, 1000000000, size=1000000000)})

# exit()
print('1')  # marks completion of the pandas allocation
cudf_df = cudf.DataFrame({'a': np.random.randint(0, 1000000000, size=1000000000),
                          'b': np.random.randint(0, 1000000000, size=1000000000)})

print('finished')

+ 12 - 0
cudf/test.py

@@ -0,0 +1,12 @@
import cudf
import numpy as np
import pandas as pd


# Smaller variant of the tee.py benchmark: build a two-column frame of
# 100 million random ints on the CPU (pandas) and then on the GPU (cuDF).
# NOTE(review): ~0.8 GB per int64 column, so roughly 3.2 GB total across
# the two frames — confirm against available memory.
pandas_df = pd.DataFrame({'a': np.random.randint(0, 100000000, size=100000000),
                          'b': np.random.randint(0, 100000000, size=100000000)})

# exit()

cudf_df = cudf.DataFrame({'a': np.random.randint(0, 100000000, size=100000000),
                          'b': np.random.randint(0, 100000000, size=100000000)})

+ 20 - 2
docker.start.txt

@@ -24,7 +24,7 @@ docker run --restart=always --log-opt max-size=40960m --log-opt max-file=2 -p 63
 
 
 20230515
-docker run  -itd  -p 3307:3306 --name mysql8033 --privileged=true  --restart unless-stopped  -v C:/docker_mysql/stock_data:/var/lib/mysql -e MYSQL_ROOT_PASSWORD=r6kEwqWU9!v3  -d mysql:8.0.33
+docker run  -itd  -p 3307:3306 --name mysql8033 --privileged=true --ip 172.18.0.2 --restart unless-stopped  -v C:/docker_mysql/stock_data:/var/lib/mysql -e MYSQL_ROOT_PASSWORD=r6kEwqWU9!v3  -d mysql:8.0.33
 docker run  -itd  -p 3309:3306 --name mysqltest -e character-set-server=utf8mb4 --privileged=true  --restart unless-stopped   -e MYSQL_ROOT_PASSWORD=r6kEwqWU9!v3  -d mysql:8.0.33 --skip-log-bin --disable-log-bin --log-error=/var/lib/mysql/err.log
 
 20230626
@@ -34,11 +34,28 @@ docker run -d --name cudf --gpus all -it --shm-size=1g --ulimit memlock=-1 --uli
 
 
 20230706
-docker run --name tidb -d -v C:/docker_tidb:/tmp/tidb --privileged=true --restart unless-stopped -p 3308:4000 -p 10080:10080 --network stk-net pingcap/tidb:latest
+docker run --name tidb -d -v C:/docker_tidb:/tmp/tidb --privileged=true --ip 172.18.0.6 --restart unless-stopped -p 3308:4000 -p 10080:10080 --network stk-net pingcap/tidb:latest
 docker run --name pgsql -d -v C:/docker_pgsql:/var/lib/postgresql --privileged=true --restart unless-stopped -p 3309:5432 -e POSTGRES_PASSWORD='r6kEwqWU9!v3' --network stk-net postgres:13
 docker run --name tidb -d -v C:/docker_tidb:/tmp/tidb --privileged=true --restart unless-stopped -p 3308:4000 -p 10080:10080 --network stk-net pingcap/tidb:latest
 
 
+docker run  -itd  -p 3409:3306 --name mysql83 --privileged=true  --ip 172.18.0.8 --restart unless-stopped  -v C:/docker_mysql/stock_data_2:/var/lib/mysql -e MYSQL_ROOT_PASSWORD=r6kEwqWU9!v3  -d mysql:8.0.33
+
+20230715
+docker run -itd --name tidb -d -v C:/docker_tidb:/tmp/tidb --privileged=true --ip 172.18.0.6 --restart unless-stopped -p 3308:4000 -p 10080:10080 --network stk-net pingcap/tidb:latest
+docker run -itd  -p 3307:3306 --name mysql8033 --privileged=true --network stk-net --ip 172.18.0.2 --restart unless-stopped  -v C:/docker_mysql/stock_data:/var/lib/mysql -e MYSQL_ROOT_PASSWORD=r6kEwqWU9!v3  -d mysql:8.0.33
+docker run --name pgsql -d -v C:/docker_pgsql:/var/lib/postgresql --privileged=true --ip 172.18.0.4 --restart unless-stopped -p 3309:5432 -e POSTGRES_PASSWORD='r6kEwqWU9!v3' --network stk-net postgres:13
+
+docker run -itd --name tidb -d --privileged=true --ip 172.18.0.6 --restart unless-stopped -p 3308:4000 -p 10080:10080 -p 8822:22 --network stk-net ubuntu:23.10
+docker run -itd --name tidb -d --privileged=true --ip 172.18.0.6 --restart unless-stopped -p 3308:4000 -p 10080:10080 -p 8822:22 -p 9090:9090 -p 13000:3000 -p 12379:2379 --network stk-net ee
+
+
+
+# Note: the -p 8722:22 mapping exposes the container's SSH port 22 on host port 8722
+docker pull nvcr.io/nvidia/rapidsai/rapidsai-core:23.06-cuda11.8-runtime-ubuntu22.04-py3.10
+docker run -d --name cudf --gpus all -it --shm-size=1g --ulimit memlock=-1 --ulimit stack=67108864 --restart unless-stopped --network stk-net -p 8888:8888 -p 8787:8787 -p 8786:8786 -p 8722:22 nvcr.io/nvidia/rapidsai/rapidsai-core:23.06-cuda11.8-runtime-ubuntu22.04-py3.10
+
+
 # For advice on how to change settings please see
 # http://dev.mysql.com/doc/refman/8.0/en/server-configuration-defaults.html
 
@@ -59,6 +76,7 @@ docker run --name tidb -d -v C:/docker_tidb:/tmp/tidb --privileged=true --restar
 # sort_buffer_size = 2M
 # read_rnd_buffer_size = 2M
 
+
 # Remove leading # to revert to previous value for default_authentication_plugin,
 # this will increase compatibility with older clients. For background, see:
 # https://dev.mysql.com/doc/refman/8.0/en/server-system-variables.html#sysvar_default_authentication_plugin