"""Remove K-line "inclusion" (去包含) from per-stock bar tables.

Reads raw OHLCV bars for every stock from the `stocks` MySQL database,
merges mutually-contained candles (chan-theory style pre-processing),
and writes the cleaned series into the `qbh_hlfx` database, one table
per stock.  Work is fanned out across threads in chunks of `step` codes.
"""
from jqdatasdk import *
import pandas as pd
import pymysql
from sqlalchemy import create_engine
import threading
from datetime import datetime as dt

# NOTE(review): hardcoded credentials (jqdata + MySQL) — move to environment
# variables or a config file before sharing/deploying this script.
auth('18019403367', 'Qwer4321')

starttime = dt.now()

# Source database holding the raw per-stock bar tables named stk<code>_<freq>.
# Local MySQL started e.g. with:
# docker run --name mysql -p 3307:3306 -e MYSQL_ROOT_PASSWORD='r6kEwqWU9!v3' \
#   -v /Users/daniel/mysqldata:/var/lib/mysql -d mysql:8.0-oracle
engine2 = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/stocks?charset=utf8')

# All stock codes listed as of 2021-12-31 (e.g. '600000.XSHG').
stocks = list(get_all_securities(['stock'], date='2021-12-31').index)

# Thread-local scratch space: each worker thread gets its own DataFrames so
# concurrent qbh() calls do not trample each other's state.
thd = threading.local()


def qbh(stocks, engine, engine_backup):
    """Merge contained candles for each stock and persist the result.

    For every code in `stocks`, walks the pre-loaded bar DataFrame in the
    module-level `stk` dict and builds a new series in which any bar whose
    high/low range is contained in (or contains) the previous kept bar is
    merged into it.  Column positions 3 and 4 of the working frame are
    'high' and 'low' respectively — the iloc indices below rely on that.

    The result is written to table ``stk<code>_<u>`` in `engine` (replacing
    any existing table) with `date` as primary key.

    Args:
        stocks: iterable of security codes like '600000.XSHG'.
        engine: SQLAlchemy engine for the output (qbh_hlfx) database.
        engine_backup: engine for the backup database (currently unused;
            kept for interface compatibility with the caller).
    """
    for stock in stocks:
        thd.new_df = pd.DataFrame(
            columns=('date', 'open', 'close', 'high', 'low', 'volume', 'money', 'HL'))
        thd.df_day = stk['stk' + stock[:6]]
        for i in thd.df_day.index:
            # Seed the output with the first two bars unconditionally so the
            # trend test below always has two prior bars to compare.
            if i == 0 or i == 1:
                thd.new_df = pd.concat([thd.new_df, thd.df_day.iloc[[i]]], ignore_index=True)
            # No inclusion: the new bar's range strictly shifts down or up
            # relative to the last kept bar — keep it as its own bar.
            elif (thd.new_df.iloc[-1, 3] > thd.df_day.loc[i, 'high']
                  and thd.new_df.iloc[-1, 4] > thd.df_day.loc[i, 'low']) \
                    or (thd.new_df.iloc[-1, 3] < thd.df_day.loc[i, 'high']
                        and thd.new_df.iloc[-1, 4] < thd.df_day.loc[i, 'low']):
                thd.new_df = pd.concat([thd.new_df, thd.df_day.iloc[[i]]], ignore_index=True)
            else:
                # Inclusion: one range contains the other.  Merge the current
                # bar into the last kept bar, direction taken from the two
                # most recent kept bars.
                if thd.new_df.iloc[-2, 3] > thd.new_df.iloc[-1, 3]:
                    # Downtrend (previous high was higher): keep the lower
                    # high and the lower low.
                    thd.new_df.iloc[-1, 3] = min(thd.new_df.iloc[-1, 3], thd.df_day.loc[i, 'high'])
                    thd.new_df.iloc[-1, 4] = min(thd.new_df.iloc[-1, 4], thd.df_day.loc[i, 'low'])
                else:
                    # Uptrend: keep the higher high and the higher low.
                    thd.new_df.iloc[-1, 3] = max(thd.new_df.iloc[-1, 3], thd.df_day.loc[i, 'high'])
                    thd.new_df.iloc[-1, 4] = max(thd.new_df.iloc[-1, 4], thd.df_day.loc[i, 'low'])

        # Persist, replacing any previous run's table, and key it on date.
        thd.new_df.to_sql('stk%s_%s' % (stock[:6], u), con=engine, index=True, if_exists='replace')
        with engine.connect() as con:
            con.execute('ALTER TABLE stk%s_%s ADD PRIMARY KEY (`date`);' % (stock[:6], u))
        print(stock)
        print("**************")


# Namespace for the pre-loaded raw bar frames, keyed 'stk<code>'.
# (Was a `locals()` hack; a plain dict has identical lookup behavior here.)
stk = {}
engine = []
engine_backup = []
u = '30m'  # bar frequency suffix of the source/target table names

# Load every stock's raw bars up front so worker threads only compute.
for stock in stocks:
    stk['stk' + stock[:6]] = pd.read_sql_query(
        'select date,open,close,high,low,volume,money from stk%s_%s' % (stock[:6], u),
        engine2)

# Visual separator on the console: loading phase finished.
print('#' * 777)

# Fan out: one thread (and one dedicated engine pair) per chunk of codes.
# Each thread needs its own engine because SQLAlchemy connections are not
# shared safely across threads.
thread_list = []
step = 1000
for idx, m in enumerate(range(0, len(stocks), step)):
    engine.append(create_engine(
        'mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qbh_hlfx?charset=utf8',
        pool_recycle=3600))
    engine_backup.append(create_engine(
        'mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qbh_hlfx_backup?charset=utf8',
        pool_recycle=3600))
    thread = threading.Thread(
        target=qbh,
        args=(stocks[m:m + step], engine[idx], engine_backup[idx]))
    thread.start()
    thread_list.append(thread)

for thread in thread_list:
    thread.join()

endtime = dt.now()
print((endtime - starttime).seconds)