Quellcode durchsuchen

修改数据库连接

Daniel vor 3 Jahren
Ursprung
Commit
3b9ee24c38
1 geänderte Datei mit 48 neuen und 39 gelöschten Zeilen
  1. 48 39
      qbh.py

+ 48 - 39
qbh.py

@@ -29,49 +29,57 @@ import threading
 engine2 = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/stocks?charset=utf8')
 
 stocks = list(get_all_securities(['stock'], date='2021-12-31').index)
-# stocks =stocks[0:40]
+stocks =stocks[0:40]
 
 thd = threading.local()
 
 
-def qbh(stocks , engine):
+def qbh(stocks, engine, engine_backup):
     for stock in stocks:
-        print(stock)
-        thd.new_df = pd.DataFrame(columns=('date', 'open', 'close', 'high', 'low', 'volume', 'money', 'HL'))
-        # print(new_df.head())
-        thd.df_day = stk['stk' + stock[:6]]
-        for i in thd.df_day.index:
-            if i == 0 or i == 1:
-                thd.new_df = pd.concat([thd.new_df, thd.df_day.iloc[[i]]], ignore_index=True)
-            # 不包含
-            elif (thd.new_df.iloc[-1, 3] > thd.df_day.loc[i, 'high']
-                  and thd.new_df.iloc[-1, 4] > thd.df_day.loc[i, 'low']) \
-                    or (thd.new_df.iloc[-1, 3] < thd.df_day.loc[i, 'high']
-                        and thd.new_df.iloc[-1, 4] < thd.df_day.loc[i, 'low']):
-                thd.new_df = pd.concat([thd.new_df, thd.df_day.iloc[[i]]], ignore_index=True)
-            # 包含
-            else:
-                # (new_df.iloc[-1,3]>=df_day.loc[i,'high'] and new_df.iloc[-1,4]<= df_day.loc[i,'low']):
-                if thd.new_df.iloc[-2, 3] > thd.new_df.iloc[-1, 3]: #左高,下降
-                    thd.new_df.iloc[-1, 3] = min(thd.new_df.iloc[-1, 3], thd.df_day.loc[i, 'high'])
-                    thd.new_df.iloc[-1, 4] = min(thd.new_df.iloc[-1, 4], thd.df_day.loc[i, 'low'])
-                else:
-                    # 右高,上升
-                    thd.new_df.iloc[-1, 3] = max(thd.new_df.iloc[-1, 3], thd.df_day.loc[i, 'high'])
-                    thd.new_df.iloc[-1, 4] = max(thd.new_df.iloc[-1, 4], thd.df_day.loc[i, 'low'])
-        thd.new_df.to_sql('stk%s_%s' % (stock[:6], u), con=engine, index=True, if_exists='replace')
-        with engine.connect() as con:
-            con.execute('ALTER TABLE stk%s_%s ADD PRIMARY KEY (`date`);'% (stock[:6], u))
-        print(stock)
-        print("**************")
-
-        # new_df.to_csv('new_df.csv')
-
-        #return new_df
+        print(engine)
+        print(engine_backup)
+        # thd.new_df = pd.DataFrame(columns=('date', 'open', 'close', 'high', 'low', 'volume', 'money', 'HL'))
+        # # print(new_df.head())
+        # thd.df_day = stk['stk' + stock[:6]]
+        # for i in thd.df_day.index:
+        #     if i == 0 or i == 1:
+        #         thd.new_df = pd.concat([thd.new_df, thd.df_day.iloc[[i]]], ignore_index=True)
+        #     # 不包含
+        #     elif (thd.new_df.iloc[-1, 3] > thd.df_day.loc[i, 'high']
+        #           and thd.new_df.iloc[-1, 4] > thd.df_day.loc[i, 'low']) \
+        #             or (thd.new_df.iloc[-1, 3] < thd.df_day.loc[i, 'high']
+        #                 and thd.new_df.iloc[-1, 4] < thd.df_day.loc[i, 'low']):
+        #         thd.new_df = pd.concat([thd.new_df, thd.df_day.iloc[[i]]], ignore_index=True)
+        #     # 包含
+        #     else:
+        #         # (new_df.iloc[-1,3]>=df_day.loc[i,'high'] and new_df.iloc[-1,4]<= df_day.loc[i,'low']):
+        #         # 左高,下降
+        #         if thd.new_df.iloc[-2, 3] > thd.new_df.iloc[-1, 3]:
+        #             thd.new_df.iloc[-1, 3] = min(thd.new_df.iloc[-1, 3], thd.df_day.loc[i, 'high'])
+        #             thd.new_df.iloc[-1, 4] = min(thd.new_df.iloc[-1, 4], thd.df_day.loc[i, 'low'])
+        #         else:
+        #             # 右高,上升
+        #             thd.new_df.iloc[-1, 3] = max(thd.new_df.iloc[-1, 3], thd.df_day.loc[i, 'high'])
+        #             thd.new_df.iloc[-1, 4] = max(thd.new_df.iloc[-1, 4], thd.df_day.loc[i, 'low'])
+        # thd.new_df.to_sql('stk%s_%s' % (stock[:6], u), con=engine, index=True, if_exists='replace')
+        # with engine.connect() as con:
+        #     con.execute('ALTER TABLE stk%s_%s ADD PRIMARY KEY (`date`);' % (stock[:6], u))
+        # thd.new_df.to_sql('stk%s_%s' % (stock[:6], u), con=engine_backup, index=True, if_exists='replace')
+        # with engine_backup.connect() as con_backup:
+        #     con_backup.execute('ALTER TABLE stk%s_%s ADD PRIMARY KEY (`date`);' % (stock[:6], u))
+        # thd.new_df.to_csv(
+        #     '/Users/daniel/Library/CloudStorage/OneDrive-个人/个人/python_stocks/20220211qbh/qbh%s.csv' % stock[:6])
+        # print(stock)
+        # print("**************")
+        #
+        # # new_df.to_csv('new_df.csv')
+        #
+        # #return new_df
 
 
 stk = locals()
-engine = locals()
+engine = []
+engine_backup = []
 
 u = '1d'
 # 获取数据存入DataFrame
@@ -89,7 +97,8 @@ print("#########################################################################
 
 # 开始去包含
 # qbh(stocks)
-
-for m in range(0, len(stocks), 50):
-    engine[m] = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qbh_hlfx?charset=utf8', pool_recycle=3600)
-    threading.Thread(target=qbh, args=(stocks[m:m + 50], engine[m])).start()
+step = 100
+for m in range(0, len(stocks), step):
+    engine.append(create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qbh_hlfx?charset=utf8', pool_recycle= 3600))
+    engine_backup.append(create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qbh_hlfx_backup?charset=utf8', pool_recycle= 3600))
+    threading.Thread(target=qbh, args=(stocks[m:m + step], engine[m], engine_backup[m])).start()