# qbh.py — K-line containment removal (去包含) pipeline
  1. from jqdatasdk import *
  2. auth('18019403367', 'Qwer4321')
  3. import pandas as pd
  4. import pymysql
  5. from sqlalchemy import create_engine
  6. import threading
  7. from datetime import datetime as dt
  8. starttime = dt.now()
  9. # 连接数据库
  10. # db_stk_sql = pymysql.connect(host='localhost',
  11. # user='root',
  12. # port=3307,
  13. # password='r6kEwqWU9!v3',
  14. # database='stocks',
  15. # connect_timeout=600)
  16. #
  17. #
  18. # db_qbh = pymysql.connect(host='localhost',
  19. # user='root',
  20. # port=3307,
  21. # password='r6kEwqWU9!v3',
  22. # database='qbh',
  23. # charset='utf8')
  24. #
  25. #
  26. # cursor = db_qbh.cursor()
  27. # engine = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qbh_hlfx?charset=utf8')
  28. engine2 = create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/stocks?charset=utf8')
  29. stocks = list(get_all_securities(['stock'], date='2021-12-31').index)
  30. # stocks =stocks[0:40]
  31. thd = threading.local()
  32. # docker run --name mysql -p 3307:3306 -e MYSQL_ROOT_PASSWORD='r6kEwqWU9!v3' -v /Users/daniel/mysqldata:/var/lib/mysql -d mysql:8.0-oracle
  33. def qbh(stocks, engine, engine_backup):
  34. for stock in stocks:
  35. thd.new_df = pd.DataFrame(columns=('date', 'open', 'close', 'high', 'low', 'volume', 'money', 'HL'))
  36. # print(new_df.head())
  37. thd.df_day = stk['stk' + stock[:6]]
  38. for i in thd.df_day.index:
  39. if i == 0 or i == 1:
  40. thd.new_df = pd.concat([thd.new_df, thd.df_day.iloc[[i]]], ignore_index=True)
  41. # 不包含
  42. elif (thd.new_df.iloc[-1, 3] > thd.df_day.loc[i, 'high']
  43. and thd.new_df.iloc[-1, 4] > thd.df_day.loc[i, 'low']) \
  44. or (thd.new_df.iloc[-1, 3] < thd.df_day.loc[i, 'high']
  45. and thd.new_df.iloc[-1, 4] < thd.df_day.loc[i, 'low']):
  46. thd.new_df = pd.concat([thd.new_df, thd.df_day.iloc[[i]]], ignore_index=True)
  47. # 包含
  48. else:
  49. # (new_df.iloc[-1,3]>=df_day.loc[i,'high'] and new_df.iloc[-1,4]<= df_day.loc[i,'low']):
  50. # 左高,下降
  51. if thd.new_df.iloc[-2, 3] > thd.new_df.iloc[-1, 3]:
  52. thd.new_df.iloc[-1, 3] = min(thd.new_df.iloc[-1, 3], thd.df_day.loc[i, 'high'])
  53. thd.new_df.iloc[-1, 4] = min(thd.new_df.iloc[-1, 4], thd.df_day.loc[i, 'low'])
  54. else:
  55. # 右高,上升
  56. thd.new_df.iloc[-1, 3] = max(thd.new_df.iloc[-1, 3], thd.df_day.loc[i, 'high'])
  57. thd.new_df.iloc[-1, 4] = max(thd.new_df.iloc[-1, 4], thd.df_day.loc[i, 'low'])
  58. thd.new_df.to_sql('stk%s_%s' % (stock[:6], u), con=engine, index=True, if_exists='replace')
  59. with engine.connect() as con:
  60. con.execute('ALTER TABLE stk%s_%s ADD PRIMARY KEY (`date`);' % (stock[:6], u))
  61. # thd.new_df.to_sql('stk%s_%s' % (stock[:6], u), con=engine_backup, index=True, if_exists='replace')
  62. # with engine_backup.connect() as con_backup:
  63. # con_backup.execute('ALTER TABLE stk%s_%s ADD PRIMARY KEY (`date`);' % (stock[:6], u))
  64. # thd.new_df.to_csv(
  65. # '/Users/daniel/Library/CloudStorage/OneDrive-个人/个人/python_stocks/20220211qbh/qbh%s.csv' % stock[:6])
  66. print(stock)
  67. print("**************")
  68. #
  69. # # new_df.to_csv('new_df.csv')
  70. #
  71. # #return new_df
  72. stk = locals()
  73. engine = []
  74. engine_backup = []
  75. u = '30m'
  76. # 获取数据存入DataFrame
  77. for stock in stocks:
  78. stk['stk'+stock[:6]] = pd.read_sql_query('select date,open,close,high,low,volume,money from stk%s_%s' % (stock[:6], u), engine2)
  79. # print(stock, stk['stk'+stock[:6]])
  80. print("###############################################################################################################"
  81. "###############################################################################################################"
  82. "###############################################################################################################"
  83. "###############################################################################################################"
  84. "###############################################################################################################"
  85. "###############################################################################################################"
  86. "###############################################################################################################")
  87. # 开始去包含
  88. # qbh(stocks)
  89. thread_list = []
  90. step = 1000
  91. times_engine = 0
  92. for m in range(0, len(stocks), step):
  93. engine.append(create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qbh_hlfx?charset=utf8', pool_recycle= 3600))
  94. engine_backup.append(create_engine('mysql+pymysql://root:r6kEwqWU9!v3@localhost:3307/qbh_hlfx_backup?charset=utf8', pool_recycle= 3600))
  95. thread = threading.Thread(target=qbh, args=(stocks[m:m + step], engine[times_engine], engine_backup[times_engine]))
  96. times_engine =times_engine + 1
  97. thread.start()
  98. thread_list.append(thread)
  99. for thread in thread_list:
  100. thread.join()
  101. endtime = dt.now()
  102. print((endtime-starttime).seconds)