# coding=utf-8
import os
import time
import math
import numpy as np
import pandas as pd
import tushare as ts
import tensorflow as tf
import matplotlib.pyplot as plt
from tensorflow.keras.layers import Dropout, Dense, SimpleRNN
from sklearn.preprocessing import MinMaxScaler
from sklearn.metrics import mean_squared_error, mean_absolute_error

INTnpseed = 19740425  # fixed seed so x_train and y_train are shuffled identically
################################################################################
### Fetch K-line data for the given stock from the start date to the end date.
### Inputs: stock code, K-line type, start date, end date. The data is saved
### to a CSV file; nothing is returned. Example rows of the saved CSV:
###  , date, open, close, high, low, volume, code
### 0,2010-01-04,7.43,7.538,7.707,7.345,66162.85,600888
### 1,2010-01-05,7.604,8.291,8.291,7.58,227146.79,600888
### 2,2010-01-06,8.437,9.119,9.119,8.375,318151.07,600888
################################################################################
def FUNCgetstockdata(STRcode, STRtype, STRbegindate, STRenddate):
    FILEhandle = ts.get_k_data(STRcode, ktype=STRtype, start=STRbegindate, end=STRenddate)
    FILEname = "./" + STRcode + ".csv"
    FILEhandle.to_csv(FILEname)
################################################################################
### Return today's date as a 10-character "YYYY-MM-DD" string. No input.
################################################################################
def FUNCgetyyyymmdd():
    STRtoday = time.strftime("%Y%m%d", time.gmtime())  # current date, e.g. 20200512
    STRyyyymmdd = STRtoday[0:4] + "-" + STRtoday[4:6] + "-" + STRtoday[6:]  # e.g. 2020-05-12
    return STRyyyymmdd
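
### Example: on 2020-05-12 (UTC) this returns "2020-05-12". Note that
### time.gmtime() is UTC, so near midnight the result can lag the local
### (Beijing) trading date by up to 8 hours.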
################################################################################
### Train a recurrent neural network on the stock's K-line data read from the
### CSV file. Input: stock code. Builds training and test sets, trains the
### model, and reports the error on the test set.
################################################################################
def FUNCrnnnet(STRcode):
    FUNCgetstockdata(STRcode, "D", "2000-01-01", FUNCgetyyyymmdd())  # fetch the data
    FILEname = "./" + STRcode + ".csv"
    STOCKLIST = pd.read_csv(FILEname)  # read the data
    train_set = STOCKLIST.iloc[0:int(len(STOCKLIST)*0.7), 2:3]  # first 70% of rows, column 2: opening price
    test_set = STOCKLIST.iloc[int(len(STOCKLIST)*0.7):, 2:3]    # last 30% of rows, column 2: opening price
    sc = MinMaxScaler(feature_range=(0, 1))  # normalization
    ### fit(): computes the parameters of the training set X (min, max, mean,
    ###     variance, ...) and stores them on the scaler object.
    ### transform(): applies the transformation to a dataset using those stored
    ###     parameters (standardization, dimensionality reduction, normalization,
    ###     etc., depending on the tool: PCA, StandardScaler, and so on).
    ### fit_transform(): fit() followed by transform() on the same data, i.e. it
    ###     both learns the parameters and applies the transformation.
    ### The training set must use fit_transform(); the test set must use
    ### transform() only, so that it is scaled with the parameters learned from
    ### the training set. Even when the two calls happen to produce identical
    ### results, they are not interchangeable in either direction.
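    ### Toy illustration (synthetic numbers, not the stock data; sc2 is just an
    ### illustrative name):
    ###     sc2 = MinMaxScaler(feature_range=(0, 1))
    ###     sc2.fit_transform([[1.0], [2.0], [3.0]])  # -> [[0. ], [0.5], [1. ]]
    ###     sc2.transform([[4.0]])                    # -> [[1.5]]
    ### A test value outside the training range maps outside [0, 1], which is
    ### expected: the test set is scaled with the training set's min and max.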
    train_set_scaled = sc.fit_transform(train_set)
    ### >>> train_set          >>> train_set_scaled
    ###        open
    ### 0     1.547            array([[0.02520901],
    ### 1     1.678                   [0.03356947],
    ### 2     1.607                   [0.02903823],
    ### ...     ...                   ...,
    ### 3356  4.105                   [0.18846129],
    ### 3357  4.203                   [0.19471568],
    ### 3358  4.128                   [0.18992916]])
    test_set = sc.transform(test_set)
    ### >>> test_set           >>> test_set (scaled)
    ###        open
    ### 3359  4.270            array([[0.19899164],
    ### 3360  4.233                   [0.19663029],
    ### 3361  4.113                   [0.18897186],
    ### ...     ...                   ...,
    ### 4796  4.510                   [0.21430851],
    ### 4797  4.510                   [0.21430851],
    ### 4798  4.670                   [0.22451975]])
    x_train = []
    y_train = []
    x_test = []
    y_test = []
    # Sliding window: use 60 consecutive opening prices to predict the 61st.
    for i in range(60, len(train_set_scaled)):
        x_train.append(train_set_scaled[i-60:i, 0])
        y_train.append(train_set_scaled[i, 0])
    # Reseeding with the same value shuffles x_train and y_train in step.
    np.random.seed(INTnpseed)
    np.random.shuffle(x_train)
    np.random.seed(INTnpseed)
    np.random.shuffle(y_train)
    np.random.seed(INTnpseed)
    x_train, y_train = np.array(x_train), np.array(y_train)
    x_train = np.reshape(x_train, (x_train.shape[0], 60, 1))
    for i in range(60, len(test_set)):
        x_test.append(test_set[i-60:i, 0])
        y_test.append(test_set[i, 0])
    x_test, y_test = np.array(x_test), np.array(y_test)
    x_test = np.reshape(x_test, (x_test.shape[0], 60, 1))
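    ### Shape check: with N scaled training prices the window loop yields
    ### N - 60 samples, so x_train.shape == (N - 60, 60, 1), i.e.
    ### (samples, timesteps, features), the 3-D input SimpleRNN expects.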
    model = tf.keras.Sequential([
        SimpleRNN(80, return_sequences=True),
        Dropout(0.2),
        SimpleRNN(100),
        Dropout(0.2),
        Dense(1)])
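    ### Output shapes through the stack, for a batch of 60-step windows:
    ### SimpleRNN(80, return_sequences=True) -> (batch, 60, 80), the full
    ### hidden-state sequence, so the next recurrent layer can consume it;
    ### SimpleRNN(100) -> (batch, 100), the last hidden state only;
    ### Dense(1) -> (batch, 1), the predicted scaled opening price.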
    # Regression model: mean squared error is the loss; a classification
    # accuracy metric would be meaningless here.
    model.compile(
        optimizer=tf.keras.optimizers.Adam(0.01),
        loss="mean_squared_error")
    checkpoint_save_path = "./checkpoint.stock/stock.ckpt"
    if os.path.exists(checkpoint_save_path + ".index"):
        print("-------------------- Loading model --------------------")
        model.load_weights(checkpoint_save_path)
    cp_callback = tf.keras.callbacks.ModelCheckpoint(
        filepath=checkpoint_save_path,
        save_weights_only=True,
        save_best_only=True,
        monitor="val_loss")
    history = model.fit(
        x_train, y_train,
        batch_size=128,
        epochs=100,
        validation_data=(x_test, y_test),
        validation_freq=1,
        callbacks=[cp_callback])
    model.summary()
    print(model.trainable_variables)
    # Dump the trained weights to a text file for inspection.
    with open('./weights.stock.txt', 'w') as file:
        for v in model.trainable_variables:
            file.write(str(v.name) + '\n')
            file.write(str(v.shape) + '\n')
            file.write(str(v.numpy()) + '\n')
    ### Plot the training and validation loss curves.
    loss = history.history['loss']
    val_loss = history.history['val_loss']
    plt.plot(loss, label=u'Training loss')
    plt.plot(val_loss, label=u'Validation loss')
    plt.title(u'Training and validation loss')
    plt.legend()
    plt.show()
    predicted_stock_price = model.predict(x_test)
    predicted_stock_price = sc.inverse_transform(predicted_stock_price)  # undo the normalization
    real_stock_price = sc.inverse_transform(test_set[60:])
    plt.plot(real_stock_price, color="red", label=u"Actual price")
    plt.plot(predicted_stock_price, color="blue", label=u"Predicted price")
    plt.title(u"Stock price prediction")
    plt.xlabel(u"Time")
    plt.ylabel(u"Price")
    plt.legend()
    plt.show()
    mse = mean_squared_error(predicted_stock_price, real_stock_price)
    rmse = math.sqrt(mse)
    mae = mean_absolute_error(predicted_stock_price, real_stock_price)
    print("MSE: {:>0.6f}, RMSE: {:>0.6f}, MAE: {:>0.6f}".format(mse, rmse, mae))
################################################################################
### Use the trained recurrent network: feed in the last 60 days of opening
### prices and print the predicted next value.
################################################################################
def FUNCusernnnet(STRcode):
    FUNCgetstockdata(STRcode, "D", "2000-01-01", FUNCgetyyyymmdd())  # fetch the data
    FILEname = "./" + STRcode + ".csv"
    STOCKLIST = pd.read_csv(FILEname)  # read the data
    train_set = STOCKLIST.iloc[len(STOCKLIST)-60:, 2:3]  # last 60 days' opening prices
    # Note: the scaler is refit on this 60-day window alone, not on the
    # training set the checkpoint was trained with.
    sc = MinMaxScaler(feature_range=(0, 1))  # normalization
    train_set_scaled = sc.fit_transform(train_set)
    x_train = []
    for i in range(60, len(train_set_scaled)+1):
        x_train.append(train_set_scaled[i-60:i, 0])
    x_train = np.array(x_train)
    x_train = np.reshape(x_train, (x_train.shape[0], 60, 1))
    model = tf.keras.Sequential([
        SimpleRNN(80, return_sequences=True),
        Dropout(0.2),
        SimpleRNN(100),
        Dropout(0.2),
        Dense(1)])
    model.compile(
        optimizer=tf.keras.optimizers.Adam(0.01),
        loss="mean_squared_error")
    checkpoint_save_path = "./checkpoint.stock/stock.ckpt"
    if os.path.exists(checkpoint_save_path + ".index"):
        print("-------------------- Loading model --------------------")
        model.load_weights(checkpoint_save_path)
    predicted_stock_price = model.predict(x_train)
    predicted_stock_price = sc.inverse_transform(predicted_stock_price)  # undo the normalization
    print("Predicted " + STRcode + " estimate: " + str(predicted_stock_price))
    time.sleep(5)  # pause so the result is readable before the menu redraws

" Stock forecasting program >>>>>>>>") print(" L600888............ train 600888 Xinjiang Zhonghe data ") print("
P600888............ forecast 600888 Xinjiang Zhonghe appraisal ") print(" L600289............ train 600289 Yiyang Xintong data ")
print(" P600289............ forecast 600289 Yiyang Xintong appraisal ") print("
L002208............ train 002208 Hefei urban construction data ") print(" P002208............ forecast 002208 Hefei Urban Construction Appraisal ")
print(" Quit............... Exit the system ") STRinput=input(" >>>>>>>> Please enter a selection :") STRinput=
STRinput.upper() # Convert input to uppercase if STRinput=="L600888": FUNCrnnnet("600888") elif
STRinput=="P600888": FUNCusernnnet("600888") elif STRinput=="L600289":
FUNCrnnnet("600289") elif STRinput=="P600289": FUNCusernnnet("600289") elif
STRinput=="L002208": FUNCrnnnet("002208") elif STRinput=="P002208":
FUNCusernnnet("002208") elif STRinput=="QUIT": break else: continue