forecasting.py
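# LSTM-based stock price forecasting on Alpha Vantage daily-adjusted data.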
import numpy as np
from tensorflow import keras
from tensorflow.keras import Sequential
from tensorflow.keras.layers import Dense, LSTM, Dropout
import pandas as pd
from sklearn.preprocessing import MinMaxScaler
import matplotlib.pyplot as plt
from alpha_vantage.timeseries import TimeSeries
import CONFIG
RANDOM_SEED = 42
np.random.seed(RANDOM_SEED)
TS_KEY = CONFIG.VARIABLES['TS_KEY']
def forecasting_stock(quo):
    quo = quo.upper()
    ts = TimeSeries(key=TS_KEY, output_format='pandas')
    try:
        # get_daily_adjusted returns a (dataframe, metadata) tuple.
        data = ts.get_daily_adjusted(symbol=quo, outputsize='full')
    except Exception:
        return "No data found"

    # Cap the history at the 1200 most recent rows; refuse to forecast on too little data.
    data_cutoff = min(1200, len(data[0]))
    if data_cutoff <= 130:
        return "Not enough historical data to forecast"

    df = data[0][:data_cutoff]
    # Alpha Vantage returns newest rows first; reverse to chronological order.
    df = df.iloc[::-1]
    # Chronological 80/20 train/test split.
    train_size = int(len(df) * 0.8)
    test_size = len(df) - train_size
    train, test = df.iloc[0:train_size], df.iloc[train_size:len(df)]
    # print(len(train), len(test))

    # Drop the raw close column and scale the remaining features to [0, 1].
    train = train.drop(['4. close'], axis=1)
    scaler = MinMaxScaler()
    train = scaler.fit_transform(train)

    # Build 60-day sliding windows; column 0 of the scaled array is the prediction target.
    X_train = []
    y_train = []
    for i in range(60, train.shape[0]):
        X_train.append(train[i - 60:i])
        y_train.append(train[i, 0])
    X_train, y_train = np.array(X_train), np.array(y_train)
    # print(X_train.shape)
    # Stacked LSTM regressor with dropout between layers.
    regressor = Sequential()
    regressor.add(LSTM(units=60, activation='relu', return_sequences=True,
                       input_shape=(X_train.shape[1], X_train.shape[2])))
    regressor.add(Dropout(0.2))
    regressor.add(LSTM(units=60, activation='relu', return_sequences=True))
    regressor.add(Dropout(0.2))
    regressor.add(LSTM(units=80, activation='relu', return_sequences=True))
    regressor.add(Dropout(0.2))
    regressor.add(LSTM(units=120, activation='relu'))
    regressor.add(Dropout(0.2))
    regressor.add(Dense(units=1))
    # regressor.summary()

    regressor.compile(optimizer='adam', loss='mean_squared_error')
    regressor.fit(X_train, y_train, epochs=3, batch_size=32)
    # Testing data: prepend the last 60 training days so the first test window is complete.
    train = df.iloc[0:train_size]
    past_60_days = train.tail(60)
    test_df = pd.concat([past_60_days, test], ignore_index=True)  # DataFrame.append was removed in pandas 2.x
    test_df = test_df.drop(['4. close'], axis=1)
    inputs = scaler.transform(test_df)

    X_test = []
    y_test = []
    for i in range(60, inputs.shape[0]):
        X_test.append(inputs[i - 60:i])
        y_test.append(inputs[i, 0])
    X_test, y_test = np.array(X_test), np.array(y_test)
    # print(X_test.shape, y_test.shape)
    # Predict, then undo the MinMax scaling on the target column to get price units back.
    y_pred = regressor.predict(X_test)
    scale = 1 / scaler.scale_[0]
    y_pred = y_pred * scale
    y_test = y_test * scale

    # Return the last five predictions as a newline-separated string.
    y_pred = y_pred[-5:].tolist()
    for i in range(5):
        y_pred[i] = str(round(y_pred[i][0], 2))
    return "\n".join(y_pred)
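

# Minimal usage sketch (assumptions: CONFIG.VARIABLES['TS_KEY'] holds a valid Alpha Vantage
# API key, and 'ibm' is only an example ticker). Prints the model's last five predicted
# values, one per line, or an error message if data could not be fetched.
if __name__ == '__main__':
    print(forecasting_stock('ibm'))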