updated with json

klein panic
2024-11-11 19:51:46 -05:00
parent 6e43197957
commit c72d61fc42
7 changed files with 20220 additions and 0 deletions

File diff suppressed because it is too large


@@ -0,0 +1,30 @@
attrs==24.2.0
beautifulsoup4==4.12.3
certifi==2024.8.30
charset-normalizer==3.4.0
flake8==7.1.1
h11==0.14.0
idna==3.10
mccabe==0.7.0
numpy==2.1.2
outcome==1.3.0.post0
pandas==2.2.3
pycodestyle==2.12.1
pyflakes==3.2.0
PySocks==1.7.1
python-dateutil==2.9.0.post0
pytz==2024.2
requests==2.32.3
selenium==4.25.0
six==1.16.0
sniffio==1.3.1
sortedcontainers==2.4.0
soupsieve==2.6
tqdm==4.66.6
trio==0.27.0
trio-websocket==0.11.1
typing_extensions==4.12.2
tzdata==2024.2
urllib3==2.2.3
websocket-client==1.8.0
wsproto==1.2.0


@@ -0,0 +1,60 @@
import json
from datetime import datetime

from ibapi.client import EClient
from ibapi.wrapper import EWrapper
from ibapi.contract import Contract


class IBKRDataRetriever(EWrapper, EClient):
    def __init__(self):
        EClient.__init__(self, self)

    def connect_and_retrieve_data(self):
        self.connect("127.0.0.1", 7497, clientId=0)  # Ensure IB Gateway or TWS is running
        contract = Contract()
        contract.symbol = "AAPL"  # Example stock; replace as needed
        contract.secType = "STK"
        contract.exchange = "SMART"
        contract.currency = "USD"
        # Note: production code usually issues this request from the
        # nextValidId callback, once the handshake has fully completed.
        self.reqHistoricalData(
            reqId=1,
            contract=contract,
            endDateTime=datetime.now().strftime("%Y%m%d %H:%M:%S"),
            durationStr="1 D",
            barSizeSetting="1 day",
            whatToShow="MIDPOINT",  # MIDPOINT bars do not carry real volume
            useRTH=1,
            formatDate=1,
            keepUpToDate=False,
            chartOptions=[]
        )

    def historicalData(self, reqId, bar):
        # Called once for each bar returned by reqHistoricalData.
        data = {
            "Date": bar.date,
            "Close/Last": bar.close,
            "Volume": bar.volume,
            "Open": bar.open,
            "High": bar.high,
            "Low": bar.low
        }
        self.save_data_to_json(data)

    def historicalDataEnd(self, reqId, start, end):
        # All requested bars received; disconnect so run() can return.
        self.disconnect()

    def save_data_to_json(self, data):
        json_path = "../data/HistoricalData.json"
        try:
            with open(json_path, "r") as file:
                historical_data = json.load(file)
        except (FileNotFoundError, json.JSONDecodeError):
            historical_data = []
        historical_data.insert(0, data)  # Newest bar first
        with open(json_path, "w") as file:
            json.dump(historical_data, file, indent=4)
        print(f"Data saved to {json_path}")


if __name__ == "__main__":
    app = IBKRDataRetriever()
    app.connect_and_retrieve_data()
    app.run()  # Process incoming messages until historicalDataEnd disconnects
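
Note: the retriever above writes ../data/HistoricalData.json, while predict_price.py below reads CSV files. A minimal bridge sketch, assuming the JSON schema shown above; the target CSV name and the oldest-first reordering are illustrative assumptions:

import json
import pandas as pd

# Read the bars written by IBKRDataRetriever (stored newest-first) and
# reshape them into the column layout predict_price.py expects.
with open("../data/HistoricalData.json", "r") as file:
    bars = json.load(file)

df = pd.DataFrame(bars).rename(columns={"Close/Last": "Close"})
df = df.iloc[::-1].reset_index(drop=True)  # oldest-first so lagged closes line up
df.to_csv("3_month_testing_data.csv")  # default index re-reads as "Unnamed: 0"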

File diff suppressed because it is too large

File diff suppressed because it is too large

src/IBKR/predict_price.py

@@ -0,0 +1,78 @@
import pandas as pd
import numpy as np
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import mean_squared_error, mean_absolute_error
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Input
from tensorflow.keras.callbacks import EarlyStopping

# Load the training and testing data
training_data = pd.read_csv("3_years_training_data.csv")
testing_data = pd.read_csv("3_month_testing_data.csv")

# Drop the CSV index and date columns, which are not model features
training_data = training_data.drop(columns=["Unnamed: 0", "Date"])
testing_data = testing_data.drop(columns=["Unnamed: 0", "Date"])

# Create lagged features so each row carries the previous n closing prices
def create_lagged_features(data, n_lags=3):
    df = data.copy()
    for lag in range(1, n_lags + 1):
        df[f'Close_lag_{lag}'] = df['Close'].shift(lag)
    df.dropna(inplace=True)  # Remove rows with NaN values introduced by shifting
    return df

# Apply lagged features to the training and testing datasets
training_data = create_lagged_features(training_data)
testing_data = create_lagged_features(testing_data)

# Separate features and target
X_train = training_data.drop(columns=["Close"]).values
y_train = training_data["Close"].values
X_test = testing_data.drop(columns=["Close"]).values
y_test = testing_data["Close"].values

# Standardize the features (fit on the training set only to avoid leakage)
scaler = StandardScaler()
X_train = scaler.fit_transform(X_train)
X_test = scaler.transform(X_test)

# Build the neural network model
model = Sequential([
    Input(shape=(X_train.shape[1],)),
    Dense(64, activation='sigmoid'),
    Dense(32, activation='sigmoid'),
    Dense(16, activation='sigmoid'),
    Dense(1)  # Single linear output for regression
])

# Compile the model
model.compile(optimizer='adam', loss='mse', metrics=['mae'])

# Use early stopping to prevent overfitting
early_stopping = EarlyStopping(monitor='val_loss', patience=10, restore_best_weights=True)

# Train the model
history = model.fit(
    X_train, y_train,
    epochs=100,
    batch_size=32,
    validation_split=0.2,
    callbacks=[early_stopping],
    verbose=1
)

# Evaluate the model on the test set
y_pred = model.predict(X_test).flatten()
mse = mean_squared_error(y_test, y_pred)
mae = mean_absolute_error(y_test, y_pred)
print(f"Neural Network MSE: {mse:.2f}")
print(f"Neural Network MAE: {mae:.2f}")

# Use the most recent feature row as a stand-in for tomorrow's inputs;
# .values matches the array format the scaler was fitted on
latest_data = testing_data.tail(1).drop(columns=["Close"]).values
latest_data_scaled = scaler.transform(latest_data)

# Predict tomorrow's close price
tomorrow_pred = model.predict(latest_data_scaled)
print(f"Predicted Close Price for Tomorrow: {tomorrow_pred[0][0]:.2f}")

src/IBKR/requirements.txt

@@ -0,0 +1,47 @@
absl-py==2.1.0
astunparse==1.6.3
certifi==2024.8.30
charset-normalizer==3.4.0
flatbuffers==24.3.25
gast==0.6.0
google-pasta==0.2.0
grpcio==1.67.1
h5py==3.12.1
ibapi==9.81.1.post1
idna==3.10
importlib_metadata==8.5.0
joblib==1.4.2
keras==3.6.0
libclang==18.1.1
Markdown==3.7
markdown-it-py==3.0.0
MarkupSafe==3.0.2
mdurl==0.1.2
ml-dtypes==0.4.1
namex==0.0.8
numpy==2.0.2
opt_einsum==3.4.0
optree==0.13.0
packaging==24.1
pandas==2.2.3
protobuf==5.28.3
Pygments==2.18.0
python-dateutil==2.9.0.post0
pytz==2024.2
requests==2.32.3
rich==13.9.4
scikit-learn==1.5.2
scipy==1.13.1
six==1.16.0
tensorboard==2.18.0
tensorboard-data-server==0.7.2
tensorflow==2.18.0
tensorflow-io-gcs-filesystem==0.37.1
termcolor==2.5.0
threadpoolctl==3.5.0
typing_extensions==4.12.2
tzdata==2024.2
urllib3==2.2.3
Werkzeug==3.1.1
wrapt==1.16.0
zipp==3.20.2