Merge pull request #849 from abckhush/main
Tesla Historic Stock Price Analysis
Showing 12 changed files with 76 additions and 0 deletions.
@@ -0,0 +1,13 @@
# **Tesla Ticker | Historical Stock Price Dataset**

This dataset contains the historical stock prices of Tesla, Inc. (TSLA).

### Insights
- Analyze historical trends in Tesla's stock price
- Forecast future stock price movements

### Source
Acquired from the Yahoo Finance library

### Dataset Link
https://www.kaggle.com/datasets/muhammadibrahimqasmi/tesla-stock-insights-and-predictions/data
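Since the data is credited to the Yahoo Finance library, a minimal sketch of how an equivalent TSLA history could be pulled with the `yfinance` package is shown below; the date range is an illustrative assumption and is not part of this commit, which uses the Kaggle CSV instead.

```python
# Illustrative sketch only: fetch TSLA daily OHLCV history with yfinance.
# The date range is an assumption; the committed project loads the Kaggle CSV instead.
import yfinance as yf

tsla = yf.download("TSLA", start="2010-06-29", end="2024-06-20")  # DataFrame of daily OHLCV rows
print(tsla.tail())
```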
Six of the changed files could not be rendered for viewing.
Tesla Historic Stock Price Analysis/Model/tesla-ticker-bigru.ipynb (1 change: 1 addition & 0 deletions)
@@ -0,0 +1 @@
{"cells":[{"cell_type":"markdown","metadata":{},"source":["# Tesla Ticker| Histroical Stock Price Dataset| GRU| BiGRU"]},{"cell_type":"code","execution_count":1,"metadata":{"_cell_guid":"b1076dfc-b9ad-4769-8c92-a6c4dae69d19","_uuid":"8f2839f25d086af736a60e9eeb907d3b93b6e0e5","execution":{"iopub.execute_input":"2024-06-20T17:33:21.834984Z","iopub.status.busy":"2024-06-20T17:33:21.834489Z","iopub.status.idle":"2024-06-20T17:33:42.911579Z","shell.execute_reply":"2024-06-20T17:33:42.910348Z","shell.execute_reply.started":"2024-06-20T17:33:21.834946Z"},"trusted":true},"outputs":[{"name":"stderr","output_type":"stream","text":["2024-06-20 17:33:29.010197: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\n","2024-06-20 17:33:29.010414: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n","2024-06-20 17:33:29.195465: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\n"]}],"source":["# Import Libararies\n","import pandas as pd\n","import seaborn as sns\n","import matplotlib.pyplot as plt\n","import numpy as np\n","# implort plotly for interactive plots\n","import plotly.express as px\n","import plotly.graph_objects as go\n","from plotly.offline import iplot\n","#import plot_acf\n","from statsmodels.graphics.tsaplots import plot_acf\n","# import seasonal decompose\n","from statsmodels.tsa.seasonal import seasonal_decompose\n","# scaler\n","from sklearn.preprocessing import MinMaxScaler\n","# train_test_split\n","from sklearn.model_selection import train_test_split\n","# sequential using keras\n","from tensorflow.keras.models import Sequential\n","# Dense, Bidirectional,GRU Layers using keras\n","from tensorflow.keras.layers import Dense, Bidirectional, GRU\n","# Adam optimizer\n","from tensorflow.keras.optimizers import Adam\n","# Early stoppng or callbacks\n","from tensorflow.keras.callbacks import EarlyStopping\n","# import metrics\n","from sklearn.metrics import r2_score, mean_squared_error\n","# import warnings\n","import warnings \n","warnings.filterwarnings('ignore')"]},{"cell_type":"markdown","metadata":{},"source":["## BiGRU"]},{"cell_type":"code","execution_count":27,"metadata":{"execution":{"iopub.execute_input":"2024-06-20T17:38:25.753838Z","iopub.status.busy":"2024-06-20T17:38:25.753284Z","iopub.status.idle":"2024-06-20T17:39:00.332807Z","shell.execute_reply":"2024-06-20T17:39:00.331452Z","shell.execute_reply.started":"2024-06-20T17:38:25.753799Z"},"trusted":true},"outputs":[{"name":"stdout","output_type":"stream","text":["Epoch 1/20\n","\u001b[1m20/20\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m11s\u001b[0m 97ms/step - loss: 0.1124 - mae: 0.2437 - val_loss: 0.0053 - val_mae: 0.0648\n","Epoch 2/20\n","\u001b[1m20/20\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m1s\u001b[0m 51ms/step - loss: 0.0051 - mae: 0.0563 - val_loss: 0.0048 - val_mae: 0.0593\n","Epoch 3/20\n","\u001b[1m20/20\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m1s\u001b[0m 47ms/step - loss: 0.0031 - mae: 0.0406 - val_loss: 0.0025 - val_mae: 0.0432\n","Epoch 4/20\n","\u001b[1m20/20\u001b[0m 
\u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m1s\u001b[0m 48ms/step - loss: 0.0020 - mae: 0.0333 - val_loss: 0.0019 - val_mae: 0.0361\n","Epoch 5/20\n","\u001b[1m20/20\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m1s\u001b[0m 49ms/step - loss: 0.0015 - mae: 0.0285 - val_loss: 0.0014 - val_mae: 0.0297\n","Epoch 6/20\n","\u001b[1m20/20\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m1s\u001b[0m 48ms/step - loss: 0.0013 - mae: 0.0255 - val_loss: 0.0011 - val_mae: 0.0269\n","Epoch 7/20\n","\u001b[1m20/20\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m1s\u001b[0m 48ms/step - loss: 0.0011 - mae: 0.0242 - val_loss: 0.0011 - val_mae: 0.0271\n","Epoch 8/20\n","\u001b[1m20/20\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m1s\u001b[0m 49ms/step - loss: 0.0011 - mae: 0.0244 - val_loss: 0.0012 - val_mae: 0.0285\n","Epoch 9/20\n","\u001b[1m20/20\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m1s\u001b[0m 48ms/step - loss: 0.0011 - mae: 0.0246 - val_loss: 0.0013 - val_mae: 0.0287\n","Epoch 10/20\n","\u001b[1m20/20\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m1s\u001b[0m 48ms/step - loss: 0.0011 - mae: 0.0240 - val_loss: 0.0012 - val_mae: 0.0277\n","Epoch 11/20\n","\u001b[1m20/20\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m1s\u001b[0m 48ms/step - loss: 0.0010 - mae: 0.0233 - val_loss: 0.0011 - val_mae: 0.0270\n","Epoch 12/20\n","\u001b[1m20/20\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m1s\u001b[0m 48ms/step - loss: 0.0010 - mae: 0.0229 - val_loss: 0.0011 - val_mae: 0.0264\n","Epoch 13/20\n","\u001b[1m20/20\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m1s\u001b[0m 48ms/step - loss: 9.8533e-04 - mae: 0.0226 - val_loss: 0.0010 - val_mae: 0.0259\n","Epoch 14/20\n","\u001b[1m20/20\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m1s\u001b[0m 48ms/step - loss: 9.6125e-04 - mae: 0.0223 - val_loss: 0.0010 - val_mae: 0.0253\n","Epoch 15/20\n","\u001b[1m20/20\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m1s\u001b[0m 48ms/step - loss: 9.4035e-04 - mae: 0.0221 - val_loss: 9.6782e-04 - val_mae: 0.0249\n","Epoch 16/20\n","\u001b[1m20/20\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m1s\u001b[0m 48ms/step - loss: 9.2149e-04 - mae: 0.0218 - val_loss: 9.3528e-04 - val_mae: 0.0244\n","Epoch 17/20\n","\u001b[1m20/20\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m1s\u001b[0m 55ms/step - loss: 9.0458e-04 - mae: 0.0216 - val_loss: 9.0525e-04 - val_mae: 0.0240\n","Epoch 18/20\n","\u001b[1m20/20\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m1s\u001b[0m 64ms/step - loss: 8.8945e-04 - mae: 0.0214 - val_loss: 8.7729e-04 - val_mae: 0.0236\n","Epoch 19/20\n","\u001b[1m20/20\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m2s\u001b[0m 55ms/step - loss: 8.7584e-04 - mae: 0.0212 - val_loss: 8.5052e-04 - val_mae: 0.0232\n","Epoch 20/20\n","\u001b[1m20/20\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m1s\u001b[0m 51ms/step - loss: 8.6375e-04 - mae: 0.0211 - val_loss: 8.2513e-04 - val_mae: 0.0229\n","\u001b[1m6/6\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m2s\u001b[0m 164ms/step\n","\u001b[1m6/6\u001b[0m 
\u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 14ms/step - loss: 4.8865e-04 - mae: 0.0169\n","Test Loss: 0.0005475360667333007\n","Test MAE: 0.018060998991131783\n","R2 Score: 0.9174902227784482\n","Mean Squared Error: 0.0005475360082505356\n"]}],"source":["import numpy as np\n","import tensorflow as tf\n","from sklearn.model_selection import train_test_split\n","from sklearn.preprocessing import MinMaxScaler\n","from sklearn.metrics import mean_squared_error, r2_score\n","from tensorflow.keras.models import Sequential\n","from tensorflow.keras.layers import Bidirectional, GRU, Dense\n","from tensorflow.keras.optimizers import Adam\n","from tensorflow.keras.callbacks import EarlyStopping\n","\n","# Normalize the data\n","scaler = MinMaxScaler()\n","df['Close'] = scaler.fit_transform(df[['Close']])\n","\n","# Split the dataset into training and testing sets\n","train_data, test_data = train_test_split(df['Close'], test_size=0.2, shuffle=False, random_state=42)\n","\n","# Prepare input sequences and target values\n","def create_sequences(data, window_size):\n"," X, y = [], []\n"," for i in range(len(data) - window_size):\n"," X.append(data[i:i+window_size])\n"," y.append(data[i+window_size])\n"," return np.array(X), np.array(y)\n","\n","window_size = 30\n","X_train, y_train = create_sequences(train_data.values, window_size)\n","X_test, y_test = create_sequences(test_data.values, window_size)\n","\n","# Reshape input data to include the feature dimension\n","X_train = X_train.reshape(-1, window_size, 1)\n","X_test = X_test.reshape(-1, window_size, 1)\n","\n","# Define the deeper BiGRU model with early stopping\n","model = Sequential([\n"," Bidirectional(GRU(64, return_sequences=True, input_shape=(window_size, 1))),\n"," Bidirectional(GRU(64)),\n"," Dense(32, activation='relu'),\n"," Dense(1)\n","])\n","\n","model.compile(optimizer=Adam(), loss='mse', metrics=['mae'])\n","\n","# Define early stopping criteria\n","early_stopping = EarlyStopping(monitor='val_loss', patience=5, restore_best_weights=True)\n","\n","# Train the model with early stopping\n","model.fit(X_train, y_train, epochs=20, batch_size=32, validation_split=0.2, callbacks=[early_stopping])\n","\n","# Predictions\n","y_pred = model.predict(X_test)\n","\n","# Evaluation\n","loss, mae = model.evaluate(X_test, y_test)\n","print('Test Loss:', loss)\n","print('Test MAE:', mae)\n","\n","# Calculate R2 score\n","r2 = r2_score(y_test, y_pred)\n","print('R2 Score:', r2)\n","\n","# Calculate MSE\n","mse = mean_squared_error(y_test, y_pred)\n","print('Mean Squared Error:', mse)"]}],"metadata":{"kaggle":{"accelerator":"none","dataSources":[{"datasetId":4772437,"sourceId":8084907,"sourceType":"datasetVersion"}],"dockerImageVersionId":30732,"isGpuEnabled":false,"isInternetEnabled":true,"language":"python","sourceType":"notebook"},"kernelspec":{"display_name":"Python 3","language":"python","name":"python3"},"language_info":{"codemirror_mode":{"name":"ipython","version":3},"file_extension":".py","mimetype":"text/x-python","name":"python","nbconvert_exporter":"python","pygments_lexer":"ipython3","version":"3.10.13"}},"nbformat":4,"nbformat_minor":4} |
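Note that the BiGRU cell references a DataFrame `df` with a `Close` column that is never created in this notebook; it is presumably prepared from the Kaggle CSV in the accompanying data-exploration notebooks. A minimal sketch of the assumed loading step follows; the filename `TSLA.csv` and its column layout are assumptions, not taken from this commit.

```python
# Assumed setup for the BiGRU cell: load the Tesla price history into `df`.
# "TSLA.csv" and its Date/Close columns are assumptions about the Kaggle dataset,
# not taken from the committed notebooks.
import pandas as pd

df = pd.read_csv("TSLA.csv", parse_dates=["Date"], index_col="Date")
df = df.sort_index()           # ensure chronological order before windowing
df = df[["Close"]].dropna()    # keep only the closing price used by the model
```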
Tesla Historic Stock Price Analysis/Model/tesla-ticker-data-exploration-1.ipynb (1 change: 1 addition & 0 deletions)
Large diffs are not rendered by default.
Tesla Historic Stock Price Analysis/Model/tesla-ticker-data-exploration-2.ipynb (1 change: 1 addition & 0 deletions)
Large diffs are not rendered by default.