diff --git a/api/__pycache__/app.cpython-310.pyc b/api/__pycache__/app.cpython-310.pyc
index e0cb02c71747bd6c7c0edfdb70161572f9f8e08a..a694bd589ce1602823bdfe06ffd3bb86cfc44283 100644
Binary files a/api/__pycache__/app.cpython-310.pyc and b/api/__pycache__/app.cpython-310.pyc differ
diff --git a/api/run.py b/api/run.py
index 8e1ba814547677540c27df9ee15c8bf9469236d8..71116980b90ad7cae6b5de6588a9e270e8f543f2 100644
--- a/api/run.py
+++ b/api/run.py
@@ -10,10 +10,11 @@ import pandas as pd
 import datetime as dt
 import numpy as np
 import json
+import time
 
 from sqlalchemy import create_engine
-from sklearn.preprocessing import MinMaxScaler
+from sklearn.preprocessing import MinMaxScaler
 from tensorflow import keras
 from keras.models import Sequential
 from keras.layers import Dense, LSTM, Dropout
@@ -48,12 +49,23 @@ x_test = np.array([])
 y_test = np.array([])
 predictions = np.array([])
 
-# Lstm Model
+# Initialising the LSTM model and its training hyper-parameters
 model = Sequential()
-feature_length = 100
+feature_length = 10
+testing_records = 5
+batch_size = 5
+epochs = 10
+accuracy = 0
+execute_time = 0
 
 # Scaler
-scaler = MinMaxScaler(feature_range=(0,1))
+scaler = MinMaxScaler()
+
+# MySQL database connection (shared by all functions below)
+db_connection_str = 'mysql+pymysql://root:@localhost/csct'
+db_connection = create_engine(db_connection_str)
 
 #============================================
@@ -63,15 +75,15 @@ scaler = MinMaxScaler(feature_range=(0,1))
 
 # Read data from data base and re arrange data
+#---------------------------------------------
 def read_data_set():
     # Declare global variable
     global data_set
     global raw_data_set
     global index_data_set
-    # Define database connection
-    db_connection_str = 'mysql+pymysql://root:@localhost/csct'
-    db_connection = create_engine(db_connection_str)
+
+
     # Read data in to Data Frame
     raw_data_set = pd.read_sql('SELECT * FROM aw_product_demand', con=db_connection)
@@ -95,35 +107,61 @@ def read_data_set():
     return data_set
 
+#-------------------------------------------------------------------------------
+# Persist one training run (hyper-parameters, duration, accuracy) for the dashboard
+def save_tranning_history(epochs, time_steps, batch_size, execute_time, accuracy):
+    id = db_connection.execute("INSERT INTO `trannings` (`date_time`, `epochs`, `time_steps`, `batch_size`, `execute_time`, `accurecy`) VALUES (CURRENT_TIMESTAMP, '"+str(epochs)+"', '"+str(time_steps)+"', '"+str(batch_size)+"', '"+str(execute_time)+"', '"+str(accuracy)+"')")
+    print("Row Added = ", id.rowcount)
+
+#-------------------------------------------------------------------------------
+
 # Function to create x and y data
-def Create_Features_and_Targets(data, feature_length):
-    # Declare Xand Y List
-    X = list()
-    Y = list()
-    for i in range(len(data) - feature_length -1):
-        # create X array shift by feature length
-        X.append(data[i:(i + feature_length), 0])
-
-        Y.append(data[i + feature_length, 0])
-    # convert to numpy array format
-    X = np.array(X)
-    Y = np.array(Y)
-    return X,Y
+def input_and_targert(data, feature_length):
+
+    # Generate samples: each window of `feature_length` consecutive values
+    # predicts the value that immediately follows it
+    x_samples = list()
+    y_samples = list()
+    NumerOfRows = len(data)
+    for i in range(feature_length, NumerOfRows, 1):
+        x_sample = data[i-feature_length:i]
+        y_sample = data[i]
+        x_samples.append(x_sample)
+        y_samples.append(y_sample)
+
+    # Reshape the input as 3D: (number of samples, length of features, features)
+    X = np.array(x_samples)
+    X = X.reshape(X.shape[0], X.shape[1], 1)
+    print("\n____Input Data Shape:____")
+    print(X.shape)
+
+    # Reshape the target as 2D: (number of samples, 1)
+    Y = np.array(y_samples)
+    Y = Y.reshape(Y.shape[0], 1)
+    print("\n______Target Data Shape:______")
+    print(Y.shape)
+
+    return X, Y
+#----------------------------------------------------------------------------
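The new input_and_targert helper replaces the old shift-based Create_Features_and_Targets with an explicit sliding window: each block of feature_length consecutive scaled values becomes one input sample, and the value just after the block is its target. The windowing logic can be sanity-checked standalone; a minimal sketch of the same idea (the names demo_series and window are illustrative, not part of the patch):

import numpy as np

demo_series = np.arange(8, dtype=float).reshape(-1, 1)  # stand-in for scaled demand
window = 3

x_samples, y_samples = [], []
for i in range(window, len(demo_series)):
    x_samples.append(demo_series[i - window:i])  # last `window` values
    y_samples.append(demo_series[i])             # the value to predict

X = np.array(x_samples).reshape(len(x_samples), window, 1)
Y = np.array(y_samples).reshape(-1, 1)
print(X.shape, Y.shape)  # (5, 3, 1) (5, 1)

With 8 points and a window of 3 this yields 5 samples, matching the 3D input / 2D target shapes the function prints.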
 
 # function to calculate prediction
 def predict_given_date(data, date, feature_length):
     if date not in data.index:
         data.loc[date]=0
     idx = data.index.get_loc(date)
-    close_col = data.iloc[:,1:2]
-    close_col = close_col.iloc[idx - feature_length : idx,:].values
-    close_col = np.expand_dims(scaler.transform(close_col) , axis = 0)
-    Prediction = model.predict(close_col)
-    Prediction=np.array(Prediction).reshape(-1, 1)
-    Prediction = scaler.inverse_transform(Prediction)
-    return Prediction
-# =========================== End of Function =================
+    # close_col = data.iloc[:,1:2]
+    # close_col = close_col.iloc[idx - feature_length : idx,:].values
+    # close_col = np.expand_dims(scaler.transform(close_col) , axis = 0)
+    # Prediction = model.predict(close_col)
+    # Prediction=np.array(Prediction).reshape(-1, 1)
+    # Prediction = scaler.inverse_transform(Prediction)
+    # NOTE: the prediction path is disabled for now; only the row index is returned
+    return idx
+
+#------------------------------------------------------------------------------
+
+# =========================== End of support functions =================
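save_tranning_history above builds its INSERT by concatenating strings, which breaks on quote characters and is open to SQL injection. Binding parameters is the usual fix; below is a minimal sketch assuming SQLAlchemy's text() construct, keeping the existing `trannings` table and `accurecy` column spellings from the schema (the name save_training_history_safe is hypothetical):

from sqlalchemy import create_engine, text

db_connection = create_engine('mysql+pymysql://root:@localhost/csct')

def save_training_history_safe(epochs, time_steps, batch_size, execute_time, accuracy):
    # Bound parameters let the driver escape values instead of string glue
    stmt = text(
        "INSERT INTO `trannings` (`date_time`, `epochs`, `time_steps`, "
        "`batch_size`, `execute_time`, `accurecy`) "
        "VALUES (CURRENT_TIMESTAMP, :epochs, :time_steps, :batch_size, "
        ":execute_time, :accuracy)"
    )
    with db_connection.begin() as conn:  # commits on success, rolls back on error
        result = conn.execute(stmt, {
            'epochs': epochs,
            'time_steps': time_steps,
            'batch_size': batch_size,
            'execute_time': execute_time,
            'accuracy': accuracy,
        })
        print("Rows added =", result.rowcount)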
 
 #============================= Main funtion ===================
 def setup():
@@ -133,6 +171,9 @@ def setup():
     global data_set
     global predictions
     global result
+    global scaler
+    global accuracy
+    global execute_time
 
     # Read Data from DataSet
     read_data_set()
@@ -142,57 +183,114 @@ def setup():
     data = data_set.iloc[:,1:2]
     data = data.values
 
+    # Scale the demand values into the (0,1) range before windowing
+    scal_data = scaler.fit_transform(data)
+
+    x, y = input_and_targert(scal_data, feature_length)
+
+    # Split into training and test sets; the last `testing_records` rows are held out
+    x_train = x[:-testing_records]
+    x_test = x[-testing_records:]
+    y_train = y[:-testing_records]
+    y_test = y[-testing_records:]
+
+    print("\n___Training Data Shape___")
+    print(x_train.shape)
+    print(y_train.shape)
+    print("\n___Testing Data Shape___")
+    print(x_test.shape)
+    print(y_test.shape)
+
+    # Show the first two input/target pairs as a sanity check
+    for inp, out in zip(x_train[0:2], y_train[0:2]):
+        print(inp, '--', out)
 
-    data = scaler.fit_transform(data)
-
-
-    X_train,y_train= Create_Features_and_Targets(data,feature_length)
-    X_train = np.reshape(X_train,(X_train.shape[0],X_train.shape[1],1))
-
-    print(X_train)
-
-    # model
-    model = Sequential([
-        LSTM(100,return_sequences=True,input_shape=(X_train.shape[1],1)),
-        Dropout(0.3),
-        LSTM(100, return_sequences = False),
-        Dropout(0.3),
-
-        Dense(1),
-    ])
-    model.compile(optimizer='adam',loss="mean_squared_error")
-    model.summary()
-
-    # Training the model
-    history = model.fit(
-        X_train,
-        y_train,
-        epochs = 5,
-        batch_size = 12,
-        verbose=1,
-    )
-
-    testData = data_set.iloc[:,1:2] # Get 'Close' feature
-    y_real=testData.iloc[feature_length+1:,0:].values #Actual values
-    x_test = testData.iloc[:,0:].values # data to test
-    # normalizing the Data using Scaler.transform function
-    x_test = scaler.transform(x_test)
-    x_test, y_test = Create_Features_and_Targets(x_test, feature_length)
-    # Making data 3 dimensional
-    x_test = np.reshape(x_test,(x_test.shape[0],x_test.shape[1],1))
-    y_pred = model.predict(x_test)
-    predictions=np.array(scaler.inverse_transform(y_pred)).ravel()
-    result["Date"]=data_set.iloc[feature_length+1:]["Date"]
-    result["Predictions"]=predictions
-    result["OrderDemand"]=data_set.iloc[feature_length+1:]["Order_Demand"]
-
-    result.plot()
-    plt.show()
-    print(result)
-
-    #save model
-    model.save('lstm_model.h5')
+    # Defining input shapes for the LSTM
+    time_steps = x_train.shape[1]
+    features = x_train.shape[2]
+    print("Number of TimeSteps:", time_steps)
+    print("Number of Features:", features)
+
+    # First LSTM layer
+    model.add(LSTM(units=10, activation='relu', input_shape=(time_steps, features), return_sequences=True))
+
+    # Second hidden LSTM layer (input_shape is redundant here; Keras infers it)
+    model.add(LSTM(units=5, activation='relu', input_shape=(time_steps, features), return_sequences=True))
+
+    # Third hidden LSTM layer
+    model.add(LSTM(units=5, activation='relu', return_sequences=False))
+
+    # Output layer
+    model.add(Dense(units=1))
+
+    # Compiling the model
+    model.compile(optimizer='adam', loss='mean_squared_error')
+
+    # Measuring the time taken by the model to train
+    start_time = time.time()
+
+    # Fitting the network to the training set
+    model_history = model.fit(x_train, y_train, batch_size=batch_size, epochs=epochs)
+
+    end_time = time.time()
+    execute_time = round((end_time - start_time)/60, 2)
+    print("__Total Time Taken:", execute_time, 'Minutes___')
+
+    # Making predictions on test data
+    predicted_Price = model.predict(x_test)
+    predicted_Price = scaler.inverse_transform(predicted_Price)
+
+    # Getting the original demand values for the testing data
+    orig = scaler.inverse_transform(y_test)
+
+    # Accuracy of the predictions: 100 minus the mean absolute percentage error
+    accuracy = 100 - (100*(abs(orig - predicted_Price)/orig)).mean()
+    accuracy = round(accuracy, 2)
+    print('Accuracy:', accuracy)
+
+    save_tranning_history(epochs, time_steps, batch_size, execute_time, accuracy)
+
+    # Generating predictions on the full data
+    TrainPredictions = scaler.inverse_transform(model.predict(x_train))
+    TestPredictions = scaler.inverse_transform(model.predict(x_test))
+
+    FullDataPredictions = np.append(TrainPredictions, TestPredictions)
+    FullDataOrig = data[time_steps:]
+
+    # Plotting predictions against actual demand over the full data
+    plt.plot(FullDataPredictions, color='blue', label='Predicted Demand')
+    plt.plot(FullDataOrig, color='lightblue', label='Original Demand')
+
+    plt.title('Order Demand Predictions')
+    plt.xlabel('Date')
+    plt.ylabel('Order Demand')
+    plt.legend()
+    fig = plt.gcf()
+    fig.set_figwidth(20)
+    fig.set_figheight(8)
+    plt.show()
+
+    result["Date"] = data_set.iloc[time_steps:]["Date"]
+    result["Predictions"] = FullDataPredictions
+    result["OrderDemand"] = data[time_steps:]
+
+    print(data_set.tail(10))
 
 # ====================================================================
 # main function call
@@ -293,20 +391,20 @@ def forecast_to_date():
 
     result=predict_given_date(index_data_set,new_date, feature_length)
 
-    df=pd.DataFrame()
-    # df=pd.DataFrame(data=result,columns=["Prediction"])
-    df['Date']=pd.date_range(start=new_date,periods=feature_length)
-    df=df.loc[::-1]
-    df['Prediction']=result
+    # df=pd.DataFrame()
+    # # df=pd.DataFrame(data=result,columns=["Prediction"])
+    # df['Date']=pd.date_range(start=new_date,periods=feature_length)
+    # df=df.loc[::-1]
+    # df['Prediction']=result
 
-    df['Date']= pd.to_datetime(df['Date']).dt.date
-    df.sort_values('Date', inplace=True)
-    df['Date']=df['Date'].astype(str)
-    df=df.set_index(df['Date'])
-    df=df.tail(1)
+    # df['Date']= pd.to_datetime(df['Date']).dt.date
+    # df.sort_values('Date', inplace=True)
+    # df['Date']=df['Date'].astype(str)
+    # df=df.set_index(df['Date'])
+    # df=df.tail(1)
 
-    return df.to_json(orient='records')
+    return result
 
 @app.route("/forecast_to_range",methods=["POST"])
 def forecast_to_range():
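The accuracy figure that setup() stores via save_tranning_history is 100 minus the mean absolute percentage error (MAPE) over the held-out rows. A worked example with made-up numbers:

import numpy as np

orig = np.array([[100.0], [200.0], [400.0]])  # actual demand (held-out rows)
pred = np.array([[ 90.0], [220.0], [380.0]])  # model output after inverse scaling

mape = (100 * np.abs(orig - pred) / orig).mean()  # (10% + 10% + 5%) / 3
accuracy = round(100 - mape, 2)
print(accuracy)  # 91.67

Because the metric divides by actual demand, any zero-demand row in the test window would blow it up; the held-out rows are assumed non-zero here.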
diff --git a/dash_board.php b/dash_board.php
index 8ea5be195d9a6d35a75078233effdfd5ff223d18..e61ea547981413d010af6a99c32220e103591da8 100644
--- a/dash_board.php
+++ b/dash_board.php
@@ -14,6 +14,8 @@
 <?php include_once './data/result_data_set.php';?>
 
+<?php if($date_validate!=''){?>
+
 <div class="row">
     <div class="col-md-12">
@@ -34,6 +36,8 @@
 </div>
 
+<?php }?>
+
 <script type="text/javascript">
@@ -41,180 +45,59 @@
 var myChart = echarts.init(chartDom);
 var option;
 
-const upColor = '#ec0000';
-const upBorderColor = '#8A0000';
-const downColor = '#00da3c';
-const downBorderColor = '#008F28';
-// Each item: open,close,lowest,highest
-const data0 = splitData([
-    <?=$train_set?>
-]);
-
-function splitData(rawData) {
-    const categoryData = [];
-    const values = [];
-    for (var i = 0; i < rawData.length; i++) {
-        categoryData.push(rawData[i].splice(0, 1)[0]);
-        values.push(rawData[i]);
-    }
-    return {
-        categoryData: categoryData,
-        values: values
-    };
-}
-
-function calculateMA(dayCount) {
-    var result = [];
-    for (var i = 0, len = data0.values.length; i < len; i++) {
-        if (i < dayCount) {
-            result.push('-');
-            continue;
-        }
-        var sum = 0;
-        for (var j = 0; j < dayCount; j++) {
-            sum += +data0.values[i - j][1];
-        }
-        result.push(sum / dayCount);
-    }
-    return result;
-}
 option = {
     title: {
-        text: 'Product Demand',
-        left: 0
+        text: 'Order Demand'
     },
     tooltip: {
-        trigger: 'axis',
-        axisPointer: {
-            type: 'cross'
-        }
+        trigger: 'axis'
     },
     legend: {
-        data: ['Product Demand']
+        data: ['Order Demand', 'Predictions', 'Error']
     },
     grid: {
-        left: '10%',
-        right: '10%',
-        bottom: '15%'
+        left: '3%',
+        right: '4%',
+        bottom: '3%',
+        containLabel: true
+    },
+    toolbox: {
+        feature: {
+            saveAsImage: {}
+        }
     },
     xAxis: {
         type: 'category',
-        data: data0.categoryData,
-        boundaryGap: false,
-        axisLine: {
-            onZero: false
-        },
-        splitLine: {
-            show: false
-        },
-        min: 'dataMin',
-        max: 'dataMax'
+        data: [<?=$date_validate?>]
     },
     yAxis: {
-        scale: true,
-        splitArea: {
-            show: true
-        }
+        type: 'value'
     },
-    dataZoom: [{
-        type: 'inside',
-        start: 50,
-        end: 100
+    series: [{
+        name: 'Order Demand',
+        type: 'line',
+        data: [<?=$order_validate?>]
     }, {
-        show: true,
-        type: 'slider',
-        top: '90%',
-        start: 50,
-        end: 100
-    }
-    ],
-    series: [{
-        name: 'Product Demand',
-        type: 'candlestick',
-        data: data0.values,
-        itemStyle: {
-            color: upColor,
-            color0: downColor,
-            borderColor: upBorderColor,
-            borderColor0: downBorderColor
-        },
-        markPoint: {
-            label: {
-                formatter: function(param) {
-                    return param != null ? Math.round(param.value) + '' : '';
-                }
-            },
-            data: [
-                {
-                    name: 'average value on close',
-                    type: 'average',
-                    valueDim: 'close'
-                }
-            ],
-            tooltip: {
-                formatter: function(param) {
-                    return param.name + '<br>' + (param.data.coord || '');
-                }
-            }
-        },
-        markLine: {
-            symbol: ['none', 'none'],
-            data: [
-                [{
-                    name: 'from lowest to highest',
-                    type: 'min',
-                    valueDim: 'lowest',
-                    symbol: 'circle',
-                    symbolSize: 10,
-                    label: {
-                        show: false
-                    },
-                    emphasis: {
-                        label: {
-                            show: false
-                        }
-                    }
-                },
-                {
-                    type: 'max',
-                    valueDim: 'highest',
-                    symbol: 'circle',
-                    symbolSize: 10,
-                    label: {
-                        show: false
-                    },
-                    emphasis: {
-                        label: {
-                            show: false
-                        }
-                    }
-                }
-                ],
-                {
-                    name: 'min line on close',
-                    type: 'min',
-                    valueDim: 'close'
-                }
-            ]
-        }
+        name: 'Predictions',
+        type: 'line',
+        data: [<?=$predection?>]
     }, {
-        name: 'MA5',
+        name: 'Error',
         type: 'line',
-        data: calculateMA(5),
-        smooth: true,
-        lineStyle: {
-            opacity: 0.5
-        }
+        data: [<?=$error?>]
     }
     ]
 };
-
 option && myChart.setOption(option);
 </script>
+
+
 <script src="dist/js/chart2.js" type="text/javascript"> </script>
 <script src="dist/js/chart3.js" type="text/javascript"> </script>
diff --git a/data/result_data_set.php b/data/result_data_set.php
index b9f1daeb455db89c0a72ef479850d563d267334b..2774487034aad7190774f7b5c3b262b13a83b2c5 100644
--- a/data/result_data_set.php
+++ b/data/result_data_set.php
@@ -1,9 +1,11 @@
 <?php
 
+// Fetch the validation result set from the API core
+
 $curl = curl_init();
 
 curl_setopt_array($curl, array(
-    CURLOPT_URL => 'http://127.0.0.1:5000/result_tranning',
+    CURLOPT_URL => 'http://127.0.0.1:5000/validation',
     CURLOPT_RETURNTRANSFER => true,
     CURLOPT_ENCODING => '',
     CURLOPT_MAXREDIRS => 10,
@@ -13,40 +15,7 @@ curl_setopt_array($curl, array(
     CURLOPT_CUSTOMREQUEST => 'GET',
 ));
 
-$training_data = curl_exec($curl);
-
-curl_close($curl);
-
-$arr = json_decode($training_data, true);
-
-$dates = '';
-$order_demands = '';
-$train_set = '';
-
-
-foreach ($arr as $key => $value) {
-    $dates = $dates."'".$value['Date']."',";
-    $order_demands= $order_demands."'".$value['OrderDemand']."',";
-    $train_set = $train_set . "['" . $value['Date'] . "'," . $value['OrderDemand'] . "],";
-
-}
-
-var_dump($train_set);
-
-
-
-$curl = curl_init();
-curl_setopt_array($curl, array(
-    CURLOPT_URL => 'http://127.0.0.1:5000/result_validate',
-    CURLOPT_RETURNTRANSFER => true,
-    CURLOPT_ENCODING => '',
-    CURLOPT_MAXREDIRS => 10,
-    CURLOPT_TIMEOUT => 0,
-    CURLOPT_FOLLOWLOCATION => true,
-    CURLOPT_HTTP_VERSION => CURL_HTTP_VERSION_1_1,
-    CURLOPT_CUSTOMREQUEST => 'GET',
-));
 
 $validate = curl_exec($curl);
 
@@ -55,17 +24,25 @@ curl_close($curl);
 
 $arr_new = json_decode($validate, true);
 
+
 $date_validate = '';
 $order_validate = '';
 $predection = '';
+$error = '';
 
 foreach ($arr_new as $key => $value) {
-    $dates = $dates."'".$value['Date']."',";
-    $date_validate = $date_validate . "'" . $value['Date'] . "',";
-    $predection = $predection . "'" . $value['Predictions'] . "',";
+
+    $date_validate = $date_validate . "'" . $value['Date'] . "',";
+    $predection = $predection . "'" . round($value['Predictions']) . "',";
     $order_validate = $order_validate."'".$value['OrderDemand']."',";
+    $error_val = $value['OrderDemand'] - $value['Predictions'];
+    $error = $error . "'" . round($error_val) . "',";
 }
 
+//==============================================================================
+
+// Fetch warehouse details from database
+
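The PHP above consumes the Flask /validation endpoint row by row and derives the Error series as actual demand minus prediction. For debugging it can help to inspect the same payload directly; a quick sketch assuming the third-party requests package and the field names used in the loop above (Date, Predictions, OrderDemand):

import requests

# Hedged sketch: print the rows the dashboard will chart, plus the error column
rows = requests.get('http://127.0.0.1:5000/validation').json()
for row in rows:
    error = row['OrderDemand'] - row['Predictions']
    print(row['Date'], row['OrderDemand'], round(row['Predictions']), round(error))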