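# Streamlit dashboard for store-item product demand forecasting.
# Two model options are exposed in the sidebar: a Temporal Fusion Transformer (TFT)
# and a Prophet model. For the selected store and item, each branch evaluates the
# pre-trained model on a hold-out window and then rolls out a recursive 30-day
# future forecast, shown alongside the observed demand.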
import streamlit as st
from src.data import StoreDataLoader
from src.model import Model_Load
import matplotlib.pyplot as plt
import seaborn as sns
import plotly.graph_objects as go
from sklearn.metrics import mean_absolute_error,mean_squared_error
import numpy as np
import pandas as pd
from src.prediction import test_prediction,val_prediction,create_week_date_featues
import plotly.express as px
#----------------hide menubar and footer----------------------------------
hide_streamlit_style = """
            <style>
            #MainMenu {visibility: hidden;}
            footer {visibility: hidden;}
            </style>
            """
st.markdown(hide_streamlit_style, unsafe_allow_html=True)
#-------------------------------------------------------------
## Load model object
model_obj=Model_Load()
#--------------------------------------------------------------
def convert_df(df):
    return df.to_csv(index=False).encode('utf-8')
#-----------------------------------------------------------------
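# convert_df() backs the "Download" buttons in the Forecast Table tabs below,
# serving the combined forecast DataFrame as a UTF-8 encoded CSV file.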
## Title of Page
st.markdown("""
    <div style='text-align: center; margin-top:-70px; margin-bottom: -50px; margin-left: -50px;'>
        <h2 style='font-size: 20px; font-family: Courier New, monospace;
                   letter-spacing: 2px; text-decoration: none;'>
            <img src="https://acis.affineanalytics.co.in/assets/images/logo_small.png" alt="logo" width="70" height="30">
            <span style='background: linear-gradient(45deg, #ed4965, #c05aaf);
                         -webkit-background-clip: text;
                         -webkit-text-fill-color: transparent;
                         text-shadow: none;'>
                Product Demand Forecasting Dashboard
            </span>
            <span style='font-size: 40%;'>
                <sup style='position: relative; top: 5px; color: #ed4965;'>by Affine</sup>
            </span>
        </h2>
    </div>
    """, unsafe_allow_html=True)
#---------------------------------------------------------------------------------------------------------------------
# select the model (sidebar)
with st.sidebar:
    st.markdown("""<div style='text-align: left; margin-top:-200px;margin-left:-40px;'>
                <img src="https://affine.ai/wp-content/uploads/2023/05/Affine-Logo.svg" alt="logo" width="300" height="60">
                </div>""", unsafe_allow_html=True)
    option=st.selectbox("Select Model",['TFT','Prophet'])
#------------------------------------------------------------------------------------------------------------
# TFT
if option=='TFT':
    #--------------------------------------------------------------------------------------------------------
    ## TFT data path and load
    path='data/train.csv'
    obj=StoreDataLoader(path)
    train_dataset,test_dataset,training,validation,earliest_time=obj.tft_data()
    print(f"TRAINING ::START DATE ::{train_dataset['date'].min()} :: END DATE ::{train_dataset['date'].max()}")
    print(f"TESTING ::START DATE ::{test_dataset['date'].min()} :: END DATE ::{test_dataset['date'].max()}")
    list_store=train_dataset['store'].unique()
    list_items=train_dataset['item'].unique()
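    # TFT branch flow: load the pre-trained checkpoint, score the hold-out (test)
    # window for the selected store/item, then roll a recursive 30-day forecast by
    # feeding each prediction back in as the next day's observed demand.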
    #---------------------------------------------------------------------------------------------------------
    try:
        # load the pre-trained TFT model
        model=model_obj.store_model_load(option)
        with st.sidebar:
            # st.success('Model Loaded successfully', icon="✅")
            # select the store id
            store=st.selectbox("Select Store ID",list_store)
            # select the item id
            item=st.selectbox("Select Product ID",list_items)
        #--------------------------------------------------------------------------------------------------------------
        ## prediction on testing data
        testing_results=test_prediction(model,train_dataset=train_dataset,test_dataset=test_dataset,
                                        earliest_time=earliest_time,store_id=store,item_id=item)
        # find KPIs
        rmse=np.around(np.sqrt(mean_squared_error(testing_results['Lead_1'],testing_results['prediction'])),2)
        mae=np.around(mean_absolute_error(testing_results['Lead_1'],testing_results['prediction']),2)
        print(f"TEST DATA = Item ID : {item} :: MAE : {mae} :: RMSE : {rmse}")
        #--------------------------------------tft future prediction-------------------------------------------
        final_data=pd.concat([train_dataset,test_dataset])
        consumer_data=final_data.loc[(final_data['store']==store) & (final_data['item']==item)]
        consumer_data.fillna(0,inplace=True)
        date_list=[]
        demand_prediction=[]
        for i in range(30):
            # select the last 150 records as encoder + decoder data
            encoder_data = consumer_data[lambda x: x.days_from_start > x.days_from_start.max() - 150]
            last_data = consumer_data[lambda x: x.days_from_start == x.days_from_start.max()]
            # prediction date and time
            date_list.append(encoder_data.tail(1).iloc[-1,:]['date'])
            # prediction over the decoder horizon
            tft_prediction = model.predict(encoder_data,
                                           mode="prediction",
                                           trainer_kwargs=dict(accelerator="cpu"),
                                           return_x=True)
            # create the next day record
            decoder_data = pd.concat(
                [last_data.assign(date=lambda x: x.date + pd.offsets.DateOffset(i)) for i in range(1, 2)],
                ignore_index=True,
            )
            # find the hours_from_start & days_from_start
            decoder_data["hours_from_start"] = (decoder_data["date"] - earliest_time).dt.seconds / 60 / 60 + (decoder_data["date"] - earliest_time).dt.days * 24
            decoder_data['hours_from_start'] = decoder_data['hours_from_start'].astype('int')
            decoder_data["hours_from_start"] += encoder_data["hours_from_start"].max() + 1 - decoder_data["hours_from_start"].min()
            # add a time index consistent with "data"
            decoder_data["days_from_start"] = (decoder_data["date"] - earliest_time).apply(lambda x:x.days)
            # add the datetime features
            decoder_data=create_week_date_featues(decoder_data,'date')
            # treat the last predicted timestep as the next day's actual demand (enables multi-day forecasting)
            decoder_data['sales']=float(tft_prediction.output[0][-1])
            # append this prediction to the list
            demand_prediction.append(float(tft_prediction.output[0][-1]))
            # update the prediction time_idx
            decoder_data['time_idx']=int(tft_prediction.x['decoder_time_idx'][0][-1])
            # add the next day record to the original data
            consumer_data=pd.concat([consumer_data,decoder_data])
            # find the lags and update
            consumer_data['lag_1']=consumer_data['sales'].shift(1)
            consumer_data['lag_5']=consumer_data['sales'].shift(5)
            # reset the index
            consumer_data=consumer_data.reset_index(drop=True)
        # forecast values for the next 30 days/timesteps
        d2=pd.DataFrame({"date":date_list,"prediction":demand_prediction})[['date','prediction']]
        # update the store and item ids
        d2['store']=store
        d2['item']=item
        #----------------------------TFT and Prophet model KPI----------------------------------------
        with st.sidebar:
            st.markdown(f"""
                <style>
                /* Sidebar header style */
                .sidebar-header {{
                    padding: 1px;
                    background-color: #9966FF;
                    text-align: center;
                    font-size: 13px;
                    font-weight: bold;
                    color: #FFF;
                }}
                </style>
                <div class="sidebar-header">
                    Model Evaluation
                </div>
                """,unsafe_allow_html=True)
            st.dataframe(pd.DataFrame({"KPI":['RMSE','MAE'],"TFT":[7.73,6.17],"Prophet":[7.32,6.01]}).set_index('KPI'),width=300)
            # d2=pd.DataFrame({"KPI":['RMSE','MAE','RMSE','MAE'],"model":['TFT','TFT','Prophet','Prophet'],"Score":[7.73,6.17,7.32,6.01]})
            # fig = px.bar(d2, x="KPI", y="Score",
            #              color='model', barmode='group',
            #              height=200,width=300,text_auto=True,)
            # st.plotly_chart(fig)
            #------------------------------------per-item TFT KPI---------------------------------------------------------
            st.markdown(f"""
                <style>
                /* Sidebar header style */
                .sidebar-header {{
                    padding: 3px;
                    background: linear-gradient(45deg, #ed4965, #c05aaf);
                    text-align: center;
                    font-size: 13px;
                    font-weight: bold;
                    color: #FFF;
                }}
                </style>
                <div class="sidebar-header">
                    KPI :: {item}
                </div>
                """,unsafe_allow_html=True)
            st.dataframe(pd.DataFrame({"KPI":['RMSE','MAE'],"TFT":[rmse,mae]}).set_index('KPI'),width=300)
        #--------------------------------------------------------------------------------------------------------------
        # tabs
        tab1,tab2=st.tabs(['📈Forecast Plot','🗃Forecast Table'])  #tab3-'🗃Actual Table'
        #------------------------------------------------Tab-1-----------------------------------------------------------
        tab1.markdown("""
            <div style='text-align: left; margin-top:-10px;margin-bottom:-10px;'>
                <h2 style='font-size: 30px; font-family: Palatino, serif;
                           letter-spacing: 2px; text-decoration: none;'>
                📈
                <span style='background: linear-gradient(45deg, #ed4965, #c05aaf);
                             -webkit-background-clip: text;
                             -webkit-text-fill-color: transparent;
                             text-shadow: none;'>
                    Forecast Plot
                </span>
                <span style='font-size: 40%;'>
                    <sup style='position: relative; top: 5px; color: #ed4965;'></sup>
                </span>
                </h2>
            </div>
            """, unsafe_allow_html=True)
        # round the prediction column and keep only the date part
        testing_results['prediction']=testing_results['prediction'].apply(lambda x:round(x))
        testing_results['date']=testing_results['date'].dt.date
        d2['prediction']=d2['prediction'].apply(lambda x:round(x))
        d2['date']=d2['date'].dt.date
        # training_data=train_dataset.loc[(train_dataset['store']==store)&(train_dataset['item']==item)][['date','Lead_1']].iloc[-60:,:]
        #---------------------------------------------forecast plot---------------------------------------------
        fig = go.Figure([
            # go.Scatter(x=training_data['date'],y=training_data['Lead_1'],name='Train Observed',line=dict(color='rgba(50, 205, 50, 0.7)')),
            # go.Scatter(x=y_train_pred['ds'],y=y_train_pred['yhat'],name='Prophet Pred.(10 Item)',line=dict(color='blue', dash='dot')),
            go.Scatter(x=testing_results['date'], y=testing_results['Lead_1'],name='Observed',line=dict(color='rgba(218, 112, 214, 0.5)')),
            go.Scatter(x=testing_results['date'],y=testing_results['prediction'],name='Historical Forecast',line=dict(color='#9400D3', dash='dash')),
            go.Scatter(x=d2['date'],y=d2['prediction'],name='Future Forecast',line=dict(color='darkorange', dash='dot'))])
        fig.update_layout(
            xaxis_title='Date',
            yaxis_title='Order Demand',
            margin=dict(l=0, r=0, t=50, b=0),
            xaxis=dict(title_font=dict(size=20)),
            yaxis=dict(title_font=dict(size=20)))
        fig.update_layout(width=700,height=400)
        tab1.plotly_chart(fig)
        #----------------------------------------------Tab-2------------------------------------------------------------
        tab2.markdown("""
            <div style='text-align: left; margin-top:-10px;'>
                <h2 style='font-size: 30px; font-family: Palatino, serif;
                           letter-spacing: 2px; text-decoration: none;'>
                📃
                <span style='background: linear-gradient(45deg, #ed4965, #c05aaf);
                             -webkit-background-clip: text;
                             -webkit-text-fill-color: transparent;
                             text-shadow: none;'>
                    Forecast Table
                </span>
                <span style='font-size: 40%;'>
                    <sup style='position: relative; top: 5px; color: #ed4965;'></sup>
                </span>
                </h2>
            </div>
            """, unsafe_allow_html=True)
        final_r=pd.concat([d2[['date','store','item','prediction']],testing_results[['date','store','item','prediction']]]).sort_values('date').drop_duplicates().reset_index(drop=True)
        csv = convert_df(final_r)
        tab2.dataframe(final_r,width=500)
        tab2.download_button(
            "Download",
            csv,
            "file.csv",
            "text/csv",
            key='download-csv'
        )
        #--------------------------------Tab-3----------------------------------------------
        # tab3.markdown("""
        #     <div style='text-align: left; margin-top:-10px;margin-bottom:-10px;'>
        #         <h2 style='font-size: 30px; font-family: Palatino, serif;
        #         letter-spacing: 2px; text-decoration: none;'>
        #         📈
        #         <span style='background: linear-gradient(45deg, #ed4965, #c05aaf);
        #             -webkit-background-clip: text;
        #             -webkit-text-fill-color: transparent;
        #             text-shadow: none;'>
        #             Actual Dataset
        #         </span>
        #         <span style='font-size: 40%;'>
        #             <sup style='position: relative; top: 5px; color: #ed4965;'></sup>
        #         </span>
        #         </h2>
        #     </div>
        #     """, unsafe_allow_html=True)
        # train_a=train_dataset.loc[(train_dataset['store']==store) & (train_dataset['item']==item)][['date','store','item','sales']]
        # test_a=test_dataset.loc[(test_dataset['store']==store) & (test_dataset['item']==item)][['date','store','item','sales']]
        # actual_final_data=pd.concat([train_a,test_a])
        # actual_final_data['date']=actual_final_data['date'].dt.date
        # tab3.dataframe(actual_final_data,width=500)
    except Exception:
        st.sidebar.error('Model Not Loaded successfully!',icon="🚨")
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
elif option=='Prophet':
    print("prophet")
    #---------------------------------------------------Data----------------------------------------------------
    # Prophet data
    path='data/train.csv'
    obj=StoreDataLoader(path)
    fb_train_data,fb_test_data,item_dummay,store_dummay=obj.fb_data()
    # st.write(fb_train_data.columns)
    # st.write(fb_test_data.columns)
    # print(fb_test_data.columns)
    print(f"TRAINING ::START DATE ::{fb_train_data['ds'].min()} :: END DATE ::{fb_train_data['ds'].max()}")
    print(f"TESTING ::START DATE ::{fb_test_data['ds'].min()} :: END DATE ::{fb_test_data['ds'].max()}")
    train_new=fb_train_data.drop('y',axis=1)
    test_new=fb_test_data.drop('y',axis=1)
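    # The Prophet branch selects rows through one-hot indicator columns for store and
    # item (e.g. test_new[item] == 1). The fitted model is assumed to treat these
    # indicators, the lag columns, and the calendar features from
    # create_week_date_featues as extra regressors.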
    #----------------------------------------------model Load----------------------------------------------------
    try:
        fb_model=model_obj.store_model_load(option)
        # with st.sidebar:
        #     st.success('Model Loaded successfully', icon="✅")
        #-------------------------------------select store & item---------------------------------------------------
        list_items=item_dummay.columns
        list_store=store_dummay.columns
        with st.sidebar:
            store=st.selectbox("Select Store",list_store)
            item=st.selectbox("Select Product",list_items)
        #------------------------------------------prediction---------------------------------------------------------------
        fb_test_prediction=fb_model.predict(test_new.loc[test_new[item]==1])
        train_prediction=fb_model.predict(train_new.loc[train_new[item]==1])
        y_true_test=fb_test_data.loc[fb_test_data[item]==1]
        y_true_train=fb_train_data.loc[fb_train_data[item]==1]
        y_train_pred=train_prediction[['ds','yhat']].iloc[-60:,:]
        y_train_true=y_true_train[['ds','y']].iloc[-60:,:]
        y_test_pred=fb_test_prediction[['ds','yhat']]
        y_test_true=y_true_test[['ds','y']]
        #----------------------------------------KPI---------------------------------------------------------------
        rmse=np.sqrt(mean_squared_error(y_test_true['y'],y_test_pred['yhat']))
        mae=mean_absolute_error(y_test_true['y'],y_test_pred['yhat'])
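        # The next block rolls a recursive 30-day forecast, analogous to the TFT branch:
        # predict one step from the latest record, append the prediction back into the
        # history as the next day's demand, and recompute the lag features before the
        # next prediction.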
        #---------------------------------future prediction---------------------------------------
        fb_final=pd.concat([fb_train_data,fb_test_data])
        # extract the data for the selected store and item
        fb_consumer=fb_final.loc[(fb_final[store]==1) & (fb_final[item]==1)]
        # lists of dates and predictions
        date_list=[]
        prediction_list=[]
        # predict the next 30 days of product demand
        for i in range(30):
            # take the most recent record and drop its target column
            next_prediction=fb_consumer.tail(1).drop('y',axis=1)
            # predict the next timestep demand from the remaining feature values
            prediction=fb_model.predict(next_prediction)
            # append date and predicted demand
            date_list.append(prediction['ds'][0])          ## datetime of the prediction
            prediction_list.append(prediction['yhat'][0])  ## next timestep prediction
            #--------------------------simulate the next timestep record-------------------------------------------------------------
            last_data = fb_consumer[lambda x: x.ds == x.ds.max()]  # last date present in the data
            # next timestep
            decoder_data = pd.concat(
                [last_data.assign(ds=lambda x: x.ds + pd.offsets.DateOffset(i)) for i in range(1, 2)],
                ignore_index=True,
            )
            # update the datetime covariates for the next timestep
            decoder_data=create_week_date_featues(decoder_data,'ds')
            # treat the predicted demand as the actual value (used for further future timestep predictions)
            decoder_data['sales']=prediction['yhat'][0]
            # append the next record to the original data
            fb_consumer=pd.concat([fb_consumer,decoder_data])
            # recompute the sales lags and update the dataset
            fb_consumer['lag_1']=fb_consumer['sales'].shift(1)
            fb_consumer['lag_5']=fb_consumer['sales'].shift(5)
            fb_consumer=fb_consumer.reset_index(drop=True)  # reset the index
        future_prediction=pd.DataFrame({"ds":date_list,"yhat":prediction_list})
        future_prediction['store']=store
        future_prediction['item']=item
        with st.sidebar:
            st.markdown(f"""
                <style>
                /* Sidebar header style */
                .sidebar-header {{
                    padding: 1px;
                    background-color: #9966FF;
                    text-align: center;
                    font-size: 13px;
                    font-weight: bold;
                    color: #FFF;
                }}
                </style>
                <div class="sidebar-header">
                    Model Evaluation
                </div>
                """,unsafe_allow_html=True)
            st.dataframe(pd.DataFrame({"KPI":['RMSE','MAE'],"TFT":[7.73,6.17],"Prophet":[7.32,6.01]}).set_index('KPI'),width=300)
            st.markdown(f"""
                <style>
                /* Sidebar header style */
                .sidebar-header {{
                    padding: 3px;
                    background: linear-gradient(45deg, #ed4965, #c05aaf);
                    text-align: center;
                    font-size: 13px;
                    font-weight: bold;
                    color: #FFF;
                }}
                </style>
                <div class="sidebar-header">
                    KPI :: {item}
                </div>
                """,unsafe_allow_html=True)
            st.dataframe(pd.DataFrame({"KPI":['RMSE','MAE'],"Prophet":[rmse,mae]}).set_index('KPI'),width=300)
        #---------------------------------------Tabs-----------------------------------------------------------------------
        tab1,tab2=st.tabs(['📈Forecast Plot','🗃Forecast Table'])  #tab3- '🗃Actual Table'
        #-------------------------------------------Tab-1=Forecast plot---------------------------------------------------
        tab1.markdown("""
            <div style='text-align: left; margin-top:-10px;margin-bottom:-10px;'>
                <h2 style='font-size: 30px; font-family: Palatino, serif;
                           letter-spacing: 2px; text-decoration: none;'>
                📈
                <span style='background: linear-gradient(45deg, #ed4965, #c05aaf);
                             -webkit-background-clip: text;
                             -webkit-text-fill-color: transparent;
                             text-shadow: none;'>
                    Forecast Plot
                </span>
                <span style='font-size: 40%;'>
                    <sup style='position: relative; top: 5px; color: #ed4965;'></sup>
                </span>
                </h2>
            </div>
            """, unsafe_allow_html=True)
        ## round the figures and keep only the date part
        y_train_true['y']=y_train_true['y'].astype('int')
        y_train_pred['yhat']=y_train_pred['yhat'].astype('int')
        y_test_true['y']=y_test_true['y'].astype('int')
        y_test_pred['yhat']=y_test_pred['yhat'].astype('int')
        future_prediction['yhat']=future_prediction['yhat'].astype('int')
        y_train_true['ds']=y_train_true['ds'].dt.date
        y_train_pred['ds']=y_train_pred['ds'].dt.date
        y_test_true['ds']=y_test_true['ds'].dt.date
        y_test_pred['ds']=y_test_pred['ds'].dt.date
        future_prediction['ds']=future_prediction['ds'].dt.date
        #-----------------------------plot---------------------------------------------------------------------------------------------
        fig = go.Figure([
            # go.Scatter(x=y_train_true['ds'],y=y_train_true['y'],name='Train Observed',line=dict(color='rgba(50, 205, 50, 0.7)' )),
            # go.Scatter(x=y_train_pred['ds'],y=y_train_pred['yhat'],name='Prophet Pred.(10 Item)',line=dict(color='#32CD32', dash='dot')),
            go.Scatter(x=y_test_true['ds'], y=y_test_true['y'],name='Observed',line=dict(color='rgba(218, 112, 214, 0.5)')),
            go.Scatter(x=y_test_pred['ds'],y=y_test_pred['yhat'],name='Historical Forecast',line=dict(color='#9400D3', dash='dash')),
            go.Scatter(x=future_prediction['ds'],y=future_prediction['yhat'],name='Future Forecast',line=dict(color='darkorange', dash='dot'))])
        fig.update_layout(
            xaxis_title='Date',
            yaxis_title='Order Demand',
            margin=dict(l=0, r=0, t=50, b=0),
            xaxis=dict(title_font=dict(size=20)),
            yaxis=dict(title_font=dict(size=20)))
        fig.update_layout(width=700,height=400)
        tab1.plotly_chart(fig)
        #----------------------------------------Tab-2------------------------------------------------------------
        results=y_test_pred.reset_index()
        results['store']=store
        results['item']=item
        tab2.markdown("""
            <div style='text-align: left; margin-top:-10px;'>
                <h2 style='font-size: 30px; font-family: Palatino, serif;
                           letter-spacing: 2px; text-decoration: none;'>
                📃
                <span style='background: linear-gradient(45deg, #ed4965, #c05aaf);
                             -webkit-background-clip: text;
                             -webkit-text-fill-color: transparent;
                             text-shadow: none;'>
                    Forecast Table
                </span>
                <span style='font-size: 40%;'>
                    <sup style='position: relative; top: 5px; color: #ed4965;'></sup>
                </span>
                </h2>
            </div>
            """, unsafe_allow_html=True)
        final_r=pd.concat([future_prediction[['ds','store','item','yhat']],results[['ds','store','item','yhat']]]).sort_values('ds').drop_duplicates().reset_index(drop=True)
        csv = convert_df(final_r)
        tab2.dataframe(final_r,width=500)
        tab2.download_button(
            "Download",
            csv,
            "file.csv",
            "text/csv",
            key='download-csv'
        )
        #------------------------------------------Tab-3--------------------------------------------------
        # train_a=fb_train_data.loc[fb_train_data[item]==1][['ds','sales']]
        # # train_a['store']=1
        # # train_a['item']=item
        # test_a=fb_test_data.loc[fb_test_data[item]==1][['ds','sales']]
        # # test_a['store']=1
        # # test_a['item']=item.split('_')[-1]
        # actual_final_data=pd.concat([train_a,test_a])
        # actual_final_data['store']=1
        # actual_final_data['item']=item.split('_')[-1]
        # actual_final_data['ds']=actual_final_data['ds'].dt.date
        # actual_final_data.rename({"ds":'date'},inplace=True)
        # tab3.dataframe(actual_final_data[['date','store','item','sales']],width=500)
    except Exception:
        st.sidebar.error('Model Not Loaded successfully!',icon="🚨")