Example code for a regression model with multiple layers and multiple inputs, built with the Keras functional API. In addition to the input fed to the first layer, further inputs are concatenated into later layers.
Prepare data
import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
# create range of monthly dates
download_dates = pd.date_range(start='2019-01-01', end='2020-01-01', freq='MS')
# URL from Chrome DevTools Console
base_url = ("https://climate.weather.gc.ca/climate_data/bulk_data_e.html?format=csv&"
"stationID=51442&Year={}&Month={}&Day=7&timeframe=1&submit=Download+Data") # add format option to year and month
# create list of remote URL from base URL
list_of_url = [base_url.format(date.year, date.month) for date in download_dates]
# download and combine multiple files into one DataFrame
df = pd.concat((pd.read_csv(url) for url in list_of_url))
# drop constant columns, then columns that are mostly missing
data = df.loc[:, (df != df.iloc[0]).any()]
data = data[data.columns[data.isnull().mean() < 0.9]]
# keep only the variables used in the model
keepcolumns = ['Temp (°C)', 'Dew Point Temp (°C)', 'Rel Hum (%)', 'Stn Press (kPa)', 'Wind Spd (km/h)']
data = data[keepcolumns]
# drop rows with missing observations; StandardScaler cannot handle NaNs
data = data.dropna()
predictors = ['Temp (°C)', 'Stn Press (kPa)', 'Dew Point Temp (°C)', 'Rel Hum (%)']
predictand = ['Wind Spd (km/h)']
X = data[predictors].values
y = data[predictand].values
# split into train and test sets, then standardize predictors and predictand separately
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.30, random_state=40)
transformer = StandardScaler().fit(X_train)
X_train_t = transformer.transform(X_train)
X_test_t = transformer.transform(X_test)
ytransformer = StandardScaler().fit(y_train)
y_train_t = ytransformer.transform(y_train)
y_test_t = ytransformer.transform(y_test)
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.layers import Dense, Dropout
# first input branch: temperature and station pressure
input_X0 = keras.Input(shape=(2,), name='Tas and Ps')
X = Dense(32, activation='relu', name='Dense32')(input_X0)
X = Dropout(0.3, name='Dropout_Dense32')(X)
X = Dense(16, activation='relu', name='Dense16')(X)
X = Dropout(0.1, name='Dropout_Dense16')(X)
X = Dense(8, activation='relu', name='Dense8')(X)
X = Dropout(0.1, name='Dropout_Dense8')(X)
# second input: dew point and relative humidity, added further down the network
input_X1 = keras.Input(shape=(2,), name='Td and Hur')
# combine the two subsets of inputs for the next step
X = keras.layers.concatenate([X, input_X1], name='Combined')
# layer names must be unique within a model, so the second stack gets its own names
X = Dense(16, activation='relu', name='Dense16_2')(X)
X = Dropout(0.1, name='Dropout_Dense16_2')(X)
X = Dense(8, activation='relu', name='Dense8_2')(X)
X = Dropout(0.1, name='Dropout_Dense8_2')(X)
# linear output for regression; a sigmoid would squash the standardized target into (0, 1)
output = Dense(1, activation='linear', name='Final_Output')(X)
model = keras.Model(
    inputs=[input_X0, input_X1],
    outputs=output
)
model.summary()
# compile and fit the network (alternative loss functions left commented out)
model.compile(loss='mae', optimizer='adam')
#model.compile(loss='mse', optimizer='adam')
#model.compile(loss=tf.keras.losses.MeanSquaredLogarithmicError(), optimizer='adam')
history = model.fit(
    {'Tas and Ps': X_train_t[:, :2], 'Td and Hur': X_train_t[:, 2:]},
    {'Final_Output': y_train_t},
    epochs=10,
    batch_size=64,
    validation_data=({'Tas and Ps': X_test_t[:, :2], 'Td and Hur': X_test_t[:, 2:]},
                     {'Final_Output': y_test_t}),
    verbose=2)
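The returned history object holds the training and validation loss per epoch; a minimal sketch of a loss plot, assuming matplotlib is available:
import matplotlib.pyplot as plt
# plot training and validation loss over the epochs
plt.plot(history.history['loss'], label='train')
plt.plot(history.history['val_loss'], label='validation')
plt.xlabel('epoch')
plt.ylabel('MAE loss')
plt.legend()
plt.show()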
model.save("model_regression")
keras.utils.plot_model(model, "test.png", show_shapes=True)
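The saved model can be reloaded later; a minimal sketch, assuming the SavedModel directory written above (the variable name reloaded is just for illustration):
# reload the trained model from the SavedModel directory
reloaded = keras.models.load_model("model_regression")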
from sklearn.metrics import mean_squared_error
pred = model.predict({'Tas and Ps': X_test_t[:, :2], 'Td and Hur': X_test_t[:, 2:]})
# RMSE on the standardized scale
print(np.sqrt(mean_squared_error(y_test_t, pred)))
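The RMSE above is on the standardized scale; to express it in the original units (km/h), the y scaling can be inverted. A minimal sketch using the ytransformer fitted earlier:
# convert predictions back to km/h and compute RMSE on the original scale
pred_kmh = ytransformer.inverse_transform(pred)
print(np.sqrt(mean_squared_error(y_test, pred_kmh)))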