Kerastuner: "ValueError: not a legal parameter" when I use an LSTM network, but Dense layers work fine

I run into a "ValueError: not a legal parameter" error when I use an LSTM. However, if I only use Dense layers, it works fine.

The error occurs when the LSTM is used:

from keras.models import Sequential
from keras.layers import Dense, Dropout, LSTM
from keras.optimizers import Adam, SGD
from keras.wrappers.scikit_learn import KerasRegressor
from sklearn.model_selection import GridSearchCV

def model_builder(hp_units1=40):
    # Two stacked LSTM layers with dropout, followed by a single-unit output layer.
    model = Sequential()
    model.add(LSTM(units=hp_units1, return_sequences=True, input_shape=(X.shape[1], 1)))
    model.add(Dropout(0.2))
    model.add(LSTM(units=40, return_sequences=True))
    model.add(Dropout(0.2))
    model.add(Dense(units=1))
    # hp_learning_rate and hp_momentum here refer to the global lists defined
    # below, not to arguments of this function.
    optimizer = Adam(learning_rate=hp_learning_rate, momentum=hp_momentum)
    model.compile(optimizer=optimizer, loss='mean_squared_error', metrics=['accuracy'])
    return model

# Candidate hyperparameter values for the grid search.
hp_units1 = [30 + i * 5 for i in range(5)]
hp_learning_rate = [0.01, 0.001, 0.0001, 0.00001]
hp_momentum = [0.0, 0.2, 0.4, 0.6, 0.8, 0.9]
param_grid = dict(units1=hp_units1, learning_rate=hp_learning_rate, momentum=hp_momentum)

model = KerasRegressor(build_fn=model_builder, nb_epoch=100, batch_size=32, verbose=0)
grid = GridSearchCV(estimator=model, param_grid=param_grid, n_jobs=-1)
grid_result = grid.fit(X, Y)
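
For reference, below is a minimal sketch of the same search written so that every key in param_grid appears as a keyword argument of the build function, following the pattern of the working Dense example further down. The function name model_builder_matched is made up for illustration, the argument names units1, learning_rate and momentum are simply copied from the grid keys above, and SGD is used instead of Adam because Keras' Adam optimizer does not take a momentum argument. This is only a sketch, not necessarily the intended model.

# Sketch: a build_fn whose keyword arguments mirror the param_grid keys.
def model_builder_matched(units1=40, learning_rate=0.001, momentum=0.0):
    model = Sequential()
    # X is assumed to be reshaped to (samples, timesteps, 1) before fitting.
    model.add(LSTM(units=units1, return_sequences=True, input_shape=(X.shape[1], 1)))
    model.add(Dropout(0.2))
    model.add(LSTM(units=40))  # last LSTM returns a single vector per sample
    model.add(Dropout(0.2))
    model.add(Dense(units=1))
    model.compile(optimizer=SGD(lr=learning_rate, momentum=momentum),
                  loss='mean_squared_error')
    return model

model = KerasRegressor(build_fn=model_builder_matched, nb_epoch=100, batch_size=32, verbose=0)
grid = GridSearchCV(estimator=model, param_grid=param_grid, n_jobs=-1)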

However, when only Dense layers are used, it works fine:

def baseline_model(learn_rate=0.01, momentum=0):  # (optimizer = 'adam')
    model = Sequential()
    n_features = X.shape[1]  # number of input features
    model.add(Dense(5, input_shape=(n_features,), kernel_initializer='normal', activation='relu'))
    model.add(Dense(4, kernel_initializer='normal', activation='linear'))
    model.add(Dense(1))
    optimizer = SGD(lr=learn_rate, momentum=momentum)
    model.compile(loss='mean_squared_error', optimizer=optimizer, metrics=['accuracy'])
    return model

model = KerasRegressor(build_fn=baseline_model, nb_epoch=100, batch_size=10, verbose=0)

learn_rate = [0.001, 0.01, 0.1, 0.2, 0.3]
momentum = [0.0, 0.2, 0.4, 0.6, 0.8, 0.9]
param_grid = dict(learn_rate=learn_rate, momentum=momentum)

grid = GridSearchCV(estimator=model, param_grid=param_grid, n_jobs=-1)
grid_result = grid.fit(X, Y)
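
Once a search finishes, the result can be inspected through the standard scikit-learn GridSearchCV attributes, for example:

# Summarise the finished grid search (standard GridSearchCV attributes).
print("Best: %f using %s" % (grid_result.best_score_, grid_result.best_params_))
for mean, std, params in zip(grid_result.cv_results_['mean_test_score'],
                             grid_result.cv_results_['std_test_score'],
                             grid_result.cv_results_['params']):
    print("%f (%f) with: %r" % (mean, std, params))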