@@ -265,12 +265,12 @@ class2ind <- function (x, drop2nd = FALSE) {
 #' @param x A data frame or matrix of predictors
 #' @param y A vector (factor or numeric) or matrix (numeric) of outcome data.
 #' @param hidden_units An integer for the number of hidden units.
-#' @param decay A non-negative real number for the amount of weight decay. Either
+#' @param penalty A non-negative real number for the amount of weight decay. Either
 #' this parameter _or_ `dropout` can be specified.
 #' @param dropout The proportion of parameters to set to zero. Either
-#' this parameter _or_ `decay` can be specified.
+#' this parameter _or_ `penalty` can be specified.
 #' @param epochs An integer for the number of passes through the data.
-#' @param act A character string for the type of activation function between layers.
+#' @param activation A character string for the type of activation function between layers.
 #' @param seeds A vector of three positive integers to control randomness of the
 #' calculations.
 #' @param ... Currently ignored.
@@ -279,11 +279,11 @@ class2ind <- function (x, drop2nd = FALSE) {
 #' @export
 keras_mlp <-
   function(x, y,
-           hidden_units = 5, decay = 0, dropout = 0, epochs = 20, act = "softmax",
+           hidden_units = 5, penalty = 0, dropout = 0, epochs = 20, activation = "softmax",
            seeds = sample.int(10^5, size = 3),
            ...) {
 
-    if (decay > 0 & dropout > 0) {
+    if (penalty > 0 & dropout > 0) {
       stop("Please use either dropout or weight decay.", call. = FALSE)
     }
     if (!is.matrix(x)) {
@@ -307,20 +307,20 @@ keras_mlp <-
 
     model <- keras::keras_model_sequential()
 
-    if (decay > 0) {
+    if (penalty > 0) {
       model %>%
         keras::layer_dense(
           units = hidden_units,
-          activation = act,
+          activation = activation,
           input_shape = ncol(x),
-          kernel_regularizer = keras::regularizer_l2(decay),
+          kernel_regularizer = keras::regularizer_l2(penalty),
           kernel_initializer = keras::initializer_glorot_uniform(seed = seeds[1])
         )
     } else {
       model %>%
         keras::layer_dense(
           units = hidden_units,
-          activation = act,
+          activation = activation,
           input_shape = ncol(x),
           kernel_initializer = keras::initializer_glorot_uniform(seed = seeds[1])
         )
@@ -330,7 +330,7 @@ keras_mlp <-
       model %>%
         keras::layer_dense(
           units = hidden_units,
-          activation = act,
+          activation = activation,
           input_shape = ncol(x),
           kernel_initializer = keras::initializer_glorot_uniform(seed = seeds[1])
         ) %>%
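As context for the renamed arguments, here is a minimal usage sketch (not part of the commit). It assumes the package exporting `keras_mlp()` is attached and that the keras R package has a working TensorFlow backend; the data and hyperparameter values are purely illustrative.

```r
# Minimal sketch, not from the commit: assumes keras_mlp() is available and
# that keras/TensorFlow are installed locally.
x <- as.matrix(mtcars[, c("disp", "hp", "wt")])
y <- mtcars$mpg

# Request L2 weight decay via `penalty`; `penalty` and `dropout` are mutually
# exclusive, so `dropout` is left at its default of 0 here.
fit <- keras_mlp(
  x, y,
  hidden_units = 8,
  penalty = 0.001,
  epochs = 20,
  activation = "relu"
)
```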