From 2e22f60c2e226d7262240c75d81513106942d603 Mon Sep 17 00:00:00 2001
From: trian-gles <69212477+trian-gles@users.noreply.github.com>
Date: Fri, 28 Jun 2024 09:13:47 +0200
Subject: [PATCH] relu activation

---
 tf.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tf.js b/tf.js
index 9d60fcc..d0eabfe 100644
--- a/tf.js
+++ b/tf.js
@@ -12,7 +12,7 @@
 var model = tf.sequential();
-model.add(tf.layers.dense({units: hiddenSize, inputShape: [inputShape]}));
+model.add(tf.layers.dense({units: hiddenSize, inputShape: [inputShape], activation: 'relu'}));
 model.add(tf.layers.dense({units: outputShape}));
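
For context, a minimal runnable sketch of the model as it stands after this patch. The values of inputShape, hiddenSize, and outputShape below are assumed example values (they are defined elsewhere in tf.js and do not appear in this diff), and the require() line assumes a Node.js setup with the @tensorflow/tfjs package installed:

// Sketch of the two-layer model from tf.js after this patch.
// inputShape, hiddenSize, and outputShape are assumed example
// values; the real definitions live elsewhere in the file.
const tf = require('@tensorflow/tfjs');

const inputShape = 4;
const hiddenSize = 16;
const outputShape = 2;

var model = tf.sequential();
// The hidden layer now applies ReLU, so the two stacked dense
// layers no longer collapse into a single linear transformation.
model.add(tf.layers.dense({units: hiddenSize, inputShape: [inputShape], activation: 'relu'}));
model.add(tf.layers.dense({units: outputShape}));

// Sanity check: run a dummy batch through the model.
model.predict(tf.zeros([1, inputShape])).print();

Without an activation on the hidden layer, the composition of two dense layers is itself just an affine map; 'relu' is one of the string names tf.layers.dense accepts for its activation option.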