diff --git a/RNN - Multilabel.ipynb b/RNN - Multilabel.ipynb
index 947c180..e671e5e 100644
--- a/RNN - Multilabel.ipynb
+++ b/RNN - Multilabel.ipynb
@@ -186,7 +186,7 @@
 "    output, state = tf.nn.dynamic_rnn(multi_layer_cell, x, dtype = tf.float32)\n",
 "    output_flattened = tf.reshape(output, [-1, n_hidden])\n",
 "    output_logits = tf.add(tf.matmul(output_flattened,weight),bias)\n",
-"    output_all = tf.nn.sigmoid(output_logits)\n",
+"    output_all = output_logits\n",
 "    output_reshaped = tf.reshape(output_all,[-1,n_steps,n_classes])\n",
 "    output_last = tf.gather(tf.transpose(output_reshaped,[1,0,2]), n_steps - 1) \n",
 "    #output = tf.transpose(output, [1, 0, 2])\n",
@@ -226,8 +226,8 @@
 "outputs": [],
 "source": [
 "#all_steps_cost=tf.reduce_mean(-tf.reduce_mean((y_steps * tf.log(y_all))+(1 - y_steps) * tf.log(1 - y_all),reduction_indices=1))\n",
-"all_steps_cost = -tf.reduce_mean((y_steps * tf.log(y_all)) + (1 - y_steps) * tf.log(1 - y_all))\n",
-"last_step_cost = -tf.reduce_mean((y * tf.log(y_last)) + ((1 - y) * tf.log(1 - y_last)))\n",
+"all_steps_cost = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=y_all, labels=y_steps))\n",
+"last_step_cost = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=y_last, labels=y))\n",
 "loss_function = (alpha * all_steps_cost) + ((1 - alpha) * last_step_cost)\n",
 "\n",
 "optimizer = tf.train.AdamOptimizer(learning_rate = learning_rate).minimize(loss_function)"
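
The diff replaces the hand-rolled cross-entropy on sigmoid outputs with the fused `tf.nn.sigmoid_cross_entropy_with_logits` op, which is why `output_all` now carries raw logits instead of sigmoid activations. The fused op evaluates `max(z, 0) - z*t + log(1 + exp(-|z|))`, which stays finite even when the sigmoid saturates, whereas `tf.log(1 - tf.nn.sigmoid(z))` hits `log(0)` for large `z`. Below is a minimal sketch of the difference in TF 1.x graph mode; the logits and labels are made up for illustration and are not taken from the notebook:

```python
import tensorflow as tf  # TF 1.x, matching the notebook's graph-mode API

# Illustrative values only; not from the notebook.
logits = tf.constant([[-20.0, 0.0, 20.0]])  # large magnitudes saturate the sigmoid
labels = tf.constant([[1.0, 1.0, 0.0]])     # last entry: confident wrong prediction

# Hand-rolled version: sigmoid(20.0) rounds to 1.0 in float32, so
# tf.log(1 - probs) evaluates log(0) and the loss blows up to inf.
probs = tf.nn.sigmoid(logits)
manual = -(labels * tf.log(probs) + (1.0 - labels) * tf.log(1.0 - probs))

# Fused version: computes max(z, 0) - z*t + log(1 + exp(-|z|)) per element,
# which is finite for any logit z and label t.
fused = tf.nn.sigmoid_cross_entropy_with_logits(labels=labels, logits=logits)

with tf.Session() as sess:
    print(sess.run(manual))  # approx [[20.  0.693  inf]]
    print(sess.run(fused))   # approx [[20.  0.693  20.]]
```

Note that the fused op already returns a nonnegative per-element loss, so its `tf.reduce_mean` is minimized directly, with no leading minus sign.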