From 12cb3256a829ffc0db99407a94b49c827cb26585 Mon Sep 17 00:00:00 2001
From: MrSB21 <borudesuraj.0705@gmail.com>
Date: Mon, 26 May 2025 12:46:26 +0100
Subject: [PATCH] Final changes

---
 .../regression/Decision_tree_regression(Small Dataset).ipynb | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/notebooks/regression/Decision_tree_regression(Small Dataset).ipynb b/notebooks/regression/Decision_tree_regression(Small Dataset).ipynb
index af4c055..46ba9c6 100644
--- a/notebooks/regression/Decision_tree_regression(Small Dataset).ipynb
+++ b/notebooks/regression/Decision_tree_regression(Small Dataset).ipynb
@@ -1407,7 +1407,7 @@
    "id": "816b4ae7",
    "metadata": {},
    "source": [
-    "- With these parameters, the model's performance improved significantly. This means the model now explains over 44% of the variance in productivity, more than twice as much as the default model. The improvements can be attributed to regularization, better choice of splitting criteria (poisson), and smarter structural constraints (e.g., limiting depth and number of leaves). The model is now better at generalizing to unseen data, and errors have decreased both in absolute (MAE) and squared (RMSE) terms."
+    "- With these parameters, the model's performance improved significantly. This means the model now explains over 44% of the variance in productivity, more than twice as much as the default model. The improvements can be attributed to regularization, better choice of splitting criteria (poisson), and smarter structural constraints (e.g., limiting depth and number of leaves). The model is now better at generalizing to unseen data, and errors have decreased both in absolute (MAE) and squared (MSE) terms."
    ]
   },
   {
@@ -1450,8 +1450,7 @@
    "plt.title('Tuned Decision Tree Regressor Metrics')\n",
    "plt.ylabel('Score')\n",
    "plt.grid(axis='y', linestyle='--', alpha=0.5)\n",
-    "plt.tight_layout()\n",
-    "plt.show()\n"
+    "plt.tight_layout()"
   ]
  },
 {
--
GitLab
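
Note on the patched cells: the markdown cell in the first hunk refers to a tuned DecisionTreeRegressor (poisson splitting criterion, depth and leaf-count limits, regularization), and the second hunk trims the metrics bar chart cell. The sketch below is a minimal, hypothetical illustration of that kind of configuration and plot; the hyperparameter values, the synthetic data, and the variable names are assumptions for illustration and are not taken from the notebook.

# Hypothetical sketch of a tuned decision tree of the kind the patched markdown
# cell describes. Hyperparameter values and the synthetic data are illustrative
# assumptions, not the notebook's own dataset or settings.
import matplotlib.pyplot as plt
from sklearn.datasets import make_regression
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeRegressor
from sklearn.metrics import mean_absolute_error, mean_squared_error, r2_score

# Stand-in data; the notebook uses its own small productivity dataset.
X, y = make_regression(n_samples=300, n_features=8, noise=10.0, random_state=42)
y = y - y.min() + 1.0  # the poisson criterion requires non-negative targets

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, random_state=42
)

# Structural constraints plus regularization, the factors the markdown cell
# credits for the improvement. All values below are assumed.
model = DecisionTreeRegressor(
    criterion="poisson",   # splitting criterion named in the cell
    max_depth=5,           # assumed depth limit
    max_leaf_nodes=20,     # assumed leaf-count limit
    min_samples_leaf=5,    # assumed regularization setting
    random_state=42,
)
model.fit(X_train, y_train)
y_pred = model.predict(X_test)

metrics = {
    "MAE": mean_absolute_error(y_test, y_pred),
    "MSE": mean_squared_error(y_test, y_pred),
    "R2": r2_score(y_test, y_pred),
}

# Bar chart mirroring the style of the patched plotting cell.
plt.bar(list(metrics.keys()), list(metrics.values()))
plt.title("Tuned Decision Tree Regressor Metrics")
plt.ylabel("Score")
plt.grid(axis="y", linestyle="--", alpha=0.5)
plt.tight_layout()
plt.show()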