From ff60b2fad181b5d7e1f7bd71e8b5ab3efc4a665c Mon Sep 17 00:00:00 2001
From: R0w9h <4ranci0ne@gmail.com>
Date: Fri, 22 Sep 2023 20:23:45 +0900
Subject: [PATCH] Update kohya-LoRA-trainer-XL.ipynb

---
 kohya-LoRA-trainer-XL.ipynb | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/kohya-LoRA-trainer-XL.ipynb b/kohya-LoRA-trainer-XL.ipynb
index 38ac5201..3b1ed4a7 100644
--- a/kohya-LoRA-trainer-XL.ipynb
+++ b/kohya-LoRA-trainer-XL.ipynb
@@ -1410,7 +1410,7 @@
 "\n",
 "# @title ## **4.2. Optimizer Config**\n",
 "# @markdown Use `Adafactor` optimizer. `RMSprop 8bit` or `Adagrad 8bit` may work. `AdamW 8bit` doesn't seem to work.\n",
-"optimizer_type = \"AdaFactor\" # @param [\"AdamW\", \"AdamW8bit\", \"Lion8bit\", \"Lion\", \"SGDNesterov\", \"SGDNesterov8bit\", \"DAdaptation(DAdaptAdamPreprint)\", \"DAdaptAdaGrad\", \"DAdaptAdam\", \"DAdaptAdan\", \"DAdaptAdanIP\", \"DAdaptLion\", \"DAdaptSGD\", \"AdaFactor\"]\n",
+"optimizer_type = \"AdaFactor\" # @param [\"AdamW\", \"AdamW8bit\", \"Lion8bit\", \"Lion\", \"SGDNesterov\", \"SGDNesterov8bit\", \"DAdaptAdampreprint\", \"DAdaptAdaGrad\", \"DAdaptAdam\", \"DAdaptAdan\", \"DAdaptAdanIP\", \"DAdaptLion\", \"DAdaptSGD\", \"AdaFactor\"]\n",
 "# @markdown Specify `optimizer_args` to add `additional` args for optimizer, e.g: `[\"weight_decay=0.6\"]`\n",
 "optimizer_args = \"[ \\\"scale_parameter=False\\\", \\\"relative_step=False\\\", \\\"warmup_init=False\\\" ]\" # @param {'type':'string'}\n",
 "# @markdown ### **Learning Rate Config**\n",
@@ -2170,4 +2170,4 @@
 },
 "nbformat": 4,
 "nbformat_minor": 0
-}
\ No newline at end of file
+}
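
The notebook cell touched by this patch stores optimizer settings as strings: `optimizer_type` names the optimizer and `optimizer_args` holds a list of `key=value` strings that are parsed into keyword arguments. Below is a minimal sketch of that flow, assuming the `AdaFactor` choice maps to `transformers.optimization.Adafactor` (which kohya's sd-scripts uses); `parse_optimizer_args` is a simplified hypothetical helper, not sd-scripts' actual parser:

```python
# Sketch: turn the cell's ["key=value", ...] strings into optimizer kwargs.
import torch
from transformers.optimization import Adafactor

def parse_optimizer_args(args):
    # Hypothetical simplified parser: "scale_parameter=False" -> {"scale_parameter": False}
    kwargs = {}
    for arg in args:
        key, _, value = arg.partition("=")
        if value in ("True", "False"):
            kwargs[key] = value == "True"
        else:
            try:
                kwargs[key] = float(value)
            except ValueError:
                kwargs[key] = value
    return kwargs

# Stand-in for the LoRA parameters being trained.
params = [torch.nn.Parameter(torch.zeros(4, 4))]

# The values from the cell's optimizer_args field.
optimizer_args = ["scale_parameter=False", "relative_step=False", "warmup_init=False"]

# With relative_step=False, Adafactor no longer computes its own step size,
# so an explicit learning rate must be supplied (1e-4 here is illustrative).
optimizer = Adafactor(params, lr=1e-4, **parse_optimizer_args(optimizer_args))
```

The three flags in `optimizer_args` are the usual recipe for running Adafactor with a fixed, externally scheduled learning rate instead of its default adaptive/relative stepping.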