Skip to content

Commit 13c7e87

Browse files
committed
fix #5749
Former-commit-id: 23dbe9a
1 parent d183966 commit 13c7e87

File tree

5 files changed

+16
-14
lines changed

5 files changed

+16
-14
lines changed

.dockerignore

Lines changed: 2 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -7,6 +7,8 @@ data
77
docker
88
saves
99
hf_cache
10+
ms_cache
11+
om_cache
1012
output
1113
.dockerignore
1214
.gitattributes

.env.local

Lines changed: 7 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -1,9 +1,9 @@
11
# Note: actually we do not support .env, just for reference
22
# api
3-
API_HOST=0.0.0.0
4-
API_PORT=8000
3+
API_HOST=
4+
API_PORT=
55
API_KEY=
6-
API_MODEL_NAME=gpt-3.5-turbo
6+
API_MODEL_NAME=
77
FASTAPI_ROOT_PATH=
88
# general
99
DISABLE_VERSION_CHECK=
@@ -21,13 +21,14 @@ RANK=
2121
NPROC_PER_NODE=
2222
# wandb
2323
WANDB_DISABLED=
24-
WANDB_PROJECT=huggingface
24+
WANDB_PROJECT=
2525
WANDB_API_KEY=
2626
# gradio ui
27-
GRADIO_SHARE=False
28-
GRADIO_SERVER_NAME=0.0.0.0
27+
GRADIO_SHARE=
28+
GRADIO_SERVER_NAME=
2929
GRADIO_SERVER_PORT=
3030
GRADIO_ROOT_PATH=
31+
GRADIO_IPV6=
3132
# setup
3233
ENABLE_SHORT_CONSOLE=1
3334
# reserved (do not use)

src/llamafactory/train/callbacks.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -124,12 +124,12 @@ def __init__(self, processor: "ProcessorMixin") -> None:
124124
def on_save(self, args: "TrainingArguments", state: "TrainerState", control: "TrainerControl", **kwargs):
125125
if args.should_save:
126126
output_dir = os.path.join(args.output_dir, f"{PREFIX_CHECKPOINT_DIR}-{state.global_step}")
127-
getattr(self.processor, "image_processor").save_pretrained(output_dir)
127+
self.processor.save_pretrained(output_dir)
128128

129129
@override
130130
def on_train_end(self, args: "TrainingArguments", state: "TrainerState", control: "TrainerControl", **kwargs):
131131
if args.should_save:
132-
getattr(self.processor, "image_processor").save_pretrained(args.output_dir)
132+
self.processor.save_pretrained(args.output_dir)
133133

134134

135135
class PissaConvertCallback(TrainerCallback):

src/llamafactory/train/tuner.py

Lines changed: 2 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -133,11 +133,9 @@ def export_model(args: Optional[Dict[str, Any]] = None) -> None:
133133
tokenizer.push_to_hub(model_args.export_hub_model_id, token=model_args.hf_hub_token)
134134

135135
if processor is not None:
136-
getattr(processor, "image_processor").save_pretrained(model_args.export_dir)
136+
processor.save_pretrained(model_args.export_dir)
137137
if model_args.export_hub_model_id is not None:
138-
getattr(processor, "image_processor").push_to_hub(
139-
model_args.export_hub_model_id, token=model_args.hf_hub_token
140-
)
138+
processor.push_to_hub(model_args.export_hub_model_id, token=model_args.hf_hub_token)
141139

142140
except Exception as e:
143141
logger.warning(f"Cannot save tokenizer, please copy the files manually: {e}.")

src/webui.py

Lines changed: 3 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -18,8 +18,9 @@
1818

1919

2020
def main():
21-
gradio_share = os.environ.get("GRADIO_SHARE", "0").lower() in ["true", "1"]
22-
server_name = os.environ.get("GRADIO_SERVER_NAME", "0.0.0.0")
21+
gradio_ipv6 = os.getenv("GRADIO_IPV6", "0").lower() in ["true", "1"]
22+
gradio_share = os.getenv("GRADIO_SHARE", "0").lower() in ["true", "1"]
23+
server_name = os.getenv("GRADIO_SERVER_NAME", "[::]" if gradio_ipv6 else "0.0.0.0")
2324
create_ui().queue().launch(share=gradio_share, server_name=server_name, inbrowser=True)
2425

2526

0 commit comments

Comments (0)