model = get_peft_model(  # wrap the base model with trainable LoRA adapters
    model,
    LoraConfig(
        task_type=TaskType.CAUSAL_LM,
        inference_mode=False,
        r=8,
        lora_alpha=16,
        lora_dropout=0.1,
        bias="none",
        target_modules=['dense'],
        modules_to_save=['embed_out'],
    ),
)
dataset = load_dataset('tatsu-lab/alpaca', split="train[:100]")
train_data = dataset.map(lambda x: tokenizer(x['text'...
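Since the snippet is cut off at both ends, here is a minimal self-contained sketch of the same setup. The base checkpoint (EleutherAI/pythia-160m) and the tokenizer arguments are assumptions, chosen because 'dense' and 'embed_out' are GPT-NeoX/Pythia module names; the original map() call is truncated, so its arguments are reconstructed.

from datasets import load_dataset
from peft import LoraConfig, TaskType, get_peft_model
from transformers import AutoModelForCausalLM, AutoTokenizer

# Assumption: a GPT-NeoX-style checkpoint, since 'dense' and 'embed_out'
# are module names from that architecture (e.g. the Pythia family).
base_model = "EleutherAI/pythia-160m"
tokenizer = AutoTokenizer.from_pretrained(base_model)
model = AutoModelForCausalLM.from_pretrained(base_model)

model = get_peft_model(model, LoraConfig(
    task_type=TaskType.CAUSAL_LM, inference_mode=False,
    r=8, lora_alpha=16, lora_dropout=0.1, bias="none",
    target_modules=["dense"], modules_to_save=["embed_out"],
))
model.print_trainable_parameters()  # sanity check: only adapter (and saved) params train

dataset = load_dataset("tatsu-lab/alpaca", split="train[:100]")
# Assumption: the truncated map() call tokenizes each prompt to a fixed length.
train_data = dataset.map(
    lambda x: tokenizer(x["text"], truncation=True, max_length=512),
    batched=True,
)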
(checkpoint_folder, "pt_lora_model") kwargs["model"].save_pretrained(peft_model_path) kwargs["tokenizer"].save_pretrained(peft_model_path) def on_save(self, args, state, control, **kwargs): self.save_model(args, state, kwargs) return control def on_train_end(self, args, state, ...
'Lora', 'multidiffusion-upscaler-for-automatic1111', 'ScuNET', 'sd-dynamic-thresholding', 'sd-extension-system-info', 'sd-webui-agent-scheduler', 'sd-webui-controlnet', 'stable-diffusion-webui-images-browser', 'stable-diffusion-webui-rembg', 'SwinIR'] ...