fofr / sdxl-santa-hat
SDXL fine-tuned on Santa Hats
- Public
- 845 runs
- Hardware: L40S
- SDXL fine-tune
Prediction
fofr/sdxl-santa-hat:bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc
- ID: r6p7d23bfdzxehatpa4n2zxdue
- Status: Succeeded
- Source: Web
- Hardware: A40 (Large)

Input
- width: 768
- height: 768
- prompt: A photo of a cat wearing a TOK santa hat
- refine: expert_ensemble_refiner
- scheduler: K_EULER
- lora_scale: 0.6
- num_outputs: 1
- guidance_scale: 7.5
- apply_watermark: false
- high_noise_frac: 0.9
- negative_prompt: ""
- prompt_strength: 0.8
- num_inference_steps: 25
{ "width": 768, "height": 768, "prompt": "A photo of a cat wearing a TOK santa hat", "refine": "expert_ensemble_refiner", "scheduler": "K_EULER", "lora_scale": 0.6, "num_outputs": 1, "guidance_scale": 7.5, "apply_watermark": false, "high_noise_frac": 0.9, "negative_prompt": "", "prompt_strength": 0.8, "num_inference_steps": 25 }
Install Replicate’s Node.js client library:

npm install replicate
Import and set up the client:

import Replicate from "replicate";

const replicate = new Replicate({
  auth: process.env.REPLICATE_API_TOKEN,
});
Run fofr/sdxl-santa-hat using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
import fs from "node:fs/promises";

const output = await replicate.run(
  "fofr/sdxl-santa-hat:bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc",
  {
    input: {
      width: 768,
      height: 768,
      prompt: "A photo of a cat wearing a TOK santa hat",
      refine: "expert_ensemble_refiner",
      scheduler: "K_EULER",
      lora_scale: 0.6,
      num_outputs: 1,
      guidance_scale: 7.5,
      apply_watermark: false,
      high_noise_frac: 0.9,
      negative_prompt: "",
      prompt_strength: 0.8,
      num_inference_steps: 25
    }
  }
);

// To access the file URL:
console.log(output[0].url()); //=> "http://example.com"

// To write the file to disk:
await fs.writeFile("my-image.png", output[0]);
To learn more, take a look at the guide on getting started with Node.js.
Install Replicate’s Python client library:

pip install replicate
Import the client:

import replicate
Run fofr/sdxl-santa-hat using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
output = replicate.run(
    "fofr/sdxl-santa-hat:bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc",
    input={
        "width": 768,
        "height": 768,
        "prompt": "A photo of a cat wearing a TOK santa hat",
        "refine": "expert_ensemble_refiner",
        "scheduler": "K_EULER",
        "lora_scale": 0.6,
        "num_outputs": 1,
        "guidance_scale": 7.5,
        "apply_watermark": False,
        "high_noise_frac": 0.9,
        "negative_prompt": "",
        "prompt_strength": 0.8,
        "num_inference_steps": 25
    }
)
print(output)
To learn more, take a look at the guide on getting started with Python.
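The Python example above only prints the returned output. As a rough sketch of what to do next, assuming the result is a list of image URLs like the one shown in the Output section below (newer client versions may instead return file-like objects, which need different handling), the first image can be saved to disk like this:

import urllib.request

import replicate

# Run the model with just a prompt; the remaining inputs are assumed to fall back
# to the model's defaults.
output = replicate.run(
    "fofr/sdxl-santa-hat:bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc",
    input={"prompt": "A photo of a cat wearing a TOK santa hat"},
)

# Assuming `output` is a list of URL strings, download the first image.
image_url = output[0]
urllib.request.urlretrieve(image_url, "out-0.png")
print(f"Saved {image_url} to out-0.png")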
Run fofr/sdxl-santa-hat using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
curl -s -X POST \
  -H "Authorization: Bearer $REPLICATE_API_TOKEN" \
  -H "Content-Type: application/json" \
  -H "Prefer: wait" \
  -d $'{
    "version": "fofr/sdxl-santa-hat:bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc",
    "input": {
      "width": 768,
      "height": 768,
      "prompt": "A photo of a cat wearing a TOK santa hat",
      "refine": "expert_ensemble_refiner",
      "scheduler": "K_EULER",
      "lora_scale": 0.6,
      "num_outputs": 1,
      "guidance_scale": 7.5,
      "apply_watermark": false,
      "high_noise_frac": 0.9,
      "negative_prompt": "",
      "prompt_strength": 0.8,
      "num_inference_steps": 25
    }
  }' \
  https://api.replicate.com/v1/predictions
To learn more, take a look at Replicate’s HTTP API reference docs.
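The same request can also be made from Python with the requests library; this is only a sketch mirroring the curl call above (the requests dependency and the response handling are assumptions, not part of the official example):

import os

import requests

response = requests.post(
    "https://api.replicate.com/v1/predictions",
    headers={
        "Authorization": f"Bearer {os.environ['REPLICATE_API_TOKEN']}",
        "Content-Type": "application/json",
        # Ask the API to hold the connection open until the prediction finishes.
        "Prefer": "wait",
    },
    json={
        "version": "fofr/sdxl-santa-hat:bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc",
        "input": {
            "width": 768,
            "height": 768,
            "prompt": "A photo of a cat wearing a TOK santa hat",
        },
    },
)
response.raise_for_status()
prediction = response.json()
print(prediction["status"], prediction.get("output"))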
Output
{ "completed_at": "2023-12-19T10:15:33.572395Z", "created_at": "2023-12-19T10:15:28.335492Z", "data_removed": false, "error": null, "id": "r6p7d23bfdzxehatpa4n2zxdue", "input": { "width": 768, "height": 768, "prompt": "A photo of a cat wearing a TOK santa hat", "refine": "expert_ensemble_refiner", "scheduler": "K_EULER", "lora_scale": 0.6, "num_outputs": 1, "guidance_scale": 7.5, "apply_watermark": false, "high_noise_frac": 0.9, "negative_prompt": "", "prompt_strength": 0.8, "num_inference_steps": 25 }, "logs": "Using seed: 38079\nskipping loading .. weights already loaded\nPrompt: A photo of a cat wearing a <s0><s1> santa hat\ntxt2img mode\n 0%| | 0/22 [00:00<?, ?it/s]\n 5%|▍ | 1/22 [00:00<00:03, 6.18it/s]\n 9%|▉ | 2/22 [00:00<00:03, 6.15it/s]\n 14%|█▎ | 3/22 [00:00<00:03, 6.15it/s]\n 18%|█▊ | 4/22 [00:00<00:02, 6.15it/s]\n 23%|██▎ | 5/22 [00:00<00:02, 6.14it/s]\n 27%|██▋ | 6/22 [00:00<00:02, 6.14it/s]\n 32%|███▏ | 7/22 [00:01<00:02, 6.14it/s]\n 36%|███▋ | 8/22 [00:01<00:02, 6.15it/s]\n 41%|████ | 9/22 [00:01<00:02, 6.15it/s]\n 45%|████▌ | 10/22 [00:01<00:01, 6.15it/s]\n 50%|█████ | 11/22 [00:01<00:01, 6.15it/s]\n 55%|█████▍ | 12/22 [00:01<00:01, 6.15it/s]\n 59%|█████▉ | 13/22 [00:02<00:01, 6.14it/s]\n 64%|██████▎ | 14/22 [00:02<00:01, 6.14it/s]\n 68%|██████▊ | 15/22 [00:02<00:01, 6.14it/s]\n 73%|███████▎ | 16/22 [00:02<00:00, 6.14it/s]\n 77%|███████▋ | 17/22 [00:02<00:00, 6.14it/s]\n 82%|████████▏ | 18/22 [00:02<00:00, 6.14it/s]\n 86%|████████▋ | 19/22 [00:03<00:00, 6.13it/s]\n 91%|█████████ | 20/22 [00:03<00:00, 6.14it/s]\n 95%|█████████▌| 21/22 [00:03<00:00, 6.14it/s]\n100%|██████████| 22/22 [00:03<00:00, 6.14it/s]\n100%|██████████| 22/22 [00:03<00:00, 6.14it/s]\n 0%| | 0/3 [00:00<?, ?it/s]\n 33%|███▎ | 1/3 [00:00<00:00, 7.76it/s]\n 67%|██████▋ | 2/3 [00:00<00:00, 7.68it/s]\n100%|██████████| 3/3 [00:00<00:00, 7.66it/s]\n100%|██████████| 3/3 [00:00<00:00, 7.67it/s]", "metrics": { "predict_time": 5.226535, "total_time": 5.236903 }, "output": [ "https://replicate.delivery/pbxt/wVAffWSrOfBbQIRPd6fBNyT1qRSAIPbsTKGV6djPKY5V0HPIB/out-0.png" ], "started_at": "2023-12-19T10:15:28.345860Z", "status": "succeeded", "urls": { "get": "https://api.replicate.com/v1/predictions/r6p7d23bfdzxehatpa4n2zxdue", "cancel": "https://api.replicate.com/v1/predictions/r6p7d23bfdzxehatpa4n2zxdue/cancel" }, "version": "bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc" }
Prediction
fofr/sdxl-santa-hat:bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc
- ID: jljqjs3bjx47hqm4tumcmzh2ui
- Status: Succeeded
- Source: Web
- Hardware: A40 (Large)

Input
- width: 768
- height: 1024
- prompt: A photo of a cute panda wearing a TOK santa hat in space
- refine: expert_ensemble_refiner
- scheduler: K_EULER
- lora_scale: 0.6
- num_outputs: 1
- guidance_scale: 7.5
- apply_watermark: false
- high_noise_frac: 0.9
- prompt_strength: 0.8
- num_inference_steps: 25
{ "width": 768, "height": 1024, "prompt": "A photo of a cute panda wearing a TOK santa hat in space", "refine": "expert_ensemble_refiner", "scheduler": "K_EULER", "lora_scale": 0.6, "num_outputs": 1, "guidance_scale": 7.5, "apply_watermark": false, "high_noise_frac": 0.9, "prompt_strength": 0.8, "num_inference_steps": 25 }
Install Replicate’s Node.js client library:

npm install replicate
Import and set up the client:

import Replicate from "replicate";

const replicate = new Replicate({
  auth: process.env.REPLICATE_API_TOKEN,
});
Run fofr/sdxl-santa-hat using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
import fs from "node:fs/promises";

const output = await replicate.run(
  "fofr/sdxl-santa-hat:bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc",
  {
    input: {
      width: 768,
      height: 1024,
      prompt: "A photo of a cute panda wearing a TOK santa hat in space",
      refine: "expert_ensemble_refiner",
      scheduler: "K_EULER",
      lora_scale: 0.6,
      num_outputs: 1,
      guidance_scale: 7.5,
      apply_watermark: false,
      high_noise_frac: 0.9,
      prompt_strength: 0.8,
      num_inference_steps: 25
    }
  }
);

// To access the file URL:
console.log(output[0].url()); //=> "http://example.com"

// To write the file to disk:
await fs.writeFile("my-image.png", output[0]);
To learn more, take a look at the guide on getting started with Node.js.
Install Replicate’s Python client library:

pip install replicate
Import the client:

import replicate
Run fofr/sdxl-santa-hat using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
output = replicate.run(
    "fofr/sdxl-santa-hat:bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc",
    input={
        "width": 768,
        "height": 1024,
        "prompt": "A photo of a cute panda wearing a TOK santa hat in space",
        "refine": "expert_ensemble_refiner",
        "scheduler": "K_EULER",
        "lora_scale": 0.6,
        "num_outputs": 1,
        "guidance_scale": 7.5,
        "apply_watermark": False,
        "high_noise_frac": 0.9,
        "prompt_strength": 0.8,
        "num_inference_steps": 25
    }
)
print(output)
To learn more, take a look at the guide on getting started with Python.
Run fofr/sdxl-santa-hat using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
curl -s -X POST \
  -H "Authorization: Bearer $REPLICATE_API_TOKEN" \
  -H "Content-Type: application/json" \
  -H "Prefer: wait" \
  -d $'{
    "version": "fofr/sdxl-santa-hat:bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc",
    "input": {
      "width": 768,
      "height": 1024,
      "prompt": "A photo of a cute panda wearing a TOK santa hat in space",
      "refine": "expert_ensemble_refiner",
      "scheduler": "K_EULER",
      "lora_scale": 0.6,
      "num_outputs": 1,
      "guidance_scale": 7.5,
      "apply_watermark": false,
      "high_noise_frac": 0.9,
      "prompt_strength": 0.8,
      "num_inference_steps": 25
    }
  }' \
  https://api.replicate.com/v1/predictions
To learn more, take a look at Replicate’s HTTP API reference docs.
Output
{ "completed_at": "2023-12-19T10:46:46.141808Z", "created_at": "2023-12-19T10:46:39.856426Z", "data_removed": false, "error": null, "id": "jljqjs3bjx47hqm4tumcmzh2ui", "input": { "width": 768, "height": 1024, "prompt": "A photo of a cute panda wearing a TOK santa hat in space", "refine": "expert_ensemble_refiner", "scheduler": "K_EULER", "lora_scale": 0.6, "num_outputs": 1, "guidance_scale": 7.5, "apply_watermark": false, "high_noise_frac": 0.9, "prompt_strength": 0.8, "num_inference_steps": 25 }, "logs": "Using seed: 46652\nskipping loading .. weights already loaded\nPrompt: A photo of a cute panda wearing a <s0><s1> santa hat in space\ntxt2img mode\n 0%| | 0/22 [00:00<?, ?it/s]\n 5%|▍ | 1/22 [00:00<00:04, 4.93it/s]\n 9%|▉ | 2/22 [00:00<00:04, 4.90it/s]\n 14%|█▎ | 3/22 [00:00<00:03, 4.91it/s]\n 18%|█▊ | 4/22 [00:00<00:03, 4.91it/s]\n 23%|██▎ | 5/22 [00:01<00:03, 4.92it/s]\n 27%|██▋ | 6/22 [00:01<00:03, 4.91it/s]\n 32%|███▏ | 7/22 [00:01<00:03, 4.91it/s]\n 36%|███▋ | 8/22 [00:01<00:02, 4.90it/s]\n 41%|████ | 9/22 [00:01<00:02, 4.90it/s]\n 45%|████▌ | 10/22 [00:02<00:02, 4.90it/s]\n 50%|█████ | 11/22 [00:02<00:02, 4.91it/s]\n 55%|█████▍ | 12/22 [00:02<00:02, 4.90it/s]\n 59%|█████▉ | 13/22 [00:02<00:01, 4.90it/s]\n 64%|██████▎ | 14/22 [00:02<00:01, 4.90it/s]\n 68%|██████▊ | 15/22 [00:03<00:01, 4.90it/s]\n 73%|███████▎ | 16/22 [00:03<00:01, 4.90it/s]\n 77%|███████▋ | 17/22 [00:03<00:01, 4.90it/s]\n 82%|████████▏ | 18/22 [00:03<00:00, 4.89it/s]\n 86%|████████▋ | 19/22 [00:03<00:00, 4.89it/s]\n 91%|█████████ | 20/22 [00:04<00:00, 4.90it/s]\n 95%|█████████▌| 21/22 [00:04<00:00, 4.89it/s]\n100%|██████████| 22/22 [00:04<00:00, 4.89it/s]\n100%|██████████| 22/22 [00:04<00:00, 4.90it/s]\n 0%| | 0/3 [00:00<?, ?it/s]\n 33%|███▎ | 1/3 [00:00<00:00, 6.18it/s]\n 67%|██████▋ | 2/3 [00:00<00:00, 6.14it/s]\n100%|██████████| 3/3 [00:00<00:00, 6.12it/s]\n100%|██████████| 3/3 [00:00<00:00, 6.13it/s]", "metrics": { "predict_time": 6.278039, "total_time": 6.285382 }, "output": [ "https://replicate.delivery/pbxt/NIXGS8kseYXvZ6dTaZYEeBbNEeUpQfZjJBDeEnz5UnqrSTegE/out-0.png" ], "started_at": "2023-12-19T10:46:39.863769Z", "status": "succeeded", "urls": { "get": "https://api.replicate.com/v1/predictions/jljqjs3bjx47hqm4tumcmzh2ui", "cancel": "https://api.replicate.com/v1/predictions/jljqjs3bjx47hqm4tumcmzh2ui/cancel" }, "version": "bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc" }
Prediction
fofr/sdxl-santa-hat:bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc
- ID: ykj7lm3bstjrggyhfmiygxp6ea
- Status: Succeeded
- Source: Web
- Hardware: A40 (Large)

Input
- width: 768
- height: 768
- prompt: A sci-fi concept art painting of Elon Musk wearing a TOK santa hat
- refine: expert_ensemble_refiner
- scheduler: K_EULER
- lora_scale: 0.6
- num_outputs: 1
- guidance_scale: 7.5
- apply_watermark: false
- high_noise_frac: 0.9
- negative_prompt: ""
- prompt_strength: 0.8
- num_inference_steps: 25
{ "width": 768, "height": 768, "prompt": "A sci-fi concept art painting of Elon Musk wearing a TOK santa hat", "refine": "expert_ensemble_refiner", "scheduler": "K_EULER", "lora_scale": 0.6, "num_outputs": 1, "guidance_scale": 7.5, "apply_watermark": false, "high_noise_frac": 0.9, "negative_prompt": "", "prompt_strength": 0.8, "num_inference_steps": 25 }
Install Replicate’s Node.js client library:

npm install replicate
Import and set up the client:

import Replicate from "replicate";

const replicate = new Replicate({
  auth: process.env.REPLICATE_API_TOKEN,
});
Run fofr/sdxl-santa-hat using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
import fs from "node:fs/promises";

const output = await replicate.run(
  "fofr/sdxl-santa-hat:bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc",
  {
    input: {
      width: 768,
      height: 768,
      prompt: "A sci-fi concept art painting of Elon Musk wearing a TOK santa hat",
      refine: "expert_ensemble_refiner",
      scheduler: "K_EULER",
      lora_scale: 0.6,
      num_outputs: 1,
      guidance_scale: 7.5,
      apply_watermark: false,
      high_noise_frac: 0.9,
      negative_prompt: "",
      prompt_strength: 0.8,
      num_inference_steps: 25
    }
  }
);

// To access the file URL:
console.log(output[0].url()); //=> "http://example.com"

// To write the file to disk:
await fs.writeFile("my-image.png", output[0]);
To learn more, take a look at the guide on getting started with Node.js.
Install Replicate’s Python client library:

pip install replicate
Import the client:

import replicate
Run fofr/sdxl-santa-hat using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
output = replicate.run(
    "fofr/sdxl-santa-hat:bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc",
    input={
        "width": 768,
        "height": 768,
        "prompt": "A sci-fi concept art painting of Elon Musk wearing a TOK santa hat",
        "refine": "expert_ensemble_refiner",
        "scheduler": "K_EULER",
        "lora_scale": 0.6,
        "num_outputs": 1,
        "guidance_scale": 7.5,
        "apply_watermark": False,
        "high_noise_frac": 0.9,
        "negative_prompt": "",
        "prompt_strength": 0.8,
        "num_inference_steps": 25
    }
)
print(output)
To learn more, take a look at the guide on getting started with Python.
Run fofr/sdxl-santa-hat using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
curl -s -X POST \
  -H "Authorization: Bearer $REPLICATE_API_TOKEN" \
  -H "Content-Type: application/json" \
  -H "Prefer: wait" \
  -d $'{
    "version": "fofr/sdxl-santa-hat:bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc",
    "input": {
      "width": 768,
      "height": 768,
      "prompt": "A sci-fi concept art painting of Elon Musk wearing a TOK santa hat",
      "refine": "expert_ensemble_refiner",
      "scheduler": "K_EULER",
      "lora_scale": 0.6,
      "num_outputs": 1,
      "guidance_scale": 7.5,
      "apply_watermark": false,
      "high_noise_frac": 0.9,
      "negative_prompt": "",
      "prompt_strength": 0.8,
      "num_inference_steps": 25
    }
  }' \
  https://api.replicate.com/v1/predictions
To learn more, take a look at Replicate’s HTTP API reference docs.
Output
{ "completed_at": "2023-12-19T10:19:08.299657Z", "created_at": "2023-12-19T10:19:03.071520Z", "data_removed": false, "error": null, "id": "ykj7lm3bstjrggyhfmiygxp6ea", "input": { "width": 768, "height": 768, "prompt": "A sci-fi concept art painting of Elon Musk wearing a TOK santa hat", "refine": "expert_ensemble_refiner", "scheduler": "K_EULER", "lora_scale": 0.6, "num_outputs": 1, "guidance_scale": 7.5, "apply_watermark": false, "high_noise_frac": 0.9, "negative_prompt": "", "prompt_strength": 0.8, "num_inference_steps": 25 }, "logs": "Using seed: 12435\nskipping loading .. weights already loaded\nPrompt: A sci-fi concept art painting of Elon Musk wearing a <s0><s1> santa hat\ntxt2img mode\n 0%| | 0/22 [00:00<?, ?it/s]\n 5%|▍ | 1/22 [00:00<00:03, 6.23it/s]\n 9%|▉ | 2/22 [00:00<00:03, 6.20it/s]\n 14%|█▎ | 3/22 [00:00<00:03, 6.19it/s]\n 18%|█▊ | 4/22 [00:00<00:02, 6.19it/s]\n 23%|██▎ | 5/22 [00:00<00:02, 6.20it/s]\n 27%|██▋ | 6/22 [00:00<00:02, 6.20it/s]\n 32%|███▏ | 7/22 [00:01<00:02, 6.20it/s]\n 36%|███▋ | 8/22 [00:01<00:02, 6.20it/s]\n 41%|████ | 9/22 [00:01<00:02, 6.20it/s]\n 45%|████▌ | 10/22 [00:01<00:01, 6.20it/s]\n 50%|█████ | 11/22 [00:01<00:01, 6.19it/s]\n 55%|█████▍ | 12/22 [00:01<00:01, 6.19it/s]\n 59%|█████▉ | 13/22 [00:02<00:01, 6.19it/s]\n 64%|██████▎ | 14/22 [00:02<00:01, 6.19it/s]\n 68%|██████▊ | 15/22 [00:02<00:01, 6.18it/s]\n 73%|███████▎ | 16/22 [00:02<00:00, 6.18it/s]\n 77%|███████▋ | 17/22 [00:02<00:00, 6.18it/s]\n 82%|████████▏ | 18/22 [00:02<00:00, 6.18it/s]\n 86%|████████▋ | 19/22 [00:03<00:00, 6.19it/s]\n 91%|█████████ | 20/22 [00:03<00:00, 6.19it/s]\n 95%|█████████▌| 21/22 [00:03<00:00, 6.19it/s]\n100%|██████████| 22/22 [00:03<00:00, 6.19it/s]\n100%|██████████| 22/22 [00:03<00:00, 6.19it/s]\n 0%| | 0/3 [00:00<?, ?it/s]\n 33%|███▎ | 1/3 [00:00<00:00, 7.83it/s]\n 67%|██████▋ | 2/3 [00:00<00:00, 7.74it/s]\n100%|██████████| 3/3 [00:00<00:00, 7.74it/s]\n100%|██████████| 3/3 [00:00<00:00, 7.75it/s]", "metrics": { "predict_time": 5.219719, "total_time": 5.228137 }, "output": [ "https://replicate.delivery/pbxt/wef1OQczthunUkYvuOsfeSmtT5rsEDa1AedslraVGkIdDQegE/out-0.png" ], "started_at": "2023-12-19T10:19:03.079938Z", "status": "succeeded", "urls": { "get": "https://api.replicate.com/v1/predictions/ykj7lm3bstjrggyhfmiygxp6ea", "cancel": "https://api.replicate.com/v1/predictions/ykj7lm3bstjrggyhfmiygxp6ea/cancel" }, "version": "bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc" }
Prediction
fofr/sdxl-santa-hat:bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc
- ID: ieqzn5dbk62d245vienpxgh5z4
- Status: Succeeded
- Source: Web
- Hardware: A40 (Large)

Input
- width: 768
- height: 1024
- prompt: Lightning McQueen wearing a TOK santa hat
- refine: expert_ensemble_refiner
- scheduler: K_EULER
- lora_scale: 0.6
- num_outputs: 1
- guidance_scale: 7.5
- apply_watermark: false
- high_noise_frac: 0.9
- prompt_strength: 0.8
- num_inference_steps: 25
{ "width": 768, "height": 1024, "prompt": "Lightning McQueen wearing a TOK santa hat", "refine": "expert_ensemble_refiner", "scheduler": "K_EULER", "lora_scale": 0.6, "num_outputs": 1, "guidance_scale": 7.5, "apply_watermark": false, "high_noise_frac": 0.9, "prompt_strength": 0.8, "num_inference_steps": 25 }
Install Replicate’s Node.js client library:

npm install replicate
Import and set up the client:

import Replicate from "replicate";

const replicate = new Replicate({
  auth: process.env.REPLICATE_API_TOKEN,
});
Run fofr/sdxl-santa-hat using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
import fs from "node:fs/promises";

const output = await replicate.run(
  "fofr/sdxl-santa-hat:bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc",
  {
    input: {
      width: 768,
      height: 1024,
      prompt: "Lightning McQueen wearing a TOK santa hat",
      refine: "expert_ensemble_refiner",
      scheduler: "K_EULER",
      lora_scale: 0.6,
      num_outputs: 1,
      guidance_scale: 7.5,
      apply_watermark: false,
      high_noise_frac: 0.9,
      prompt_strength: 0.8,
      num_inference_steps: 25
    }
  }
);

// To access the file URL:
console.log(output[0].url()); //=> "http://example.com"

// To write the file to disk:
await fs.writeFile("my-image.png", output[0]);
To learn more, take a look at the guide on getting started with Node.js.
Install Replicate’s Python client library:

pip install replicate
Import the client:

import replicate
Run fofr/sdxl-santa-hat using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
output = replicate.run(
    "fofr/sdxl-santa-hat:bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc",
    input={
        "width": 768,
        "height": 1024,
        "prompt": "Lightning McQueen wearing a TOK santa hat",
        "refine": "expert_ensemble_refiner",
        "scheduler": "K_EULER",
        "lora_scale": 0.6,
        "num_outputs": 1,
        "guidance_scale": 7.5,
        "apply_watermark": False,
        "high_noise_frac": 0.9,
        "prompt_strength": 0.8,
        "num_inference_steps": 25
    }
)
print(output)
To learn more, take a look at the guide on getting started with Python.
Run fofr/sdxl-santa-hat using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
curl -s -X POST \
  -H "Authorization: Bearer $REPLICATE_API_TOKEN" \
  -H "Content-Type: application/json" \
  -H "Prefer: wait" \
  -d $'{
    "version": "fofr/sdxl-santa-hat:bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc",
    "input": {
      "width": 768,
      "height": 1024,
      "prompt": "Lightning McQueen wearing a TOK santa hat",
      "refine": "expert_ensemble_refiner",
      "scheduler": "K_EULER",
      "lora_scale": 0.6,
      "num_outputs": 1,
      "guidance_scale": 7.5,
      "apply_watermark": false,
      "high_noise_frac": 0.9,
      "prompt_strength": 0.8,
      "num_inference_steps": 25
    }
  }' \
  https://api.replicate.com/v1/predictions
To learn more, take a look at Replicate’s HTTP API reference docs.
Output
{ "completed_at": "2023-12-19T10:51:05.327751Z", "created_at": "2023-12-19T10:50:53.151890Z", "data_removed": false, "error": null, "id": "ieqzn5dbk62d245vienpxgh5z4", "input": { "width": 768, "height": 1024, "prompt": "Lightning McQueen wearing a TOK santa hat", "refine": "expert_ensemble_refiner", "scheduler": "K_EULER", "lora_scale": 0.6, "num_outputs": 1, "guidance_scale": 7.5, "apply_watermark": false, "high_noise_frac": 0.9, "prompt_strength": 0.8, "num_inference_steps": 25 }, "logs": "Using seed: 49425\nLoading fine-tuned model\nDoes not have Unet. assume we are using LoRA\nLoading Unet LoRA\nPrompt: Lightning McQueen wearing a <s0><s1> santa hat\ntxt2img mode\n 0%| | 0/22 [00:00<?, ?it/s]\n 5%|▍ | 1/22 [00:00<00:04, 4.95it/s]\n 9%|▉ | 2/22 [00:00<00:04, 4.93it/s]\n 14%|█▎ | 3/22 [00:00<00:03, 4.92it/s]\n 18%|█▊ | 4/22 [00:00<00:03, 4.92it/s]\n 23%|██▎ | 5/22 [00:01<00:03, 4.91it/s]\n 27%|██▋ | 6/22 [00:01<00:03, 4.91it/s]\n 32%|███▏ | 7/22 [00:01<00:03, 4.91it/s]\n 36%|███▋ | 8/22 [00:01<00:02, 4.91it/s]\n 41%|████ | 9/22 [00:01<00:02, 4.91it/s]\n 45%|████▌ | 10/22 [00:02<00:02, 4.91it/s]\n 50%|█████ | 11/22 [00:02<00:02, 4.90it/s]\n 55%|█████▍ | 12/22 [00:02<00:02, 4.91it/s]\n 59%|█████▉ | 13/22 [00:02<00:01, 4.92it/s]\n 64%|██████▎ | 14/22 [00:02<00:01, 4.93it/s]\n 68%|██████▊ | 15/22 [00:03<00:01, 4.92it/s]\n 73%|███████▎ | 16/22 [00:03<00:01, 4.93it/s]\n 77%|███████▋ | 17/22 [00:03<00:01, 4.93it/s]\n 82%|████████▏ | 18/22 [00:03<00:00, 4.93it/s]\n 86%|████████▋ | 19/22 [00:03<00:00, 4.93it/s]\n 91%|█████████ | 20/22 [00:04<00:00, 4.93it/s]\n 95%|█████████▌| 21/22 [00:04<00:00, 4.93it/s]\n100%|██████████| 22/22 [00:04<00:00, 4.93it/s]\n100%|██████████| 22/22 [00:04<00:00, 4.92it/s]\n 0%| | 0/3 [00:00<?, ?it/s]\n 33%|███▎ | 1/3 [00:00<00:00, 6.20it/s]\n 67%|██████▋ | 2/3 [00:00<00:00, 6.16it/s]\n100%|██████████| 3/3 [00:00<00:00, 6.14it/s]\n100%|██████████| 3/3 [00:00<00:00, 6.15it/s]", "metrics": { "predict_time": 6.698185, "total_time": 12.175861 }, "output": [ "https://replicate.delivery/pbxt/L0395FRALJqjL9jtfbflZyLgAWlDgK6O2VvMjKFNTwcYekHkA/out-0.png" ], "started_at": "2023-12-19T10:50:58.629566Z", "status": "succeeded", "urls": { "get": "https://api.replicate.com/v1/predictions/ieqzn5dbk62d245vienpxgh5z4", "cancel": "https://api.replicate.com/v1/predictions/ieqzn5dbk62d245vienpxgh5z4/cancel" }, "version": "bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc" }
Prediction
fofr/sdxl-santa-hat:bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc
- ID: v76yxn3bdehfj27o5crlso6z2u
- Status: Succeeded
- Source: Web
- Hardware: A40 (Large)

Input
- width: 768
- height: 768
- prompt: Batman wearing a TOK santa hat, superhero
- refine: expert_ensemble_refiner
- scheduler: K_EULER
- lora_scale: 0.6
- num_outputs: 1
- guidance_scale: 7.5
- apply_watermark: false
- high_noise_frac: 0.9
- negative_prompt: ugly, distorted, broken, dog, cat, pet, animal
- prompt_strength: 0.8
- num_inference_steps: 25
{ "width": 768, "height": 768, "prompt": "Batman wearing a TOK santa hat, superhero", "refine": "expert_ensemble_refiner", "scheduler": "K_EULER", "lora_scale": 0.6, "num_outputs": 1, "guidance_scale": 7.5, "apply_watermark": false, "high_noise_frac": 0.9, "negative_prompt": "ugly, distorted, broken, dog, cat, pet, animal", "prompt_strength": 0.8, "num_inference_steps": 25 }
Install Replicate’s Node.js client library:

npm install replicate
Import and set up the client:

import Replicate from "replicate";

const replicate = new Replicate({
  auth: process.env.REPLICATE_API_TOKEN,
});
Run fofr/sdxl-santa-hat using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
import fs from "node:fs/promises";

const output = await replicate.run(
  "fofr/sdxl-santa-hat:bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc",
  {
    input: {
      width: 768,
      height: 768,
      prompt: "Batman wearing a TOK santa hat, superhero",
      refine: "expert_ensemble_refiner",
      scheduler: "K_EULER",
      lora_scale: 0.6,
      num_outputs: 1,
      guidance_scale: 7.5,
      apply_watermark: false,
      high_noise_frac: 0.9,
      negative_prompt: "ugly, distorted, broken, dog, cat, pet, animal",
      prompt_strength: 0.8,
      num_inference_steps: 25
    }
  }
);

// To access the file URL:
console.log(output[0].url()); //=> "http://example.com"

// To write the file to disk:
await fs.writeFile("my-image.png", output[0]);
To learn more, take a look at the guide on getting started with Node.js.
Install Replicate’s Python client library:

pip install replicate
Import the client:

import replicate
Run fofr/sdxl-santa-hat using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
output = replicate.run(
    "fofr/sdxl-santa-hat:bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc",
    input={
        "width": 768,
        "height": 768,
        "prompt": "Batman wearing a TOK santa hat, superhero",
        "refine": "expert_ensemble_refiner",
        "scheduler": "K_EULER",
        "lora_scale": 0.6,
        "num_outputs": 1,
        "guidance_scale": 7.5,
        "apply_watermark": False,
        "high_noise_frac": 0.9,
        "negative_prompt": "ugly, distorted, broken, dog, cat, pet, animal",
        "prompt_strength": 0.8,
        "num_inference_steps": 25
    }
)
print(output)
To learn more, take a look at the guide on getting started with Python.
Run fofr/sdxl-santa-hat using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
curl -s -X POST \
  -H "Authorization: Bearer $REPLICATE_API_TOKEN" \
  -H "Content-Type: application/json" \
  -H "Prefer: wait" \
  -d $'{
    "version": "fofr/sdxl-santa-hat:bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc",
    "input": {
      "width": 768,
      "height": 768,
      "prompt": "Batman wearing a TOK santa hat, superhero",
      "refine": "expert_ensemble_refiner",
      "scheduler": "K_EULER",
      "lora_scale": 0.6,
      "num_outputs": 1,
      "guidance_scale": 7.5,
      "apply_watermark": false,
      "high_noise_frac": 0.9,
      "negative_prompt": "ugly, distorted, broken, dog, cat, pet, animal",
      "prompt_strength": 0.8,
      "num_inference_steps": 25
    }
  }' \
  https://api.replicate.com/v1/predictions
To learn more, take a look at Replicate’s HTTP API reference docs.
Output
{ "completed_at": "2023-12-19T10:34:28.160150Z", "created_at": "2023-12-19T10:34:09.361853Z", "data_removed": false, "error": null, "id": "v76yxn3bdehfj27o5crlso6z2u", "input": { "width": 768, "height": 768, "prompt": "Batman wearing a TOK santa hat, superhero", "refine": "expert_ensemble_refiner", "scheduler": "K_EULER", "lora_scale": 0.6, "num_outputs": 1, "guidance_scale": 7.5, "apply_watermark": false, "high_noise_frac": 0.9, "negative_prompt": "ugly, distorted, broken, dog, cat, pet, animal", "prompt_strength": 0.8, "num_inference_steps": 25 }, "logs": "Using seed: 56138\nEnsuring enough disk space...\nFree disk space: 1434433486848\nDownloading weights: https://replicate.delivery/pbxt/Rcx1efXxm5jvdkNMevTAxwTjTZbm7QbCDz0v1ZI6Y375wjHkA/trained_model.tar\n2023-12-19T10:34:18Z | INFO | [ Initiating ] dest=/src/weights-cache/c99f168d8eabbe43 minimum_chunk_size=150M url=https://replicate.delivery/pbxt/Rcx1efXxm5jvdkNMevTAxwTjTZbm7QbCDz0v1ZI6Y375wjHkA/trained_model.tar\n2023-12-19T10:34:19Z | INFO | [ Complete ] dest=/src/weights-cache/c99f168d8eabbe43 size=\"186 MB\" total_elapsed=0.908s url=https://replicate.delivery/pbxt/Rcx1efXxm5jvdkNMevTAxwTjTZbm7QbCDz0v1ZI6Y375wjHkA/trained_model.tar\nb''\nDownloaded weights in 1.0353984832763672 seconds\nLoading fine-tuned model\nDoes not have Unet. assume we are using LoRA\nLoading Unet LoRA\nPrompt: Batman wearing a <s0><s1> santa hat, superhero\ntxt2img mode\n 0%| | 0/22 [00:00<?, ?it/s]/usr/local/lib/python3.9/site-packages/torch/nn/modules/conv.py:459: UserWarning: Applied workaround for CuDNN issue, install nvrtc.so (Triggered internally at ../aten/src/ATen/native/cudnn/Conv_v8.cpp:80.)\nreturn F.conv2d(input, weight, bias, self.stride,\n 5%|▍ | 1/22 [00:01<00:35, 1.67s/it]\n 9%|▉ | 2/22 [00:01<00:15, 1.28it/s]\n 14%|█▎ | 3/22 [00:01<00:09, 2.00it/s]\n 18%|█▊ | 4/22 [00:02<00:06, 2.72it/s]\n 23%|██▎ | 5/22 [00:02<00:04, 3.41it/s]\n 27%|██▋ | 6/22 [00:02<00:03, 4.01it/s]\n 32%|███▏ | 7/22 [00:02<00:03, 4.53it/s]\n 36%|███▋ | 8/22 [00:02<00:02, 4.94it/s]\n 41%|████ | 9/22 [00:02<00:02, 5.25it/s]\n 45%|████▌ | 10/22 [00:03<00:02, 5.50it/s]\n 50%|█████ | 11/22 [00:03<00:01, 5.67it/s]\n 55%|█████▍ | 12/22 [00:03<00:01, 5.81it/s]\n 59%|█████▉ | 13/22 [00:03<00:01, 5.91it/s]\n 64%|██████▎ | 14/22 [00:03<00:01, 5.98it/s]\n 68%|██████▊ | 15/22 [00:03<00:01, 6.02it/s]\n 73%|███████▎ | 16/22 [00:04<00:00, 6.05it/s]\n 77%|███████▋ | 17/22 [00:04<00:00, 6.07it/s]\n 82%|████████▏ | 18/22 [00:04<00:00, 6.09it/s]\n 86%|████████▋ | 19/22 [00:04<00:00, 6.10it/s]\n 91%|█████████ | 20/22 [00:04<00:00, 6.11it/s]\n 95%|█████████▌| 21/22 [00:04<00:00, 6.11it/s]\n100%|██████████| 22/22 [00:05<00:00, 6.12it/s]\n100%|██████████| 22/22 [00:05<00:00, 4.32it/s]\n 0%| | 0/3 [00:00<?, ?it/s]\n 33%|███▎ | 1/3 [00:00<00:00, 5.67it/s]\n 67%|██████▋ | 2/3 [00:00<00:00, 6.67it/s]\n100%|██████████| 3/3 [00:00<00:00, 7.08it/s]\n100%|██████████| 3/3 [00:00<00:00, 6.84it/s]", "metrics": { "predict_time": 10.067844, "total_time": 18.798297 }, "output": [ "https://replicate.delivery/pbxt/jLb5aYls6aIGKBrwryFG8KrbEsm4e6S6wcP0V3P2aevzOyDSA/out-0.png" ], "started_at": "2023-12-19T10:34:18.092306Z", "status": "succeeded", "urls": { "get": "https://api.replicate.com/v1/predictions/v76yxn3bdehfj27o5crlso6z2u", "cancel": "https://api.replicate.com/v1/predictions/v76yxn3bdehfj27o5crlso6z2u/cancel" }, "version": "bbb9acdbaeb0ea3ffa52a2102ec067c3329de677e17d6638e8a74243363f2fdc" }
Want to make some of these yourself?
Run this model