Prediction
samueltof/sdxl-gamo:ab89b79e55e45f45d903449d23c17ca67420e0d5ea2defce45409dd12951c537
ID: hhydaytbcn3gk4oaso654er4s4
Status: Succeeded
Source: Web
Hardware: A40 (Large)
Input
- width: 1024
- height: 1024
- prompt: AI in the style of TOK. Two people discussing about the future of AI
- refine: expert_ensemble_refiner
- scheduler: K_EULER
- lora_scale: 0.6
- num_outputs: 1
- refine_steps: null
- guidance_scale: 7.5
- apply_watermark: true
- high_noise_frac: 0.85
- negative_prompt: ""
- prompt_strength: 0.8
- num_inference_steps: 50
{ "width": 1024, "height": 1024, "prompt": "AI in the style of TOK. Two people discussing about the future of AI", "refine": "expert_ensemble_refiner", "scheduler": "K_EULER", "lora_scale": 0.6, "num_outputs": 1, "refine_steps": null, "guidance_scale": 7.5, "apply_watermark": true, "high_noise_frac": 0.85, "negative_prompt": "", "prompt_strength": 0.8, "num_inference_steps": 50 }
Install Replicate’s Node.js client library:

npm install replicate
Import and set up the client:

import Replicate from "replicate";
import fs from "node:fs";

const replicate = new Replicate({
  auth: process.env.REPLICATE_API_TOKEN,
});
Run samueltof/sdxl-gamo using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
const output = await replicate.run(
  "samueltof/sdxl-gamo:ab89b79e55e45f45d903449d23c17ca67420e0d5ea2defce45409dd12951c537",
  {
    input: {
      width: 1024,
      height: 1024,
      prompt: "AI in the style of TOK. Two people discussing about the future of AI",
      refine: "expert_ensemble_refiner",
      scheduler: "K_EULER",
      lora_scale: 0.6,
      num_outputs: 1,
      guidance_scale: 7.5,
      apply_watermark: true,
      high_noise_frac: 0.85,
      negative_prompt: "",
      prompt_strength: 0.8,
      num_inference_steps: 50
    }
  }
);

// To access the file URL:
console.log(output[0].url());
//=> "http://example.com"

// To write the file to disk (fs.promises avoids the callback that fs.writeFile requires):
await fs.promises.writeFile("my-image.png", output[0]);
To learn more, take a look at the guide on getting started with Node.js.
Install Replicate’s Python client library:

pip install replicate
Import the client:

import replicate
Run samueltof/sdxl-gamo using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
output = replicate.run(
    "samueltof/sdxl-gamo:ab89b79e55e45f45d903449d23c17ca67420e0d5ea2defce45409dd12951c537",
    input={
        "width": 1024,
        "height": 1024,
        "prompt": "AI in the style of TOK. Two people discussing about the future of AI",
        "refine": "expert_ensemble_refiner",
        "scheduler": "K_EULER",
        "lora_scale": 0.6,
        "num_outputs": 1,
        "guidance_scale": 7.5,
        "apply_watermark": True,
        "high_noise_frac": 0.85,
        "negative_prompt": "",
        "prompt_strength": 0.8,
        "num_inference_steps": 50
    }
)

# To access the file URL:
print(output[0].url())
#=> "http://example.com"

# To write the file to disk:
with open("my-image.png", "wb") as file:
    file.write(output[0].read())
To learn more, take a look at the guide on getting started with Python.
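The examples above request a single image, but num_outputs can be raised and replicate.run then returns one file per image. Below is a minimal sketch of saving every output; num_outputs=4 and the out-{i}.png filenames are illustrative choices, not values taken from the prediction shown above.

import replicate

# Sketch: request several images in one prediction and save each one.
output = replicate.run(
    "samueltof/sdxl-gamo:ab89b79e55e45f45d903449d23c17ca67420e0d5ea2defce45409dd12951c537",
    input={
        "prompt": "AI in the style of TOK. Two people discussing about the future of AI",
        "num_outputs": 4,  # illustrative; assumes the schema allows more than one output
    },
)

for i, image in enumerate(output):
    # Each item exposes .url() and .read(), as in the example above.
    with open(f"out-{i}.png", "wb") as f:
        f.write(image.read())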
Run samueltof/sdxl-gamo using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
curl -s -X POST \
  -H "Authorization: Bearer $REPLICATE_API_TOKEN" \
  -H "Content-Type: application/json" \
  -H "Prefer: wait" \
  -d $'{
    "version": "samueltof/sdxl-gamo:ab89b79e55e45f45d903449d23c17ca67420e0d5ea2defce45409dd12951c537",
    "input": {
      "width": 1024,
      "height": 1024,
      "prompt": "AI in the style of TOK. Two people discussing about the future of AI",
      "refine": "expert_ensemble_refiner",
      "scheduler": "K_EULER",
      "lora_scale": 0.6,
      "num_outputs": 1,
      "guidance_scale": 7.5,
      "apply_watermark": true,
      "high_noise_frac": 0.85,
      "negative_prompt": "",
      "prompt_strength": 0.8,
      "num_inference_steps": 50
    }
  }' \
  https://api.replicate.com/v1/predictions
To learn more, take a look at Replicate’s HTTP API reference docs.
Output
{ "completed_at": "2023-10-16T20:43:59.632503Z", "created_at": "2023-10-16T20:43:43.383484Z", "data_removed": false, "error": null, "id": "hhydaytbcn3gk4oaso654er4s4", "input": { "width": 1024, "height": 1024, "prompt": "AI in the style of TOK. Two people discussing about the future of AI", "refine": "expert_ensemble_refiner", "scheduler": "K_EULER", "lora_scale": 0.6, "num_outputs": 1, "refine_steps": null, "guidance_scale": 7.5, "apply_watermark": true, "high_noise_frac": 0.85, "negative_prompt": "", "prompt_strength": 0.8, "num_inference_steps": 50 }, "logs": "Using seed: 52483\nskipping loading .. weights already loaded\nPrompt: AI in the style of <s0><s1>. Two people discussing about the future of AI\ntxt2img mode\n 0%| | 0/42 [00:00<?, ?it/s]\n 2%|▏ | 1/42 [00:00<00:11, 3.66it/s]\n 5%|▍ | 2/42 [00:00<00:10, 3.64it/s]\n 7%|▋ | 3/42 [00:00<00:10, 3.64it/s]\n 10%|▉ | 4/42 [00:01<00:10, 3.65it/s]\n 12%|█▏ | 5/42 [00:01<00:10, 3.65it/s]\n 14%|█▍ | 6/42 [00:01<00:09, 3.64it/s]\n 17%|█▋ | 7/42 [00:01<00:09, 3.65it/s]\n 19%|█▉ | 8/42 [00:02<00:09, 3.65it/s]\n 21%|██▏ | 9/42 [00:02<00:09, 3.65it/s]\n 24%|██▍ | 10/42 [00:02<00:08, 3.64it/s]\n 26%|██▌ | 11/42 [00:03<00:08, 3.64it/s]\n 29%|██▊ | 12/42 [00:03<00:08, 3.64it/s]\n 31%|███ | 13/42 [00:03<00:07, 3.64it/s]\n 33%|███▎ | 14/42 [00:03<00:07, 3.64it/s]\n 36%|███▌ | 15/42 [00:04<00:07, 3.64it/s]\n 38%|███▊ | 16/42 [00:04<00:07, 3.64it/s]\n 40%|████ | 17/42 [00:04<00:06, 3.64it/s]\n 43%|████▎ | 18/42 [00:04<00:06, 3.63it/s]\n 45%|████▌ | 19/42 [00:05<00:06, 3.63it/s]\n 48%|████▊ | 20/42 [00:05<00:06, 3.64it/s]\n 50%|█████ | 21/42 [00:05<00:05, 3.64it/s]\n 52%|█████▏ | 22/42 [00:06<00:05, 3.64it/s]\n 55%|█████▍ | 23/42 [00:06<00:05, 3.64it/s]\n 57%|█████▋ | 24/42 [00:06<00:04, 3.64it/s]\n 60%|█████▉ | 25/42 [00:06<00:04, 3.64it/s]\n 62%|██████▏ | 26/42 [00:07<00:04, 3.63it/s]\n 64%|██████▍ | 27/42 [00:07<00:04, 3.63it/s]\n 67%|██████▋ | 28/42 [00:07<00:03, 3.64it/s]\n 69%|██████▉ | 29/42 [00:07<00:03, 3.64it/s]\n 71%|███████▏ | 30/42 [00:08<00:03, 3.63it/s]\n 74%|███████▍ | 31/42 [00:08<00:03, 3.63it/s]\n 76%|███████▌ | 32/42 [00:08<00:02, 3.63it/s]\n 79%|███████▊ | 33/42 [00:09<00:02, 3.63it/s]\n 81%|████████ | 34/42 [00:09<00:02, 3.63it/s]\n 83%|████████▎ | 35/42 [00:09<00:01, 3.63it/s]\n 86%|████████▌ | 36/42 [00:09<00:01, 3.63it/s]\n 88%|████████▊ | 37/42 [00:10<00:01, 3.63it/s]\n 90%|█████████ | 38/42 [00:10<00:01, 3.63it/s]\n 93%|█████████▎| 39/42 [00:10<00:00, 3.62it/s]\n 95%|█████████▌| 40/42 [00:11<00:00, 3.62it/s]\n 98%|█████████▊| 41/42 [00:11<00:00, 3.62it/s]\n100%|██████████| 42/42 [00:11<00:00, 3.63it/s]\n100%|██████████| 42/42 [00:11<00:00, 3.64it/s]\n 0%| | 0/8 [00:00<?, ?it/s]\n 12%|█▎ | 1/8 [00:00<00:01, 4.20it/s]\n 25%|██▌ | 2/8 [00:00<00:01, 4.19it/s]\n 38%|███▊ | 3/8 [00:00<00:01, 4.19it/s]\n 50%|█████ | 4/8 [00:00<00:00, 4.17it/s]\n 62%|██████▎ | 5/8 [00:01<00:00, 4.17it/s]\n 75%|███████▌ | 6/8 [00:01<00:00, 4.17it/s]\n 88%|████████▊ | 7/8 [00:01<00:00, 4.16it/s]\n100%|██████████| 8/8 [00:01<00:00, 4.16it/s]\n100%|██████████| 8/8 [00:01<00:00, 4.17it/s]", "metrics": { "predict_time": 15.072435, "total_time": 16.249019 }, "output": [ "https://replicate.delivery/pbxt/YeazGPSOFvTxOqfUeQLxuqZei9R6SJoCAnI2Oayecu58Rp2NC/out-0.png" ], "started_at": "2023-10-16T20:43:44.560068Z", "status": "succeeded", "urls": { "get": "https://api.replicate.com/v1/predictions/hhydaytbcn3gk4oaso654er4s4", "cancel": "https://api.replicate.com/v1/predictions/hhydaytbcn3gk4oaso654er4s4/cancel" }, "version": 
"ab89b79e55e45f45d903449d23c17ca67420e0d5ea2defce45409dd12951c537" }
Prediction
samueltof/sdxl-gamo:ab89b79e55e45f45d903449d23c17ca67420e0d5ea2defce45409dd12951c537
ID: poj6wldburjl7f4aw2axinm3fu
Status: Succeeded
Source: Web
Hardware: A40 (Large)
Input
- width: 1024
- height: 1024
- prompt: AI in the style of TOK. Two people discussing about the future of AI, detailed, cinematic
- refine: no_refiner
- scheduler: K_EULER
- lora_scale: 0.6
- num_outputs: 1
- refine_steps: null
- guidance_scale: 7.5
- apply_watermark: true
- high_noise_frac: 0.85
- negative_prompt: noisy
- prompt_strength: 0.8
- num_inference_steps: 50
{ "width": 1024, "height": 1024, "prompt": "AI in the style of TOK. Two people discussing about the future of AI, detailed, cinematic", "refine": "no_refiner", "scheduler": "K_EULER", "lora_scale": 0.6, "num_outputs": 1, "refine_steps": null, "guidance_scale": 7.5, "apply_watermark": true, "high_noise_frac": 0.85, "negative_prompt": "noisy", "prompt_strength": 0.8, "num_inference_steps": 50 }
Install Replicate’s Node.js client library:

npm install replicate
Import and set up the client:

import Replicate from "replicate";
import fs from "node:fs";

const replicate = new Replicate({
  auth: process.env.REPLICATE_API_TOKEN,
});
Run samueltof/sdxl-gamo using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
const output = await replicate.run(
  "samueltof/sdxl-gamo:ab89b79e55e45f45d903449d23c17ca67420e0d5ea2defce45409dd12951c537",
  {
    input: {
      width: 1024,
      height: 1024,
      prompt: "AI in the style of TOK. Two people discussing about the future of AI, detailed, cinematic",
      refine: "no_refiner",
      scheduler: "K_EULER",
      lora_scale: 0.6,
      num_outputs: 1,
      guidance_scale: 7.5,
      apply_watermark: true,
      high_noise_frac: 0.85,
      negative_prompt: "noisy",
      prompt_strength: 0.8,
      num_inference_steps: 50
    }
  }
);

// To access the file URL:
console.log(output[0].url());
//=> "http://example.com"

// To write the file to disk (fs.promises avoids the callback that fs.writeFile requires):
await fs.promises.writeFile("my-image.png", output[0]);
To learn more, take a look at the guide on getting started with Node.js.
Install Replicate’s Python client library:

pip install replicate
Import the client:

import replicate
Run samueltof/sdxl-gamo using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
output = replicate.run(
    "samueltof/sdxl-gamo:ab89b79e55e45f45d903449d23c17ca67420e0d5ea2defce45409dd12951c537",
    input={
        "width": 1024,
        "height": 1024,
        "prompt": "AI in the style of TOK. Two people discussing about the future of AI, detailed, cinematic",
        "refine": "no_refiner",
        "scheduler": "K_EULER",
        "lora_scale": 0.6,
        "num_outputs": 1,
        "guidance_scale": 7.5,
        "apply_watermark": True,
        "high_noise_frac": 0.85,
        "negative_prompt": "noisy",
        "prompt_strength": 0.8,
        "num_inference_steps": 50
    }
)

# To access the file URL:
print(output[0].url())
#=> "http://example.com"

# To write the file to disk:
with open("my-image.png", "wb") as file:
    file.write(output[0].read())
To learn more, take a look at the guide on getting started with Python.
Run samueltof/sdxl-gamo using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
curl -s -X POST \
  -H "Authorization: Bearer $REPLICATE_API_TOKEN" \
  -H "Content-Type: application/json" \
  -H "Prefer: wait" \
  -d $'{
    "version": "samueltof/sdxl-gamo:ab89b79e55e45f45d903449d23c17ca67420e0d5ea2defce45409dd12951c537",
    "input": {
      "width": 1024,
      "height": 1024,
      "prompt": "AI in the style of TOK. Two people discussing about the future of AI, detailed, cinematic",
      "refine": "no_refiner",
      "scheduler": "K_EULER",
      "lora_scale": 0.6,
      "num_outputs": 1,
      "guidance_scale": 7.5,
      "apply_watermark": true,
      "high_noise_frac": 0.85,
      "negative_prompt": "noisy",
      "prompt_strength": 0.8,
      "num_inference_steps": 50
    }
  }' \
  https://api.replicate.com/v1/predictions
To learn more, take a look at Replicate’s HTTP API reference docs.
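The curl call above sends the Prefer: wait header, so the request stays open until the prediction finishes (or the wait window runs out) and the full prediction JSON comes back in one response. If you would rather create the prediction and poll it yourself, the Python client wraps the same predictions endpoint. A rough sketch, assuming replicate.predictions.create() accepts the bare version id and Prediction.wait() is available, as in recent releases of the client:

import replicate

# Sketch: create the prediction without blocking on the result,
# then poll until it reaches a terminal state.
prediction = replicate.predictions.create(
    version="ab89b79e55e45f45d903449d23c17ca67420e0d5ea2defce45409dd12951c537",
    input={
        "prompt": "AI in the style of TOK. Two people discussing about the future of AI, detailed, cinematic",
        "refine": "no_refiner",
        "negative_prompt": "noisy",
    },
)

prediction.wait()            # polls the prediction's get URL until it finishes
print(prediction.status)     # "succeeded", "failed", or "canceled"
print(prediction.output)     # list of output URLs, as in the JSON below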
Output
{ "completed_at": "2023-10-16T20:46:29.265836Z", "created_at": "2023-10-16T20:46:13.424663Z", "data_removed": false, "error": null, "id": "poj6wldburjl7f4aw2axinm3fu", "input": { "width": 1024, "height": 1024, "prompt": "AI in the style of TOK. Two people discussing about the future of AI, detailed, cinematic", "refine": "no_refiner", "scheduler": "K_EULER", "lora_scale": 0.6, "num_outputs": 1, "refine_steps": null, "guidance_scale": 7.5, "apply_watermark": true, "high_noise_frac": 0.85, "negative_prompt": "noisy", "prompt_strength": 0.8, "num_inference_steps": 50 }, "logs": "Using seed: 60784\nskipping loading .. weights already loaded\nPrompt: AI in the style of <s0><s1>. Two people discussing about the future of AI, detailed, cinematic\ntxt2img mode\n 0%| | 0/50 [00:00<?, ?it/s]\n 2%|▏ | 1/50 [00:00<00:13, 3.66it/s]\n 4%|▍ | 2/50 [00:00<00:13, 3.66it/s]\n 6%|▌ | 3/50 [00:00<00:12, 3.66it/s]\n 8%|▊ | 4/50 [00:01<00:12, 3.66it/s]\n 10%|█ | 5/50 [00:01<00:12, 3.66it/s]\n 12%|█▏ | 6/50 [00:01<00:12, 3.66it/s]\n 14%|█▍ | 7/50 [00:01<00:11, 3.66it/s]\n 16%|█▌ | 8/50 [00:02<00:11, 3.65it/s]\n 18%|█▊ | 9/50 [00:02<00:11, 3.65it/s]\n 20%|██ | 10/50 [00:02<00:10, 3.65it/s]\n 22%|██▏ | 11/50 [00:03<00:10, 3.65it/s]\n 24%|██▍ | 12/50 [00:03<00:10, 3.65it/s]\n 26%|██▌ | 13/50 [00:03<00:10, 3.65it/s]\n 28%|██▊ | 14/50 [00:03<00:09, 3.65it/s]\n 30%|███ | 15/50 [00:04<00:09, 3.64it/s]\n 32%|███▏ | 16/50 [00:04<00:09, 3.64it/s]\n 34%|███▍ | 17/50 [00:04<00:09, 3.64it/s]\n 36%|███▌ | 18/50 [00:04<00:08, 3.64it/s]\n 38%|███▊ | 19/50 [00:05<00:08, 3.64it/s]\n 40%|████ | 20/50 [00:05<00:08, 3.64it/s]\n 42%|████▏ | 21/50 [00:05<00:07, 3.64it/s]\n 44%|████▍ | 22/50 [00:06<00:07, 3.64it/s]\n 46%|████▌ | 23/50 [00:06<00:07, 3.63it/s]\n 48%|████▊ | 24/50 [00:06<00:07, 3.63it/s]\n 50%|█████ | 25/50 [00:06<00:06, 3.64it/s]\n 52%|█████▏ | 26/50 [00:07<00:06, 3.64it/s]\n 54%|█████▍ | 27/50 [00:07<00:06, 3.64it/s]\n 56%|█████▌ | 28/50 [00:07<00:06, 3.63it/s]\n 58%|█████▊ | 29/50 [00:07<00:05, 3.63it/s]\n 60%|██████ | 30/50 [00:08<00:05, 3.63it/s]\n 62%|██████▏ | 31/50 [00:08<00:05, 3.63it/s]\n 64%|██████▍ | 32/50 [00:08<00:04, 3.63it/s]\n 66%|██████▌ | 33/50 [00:09<00:04, 3.63it/s]\n 68%|██████▊ | 34/50 [00:09<00:04, 3.63it/s]\n 70%|███████ | 35/50 [00:09<00:04, 3.63it/s]\n 72%|███████▏ | 36/50 [00:09<00:03, 3.63it/s]\n 74%|███████▍ | 37/50 [00:10<00:03, 3.63it/s]\n 76%|███████▌ | 38/50 [00:10<00:03, 3.63it/s]\n 78%|███████▊ | 39/50 [00:10<00:03, 3.63it/s]\n 80%|████████ | 40/50 [00:10<00:02, 3.63it/s]\n 82%|████████▏ | 41/50 [00:11<00:02, 3.63it/s]\n 84%|████████▍ | 42/50 [00:11<00:02, 3.63it/s]\n 86%|████████▌ | 43/50 [00:11<00:01, 3.63it/s]\n 88%|████████▊ | 44/50 [00:12<00:01, 3.63it/s]\n 90%|█████████ | 45/50 [00:12<00:01, 3.63it/s]\n 92%|█████████▏| 46/50 [00:12<00:01, 3.63it/s]\n 94%|█████████▍| 47/50 [00:12<00:00, 3.63it/s]\n 96%|█████████▌| 48/50 [00:13<00:00, 3.63it/s]\n 98%|█████████▊| 49/50 [00:13<00:00, 3.63it/s]\n100%|██████████| 50/50 [00:13<00:00, 3.63it/s]\n100%|██████████| 50/50 [00:13<00:00, 3.64it/s]", "metrics": { "predict_time": 15.313827, "total_time": 15.841173 }, "output": [ "https://replicate.delivery/pbxt/xJfQ6l4AF0yDZamfdHLpa7cVUYfYwdC5JDcfTn9OJHgQyU7GB/out-0.png" ], "started_at": "2023-10-16T20:46:13.952009Z", "status": "succeeded", "urls": { "get": "https://api.replicate.com/v1/predictions/poj6wldburjl7f4aw2axinm3fu", "cancel": "https://api.replicate.com/v1/predictions/poj6wldburjl7f4aw2axinm3fu/cancel" }, "version": "ab89b79e55e45f45d903449d23c17ca67420e0d5ea2defce45409dd12951c537" }
Prediction
samueltof/sdxl-gamo:ab89b79e55e45f45d903449d23c17ca67420e0d5ea2defce45409dd12951c537
ID: toip7nlbtjwqzufqwqsnrddy5i
Status: Succeeded
Source: Web
Hardware: A40 (Large)
Input
- width: 1024
- height: 1024
- prompt: In the style of TOK. Two people discussing about the future of AI, professional, high quality, cinematic
- refine: expert_ensemble_refiner
- scheduler: K_EULER
- lora_scale: 0.6
- num_outputs: 1
- refine_steps: null
- guidance_scale: 7.5
- apply_watermark: true
- high_noise_frac: 0.85
- negative_prompt: ugly, blurry
- prompt_strength: 0.8
- num_inference_steps: 50
{ "width": 1024, "height": 1024, "prompt": "In the style of TOK. Two people discussing about the future of AI, professional, high quality, cinematic", "refine": "expert_ensemble_refiner", "scheduler": "K_EULER", "lora_scale": 0.6, "num_outputs": 1, "refine_steps": null, "guidance_scale": 7.5, "apply_watermark": true, "high_noise_frac": 0.85, "negative_prompt": "ugly, blurry", "prompt_strength": 0.8, "num_inference_steps": 50 }
Install Replicate’s Node.js client library:

npm install replicate
Import and set up the client:

import Replicate from "replicate";
import fs from "node:fs";

const replicate = new Replicate({
  auth: process.env.REPLICATE_API_TOKEN,
});
Run samueltof/sdxl-gamo using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
const output = await replicate.run(
  "samueltof/sdxl-gamo:ab89b79e55e45f45d903449d23c17ca67420e0d5ea2defce45409dd12951c537",
  {
    input: {
      width: 1024,
      height: 1024,
      prompt: "In the style of TOK. Two people discussing about the future of AI, professional, high quality, cinematic",
      refine: "expert_ensemble_refiner",
      scheduler: "K_EULER",
      lora_scale: 0.6,
      num_outputs: 1,
      guidance_scale: 7.5,
      apply_watermark: true,
      high_noise_frac: 0.85,
      negative_prompt: "ugly, blurry",
      prompt_strength: 0.8,
      num_inference_steps: 50
    }
  }
);

// To access the file URL:
console.log(output[0].url());
//=> "http://example.com"

// To write the file to disk (fs.promises avoids the callback that fs.writeFile requires):
await fs.promises.writeFile("my-image.png", output[0]);
To learn more, take a look at the guide on getting started with Node.js.
Install Replicate’s Python client library:

pip install replicate
Import the client:

import replicate
Run samueltof/sdxl-gamo using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
output = replicate.run(
    "samueltof/sdxl-gamo:ab89b79e55e45f45d903449d23c17ca67420e0d5ea2defce45409dd12951c537",
    input={
        "width": 1024,
        "height": 1024,
        "prompt": "In the style of TOK. Two people discussing about the future of AI, professional, high quality, cinematic",
        "refine": "expert_ensemble_refiner",
        "scheduler": "K_EULER",
        "lora_scale": 0.6,
        "num_outputs": 1,
        "guidance_scale": 7.5,
        "apply_watermark": True,
        "high_noise_frac": 0.85,
        "negative_prompt": "ugly, blurry",
        "prompt_strength": 0.8,
        "num_inference_steps": 50
    }
)

# To access the file URL:
print(output[0].url())
#=> "http://example.com"

# To write the file to disk:
with open("my-image.png", "wb") as file:
    file.write(output[0].read())
To learn more, take a look at the guide on getting started with Python.
Run samueltof/sdxl-gamo using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
curl -s -X POST \
  -H "Authorization: Bearer $REPLICATE_API_TOKEN" \
  -H "Content-Type: application/json" \
  -H "Prefer: wait" \
  -d $'{
    "version": "samueltof/sdxl-gamo:ab89b79e55e45f45d903449d23c17ca67420e0d5ea2defce45409dd12951c537",
    "input": {
      "width": 1024,
      "height": 1024,
      "prompt": "In the style of TOK. Two people discussing about the future of AI, professional, high quality, cinematic",
      "refine": "expert_ensemble_refiner",
      "scheduler": "K_EULER",
      "lora_scale": 0.6,
      "num_outputs": 1,
      "guidance_scale": 7.5,
      "apply_watermark": true,
      "high_noise_frac": 0.85,
      "negative_prompt": "ugly, blurry",
      "prompt_strength": 0.8,
      "num_inference_steps": 50
    }
  }' \
  https://api.replicate.com/v1/predictions
To learn more, take a look at Replicate’s HTTP API reference docs.
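Unlike the client libraries, the raw HTTP response returns the generated image as a URL in the output array, so downloading it is up to you. A small standard-library sketch; reading from a prediction.json file is illustrative (for example, the body of the curl call above saved to disk):

import json
import urllib.request

# Sketch: pull the first output URL out of a saved prediction response
# (such as the JSON shown under "Output" below) and download it.
with open("prediction.json") as f:   # illustrative: the curl response saved to disk
    prediction = json.load(f)

image_url = prediction["output"][0]
urllib.request.urlretrieve(image_url, "out-0.png")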
Output
{ "completed_at": "2023-10-16T22:26:01.262067Z", "created_at": "2023-10-16T22:25:43.557178Z", "data_removed": false, "error": null, "id": "toip7nlbtjwqzufqwqsnrddy5i", "input": { "width": 1024, "height": 1024, "prompt": "In the style of TOK. Two people discussing about the future of AI, professional, high quality, cinematic", "refine": "expert_ensemble_refiner", "scheduler": "K_EULER", "lora_scale": 0.6, "num_outputs": 1, "refine_steps": null, "guidance_scale": 7.5, "apply_watermark": true, "high_noise_frac": 0.85, "negative_prompt": "ugly, blurry", "prompt_strength": 0.8, "num_inference_steps": 50 }, "logs": "Using seed: 39541\nskipping loading .. weights already loaded\nPrompt: In the style of <s0><s1>. Two people discussing about the future of AI, professional, high quality, cinematic\ntxt2img mode\n 0%| | 0/42 [00:00<?, ?it/s]\n 2%|▏ | 1/42 [00:00<00:11, 3.70it/s]\n 5%|▍ | 2/42 [00:00<00:10, 3.69it/s]\n 7%|▋ | 3/42 [00:00<00:10, 3.69it/s]\n 10%|▉ | 4/42 [00:01<00:10, 3.69it/s]\n 12%|█▏ | 5/42 [00:01<00:10, 3.68it/s]\n 14%|█▍ | 6/42 [00:01<00:09, 3.68it/s]\n 17%|█▋ | 7/42 [00:01<00:09, 3.68it/s]\n 19%|█▉ | 8/42 [00:02<00:09, 3.67it/s]\n 21%|██▏ | 9/42 [00:02<00:08, 3.67it/s]\n 24%|██▍ | 10/42 [00:02<00:08, 3.67it/s]\n 26%|██▌ | 11/42 [00:02<00:08, 3.67it/s]\n 29%|██▊ | 12/42 [00:03<00:08, 3.67it/s]\n 31%|███ | 13/42 [00:03<00:07, 3.67it/s]\n 33%|███▎ | 14/42 [00:03<00:07, 3.67it/s]\n 36%|███▌ | 15/42 [00:04<00:07, 3.67it/s]\n 38%|███▊ | 16/42 [00:04<00:07, 3.67it/s]\n 40%|████ | 17/42 [00:04<00:06, 3.67it/s]\n 43%|████▎ | 18/42 [00:04<00:06, 3.67it/s]\n 45%|████▌ | 19/42 [00:05<00:06, 3.66it/s]\n 48%|████▊ | 20/42 [00:05<00:06, 3.66it/s]\n 50%|█████ | 21/42 [00:05<00:05, 3.66it/s]\n 52%|█████▏ | 22/42 [00:05<00:05, 3.66it/s]\n 55%|█████▍ | 23/42 [00:06<00:05, 3.66it/s]\n 57%|█████▋ | 24/42 [00:06<00:04, 3.66it/s]\n 60%|█████▉ | 25/42 [00:06<00:04, 3.66it/s]\n 62%|██████▏ | 26/42 [00:07<00:04, 3.66it/s]\n 64%|██████▍ | 27/42 [00:07<00:04, 3.66it/s]\n 67%|██████▋ | 28/42 [00:07<00:03, 3.66it/s]\n 69%|██████▉ | 29/42 [00:07<00:03, 3.67it/s]\n 71%|███████▏ | 30/42 [00:08<00:03, 3.68it/s]\n 74%|███████▍ | 31/42 [00:08<00:02, 3.68it/s]\n 76%|███████▌ | 32/42 [00:08<00:02, 3.68it/s]\n 79%|███████▊ | 33/42 [00:08<00:02, 3.68it/s]\n 81%|████████ | 34/42 [00:09<00:02, 3.68it/s]\n 83%|████████▎ | 35/42 [00:09<00:01, 3.68it/s]\n 86%|████████▌ | 36/42 [00:09<00:01, 3.68it/s]\n 88%|████████▊ | 37/42 [00:10<00:01, 3.68it/s]\n 90%|█████████ | 38/42 [00:10<00:01, 3.68it/s]\n 93%|█████████▎| 39/42 [00:10<00:00, 3.68it/s]\n 95%|█████████▌| 40/42 [00:10<00:00, 3.68it/s]\n 98%|█████████▊| 41/42 [00:11<00:00, 3.68it/s]\n100%|██████████| 42/42 [00:11<00:00, 3.68it/s]\n100%|██████████| 42/42 [00:11<00:00, 3.67it/s]\n 0%| | 0/8 [00:00<?, ?it/s]\n 12%|█▎ | 1/8 [00:00<00:01, 4.27it/s]\n 25%|██▌ | 2/8 [00:00<00:01, 4.26it/s]\n 38%|███▊ | 3/8 [00:00<00:01, 4.25it/s]\n 50%|█████ | 4/8 [00:00<00:00, 4.25it/s]\n 62%|██████▎ | 5/8 [00:01<00:00, 4.24it/s]\n 75%|███████▌ | 6/8 [00:01<00:00, 4.24it/s]\n 88%|████████▊ | 7/8 [00:01<00:00, 4.24it/s]\n100%|██████████| 8/8 [00:01<00:00, 4.24it/s]\n100%|██████████| 8/8 [00:01<00:00, 4.24it/s]", "metrics": { "predict_time": 14.827135, "total_time": 17.704889 }, "output": [ "https://replicate.delivery/pbxt/XfJkktUArBSBaKeI8FoCAIZztRekga6zwT2nSBFWELkxTtdjA/out-0.png" ], "started_at": "2023-10-16T22:25:46.434932Z", "status": "succeeded", "urls": { "get": "https://api.replicate.com/v1/predictions/toip7nlbtjwqzufqwqsnrddy5i", "cancel": 
"https://api.replicate.com/v1/predictions/toip7nlbtjwqzufqwqsnrddy5i/cancel" }, "version": "ab89b79e55e45f45d903449d23c17ca67420e0d5ea2defce45409dd12951c537" }
Prediction
samueltof/sdxl-gamo:ab89b79e55e45f45d903449d23c17ca67420e0d5ea2defce45409dd12951c537
ID: lrimijdbk5zi26tz64rl7pctfi
Status: Succeeded
Source: Web
Hardware: A40 (Large)
Input
- width: 1024
- height: 1024
- prompt: In the style of TOK. A girl creating AI generated content, professional, high quality, cinematic
- refine: expert_ensemble_refiner
- scheduler: K_EULER
- lora_scale: 0.6
- num_outputs: 1
- refine_steps: null
- guidance_scale: 7.5
- apply_watermark: true
- high_noise_frac: 0.85
- negative_prompt: ugly, blurry
- prompt_strength: 0.8
- num_inference_steps: 50
{ "width": 1024, "height": 1024, "prompt": "In the style of TOK. A girl creating AI generated content, professional, high quality, cinematic", "refine": "expert_ensemble_refiner", "scheduler": "K_EULER", "lora_scale": 0.6, "num_outputs": 1, "refine_steps": null, "guidance_scale": 7.5, "apply_watermark": true, "high_noise_frac": 0.85, "negative_prompt": "ugly, blurry", "prompt_strength": 0.8, "num_inference_steps": 50 }
Install Replicate’s Node.js client library:

npm install replicate
Import and set up the client:

import Replicate from "replicate";
import fs from "node:fs";

const replicate = new Replicate({
  auth: process.env.REPLICATE_API_TOKEN,
});
Run samueltof/sdxl-gamo using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
const output = await replicate.run(
  "samueltof/sdxl-gamo:ab89b79e55e45f45d903449d23c17ca67420e0d5ea2defce45409dd12951c537",
  {
    input: {
      width: 1024,
      height: 1024,
      prompt: "In the style of TOK. A girl creating AI generated content, professional, high quality, cinematic",
      refine: "expert_ensemble_refiner",
      scheduler: "K_EULER",
      lora_scale: 0.6,
      num_outputs: 1,
      guidance_scale: 7.5,
      apply_watermark: true,
      high_noise_frac: 0.85,
      negative_prompt: "ugly, blurry",
      prompt_strength: 0.8,
      num_inference_steps: 50
    }
  }
);

// To access the file URL:
console.log(output[0].url());
//=> "http://example.com"

// To write the file to disk (fs.promises avoids the callback that fs.writeFile requires):
await fs.promises.writeFile("my-image.png", output[0]);
To learn more, take a look at the guide on getting started with Node.js.
Install Replicate’s Python client library:

pip install replicate
Import the client:

import replicate
Run samueltof/sdxl-gamo using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
output = replicate.run(
    "samueltof/sdxl-gamo:ab89b79e55e45f45d903449d23c17ca67420e0d5ea2defce45409dd12951c537",
    input={
        "width": 1024,
        "height": 1024,
        "prompt": "In the style of TOK. A girl creating AI generated content, professional, high quality, cinematic",
        "refine": "expert_ensemble_refiner",
        "scheduler": "K_EULER",
        "lora_scale": 0.6,
        "num_outputs": 1,
        "guidance_scale": 7.5,
        "apply_watermark": True,
        "high_noise_frac": 0.85,
        "negative_prompt": "ugly, blurry",
        "prompt_strength": 0.8,
        "num_inference_steps": 50
    }
)

# To access the file URL:
print(output[0].url())
#=> "http://example.com"

# To write the file to disk:
with open("my-image.png", "wb") as file:
    file.write(output[0].read())
To learn more, take a look at the guide on getting started with Python.
Run samueltof/sdxl-gamo using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
curl -s -X POST \
  -H "Authorization: Bearer $REPLICATE_API_TOKEN" \
  -H "Content-Type: application/json" \
  -H "Prefer: wait" \
  -d $'{
    "version": "samueltof/sdxl-gamo:ab89b79e55e45f45d903449d23c17ca67420e0d5ea2defce45409dd12951c537",
    "input": {
      "width": 1024,
      "height": 1024,
      "prompt": "In the style of TOK. A girl creating AI generated content, professional, high quality, cinematic",
      "refine": "expert_ensemble_refiner",
      "scheduler": "K_EULER",
      "lora_scale": 0.6,
      "num_outputs": 1,
      "guidance_scale": 7.5,
      "apply_watermark": true,
      "high_noise_frac": 0.85,
      "negative_prompt": "ugly, blurry",
      "prompt_strength": 0.8,
      "num_inference_steps": 50
    }
  }' \
  https://api.replicate.com/v1/predictions
To learn more, take a look at Replicate’s HTTP API reference docs.
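All of these examples assume the prediction succeeds. With the Python client, a failed prediction surfaces as an exception from replicate.run; a minimal sketch, assuming the client raises replicate.exceptions.ModelError in that case, as recent versions do:

import replicate
from replicate.exceptions import ModelError

try:
    output = replicate.run(
        "samueltof/sdxl-gamo:ab89b79e55e45f45d903449d23c17ca67420e0d5ea2defce45409dd12951c537",
        input={
            "prompt": "In the style of TOK. A girl creating AI generated content, professional, high quality, cinematic",
            "negative_prompt": "ugly, blurry",
        },
    )
except ModelError as err:
    # The prediction reached a "failed" state; the message comes from the model.
    print(f"Prediction failed: {err}")
else:
    with open("my-image.png", "wb") as f:
        f.write(output[0].read())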
Output
{ "completed_at": "2023-10-16T22:27:08.505685Z", "created_at": "2023-10-16T22:26:52.035109Z", "data_removed": false, "error": null, "id": "lrimijdbk5zi26tz64rl7pctfi", "input": { "width": 1024, "height": 1024, "prompt": "In the style of TOK. A girl creating AI generated content, professional, high quality, cinematic", "refine": "expert_ensemble_refiner", "scheduler": "K_EULER", "lora_scale": 0.6, "num_outputs": 1, "refine_steps": null, "guidance_scale": 7.5, "apply_watermark": true, "high_noise_frac": 0.85, "negative_prompt": "ugly, blurry", "prompt_strength": 0.8, "num_inference_steps": 50 }, "logs": "Using seed: 4226\nskipping loading .. weights already loaded\nPrompt: In the style of <s0><s1>. A girl creating AI generated content, professional, high quality, cinematic\ntxt2img mode\n 0%| | 0/42 [00:00<?, ?it/s]\n 2%|▏ | 1/42 [00:00<00:11, 3.71it/s]\n 5%|▍ | 2/42 [00:00<00:10, 3.70it/s]\n 7%|▋ | 3/42 [00:00<00:10, 3.69it/s]\n 10%|▉ | 4/42 [00:01<00:10, 3.68it/s]\n 12%|█▏ | 5/42 [00:01<00:10, 3.67it/s]\n 14%|█▍ | 6/42 [00:01<00:09, 3.67it/s]\n 17%|█▋ | 7/42 [00:01<00:09, 3.67it/s]\n 19%|█▉ | 8/42 [00:02<00:09, 3.67it/s]\n 21%|██▏ | 9/42 [00:02<00:08, 3.67it/s]\n 24%|██▍ | 10/42 [00:02<00:08, 3.67it/s]\n 26%|██▌ | 11/42 [00:02<00:08, 3.67it/s]\n 29%|██▊ | 12/42 [00:03<00:08, 3.66it/s]\n 31%|███ | 13/42 [00:03<00:07, 3.67it/s]\n 33%|███▎ | 14/42 [00:03<00:07, 3.66it/s]\n 36%|███▌ | 15/42 [00:04<00:07, 3.66it/s]\n 38%|███▊ | 16/42 [00:04<00:07, 3.66it/s]\n 40%|████ | 17/42 [00:04<00:06, 3.66it/s]\n 43%|████▎ | 18/42 [00:04<00:06, 3.66it/s]\n 45%|████▌ | 19/42 [00:05<00:06, 3.66it/s]\n 48%|████▊ | 20/42 [00:05<00:06, 3.66it/s]\n 50%|█████ | 21/42 [00:05<00:05, 3.66it/s]\n 52%|█████▏ | 22/42 [00:05<00:05, 3.67it/s]\n 55%|█████▍ | 23/42 [00:06<00:05, 3.67it/s]\n 57%|█████▋ | 24/42 [00:06<00:04, 3.67it/s]\n 60%|█████▉ | 25/42 [00:06<00:04, 3.68it/s]\n 62%|██████▏ | 26/42 [00:07<00:04, 3.68it/s]\n 64%|██████▍ | 27/42 [00:07<00:04, 3.68it/s]\n 67%|██████▋ | 28/42 [00:07<00:03, 3.68it/s]\n 69%|██████▉ | 29/42 [00:07<00:03, 3.68it/s]\n 71%|███████▏ | 30/42 [00:08<00:03, 3.68it/s]\n 74%|███████▍ | 31/42 [00:08<00:02, 3.68it/s]\n 76%|███████▌ | 32/42 [00:08<00:02, 3.68it/s]\n 79%|███████▊ | 33/42 [00:08<00:02, 3.68it/s]\n 81%|████████ | 34/42 [00:09<00:02, 3.68it/s]\n 83%|████████▎ | 35/42 [00:09<00:01, 3.68it/s]\n 86%|████████▌ | 36/42 [00:09<00:01, 3.68it/s]\n 88%|████████▊ | 37/42 [00:10<00:01, 3.68it/s]\n 90%|█████████ | 38/42 [00:10<00:01, 3.68it/s]\n 93%|█████████▎| 39/42 [00:10<00:00, 3.68it/s]\n 95%|█████████▌| 40/42 [00:10<00:00, 3.67it/s]\n 98%|█████████▊| 41/42 [00:11<00:00, 3.67it/s]\n100%|██████████| 42/42 [00:11<00:00, 3.68it/s]\n100%|██████████| 42/42 [00:11<00:00, 3.67it/s]\n 0%| | 0/8 [00:00<?, ?it/s]\n 12%|█▎ | 1/8 [00:00<00:01, 4.27it/s]\n 25%|██▌ | 2/8 [00:00<00:01, 4.27it/s]\n 38%|███▊ | 3/8 [00:00<00:01, 4.24it/s]\n 50%|█████ | 4/8 [00:00<00:00, 4.24it/s]\n 62%|██████▎ | 5/8 [00:01<00:00, 4.23it/s]\n 75%|███████▌ | 6/8 [00:01<00:00, 4.23it/s]\n 88%|████████▊ | 7/8 [00:01<00:00, 4.24it/s]\n100%|██████████| 8/8 [00:01<00:00, 4.24it/s]\n100%|██████████| 8/8 [00:01<00:00, 4.24it/s]", "metrics": { "predict_time": 14.87436, "total_time": 16.470576 }, "output": [ "https://replicate.delivery/pbxt/aHciG5puDDIxLlS1o0VgdZ2ibZCb9AcX0mft2lKOh28dVb3IA/out-0.png" ], "started_at": "2023-10-16T22:26:53.631325Z", "status": "succeeded", "urls": { "get": "https://api.replicate.com/v1/predictions/lrimijdbk5zi26tz64rl7pctfi", "cancel": 
"https://api.replicate.com/v1/predictions/lrimijdbk5zi26tz64rl7pctfi/cancel" }, "version": "ab89b79e55e45f45d903449d23c17ca67420e0d5ea2defce45409dd12951c537" }
Want to make some of these yourself? Run this model.