qunash/circassian-culture-flux-3000-steps:bf772d87
Input
Install Replicate's Node.js client library:
npm install replicate
Set the REPLICATE_API_TOKEN environment variable:
export REPLICATE_API_TOKEN=<paste-your-token-here>
Find your API token in your account settings.
import Replicate from "replicate";
const replicate = new Replicate({
auth: process.env.REPLICATE_API_TOKEN,
});
Run qunash/circassian-culture-flux-3000-steps using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
const output = await replicate.run(
"qunash/circassian-culture-flux-3000-steps:bf772d87d563c647335dd4bdf50ca56ee5ced4fe55bf0e8ed3d7f874303ae688",
{
input: {
model: "dev",
prompt: "A Circassian female in a traditional Circassian dress fasha with visible eyebrows, hazel eyes, small lips, a bit broader short nose, almond-shaped eyes, a broad chin, and a prominent jawline. She has shoulder-length hair, a slight smile, high cheekbones, and is in a portrait against a white background. The photo was taken with Kodak Portra 800 film.",
go_fast: false,
lora_scale: 1,
megapixels: "1",
num_outputs: 4,
aspect_ratio: "1:1",
output_format: "webp",
guidance_scale: 3.5,
output_quality: 80,
prompt_strength: 0.8,
extra_lora_scale: 1,
num_inference_steps: 50
}
}
);
// To access the file URL:
console.log(output[0].url()); //=> "http://example.com"
// To write the file to disk (the input above sets output_format to "webp"):
import { writeFile } from "node:fs/promises";
await writeFile("my-image.webp", output[0]);
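Because num_outputs is 4 in the input above, output contains four files. A minimal sketch for saving all of them, under the same assumption as above that each element can be passed directly to writeFile:
// Save every generated image from the four requested outputs.
await Promise.all(
  output.map((file, i) => writeFile(`out-${i}.webp`, file))
);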
To learn more, take a look at the guide on getting started with Node.js.
Install Replicate's Python client library:
pip install replicate
Set the REPLICATE_API_TOKEN environment variable:
export REPLICATE_API_TOKEN=<paste-your-token-here>
Find your API token in your account settings.
import replicate
Run qunash/circassian-culture-flux-3000-steps using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
output = replicate.run(
"qunash/circassian-culture-flux-3000-steps:bf772d87d563c647335dd4bdf50ca56ee5ced4fe55bf0e8ed3d7f874303ae688",
input={
"model": "dev",
"prompt": "A Circassian female in a traditional Circassian dress fasha with visible eyebrows, hazel eyes, small lips, a bit broader short nose, almond-shaped eyes, a broad chin, and a prominent jawline. She has shoulder-length hair, a slight smile, high cheekbones, and is in a portrait against a white background. The photo was taken with Kodak Portra 800 film.",
"go_fast": False,
"lora_scale": 1,
"megapixels": "1",
"num_outputs": 4,
"aspect_ratio": "1:1",
"output_format": "webp",
"guidance_scale": 3.5,
"output_quality": 80,
"prompt_strength": 0.8,
"extra_lora_scale": 1,
"num_inference_steps": 50
}
)
print(output)
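The run above requests four images, so output is a list. A minimal sketch for writing each one to disk, assuming the client returns file-like objects with a read() method, as in recent versions of the replicate package:
# Write each generated image to disk; the input requests four WebP outputs.
for i, image in enumerate(output):
    with open(f"out-{i}.webp", "wb") as f:
        f.write(image.read())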
To learn more, take a look at the guide on getting started with Python.
Set the REPLICATE_API_TOKEN environment variable:
export REPLICATE_API_TOKEN=<paste-your-token-here>
Find your API token in your account settings.
Run qunash/circassian-culture-flux-3000-steps using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
curl -s -X POST \
-H "Authorization: Bearer $REPLICATE_API_TOKEN" \
-H "Content-Type: application/json" \
-H "Prefer: wait" \
-d $'{
"version": "qunash/circassian-culture-flux-3000-steps:bf772d87d563c647335dd4bdf50ca56ee5ced4fe55bf0e8ed3d7f874303ae688",
"input": {
"model": "dev",
"prompt": "A Circassian female in a traditional Circassian dress fasha with visible eyebrows, hazel eyes, small lips, a bit broader short nose, almond-shaped eyes, a broad chin, and a prominent jawline. She has shoulder-length hair, a slight smile, high cheekbones, and is in a portrait against a white background. The photo was taken with Kodak Portra 800 film.",
"go_fast": false,
"lora_scale": 1,
"megapixels": "1",
"num_outputs": 4,
"aspect_ratio": "1:1",
"output_format": "webp",
"guidance_scale": 3.5,
"output_quality": 80,
"prompt_strength": 0.8,
"extra_lora_scale": 1,
"num_inference_steps": 50
}
}' \
https://api.replicate.com/v1/predictions
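The Prefer: wait header asks the API to hold the connection until the prediction completes. If the request returns before the prediction finishes, or if you omit the header, you can poll the prediction's get URL instead. A minimal sketch using the prediction ID from the example output below, with jq assumed to be installed:
# Poll the prediction by ID and print its status and any output URLs.
curl -s \
  -H "Authorization: Bearer $REPLICATE_API_TOKEN" \
  https://api.replicate.com/v1/predictions/1221wd3k1srm20chcjjrbdbbmg \
  | jq '{status, output}'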
To learn more, take a look at Replicate’s HTTP API reference docs.
Output
{
"completed_at": "2024-08-18T08:41:43.354769Z",
"created_at": "2024-08-18T08:40:50.190000Z",
"data_removed": false,
"error": null,
"id": "1221wd3k1srm20chcjjrbdbbmg",
"input": {
"model": "dev",
"prompt": "A Circassian female in a traditional Circassian dress fasha with visible eyebrows, hazel eyes, small lips, a bit broader short nose, almond-shaped eyes, a broad chin, and a prominent jawline. She has shoulder-length hair, a slight smile, high cheekbones, and is in a portrait against a white background. The photo was taken with Kodak Portra 800 film.",
"lora_scale": 1,
"num_outputs": 4,
"aspect_ratio": "1:1",
"output_format": "webp",
"guidance_scale": 3.5,
"output_quality": 80,
"num_inference_steps": 50
},
"logs": "Using seed: 9905\nPrompt: A Circassian female in a traditional Circassian dress fasha with visible eyebrows, hazel eyes, small lips, a bit broader short nose, almond-shaped eyes, a broad chin, and a prominent jawline. She has shoulder-length hair, a slight smile, high cheekbones, and is in a portrait against a white background. The photo was taken with Kodak Portra 800 film.\ntxt2img mode\nUsing dev model\nLoading LoRA weights\nweights already loaded!\nThe following part of your input was truncated because CLIP can only handle sequences up to 77 tokens: ['kodak portra 8 0 0 film.', 'kodak portra 8 0 0 film.', 'kodak portra 8 0 0 film.', 'kodak portra 8 0 0 film.']\n 0%| | 0/50 [00:00<?, ?it/s]\n 2%|▏ | 1/50 [00:01<00:50, 1.03s/it]\n 4%|▍ | 2/50 [00:01<00:43, 1.11it/s]\n 6%|▌ | 3/50 [00:02<00:45, 1.04it/s]\n 8%|▊ | 4/50 [00:03<00:45, 1.02it/s]\n 10%|█ | 5/50 [00:04<00:44, 1.00it/s]\n 12%|█▏ | 6/50 [00:05<00:44, 1.01s/it]\n 14%|█▍ | 7/50 [00:06<00:43, 1.01s/it]\n 16%|█▌ | 8/50 [00:07<00:42, 1.02s/it]\n 18%|█▊ | 9/50 [00:09<00:41, 1.02s/it]\n 20%|██ | 10/50 [00:10<00:41, 1.03s/it]\n 22%|██▏ | 11/50 [00:11<00:40, 1.03s/it]\n 24%|██▍ | 12/50 [00:12<00:39, 1.03s/it]\n 26%|██▌ | 13/50 [00:13<00:38, 1.03s/it]\n 28%|██▊ | 14/50 [00:14<00:37, 1.03s/it]\n 30%|███ | 15/50 [00:15<00:36, 1.03s/it]\n 32%|███▏ | 16/50 [00:16<00:35, 1.03s/it]\n 34%|███▍ | 17/50 [00:17<00:33, 1.03s/it]\n 36%|███▌ | 18/50 [00:18<00:33, 1.03s/it]\n 38%|███▊ | 19/50 [00:19<00:31, 1.03s/it]\n 40%|████ | 20/50 [00:20<00:30, 1.03s/it]\n 42%|████▏ | 21/50 [00:21<00:29, 1.03s/it]\n 44%|████▍ | 22/50 [00:22<00:28, 1.04s/it]\n 46%|████▌ | 23/50 [00:23<00:27, 1.04s/it]\n 48%|████▊ | 24/50 [00:24<00:26, 1.03s/it]\n 50%|█████ | 25/50 [00:25<00:25, 1.03s/it]\n 52%|█████▏ | 26/50 [00:26<00:24, 1.03s/it]\n 54%|█████▍ | 27/50 [00:27<00:23, 1.03s/it]\n 56%|█████▌ | 28/50 [00:28<00:22, 1.04s/it]\n 58%|█████▊ | 29/50 [00:29<00:21, 1.04s/it]\n 60%|██████ | 30/50 [00:30<00:20, 1.04s/it]\n 62%|██████▏ | 31/50 [00:31<00:19, 1.04s/it]\n 64%|██████▍ | 32/50 [00:32<00:18, 1.04s/it]\n 66%|██████▌ | 33/50 [00:33<00:17, 1.04s/it]\n 68%|██████▊ | 34/50 [00:34<00:16, 1.04s/it]\n 70%|███████ | 35/50 [00:35<00:15, 1.04s/it]\n 72%|███████▏ | 36/50 [00:36<00:14, 1.04s/it]\n 74%|███████▍ | 37/50 [00:38<00:13, 1.04s/it]\n 76%|███████▌ | 38/50 [00:39<00:12, 1.04s/it]\n 78%|███████▊ | 39/50 [00:40<00:11, 1.04s/it]\n 80%|████████ | 40/50 [00:41<00:10, 1.04s/it]\n 82%|████████▏ | 41/50 [00:42<00:09, 1.04s/it]\n 84%|████████▍ | 42/50 [00:43<00:08, 1.04s/it]\n 86%|████████▌ | 43/50 [00:44<00:07, 1.04s/it]\n 88%|████████▊ | 44/50 [00:45<00:06, 1.04s/it]\n 90%|█████████ | 45/50 [00:46<00:05, 1.04s/it]\n 92%|█████████▏| 46/50 [00:47<00:04, 1.04s/it]\n 94%|█████████▍| 47/50 [00:48<00:03, 1.04s/it]\n 96%|█████████▌| 48/50 [00:49<00:02, 1.04s/it]\n 98%|█████████▊| 49/50 [00:50<00:01, 1.04s/it]\n100%|██████████| 50/50 [00:51<00:00, 1.04s/it]\n100%|██████████| 50/50 [00:51<00:00, 1.03s/it]",
"metrics": {
"predict_time": 53.131398988,
"total_time": 53.164769
},
"output": [
"https://replicate.delivery/yhqm/wu5fxnAZ8pWdXiuGpQ8yV9Py01GjhZJxRmicTHfLec0NusnmA/out-0.webp",
"https://replicate.delivery/yhqm/T2kepfBPS6q6Mkj6XLysoaOdfvHptgf7rs3TxVuDHZ6ccZPNB/out-1.webp",
"https://replicate.delivery/yhqm/ePBAA0vKPmVSd6fGlNf7AVvofD0x8ZbqUbmrhfjP87Q44ye0E/out-2.webp",
"https://replicate.delivery/yhqm/SFj2brHc8jI8LxgKU8HcU3DoekQaoqhSeiPOImc4kaXHX2TTA/out-3.webp"
],
"started_at": "2024-08-18T08:40:50.223370Z",
"status": "succeeded",
"urls": {
"get": "https://api.replicate.com/v1/predictions/1221wd3k1srm20chcjjrbdbbmg",
"cancel": "https://api.replicate.com/v1/predictions/1221wd3k1srm20chcjjrbdbbmg/cancel"
},
"version": "bf772d87d563c647335dd4bdf50ca56ee5ced4fe55bf0e8ed3d7f874303ae688"
}
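Note the warning in the logs field above: CLIP can only encode 77 tokens, so the trailing "Kodak Portra 800 film" clause was truncated from the prompt. A rough pre-flight check is sketched below, assuming the standard CLIP-L tokenizer that FLUX-family models commonly use; this is an assumption, not something the model's schema guarantees:
# Count CLIP tokens for the prompt; anything past 77 gets truncated.
from transformers import CLIPTokenizer

tokenizer = CLIPTokenizer.from_pretrained("openai/clip-vit-large-patch14")
prompt = (
    "A Circassian female in a traditional Circassian dress fasha with "
    "visible eyebrows, hazel eyes, small lips, a bit broader short nose, "
    "almond-shaped eyes, a broad chin, and a prominent jawline. She has "
    "shoulder-length hair, a slight smile, high cheekbones, and is in a "
    "portrait against a white background. The photo was taken with Kodak "
    "Portra 800 film."
)
n_tokens = len(tokenizer(prompt)["input_ids"])
print(f"{n_tokens} tokens; {'truncated' if n_tokens > 77 else 'fits'}")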