Readme
Adapted from https://replicate.com/lucataco/flux-dev-multi-lora to bring multi-LoRA support to FLUX.1 schnell as well.
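The multi-LoRA inputs are two parallel lists: hf_loras holds the LoRA weight URLs and lora_scales holds their strengths, which appear to pair up by position (this element-wise pairing is an assumption based on the examples below, not documented behavior). A minimal sketch of just those two fields:
# Sketch of the multi-LoRA input fields only; pairing by index is assumed.
input = {
    "hf_loras": [
        "https://huggingface.co/Octree/flux-schnell-lora/resolve/main/flux-schnell-lora.safetensors",
        "https://huggingface.co/hugovntr/flux-schnell-realism/resolve/main/schnell-realism_v1.safetensors",
    ],
    "lora_scales": [0.8, 0.9],  # assumed: 0.8 applies to the first LoRA, 0.9 to the second
}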
Run this model in Node.js with one line of code:
Install Replicate's Node.js client library:
npm install replicate
Set the REPLICATE_API_TOKEN environment variable:
export REPLICATE_API_TOKEN=<paste-your-token-here>
Find your API token in your account settings.
import Replicate from "replicate";
import fs from "node:fs/promises";
const replicate = new Replicate({
auth: process.env.REPLICATE_API_TOKEN,
});
Run jensbosseparra/flux1-schnell-multi-lora using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
const output = await replicate.run(
"jensbosseparra/flux1-schnell-multi-lora:85e4655b8b7d00ee7b66e2ff6986a489f710ff799bb4a62a7d7b34e66a3bdcd2",
{
input: {
seed: 43,
prompt: "a beautiful scandinavian girl posing in the mountains, the sun is shining, the grass is green and flowers are everywhere, she is wearing a patterned colorful top. be4u7y",
hf_loras: ["https://huggingface.co/Octree/flux-schnell-lora/resolve/main/flux-schnell-lora.safetensors","https://huggingface.co/hugovntr/flux-schnell-realism/resolve/main/schnell-realism_v1.safetensors"],
lora_scales: [0.8,0.9],
num_outputs: 1,
aspect_ratio: "1:1",
output_format: "webp",
guidance_scale: 0,
output_quality: 100,
prompt_strength: 0.8,
num_inference_steps: 4
}
}
);
// To access the file URL:
console.log(output[0].url()); //=> "http://example.com"
// To write the file to disk (fs/promises, so we can await instead of passing a callback):
await fs.writeFile("my-image.webp", output[0]);
To learn more, take a look at the guide on getting started with Node.js.
Install Replicate's Python client library:
pip install replicate
Set the REPLICATE_API_TOKEN environment variable:
export REPLICATE_API_TOKEN=<paste-your-token-here>
Find your API token in your account settings.
import replicate
Run jensbosseparra/flux1-schnell-multi-lora using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
output = replicate.run(
"jensbosseparra/flux1-schnell-multi-lora:85e4655b8b7d00ee7b66e2ff6986a489f710ff799bb4a62a7d7b34e66a3bdcd2",
input={
"seed": 43,
"prompt": "a beautiful scandinavian girl posing in the mountains, the sun is shining, the grass is green and flowers are everywhere, she is wearing a patterned colorful top. be4u7y",
"hf_loras": ["https://huggingface.co/Octree/flux-schnell-lora/resolve/main/flux-schnell-lora.safetensors","https://huggingface.co/hugovntr/flux-schnell-realism/resolve/main/schnell-realism_v1.safetensors"],
"lora_scales": [0.8,0.9],
"num_outputs": 1,
"aspect_ratio": "1:1",
"output_format": "webp",
"guidance_scale": 0,
"output_quality": 100,
"prompt_strength": 0.8,
"num_inference_steps": 4
}
)
print(output)
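print(output) only displays the result; to save the generated image, here is a minimal sketch assuming a recent Replicate Python client that returns file-like FileOutput objects (older client versions return plain URL strings, in which case download the URL instead):
# Sketch: write the first generated image to disk.
# Assumes output[0] is a FileOutput with a read() method; if it is a plain
# URL string, fetch it with urllib.request or requests instead.
with open("my-image.webp", "wb") as f:
    f.write(output[0].read())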
To learn more, take a look at the guide on getting started with Python.
Set the REPLICATE_API_TOKEN environment variable:
export REPLICATE_API_TOKEN=<paste-your-token-here>
Find your API token in your account settings.
Run jensbosseparra/flux1-schnell-multi-lora using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
curl -s -X POST \
-H "Authorization: Bearer $REPLICATE_API_TOKEN" \
-H "Content-Type: application/json" \
-H "Prefer: wait" \
-d $'{
"version": "jensbosseparra/flux1-schnell-multi-lora:85e4655b8b7d00ee7b66e2ff6986a489f710ff799bb4a62a7d7b34e66a3bdcd2",
"input": {
"seed": 43,
"prompt": "a beautiful scandinavian girl posing in the mountains, the sun is shining, the grass is green and flowers are everywhere, she is wearing a patterned colorful top. be4u7y",
"hf_loras": ["https://huggingface.co/Octree/flux-schnell-lora/resolve/main/flux-schnell-lora.safetensors","https://huggingface.co/hugovntr/flux-schnell-realism/resolve/main/schnell-realism_v1.safetensors"],
"lora_scales": [0.8,0.9],
"num_outputs": 1,
"aspect_ratio": "1:1",
"output_format": "webp",
"guidance_scale": 0,
"output_quality": 100,
"prompt_strength": 0.8,
"num_inference_steps": 4
}
}' \
https://api.replicate.com/v1/predictions
To learn more, take a look at Replicate’s HTTP API reference docs.
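The Prefer: wait header asks the API to hold the connection until the prediction finishes; if the wait window elapses first, the response still contains the prediction's urls.get endpoint, which can be polled until status becomes "succeeded" (the response shape matches the example prediction shown below). A minimal polling sketch in Python, assuming the requests package; any HTTP client works the same way:
# Sketch: create a prediction via the HTTP API, then poll until it finishes.
import os
import time
import requests

headers = {
    "Authorization": f"Bearer {os.environ['REPLICATE_API_TOKEN']}",
    "Content-Type": "application/json",
}

prediction = requests.post(
    "https://api.replicate.com/v1/predictions",
    headers=headers,
    json={
        "version": "jensbosseparra/flux1-schnell-multi-lora:85e4655b8b7d00ee7b66e2ff6986a489f710ff799bb4a62a7d7b34e66a3bdcd2",
        # Same input fields as the curl example above apply; shortened prompt here.
        "input": {"prompt": "a beautiful scandinavian girl posing in the mountains"},
    },
).json()

while prediction["status"] not in ("succeeded", "failed", "canceled"):
    time.sleep(1)
    prediction = requests.get(prediction["urls"]["get"], headers=headers).json()

print(prediction["output"])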
{
"completed_at": "2024-11-03T17:30:10.925995Z",
"created_at": "2024-11-03T17:30:04.602000Z",
"data_removed": false,
"error": null,
"id": "f7rqmrn7q9rj00cjyc9sf4f0wr",
"input": {
"seed": 43,
"prompt": "a beautiful scandinavian girl posing in the mountains, the sun is shining, the grass is green and flowers are everywhere, she is wearing a patterned colorful top. be4u7y",
"hf_loras": [
"https://huggingface.co/Octree/flux-schnell-lora/resolve/main/flux-schnell-lora.safetensors",
"https://huggingface.co/hugovntr/flux-schnell-realism/resolve/main/schnell-realism_v1.safetensors"
],
"lora_scales": [
0.8,
0.9
],
"num_outputs": 1,
"aspect_ratio": "1:1",
"output_format": "webp",
"guidance_scale": 0,
"output_quality": 100,
"prompt_strength": 0.8,
"num_inference_steps": 4
},
"logs": "Using seed: 43\nPrompt: a beautiful scandinavian girl posing in the mountains, the sun is shining, the grass is green and flowers are everywhere, she is wearing a patterned colorful top. be4u7y\ntxt2img mode\nDownloading LoRA weights from - HF URL: https://huggingface.co/Octree/flux-schnell-lora/resolve/main/flux-schnell-lora.safetensors\nHuggingFace slug from URL: Octree/flux-schnell-lora, weight name: flux-schnell-lora.safetensors\nLoading LoRA took: 0.68 seconds\nDownloading LoRA weights from - HF URL: https://huggingface.co/hugovntr/flux-schnell-realism/resolve/main/schnell-realism_v1.safetensors\nHuggingFace slug from URL: hugovntr/flux-schnell-realism, weight name: schnell-realism_v1.safetensors\nUnsuppored keys for ai-toolkit: dict_keys(['lora_te1_text_model_encoder_layers_0_mlp_fc1.alpha', 'lora_te1_text_model_encoder_layers_0_mlp_fc1.lora_down.weight', 'lora_te1_text_model_encoder_layers_0_mlp_fc1.lora_up.weight', 'lora_te1_text_model_encoder_layers_0_mlp_fc2.alpha', 'lora_te1_text_model_encoder_layers_0_mlp_fc2.lora_down.weight', 'lora_te1_text_model_encoder_layers_0_mlp_fc2.lora_up.weight', 'lora_te1_text_model_encoder_layers_0_self_attn_k_proj.alpha', 'lora_te1_text_model_encoder_layers_0_self_attn_k_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_0_self_attn_k_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_0_self_attn_out_proj.alpha', 'lora_te1_text_model_encoder_layers_0_self_attn_out_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_0_self_attn_out_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_0_self_attn_q_proj.alpha', 'lora_te1_text_model_encoder_layers_0_self_attn_q_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_0_self_attn_q_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_0_self_attn_v_proj.alpha', 'lora_te1_text_model_encoder_layers_0_self_attn_v_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_0_self_attn_v_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_10_mlp_fc1.alpha', 'lora_te1_text_model_encoder_layers_10_mlp_fc1.lora_down.weight', 'lora_te1_text_model_encoder_layers_10_mlp_fc1.lora_up.weight', 'lora_te1_text_model_encoder_layers_10_mlp_fc2.alpha', 'lora_te1_text_model_encoder_layers_10_mlp_fc2.lora_down.weight', 'lora_te1_text_model_encoder_layers_10_mlp_fc2.lora_up.weight', 'lora_te1_text_model_encoder_layers_10_self_attn_k_proj.alpha', 'lora_te1_text_model_encoder_layers_10_self_attn_k_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_10_self_attn_k_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_10_self_attn_out_proj.alpha', 'lora_te1_text_model_encoder_layers_10_self_attn_out_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_10_self_attn_out_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_10_self_attn_q_proj.alpha', 'lora_te1_text_model_encoder_layers_10_self_attn_q_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_10_self_attn_q_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_10_self_attn_v_proj.alpha', 'lora_te1_text_model_encoder_layers_10_self_attn_v_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_10_self_attn_v_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_11_mlp_fc1.alpha', 'lora_te1_text_model_encoder_layers_11_mlp_fc1.lora_down.weight', 'lora_te1_text_model_encoder_layers_11_mlp_fc1.lora_up.weight', 'lora_te1_text_model_encoder_layers_11_mlp_fc2.alpha', 'lora_te1_text_model_encoder_layers_11_mlp_fc2.lora_down.weight', 
'lora_te1_text_model_encoder_layers_11_mlp_fc2.lora_up.weight', 'lora_te1_text_model_encoder_layers_11_self_attn_k_proj.alpha', 'lora_te1_text_model_encoder_layers_11_self_attn_k_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_11_self_attn_k_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_11_self_attn_out_proj.alpha', 'lora_te1_text_model_encoder_layers_11_self_attn_out_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_11_self_attn_out_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_11_self_attn_q_proj.alpha', 'lora_te1_text_model_encoder_layers_11_self_attn_q_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_11_self_attn_q_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_11_self_attn_v_proj.alpha', 'lora_te1_text_model_encoder_layers_11_self_attn_v_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_11_self_attn_v_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_1_mlp_fc1.alpha', 'lora_te1_text_model_encoder_layers_1_mlp_fc1.lora_down.weight', 'lora_te1_text_model_encoder_layers_1_mlp_fc1.lora_up.weight', 'lora_te1_text_model_encoder_layers_1_mlp_fc2.alpha', 'lora_te1_text_model_encoder_layers_1_mlp_fc2.lora_down.weight', 'lora_te1_text_model_encoder_layers_1_mlp_fc2.lora_up.weight', 'lora_te1_text_model_encoder_layers_1_self_attn_k_proj.alpha', 'lora_te1_text_model_encoder_layers_1_self_attn_k_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_1_self_attn_k_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_1_self_attn_out_proj.alpha', 'lora_te1_text_model_encoder_layers_1_self_attn_out_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_1_self_attn_out_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_1_self_attn_q_proj.alpha', 'lora_te1_text_model_encoder_layers_1_self_attn_q_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_1_self_attn_q_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_1_self_attn_v_proj.alpha', 'lora_te1_text_model_encoder_layers_1_self_attn_v_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_1_self_attn_v_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_2_mlp_fc1.alpha', 'lora_te1_text_model_encoder_layers_2_mlp_fc1.lora_down.weight', 'lora_te1_text_model_encoder_layers_2_mlp_fc1.lora_up.weight', 'lora_te1_text_model_encoder_layers_2_mlp_fc2.alpha', 'lora_te1_text_model_encoder_layers_2_mlp_fc2.lora_down.weight', 'lora_te1_text_model_encoder_layers_2_mlp_fc2.lora_up.weight', 'lora_te1_text_model_encoder_layers_2_self_attn_k_proj.alpha', 'lora_te1_text_model_encoder_layers_2_self_attn_k_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_2_self_attn_k_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_2_self_attn_out_proj.alpha', 'lora_te1_text_model_encoder_layers_2_self_attn_out_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_2_self_attn_out_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_2_self_attn_q_proj.alpha', 'lora_te1_text_model_encoder_layers_2_self_attn_q_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_2_self_attn_q_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_2_self_attn_v_proj.alpha', 'lora_te1_text_model_encoder_layers_2_self_attn_v_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_2_self_attn_v_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_3_mlp_fc1.alpha', 'lora_te1_text_model_encoder_layers_3_mlp_fc1.lora_down.weight', 'lora_te1_text_model_encoder_layers_3_mlp_fc1.lora_up.weight', 
'lora_te1_text_model_encoder_layers_3_mlp_fc2.alpha', 'lora_te1_text_model_encoder_layers_3_mlp_fc2.lora_down.weight', 'lora_te1_text_model_encoder_layers_3_mlp_fc2.lora_up.weight', 'lora_te1_text_model_encoder_layers_3_self_attn_k_proj.alpha', 'lora_te1_text_model_encoder_layers_3_self_attn_k_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_3_self_attn_k_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_3_self_attn_out_proj.alpha', 'lora_te1_text_model_encoder_layers_3_self_attn_out_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_3_self_attn_out_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_3_self_attn_q_proj.alpha', 'lora_te1_text_model_encoder_layers_3_self_attn_q_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_3_self_attn_q_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_3_self_attn_v_proj.alpha', 'lora_te1_text_model_encoder_layers_3_self_attn_v_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_3_self_attn_v_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_4_mlp_fc1.alpha', 'lora_te1_text_model_encoder_layers_4_mlp_fc1.lora_down.weight', 'lora_te1_text_model_encoder_layers_4_mlp_fc1.lora_up.weight', 'lora_te1_text_model_encoder_layers_4_mlp_fc2.alpha', 'lora_te1_text_model_encoder_layers_4_mlp_fc2.lora_down.weight', 'lora_te1_text_model_encoder_layers_4_mlp_fc2.lora_up.weight', 'lora_te1_text_model_encoder_layers_4_self_attn_k_proj.alpha', 'lora_te1_text_model_encoder_layers_4_self_attn_k_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_4_self_attn_k_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_4_self_attn_out_proj.alpha', 'lora_te1_text_model_encoder_layers_4_self_attn_out_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_4_self_attn_out_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_4_self_attn_q_proj.alpha', 'lora_te1_text_model_encoder_layers_4_self_attn_q_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_4_self_attn_q_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_4_self_attn_v_proj.alpha', 'lora_te1_text_model_encoder_layers_4_self_attn_v_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_4_self_attn_v_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_5_mlp_fc1.alpha', 'lora_te1_text_model_encoder_layers_5_mlp_fc1.lora_down.weight', 'lora_te1_text_model_encoder_layers_5_mlp_fc1.lora_up.weight', 'lora_te1_text_model_encoder_layers_5_mlp_fc2.alpha', 'lora_te1_text_model_encoder_layers_5_mlp_fc2.lora_down.weight', 'lora_te1_text_model_encoder_layers_5_mlp_fc2.lora_up.weight', 'lora_te1_text_model_encoder_layers_5_self_attn_k_proj.alpha', 'lora_te1_text_model_encoder_layers_5_self_attn_k_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_5_self_attn_k_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_5_self_attn_out_proj.alpha', 'lora_te1_text_model_encoder_layers_5_self_attn_out_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_5_self_attn_out_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_5_self_attn_q_proj.alpha', 'lora_te1_text_model_encoder_layers_5_self_attn_q_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_5_self_attn_q_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_5_self_attn_v_proj.alpha', 'lora_te1_text_model_encoder_layers_5_self_attn_v_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_5_self_attn_v_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_6_mlp_fc1.alpha', 'lora_te1_text_model_encoder_layers_6_mlp_fc1.lora_down.weight', 
'lora_te1_text_model_encoder_layers_6_mlp_fc1.lora_up.weight', 'lora_te1_text_model_encoder_layers_6_mlp_fc2.alpha', 'lora_te1_text_model_encoder_layers_6_mlp_fc2.lora_down.weight', 'lora_te1_text_model_encoder_layers_6_mlp_fc2.lora_up.weight', 'lora_te1_text_model_encoder_layers_6_self_attn_k_proj.alpha', 'lora_te1_text_model_encoder_layers_6_self_attn_k_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_6_self_attn_k_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_6_self_attn_out_proj.alpha', 'lora_te1_text_model_encoder_layers_6_self_attn_out_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_6_self_attn_out_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_6_self_attn_q_proj.alpha', 'lora_te1_text_model_encoder_layers_6_self_attn_q_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_6_self_attn_q_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_6_self_attn_v_proj.alpha', 'lora_te1_text_model_encoder_layers_6_self_attn_v_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_6_self_attn_v_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_7_mlp_fc1.alpha', 'lora_te1_text_model_encoder_layers_7_mlp_fc1.lora_down.weight', 'lora_te1_text_model_encoder_layers_7_mlp_fc1.lora_up.weight', 'lora_te1_text_model_encoder_layers_7_mlp_fc2.alpha', 'lora_te1_text_model_encoder_layers_7_mlp_fc2.lora_down.weight', 'lora_te1_text_model_encoder_layers_7_mlp_fc2.lora_up.weight', 'lora_te1_text_model_encoder_layers_7_self_attn_k_proj.alpha', 'lora_te1_text_model_encoder_layers_7_self_attn_k_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_7_self_attn_k_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_7_self_attn_out_proj.alpha', 'lora_te1_text_model_encoder_layers_7_self_attn_out_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_7_self_attn_out_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_7_self_attn_q_proj.alpha', 'lora_te1_text_model_encoder_layers_7_self_attn_q_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_7_self_attn_q_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_7_self_attn_v_proj.alpha', 'lora_te1_text_model_encoder_layers_7_self_attn_v_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_7_self_attn_v_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_8_mlp_fc1.alpha', 'lora_te1_text_model_encoder_layers_8_mlp_fc1.lora_down.weight', 'lora_te1_text_model_encoder_layers_8_mlp_fc1.lora_up.weight', 'lora_te1_text_model_encoder_layers_8_mlp_fc2.alpha', 'lora_te1_text_model_encoder_layers_8_mlp_fc2.lora_down.weight', 'lora_te1_text_model_encoder_layers_8_mlp_fc2.lora_up.weight', 'lora_te1_text_model_encoder_layers_8_self_attn_k_proj.alpha', 'lora_te1_text_model_encoder_layers_8_self_attn_k_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_8_self_attn_k_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_8_self_attn_out_proj.alpha', 'lora_te1_text_model_encoder_layers_8_self_attn_out_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_8_self_attn_out_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_8_self_attn_q_proj.alpha', 'lora_te1_text_model_encoder_layers_8_self_attn_q_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_8_self_attn_q_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_8_self_attn_v_proj.alpha', 'lora_te1_text_model_encoder_layers_8_self_attn_v_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_8_self_attn_v_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_9_mlp_fc1.alpha', 
'lora_te1_text_model_encoder_layers_9_mlp_fc1.lora_down.weight', 'lora_te1_text_model_encoder_layers_9_mlp_fc1.lora_up.weight', 'lora_te1_text_model_encoder_layers_9_mlp_fc2.alpha', 'lora_te1_text_model_encoder_layers_9_mlp_fc2.lora_down.weight', 'lora_te1_text_model_encoder_layers_9_mlp_fc2.lora_up.weight', 'lora_te1_text_model_encoder_layers_9_self_attn_k_proj.alpha', 'lora_te1_text_model_encoder_layers_9_self_attn_k_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_9_self_attn_k_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_9_self_attn_out_proj.alpha', 'lora_te1_text_model_encoder_layers_9_self_attn_out_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_9_self_attn_out_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_9_self_attn_q_proj.alpha', 'lora_te1_text_model_encoder_layers_9_self_attn_q_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_9_self_attn_q_proj.lora_up.weight', 'lora_te1_text_model_encoder_layers_9_self_attn_v_proj.alpha', 'lora_te1_text_model_encoder_layers_9_self_attn_v_proj.lora_down.weight', 'lora_te1_text_model_encoder_layers_9_self_attn_v_proj.lora_up.weight'])\nLoading LoRA took: 2.27 seconds\n 0%| | 0/4 [00:00<?, ?it/s]\n 25%|██▌ | 1/4 [00:00<00:01, 1.85it/s]\n 50%|█████ | 2/4 [00:00<00:00, 2.45it/s]\n 75%|███████▌ | 3/4 [00:01<00:00, 2.26it/s]\n100%|██████████| 4/4 [00:01<00:00, 2.18it/s]\n100%|██████████| 4/4 [00:01<00:00, 2.19it/s]",
"metrics": {
"predict_time": 6.315119646,
"total_time": 6.323995
},
"output": [
"https://replicate.delivery/yhqm/YPC4ZAAnzxamDtTFLBsOrXUsfYDfBib2bnzVNtd3KMwiUWtTA/out-0.webp"
],
"started_at": "2024-11-03T17:30:04.610875Z",
"status": "succeeded",
"urls": {
"stream": "https://stream.replicate.com/v1/files/qoxq-wyjnobksnzmjuo6ymvhcuykdid3xkhg4iqksrxqiky3qp5mcflxq",
"get": "https://api.replicate.com/v1/predictions/f7rqmrn7q9rj00cjyc9sf4f0wr",
"cancel": "https://api.replicate.com/v1/predictions/f7rqmrn7q9rj00cjyc9sf4f0wr/cancel"
},
"version": "1af36f429c0be8b88f7f8adfe6e720d9f1ab3b72ab62c78e768d08e91e4c7b68"
}
This output was created using a different version of the model, jensbosseparra/flux1-schnell-multi-lora:1af36f42.
This model costs approximately $0.0018 to run on Replicate, or 555 runs per $1, but this varies depending on your inputs. It is also open source and you can run it on your own computer with Docker.
This model runs on Nvidia A100 (80GB) GPU hardware. Predictions typically complete within 2 seconds.