Readme
This model doesn't have a readme.
Flux fine-tune of the Transamerica Pyramid building in San Francisco
Run this model in Node.js with one line of code:
npm install replicate
Set the REPLICATE_API_TOKEN environment variable:
export REPLICATE_API_TOKEN=<paste-your-token-here>
Find your API token in your account settings.
import { writeFile } from "node:fs/promises";
import Replicate from "replicate";
// Create an API client authenticated with the token from the environment.
const replicate = new Replicate({ auth: process.env.REPLICATE_API_TOKEN });
Run tarot-cards/transamerica-pyramid using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
// Run the model and wait for its output. The string after ":" pins the
// exact model version; see the model's schema for all input options.
const output = await replicate.run(
  "tarot-cards/transamerica-pyramid:c9e3e8f4858f6baa8a9df266ee7e9214596202bb41972f152f7b9a6ad433b3fe",
  {
    input: {
      model: "dev",
      width: 832,
      height: 1440,
      prompt: "The zeke/transamerica-pyramid Transamerica building in San Francisco. \"the emperor\" in the style of TOK a trtcrd, tarot style",
      go_fast: false,
      extra_lora: "apolinario/flux-tarot-v1",
      lora_scale: 1,
      megapixels: "1",
      num_outputs: 1,
      aspect_ratio: "custom",
      output_format: "webp",
      guidance_scale: 3.5,
      output_quality: 100,
      prompt_strength: 0.8,
      extra_lora_scale: 1,
      num_inference_steps: 28,
    },
  },
);

// To access the file URL:
console.log(output[0].url()); //=> "http://example.com"

// To write the file to disk. Fix: the original called `fs.writeFile`
// without importing `fs` and without the callback the callback-style API
// requires; use the promise-based `writeFile` and await it instead.
// NOTE(review): output_format is "webp", so the saved bytes are webp
// despite the .png filename — confirm the intended extension.
await writeFile("my-image.png", output[0]);
To learn more, take a look at the guide on getting started with Node.js.
pip install replicate
Set the REPLICATE_API_TOKEN environment variable:
export REPLICATE_API_TOKEN=<paste-your-token-here>
Find your API token in your account settings.
import replicate
Run tarot-cards/transamerica-pyramid using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
# Inputs for the prediction; see the model's schema for what each key does.
prediction_input = {
    "model": "dev",
    "width": 832,
    "height": 1440,
    "prompt": "The zeke/transamerica-pyramid Transamerica building in San Francisco. \"the emperor\" in the style of TOK a trtcrd, tarot style",
    "go_fast": False,
    "extra_lora": "apolinario/flux-tarot-v1",
    "lora_scale": 1,
    "megapixels": "1",
    "num_outputs": 1,
    "aspect_ratio": "custom",
    "output_format": "webp",
    "guidance_scale": 3.5,
    "output_quality": 100,
    "prompt_strength": 0.8,
    "extra_lora_scale": 1,
    "num_inference_steps": 28,
}

# Run the pinned model version and block until the output is ready.
output = replicate.run(
    "tarot-cards/transamerica-pyramid:c9e3e8f4858f6baa8a9df266ee7e9214596202bb41972f152f7b9a6ad433b3fe",
    input=prediction_input,
)
print(output)
To learn more, take a look at the guide on getting started with Python.
Set the REPLICATE_API_TOKEN environment variable:
export REPLICATE_API_TOKEN=<paste-your-token-here>
Find your API token in your account settings.
Run tarot-cards/transamerica-pyramid using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
# Create a prediction via Replicate's HTTP API.
# - "Prefer: wait" holds the connection open until the prediction finishes,
#   so the response body contains the completed prediction.
# - $'...' is bash ANSI-C quoting, which lets the \" escapes inside the
#   prompt survive as literal double quotes in the JSON payload.
curl -s -X POST \
-H "Authorization: Bearer $REPLICATE_API_TOKEN" \
-H "Content-Type: application/json" \
-H "Prefer: wait" \
-d $'{
"version": "tarot-cards/transamerica-pyramid:c9e3e8f4858f6baa8a9df266ee7e9214596202bb41972f152f7b9a6ad433b3fe",
"input": {
"model": "dev",
"width": 832,
"height": 1440,
"prompt": "The zeke/transamerica-pyramid Transamerica building in San Francisco. \\"the emperor\\" in the style of TOK a trtcrd, tarot style",
"go_fast": false,
"extra_lora": "apolinario/flux-tarot-v1",
"lora_scale": 1,
"megapixels": "1",
"num_outputs": 1,
"aspect_ratio": "custom",
"output_format": "webp",
"guidance_scale": 3.5,
"output_quality": 100,
"prompt_strength": 0.8,
"extra_lora_scale": 1,
"num_inference_steps": 28
}
}' \
https://api.replicate.com/v1/predictions
To learn more, take a look at Replicate’s HTTP API reference docs.
Add a payment method to run this model.
By signing in, you agree to our
terms of service and privacy policy
{
"completed_at": "2025-02-04T06:20:40.195889Z",
"created_at": "2025-02-04T06:20:30.777000Z",
"data_removed": false,
"error": null,
"id": "2keef5gjq5rm80cmsyf8xbdek4",
"input": {
"model": "dev",
"width": 832,
"height": 1440,
"prompt": "The zeke/transamerica-pyramid Transamerica building in San Francisco. \"the emperor\" in the style of TOK a trtcrd, tarot style",
"go_fast": false,
"extra_lora": "apolinario/flux-tarot-v1",
"lora_scale": 1,
"megapixels": "1",
"num_outputs": 1,
"aspect_ratio": "custom",
"output_format": "webp",
"guidance_scale": 3.5,
"output_quality": 100,
"prompt_strength": 0.8,
"extra_lora_scale": 1,
"num_inference_steps": 28
},
"logs": "2025-02-04 06:20:30.806 | DEBUG | fp8.lora_loading:apply_lora_to_model:574 - Extracting keys\n2025-02-04 06:20:30.806 | DEBUG | fp8.lora_loading:apply_lora_to_model:581 - Keys extracted\nApplying LoRA: 0%| | 0/304 [00:00<?, ?it/s]\nApplying LoRA: 94%|█████████▍| 285/304 [00:00<00:00, 2848.68it/s]\nApplying LoRA: 100%|██████████| 304/304 [00:00<00:00, 2750.42it/s]\n2025-02-04 06:20:30.917 | SUCCESS | fp8.lora_loading:unload_loras:564 - LoRAs unloaded in 0.11s\nfree=28885966225408\nDownloading weights\n2025-02-04T06:20:30Z | INFO | [ Initiating ] chunk_size=150M dest=/tmp/tmp9z_wcc1a/weights url=https://replicate.delivery/xezq/bH3EqYKdqiK7BlzHakPqDj0tn8fzI9xlcYuvOZzhHho0C7FKA/trained_model.tar\n2025-02-04T06:20:31Z | INFO | [ Complete ] dest=/tmp/tmp9z_wcc1a/weights size=\"172 MB\" total_elapsed=0.951s url=https://replicate.delivery/xezq/bH3EqYKdqiK7BlzHakPqDj0tn8fzI9xlcYuvOZzhHho0C7FKA/trained_model.tar\nDownloaded weights in 0.97s\nfree=28885793337344\nDownloading weights\n2025-02-04T06:20:31Z | INFO | [ Initiating ] chunk_size=150M dest=/tmp/tmpxgszx8wx/weights url=https://replicate.com/apolinario/flux-tarot-v1/_weights\n2025-02-04T06:20:32Z | INFO | [ Redirect ] redirect_url=https://replicate.delivery/yhqm/P0f0U8kSZX3WPyee7NQHScd7S3IwjvC2tWKfiKG7nIOQdXONB/trained_model.tar url=https://replicate.com/apolinario/flux-tarot-v1/_weights\n2025-02-04T06:20:32Z | INFO | [ Complete ] dest=/tmp/tmpxgszx8wx/weights size=\"172 MB\" total_elapsed=0.740s url=https://replicate.com/apolinario/flux-tarot-v1/_weights\nDownloaded weights in 0.76s\n2025-02-04 06:20:32.658 | INFO | fp8.lora_loading:convert_lora_weights:498 - Loading LoRA weights for /src/weights-cache/48e652e259b57e0e\n2025-02-04 06:20:32.732 | INFO | fp8.lora_loading:convert_lora_weights:519 - LoRA weights loaded\n2025-02-04 06:20:32.732 | DEBUG | fp8.lora_loading:apply_lora_to_model:574 - Extracting keys\n2025-02-04 06:20:32.732 | DEBUG | fp8.lora_loading:apply_lora_to_model:581 - Keys extracted\nApplying 
LoRA: 0%| | 0/304 [00:00<?, ?it/s]\nApplying LoRA: 94%|█████████▍| 286/304 [00:00<00:00, 2859.56it/s]\nApplying LoRA: 100%|██████████| 304/304 [00:00<00:00, 2757.11it/s]\n2025-02-04 06:20:32.843 | SUCCESS | fp8.lora_loading:load_lora:539 - LoRA applied in 0.19s\n2025-02-04 06:20:32.843 | INFO | fp8.lora_loading:convert_lora_weights:498 - Loading LoRA weights for /src/weights-cache/3ab907c323b24c96\n2025-02-04 06:20:32.974 | INFO | fp8.lora_loading:convert_lora_weights:519 - LoRA weights loaded\n2025-02-04 06:20:32.974 | DEBUG | fp8.lora_loading:apply_lora_to_model:574 - Extracting keys\n2025-02-04 06:20:32.975 | DEBUG | fp8.lora_loading:apply_lora_to_model:581 - Keys extracted\nApplying LoRA: 0%| | 0/304 [00:00<?, ?it/s]\nApplying LoRA: 94%|█████████▍| 286/304 [00:00<00:00, 2857.31it/s]\nApplying LoRA: 100%|██████████| 304/304 [00:00<00:00, 2754.68it/s]\n2025-02-04 06:20:33.085 | SUCCESS | fp8.lora_loading:load_lora:539 - LoRA applied in 0.24s\nUsing seed: 10868\n0it [00:00, ?it/s]\n1it [00:00, 7.23it/s]\n2it [00:00, 5.07it/s]\n3it [00:00, 4.61it/s]\n4it [00:00, 4.43it/s]\n5it [00:01, 4.32it/s]\n6it [00:01, 4.26it/s]\n7it [00:01, 4.23it/s]\n8it [00:01, 4.21it/s]\n9it [00:02, 4.19it/s]\n10it [00:02, 4.17it/s]\n11it [00:02, 4.17it/s]\n12it [00:02, 4.17it/s]\n13it [00:03, 4.17it/s]\n14it [00:03, 4.16it/s]\n15it [00:03, 4.15it/s]\n16it [00:03, 4.16it/s]\n17it [00:03, 4.16it/s]\n18it [00:04, 4.16it/s]\n19it [00:04, 4.16it/s]\n20it [00:04, 4.16it/s]\n21it [00:04, 4.15it/s]\n22it [00:05, 4.15it/s]\n23it [00:05, 4.15it/s]\n24it [00:05, 4.16it/s]\n25it [00:05, 4.16it/s]\n26it [00:06, 4.15it/s]\n27it [00:06, 4.15it/s]\n28it [00:06, 4.15it/s]\n28it [00:06, 4.22it/s]\nTotal safe images: 1 out of 1",
"metrics": {
"predict_time": 9.388470135,
"total_time": 9.418889
},
"output": [
"https://replicate.delivery/xezq/9KfdLrrrs6UcGS0tu8CCZikqeQ3HNAFRlesJw3rjU2rwdsXoA/out-0.webp"
],
"started_at": "2025-02-04T06:20:30.807419Z",
"status": "succeeded",
"urls": {
"stream": "https://stream.replicate.com/v1/files/bcwr-ay3ko4cdbnsjnayhk3mptmh575h3lpuvyerf7fcadqfcvd23gf2a",
"get": "https://api.replicate.com/v1/predictions/2keef5gjq5rm80cmsyf8xbdek4",
"cancel": "https://api.replicate.com/v1/predictions/2keef5gjq5rm80cmsyf8xbdek4/cancel"
},
"version": "c9e3e8f4858f6baa8a9df266ee7e9214596202bb41972f152f7b9a6ad433b3fe"
}
2025-02-04 06:20:30.806 | DEBUG | fp8.lora_loading:apply_lora_to_model:574 - Extracting keys
2025-02-04 06:20:30.806 | DEBUG | fp8.lora_loading:apply_lora_to_model:581 - Keys extracted
Applying LoRA: 0%| | 0/304 [00:00<?, ?it/s]
Applying LoRA: 94%|█████████▍| 285/304 [00:00<00:00, 2848.68it/s]
Applying LoRA: 100%|██████████| 304/304 [00:00<00:00, 2750.42it/s]
2025-02-04 06:20:30.917 | SUCCESS | fp8.lora_loading:unload_loras:564 - LoRAs unloaded in 0.11s
free=28885966225408
Downloading weights
2025-02-04T06:20:30Z | INFO | [ Initiating ] chunk_size=150M dest=/tmp/tmp9z_wcc1a/weights url=https://replicate.delivery/xezq/bH3EqYKdqiK7BlzHakPqDj0tn8fzI9xlcYuvOZzhHho0C7FKA/trained_model.tar
2025-02-04T06:20:31Z | INFO | [ Complete ] dest=/tmp/tmp9z_wcc1a/weights size="172 MB" total_elapsed=0.951s url=https://replicate.delivery/xezq/bH3EqYKdqiK7BlzHakPqDj0tn8fzI9xlcYuvOZzhHho0C7FKA/trained_model.tar
Downloaded weights in 0.97s
free=28885793337344
Downloading weights
2025-02-04T06:20:31Z | INFO | [ Initiating ] chunk_size=150M dest=/tmp/tmpxgszx8wx/weights url=https://replicate.com/apolinario/flux-tarot-v1/_weights
2025-02-04T06:20:32Z | INFO | [ Redirect ] redirect_url=https://replicate.delivery/yhqm/P0f0U8kSZX3WPyee7NQHScd7S3IwjvC2tWKfiKG7nIOQdXONB/trained_model.tar url=https://replicate.com/apolinario/flux-tarot-v1/_weights
2025-02-04T06:20:32Z | INFO | [ Complete ] dest=/tmp/tmpxgszx8wx/weights size="172 MB" total_elapsed=0.740s url=https://replicate.com/apolinario/flux-tarot-v1/_weights
Downloaded weights in 0.76s
2025-02-04 06:20:32.658 | INFO | fp8.lora_loading:convert_lora_weights:498 - Loading LoRA weights for /src/weights-cache/48e652e259b57e0e
2025-02-04 06:20:32.732 | INFO | fp8.lora_loading:convert_lora_weights:519 - LoRA weights loaded
2025-02-04 06:20:32.732 | DEBUG | fp8.lora_loading:apply_lora_to_model:574 - Extracting keys
2025-02-04 06:20:32.732 | DEBUG | fp8.lora_loading:apply_lora_to_model:581 - Keys extracted
Applying LoRA: 0%| | 0/304 [00:00<?, ?it/s]
Applying LoRA: 94%|█████████▍| 286/304 [00:00<00:00, 2859.56it/s]
Applying LoRA: 100%|██████████| 304/304 [00:00<00:00, 2757.11it/s]
2025-02-04 06:20:32.843 | SUCCESS | fp8.lora_loading:load_lora:539 - LoRA applied in 0.19s
2025-02-04 06:20:32.843 | INFO | fp8.lora_loading:convert_lora_weights:498 - Loading LoRA weights for /src/weights-cache/3ab907c323b24c96
2025-02-04 06:20:32.974 | INFO | fp8.lora_loading:convert_lora_weights:519 - LoRA weights loaded
2025-02-04 06:20:32.974 | DEBUG | fp8.lora_loading:apply_lora_to_model:574 - Extracting keys
2025-02-04 06:20:32.975 | DEBUG | fp8.lora_loading:apply_lora_to_model:581 - Keys extracted
Applying LoRA: 0%| | 0/304 [00:00<?, ?it/s]
Applying LoRA: 94%|█████████▍| 286/304 [00:00<00:00, 2857.31it/s]
Applying LoRA: 100%|██████████| 304/304 [00:00<00:00, 2754.68it/s]
2025-02-04 06:20:33.085 | SUCCESS | fp8.lora_loading:load_lora:539 - LoRA applied in 0.24s
Using seed: 10868
0it [00:00, ?it/s]
1it [00:00, 7.23it/s]
2it [00:00, 5.07it/s]
3it [00:00, 4.61it/s]
4it [00:00, 4.43it/s]
5it [00:01, 4.32it/s]
6it [00:01, 4.26it/s]
7it [00:01, 4.23it/s]
8it [00:01, 4.21it/s]
9it [00:02, 4.19it/s]
10it [00:02, 4.17it/s]
11it [00:02, 4.17it/s]
12it [00:02, 4.17it/s]
13it [00:03, 4.17it/s]
14it [00:03, 4.16it/s]
15it [00:03, 4.15it/s]
16it [00:03, 4.16it/s]
17it [00:03, 4.16it/s]
18it [00:04, 4.16it/s]
19it [00:04, 4.16it/s]
20it [00:04, 4.16it/s]
21it [00:04, 4.15it/s]
22it [00:05, 4.15it/s]
23it [00:05, 4.15it/s]
24it [00:05, 4.16it/s]
25it [00:05, 4.16it/s]
26it [00:06, 4.15it/s]
27it [00:06, 4.15it/s]
28it [00:06, 4.15it/s]
28it [00:06, 4.22it/s]
Total safe images: 1 out of 1
This model runs on Nvidia H100 GPU hardware. We don't yet have enough runs of this model to provide performance information.
This model doesn't have a readme.
This model is warm. You'll get a fast response if the model is warm and already running, and a slower response if the model is cold and starting up.
Choose a file from your machine
Hint: you can also drag files onto the input
Choose a file from your machine
Hint: you can also drag files onto the input
2025-02-04 06:20:30.806 | DEBUG | fp8.lora_loading:apply_lora_to_model:574 - Extracting keys
2025-02-04 06:20:30.806 | DEBUG | fp8.lora_loading:apply_lora_to_model:581 - Keys extracted
Applying LoRA: 0%| | 0/304 [00:00<?, ?it/s]
Applying LoRA: 94%|█████████▍| 285/304 [00:00<00:00, 2848.68it/s]
Applying LoRA: 100%|██████████| 304/304 [00:00<00:00, 2750.42it/s]
2025-02-04 06:20:30.917 | SUCCESS | fp8.lora_loading:unload_loras:564 - LoRAs unloaded in 0.11s
free=28885966225408
Downloading weights
2025-02-04T06:20:30Z | INFO | [ Initiating ] chunk_size=150M dest=/tmp/tmp9z_wcc1a/weights url=https://replicate.delivery/xezq/bH3EqYKdqiK7BlzHakPqDj0tn8fzI9xlcYuvOZzhHho0C7FKA/trained_model.tar
2025-02-04T06:20:31Z | INFO | [ Complete ] dest=/tmp/tmp9z_wcc1a/weights size="172 MB" total_elapsed=0.951s url=https://replicate.delivery/xezq/bH3EqYKdqiK7BlzHakPqDj0tn8fzI9xlcYuvOZzhHho0C7FKA/trained_model.tar
Downloaded weights in 0.97s
free=28885793337344
Downloading weights
2025-02-04T06:20:31Z | INFO | [ Initiating ] chunk_size=150M dest=/tmp/tmpxgszx8wx/weights url=https://replicate.com/apolinario/flux-tarot-v1/_weights
2025-02-04T06:20:32Z | INFO | [ Redirect ] redirect_url=https://replicate.delivery/yhqm/P0f0U8kSZX3WPyee7NQHScd7S3IwjvC2tWKfiKG7nIOQdXONB/trained_model.tar url=https://replicate.com/apolinario/flux-tarot-v1/_weights
2025-02-04T06:20:32Z | INFO | [ Complete ] dest=/tmp/tmpxgszx8wx/weights size="172 MB" total_elapsed=0.740s url=https://replicate.com/apolinario/flux-tarot-v1/_weights
Downloaded weights in 0.76s
2025-02-04 06:20:32.658 | INFO | fp8.lora_loading:convert_lora_weights:498 - Loading LoRA weights for /src/weights-cache/48e652e259b57e0e
2025-02-04 06:20:32.732 | INFO | fp8.lora_loading:convert_lora_weights:519 - LoRA weights loaded
2025-02-04 06:20:32.732 | DEBUG | fp8.lora_loading:apply_lora_to_model:574 - Extracting keys
2025-02-04 06:20:32.732 | DEBUG | fp8.lora_loading:apply_lora_to_model:581 - Keys extracted
Applying LoRA: 0%| | 0/304 [00:00<?, ?it/s]
Applying LoRA: 94%|█████████▍| 286/304 [00:00<00:00, 2859.56it/s]
Applying LoRA: 100%|██████████| 304/304 [00:00<00:00, 2757.11it/s]
2025-02-04 06:20:32.843 | SUCCESS | fp8.lora_loading:load_lora:539 - LoRA applied in 0.19s
2025-02-04 06:20:32.843 | INFO | fp8.lora_loading:convert_lora_weights:498 - Loading LoRA weights for /src/weights-cache/3ab907c323b24c96
2025-02-04 06:20:32.974 | INFO | fp8.lora_loading:convert_lora_weights:519 - LoRA weights loaded
2025-02-04 06:20:32.974 | DEBUG | fp8.lora_loading:apply_lora_to_model:574 - Extracting keys
2025-02-04 06:20:32.975 | DEBUG | fp8.lora_loading:apply_lora_to_model:581 - Keys extracted
Applying LoRA: 0%| | 0/304 [00:00<?, ?it/s]
Applying LoRA: 94%|█████████▍| 286/304 [00:00<00:00, 2857.31it/s]
Applying LoRA: 100%|██████████| 304/304 [00:00<00:00, 2754.68it/s]
2025-02-04 06:20:33.085 | SUCCESS | fp8.lora_loading:load_lora:539 - LoRA applied in 0.24s
Using seed: 10868
0it [00:00, ?it/s]
1it [00:00, 7.23it/s]
2it [00:00, 5.07it/s]
3it [00:00, 4.61it/s]
4it [00:00, 4.43it/s]
5it [00:01, 4.32it/s]
6it [00:01, 4.26it/s]
7it [00:01, 4.23it/s]
8it [00:01, 4.21it/s]
9it [00:02, 4.19it/s]
10it [00:02, 4.17it/s]
11it [00:02, 4.17it/s]
12it [00:02, 4.17it/s]
13it [00:03, 4.17it/s]
14it [00:03, 4.16it/s]
15it [00:03, 4.15it/s]
16it [00:03, 4.16it/s]
17it [00:03, 4.16it/s]
18it [00:04, 4.16it/s]
19it [00:04, 4.16it/s]
20it [00:04, 4.16it/s]
21it [00:04, 4.15it/s]
22it [00:05, 4.15it/s]
23it [00:05, 4.15it/s]
24it [00:05, 4.16it/s]
25it [00:05, 4.16it/s]
26it [00:06, 4.15it/s]
27it [00:06, 4.15it/s]
28it [00:06, 4.15it/s]
28it [00:06, 4.22it/s]
Total safe images: 1 out of 1