Controllable generative AI art
Run this model in Node.js with one line of code. First, install Replicate's Node.js client library:
npm install replicate
Set the REPLICATE_API_TOKEN environment variable:
export REPLICATE_API_TOKEN=<paste-your-token-here>
Find your API token in your account settings.
import Replicate from "replicate";
const replicate = new Replicate({
auth: process.env.REPLICATE_API_TOKEN,
});
Run ltejedor/differentiable-rasterizer-vector-graphics using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
const output = await replicate.run(
  "ltejedor/differentiable-rasterizer-vector-graphics:efc7ea72d81d13e99a1a2ec5d40dbf9893e851f0a7363ef3e8e44f5a142e1aa2",
  {
    input: {
      loss: "CLIP",
      prompt: "red panda",
      patch_url: "https://storage.googleapis.com/dm_arnheim_3_assets/collage_patches/animals.npy",
      color_space: "RGB space",
      num_patches: 100,
      optim_steps: 250,
      background_red: 0,
      background_blue: 0,
      background_green: 0,
      initial_positions: [["image_5.png",-0.194643,0.121429],["image_4.png",0.301786,-0.217857],["image_8.png",0.294643,0.35],["image_9.png",-0.648214,0.157143]],
      initial_colour_values: [],
      initial_transformations: []
    }
  }
);
console.log(output);
To learn more, take a look at the guide on getting started with Node.js.
To run the model in Python, install Replicate's Python client library:
pip install replicate
Set the REPLICATE_API_TOKEN environment variable:
export REPLICATE_API_TOKEN=<paste-your-token-here>
Find your API token in your account settings.
import replicate
Run ltejedor/differentiable-rasterizer-vector-graphics using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
output = replicate.run(
    "ltejedor/differentiable-rasterizer-vector-graphics:efc7ea72d81d13e99a1a2ec5d40dbf9893e851f0a7363ef3e8e44f5a142e1aa2",
    input={
        "loss": "CLIP",
        "prompt": "red panda",
        "patch_url": "https://storage.googleapis.com/dm_arnheim_3_assets/collage_patches/animals.npy",
        "color_space": "RGB space",
        "num_patches": 100,
        "optim_steps": 250,
        "background_red": 0,
        "background_blue": 0,
        "background_green": 0,
        "initial_positions": [["image_5.png",-0.194643,0.121429],["image_4.png",0.301786,-0.217857],["image_8.png",0.294643,0.35],["image_9.png",-0.648214,0.157143]],
        "initial_colour_values": [],
        "initial_transformations": []
    }
)
print(output)
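The model returns a list of image URLs, one per saved optimisation checkpoint, as in the example output further down this page. Below is a minimal sketch for saving them locally; it assumes the output is a list of plain URL strings (newer versions of the Python client may instead return file-like FileOutput objects, in which case you can read each item directly).

import urllib.request

# Assumes `output` is the list of image URLs returned by replicate.run(...) above.
for i, url in enumerate(output):
    filename = f"optim_{i}.png"  # hypothetical local name; the returned URLs end in optim_*.png
    urllib.request.urlretrieve(url, filename)
    print(f"saved {url} -> {filename}")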
To learn more, take a look at the guide on getting started with Python.
Set the REPLICATE_API_TOKEN environment variable:
export REPLICATE_API_TOKEN=<paste-your-token-here>
Find your API token in your account settings.
Run ltejedor/differentiable-rasterizer-vector-graphics using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
curl -s -X POST \
  -H "Authorization: Bearer $REPLICATE_API_TOKEN" \
  -H "Content-Type: application/json" \
  -H "Prefer: wait" \
  -d $'{
    "version": "efc7ea72d81d13e99a1a2ec5d40dbf9893e851f0a7363ef3e8e44f5a142e1aa2",
    "input": {
      "loss": "CLIP",
      "prompt": "red panda",
      "patch_url": "https://storage.googleapis.com/dm_arnheim_3_assets/collage_patches/animals.npy",
      "color_space": "RGB space",
      "num_patches": 100,
      "optim_steps": 250,
      "background_red": 0,
      "background_blue": 0,
      "background_green": 0,
      "initial_positions": [["image_5.png",-0.194643,0.121429],["image_4.png",0.301786,-0.217857],["image_8.png",0.294643,0.35],["image_9.png",-0.648214,0.157143]],
      "initial_colour_values": [],
      "initial_transformations": []
    }
  }' \
  https://api.replicate.com/v1/predictions
To learn more, take a look at Replicate’s HTTP API reference docs.
brew install cog
If you don’t have Homebrew, there are other installation options available.
Run this to download the model and run it in your local environment:
cog predict r8.im/ltejedor/differentiable-rasterizer-vector-graphics@sha256:efc7ea72d81d13e99a1a2ec5d40dbf9893e851f0a7363ef3e8e44f5a142e1aa2 \
-i 'loss="CLIP"' \
-i 'prompt="red panda"' \
-i 'patch_url="https://storage.googleapis.com/dm_arnheim_3_assets/collage_patches/animals.npy"' \
-i 'color_space="RGB space"' \
-i 'num_patches=100' \
-i 'optim_steps=250' \
-i 'background_red=0' \
-i 'background_blue=0' \
-i 'background_green=0' \
-i 'initial_positions=[["image_5.png",-0.194643,0.121429],["image_4.png",0.301786,-0.217857],["image_8.png",0.294643,0.35],["image_9.png",-0.648214,0.157143]]' \
-i 'initial_colour_values=[]' \
-i 'initial_transformations=[]'
To learn more, take a look at the Cog documentation.
Run this to download the model and run it in your local environment:
docker run -d -p 5000:5000 --gpus=all r8.im/ltejedor/differentiable-rasterizer-vector-graphics@sha256:efc7ea72d81d13e99a1a2ec5d40dbf9893e851f0a7363ef3e8e44f5a142e1aa2
curl -s -X POST \
  -H "Content-Type: application/json" \
  -d $'{
    "input": {
      "loss": "CLIP",
      "prompt": "red panda",
      "patch_url": "https://storage.googleapis.com/dm_arnheim_3_assets/collage_patches/animals.npy",
      "color_space": "RGB space",
      "num_patches": 100,
      "optim_steps": 250,
      "background_red": 0,
      "background_blue": 0,
      "background_green": 0,
      "initial_positions": [["image_5.png",-0.194643,0.121429],["image_4.png",0.301786,-0.217857],["image_8.png",0.294643,0.35],["image_9.png",-0.648214,0.157143]],
      "initial_colour_values": [],
      "initial_transformations": []
    }
  }' \
  http://localhost:5000/predictions
To learn more, take a look at the Cog documentation.
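If you prefer to call the local container from Python instead of curl, here is a minimal sketch. It assumes the container started above is listening on localhost:5000 and that the response is a JSON document with an output field, mirroring the example prediction shown below.

import json
import urllib.request

# Same input payload as the curl example above, posted to the local Cog container.
payload = {
    "input": {
        "loss": "CLIP",
        "prompt": "red panda",
        "patch_url": "https://storage.googleapis.com/dm_arnheim_3_assets/collage_patches/animals.npy",
        "color_space": "RGB space",
        "num_patches": 100,
        "optim_steps": 250,
        "background_red": 0,
        "background_blue": 0,
        "background_green": 0,
        "initial_positions": [["image_5.png", -0.194643, 0.121429], ["image_4.png", 0.301786, -0.217857], ["image_8.png", 0.294643, 0.35], ["image_9.png", -0.648214, 0.157143]],
        "initial_colour_values": [],
        "initial_transformations": [],
    }
}

request = urllib.request.Request(
    "http://localhost:5000/predictions",
    data=json.dumps(payload).encode("utf-8"),
    headers={"Content-Type": "application/json"},
    method="POST",
)
with urllib.request.urlopen(request) as response:
    prediction = json.load(response)
print(prediction.get("output"))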
{
"completed_at": "2025-03-19T00:26:06.657594Z",
"created_at": "2025-03-19T00:23:21.260000Z",
"data_removed": false,
"error": null,
"id": "2p6axtm9nhrj60cnnf4a3r6cn0",
"input": {
"prompt": "red panda",
"initial_positions": [
[
"image_5.png",
-0.194643,
0.121429
],
[
"image_4.png",
0.301786,
-0.217857
],
[
"image_8.png",
0.294643,
0.35
],
[
"image_9.png",
-0.648214,
0.157143
]
]
},
"logs": "initial positions\n[['image_5.png', '-0.194643', '0.121429'], ['image_4.png', '0.301786', '-0.217857'], ['image_8.png', '0.294643', '0.35'], ['image_9.png', '-0.648214', '0.157143']]\nTiling 1x1 collages\nOptimisation:\nTile size: 448x448\nGlobal size: 448x448 (WxH)\nHigh res:\nTile size: 896x896\nGlobal size: 896x896 (WxH)\nTile 0 prompts: ['a photorealistic sky with sun', 'a photorealistic sky', 'a photorealistic sky with moon', 'a photorealistic tree', 'a photorealistic tree', 'a photorealistic tree', 'a photorealistic field', 'a photorealistic field', 'a photorealistic chicken', 'red panda']\nNew collage creator for y0, x0 with bg\nimage (not stitch) min 0.0, max 0.0\nUsing cached version of animals.npy\nPatch set animals.npy, fixed_scale_patches? True, fixed_scale_coeff=0.5, patch_max_proportion=5\nMax size for fixed scale patches: (896,896)\n<class 'bool'>\nPatch 0 scaled by 0.50\nPatch 1 scaled by 0.50\nPatch 2 scaled by 0.50\nPatch 3 scaled by 0.50\nPatch 4 scaled by 0.50\nPatch 5 scaled by 0.50\nPatch 6 scaled by 0.50\nPatch 7 scaled by 0.50\nPatch 8 scaled by 0.50\nPatch 9 scaled by 0.50\nPatch 10 scaled by 0.50\nPatch 11 scaled by 0.50\nPatch 12 scaled by 0.50\nPatch 13 scaled by 0.50\nPatch 14 scaled by 0.50\nPatch 15 scaled by 0.50\nPatch 16 scaled by 0.50\nPatch 17 scaled by 0.50\nPatch 18 scaled by 0.50\nPatch 19 scaled by 0.50\nPatch 20 scaled by 0.50\nPatch 21 scaled by 0.50\nPatch 22 scaled by 0.50\nPatch 23 scaled by 0.50\nPatch 24 scaled by 0.50\nPatch 25 scaled by 0.50\nPatch 26 scaled by 0.50\nPatch 27 scaled by 0.50\nPatch 28 scaled by 0.50\nPatch 29 scaled by 0.50\nPatch 30 scaled by 0.50\nPatch 31 scaled by 0.50\nPatch 32 scaled by 0.50\nPatch 33 scaled by 0.50\nPatch 34 scaled by 0.50\nPatch 35 scaled by 0.50\nPatch 36 scaled by 0.50\nPatch 37 scaled by 0.50\nPatch 38 scaled by 0.50\nPatch 39 scaled by 0.50\nPatch 40 scaled by 0.50\nPatch 41 scaled by 0.50\nPatch 42 scaled by 0.50\nPatch sizes during optimisation:\nPatch 0 of shape (172, 136, 4)\nPatch 1 of shape (192, 122, 4)\nPatch 2 of shape (144, 334, 4)\nPatch 3 of shape (142, 126, 4)\nPatch 4 of shape (152, 182, 4)\nPatch 5 of shape (148, 146, 4)\nPatch 6 of shape (206, 194, 4)\nPatch 7 of shape (148, 138, 4)\nPatch 8 of shape (182, 260, 4)\nPatch 9 of shape (159, 262, 4)\nPatch 10 of shape (174, 226, 4)\nPatch 11 of shape (157, 158, 4)\nPatch 12 of shape (96, 193, 4)\nPatch 13 of shape (254, 145, 4)\nPatch 14 of shape (176, 116, 4)\nPatch 15 of shape (166, 178, 4)\nPatch 16 of shape (151, 209, 4)\nPatch 17 of shape (129, 208, 4)\nPatch 18 of shape (158, 161, 4)\nPatch 19 of shape (122, 192, 4)\nPatch 20 of shape (143, 230, 4)\nPatch 21 of shape (195, 149, 4)\nPatch 22 of shape (159, 178, 4)\nPatch 23 of shape (71, 184, 4)\nPatch 24 of shape (126, 218, 4)\nPatch 25 of shape (134, 177, 4)\nPatch 26 of shape (146, 162, 4)\nPatch 27 of shape (156, 170, 4)\nPatch 28 of shape (154, 233, 4)\nPatch 29 of shape (233, 166, 4)\nPatch 30 of shape (57, 201, 4)\nPatch 31 of shape (148, 219, 4)\nPatch 32 of shape (159, 218, 4)\nPatch 33 of shape (146, 228, 4)\nPatch 34 of shape (249, 136, 4)\nPatch 35 of shape (170, 146, 4)\nPatch 36 of shape (161, 133, 4)\nPatch 37 of shape (150, 172, 4)\nPatch 38 of shape (163, 206, 4)\nPatch 39 of shape (154, 194, 4)\nPatch 40 of shape (184, 417, 4)\nPatch 41 of shape (134, 218, 4)\nPatch 42 of shape (180, 256, 4)\n43 patches, max (184, 417, 4), min (57, 201, 4)\nPatch sizes for high-resolution final image:\nPatch 0 of shape (344, 272, 4)\nPatch 1 of shape (385, 245, 4)\nPatch 
2 of shape (288, 668, 4)\nPatch 3 of shape (284, 252, 4)\nPatch 4 of shape (303, 364, 4)\nPatch 5 of shape (296, 292, 4)\nPatch 6 of shape (412, 387, 4)\nPatch 7 of shape (296, 275, 4)\nPatch 8 of shape (364, 521, 4)\nPatch 9 of shape (318, 524, 4)\nPatch 10 of shape (349, 453, 4)\nPatch 11 of shape (314, 316, 4)\nPatch 12 of shape (193, 386, 4)\nPatch 13 of shape (508, 290, 4)\nPatch 14 of shape (352, 232, 4)\nPatch 15 of shape (333, 355, 4)\nPatch 16 of shape (302, 418, 4)\nPatch 17 of shape (258, 416, 4)\nPatch 18 of shape (316, 322, 4)\nPatch 19 of shape (244, 384, 4)\nPatch 20 of shape (286, 460, 4)\nPatch 21 of shape (390, 298, 4)\nPatch 22 of shape (318, 357, 4)\nPatch 23 of shape (142, 368, 4)\nPatch 24 of shape (253, 436, 4)\nPatch 25 of shape (267, 354, 4)\nPatch 26 of shape (292, 323, 4)\nPatch 27 of shape (311, 341, 4)\nPatch 28 of shape (307, 466, 4)\nPatch 29 of shape (466, 332, 4)\nPatch 30 of shape (114, 402, 4)\nPatch 31 of shape (296, 438, 4)\nPatch 32 of shape (318, 437, 4)\nPatch 33 of shape (291, 455, 4)\nPatch 34 of shape (498, 273, 4)\nPatch 35 of shape (340, 291, 4)\nPatch 36 of shape (322, 266, 4)\nPatch 37 of shape (300, 344, 4)\nPatch 38 of shape (326, 413, 4)\nPatch 39 of shape (308, 387, 4)\nPatch 40 of shape (367, 834, 4)\nPatch 41 of shape (268, 436, 4)\nPatch 42 of shape (360, 512, 4)\n43 patches, max (367, 834, 4), min (114, 402, 4)\nGlobal prompt is red panda\nComposition prompts ['a photorealistic sky with sun', 'a photorealistic sky', 'a photorealistic sky with moon', 'a photorealistic tree', 'a photorealistic tree', 'a photorealistic tree', 'a photorealistic field', 'a photorealistic field', 'a photorealistic chicken', 'red panda']\nPopulationAffineTransforms is_high_res=False, requires_grad=True\nPopulationColourRGBTransforms for 100 patches, 2 individuals\nPopulationColourRGBTransforms requires_grad=True\nBackground image of size torch.Size([3, 448, 448])\nStarting optimization of collage.\nUpdated patches in 0.337s\ntorch.Size([2, 448, 448, 3])\n/src/src/training.py:391: FutureWarning: `torch.nn.utils.clip_grad_norm` is now deprecated in favor of `torch.nn.utils.clip_grad_norm_`.\ntorch.nn.utils.clip_grad_norm(generator.parameters(),\nimage (stitch) min 0.0, max 1.0\nSaving temporary image output_20250319_002503//optim_0.png (shape=(448, 896, 3))\n[ WARN:0@8.649] global loadsave.cpp:848 imwrite_ Unsupported depth image for selected encoder is fallbacked to CV_8U.\nIteration 0, rendering loss -0.812805\n0\ntorch.Size([2, 448, 448, 3])\n1\ntorch.Size([2, 448, 448, 3])\n2\ntorch.Size([2, 448, 448, 3])\n3\ntorch.Size([2, 448, 448, 3])\n4\ntorch.Size([2, 448, 448, 3])\n5\ntorch.Size([2, 448, 448, 3])\n6\ntorch.Size([2, 448, 448, 3])\n7\ntorch.Size([2, 448, 448, 3])\n8\ntorch.Size([2, 448, 448, 3])\n9\ntorch.Size([2, 448, 448, 3])\n10\ntorch.Size([2, 448, 448, 3])\n11\ntorch.Size([2, 448, 448, 3])\n12\ntorch.Size([2, 448, 448, 3])\n13\ntorch.Size([2, 448, 448, 3])\n14\ntorch.Size([2, 448, 448, 3])\n15\ntorch.Size([2, 448, 448, 3])\n16\ntorch.Size([2, 448, 448, 3])\n17\ntorch.Size([2, 448, 448, 3])\n18\ntorch.Size([2, 448, 448, 3])\n19\ntorch.Size([2, 448, 448, 3])\n20\ntorch.Size([2, 448, 448, 3])\n21\ntorch.Size([2, 448, 448, 3])\n22\ntorch.Size([2, 448, 448, 3])\n23\ntorch.Size([2, 448, 448, 3])\n24\ntorch.Size([2, 448, 448, 3])\n25\ntorch.Size([2, 448, 448, 3])\n26\ntorch.Size([2, 448, 448, 3])\n27\ntorch.Size([2, 448, 448, 3])\n28\ntorch.Size([2, 448, 448, 3])\n29\ntorch.Size([2, 448, 448, 3])\n30\ntorch.Size([2, 448, 448, 3])\n31\ntorch.Size([2, 448, 
448, 3])\n32\ntorch.Size([2, 448, 448, 3])\n33\ntorch.Size([2, 448, 448, 3])\n34\ntorch.Size([2, 448, 448, 3])\n35\ntorch.Size([2, 448, 448, 3])\n36\ntorch.Size([2, 448, 448, 3])\n37\ntorch.Size([2, 448, 448, 3])\n38\ntorch.Size([2, 448, 448, 3])\n39\ntorch.Size([2, 448, 448, 3])\n40\ntorch.Size([2, 448, 448, 3])\n41\ntorch.Size([2, 448, 448, 3])\n42\ntorch.Size([2, 448, 448, 3])\n43\ntorch.Size([2, 448, 448, 3])\n44\ntorch.Size([2, 448, 448, 3])\n45\ntorch.Size([2, 448, 448, 3])\n46\ntorch.Size([2, 448, 448, 3])\n47\ntorch.Size([2, 448, 448, 3])\n48\ntorch.Size([2, 448, 448, 3])\n49\ntorch.Size([2, 448, 448, 3])\nimage (stitch) min 0.0, max 1.0\nSaving temporary image output_20250319_002503//optim_50.png (shape=(448, 896, 3))\nIteration 50, rendering loss -0.847153\n50\ntorch.Size([2, 448, 448, 3])\n51\ntorch.Size([2, 448, 448, 3])\n52\ntorch.Size([2, 448, 448, 3])\n53\ntorch.Size([2, 448, 448, 3])\n54\ntorch.Size([2, 448, 448, 3])\n55\ntorch.Size([2, 448, 448, 3])\n56\ntorch.Size([2, 448, 448, 3])\n57\ntorch.Size([2, 448, 448, 3])\n58\ntorch.Size([2, 448, 448, 3])\n59\ntorch.Size([2, 448, 448, 3])\n60\ntorch.Size([2, 448, 448, 3])\n61\ntorch.Size([2, 448, 448, 3])\n62\ntorch.Size([2, 448, 448, 3])\n63\ntorch.Size([2, 448, 448, 3])\n64\ntorch.Size([2, 448, 448, 3])\n65\ntorch.Size([2, 448, 448, 3])\n66\ntorch.Size([2, 448, 448, 3])\n67\ntorch.Size([2, 448, 448, 3])\n68\ntorch.Size([2, 448, 448, 3])\n69\ntorch.Size([2, 448, 448, 3])\n70\ntorch.Size([2, 448, 448, 3])\n71\ntorch.Size([2, 448, 448, 3])\n72\ntorch.Size([2, 448, 448, 3])\n73\ntorch.Size([2, 448, 448, 3])\n74\ntorch.Size([2, 448, 448, 3])\n75\ntorch.Size([2, 448, 448, 3])\n76\ntorch.Size([2, 448, 448, 3])\n77\ntorch.Size([2, 448, 448, 3])\n78\ntorch.Size([2, 448, 448, 3])\n79\ntorch.Size([2, 448, 448, 3])\n80\ntorch.Size([2, 448, 448, 3])\n81\ntorch.Size([2, 448, 448, 3])\n82\ntorch.Size([2, 448, 448, 3])\n83\ntorch.Size([2, 448, 448, 3])\n84\ntorch.Size([2, 448, 448, 3])\n85\ntorch.Size([2, 448, 448, 3])\n86\ntorch.Size([2, 448, 448, 3])\n87\ntorch.Size([2, 448, 448, 3])\n88\ntorch.Size([2, 448, 448, 3])\n89\ntorch.Size([2, 448, 448, 3])\n90\ntorch.Size([2, 448, 448, 3])\n91\ntorch.Size([2, 448, 448, 3])\n92\ntorch.Size([2, 448, 448, 3])\n93\ntorch.Size([2, 448, 448, 3])\n94\ntorch.Size([2, 448, 448, 3])\n95\ntorch.Size([2, 448, 448, 3])\n96\ntorch.Size([2, 448, 448, 3])\n97\ntorch.Size([2, 448, 448, 3])\n98\ntorch.Size([2, 448, 448, 3])\n99\ntorch.Size([2, 448, 448, 3])\nimage (stitch) min 0.0, max 1.0\nSaving temporary image output_20250319_002503//optim_100.png (shape=(448, 896, 3))\nIteration 100, rendering loss -0.866760\nUpdated patches in 0.083s\n100\ntorch.Size([2, 448, 448, 3])\n101\ntorch.Size([2, 448, 448, 3])\n102\ntorch.Size([2, 448, 448, 3])\n103\ntorch.Size([2, 448, 448, 3])\n104\ntorch.Size([2, 448, 448, 3])\n105\ntorch.Size([2, 448, 448, 3])\n106\ntorch.Size([2, 448, 448, 3])\n107\ntorch.Size([2, 448, 448, 3])\n108\ntorch.Size([2, 448, 448, 3])\n109\ntorch.Size([2, 448, 448, 3])\n110\ntorch.Size([2, 448, 448, 3])\n111\ntorch.Size([2, 448, 448, 3])\n112\ntorch.Size([2, 448, 448, 3])\n113\ntorch.Size([2, 448, 448, 3])\n114\ntorch.Size([2, 448, 448, 3])\n115\ntorch.Size([2, 448, 448, 3])\n116\ntorch.Size([2, 448, 448, 3])\n117\ntorch.Size([2, 448, 448, 3])\n118\ntorch.Size([2, 448, 448, 3])\n119\ntorch.Size([2, 448, 448, 3])\n120\ntorch.Size([2, 448, 448, 3])\n121\ntorch.Size([2, 448, 448, 3])\n122\ntorch.Size([2, 448, 448, 3])\n123\ntorch.Size([2, 448, 448, 3])\n124\ntorch.Size([2, 448, 448, 
3])\n125\ntorch.Size([2, 448, 448, 3])\n126\ntorch.Size([2, 448, 448, 3])\n127\ntorch.Size([2, 448, 448, 3])\n128\ntorch.Size([2, 448, 448, 3])\n129\ntorch.Size([2, 448, 448, 3])\n130\ntorch.Size([2, 448, 448, 3])\n131\ntorch.Size([2, 448, 448, 3])\n132\ntorch.Size([2, 448, 448, 3])\n133\ntorch.Size([2, 448, 448, 3])\n134\ntorch.Size([2, 448, 448, 3])\n135\ntorch.Size([2, 448, 448, 3])\n136\ntorch.Size([2, 448, 448, 3])\n137\ntorch.Size([2, 448, 448, 3])\n138\ntorch.Size([2, 448, 448, 3])\n139\ntorch.Size([2, 448, 448, 3])\n140\ntorch.Size([2, 448, 448, 3])\n141\ntorch.Size([2, 448, 448, 3])\n142\ntorch.Size([2, 448, 448, 3])\n143\ntorch.Size([2, 448, 448, 3])\n144\ntorch.Size([2, 448, 448, 3])\n145\ntorch.Size([2, 448, 448, 3])\n146\ntorch.Size([2, 448, 448, 3])\n147\ntorch.Size([2, 448, 448, 3])\n148\ntorch.Size([2, 448, 448, 3])\n149\ntorch.Size([2, 448, 448, 3])\nimage (stitch) min 0.0, max 1.0\nSaving temporary image output_20250319_002503//optim_150.png (shape=(448, 896, 3))\nIteration 150, rendering loss -0.922791\n150\ntorch.Size([2, 448, 448, 3])\n151\ntorch.Size([2, 448, 448, 3])\n152\ntorch.Size([2, 448, 448, 3])\n153\ntorch.Size([2, 448, 448, 3])\n154\ntorch.Size([2, 448, 448, 3])\n155\ntorch.Size([2, 448, 448, 3])\n156\ntorch.Size([2, 448, 448, 3])\n157\ntorch.Size([2, 448, 448, 3])\n158\ntorch.Size([2, 448, 448, 3])\n159\ntorch.Size([2, 448, 448, 3])\n160\ntorch.Size([2, 448, 448, 3])\n161\ntorch.Size([2, 448, 448, 3])\n162\ntorch.Size([2, 448, 448, 3])\n163\ntorch.Size([2, 448, 448, 3])\n164\ntorch.Size([2, 448, 448, 3])\n165\ntorch.Size([2, 448, 448, 3])\n166\ntorch.Size([2, 448, 448, 3])\n167\ntorch.Size([2, 448, 448, 3])\n168\ntorch.Size([2, 448, 448, 3])\n169\ntorch.Size([2, 448, 448, 3])\n170\ntorch.Size([2, 448, 448, 3])\n171\ntorch.Size([2, 448, 448, 3])\n172\ntorch.Size([2, 448, 448, 3])\n173\ntorch.Size([2, 448, 448, 3])\n174\ntorch.Size([2, 448, 448, 3])\n175\ntorch.Size([2, 448, 448, 3])\n176\ntorch.Size([2, 448, 448, 3])\n177\ntorch.Size([2, 448, 448, 3])\n178\ntorch.Size([2, 448, 448, 3])\n179\ntorch.Size([2, 448, 448, 3])\n180\ntorch.Size([2, 448, 448, 3])\n181\ntorch.Size([2, 448, 448, 3])\n182\ntorch.Size([2, 448, 448, 3])\n183\ntorch.Size([2, 448, 448, 3])\n184\ntorch.Size([2, 448, 448, 3])\n185\ntorch.Size([2, 448, 448, 3])\n186\ntorch.Size([2, 448, 448, 3])\n187\ntorch.Size([2, 448, 448, 3])\n188\ntorch.Size([2, 448, 448, 3])\n189\ntorch.Size([2, 448, 448, 3])\n190\ntorch.Size([2, 448, 448, 3])\n191\ntorch.Size([2, 448, 448, 3])\n192\ntorch.Size([2, 448, 448, 3])\n193\ntorch.Size([2, 448, 448, 3])\n194\ntorch.Size([2, 448, 448, 3])\n195\ntorch.Size([2, 448, 448, 3])\n196\ntorch.Size([2, 448, 448, 3])\n197\ntorch.Size([2, 448, 448, 3])\n198\ntorch.Size([2, 448, 448, 3])\n199\ntorch.Size([2, 448, 448, 3])\nimage (stitch) min 0.0, max 1.0\nSaving temporary image output_20250319_002503//optim_200.png (shape=(448, 896, 3))\nIteration 200, rendering loss -0.958267\nUpdated patches in 0.082s\n200\ntorch.Size([2, 448, 448, 3])\n201\ntorch.Size([2, 448, 448, 3])\n202\ntorch.Size([2, 448, 448, 3])\n203\ntorch.Size([2, 448, 448, 3])\n204\ntorch.Size([2, 448, 448, 3])\n205\ntorch.Size([2, 448, 448, 3])\n206\ntorch.Size([2, 448, 448, 3])\n207\ntorch.Size([2, 448, 448, 3])\n208\ntorch.Size([2, 448, 448, 3])\n209\ntorch.Size([2, 448, 448, 3])\n210\ntorch.Size([2, 448, 448, 3])\n211\ntorch.Size([2, 448, 448, 3])\n212\ntorch.Size([2, 448, 448, 3])\n213\ntorch.Size([2, 448, 448, 3])\n214\ntorch.Size([2, 448, 448, 3])\n215\ntorch.Size([2, 448, 448, 3])\n216\ntorch.Size([2, 
448, 448, 3])\n217\ntorch.Size([2, 448, 448, 3])\n218\ntorch.Size([2, 448, 448, 3])\n219\ntorch.Size([2, 448, 448, 3])\n220\ntorch.Size([2, 448, 448, 3])\n221\ntorch.Size([2, 448, 448, 3])\n222\ntorch.Size([2, 448, 448, 3])\n223\ntorch.Size([2, 448, 448, 3])\n224\ntorch.Size([2, 448, 448, 3])\n225\ntorch.Size([2, 448, 448, 3])\n226\ntorch.Size([2, 448, 448, 3])\n227\ntorch.Size([2, 448, 448, 3])\n228\ntorch.Size([2, 448, 448, 3])\n229\ntorch.Size([2, 448, 448, 3])\n230\ntorch.Size([2, 448, 448, 3])\n231\ntorch.Size([2, 448, 448, 3])\n232\ntorch.Size([2, 448, 448, 3])\n233\ntorch.Size([2, 448, 448, 3])\n234\ntorch.Size([2, 448, 448, 3])\n235\ntorch.Size([2, 448, 448, 3])\n236\ntorch.Size([2, 448, 448, 3])\n237\ntorch.Size([2, 448, 448, 3])\n238\ntorch.Size([2, 448, 448, 3])\n239\ntorch.Size([2, 448, 448, 3])\n240\ntorch.Size([2, 448, 448, 3])\n241\ntorch.Size([2, 448, 448, 3])\n242\ntorch.Size([2, 448, 448, 3])\n243\ntorch.Size([2, 448, 448, 3])\n244\ntorch.Size([2, 448, 448, 3])\n245\ntorch.Size([2, 448, 448, 3])\n246\ntorch.Size([2, 448, 448, 3])\n247\ntorch.Size([2, 448, 448, 3])\n248\ntorch.Size([2, 448, 448, 3])\nimage (stitch) min 0.0, max 1.0\nSaving model to output_20250319_002503/...\n249\nPopulationAffineTransforms is_high_res=True, requires_grad=False\nPopulationColourRGBTransforms for 100 patches, 1 individuals\nPopulationColourRGBTransforms requires_grad=False\nBackground image of size torch.Size([3, 896, 896])\nLowest loss: -1.0089111328125 @ index 1:\n[0, 0] idx [0:896], [0:896]\nFinished [0, 0] idx [0:896], [0:896]\ntorch.Size([1, 896, 896, 4])\nImage has alpha channel\nPopulationAffineTransforms is_high_res=True, requires_grad=False\nPopulationColourRGBTransforms for 100 patches, 1 individuals\nPopulationColourRGBTransforms requires_grad=False\nBackground image of size torch.Size([3, 896, 896])\nLowest loss: -1.0089111328125 @ index 1:\nNot using background_image\n[0, 0] idx [0:896], [0:896]\nSetting alpha to zero outside of patches\nFinished [0, 0] idx [0:896], [0:896]\ntorch.Size([1, 896, 896, 4])\nImage has alpha channel",
"metrics": {
"predict_time": 63.184180862,
"total_time": 165.397594
},
"output": [
"https://replicate.delivery/yhqm/HYrDIJhupAovBFKRnb1HTE68HhIzawJyI2R3neeBeDQCH4zoA/optim_0.png",
"https://replicate.delivery/yhqm/vzTCOXgwjU4AIBB1GZakrShSU9RZqyxy7NeGqigesUHtD8ZUA/optim_50.png",
"https://replicate.delivery/yhqm/huHDdtJXWW5eTS7G4FZ6y64ULMKXlrFLJZEXYL1HfsX5D8ZUA/optim_100.png",
"https://replicate.delivery/yhqm/hdbWLMjZKT5MLdYKuLHUzAg4Gh8JXpPfmvCLM2kAhnnCCeZUA/optim_150.png",
"https://replicate.delivery/yhqm/vF6jG5UUnqbpDVuAYAQNj2JJfhWILNu1MWPVPfnqCM6RE8ZUA/optim_200.png"
],
"started_at": "2025-03-19T00:25:03.473414Z",
"status": "succeeded",
"urls": {
"stream": "https://stream.replicate.com/v1/files/yswh-ugixdgzbf3ai7d6ypcvbdgeckh4bs2klinet3bfksujzpagz4yoa",
"get": "https://api.replicate.com/v1/predictions/2p6axtm9nhrj60cnnf4a3r6cn0",
"cancel": "https://api.replicate.com/v1/predictions/2p6axtm9nhrj60cnnf4a3r6cn0/cancel"
},
"version": "2f0207fe8c5953b0be5b1d2fee8f7975f00db7083783204d36f6e3c2a15b267c"
}
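The initial_positions input above is a list of [patch_filename, x, y] triples that appears to seed the starting placement of the named collage patches. A minimal sketch of building it programmatically follows; note that the coordinate convention (the example values fall roughly within [-1, 1]) is inferred from this example rather than documented on this page.

# Hypothetical helper: assemble the initial_positions payload from (filename, x, y) tuples.
# The meaning of x/y is assumed from the example values above, not from official documentation.
placements = [
    ("image_5.png", -0.194643, 0.121429),
    ("image_4.png", 0.301786, -0.217857),
    ("image_8.png", 0.294643, 0.35),
    ("image_9.png", -0.648214, 0.157143),
]
initial_positions = [[name, x, y] for name, x, y in placements]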
initial positions
[['image_5.png', '-0.194643', '0.121429'], ['image_4.png', '0.301786', '-0.217857'], ['image_8.png', '0.294643', '0.35'], ['image_9.png', '-0.648214', '0.157143']]
Tiling 1x1 collages
Optimisation:
Tile size: 448x448
Global size: 448x448 (WxH)
High res:
Tile size: 896x896
Global size: 896x896 (WxH)
Tile 0 prompts: ['a photorealistic sky with sun', 'a photorealistic sky', 'a photorealistic sky with moon', 'a photorealistic tree', 'a photorealistic tree', 'a photorealistic tree', 'a photorealistic field', 'a photorealistic field', 'a photorealistic chicken', 'red panda']
New collage creator for y0, x0 with bg
image (not stitch) min 0.0, max 0.0
Using cached version of animals.npy
Patch set animals.npy, fixed_scale_patches? True, fixed_scale_coeff=0.5, patch_max_proportion=5
Max size for fixed scale patches: (896,896)
<class 'bool'>
Patch 0 scaled by 0.50
Patch 1 scaled by 0.50
Patch 2 scaled by 0.50
Patch 3 scaled by 0.50
Patch 4 scaled by 0.50
Patch 5 scaled by 0.50
Patch 6 scaled by 0.50
Patch 7 scaled by 0.50
Patch 8 scaled by 0.50
Patch 9 scaled by 0.50
Patch 10 scaled by 0.50
Patch 11 scaled by 0.50
Patch 12 scaled by 0.50
Patch 13 scaled by 0.50
Patch 14 scaled by 0.50
Patch 15 scaled by 0.50
Patch 16 scaled by 0.50
Patch 17 scaled by 0.50
Patch 18 scaled by 0.50
Patch 19 scaled by 0.50
Patch 20 scaled by 0.50
Patch 21 scaled by 0.50
Patch 22 scaled by 0.50
Patch 23 scaled by 0.50
Patch 24 scaled by 0.50
Patch 25 scaled by 0.50
Patch 26 scaled by 0.50
Patch 27 scaled by 0.50
Patch 28 scaled by 0.50
Patch 29 scaled by 0.50
Patch 30 scaled by 0.50
Patch 31 scaled by 0.50
Patch 32 scaled by 0.50
Patch 33 scaled by 0.50
Patch 34 scaled by 0.50
Patch 35 scaled by 0.50
Patch 36 scaled by 0.50
Patch 37 scaled by 0.50
Patch 38 scaled by 0.50
Patch 39 scaled by 0.50
Patch 40 scaled by 0.50
Patch 41 scaled by 0.50
Patch 42 scaled by 0.50
Patch sizes during optimisation:
Patch 0 of shape (172, 136, 4)
Patch 1 of shape (192, 122, 4)
Patch 2 of shape (144, 334, 4)
Patch 3 of shape (142, 126, 4)
Patch 4 of shape (152, 182, 4)
Patch 5 of shape (148, 146, 4)
Patch 6 of shape (206, 194, 4)
Patch 7 of shape (148, 138, 4)
Patch 8 of shape (182, 260, 4)
Patch 9 of shape (159, 262, 4)
Patch 10 of shape (174, 226, 4)
Patch 11 of shape (157, 158, 4)
Patch 12 of shape (96, 193, 4)
Patch 13 of shape (254, 145, 4)
Patch 14 of shape (176, 116, 4)
Patch 15 of shape (166, 178, 4)
Patch 16 of shape (151, 209, 4)
Patch 17 of shape (129, 208, 4)
Patch 18 of shape (158, 161, 4)
Patch 19 of shape (122, 192, 4)
Patch 20 of shape (143, 230, 4)
Patch 21 of shape (195, 149, 4)
Patch 22 of shape (159, 178, 4)
Patch 23 of shape (71, 184, 4)
Patch 24 of shape (126, 218, 4)
Patch 25 of shape (134, 177, 4)
Patch 26 of shape (146, 162, 4)
Patch 27 of shape (156, 170, 4)
Patch 28 of shape (154, 233, 4)
Patch 29 of shape (233, 166, 4)
Patch 30 of shape (57, 201, 4)
Patch 31 of shape (148, 219, 4)
Patch 32 of shape (159, 218, 4)
Patch 33 of shape (146, 228, 4)
Patch 34 of shape (249, 136, 4)
Patch 35 of shape (170, 146, 4)
Patch 36 of shape (161, 133, 4)
Patch 37 of shape (150, 172, 4)
Patch 38 of shape (163, 206, 4)
Patch 39 of shape (154, 194, 4)
Patch 40 of shape (184, 417, 4)
Patch 41 of shape (134, 218, 4)
Patch 42 of shape (180, 256, 4)
43 patches, max (184, 417, 4), min (57, 201, 4)
Patch sizes for high-resolution final image:
Patch 0 of shape (344, 272, 4)
Patch 1 of shape (385, 245, 4)
Patch 2 of shape (288, 668, 4)
Patch 3 of shape (284, 252, 4)
Patch 4 of shape (303, 364, 4)
Patch 5 of shape (296, 292, 4)
Patch 6 of shape (412, 387, 4)
Patch 7 of shape (296, 275, 4)
Patch 8 of shape (364, 521, 4)
Patch 9 of shape (318, 524, 4)
Patch 10 of shape (349, 453, 4)
Patch 11 of shape (314, 316, 4)
Patch 12 of shape (193, 386, 4)
Patch 13 of shape (508, 290, 4)
Patch 14 of shape (352, 232, 4)
Patch 15 of shape (333, 355, 4)
Patch 16 of shape (302, 418, 4)
Patch 17 of shape (258, 416, 4)
Patch 18 of shape (316, 322, 4)
Patch 19 of shape (244, 384, 4)
Patch 20 of shape (286, 460, 4)
Patch 21 of shape (390, 298, 4)
Patch 22 of shape (318, 357, 4)
Patch 23 of shape (142, 368, 4)
Patch 24 of shape (253, 436, 4)
Patch 25 of shape (267, 354, 4)
Patch 26 of shape (292, 323, 4)
Patch 27 of shape (311, 341, 4)
Patch 28 of shape (307, 466, 4)
Patch 29 of shape (466, 332, 4)
Patch 30 of shape (114, 402, 4)
Patch 31 of shape (296, 438, 4)
Patch 32 of shape (318, 437, 4)
Patch 33 of shape (291, 455, 4)
Patch 34 of shape (498, 273, 4)
Patch 35 of shape (340, 291, 4)
Patch 36 of shape (322, 266, 4)
Patch 37 of shape (300, 344, 4)
Patch 38 of shape (326, 413, 4)
Patch 39 of shape (308, 387, 4)
Patch 40 of shape (367, 834, 4)
Patch 41 of shape (268, 436, 4)
Patch 42 of shape (360, 512, 4)
43 patches, max (367, 834, 4), min (114, 402, 4)
Global prompt is red panda
Composition prompts ['a photorealistic sky with sun', 'a photorealistic sky', 'a photorealistic sky with moon', 'a photorealistic tree', 'a photorealistic tree', 'a photorealistic tree', 'a photorealistic field', 'a photorealistic field', 'a photorealistic chicken', 'red panda']
PopulationAffineTransforms is_high_res=False, requires_grad=True
PopulationColourRGBTransforms for 100 patches, 2 individuals
PopulationColourRGBTransforms requires_grad=True
Background image of size torch.Size([3, 448, 448])
Starting optimization of collage.
Updated patches in 0.337s
torch.Size([2, 448, 448, 3])
/src/src/training.py:391: FutureWarning: `torch.nn.utils.clip_grad_norm` is now deprecated in favor of `torch.nn.utils.clip_grad_norm_`.
torch.nn.utils.clip_grad_norm(generator.parameters(),
image (stitch) min 0.0, max 1.0
Saving temporary image output_20250319_002503//optim_0.png (shape=(448, 896, 3))
[ WARN:0@8.649] global loadsave.cpp:848 imwrite_ Unsupported depth image for selected encoder is fallbacked to CV_8U.
Iteration 0, rendering loss -0.812805
0
torch.Size([2, 448, 448, 3])
1
torch.Size([2, 448, 448, 3])
2
torch.Size([2, 448, 448, 3])
3
torch.Size([2, 448, 448, 3])
4
torch.Size([2, 448, 448, 3])
5
torch.Size([2, 448, 448, 3])
6
torch.Size([2, 448, 448, 3])
7
torch.Size([2, 448, 448, 3])
8
torch.Size([2, 448, 448, 3])
9
torch.Size([2, 448, 448, 3])
10
torch.Size([2, 448, 448, 3])
11
torch.Size([2, 448, 448, 3])
12
torch.Size([2, 448, 448, 3])
13
torch.Size([2, 448, 448, 3])
14
torch.Size([2, 448, 448, 3])
15
torch.Size([2, 448, 448, 3])
16
torch.Size([2, 448, 448, 3])
17
torch.Size([2, 448, 448, 3])
18
torch.Size([2, 448, 448, 3])
19
torch.Size([2, 448, 448, 3])
20
torch.Size([2, 448, 448, 3])
21
torch.Size([2, 448, 448, 3])
22
torch.Size([2, 448, 448, 3])
23
torch.Size([2, 448, 448, 3])
24
torch.Size([2, 448, 448, 3])
25
torch.Size([2, 448, 448, 3])
26
torch.Size([2, 448, 448, 3])
27
torch.Size([2, 448, 448, 3])
28
torch.Size([2, 448, 448, 3])
29
torch.Size([2, 448, 448, 3])
30
torch.Size([2, 448, 448, 3])
31
torch.Size([2, 448, 448, 3])
32
torch.Size([2, 448, 448, 3])
33
torch.Size([2, 448, 448, 3])
34
torch.Size([2, 448, 448, 3])
35
torch.Size([2, 448, 448, 3])
36
torch.Size([2, 448, 448, 3])
37
torch.Size([2, 448, 448, 3])
38
torch.Size([2, 448, 448, 3])
39
torch.Size([2, 448, 448, 3])
40
torch.Size([2, 448, 448, 3])
41
torch.Size([2, 448, 448, 3])
42
torch.Size([2, 448, 448, 3])
43
torch.Size([2, 448, 448, 3])
44
torch.Size([2, 448, 448, 3])
45
torch.Size([2, 448, 448, 3])
46
torch.Size([2, 448, 448, 3])
47
torch.Size([2, 448, 448, 3])
48
torch.Size([2, 448, 448, 3])
49
torch.Size([2, 448, 448, 3])
image (stitch) min 0.0, max 1.0
Saving temporary image output_20250319_002503//optim_50.png (shape=(448, 896, 3))
Iteration 50, rendering loss -0.847153
50
torch.Size([2, 448, 448, 3])
51
torch.Size([2, 448, 448, 3])
52
torch.Size([2, 448, 448, 3])
53
torch.Size([2, 448, 448, 3])
54
torch.Size([2, 448, 448, 3])
55
torch.Size([2, 448, 448, 3])
56
torch.Size([2, 448, 448, 3])
57
torch.Size([2, 448, 448, 3])
58
torch.Size([2, 448, 448, 3])
59
torch.Size([2, 448, 448, 3])
60
torch.Size([2, 448, 448, 3])
61
torch.Size([2, 448, 448, 3])
62
torch.Size([2, 448, 448, 3])
63
torch.Size([2, 448, 448, 3])
64
torch.Size([2, 448, 448, 3])
65
torch.Size([2, 448, 448, 3])
66
torch.Size([2, 448, 448, 3])
67
torch.Size([2, 448, 448, 3])
68
torch.Size([2, 448, 448, 3])
69
torch.Size([2, 448, 448, 3])
70
torch.Size([2, 448, 448, 3])
71
torch.Size([2, 448, 448, 3])
72
torch.Size([2, 448, 448, 3])
73
torch.Size([2, 448, 448, 3])
74
torch.Size([2, 448, 448, 3])
75
torch.Size([2, 448, 448, 3])
76
torch.Size([2, 448, 448, 3])
77
torch.Size([2, 448, 448, 3])
78
torch.Size([2, 448, 448, 3])
79
torch.Size([2, 448, 448, 3])
80
torch.Size([2, 448, 448, 3])
81
torch.Size([2, 448, 448, 3])
82
torch.Size([2, 448, 448, 3])
83
torch.Size([2, 448, 448, 3])
84
torch.Size([2, 448, 448, 3])
85
torch.Size([2, 448, 448, 3])
86
torch.Size([2, 448, 448, 3])
87
torch.Size([2, 448, 448, 3])
88
torch.Size([2, 448, 448, 3])
89
torch.Size([2, 448, 448, 3])
90
torch.Size([2, 448, 448, 3])
91
torch.Size([2, 448, 448, 3])
92
torch.Size([2, 448, 448, 3])
93
torch.Size([2, 448, 448, 3])
94
torch.Size([2, 448, 448, 3])
95
torch.Size([2, 448, 448, 3])
96
torch.Size([2, 448, 448, 3])
97
torch.Size([2, 448, 448, 3])
98
torch.Size([2, 448, 448, 3])
99
torch.Size([2, 448, 448, 3])
image (stitch) min 0.0, max 1.0
Saving temporary image output_20250319_002503//optim_100.png (shape=(448, 896, 3))
Iteration 100, rendering loss -0.866760
Updated patches in 0.083s
100
torch.Size([2, 448, 448, 3])
101
torch.Size([2, 448, 448, 3])
102
torch.Size([2, 448, 448, 3])
103
torch.Size([2, 448, 448, 3])
104
torch.Size([2, 448, 448, 3])
105
torch.Size([2, 448, 448, 3])
106
torch.Size([2, 448, 448, 3])
107
torch.Size([2, 448, 448, 3])
108
torch.Size([2, 448, 448, 3])
109
torch.Size([2, 448, 448, 3])
110
torch.Size([2, 448, 448, 3])
111
torch.Size([2, 448, 448, 3])
112
torch.Size([2, 448, 448, 3])
113
torch.Size([2, 448, 448, 3])
114
torch.Size([2, 448, 448, 3])
115
torch.Size([2, 448, 448, 3])
116
torch.Size([2, 448, 448, 3])
117
torch.Size([2, 448, 448, 3])
118
torch.Size([2, 448, 448, 3])
119
torch.Size([2, 448, 448, 3])
120
torch.Size([2, 448, 448, 3])
121
torch.Size([2, 448, 448, 3])
122
torch.Size([2, 448, 448, 3])
123
torch.Size([2, 448, 448, 3])
124
torch.Size([2, 448, 448, 3])
125
torch.Size([2, 448, 448, 3])
126
torch.Size([2, 448, 448, 3])
127
torch.Size([2, 448, 448, 3])
128
torch.Size([2, 448, 448, 3])
129
torch.Size([2, 448, 448, 3])
130
torch.Size([2, 448, 448, 3])
131
torch.Size([2, 448, 448, 3])
132
torch.Size([2, 448, 448, 3])
133
torch.Size([2, 448, 448, 3])
134
torch.Size([2, 448, 448, 3])
135
torch.Size([2, 448, 448, 3])
136
torch.Size([2, 448, 448, 3])
137
torch.Size([2, 448, 448, 3])
138
torch.Size([2, 448, 448, 3])
139
torch.Size([2, 448, 448, 3])
140
torch.Size([2, 448, 448, 3])
141
torch.Size([2, 448, 448, 3])
142
torch.Size([2, 448, 448, 3])
143
torch.Size([2, 448, 448, 3])
144
torch.Size([2, 448, 448, 3])
145
torch.Size([2, 448, 448, 3])
146
torch.Size([2, 448, 448, 3])
147
torch.Size([2, 448, 448, 3])
148
torch.Size([2, 448, 448, 3])
149
torch.Size([2, 448, 448, 3])
image (stitch) min 0.0, max 1.0
Saving temporary image output_20250319_002503//optim_150.png (shape=(448, 896, 3))
Iteration 150, rendering loss -0.922791
150
torch.Size([2, 448, 448, 3])
151
torch.Size([2, 448, 448, 3])
152
torch.Size([2, 448, 448, 3])
153
torch.Size([2, 448, 448, 3])
154
torch.Size([2, 448, 448, 3])
155
torch.Size([2, 448, 448, 3])
156
torch.Size([2, 448, 448, 3])
157
torch.Size([2, 448, 448, 3])
158
torch.Size([2, 448, 448, 3])
159
torch.Size([2, 448, 448, 3])
160
torch.Size([2, 448, 448, 3])
161
torch.Size([2, 448, 448, 3])
162
torch.Size([2, 448, 448, 3])
163
torch.Size([2, 448, 448, 3])
164
torch.Size([2, 448, 448, 3])
165
torch.Size([2, 448, 448, 3])
166
torch.Size([2, 448, 448, 3])
167
torch.Size([2, 448, 448, 3])
168
torch.Size([2, 448, 448, 3])
169
torch.Size([2, 448, 448, 3])
170
torch.Size([2, 448, 448, 3])
171
torch.Size([2, 448, 448, 3])
172
torch.Size([2, 448, 448, 3])
173
torch.Size([2, 448, 448, 3])
174
torch.Size([2, 448, 448, 3])
175
torch.Size([2, 448, 448, 3])
176
torch.Size([2, 448, 448, 3])
177
torch.Size([2, 448, 448, 3])
178
torch.Size([2, 448, 448, 3])
179
torch.Size([2, 448, 448, 3])
180
torch.Size([2, 448, 448, 3])
181
torch.Size([2, 448, 448, 3])
182
torch.Size([2, 448, 448, 3])
183
torch.Size([2, 448, 448, 3])
184
torch.Size([2, 448, 448, 3])
185
torch.Size([2, 448, 448, 3])
186
torch.Size([2, 448, 448, 3])
187
torch.Size([2, 448, 448, 3])
188
torch.Size([2, 448, 448, 3])
189
torch.Size([2, 448, 448, 3])
190
torch.Size([2, 448, 448, 3])
191
torch.Size([2, 448, 448, 3])
192
torch.Size([2, 448, 448, 3])
193
torch.Size([2, 448, 448, 3])
194
torch.Size([2, 448, 448, 3])
195
torch.Size([2, 448, 448, 3])
196
torch.Size([2, 448, 448, 3])
197
torch.Size([2, 448, 448, 3])
198
torch.Size([2, 448, 448, 3])
199
torch.Size([2, 448, 448, 3])
image (stitch) min 0.0, max 1.0
Saving temporary image output_20250319_002503//optim_200.png (shape=(448, 896, 3))
Iteration 200, rendering loss -0.958267
Updated patches in 0.082s
200
torch.Size([2, 448, 448, 3])
201
torch.Size([2, 448, 448, 3])
202
torch.Size([2, 448, 448, 3])
203
torch.Size([2, 448, 448, 3])
204
torch.Size([2, 448, 448, 3])
205
torch.Size([2, 448, 448, 3])
206
torch.Size([2, 448, 448, 3])
207
torch.Size([2, 448, 448, 3])
208
torch.Size([2, 448, 448, 3])
209
torch.Size([2, 448, 448, 3])
210
torch.Size([2, 448, 448, 3])
211
torch.Size([2, 448, 448, 3])
212
torch.Size([2, 448, 448, 3])
213
torch.Size([2, 448, 448, 3])
214
torch.Size([2, 448, 448, 3])
215
torch.Size([2, 448, 448, 3])
216
torch.Size([2, 448, 448, 3])
217
torch.Size([2, 448, 448, 3])
218
torch.Size([2, 448, 448, 3])
219
torch.Size([2, 448, 448, 3])
220
torch.Size([2, 448, 448, 3])
221
torch.Size([2, 448, 448, 3])
222
torch.Size([2, 448, 448, 3])
223
torch.Size([2, 448, 448, 3])
224
torch.Size([2, 448, 448, 3])
225
torch.Size([2, 448, 448, 3])
226
torch.Size([2, 448, 448, 3])
227
torch.Size([2, 448, 448, 3])
228
torch.Size([2, 448, 448, 3])
229
torch.Size([2, 448, 448, 3])
230
torch.Size([2, 448, 448, 3])
231
torch.Size([2, 448, 448, 3])
232
torch.Size([2, 448, 448, 3])
233
torch.Size([2, 448, 448, 3])
234
torch.Size([2, 448, 448, 3])
235
torch.Size([2, 448, 448, 3])
236
torch.Size([2, 448, 448, 3])
237
torch.Size([2, 448, 448, 3])
238
torch.Size([2, 448, 448, 3])
239
torch.Size([2, 448, 448, 3])
240
torch.Size([2, 448, 448, 3])
241
torch.Size([2, 448, 448, 3])
242
torch.Size([2, 448, 448, 3])
243
torch.Size([2, 448, 448, 3])
244
torch.Size([2, 448, 448, 3])
245
torch.Size([2, 448, 448, 3])
246
torch.Size([2, 448, 448, 3])
247
torch.Size([2, 448, 448, 3])
248
torch.Size([2, 448, 448, 3])
image (stitch) min 0.0, max 1.0
Saving model to output_20250319_002503/...
249
PopulationAffineTransforms is_high_res=True, requires_grad=False
PopulationColourRGBTransforms for 100 patches, 1 individuals
PopulationColourRGBTransforms requires_grad=False
Background image of size torch.Size([3, 896, 896])
Lowest loss: -1.0089111328125 @ index 1:
[0, 0] idx [0:896], [0:896]
Finished [0, 0] idx [0:896], [0:896]
torch.Size([1, 896, 896, 4])
Image has alpha channel
PopulationAffineTransforms is_high_res=True, requires_grad=False
PopulationColourRGBTransforms for 100 patches, 1 individuals
PopulationColourRGBTransforms requires_grad=False
Background image of size torch.Size([3, 896, 896])
Lowest loss: -1.0089111328125 @ index 1:
Not using background_image
[0, 0] idx [0:896], [0:896]
Setting alpha to zero outside of patches
Finished [0, 0] idx [0:896], [0:896]
torch.Size([1, 896, 896, 4])
Image has alpha channel
This example was created by a different version, ltejedor/differentiable-rasterizer-vector-graphics:2f0207fe.
This model runs on Nvidia A100 (80GB) GPU hardware. We don't yet have enough runs of this model to provide performance information.
This model is cold. You'll get a fast response if the model is warm and already running, and a slower response if the model is cold and starting up.