Input
Run this model in Node.js with one line of code.
First, install Replicate's Node.js client library:
npm install replicate
Then set the REPLICATE_API_TOKEN environment variable:
export REPLICATE_API_TOKEN=<paste-your-token-here>
Find your API token in your account settings.
import Replicate from "replicate";
import { writeFile } from "node:fs/promises";
const replicate = new Replicate({
auth: process.env.REPLICATE_API_TOKEN,
});
Run cjwbw/clip-gen using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
const output = await replicate.run(
"cjwbw/clip-gen:890080e4c757d324b4a4597161f8dc5ed35b06e4fdfb2954f94babfe4b1cde67",
{
input: {
text: "A photo of a tower in front of a mountain",
num_samples: 8
}
}
);
// To access the file URL:
console.log(output.url()); //=> "http://example.com"
// To write the file to disk:
await writeFile("my-image.png", output);
To learn more, take a look at the guide on getting started with Node.js.
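If you would rather start the prediction and check on it yourself instead of blocking inside replicate.run, the same client also exposes predictions.create and wait. A minimal sketch under that assumption, reusing the version ID and input from above:
import Replicate from "replicate";
const replicate = new Replicate({
  auth: process.env.REPLICATE_API_TOKEN,
});
// Kick off the prediction without waiting for it to finish.
let prediction = await replicate.predictions.create({
  version: "890080e4c757d324b4a4597161f8dc5ed35b06e4fdfb2954f94babfe4b1cde67",
  input: {
    text: "A photo of a tower in front of a mountain",
    num_samples: 8,
  },
});
// Block until the prediction reaches a terminal state, then read the result.
prediction = await replicate.wait(prediction);
console.log(prediction.status); // "succeeded" on success
console.log(prediction.output); // URL of the generated image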
Install Replicate's Python client library:
pip install replicate
Then set the REPLICATE_API_TOKEN environment variable:
export REPLICATE_API_TOKEN=<paste-your-token-here>
Find your API token in your account settings.
import replicate
Run cjwbw/clip-gen using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
output = replicate.run(
"cjwbw/clip-gen:890080e4c757d324b4a4597161f8dc5ed35b06e4fdfb2954f94babfe4b1cde67",
input={
"text": "A photo of a tower in front of a mountain",
"num_samples": 8
}
)
print(output)
To learn more, take a look at the guide on getting started with Python.
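In the example response below, output is a plain URL pointing at the generated PNG, so one straightforward way to keep the image is to download that URL with the standard library. A minimal sketch under that assumption (the local filename is just an example):
import replicate
import urllib.request

output = replicate.run(
    "cjwbw/clip-gen:890080e4c757d324b4a4597161f8dc5ed35b06e4fdfb2954f94babfe4b1cde67",
    input={
        "text": "A photo of a tower in front of a mountain",
        "num_samples": 8,
    },
)

# output is the URL of the generated image; save it locally.
urllib.request.urlretrieve(output, "my-image.png")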
Set the REPLICATE_API_TOKEN environment variable:
export REPLICATE_API_TOKEN=<paste-your-token-here>
Find your API token in your account settings.
Run cjwbw/clip-gen using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
curl -s -X POST \
-H "Authorization: Bearer $REPLICATE_API_TOKEN" \
-H "Content-Type: application/json" \
-H "Prefer: wait" \
-d $'{
"version": "cjwbw/clip-gen:890080e4c757d324b4a4597161f8dc5ed35b06e4fdfb2954f94babfe4b1cde67",
"input": {
"text": "A photo of a tower in front of a mountain",
"num_samples": 8
}
}' \
https://api.replicate.com/v1/predictions
To learn more, take a look at Replicate’s HTTP API reference docs.
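With the Prefer: wait header the API tries to hold the connection open until the prediction finishes, but long-running predictions can still come back with a status of starting or processing. In that case you can poll the prediction's urls.get endpoint (shown in the response below) until it succeeds, for example:
# Replace <prediction-id> with the "id" field from the response above.
curl -s \
  -H "Authorization: Bearer $REPLICATE_API_TOKEN" \
  https://api.replicate.com/v1/predictions/<prediction-id>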
Output
{
"completed_at": "2022-08-04T12:58:14.598993Z",
"created_at": "2022-08-04T12:53:09.511309Z",
"data_removed": false,
"error": null,
"id": "miwszzml25gj3daippafur4pry",
"input": {
"text": "A photo of a tower in front of a mountain",
"num_samples": "8"
},
"logs": "/root/.pyenv/versions/3.8.13/lib/python3.8/site-packages/torch/nn/functional.py:3631: UserWarning: Default upsampling behavior when mode=bilinear is changed to align_corners=False since 0.4.0. Please specify align_corners=True if the old behavior is desired. See the documentation of nn.Upsample for details.\n warnings.warn(\nCLIP similarity tensor([0.3261, 0.3129, 0.3123, 0.3098, 0.3079, 0.3044, 0.3014, 0.3003],\n device='cuda:0')\ntensor([[[[0.3793, 0.3799, 0.3688, ..., 0.3842, 0.3850, 0.3909],\n [0.3702, 0.3780, 0.3728, ..., 0.3884, 0.3941, 0.3874],\n [0.3781, 0.3775, 0.3754, ..., 0.3886, 0.3859, 0.3903],\n ...,\n [0.0469, 0.0693, 0.0654, ..., 0.7021, 0.7025, 0.7113],\n [0.0506, 0.0686, 0.0592, ..., 0.7111, 0.7086, 0.7079],\n [0.0440, 0.0475, 0.0654, ..., 0.6946, 0.7019, 0.7127]],\n\n [[0.5051, 0.4978, 0.4964, ..., 0.5012, 0.5091, 0.5047],\n [0.5030, 0.5071, 0.5064, ..., 0.5021, 0.5055, 0.4950],\n [0.5079, 0.5117, 0.5092, ..., 0.5047, 0.5017, 0.4992],\n ...,\n [0.0537, 0.0862, 0.0863, ..., 0.7395, 0.7404, 0.7409],\n [0.0649, 0.0768, 0.0809, ..., 0.7463, 0.7454, 0.7508],\n [0.0774, 0.0738, 0.0965, ..., 0.7315, 0.7396, 0.7480]],\n\n [[0.6324, 0.6411, 0.6413, ..., 0.6368, 0.6246, 0.6207],\n [0.6397, 0.6500, 0.6451, ..., 0.6376, 0.6345, 0.6260],\n [0.6421, 0.6453, 0.6478, ..., 0.6404, 0.6334, 0.6279],\n ...,\n [0.0761, 0.0832, 0.0734, ..., 0.7633, 0.7629, 0.7679],\n [0.0771, 0.0687, 0.0601, ..., 0.7600, 0.7651, 0.7577],\n [0.0617, 0.0666, 0.0791, ..., 0.7568, 0.7595, 0.7644]]],\n\n\n [[[0.3284, 0.3191, 0.3130, ..., 0.3412, 0.3449, 0.3540],\n [0.3092, 0.3163, 0.3157, ..., 0.3437, 0.3499, 0.3451],\n [0.3191, 0.3177, 0.3156, ..., 0.3407, 0.3407, 0.3439],\n ...,\n [0.1339, 0.1344, 0.1177, ..., 0.7005, 0.6961, 0.7035],\n [0.1637, 0.1185, 0.1515, ..., 0.7017, 0.7001, 0.7078],\n [0.1375, 0.1294, 0.1805, ..., 0.6837, 0.6908, 0.7135]],\n\n [[0.4451, 0.4391, 0.4407, ..., 0.4740, 0.4833, 0.4807],\n [0.4390, 0.4429, 0.4394, ..., 0.4737, 0.4748, 0.4632],\n [0.4424, 0.4473, 0.4425, ..., 0.4719, 0.4705, 0.4666],\n ...,\n [0.1571, 0.1395, 0.1394, ..., 0.7182, 0.7137, 0.7151],\n [0.1850, 0.1166, 0.1679, ..., 0.7225, 0.7202, 0.7335],\n [0.1863, 0.1818, 0.2159, ..., 0.7067, 0.7153, 0.7349]],\n\n [[0.5962, 0.6093, 0.6118, ..., 0.6446, 0.6357, 0.6318],\n [0.5983, 0.6114, 0.6050, ..., 0.6350, 0.6386, 0.6316],\n [0.6002, 0.6060, 0.6105, ..., 0.6386, 0.6350, 0.6306],\n ...,\n [0.1529, 0.1215, 0.1154, ..., 0.7242, 0.7167, 0.7201],\n [0.1958, 0.1228, 0.1504, ..., 0.7174, 0.7249, 0.7232],\n [0.1987, 0.1976, 0.2082, ..., 0.7147, 0.7181, 0.7388]]],\n\n\n [[[0.4125, 0.4222, 0.4103, ..., 0.3412, 0.3449, 0.3531],\n [0.4116, 0.4258, 0.4134, ..., 0.3449, 0.3508, 0.3484],\n [0.4208, 0.4191, 0.4168, ..., 0.3397, 0.3416, 0.3478],\n ...,\n [0.2563, 0.2564, 0.2562, ..., 0.7143, 0.7108, 0.7114],\n [0.2482, 0.2622, 0.2633, ..., 0.7144, 0.7073, 0.7112],\n [0.2220, 0.2169, 0.2416, ..., 0.7065, 0.7009, 0.7193]],\n\n [[0.5788, 0.5815, 0.5823, ..., 0.5051, 0.5133, 0.5088],\n [0.5823, 0.5885, 0.5843, ..., 0.5044, 0.5072, 0.4945],\n [0.5866, 0.5888, 0.5916, ..., 0.5049, 0.5043, 0.4985],\n ...,\n [0.2994, 0.2973, 0.2968, ..., 0.7989, 0.8041, 0.7991],\n [0.2888, 0.2958, 0.3031, ..., 0.7996, 0.8064, 0.8149],\n [0.2721, 0.2564, 0.2840, ..., 0.8034, 0.7972, 0.7973]],\n\n [[0.7592, 0.7657, 0.7657, ..., 0.6946, 0.6836, 0.6761],\n [0.7558, 0.7678, 0.7637, ..., 0.6912, 0.6936, 0.6796],\n [0.7594, 0.7605, 0.7664, ..., 0.6936, 0.6901, 0.6801],\n ...,\n [0.2959, 0.2904, 0.2869, ..., 0.8531, 0.8477, 0.8536],\n [0.2955, 0.2867, 0.2850, ..., 
0.8449, 0.8562, 0.8431],\n [0.2685, 0.2626, 0.2681, ..., 0.8536, 0.8471, 0.8345]]],\n\n\n ...,\n\n\n [[[0.4001, 0.4107, 0.4004, ..., 0.4133, 0.4147, 0.4193],\n [0.3988, 0.4086, 0.4043, ..., 0.4161, 0.4214, 0.4189],\n [0.4086, 0.4039, 0.4008, ..., 0.4104, 0.4136, 0.4183],\n ...,\n [0.3377, 0.3170, 0.3318, ..., 0.1098, 0.1304, 0.1540],\n [0.3219, 0.3343, 0.3203, ..., 0.0791, 0.1110, 0.1609],\n [0.3304, 0.3278, 0.3240, ..., 0.0356, 0.0781, 0.1305]],\n\n [[0.5637, 0.5663, 0.5661, ..., 0.5792, 0.5899, 0.5820],\n [0.5657, 0.5729, 0.5680, ..., 0.5786, 0.5817, 0.5707],\n [0.5709, 0.5753, 0.5727, ..., 0.5769, 0.5806, 0.5712],\n ...,\n [0.3875, 0.3632, 0.3823, ..., 0.1186, 0.1686, 0.1844],\n [0.3849, 0.3972, 0.3865, ..., 0.0923, 0.1173, 0.1652],\n [0.3779, 0.3803, 0.3885, ..., 0.0717, 0.0995, 0.1352]],\n\n [[0.7064, 0.7141, 0.7115, ..., 0.7090, 0.7088, 0.7112],\n [0.7052, 0.7133, 0.7107, ..., 0.7084, 0.7118, 0.7080],\n [0.7078, 0.7063, 0.7139, ..., 0.7123, 0.7097, 0.7007],\n ...,\n [0.4279, 0.4017, 0.4162, ..., 0.1097, 0.1173, 0.1316],\n [0.4252, 0.4267, 0.4185, ..., 0.0814, 0.0843, 0.1242],\n [0.4085, 0.4141, 0.4226, ..., 0.0528, 0.0714, 0.1035]]],\n\n\n [[[0.5250, 0.5499, 0.5434, ..., 0.5830, 0.5849, 0.5770],\n [0.5364, 0.5527, 0.5484, ..., 0.5877, 0.5895, 0.5921],\n [0.5459, 0.5476, 0.5434, ..., 0.5845, 0.5870, 0.5965],\n ...,\n [0.5922, 0.5870, 0.5562, ..., 0.3617, 0.3574, 0.3254],\n [0.5925, 0.5852, 0.5662, ..., 0.3291, 0.3234, 0.3423],\n [0.5910, 0.5804, 0.5600, ..., 0.3531, 0.3482, 0.3480]],\n\n [[0.6724, 0.6798, 0.6803, ..., 0.7122, 0.7230, 0.7117],\n [0.6787, 0.6851, 0.6829, ..., 0.7139, 0.7176, 0.7085],\n [0.6830, 0.6861, 0.6847, ..., 0.7133, 0.7187, 0.7081],\n ...,\n [0.6352, 0.6264, 0.5925, ..., 0.3488, 0.3313, 0.3079],\n [0.6435, 0.6341, 0.6089, ..., 0.3152, 0.3093, 0.3368],\n [0.6199, 0.6100, 0.6032, ..., 0.3475, 0.3447, 0.3407]],\n\n [[0.8095, 0.8237, 0.8225, ..., 0.8400, 0.8413, 0.8400],\n [0.8128, 0.8209, 0.8220, ..., 0.8426, 0.8517, 0.8460],\n [0.8174, 0.8154, 0.8220, ..., 0.8489, 0.8483, 0.8327],\n ...,\n [0.6392, 0.6224, 0.5844, ..., 0.3042, 0.3009, 0.2775],\n [0.6580, 0.6426, 0.6098, ..., 0.2864, 0.2775, 0.3005],\n [0.6246, 0.6203, 0.5948, ..., 0.3069, 0.3027, 0.3032]]],\n\n\n [[[0.4699, 0.4794, 0.4707, ..., 0.4194, 0.4267, 0.4282],\n [0.4664, 0.4784, 0.4638, ..., 0.4214, 0.4312, 0.4236],\n [0.4713, 0.4780, 0.4761, ..., 0.4203, 0.4224, 0.4273],\n ...,\n [0.3135, 0.2852, 0.2978, ..., 0.6796, 0.6790, 0.6824],\n [0.3278, 0.3193, 0.3010, ..., 0.6869, 0.6783, 0.6757],\n [0.3336, 0.3232, 0.3195, ..., 0.6789, 0.6722, 0.6755]],\n\n [[0.5406, 0.5417, 0.5415, ..., 0.4926, 0.5033, 0.5013],\n [0.5411, 0.5439, 0.5422, ..., 0.4937, 0.4976, 0.4840],\n [0.5449, 0.5465, 0.5477, ..., 0.4926, 0.4926, 0.4915],\n ...,\n [0.3694, 0.3351, 0.3504, ..., 0.7261, 0.7294, 0.7287],\n [0.4021, 0.3906, 0.3714, ..., 0.7283, 0.7301, 0.7347],\n [0.4029, 0.3884, 0.3873, ..., 0.7301, 0.7269, 0.7219]],\n\n [[0.6500, 0.6594, 0.6604, ..., 0.6005, 0.5965, 0.5907],\n [0.6531, 0.6622, 0.6571, ..., 0.5952, 0.5982, 0.5915],\n [0.6586, 0.6570, 0.6594, ..., 0.6016, 0.5963, 0.5941],\n ...,\n [0.4533, 0.4121, 0.4211, ..., 0.7656, 0.7665, 0.7743],\n [0.4812, 0.4588, 0.4305, ..., 0.7616, 0.7659, 0.7595],\n [0.4617, 0.4489, 0.4512, ..., 0.7675, 0.7632, 0.7540]]]],\n device='cuda:0')\n<class 'torch.Tensor'>",
"metrics": {
"predict_time": 232.703889,
"total_time": 305.087684
},
"output": "https://replicate.delivery/mgxm/bf48f358-1bd5-40d8-825b-09cec58c2a84/output.png",
"started_at": "2022-08-04T12:54:21.895104Z",
"status": "succeeded",
"urls": {
"get": "https://api.replicate.com/v1/predictions/miwszzml25gj3daippafur4pry",
"cancel": "https://api.replicate.com/v1/predictions/miwszzml25gj3daippafur4pry/cancel"
},
"version": "890080e4c757d324b4a4597161f8dc5ed35b06e4fdfb2954f94babfe4b1cde67"
}
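The output field of the prediction is the URL of the generated image. As one convenience (jq is not part of the API, just used here for illustration), you can extract that field and download the file from the command line:
# Fetch the finished prediction, extract the output URL, and download the image.
curl -s -H "Authorization: Bearer $REPLICATE_API_TOKEN" \
  https://api.replicate.com/v1/predictions/miwszzml25gj3daippafur4pry \
  | jq -r '.output' \
  | xargs curl -s -o output.png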