You're looking at a specific version of this model. Jump to the model overview.
pixray/text2image:5c347a4b
Input
Run this model in Node.js with one line of code:
npm install replicate
Set the REPLICATE_API_TOKEN environment variable:
export REPLICATE_API_TOKEN=<paste-your-token-here>
Find your API token in your account settings.
import Replicate from "replicate";
// Create a client; auth is read from the REPLICATE_API_TOKEN
// environment variable set above.
const replicate = new Replicate({
auth: process.env.REPLICATE_API_TOKEN,
});
Run pixray/text2image using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
// Inputs for the prediction: the drawer algorithm, the text prompt,
// and extra pixray settings (none here beyond a newline).
const input = {
  drawer: "vqgan",
  prompts: "Using artists as an example, when anyone can create amazing art, there will be incredible upside for humanity, but downside for most individual artists. (On the other hand, totally new kinds of art will be possible, and the skill that will matter will be imagination.)",
  settings: "\n",
};

// Run the pinned model version and wait for the result.
const output = await replicate.run(
  "pixray/text2image:5c347a4bfa1d4523a58ae614c2194e15f2ae682b57e3797a5bb468920aa70ebf",
  { input },
);
console.log(output);
To learn more, take a look at the guide on getting started with Node.js.
pip install replicate
Set the REPLICATE_API_TOKEN environment variable:
export REPLICATE_API_TOKEN=<paste-your-token-here>
Find your API token in your account settings.
import replicate
Run pixray/text2image using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
# Run the pinned version of pixray/text2image with the given inputs.
output = replicate.run(
    "pixray/text2image:5c347a4bfa1d4523a58ae614c2194e15f2ae682b57e3797a5bb468920aa70ebf",
    input={
        "drawer": "vqgan",
        "prompts": "Using artists as an example, when anyone can create amazing art, there will be incredible upside for humanity, but downside for most individual artists. (On the other hand, totally new kinds of art will be possible, and the skill that will matter will be imagination.)",
        "settings": "\n"
    }
)

# The pixray/text2image model can stream output as it's running.
# The predict method returns an iterator, and you can iterate over that output.
for item in output:
    # https://replicate.com/pixray/text2image/api#output-schema
    print(item)
To learn more, take a look at the guide on getting started with Python.
Set the REPLICATE_API_TOKEN environment variable:
export REPLICATE_API_TOKEN=<paste-your-token-here>
Find your API token in your account settings.
Run pixray/text2image using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
# Create a prediction via Replicate's HTTP API.
# "Prefer: wait" asks the server to hold the request open until the
# prediction finishes instead of returning immediately.
# $'...' is used so the \\n in "settings" reaches the server as a literal \n.
curl -s -X POST \
-H "Authorization: Bearer $REPLICATE_API_TOKEN" \
-H "Content-Type: application/json" \
-H "Prefer: wait" \
-d $'{
"version": "5c347a4bfa1d4523a58ae614c2194e15f2ae682b57e3797a5bb468920aa70ebf",
"input": {
"drawer": "vqgan",
"prompts": "Using artists as an example, when anyone can create amazing art, there will be incredible upside for humanity, but downside for most individual artists. (On the other hand, totally new kinds of art will be possible, and the skill that will matter will be imagination.)",
"settings": "\\n"
}
}' \
https://api.replicate.com/v1/predictions
To learn more, take a look at Replicate’s HTTP API reference docs.
brew install cog
If you don’t have Homebrew, there are other installation options available.
Run this to download the model and run it in your local environment:
# Download the pinned model image and run a single prediction locally with Cog.
cog predict r8.im/pixray/text2image@sha256:5c347a4bfa1d4523a58ae614c2194e15f2ae682b57e3797a5bb468920aa70ebf \
-i 'drawer="vqgan"' \
-i 'prompts="Using artists as an example, when anyone can create amazing art, there will be incredible upside for humanity, but downside for most individual artists. (On the other hand, totally new kinds of art will be possible, and the skill that will matter will be imagination.)"' \
-i $'settings="\\n"'
To learn more, take a look at the Cog documentation.
Run this to download the model and run it in your local environment:
# Serve the model locally on port 5000 (detached; --gpus=all exposes host GPUs).
docker run -d -p 5000:5000 --gpus=all r8.im/pixray/text2image@sha256:5c347a4bfa1d4523a58ae614c2194e15f2ae682b57e3797a5bb468920aa70ebf
# Send a prediction request to the locally running container.
# NOTE: the original one-liner kept its line-continuation backslashes after
# being collapsed onto one line ("... POST \ -H ..."), which makes '\ ' an
# escaped-space argument and breaks the command. Restored to valid
# multi-line form; the JSON payload is unchanged.
curl -s -X POST \
  -H "Content-Type: application/json" \
  -d $'{ "input": { "drawer": "vqgan", "prompts": "Using artists as an example, when anyone can create amazing art, there will be incredible upside for humanity, but downside for most individual artists. (On the other hand, totally new kinds of art will be possible, and the skill that will matter will be imagination.)", "settings": "\\n" } }' \
  http://localhost:5000/predictions
To learn more, take a look at the Cog documentation.
Add a payment method to run this model.
Each run costs approximately $0.095. Alternatively, try out our featured models for free.
By signing in, you agree to our
terms of service and privacy policy
Output
{
"completed_at": "2022-01-23T04:11:49.144273Z",
"created_at": "2022-01-23T03:58:45.487200Z",
"data_removed": false,
"error": null,
"id": "s4pasue4lvbldopl4eolqyoiee",
"input": {
"prompts": "Using artists as an example, when anyone can create amazing art, there will be incredible upside for humanity, but downside for most individual artists. (On the other hand, totally new kinds of art will be possible, and the skill that will matter will be imagination.)",
"settings": "\n"
},
"logs": "---> BasePixrayPredictor Predict\nUsing seed:\n6555919756984463465\nLoaded CLIP RN50: 102.01M params\nLoaded CLIP ViT-B/32: 151.28M params\nLoaded CLIP ViT-B/16: 149.62M params\nUsing device:\ncuda:0\nOptimising using:\nAdam\nUsing text prompts:\n['Using artists as an example, when anyone can create amazing art, there will be incredible upside for humanity, but downside for most individual artists. (On the other hand, totally new kinds of art will be possible, and the skill that will matter will be imagination.)']\n\n0it [00:00, ?it/s]\n/root/.pyenv/versions/3.8.12/lib/python3.8/site-packages/torch/nn/functional.py:3609: UserWarning: Default upsampling behavior when mode=bilinear is changed to align_corners=False since 0.4.0. Please specify align_corners=True if the old behavior is desired. See the documentation of nn.Upsample for details.\n warnings.warn(\niter: 0, loss: 2.98, losses: 0.969, 0.0885, 0.909, 0.0641, 0.887, 0.0655 (-0=>2.984)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 10, loss: 2.91, losses: 0.949, 0.0894, 0.883, 0.0632, 0.864, 0.0634 (-0=>2.912)\n\n0it [00:00, ?it/s]\n\n0it [00:09, ?it/s]\n\n0it [00:00, ?it/s]\niter: 20, loss: 2.91, losses: 0.944, 0.0899, 0.882, 0.0637, 0.865, 0.0627 (-1=>2.901)\n\n0it [00:00, ?it/s]\n\n0it [00:09, ?it/s]\n\n0it [00:00, ?it/s]\niter: 30, loss: 2.89, losses: 0.94, 0.0885, 0.878, 0.0634, 0.861, 0.0632 (-0=>2.894)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 40, loss: 2.89, losses: 0.938, 0.09, 0.878, 0.064, 0.856, 0.0629 (-1=>2.884)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 50, loss: 2.87, losses: 0.928, 0.0929, 0.872, 0.0645, 0.85, 0.0624 (-0=>2.87)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 60, loss: 2.85, losses: 0.926, 0.0901, 0.863, 0.064, 0.841, 0.0624 (-0=>2.847)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 70, loss: 2.82, losses: 0.921, 0.0893, 0.854, 
0.0648, 0.831, 0.0618 (-0=>2.821)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 80, loss: 2.76, losses: 0.903, 0.089, 0.836, 0.0658, 0.808, 0.0623 (-0=>2.765)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 90, loss: 2.75, losses: 0.899, 0.0893, 0.829, 0.0663, 0.803, 0.0649 (-2=>2.741)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 100, loss: 2.72, losses: 0.891, 0.0893, 0.816, 0.0684, 0.788, 0.0658 (-0=>2.719)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 110, loss: 2.72, losses: 0.891, 0.0902, 0.812, 0.0685, 0.788, 0.0661 (-4=>2.71)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 120, loss: 2.71, losses: 0.89, 0.0892, 0.814, 0.0677, 0.785, 0.0659 (-6=>2.7)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 130, loss: 2.72, losses: 0.891, 0.0899, 0.819, 0.0678, 0.789, 0.0658 (-7=>2.698)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 140, loss: 2.71, losses: 0.885, 0.0912, 0.812, 0.0688, 0.784, 0.0656 (-17=>2.698)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 150, loss: 2.71, losses: 0.887, 0.0903, 0.813, 0.0681, 0.785, 0.0658 (-1=>2.692)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 160, loss: 2.71, losses: 0.89, 0.0894, 0.815, 0.0669, 0.787, 0.0656 (-11=>2.692)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 170, loss: 2.7, losses: 0.884, 0.0897, 0.81, 0.0683, 0.784, 0.0661 (-4=>2.689)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 180, loss: 2.71, losses: 0.886, 0.09, 0.815, 0.0682, 0.782, 0.0658 (-14=>2.689)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 190, loss: 2.7, losses: 0.885, 0.0898, 0.814, 0.0677, 0.782, 0.0662 (-24=>2.689)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 200, loss: 2.71, losses: 0.887, 0.0897, 0.815, 0.068, 
0.785, 0.0661 (-34=>2.689)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 210, loss: 2.71, losses: 0.884, 0.0899, 0.814, 0.0687, 0.784, 0.0664 (-44=>2.689)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 220, loss: 2.71, losses: 0.886, 0.0895, 0.815, 0.0678, 0.781, 0.0662 (-54=>2.689)\n\n0it [00:00, ?it/s]\nDropping learning rate\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 230, loss: 2.7, losses: 0.885, 0.0884, 0.816, 0.0674, 0.782, 0.0659 (-2=>2.7)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 240, loss: 2.71, losses: 0.886, 0.0911, 0.817, 0.069, 0.785, 0.0664 (-3=>2.696)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 250, loss: 2.71, losses: 0.884, 0.0911, 0.818, 0.0684, 0.787, 0.0662 (-13=>2.696)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 260, loss: 2.73, losses: 0.888, 0.0895, 0.824, 0.0676, 0.791, 0.0656 (-23=>2.696)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 270, loss: 2.7, losses: 0.883, 0.0916, 0.812, 0.0688, 0.781, 0.0666 (-33=>2.696)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 280, loss: 2.73, losses: 0.892, 0.0889, 0.82, 0.0673, 0.792, 0.0653 (-43=>2.696)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 290, loss: 2.72, losses: 0.891, 0.0888, 0.819, 0.0675, 0.79, 0.0655 (-53=>2.696)\n\n0it [00:00, ?it/s]\n\n0it [00:10, ?it/s]\n\n0it [00:00, ?it/s]\niter: 300, finished (-63=>2.696)\n\n0it [00:00, ?it/s]\n\n0it [00:00, ?it/s]",
"metrics": {
"predict_time": 326.76229,
"total_time": 783.657073
},
"output": [
{
"file": "https://replicate.delivery/mgxm/6befcecd-1767-4580-9f4a-7f75634baccb/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/8c9444d6-2c46-4df0-a778-47b2a0c7a450/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/ca2b1cdc-070a-457a-bcbc-b2ac4e5ed037/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/9faa48d1-3e6a-4a6f-ab1b-62d89eaaacb9/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/7d2a1f60-3b83-45d6-92d7-8de844871151/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/3a13c12b-d36b-4155-8b8b-fb656ca5d2d9/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/212e805e-df53-4792-aad1-661210593c73/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/d10210d1-cf8e-4645-b088-31c811228910/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/a302cb5a-c1af-4b85-aba8-8582cc97ac46/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/962a182a-6347-4af2-a7bc-4abc4f6ca217/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/eee8ab02-1e4c-4a5d-8293-7f859bf93500/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/e08d7dd7-1d38-48fa-9f8b-6d9fa1db9ade/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/3578fd13-53d0-430c-8eb9-972314bc9b91/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/d47bf5bb-f8d2-4c63-a5b7-65f967824e92/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/ae89f627-3dd8-4c8d-b25d-a73a7f5c5d6d/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/f92d704d-7949-4537-b1c1-828435a67738/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/0d7989e2-eda9-40d6-b312-0370c1b0a54e/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/e7ad16fd-9410-4f11-8196-53d5c93a2f8e/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/8efdf5d2-abbf-43c3-8acf-586d37518dec/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/e3a57beb-aafd-4503-9182-d36c19244cfe/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/37f2c254-8726-4493-a52e-d787c165099a/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/7ff85bca-8791-4332-bfc2-22f2c930d319/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/70ddfe5b-f0ff-45d9-ae7f-752ca62387e7/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/d28bc34b-ae3a-482e-896d-1b6fea039162/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/a966a3a9-5ae7-4978-bd84-9164b37b2e10/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/5169317f-b645-484d-8004-7c13aec9a68b/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/18ccb055-45ee-41c2-a7a4-6054d7dab463/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/229e8064-ca3f-4a53-bb6c-c6bcb9583806/tempfile.png"
},
{
"file": "https://replicate.delivery/mgxm/6f7fe288-d550-4016-a9ae-c06ee1051d49/tempfile.png"
}
],
"started_at": "2022-01-23T04:06:22.381983Z",
"status": "succeeded",
"urls": {
"get": "https://api.replicate.com/v1/predictions/s4pasue4lvbldopl4eolqyoiee",
"cancel": "https://api.replicate.com/v1/predictions/s4pasue4lvbldopl4eolqyoiee/cancel"
},
"version": "3cd30ff3fbbe99c4bee36976b6d275233f635a3d980b708c404f82aea85f5092"
}
---> BasePixrayPredictor Predict
Using seed:
6555919756984463465
Loaded CLIP RN50: 102.01M params
Loaded CLIP ViT-B/32: 151.28M params
Loaded CLIP ViT-B/16: 149.62M params
Using device:
cuda:0
Optimising using:
Adam
Using text prompts:
['Using artists as an example, when anyone can create amazing art, there will be incredible upside for humanity, but downside for most individual artists. (On the other hand, totally new kinds of art will be possible, and the skill that will matter will be imagination.)']
0it [00:00, ?it/s]
/root/.pyenv/versions/3.8.12/lib/python3.8/site-packages/torch/nn/functional.py:3609: UserWarning: Default upsampling behavior when mode=bilinear is changed to align_corners=False since 0.4.0. Please specify align_corners=True if the old behavior is desired. See the documentation of nn.Upsample for details.
warnings.warn(
iter: 0, loss: 2.98, losses: 0.969, 0.0885, 0.909, 0.0641, 0.887, 0.0655 (-0=>2.984)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 10, loss: 2.91, losses: 0.949, 0.0894, 0.883, 0.0632, 0.864, 0.0634 (-0=>2.912)
0it [00:00, ?it/s]
0it [00:09, ?it/s]
0it [00:00, ?it/s]
iter: 20, loss: 2.91, losses: 0.944, 0.0899, 0.882, 0.0637, 0.865, 0.0627 (-1=>2.901)
0it [00:00, ?it/s]
0it [00:09, ?it/s]
0it [00:00, ?it/s]
iter: 30, loss: 2.89, losses: 0.94, 0.0885, 0.878, 0.0634, 0.861, 0.0632 (-0=>2.894)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 40, loss: 2.89, losses: 0.938, 0.09, 0.878, 0.064, 0.856, 0.0629 (-1=>2.884)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 50, loss: 2.87, losses: 0.928, 0.0929, 0.872, 0.0645, 0.85, 0.0624 (-0=>2.87)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 60, loss: 2.85, losses: 0.926, 0.0901, 0.863, 0.064, 0.841, 0.0624 (-0=>2.847)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 70, loss: 2.82, losses: 0.921, 0.0893, 0.854, 0.0648, 0.831, 0.0618 (-0=>2.821)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 80, loss: 2.76, losses: 0.903, 0.089, 0.836, 0.0658, 0.808, 0.0623 (-0=>2.765)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 90, loss: 2.75, losses: 0.899, 0.0893, 0.829, 0.0663, 0.803, 0.0649 (-2=>2.741)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 100, loss: 2.72, losses: 0.891, 0.0893, 0.816, 0.0684, 0.788, 0.0658 (-0=>2.719)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 110, loss: 2.72, losses: 0.891, 0.0902, 0.812, 0.0685, 0.788, 0.0661 (-4=>2.71)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 120, loss: 2.71, losses: 0.89, 0.0892, 0.814, 0.0677, 0.785, 0.0659 (-6=>2.7)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 130, loss: 2.72, losses: 0.891, 0.0899, 0.819, 0.0678, 0.789, 0.0658 (-7=>2.698)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 140, loss: 2.71, losses: 0.885, 0.0912, 0.812, 0.0688, 0.784, 0.0656 (-17=>2.698)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 150, loss: 2.71, losses: 0.887, 0.0903, 0.813, 0.0681, 0.785, 0.0658 (-1=>2.692)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 160, loss: 2.71, losses: 0.89, 0.0894, 0.815, 0.0669, 0.787, 0.0656 (-11=>2.692)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 170, loss: 2.7, losses: 0.884, 0.0897, 0.81, 0.0683, 0.784, 0.0661 (-4=>2.689)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 180, loss: 2.71, losses: 0.886, 0.09, 0.815, 0.0682, 0.782, 0.0658 (-14=>2.689)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 190, loss: 2.7, losses: 0.885, 0.0898, 0.814, 0.0677, 0.782, 0.0662 (-24=>2.689)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 200, loss: 2.71, losses: 0.887, 0.0897, 0.815, 0.068, 0.785, 0.0661 (-34=>2.689)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 210, loss: 2.71, losses: 0.884, 0.0899, 0.814, 0.0687, 0.784, 0.0664 (-44=>2.689)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 220, loss: 2.71, losses: 0.886, 0.0895, 0.815, 0.0678, 0.781, 0.0662 (-54=>2.689)
0it [00:00, ?it/s]
Dropping learning rate
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 230, loss: 2.7, losses: 0.885, 0.0884, 0.816, 0.0674, 0.782, 0.0659 (-2=>2.7)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 240, loss: 2.71, losses: 0.886, 0.0911, 0.817, 0.069, 0.785, 0.0664 (-3=>2.696)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 250, loss: 2.71, losses: 0.884, 0.0911, 0.818, 0.0684, 0.787, 0.0662 (-13=>2.696)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 260, loss: 2.73, losses: 0.888, 0.0895, 0.824, 0.0676, 0.791, 0.0656 (-23=>2.696)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 270, loss: 2.7, losses: 0.883, 0.0916, 0.812, 0.0688, 0.781, 0.0666 (-33=>2.696)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 280, loss: 2.73, losses: 0.892, 0.0889, 0.82, 0.0673, 0.792, 0.0653 (-43=>2.696)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 290, loss: 2.72, losses: 0.891, 0.0888, 0.819, 0.0675, 0.79, 0.0655 (-53=>2.696)
0it [00:00, ?it/s]
0it [00:10, ?it/s]
0it [00:00, ?it/s]
iter: 300, finished (-63=>2.696)
0it [00:00, ?it/s]
0it [00:00, ?it/s]
This example was created by a different version, pixray/text2image:3cd30ff3.