Example input:
{
"max_new_tokens": 100,
"temperature": 0.2,
"text": "import torch.nn as nn"
}
Install Replicate’s Node.js client library:
npm install replicate
Set the REPLICATE_API_TOKEN environment variable:
export REPLICATE_API_TOKEN=r8_Lye**********************************
This is your API token. Keep it to yourself.
Import and set up the client:
import Replicate from "replicate";
const replicate = new Replicate({
auth: process.env.REPLICATE_API_TOKEN,
});
Run dsingal0/stablecode-completion-alpha-3b-4k using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
const output = await replicate.run(
"dsingal0/stablecode-completion-alpha-3b-4k:fbecedaf4d824f6de70b2075ba8a35ee306f37b37a335cc839ce75a9e44d0039",
{
input: {
max_new_tokens: 100,
temperature: 0.2,
text: "import torch.nn as nn"
}
}
);
console.log(output);
To learn more, take a look at the guide on getting started with Node.js.
Install Replicate’s Python client library:
pip install replicate
Set the REPLICATE_API_TOKEN environment variable:
export REPLICATE_API_TOKEN=r8_Lye**********************************
This is your API token. Keep it to yourself.
Import the client:
import replicate
Run dsingal0/stablecode-completion-alpha-3b-4k using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
output = replicate.run(
"dsingal0/stablecode-completion-alpha-3b-4k:fbecedaf4d824f6de70b2075ba8a35ee306f37b37a335cc839ce75a9e44d0039",
input={
"max_new_tokens": 100,
"temperature": 0.2,
"text": "import torch.nn as nn"
}
)
print(output)
To learn more, take a look at the guide on getting started with Python.
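If you need more than the final output, for example the status, logs, or timing metrics shown in the example response at the end of this page, you can create the prediction explicitly and poll it. The following is a minimal sketch, assuming the replicate Python client and the same version hash as above; the one-second poll interval is an arbitrary choice.
import time
import replicate

# Create the prediction directly instead of using replicate.run(),
# so the full prediction object (status, logs, metrics) stays available.
prediction = replicate.predictions.create(
    version="fbecedaf4d824f6de70b2075ba8a35ee306f37b37a335cc839ce75a9e44d0039",
    input={
        "max_new_tokens": 100,
        "temperature": 0.2,
        "text": "import torch.nn as nn",
    },
)

# Poll until the prediction reaches a terminal state.
while prediction.status not in ("succeeded", "failed", "canceled"):
    time.sleep(1)
    prediction.reload()

print(prediction.status)
print(prediction.output)
print(prediction.metrics)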
Set the REPLICATE_API_TOKEN environment variable:
export REPLICATE_API_TOKEN=r8_Lye**********************************
This is your API token. Keep it to yourself.
Run dsingal0/stablecode-completion-alpha-3b-4k using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
curl -s -X POST \
-H "Authorization: Bearer $REPLICATE_API_TOKEN" \
-H "Content-Type: application/json" \
-H "Prefer: wait" \
-d $'{
"version": "dsingal0/stablecode-completion-alpha-3b-4k:fbecedaf4d824f6de70b2075ba8a35ee306f37b37a335cc839ce75a9e44d0039",
"input": {
"max_new_tokens": 100,
"temperature": 0.2,
"text": "import torch.nn as nn"
}
}' \
https://api.replicate.com/v1/predictions
To learn more, take a look at Replicate’s HTTP API reference docs.
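The same request can be sent from any HTTP client. Below is a rough Python equivalent of the cURL command above, assuming the requests package is installed; the Prefer: wait header asks the API to hold the connection until the prediction finishes, and the response body is a prediction object like the example that follows.
import os
import requests

# Sketch of the cURL call above using Python's requests library.
response = requests.post(
    "https://api.replicate.com/v1/predictions",
    headers={
        "Authorization": f"Bearer {os.environ['REPLICATE_API_TOKEN']}",
        "Content-Type": "application/json",
        "Prefer": "wait",  # wait for the prediction to finish before responding
    },
    json={
        "version": "dsingal0/stablecode-completion-alpha-3b-4k:fbecedaf4d824f6de70b2075ba8a35ee306f37b37a335cc839ce75a9e44d0039",
        "input": {
            "max_new_tokens": 100,
            "temperature": 0.2,
            "text": "import torch.nn as nn",
        },
    },
)
response.raise_for_status()
prediction = response.json()
print(prediction["status"], prediction.get("output"))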
Example prediction response:
{
"id": "hdp4573bydxqvxh2hu7ieqmwh4",
"model": "dsingal0/stablecode-completion-alpha-3b-4k",
"version": "fbecedaf4d824f6de70b2075ba8a35ee306f37b37a335cc839ce75a9e44d0039",
"input": {
"max_new_tokens": 100,
"temperature": 0.2,
"text": "import torch.nn as nn"
},
"logs": "Setting `pad_token_id` to `eos_token_id`:0 for open-end generation.\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.optim as optim\nfrom torch.autograd import Variable\nimport torch\nimport numpy as np\nimport math\nfrom torch.utils.data import DataLoader\nfrom torchvision import datasets, transforms\nfrom torchvision.utils import save_image\nfrom torchvision.utils import make_grid\nfrom torchvision.utils import make_grid\nfrom torchvision.utils import save_image\nfrom torchvision.utils import make_grid\nfrom torchvision.",
"output": "import torch.nn as nn\nimport torch.nn.functional as F\nimport torch.optim as optim\nfrom torch.autograd import Variable\nimport torch\nimport numpy as np\nimport math\nfrom torch.utils.data import DataLoader\nfrom torchvision import datasets, transforms\nfrom torchvision.utils import save_image\nfrom torchvision.utils import make_grid\nfrom torchvision.utils import make_grid\nfrom torchvision.utils import save_image\nfrom torchvision.utils import make_grid\nfrom torchvision.",
"data_removed": false,
"error": null,
"source": "web",
"status": "succeeded",
"created_at": "2023-08-13T01:11:20.904913Z",
"started_at": "2023-08-13T01:13:05.632033Z",
"completed_at": "2023-08-13T01:13:09.628665Z",
"urls": {
"cancel": "https://api.replicate.com/v1/predictions/hdp4573bydxqvxh2hu7ieqmwh4/cancel",
"get": "https://api.replicate.com/v1/predictions/hdp4573bydxqvxh2hu7ieqmwh4"
},
"metrics": {
"predict_time": 3.996632,
"total_time": 108.723752
}
}
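As a small illustration of how the fields above can be used, the sketch below assumes prediction already holds the parsed JSON response from any of the snippets on this page.
from datetime import datetime

# Pull a few useful fields out of a parsed prediction object.
if prediction["status"] == "succeeded":
    print(prediction["output"])                    # the generated code completion
    print(prediction["metrics"]["predict_time"])   # seconds spent running the model

# The gap between created_at and started_at is time spent queued or cold-starting.
created = datetime.fromisoformat(prediction["created_at"].replace("Z", "+00:00"))
started = datetime.fromisoformat(prediction["started_at"].replace("Z", "+00:00"))
print(f"waited {(started - created).total_seconds():.1f}s before execution started")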