Example input:

{
  "max_new_tokens": 100,
  "prompt": "def print_hello_world():",
  "temperature": 0.01,
  "top_k": -1,
  "top_p": 0.95
}

Install Replicate's Node.js client library:

npm install replicate
Set the REPLICATE_API_TOKEN environment variable:

export REPLICATE_API_TOKEN=r8_3hz**********************************
This is your API token. Keep it to yourself.
Import and set up the client:

import Replicate from "replicate";

const replicate = new Replicate({
  auth: process.env.REPLICATE_API_TOKEN,
});
Run cjwbw/starcoder2-15b using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
const output = await replicate.run(
  "cjwbw/starcoder2-15b:d67b7d32b63bb8a2cf6b95c523921408e38ce7d7228fdff7b1eb636dc2c5ecd8",
  {
    input: {
      max_new_tokens: 100,
      prompt: "def print_hello_world():",
      temperature: 0.01,
      top_k: -1,
      top_p: 0.95
    }
  }
);
console.log(output);
To learn more, take a look at the guide on getting started with Node.js.
Install Replicate's Python client library:

pip install replicate
Set the REPLICATE_API_TOKEN environment variable:

export REPLICATE_API_TOKEN=r8_3hz**********************************
This is your API token. Keep it to yourself.
import replicate
Run cjwbw/starcoder2-15b using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
output = replicate.run(
    "cjwbw/starcoder2-15b:d67b7d32b63bb8a2cf6b95c523921408e38ce7d7228fdff7b1eb636dc2c5ecd8",
    input={
        "max_new_tokens": 100,
        "prompt": "def print_hello_world():",
        "temperature": 0.01,
        "top_k": -1,
        "top_p": 0.95
    }
)

# The cjwbw/starcoder2-15b model can stream output as it's running.
# The predict method returns an iterator, and you can iterate over that output.
for item in output:
    # https://replicate.com/cjwbw/starcoder2-15b/api#output-schema
    print(item, end="")
To learn more, take a look at the guide on getting started with Python.
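If you'd rather not block inside replicate.run, the Python client can also create a prediction and wait on it explicitly. This is a minimal sketch, assuming replicate.predictions.create and Prediction.wait behave as in recent versions of the replicate package:

import replicate

# Create the prediction without blocking; the bare version hash is the one
# shown in the examples above (assumption: create() accepts it directly).
prediction = replicate.predictions.create(
    version="d67b7d32b63bb8a2cf6b95c523921408e38ce7d7228fdff7b1eb636dc2c5ecd8",
    input={
        "max_new_tokens": 100,
        "prompt": "def print_hello_world():",
        "temperature": 0.01,
        "top_k": -1,
        "top_p": 0.95,
    },
)

# Block until the prediction reaches a terminal state, then join the chunks.
prediction.wait()
if prediction.status == "succeeded":
    print("".join(prediction.output))
else:
    print(prediction.error)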
Set the REPLICATE_API_TOKEN environment variable:

export REPLICATE_API_TOKEN=r8_3hz**********************************
This is your API token. Keep it to yourself.
Run cjwbw/starcoder2-15b using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
curl -s -X POST \
  -H "Authorization: Bearer $REPLICATE_API_TOKEN" \
  -H "Content-Type: application/json" \
  -H "Prefer: wait" \
  -d $'{
    "version": "cjwbw/starcoder2-15b:d67b7d32b63bb8a2cf6b95c523921408e38ce7d7228fdff7b1eb636dc2c5ecd8",
    "input": {
      "max_new_tokens": 100,
      "prompt": "def print_hello_world():",
      "temperature": 0.01,
      "top_k": -1,
      "top_p": 0.95
    }
  }' \
  https://api.replicate.com/v1/predictions
To learn more, take a look at Replicate’s HTTP API reference docs.
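The Prefer: wait header above asks the API to hold the connection open until the prediction finishes (or a server-side timeout is hit). Without it, the POST returns immediately with a prediction in the "starting" state, and you poll its urls.get endpoint until the status becomes terminal. Here is a rough sketch of that flow in Python with requests, reusing the payload from the curl example and the urls/status fields shown in the response below:

import os
import time
import requests

headers = {
    "Authorization": f"Bearer {os.environ['REPLICATE_API_TOKEN']}",
    "Content-Type": "application/json",
}

# Create the prediction (same body as the curl example, minus Prefer: wait).
prediction = requests.post(
    "https://api.replicate.com/v1/predictions",
    headers=headers,
    json={
        "version": "cjwbw/starcoder2-15b:d67b7d32b63bb8a2cf6b95c523921408e38ce7d7228fdff7b1eb636dc2c5ecd8",
        "input": {
            "max_new_tokens": 100,
            "prompt": "def print_hello_world():",
            "temperature": 0.01,
            "top_k": -1,
            "top_p": 0.95,
        },
    },
).json()

# Poll the prediction's "get" URL until it succeeds, fails, or is canceled.
while prediction["status"] not in ("succeeded", "failed", "canceled"):
    time.sleep(1)
    prediction = requests.get(prediction["urls"]["get"], headers=headers).json()

print("".join(prediction["output"]))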
Example output:

def print_hello_world():
    print("Hello World")

print_hello_world()

# +
# 1.1.2.1
# 1.1.2.2
# 1.1.2.3
# 1.1.2.4
# 1.1.2.5
# 1.1.2.6
# 1.1.2.7
# 1.1.2.8
#

The full prediction object returned by the API:

{
"id": "45gxk6lbioeuqmjignlrrpnvke",
"model": "cjwbw/starcoder2-15b",
"version": "d67b7d32b63bb8a2cf6b95c523921408e38ce7d7228fdff7b1eb636dc2c5ecd8",
"input": {
"max_new_tokens": 100,
"prompt": "def print_hello_world():",
"temperature": 0.01,
"top_k": -1,
"top_p": 0.95
},
"logs": "/root/.pyenv/versions/3.11.7/lib/python3.11/site-packages/transformers/generation/configuration_utils.py:492: UserWarning: `do_sample` is set to `False`. However, `temperature` is set to `0.01` -- this flag is only used in sample-based generation modes. You should set `do_sample=True` or unset `temperature`.\nwarnings.warn(\n/root/.pyenv/versions/3.11.7/lib/python3.11/site-packages/transformers/generation/configuration_utils.py:497: UserWarning: `do_sample` is set to `False`. However, `top_p` is set to `0.95` -- this flag is only used in sample-based generation modes. You should set `do_sample=True` or unset `top_p`.\nwarnings.warn(\n/root/.pyenv/versions/3.11.7/lib/python3.11/site-packages/transformers/generation/configuration_utils.py:509: UserWarning: `do_sample` is set to `False`. However, `top_k` is set to `-1` -- this flag is only used in sample-based generation modes. You should set `do_sample=True` or unset `top_k`.\nwarnings.warn(\nThe attention mask and the pad token id were not set. As a consequence, you may observe unexpected behavior. Please pass your input's `attention_mask` to obtain reliable results.\nSetting `pad_token_id` to `eos_token_id`:50256 for open-end generation.",
"output": [
"def ",
"print_hello_world():\n ",
" ",
"",
"",
"print(\"Hello ",
"",
"World\")\n",
"\n",
"",
"",
"",
"",
"",
"",
"print_hello_world()\n",
"\n",
"",
"# ",
"+\n",
"",
"# ",
"",
"",
"",
"",
"",
"",
"",
"1.1.2.1\n",
"",
"# ",
"",
"",
"",
"",
"",
"",
"",
"1.1.2.2\n",
"",
"# ",
"",
"",
"",
"",
"",
"",
"",
"1.1.2.3\n",
"",
"# ",
"",
"",
"",
"",
"",
"",
"",
"1.1.2.4\n",
"",
"# ",
"",
"",
"",
"",
"",
"",
"",
"1.1.2.5\n",
"",
"# ",
"",
"",
"",
"",
"",
"",
"",
"1.1.2.6\n",
"",
"# ",
"",
"",
"",
"",
"",
"",
"",
"1.1.2.7\n",
"",
"# ",
"",
"",
"",
"",
"",
"",
"",
"1.1.2.8\n",
"",
"#"
],
"data_removed": false,
"error": null,
"source": "web",
"status": "succeeded",
"created_at": "2024-03-20T10:44:06.928414Z",
"started_at": "2024-03-20T10:45:30.890665Z",
"completed_at": "2024-03-20T10:45:37.2003Z",
"urls": {
"cancel": "https://api.replicate.com/v1/predictions/45gxk6lbioeuqmjignlrrpnvke/cancel",
"get": "https://api.replicate.com/v1/predictions/45gxk6lbioeuqmjignlrrpnvke",
"stream": "https://streaming-api.svc.us.c.replicate.net/v1/streams/asnnem5s67ulfvuvjl54se4jwb6fiafieowiwzchnk365bqd7sdq"
},
"metrics": {
"predict_time": 6.309635,
"total_time": 90.271886
}
}
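The "output" field holds the streamed text chunks in arrival order, so joining them reproduces the completion shown in the example output above, and "metrics" separates model run time from total wall-clock time (which here also covers setup and queueing). A small sketch of reading those fields, assuming the JSON above has been saved to a hypothetical prediction.json:

import json

# Parse the prediction document shown above (file name is illustrative).
with open("prediction.json") as f:
    prediction = json.load(f)

# Joining the streamed chunks yields the full generated text.
print("".join(prediction["output"]))

# predict_time is how long the model ran; total_time also includes queue/boot time.
print(prediction["metrics"]["predict_time"], "seconds of model time")
print(prediction["metrics"]["total_time"], "seconds end to end")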