Input (JSON):
{
"do_sample": false,
"max_new_tokens": 512,
"messages": "[{\"role\": \"user\", \"content\": \"please create a rust enum named prediction status, with three variants starting, in progress and complete\"}]",
"num_return_sequences": 1
}

Install Replicate's Node.js client library:

npm install replicate
Set the REPLICATE_API_TOKEN environment variable:

export REPLICATE_API_TOKEN=r8_Doi**********************************
This is your API token. Keep it to yourself.
// Create a Replicate API client, authenticating with the
// REPLICATE_API_TOKEN environment variable.
import Replicate from "replicate";

const replicate = new Replicate({ auth: process.env.REPLICATE_API_TOKEN });
Run kcaverly/deepseek-coder-6.7b-instruct using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
// Model inputs. Note: `messages` is a JSON-encoded chat transcript,
// passed as a string per the model's input schema.
const input = {
  do_sample: false,
  max_new_tokens: 512,
  messages: "[{\"role\": \"user\", \"content\": \"please create a rust enum named prediction status, with three variants starting, in progress and complete\"}]",
  num_return_sequences: 1,
};

// Run the model and print its output once the prediction finishes.
const output = await replicate.run(
  "kcaverly/deepseek-coder-6.7b-instruct:e86e32a176426ab0bcbc2beaeb94b90334777219815be3aef1e958def2c73c56",
  { input }
);
console.log(output);
To learn more, take a look at the guide on getting started with Node.js.
pip install replicate
Set the REPLICATE_API_TOKEN environment variable:

export REPLICATE_API_TOKEN=r8_Doi**********************************
This is your API token. Keep it to yourself.
import replicate
Run kcaverly/deepseek-coder-6.7b-instruct using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
# Model inputs. Note: `messages` is a JSON-encoded chat transcript,
# passed as a string per the model's input schema.
model_input = {
    "do_sample": False,
    "max_new_tokens": 512,
    "messages": "[{\"role\": \"user\", \"content\": \"please create a rust enum named prediction status, with three variants starting, in progress and complete\"}]",
    "num_return_sequences": 1,
}

# The kcaverly/deepseek-coder-6.7b-instruct model can stream output as
# it's running: `run` returns an iterator over output chunks.
output = replicate.run(
    "kcaverly/deepseek-coder-6.7b-instruct:e86e32a176426ab0bcbc2beaeb94b90334777219815be3aef1e958def2c73c56",
    input=model_input,
)

# Output schema: https://replicate.com/kcaverly/deepseek-coder-6.7b-instruct/api#output-schema
for item in output:
    print(item, end="")
To learn more, take a look at the guide on getting started with Python.
Set the REPLICATE_API_TOKEN environment variable:

export REPLICATE_API_TOKEN=r8_Doi**********************************
This is your API token. Keep it to yourself.
Run kcaverly/deepseek-coder-6.7b-instruct using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
# Create a prediction via Replicate's HTTP API and block until it
# finishes ("Prefer: wait"). Auth comes from $REPLICATE_API_TOKEN;
# the model version and its inputs are sent as a JSON body.
curl -s -X POST \
-H "Authorization: Bearer $REPLICATE_API_TOKEN" \
-H "Content-Type: application/json" \
-H "Prefer: wait" \
-d $'{
"version": "kcaverly/deepseek-coder-6.7b-instruct:e86e32a176426ab0bcbc2beaeb94b90334777219815be3aef1e958def2c73c56",
"input": {
"do_sample": false,
"max_new_tokens": 512,
"messages": "[{\\"role\\": \\"user\\", \\"content\\": \\"please create a rust enum named prediction status, with three variants starting, in progress and complete\\"}]",
"num_return_sequences": 1
}
}' \
https://api.replicate.com/v1/predictions
To learn more, take a look at Replicate’s HTTP API reference docs.
Sure, here is how you can define an enum named `PredictionStatus` with three variants: `Starting`, `InProgress`, and `Complete` in Rust:
```rust
/// Lifecycle states of a prediction.
#[derive(Debug)]
enum PredictionStatus {
Starting,
InProgress,
Complete,
}
```
In this code:
- `#[derive(Debug)]` is an attribute that tells Rust to derive certain traits for this enum. In this case, `Debug` is used to enable printing of the enum variants for debugging purposes.
- `enum PredictionStatus` defines a new enum type named `PredictionStatus`.
- `Starting`, `InProgress`, and `Complete` are the variants of the enum.
You can use this enum in your code like this:
```rust
fn main() {
// Construct a variant and print it with the derived `Debug` formatter.
let status = PredictionStatus::Starting;
println!("{:?}", status); // prints: Starting
}
```
In this code, `PredictionStatus::Starting` creates a value of the `PredictionStatus` enum with the variant `Starting`. The `println!` macro with the `{:?}` format string is used to print the enum value, which will print the variant name.
{
"id": "fjugwu3b7gvoanx577qjyd6jfa",
"model": "kcaverly/deepseek-coder-6.7b-instruct",
"version": "e86e32a176426ab0bcbc2beaeb94b90334777219815be3aef1e958def2c73c56",
"input": {
"do_sample": false,
"max_new_tokens": 512,
"messages": "[{\"role\": \"user\", \"content\": \"please create a rust enum named prediction status, with three variants starting, in progress and complete\"}]",
"num_return_sequences": 1
},
"logs": "/root/.pyenv/versions/3.11.7/lib/python3.11/site-packages/transformers/generation/configuration_utils.py:386: UserWarning: `do_sample` is set to `False`. However, `top_p` is set to `None` -- this flag is only used in sample-based generation modes. You should set `do_sample=True` or unset `top_p`.\nwarnings.warn(\n/root/.pyenv/versions/3.11.7/lib/python3.11/site-packages/transformers/generation/configuration_utils.py:396: UserWarning: `do_sample` is set to `False`. However, `top_k` is set to `None` -- this flag is only used in sample-based generation modes. You should set `do_sample=True` or unset `top_k`.\nwarnings.warn(\nThe attention mask and the pad token id were not set. As a consequence, you may observe unexpected behavior. Please pass your input's `attention_mask` to obtain reliable results.\nSetting `pad_token_id` to `eos_token_id`:32021 for open-end generation.\nSure,\nhere\nis\nhow\nyou\ncan\ndefine\nan\nenum\nnamed\n`PredictionStatus`\nwith\nthree\nvariants:\n`Starting`,\n`InProgress`,\nand\n`Complete`\nin\nRust:\n```rust\n#[derive(Debug)]\nenum\nPredictionStatus\n{\nStarting,\nInProgress,\nComplete,\n}\n```\nIn\nthis\ncode:\n-\n`#[derive(Debug)]`\nis\nan\nattribute\nthat\ntells\nRust\nto\nderive\ncertain\ntraits\nfor\nthis\nenum.\nIn\nthis\ncase,\n`Debug`\nis\nused\nto\nenable\nprinting\nof\nthe\nenum\nvariants\nfor\ndebugging\npurposes.\n-\n`enum\nPredictionStatus`\ndefines\na\nnew\nenum\ntype\nnamed\n`PredictionStatus`.\n-\n`Starting`,\n`InProgress`,\nand\n`Complete`\nare\nthe\nvariants\nof\nthe\nenum.\nYou\ncan\nuse\nthis\nenum\nin\nyour\ncode\nlike\nthis:\n```rust\nfn\nmain()\n{\nlet\nstatus\n=\nPredictionStatus::Starting;\nprintln!(\"{:?}\",\nstatus);\n//\nprints:\nStarting\n}\n```\nIn\nthis\ncode,\n`PredictionStatus::Starting`\ncreates\na\nvalue\nof\nthe\n`PredictionStatus`\nenum\nwith\nthe\nvariant\n`Starting`.\nThe\n`println!`\nmacro\nwith\nthe\n`{:?}`\nformat\nstring\nis\nused\nto\nprint\nthe\nenum\nvalue,\nwhich\nwill\nprint\nthe\nvaria
nt\nname.\n<|EOT|>",
"output": [
"",
"",
"Sure, ",
"here ",
"is ",
"how ",
"you ",
"can ",
"define ",
"an ",
"enum ",
"named ",
"",
"",
"",
"",
"`PredictionStatus` ",
"with ",
"three ",
"",
"variants: ",
"",
"",
"",
"",
"`Starting`, ",
"",
"",
"",
"",
"`InProgress`, ",
"and ",
"",
"",
"`Complete` ",
"in ",
"",
"",
"Rust:\n",
"\n",
"",
"",
"```rust\n",
"",
"",
"",
"",
"",
"",
"",
"#[derive(Debug)]\n",
"",
"enum ",
"",
"",
"PredictionStatus ",
"{\n",
" ",
" ",
"",
"Starting,\n",
" ",
" ",
"",
"",
"InProgress,\n",
" ",
" ",
"",
"Complete,\n",
"",
"}\n",
"",
"```\n",
"\n",
"",
"In ",
"this ",
"",
"code:\n",
"\n",
"",
"- ",
"",
"",
"",
"",
"",
"",
"",
"",
"`#[derive(Debug)]` ",
"is ",
"an ",
"attribute ",
"that ",
"tells ",
"",
"Rust ",
"to ",
"derive ",
"certain ",
"traits ",
"for ",
"this ",
"",
"enum. ",
"In ",
"this ",
"",
"case, ",
"",
"",
"`Debug` ",
"is ",
"used ",
"to ",
"enable ",
"printing ",
"of ",
"the ",
"enum ",
"variants ",
"for ",
"",
"debugging ",
"",
"purposes.\n",
"",
"- ",
"",
"`enum ",
"",
"",
"",
"PredictionStatus` ",
"defines ",
"a ",
"new ",
"enum ",
"type ",
"named ",
"",
"",
"",
"",
"",
"`PredictionStatus`.\n",
"",
"- ",
"",
"",
"",
"",
"`Starting`, ",
"",
"",
"",
"",
"`InProgress`, ",
"and ",
"",
"",
"`Complete` ",
"are ",
"the ",
"variants ",
"of ",
"the ",
"",
"enum.\n",
"\n",
"",
"You ",
"can ",
"use ",
"this ",
"enum ",
"in ",
"your ",
"code ",
"like ",
"",
"this:\n",
"\n",
"",
"",
"```rust\n",
"",
"fn ",
"",
"main() ",
"{\n",
" ",
" ",
"let ",
"status ",
"= ",
"",
"",
"",
"",
"",
"",
"PredictionStatus::Starting;\n",
" ",
" ",
"",
"",
"",
"",
"",
"",
"",
"",
"println!(\"{:?}\", ",
"",
"status); ",
" ",
"// ",
"",
"prints: ",
"Starting\n",
"",
"}\n",
"",
"```\n",
"\n",
"",
"In ",
"this ",
"",
"code, ",
"",
"",
"",
"",
"",
"",
"",
"`PredictionStatus::Starting` ",
"creates ",
"a ",
"value ",
"of ",
"the ",
"",
"",
"",
"",
"`PredictionStatus` ",
"enum ",
"with ",
"the ",
"variant ",
"",
"",
"",
"",
"`Starting`. ",
"The ",
"",
"",
"",
"`println!` ",
"macro ",
"with ",
"the ",
"",
"",
"",
"",
"",
"`{:?}` ",
"format ",
"string ",
"is ",
"used ",
"to ",
"print ",
"the ",
"enum ",
"",
"value, ",
"which ",
"will ",
"print ",
"the ",
"variant ",
"",
"name.\n",
""
],
"data_removed": false,
"error": null,
"source": "web",
"status": "succeeded",
"created_at": "2023-12-07T16:33:36.05121Z",
"started_at": "2023-12-07T16:37:11.585287Z",
"completed_at": "2023-12-07T16:37:26.569813Z",
"urls": {
"cancel": "https://api.replicate.com/v1/predictions/fjugwu3b7gvoanx577qjyd6jfa/cancel",
"get": "https://api.replicate.com/v1/predictions/fjugwu3b7gvoanx577qjyd6jfa",
"stream": "https://streaming-api.svc.us.c.replicate.net/v1/predictions/fjugwu3b7gvoanx577qjyd6jfa"
},
"metrics": {
"predict_time": 14.984526,
"total_time": 230.518603
}
}