lucataco/ollama-llama3.2-vision-11b:d4e81fc1
Input
Run this model in Node.js. First, install Replicate's Node.js client library:
npm install replicate
Then set the REPLICATE_API_TOKEN environment variable:
export REPLICATE_API_TOKEN=<paste-your-token-here>
Find your API token in your account settings.
Import and set up the client:
import Replicate from "replicate";
const replicate = new Replicate({
  auth: process.env.REPLICATE_API_TOKEN,
});
Run lucataco/ollama-llama3.2-vision-11b using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
const output = await replicate.run(
  "lucataco/ollama-llama3.2-vision-11b:d4e81fc1472556464f1ee5cea4de177b2fe95a6eaadb5f63335df1ba654597af",
  {
    input: {
      image: "https://replicate.delivery/pbxt/M9qceG4RL6Ov5dpJwPv76d6L1UoNtMAfoGq9OgN6dkp4IJJf/ai2d_test.jpg",
      top_p: 0.95,
      prompt: "What happens when you take out white cat from this chain?",
      max_tokens: 512,
      temperature: 0.7
    }
  }
);
console.log(output);
To learn more, take a look at the guide on getting started with Node.js.
Run this model in Python. First, install Replicate's Python client library:
pip install replicate
Then set the REPLICATE_API_TOKEN environment variable:
export REPLICATE_API_TOKEN=<paste-your-token-here>
Find your API token in your account settings.
Import the client:
import replicate
Run lucataco/ollama-llama3.2-vision-11b using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
output = replicate.run(
    "lucataco/ollama-llama3.2-vision-11b:d4e81fc1472556464f1ee5cea4de177b2fe95a6eaadb5f63335df1ba654597af",
    input={
        "image": "https://replicate.delivery/pbxt/M9qceG4RL6Ov5dpJwPv76d6L1UoNtMAfoGq9OgN6dkp4IJJf/ai2d_test.jpg",
        "top_p": 0.95,
        "prompt": "What happens when you take out white cat from this chain?",
        "max_tokens": 512,
        "temperature": 0.7
    }
)
# The lucataco/ollama-llama3.2-vision-11b model can stream output as it's running.
# The predict method returns an iterator, and you can iterate over that output.
for item in output:
    # https://replicate.com/lucataco/ollama-llama3.2-vision-11b/api#output-schema
    print(item, end="")
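Recent releases of the Python client also provide a replicate.stream helper that yields server-sent events as they arrive; here is a minimal sketch, assuming your installed replicate package includes it:
import replicate

# Stream tokens as server-sent events rather than iterating run() output.
# Assumes a recent `replicate` client version that exposes replicate.stream.
for event in replicate.stream(
    "lucataco/ollama-llama3.2-vision-11b:d4e81fc1472556464f1ee5cea4de177b2fe95a6eaadb5f63335df1ba654597af",
    input={
        "image": "https://replicate.delivery/pbxt/M9qceG4RL6Ov5dpJwPv76d6L1UoNtMAfoGq9OgN6dkp4IJJf/ai2d_test.jpg",
        "prompt": "What happens when you take out white cat from this chain?",
    },
):
    print(str(event), end="")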
To learn more, take a look at the guide on getting started with Python.
Run this model with cURL. First, set the REPLICATE_API_TOKEN environment variable:
export REPLICATE_API_TOKEN=<paste-your-token-here>
Find your API token in your account settings.
Run lucataco/ollama-llama3.2-vision-11b using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
curl -s -X POST \
  -H "Authorization: Bearer $REPLICATE_API_TOKEN" \
  -H "Content-Type: application/json" \
  -H "Prefer: wait" \
  -d $'{
    "version": "lucataco/ollama-llama3.2-vision-11b:d4e81fc1472556464f1ee5cea4de177b2fe95a6eaadb5f63335df1ba654597af",
    "input": {
      "image": "https://replicate.delivery/pbxt/M9qceG4RL6Ov5dpJwPv76d6L1UoNtMAfoGq9OgN6dkp4IJJf/ai2d_test.jpg",
      "top_p": 0.95,
      "prompt": "What happens when you take out white cat from this chain?",
      "max_tokens": 512,
      "temperature": 0.7
    }
  }' \
  https://api.replicate.com/v1/predictions
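The same request can be made from Python with any HTTP client. Here is a rough sketch using the requests library (an assumed dependency, not part of the example above); the Prefer: wait header asks the API to hold the connection until the prediction finishes, and the sketch polls the prediction's get URL in case it returns while still processing:
import os
import time
import requests

headers = {
    "Authorization": f"Bearer {os.environ['REPLICATE_API_TOKEN']}",
    "Content-Type": "application/json",
    "Prefer": "wait",  # ask the API to wait for the prediction to finish
}
body = {
    "version": "lucataco/ollama-llama3.2-vision-11b:d4e81fc1472556464f1ee5cea4de177b2fe95a6eaadb5f63335df1ba654597af",
    "input": {
        "image": "https://replicate.delivery/pbxt/M9qceG4RL6Ov5dpJwPv76d6L1UoNtMAfoGq9OgN6dkp4IJJf/ai2d_test.jpg",
        "top_p": 0.95,
        "prompt": "What happens when you take out white cat from this chain?",
        "max_tokens": 512,
        "temperature": 0.7,
    },
}
prediction = requests.post(
    "https://api.replicate.com/v1/predictions", headers=headers, json=body
).json()

# Poll the prediction's `get` URL until it reaches a terminal status.
while prediction["status"] not in ("succeeded", "failed", "canceled"):
    time.sleep(1)
    prediction = requests.get(prediction["urls"]["get"], headers=headers).json()

if prediction["status"] == "succeeded":
    print("".join(prediction["output"]))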
To learn more, take a look at Replicate’s HTTP API reference docs.
Run the model locally with Cog. First, install Cog with Homebrew:
brew install cog
If you don't have Homebrew, there are other installation options available.
Run this to download the model and run it in your local environment:
cog predict r8.im/lucataco/ollama-llama3.2-vision-11b@sha256:d4e81fc1472556464f1ee5cea4de177b2fe95a6eaadb5f63335df1ba654597af \
  -i 'image="https://replicate.delivery/pbxt/M9qceG4RL6Ov5dpJwPv76d6L1UoNtMAfoGq9OgN6dkp4IJJf/ai2d_test.jpg"' \
  -i 'top_p=0.95' \
  -i 'prompt="What happens when you take out white cat from this chain?"' \
  -i 'max_tokens=512' \
  -i 'temperature=0.7'
To learn more, take a look at the Cog documentation.
Alternatively, run the model as an HTTP server in your local environment with Docker:
docker run -d -p 5000:5000 --gpus=all r8.im/lucataco/ollama-llama3.2-vision-11b@sha256:d4e81fc1472556464f1ee5cea4de177b2fe95a6eaadb5f63335df1ba654597af
curl -s -X POST \
  -H "Content-Type: application/json" \
  -d $'{
    "input": {
      "image": "https://replicate.delivery/pbxt/M9qceG4RL6Ov5dpJwPv76d6L1UoNtMAfoGq9OgN6dkp4IJJf/ai2d_test.jpg",
      "top_p": 0.95,
      "prompt": "What happens when you take out white cat from this chain?",
      "max_tokens": 512,
      "temperature": 0.7
    }
  }' \
  http://localhost:5000/predictions
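You can call that local server from Python too; a minimal sketch, again assuming the requests library:
import requests

# The local Cog container exposes the same prediction endpoint used above.
resp = requests.post(
    "http://localhost:5000/predictions",
    json={
        "input": {
            "image": "https://replicate.delivery/pbxt/M9qceG4RL6Ov5dpJwPv76d6L1UoNtMAfoGq9OgN6dkp4IJJf/ai2d_test.jpg",
            "top_p": 0.95,
            "prompt": "What happens when you take out white cat from this chain?",
            "max_tokens": 512,
            "temperature": 0.7,
        }
    },
)
print(resp.json()["output"])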
To learn more, take a look at the Cog documentation.
Each run costs approximately $0.0067.
Output
{
  "completed_at": "2024-12-17T04:14:06.219985Z",
  "created_at": "2024-12-17T04:14:03.519000Z",
  "data_removed": false,
  "error": null,
  "id": "sfnwfw5gzxrm80cktb9bwq1kt4",
  "input": {
    "image": "https://replicate.delivery/pbxt/M9qceG4RL6Ov5dpJwPv76d6L1UoNtMAfoGq9OgN6dkp4IJJf/ai2d_test.jpg",
    "top_p": 0.95,
    "prompt": "What happens when you take out white cat from this chain?",
    "max_tokens": 512,
    "temperature": 0.7
  },
  "logs": null,
  "metrics": {
    "predict_time": 2.695380626,
    "total_time": 2.700985
  },
  "output": [
    "The",
    " white",
    " cat",
    " is",
    " not",
    " present",
    " in",
    " the",
    " food",
    " web",
    ".",
    " The",
    " correct",
    " term",
    " for",
    " a",
    " wild",
    " animal",
    " would",
    " be",
    " \"",
    "wild",
    "cat",
    ".\"",
    " \n\n",
    "If",
    " we",
    " were",
    " to",
    " remove",
    " a",
    " wild",
    "cat",
    " from",
    " the",
    " ecosystem",
    ",",
    " it",
    " would",
    " no",
    " longer",
    " be",
    " able",
    " to",
    " hunt",
    " or",
    " prey",
    " on",
    " other",
    " animals",
    ".",
    " This",
    " could",
    " have",
    " casc",
    "ading",
    " effects",
    " throughout",
    " the",
    " entire",
    " ecosystem",
    ".\n\n",
    "*",
    "Answer",
    "*:",
    " Wild",
    "cat",
    " will",
    " lose",
    " its",
    " source",
    " of",
    " food",
    " and",
    " nutrients",
    ""
  ],
  "started_at": "2024-12-17T04:14:03.524605Z",
  "status": "succeeded",
  "urls": {
    "stream": "https://stream-b.svc.ric1.c.replicate.net/v1/streams/hjzy54d45y2mwusy5us6dtpiznccxgza5sn5olufj4kfi7xt2pda",
    "get": "https://api.replicate.com/v1/predictions/sfnwfw5gzxrm80cktb9bwq1kt4",
    "cancel": "https://api.replicate.com/v1/predictions/sfnwfw5gzxrm80cktb9bwq1kt4/cancel"
  },
  "version": "d4e81fc1472556464f1ee5cea4de177b2fe95a6eaadb5f63335df1ba654597af"
}
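Note that output is the list of streamed text chunks in generation order, so the full response is just their concatenation. A quick sketch, assuming prediction holds the JSON object above:
# Join the streamed chunks into the complete response text.
text = "".join(prediction["output"])
print(text)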