Path to JSONL file with text data. Each line should be a JSON object with a 'text' field.
Batch size to use when processing text data.
Default: 32
Whether to normalize embeddings.
Default: true
Run this model in Node.js with one line of code:
npm install replicate
First, set the REPLICATE_API_TOKEN environment variable:
export REPLICATE_API_TOKEN=<paste-your-token-here>
Find your API token in your account settings.
import Replicate from "replicate"; const replicate = new Replicate({ auth: process.env.REPLICATE_API_TOKEN, });
Run nateraw/bge-large-en-batched using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
const output = await replicate.run( "nateraw/bge-large-en-batched:2d008f128c3dad6834ec4323a96766211754f40602e669ad7fdfd6468644c70a", { input: { path: "https://replicate.delivery/pbxt/JdHX0Er26JMM85ryJIH71JG5WVTeehTTipBqsmV2f5XdaS4V/samsum.txt", batch_size: 128, normalize_embeddings: true } } ); // To access the file URL: console.log(output.url()); //=> "https://replicate.delivery/.../embeddings.npy" // To write the file to disk: fs.writeFile("embeddings.npy", output);
To learn more, take a look at the guide on getting started with Node.js.
pip install replicate
import replicate
output = replicate.run( "nateraw/bge-large-en-batched:2d008f128c3dad6834ec4323a96766211754f40602e669ad7fdfd6468644c70a", input={ "path": "https://replicate.delivery/pbxt/JdHX0Er26JMM85ryJIH71JG5WVTeehTTipBqsmV2f5XdaS4V/samsum.txt", "batch_size": 128, "normalize_embeddings": True } ) print(output)
To learn more, take a look at the guide on getting started with Python.
curl -s -X POST \ -H "Authorization: Bearer $REPLICATE_API_TOKEN" \ -H "Content-Type: application/json" \ -H "Prefer: wait" \ -d $'{ "version": "nateraw/bge-large-en-batched:2d008f128c3dad6834ec4323a96766211754f40602e669ad7fdfd6468644c70a", "input": { "path": "https://replicate.delivery/pbxt/JdHX0Er26JMM85ryJIH71JG5WVTeehTTipBqsmV2f5XdaS4V/samsum.txt", "batch_size": 128, "normalize_embeddings": true } }' \ https://api.replicate.com/v1/predictions
To learn more, take a look at Replicate’s HTTP API reference docs.
{ "completed_at": "2023-10-03T02:15:41.603267Z", "created_at": "2023-10-03T02:11:30.385163Z", "data_removed": false, "error": null, "id": "43yfqm3bmuiqifdh2cubnkc6fy", "input": { "path": "https://replicate.delivery/pbxt/JdHX0Er26JMM85ryJIH71JG5WVTeehTTipBqsmV2f5XdaS4V/samsum.txt", "batch_size": 128, "normalize_embeddings": true }, "logs": "Downloading data files: 0%| | 0/1 [00:00<?, ?it/s]\nDownloading data files: 100%|██████████| 1/1 [00:00<00:00, 7796.10it/s]\nExtracting data files: 0%| | 0/1 [00:00<?, ?it/s]\nExtracting data files: 100%|██████████| 1/1 [00:00<00:00, 956.95it/s]\nGenerating train split: 0 examples [00:00, ? examples/s]\nGenerating train split: 14732 examples [00:00, 447452.36 examples/s]\nMap: 0%| | 0/14732 [00:00<?, ? examples/s]\nMap: 7%|▋ | 1000/14732 [00:12<02:54, 78.52 examples/s]\nMap: 14%|█▎ | 2000/14732 [00:23<02:30, 84.88 examples/s]\nMap: 20%|██ | 3000/14732 [00:34<02:13, 88.12 examples/s]\nMap: 27%|██▋ | 4000/14732 [00:46<02:01, 88.16 examples/s]\nMap: 34%|███▍ | 5000/14732 [00:57<01:50, 88.05 examples/s]\nMap: 34%|███▍ | 5000/14732 [01:07<01:50, 88.05 examples/s]\nMap: 41%|████ | 6000/14732 [01:08<01:37, 89.76 examples/s]\nMap: 48%|████▊ | 7000/14732 [01:19<01:27, 88.67 examples/s]\nMap: 54%|█████▍ | 8000/14732 [01:30<01:15, 89.13 examples/s]\nMap: 54%|█████▍ | 8000/14732 [01:41<01:15, 89.13 examples/s]\nMap: 61%|██████ | 9000/14732 [01:42<01:04, 88.81 examples/s]\nMap: 68%|██████▊ | 10000/14732 [01:53<00:53, 88.45 examples/s]\nMap: 75%|███████▍ | 11000/14732 [02:04<00:42, 88.79 examples/s]\nMap: 81%|████████▏ | 12000/14732 [02:15<00:30, 88.88 examples/s]\nMap: 88%|████████▊ | 13000/14732 [02:26<00:19, 89.31 examples/s]\nMap: 88%|████████▊ | 13000/14732 [02:37<00:19, 89.31 examples/s]\nMap: 95%|█████████▌| 14000/14732 [02:38<00:08, 89.62 examples/s]\nMap: 100%|██████████| 14732/14732 [02:46<00:00, 88.49 examples/s]\nMap: 100%|██████████| 14732/14732 [02:46<00:00, 88.39 examples/s]", "metrics": { "predict_time": 172.671675, 
"total_time": 251.218104 }, "output": "https://pbxt.replicate.delivery/feWKGHryXxng70oUvxGekSYJCj11wd1ZJFCCQrL79qfu0KpGB/embeddings.npy", "started_at": "2023-10-03T02:12:48.931592Z", "status": "succeeded", "urls": { "get": "https://api.replicate.com/v1/predictions/43yfqm3bmuiqifdh2cubnkc6fy", "cancel": "https://api.replicate.com/v1/predictions/43yfqm3bmuiqifdh2cubnkc6fy/cancel" }, "version": "2d008f128c3dad6834ec4323a96766211754f40602e669ad7fdfd6468644c70a" }
Downloading data files: 0%| | 0/1 [00:00<?, ?it/s] Downloading data files: 100%|██████████| 1/1 [00:00<00:00, 7796.10it/s] Extracting data files: 0%| | 0/1 [00:00<?, ?it/s] Extracting data files: 100%|██████████| 1/1 [00:00<00:00, 956.95it/s] Generating train split: 0 examples [00:00, ? examples/s] Generating train split: 14732 examples [00:00, 447452.36 examples/s] Map: 0%| | 0/14732 [00:00<?, ? examples/s] Map: 7%|▋ | 1000/14732 [00:12<02:54, 78.52 examples/s] Map: 14%|█▎ | 2000/14732 [00:23<02:30, 84.88 examples/s] Map: 20%|██ | 3000/14732 [00:34<02:13, 88.12 examples/s] Map: 27%|██▋ | 4000/14732 [00:46<02:01, 88.16 examples/s] Map: 34%|███▍ | 5000/14732 [00:57<01:50, 88.05 examples/s] Map: 34%|███▍ | 5000/14732 [01:07<01:50, 88.05 examples/s] Map: 41%|████ | 6000/14732 [01:08<01:37, 89.76 examples/s] Map: 48%|████▊ | 7000/14732 [01:19<01:27, 88.67 examples/s] Map: 54%|█████▍ | 8000/14732 [01:30<01:15, 89.13 examples/s] Map: 54%|█████▍ | 8000/14732 [01:41<01:15, 89.13 examples/s] Map: 61%|██████ | 9000/14732 [01:42<01:04, 88.81 examples/s] Map: 68%|██████▊ | 10000/14732 [01:53<00:53, 88.45 examples/s] Map: 75%|███████▍ | 11000/14732 [02:04<00:42, 88.79 examples/s] Map: 81%|████████▏ | 12000/14732 [02:15<00:30, 88.88 examples/s] Map: 88%|████████▊ | 13000/14732 [02:26<00:19, 89.31 examples/s] Map: 88%|████████▊ | 13000/14732 [02:37<00:19, 89.31 examples/s] Map: 95%|█████████▌| 14000/14732 [02:38<00:08, 89.62 examples/s] Map: 100%|██████████| 14732/14732 [02:46<00:00, 88.49 examples/s] Map: 100%|██████████| 14732/14732 [02:46<00:00, 88.39 examples/s]
This model runs on Nvidia L40S GPU hardware. We don't yet have enough runs of this model to provide performance information.
This model doesn't have a readme.
This model is cold. You'll get a fast response if the model is warm and already running, and a slower response if the model is cold and starting up.
Choose a file from your machine
Hint: you can also drag files onto the input