Create
Creates a model response for the given chat conversation.

client.chat.completions.create(body: CompletionCreateParams, options?: RequestOptions): CompletionCreateResponse | Stream<ChatCompletionChunk>

POST /chat/completions
import Gradient from '@digitalocean/gradient';

const client = new Gradient({
  accessToken: 'My Access Token',
});

const completion = await client.chat.completions.create({
  messages: [{ content: 'string', role: 'system' }],
  model: 'llama3-8b-instruct',
});

console.log(completion.id);
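Because the return type includes Stream<ChatCompletionChunk>, the same method can also stream the response as it is generated. The sketch below is a minimal example, assuming the SDK follows the usual pattern for Stainless-generated clients: a stream: true flag in the request body switches the return value to an async-iterable stream, and each chunk exposes an OpenAI-style choices[0].delta.content field. Treat the flag name and the chunk field path as assumptions, not confirmed API.

import Gradient from '@digitalocean/gradient';

const client = new Gradient({
  accessToken: 'My Access Token',
});

// Assumption: `stream: true` switches the return type to Stream<ChatCompletionChunk>.
const stream = await client.chat.completions.create({
  messages: [{ content: 'Write a haiku about the ocean.', role: 'user' }],
  model: 'llama3-8b-instruct',
  stream: true,
});

for await (const chunk of stream) {
  // Assumption: each chunk carries an incremental delta under choices[0].delta.content.
  process.stdout.write(chunk.choices[0]?.delta?.content ?? '');
}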
Returns Examples
{
  "id": "id",
  "choices": [
    {
      "finish_reason": "stop",
      "index": 0,
      "logprobs": {
        "content": [
          {
            "token": "token",
            "bytes": [0],
            "logprob": 0,
            "top_logprobs": [
              {
                "token": "token",
                "bytes": [0],
                "logprob": 0
              }
            ]
          }
        ],
        "refusal": [
          {
            "token": "token",
            "bytes": [0],
            "logprob": 0,
            "top_logprobs": [
              {
                "token": "token",
                "bytes": [0],
                "logprob": 0
              }
            ]
          }
        ]
      },
      "message": {
        "content": "content",
        "reasoning_content": "reasoning_content",
        "refusal": "refusal",
        "role": "assistant",
        "tool_calls": [
          {
            "id": "id",
            "function": {
              "arguments": "arguments",
              "name": "name"
            },
            "type": "function"
          }
        ]
      }
    }
  ],
  "created": 0,
  "model": "model",
  "object": "chat.completion",
  "usage": {
    "completion_tokens": 0,
    "prompt_tokens": 0,
    "total_tokens": 0
  }
}
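The example payload above maps directly onto the fields you read in code. A minimal sketch, assuming the non-streaming response exposes choices, message, tool_calls, and usage exactly as shown in the example:

import Gradient from '@digitalocean/gradient';

const client = new Gradient({
  accessToken: 'My Access Token',
});

const completion = await client.chat.completions.create({
  messages: [{ content: 'What is the capital of France?', role: 'user' }],
  model: 'llama3-8b-instruct',
});

const choice = completion.choices[0];

// Plain-text reply; may be empty if the model refused or returned tool calls instead.
console.log(choice.message.content);

// Tool calls, if the model chose to invoke a function.
for (const call of choice.message.tool_calls ?? []) {
  console.log(`tool: ${call.function.name}`, call.function.arguments);
}

// Token accounting for the request.
console.log(`prompt=${completion.usage?.prompt_tokens}, completion=${completion.usage?.completion_tokens}`);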