Create
Creates a model response for the given chat conversation.
POST /chat/completions
Body Parameters
The request body accepts the fields shown in the request example below: messages, model, n, temperature, top_p, and user.
Returns
A chat completion object containing the generated choices and token usage, as shown under Returns Examples below.
curl $GRADIENT_INFERENCE_ENDPOINT/v1/chat/completions \
  -H 'Content-Type: application/json' \
  -H "Authorization: Bearer $DIGITALOCEAN_ACCESS_TOKEN" \
  -d '{
    "messages": [
      {
        "content": "string",
        "role": "system"
      }
    ],
    "model": "llama3-8b-instruct",
    "n": 1,
    "temperature": 1,
    "top_p": 1,
    "user": "user-1234"
  }'
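The same request can also be issued from Python. The sketch below is a minimal equivalent of the curl example, assuming the requests library is installed and that GRADIENT_INFERENCE_ENDPOINT and DIGITALOCEAN_ACCESS_TOKEN are set in the environment; the message contents are illustrative placeholders, not values required by the API.

# Minimal sketch of the request above in Python. Assumes the requests
# library is installed and that GRADIENT_INFERENCE_ENDPOINT and
# DIGITALOCEAN_ACCESS_TOKEN are set in the environment, as in the curl
# example; message contents are illustrative placeholders.
import os

import requests

endpoint = os.environ["GRADIENT_INFERENCE_ENDPOINT"]
token = os.environ["DIGITALOCEAN_ACCESS_TOKEN"]

payload = {
    "messages": [
        {"content": "You are a helpful assistant.", "role": "system"},
        {"content": "Say hello.", "role": "user"},
    ],
    "model": "llama3-8b-instruct",
    "n": 1,
    "temperature": 1,
    "top_p": 1,
    "user": "user-1234",
}

response = requests.post(
    f"{endpoint}/v1/chat/completions",
    headers={
        "Content-Type": "application/json",
        "Authorization": f"Bearer {token}",
    },
    json=payload,
    timeout=30,
)
response.raise_for_status()
print(response.json()["choices"][0]["message"]["content"])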
Returns Examples
{
  "id": "id",
  "choices": [
    {
      "finish_reason": "stop",
      "index": 0,
      "logprobs": {
        "content": [
          {
            "token": "token",
            "bytes": [
              0
            ],
            "logprob": 0,
            "top_logprobs": [
              {
                "token": "token",
                "bytes": [
                  0
                ],
                "logprob": 0
              }
            ]
          }
        ],
        "refusal": [
          {
            "token": "token",
            "bytes": [
              0
            ],
            "logprob": 0,
            "top_logprobs": [
              {
                "token": "token",
                "bytes": [
                  0
                ],
                "logprob": 0
              }
            ]
          }
        ]
      },
      "message": {
        "content": "content",
        "refusal": "refusal",
        "role": "assistant",
        "tool_calls": [
          {
            "id": "id",
            "function": {
              "arguments": "arguments",
              "name": "name"
            },
            "type": "function"
          }
        ]
      }
    }
  ],
  "created": 0,
  "model": "model",
  "object": "chat.completion",
  "usage": {
    "completion_tokens": 0,
    "prompt_tokens": 0,
    "total_tokens": 0
  }
}
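As a usage sketch, the fields shown in the response example above can be read from Python as follows; resp is assumed to hold the decoded JSON body of a successful call (for example, response.json() from the request sketch earlier).

# Sketch of reading the fields shown in the response example above.
# resp is assumed to be the decoded JSON body of a successful call.
def summarize_completion(resp: dict) -> None:
    for choice in resp["choices"]:
        message = choice["message"]
        print(f"choice {choice['index']} finished: {choice['finish_reason']}")
        print(f"assistant: {message['content']}")
        # tool_calls is present when the model decided to call a function
        for call in message.get("tool_calls") or []:
            fn = call["function"]
            print(f"tool call {call['id']}: {fn['name']}({fn['arguments']})")

    usage = resp["usage"]
    print(
        f"tokens: {usage['prompt_tokens']} prompt + "
        f"{usage['completion_tokens']} completion = {usage['total_tokens']} total"
    )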