POST Chat Completions
{{baseUrl}}/models/v2/openai/v1/chat/completions
HEADERS

Authorization
BODY json

{
  "model": "",
  "messages": [
    {
      "role": "",
      "content": ""
    }
  ],
  "max_tokens": 0,
  "temperature": "",
  "stream": false
}
Examples
REQUEST

CURL *hnd = curl_easy_init();

curl_easy_setopt(hnd, CURLOPT_CUSTOMREQUEST, "POST");
curl_easy_setopt(hnd, CURLOPT_URL, "{{baseUrl}}/models/v2/openai/v1/chat/completions");

struct curl_slist *headers = NULL;
headers = curl_slist_append(headers, "authorization: ");
headers = curl_slist_append(headers, "content-type: application/json");
curl_easy_setopt(hnd, CURLOPT_HTTPHEADER, headers);

curl_easy_setopt(hnd, CURLOPT_POSTFIELDS, "{\n  \"model\": \"\",\n  \"messages\": [\n    {\n      \"role\": \"\",\n      \"content\": \"\"\n    }\n  ],\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}");

CURLcode ret = curl_easy_perform(hnd);
(require '[clj-http.client :as client])

(client/post "{{baseUrl}}/models/v2/openai/v1/chat/completions" {:headers {:authorization ""}
                                                                                 :content-type :json
                                                                                 :form-params {:model ""
                                                                                               :messages [{:role ""
                                                                                                           :content ""}]
                                                                                               :max_tokens 0
                                                                                               :temperature ""
                                                                                               :stream false}})
require "http/client"

url = "{{baseUrl}}/models/v2/openai/v1/chat/completions"
# HTTP::Headers literal entries must be comma-separated (newlines alone are a syntax error).
headers = HTTP::Headers{
  "authorization" => "",
  "content-type"  => "application/json",
}
reqBody = "{\n  \"model\": \"\",\n  \"messages\": [\n    {\n      \"role\": \"\",\n      \"content\": \"\"\n    }\n  ],\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}"

response = HTTP::Client.post url, headers: headers, body: reqBody
puts response.body
using System.Net.Http.Headers;
var client = new HttpClient();
var request = new HttpRequestMessage
{
    Method = HttpMethod.Post,
    RequestUri = new Uri("{{baseUrl}}/models/v2/openai/v1/chat/completions"),
    Headers =
    {
        { "authorization", "" },
    },
    Content = new StringContent("{\n  \"model\": \"\",\n  \"messages\": [\n    {\n      \"role\": \"\",\n      \"content\": \"\"\n    }\n  ],\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}")
    {
        Headers =
        {
            ContentType = new MediaTypeHeaderValue("application/json")
        }
    }
};
using (var response = await client.SendAsync(request))
{
    response.EnsureSuccessStatusCode();
    var body = await response.Content.ReadAsStringAsync();
    Console.WriteLine(body);
}
var client = new RestClient("{{baseUrl}}/models/v2/openai/v1/chat/completions");
var request = new RestRequest("", Method.Post);
request.AddHeader("authorization", "");
request.AddHeader("content-type", "application/json");
request.AddParameter("application/json", "{\n  \"model\": \"\",\n  \"messages\": [\n    {\n      \"role\": \"\",\n      \"content\": \"\"\n    }\n  ],\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}", ParameterType.RequestBody);
var response = client.Execute(request);
package main

import (
	"fmt"
	"strings"
	"net/http"
	"io"
)

func main() {

	url := "{{baseUrl}}/models/v2/openai/v1/chat/completions"

	payload := strings.NewReader("{\n  \"model\": \"\",\n  \"messages\": [\n    {\n      \"role\": \"\",\n      \"content\": \"\"\n    }\n  ],\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}")

	req, _ := http.NewRequest("POST", url, payload)

	req.Header.Add("authorization", "")
	req.Header.Add("content-type", "application/json")

	res, _ := http.DefaultClient.Do(req)

	defer res.Body.Close()
	body, _ := io.ReadAll(res.Body)

	fmt.Println(res)
	fmt.Println(string(body))

}
POST /baseUrl/models/v2/openai/v1/chat/completions HTTP/1.1
Authorization: 
Content-Type: application/json
Host: example.com
Content-Length: 147

{
  "model": "",
  "messages": [
    {
      "role": "",
      "content": ""
    }
  ],
  "max_tokens": 0,
  "temperature": "",
  "stream": false
}
AsyncHttpClient client = new DefaultAsyncHttpClient();
client.prepare("POST", "{{baseUrl}}/models/v2/openai/v1/chat/completions")
  .setHeader("authorization", "")
  .setHeader("content-type", "application/json")
  .setBody("{\n  \"model\": \"\",\n  \"messages\": [\n    {\n      \"role\": \"\",\n      \"content\": \"\"\n    }\n  ],\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}")
  .execute()
  .toCompletableFuture()
  .thenAccept(System.out::println)
  .join();

client.close();
// java.net.http (Java 11+) example.
HttpRequest request = HttpRequest.newBuilder()
    .uri(URI.create("{{baseUrl}}/models/v2/openai/v1/chat/completions"))
    .header("authorization", "")
    .header("content-type", "application/json")
    .method("POST", HttpRequest.BodyPublishers.ofString("{\n  \"model\": \"\",\n  \"messages\": [\n    {\n      \"role\": \"\",\n      \"content\": \"\"\n    }\n  ],\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}"))
    .build();
// BodyHandlers.ofString() yields HttpResponse<String>; the raw type triggers an
// unchecked warning and loses the String type on response.body().
HttpResponse<String> response = HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
System.out.println(response.body());
OkHttpClient client = new OkHttpClient();

MediaType mediaType = MediaType.parse("application/json");
RequestBody body = RequestBody.create(mediaType, "{\n  \"model\": \"\",\n  \"messages\": [\n    {\n      \"role\": \"\",\n      \"content\": \"\"\n    }\n  ],\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}");
Request request = new Request.Builder()
  .url("{{baseUrl}}/models/v2/openai/v1/chat/completions")
  .post(body)
  .addHeader("authorization", "")
  .addHeader("content-type", "application/json")
  .build();

Response response = client.newCall(request).execute();
// Unirest's asString() returns HttpResponse<String>; declare it with the type
// parameter instead of the raw type.
HttpResponse<String> response = Unirest.post("{{baseUrl}}/models/v2/openai/v1/chat/completions")
  .header("authorization", "")
  .header("content-type", "application/json")
  .body("{\n  \"model\": \"\",\n  \"messages\": [\n    {\n      \"role\": \"\",\n      \"content\": \"\"\n    }\n  ],\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}")
  .asString();
const data = JSON.stringify({
  model: '',
  messages: [
    {
      role: '',
      content: ''
    }
  ],
  max_tokens: 0,
  temperature: '',
  stream: false
});

const xhr = new XMLHttpRequest();
xhr.withCredentials = true;

xhr.addEventListener('readystatechange', function () {
  if (this.readyState === this.DONE) {
    console.log(this.responseText);
  }
});

xhr.open('POST', '{{baseUrl}}/models/v2/openai/v1/chat/completions');
xhr.setRequestHeader('authorization', '');
xhr.setRequestHeader('content-type', 'application/json');

xhr.send(data);
import axios from 'axios';

const options = {
  method: 'POST',
  url: '{{baseUrl}}/models/v2/openai/v1/chat/completions',
  headers: {authorization: '', 'content-type': 'application/json'},
  data: {
    model: '',
    messages: [{role: '', content: ''}],
    max_tokens: 0,
    temperature: '',
    stream: false
  }
};

try {
  const { data } = await axios.request(options);
  console.log(data);
} catch (error) {
  console.error(error);
}
const url = '{{baseUrl}}/models/v2/openai/v1/chat/completions';
const options = {
  method: 'POST',
  headers: {authorization: '', 'content-type': 'application/json'},
  body: '{"model":"","messages":[{"role":"","content":""}],"max_tokens":0,"temperature":"","stream":false}'
};

try {
  const response = await fetch(url, options);
  const data = await response.json();
  console.log(data);
} catch (error) {
  console.error(error);
}
const settings = {
  async: true,
  crossDomain: true,
  url: '{{baseUrl}}/models/v2/openai/v1/chat/completions',
  method: 'POST',
  headers: {
    authorization: '',
    'content-type': 'application/json'
  },
  processData: false,
  data: '{\n  "model": "",\n  "messages": [\n    {\n      "role": "",\n      "content": ""\n    }\n  ],\n  "max_tokens": 0,\n  "temperature": "",\n  "stream": false\n}'
};

$.ajax(settings).done(function (response) {
  console.log(response);
});
val client = OkHttpClient()

val mediaType = MediaType.parse("application/json")
val body = RequestBody.create(mediaType, "{\n  \"model\": \"\",\n  \"messages\": [\n    {\n      \"role\": \"\",\n      \"content\": \"\"\n    }\n  ],\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}")
val request = Request.Builder()
  .url("{{baseUrl}}/models/v2/openai/v1/chat/completions")
  .post(body)
  .addHeader("authorization", "")
  .addHeader("content-type", "application/json")
  .build()

val response = client.newCall(request).execute()
const http = require('https');

const options = {
  method: 'POST',
  hostname: 'example.com',
  port: null,
  path: '/baseUrl/models/v2/openai/v1/chat/completions',
  headers: {
    authorization: '',
    'content-type': 'application/json'
  }
};

const req = http.request(options, function (res) {
  const chunks = [];

  res.on('data', function (chunk) {
    chunks.push(chunk);
  });

  res.on('end', function () {
    const body = Buffer.concat(chunks);
    console.log(body.toString());
  });
});

req.write(JSON.stringify({
  model: '',
  messages: [{role: '', content: ''}],
  max_tokens: 0,
  temperature: '',
  stream: false
}));
req.end();
const request = require('request');

const options = {
  method: 'POST',
  url: '{{baseUrl}}/models/v2/openai/v1/chat/completions',
  headers: {authorization: '', 'content-type': 'application/json'},
  body: {
    model: '',
    messages: [{role: '', content: ''}],
    max_tokens: 0,
    temperature: '',
    stream: false
  },
  json: true
};

request(options, function (error, response, body) {
  if (error) throw new Error(error);

  console.log(body);
});
const unirest = require('unirest');

const req = unirest('POST', '{{baseUrl}}/models/v2/openai/v1/chat/completions');

req.headers({
  authorization: '',
  'content-type': 'application/json'
});

req.type('json');
req.send({
  model: '',
  messages: [
    {
      role: '',
      content: ''
    }
  ],
  max_tokens: 0,
  temperature: '',
  stream: false
});

req.end(function (res) {
  if (res.error) throw new Error(res.error);

  console.log(res.body);
});
const axios = require('axios').default;

const options = {
  method: 'POST',
  url: '{{baseUrl}}/models/v2/openai/v1/chat/completions',
  headers: {authorization: '', 'content-type': 'application/json'},
  data: {
    model: '',
    messages: [{role: '', content: ''}],
    max_tokens: 0,
    temperature: '',
    stream: false
  }
};

try {
  const { data } = await axios.request(options);
  console.log(data);
} catch (error) {
  console.error(error);
}
const fetch = require('node-fetch');

const url = '{{baseUrl}}/models/v2/openai/v1/chat/completions';
const options = {
  method: 'POST',
  headers: {authorization: '', 'content-type': 'application/json'},
  body: '{"model":"","messages":[{"role":"","content":""}],"max_tokens":0,"temperature":"","stream":false}'
};

try {
  const response = await fetch(url, options);
  const data = await response.json();
  console.log(data);
} catch (error) {
  console.error(error);
}
#import <Foundation/Foundation.h>

NSDictionary *headers = @{ @"authorization": @"",
                           @"content-type": @"application/json" };
NSDictionary *parameters = @{ @"model": @"",
                              @"messages": @[ @{ @"role": @"", @"content": @"" } ],
                              @"max_tokens": @0,
                              @"temperature": @"",
                              @"stream": @NO };

NSData *postData = [NSJSONSerialization dataWithJSONObject:parameters options:0 error:nil];

NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:[NSURL URLWithString:@"{{baseUrl}}/models/v2/openai/v1/chat/completions"]
                                                       cachePolicy:NSURLRequestUseProtocolCachePolicy
                                                   timeoutInterval:10.0];
[request setHTTPMethod:@"POST"];
[request setAllHTTPHeaderFields:headers];
[request setHTTPBody:postData];

NSURLSession *session = [NSURLSession sharedSession];
NSURLSessionDataTask *dataTask = [session dataTaskWithRequest:request
                                            completionHandler:^(NSData *data, NSURLResponse *response, NSError *error) {
                                                if (error) {
                                                    NSLog(@"%@", error);
                                                } else {
                                                    NSHTTPURLResponse *httpResponse = (NSHTTPURLResponse *) response;
                                                    NSLog(@"%@", httpResponse);
                                                }
                                            }];
[dataTask resume];
open Cohttp_lwt_unix
open Cohttp
open Lwt

let uri = Uri.of_string "{{baseUrl}}/models/v2/openai/v1/chat/completions" in
let headers = Header.add_list (Header.init ()) [
  ("authorization", "");
  ("content-type", "application/json");
] in
(* Cohttp_lwt_body was removed in cohttp >= 0.99; Cohttp_lwt.Body is the current module. *)
let body = Cohttp_lwt.Body.of_string "{\n  \"model\": \"\",\n  \"messages\": [\n    {\n      \"role\": \"\",\n      \"content\": \"\"\n    }\n  ],\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}" in

Client.call ~headers ~body `POST uri
>>= fun (res, body_stream) ->
  (* Do stuff with the result *)
 "{{baseUrl}}/models/v2/openai/v1/chat/completions",
  CURLOPT_RETURNTRANSFER => true,
  CURLOPT_ENCODING => "",
  CURLOPT_MAXREDIRS => 10,
  CURLOPT_TIMEOUT => 30,
  CURLOPT_HTTP_VERSION => CURL_HTTP_VERSION_1_1,
  CURLOPT_CUSTOMREQUEST => "POST",
  CURLOPT_POSTFIELDS => json_encode([
    'model' => '',
    'messages' => [
        [
                'role' => '',
                'content' => ''
        ]
    ],
    'max_tokens' => 0,
    'temperature' => '',
    'stream' => null
  ]),
  CURLOPT_HTTPHEADER => [
    "authorization: ",
    "content-type: application/json"
  ],
]);

$response = curl_exec($curl);
$err = curl_error($curl);

curl_close($curl);

if ($err) {
  echo "cURL Error #:" . $err;
} else {
  echo $response;
}
$client = new \GuzzleHttp\Client();

$response = $client->request('POST', '{{baseUrl}}/models/v2/openai/v1/chat/completions', [
  'body' => '{
  "model": "",
  "messages": [
    {
      "role": "",
      "content": ""
    }
  ],
  "max_tokens": 0,
  "temperature": "",
  "stream": false
}',
  'headers' => [
    'authorization' => '',
    'content-type' => 'application/json',
  ],
]);

echo $response->getBody();
$request = new HttpRequest();
$request->setUrl('{{baseUrl}}/models/v2/openai/v1/chat/completions');
$request->setMethod(HTTP_METH_POST);

$request->setHeaders([
  'authorization' => '',
  'content-type' => 'application/json'
]);

$request->setContentType('application/json');
$request->setBody(json_encode([
  'model' => '',
  'messages' => [
    [
        'role' => '',
        'content' => ''
    ]
  ],
  'max_tokens' => 0,
  'temperature' => '',
  // Must encode as JSON false (not null) to match the documented request body.
  'stream' => false
]));

try {
  $response = $request->send();

  echo $response->getBody();
} catch (HttpException $ex) {
  echo $ex;
}
$client = new http\Client;
$request = new http\Client\Request;

$body = new http\Message\Body;
$body->append(json_encode([
  'model' => '',
  'messages' => [
    [
        'role' => '',
        'content' => ''
    ]
  ],
  'max_tokens' => 0,
  'temperature' => '',
  // Must encode as JSON false (not null) to match the documented request body.
  'stream' => false
]));
$request->setRequestUrl('{{baseUrl}}/models/v2/openai/v1/chat/completions');
$request->setRequestMethod('POST');
$request->setBody($body);

$request->setHeaders([
  'authorization' => '',
  'content-type' => 'application/json'
]);

$client->enqueue($request)->send();
$response = $client->getResponse();

echo $response->getBody();
$headers=@{}
$headers.Add("authorization", "")
$headers.Add("content-type", "application/json")
$response = Invoke-WebRequest -Uri '{{baseUrl}}/models/v2/openai/v1/chat/completions' -Method POST -Headers $headers -ContentType 'application/json' -Body '{
  "model": "",
  "messages": [
    {
      "role": "",
      "content": ""
    }
  ],
  "max_tokens": 0,
  "temperature": "",
  "stream": false
}'
$headers=@{}
$headers.Add("authorization", "")
$headers.Add("content-type", "application/json")
$response = Invoke-RestMethod -Uri '{{baseUrl}}/models/v2/openai/v1/chat/completions' -Method POST -Headers $headers -ContentType 'application/json' -Body '{
  "model": "",
  "messages": [
    {
      "role": "",
      "content": ""
    }
  ],
  "max_tokens": 0,
  "temperature": "",
  "stream": false
}'
import http.client

conn = http.client.HTTPSConnection("example.com")

payload = "{\n  \"model\": \"\",\n  \"messages\": [\n    {\n      \"role\": \"\",\n      \"content\": \"\"\n    }\n  ],\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}"

headers = {
    'authorization': "",
    'content-type': "application/json"
}

conn.request("POST", "/baseUrl/models/v2/openai/v1/chat/completions", payload, headers)

res = conn.getresponse()
data = res.read()

print(data.decode("utf-8"))
import requests

url = "{{baseUrl}}/models/v2/openai/v1/chat/completions"

payload = {
    "model": "",
    "messages": [
        {
            "role": "",
            "content": ""
        }
    ],
    "max_tokens": 0,
    "temperature": "",
    "stream": False
}
headers = {
    "authorization": "",
    "content-type": "application/json"
}

response = requests.post(url, json=payload, headers=headers)

print(response.json())
library(httr)

url <- "{{baseUrl}}/models/v2/openai/v1/chat/completions"

payload <- "{\n  \"model\": \"\",\n  \"messages\": [\n    {\n      \"role\": \"\",\n      \"content\": \"\"\n    }\n  ],\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}"

encode <- "json"

response <- VERB("POST", url, body = payload, add_headers('authorization' = ''), content_type("application/json"), encode = encode)

content(response, "text")
require 'uri'
require 'net/http'

url = URI("{{baseUrl}}/models/v2/openai/v1/chat/completions")

http = Net::HTTP.new(url.host, url.port)
http.use_ssl = true

request = Net::HTTP::Post.new(url)
request["authorization"] = ''
request["content-type"] = 'application/json'
request.body = "{\n  \"model\": \"\",\n  \"messages\": [\n    {\n      \"role\": \"\",\n      \"content\": \"\"\n    }\n  ],\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}"

response = http.request(request)
puts response.read_body
require 'faraday'

conn = Faraday.new(
  url: 'https://example.com',
  headers: {'Content-Type' => 'application/json'}
)

response = conn.post('/baseUrl/models/v2/openai/v1/chat/completions') do |req|
  req.headers['authorization'] = ''
  req.body = "{\n  \"model\": \"\",\n  \"messages\": [\n    {\n      \"role\": \"\",\n      \"content\": \"\"\n    }\n  ],\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}"
end

puts response.status
puts response.body
use serde_json::json;
use reqwest;

#[tokio::main]
pub async fn main() {
    let url = "{{baseUrl}}/models/v2/openai/v1/chat/completions";

    let payload = json!({
        "model": "",
        "messages": (
            json!({
                "role": "",
                "content": ""
            })
        ),
        "max_tokens": 0,
        "temperature": "",
        "stream": false
    });

    let mut headers = reqwest::header::HeaderMap::new();
    headers.insert("authorization", "".parse().unwrap());
    headers.insert("content-type", "application/json".parse().unwrap());

    let client = reqwest::Client::new();
    let response = client.post(url)
        .headers(headers)
        .json(&payload)
        .send()
        .await;

    let results = response.unwrap()
        .json::()
        .await
        .unwrap();

    dbg!(results);
}
curl --request POST \
  --url {{baseUrl}}/models/v2/openai/v1/chat/completions \
  --header 'authorization: ' \
  --header 'content-type: application/json' \
  --data '{
  "model": "",
  "messages": [
    {
      "role": "",
      "content": ""
    }
  ],
  "max_tokens": 0,
  "temperature": "",
  "stream": false
}'
echo '{
  "model": "",
  "messages": [
    {
      "role": "",
      "content": ""
    }
  ],
  "max_tokens": 0,
  "temperature": "",
  "stream": false
}' |  \
  http POST {{baseUrl}}/models/v2/openai/v1/chat/completions \
  authorization:'' \
  content-type:application/json
# Single-quoted '\n' sequences are passed through literally and produce invalid
# JSON; send a compact one-line JSON body instead.
wget --quiet \
  --method POST \
  --header 'authorization: ' \
  --header 'content-type: application/json' \
  --body-data '{"model":"","messages":[{"role":"","content":""}],"max_tokens":0,"temperature":"","stream":false}' \
  --output-document \
  - {{baseUrl}}/models/v2/openai/v1/chat/completions
import Foundation

let headers = [
  "authorization": "",
  "content-type": "application/json"
]
let parameters = [
  "model": "",
  "messages": [
    [
      "role": "",
      "content": ""
    ]
  ],
  "max_tokens": 0,
  "temperature": "",
  "stream": false
] as [String : Any]

// JSONSerialization.data(withJSONObject:options:) throws, so the call must be
// marked with try; `parameters` is a valid JSON object, so try! cannot trap here.
let postData = try! JSONSerialization.data(withJSONObject: parameters, options: [])

let request = NSMutableURLRequest(url: NSURL(string: "{{baseUrl}}/models/v2/openai/v1/chat/completions")! as URL,
                                        cachePolicy: .useProtocolCachePolicy,
                                    timeoutInterval: 10.0)
request.httpMethod = "POST"
request.allHTTPHeaderFields = headers
request.httpBody = postData as Data

let session = URLSession.shared
let dataTask = session.dataTask(with: request as URLRequest, completionHandler: { (data, response, error) -> Void in
  if (error != nil) {
    print(error as Any)
  } else {
    let httpResponse = response as? HTTPURLResponse
    print(httpResponse as Any)
  }
})

dataTask.resume()
POST Completions
{{baseUrl}}/models/v2/openai/v1/completions
HEADERS

Authorization
BODY json

{
  "model": "",
  "prompt": "",
  "max_tokens": 0,
  "temperature": "",
  "stream": false
}
Examples
REQUEST

CURL *hnd = curl_easy_init();

curl_easy_setopt(hnd, CURLOPT_CUSTOMREQUEST, "POST");
curl_easy_setopt(hnd, CURLOPT_URL, "{{baseUrl}}/models/v2/openai/v1/completions");

struct curl_slist *headers = NULL;
headers = curl_slist_append(headers, "authorization: ");
headers = curl_slist_append(headers, "content-type: application/json");
curl_easy_setopt(hnd, CURLOPT_HTTPHEADER, headers);

curl_easy_setopt(hnd, CURLOPT_POSTFIELDS, "{\n  \"model\": \"\",\n  \"prompt\": \"\",\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}");

CURLcode ret = curl_easy_perform(hnd);
(require '[clj-http.client :as client])

(client/post "{{baseUrl}}/models/v2/openai/v1/completions" {:headers {:authorization ""}
                                                                            :content-type :json
                                                                            :form-params {:model ""
                                                                                          :prompt ""
                                                                                          :max_tokens 0
                                                                                          :temperature ""
                                                                                          :stream false}})
require "http/client"

url = "{{baseUrl}}/models/v2/openai/v1/completions"
# HTTP::Headers literal entries must be comma-separated (newlines alone are a syntax error).
headers = HTTP::Headers{
  "authorization" => "",
  "content-type"  => "application/json",
}
reqBody = "{\n  \"model\": \"\",\n  \"prompt\": \"\",\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}"

response = HTTP::Client.post url, headers: headers, body: reqBody
puts response.body
using System.Net.Http.Headers;
var client = new HttpClient();
var request = new HttpRequestMessage
{
    Method = HttpMethod.Post,
    RequestUri = new Uri("{{baseUrl}}/models/v2/openai/v1/completions"),
    Headers =
    {
        { "authorization", "" },
    },
    Content = new StringContent("{\n  \"model\": \"\",\n  \"prompt\": \"\",\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}")
    {
        Headers =
        {
            ContentType = new MediaTypeHeaderValue("application/json")
        }
    }
};
using (var response = await client.SendAsync(request))
{
    response.EnsureSuccessStatusCode();
    var body = await response.Content.ReadAsStringAsync();
    Console.WriteLine(body);
}
var client = new RestClient("{{baseUrl}}/models/v2/openai/v1/completions");
var request = new RestRequest("", Method.Post);
request.AddHeader("authorization", "");
request.AddHeader("content-type", "application/json");
request.AddParameter("application/json", "{\n  \"model\": \"\",\n  \"prompt\": \"\",\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}", ParameterType.RequestBody);
var response = client.Execute(request);
package main

import (
	"fmt"
	"strings"
	"net/http"
	"io"
)

func main() {

	url := "{{baseUrl}}/models/v2/openai/v1/completions"

	payload := strings.NewReader("{\n  \"model\": \"\",\n  \"prompt\": \"\",\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}")

	req, _ := http.NewRequest("POST", url, payload)

	req.Header.Add("authorization", "")
	req.Header.Add("content-type", "application/json")

	res, _ := http.DefaultClient.Do(req)

	defer res.Body.Close()
	body, _ := io.ReadAll(res.Body)

	fmt.Println(res)
	fmt.Println(string(body))

}
POST /baseUrl/models/v2/openai/v1/completions HTTP/1.1
Authorization: 
Content-Type: application/json
Host: example.com
Content-Length: 92

{
  "model": "",
  "prompt": "",
  "max_tokens": 0,
  "temperature": "",
  "stream": false
}
AsyncHttpClient client = new DefaultAsyncHttpClient();
client.prepare("POST", "{{baseUrl}}/models/v2/openai/v1/completions")
  .setHeader("authorization", "")
  .setHeader("content-type", "application/json")
  .setBody("{\n  \"model\": \"\",\n  \"prompt\": \"\",\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}")
  .execute()
  .toCompletableFuture()
  .thenAccept(System.out::println)
  .join();

client.close();
// java.net.http (Java 11+) example.
HttpRequest request = HttpRequest.newBuilder()
    .uri(URI.create("{{baseUrl}}/models/v2/openai/v1/completions"))
    .header("authorization", "")
    .header("content-type", "application/json")
    .method("POST", HttpRequest.BodyPublishers.ofString("{\n  \"model\": \"\",\n  \"prompt\": \"\",\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}"))
    .build();
// BodyHandlers.ofString() yields HttpResponse<String>; the raw type triggers an
// unchecked warning and loses the String type on response.body().
HttpResponse<String> response = HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
System.out.println(response.body());
OkHttpClient client = new OkHttpClient();

MediaType mediaType = MediaType.parse("application/json");
RequestBody body = RequestBody.create(mediaType, "{\n  \"model\": \"\",\n  \"prompt\": \"\",\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}");
Request request = new Request.Builder()
  .url("{{baseUrl}}/models/v2/openai/v1/completions")
  .post(body)
  .addHeader("authorization", "")
  .addHeader("content-type", "application/json")
  .build();

Response response = client.newCall(request).execute();
// Unirest's asString() returns HttpResponse<String>; declare it with the type
// parameter instead of the raw type.
HttpResponse<String> response = Unirest.post("{{baseUrl}}/models/v2/openai/v1/completions")
  .header("authorization", "")
  .header("content-type", "application/json")
  .body("{\n  \"model\": \"\",\n  \"prompt\": \"\",\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}")
  .asString();
const data = JSON.stringify({
  model: '',
  prompt: '',
  max_tokens: 0,
  temperature: '',
  stream: false
});

const xhr = new XMLHttpRequest();
xhr.withCredentials = true;

xhr.addEventListener('readystatechange', function () {
  if (this.readyState === this.DONE) {
    console.log(this.responseText);
  }
});

xhr.open('POST', '{{baseUrl}}/models/v2/openai/v1/completions');
xhr.setRequestHeader('authorization', '');
xhr.setRequestHeader('content-type', 'application/json');

xhr.send(data);
import axios from 'axios';

const options = {
  method: 'POST',
  url: '{{baseUrl}}/models/v2/openai/v1/completions',
  headers: {authorization: '', 'content-type': 'application/json'},
  data: {model: '', prompt: '', max_tokens: 0, temperature: '', stream: false}
};

try {
  const { data } = await axios.request(options);
  console.log(data);
} catch (error) {
  console.error(error);
}
const url = '{{baseUrl}}/models/v2/openai/v1/completions';
const options = {
  method: 'POST',
  headers: {authorization: '', 'content-type': 'application/json'},
  body: '{"model":"","prompt":"","max_tokens":0,"temperature":"","stream":false}'
};

try {
  const response = await fetch(url, options);
  const data = await response.json();
  console.log(data);
} catch (error) {
  console.error(error);
}
const settings = {
  async: true,
  crossDomain: true,
  url: '{{baseUrl}}/models/v2/openai/v1/completions',
  method: 'POST',
  headers: {
    authorization: '',
    'content-type': 'application/json'
  },
  processData: false,
  data: '{\n  "model": "",\n  "prompt": "",\n  "max_tokens": 0,\n  "temperature": "",\n  "stream": false\n}'
};

$.ajax(settings).done(function (response) {
  console.log(response);
});
val client = OkHttpClient()

val mediaType = MediaType.parse("application/json")
val body = RequestBody.create(mediaType, "{\n  \"model\": \"\",\n  \"prompt\": \"\",\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}")
val request = Request.Builder()
  .url("{{baseUrl}}/models/v2/openai/v1/completions")
  .post(body)
  .addHeader("authorization", "")
  .addHeader("content-type", "application/json")
  .build()

val response = client.newCall(request).execute()
const http = require('https');

const options = {
  method: 'POST',
  hostname: 'example.com',
  port: null,
  path: '/baseUrl/models/v2/openai/v1/completions',
  headers: {
    authorization: '',
    'content-type': 'application/json'
  }
};

const req = http.request(options, function (res) {
  const chunks = [];

  res.on('data', function (chunk) {
    chunks.push(chunk);
  });

  res.on('end', function () {
    const body = Buffer.concat(chunks);
    console.log(body.toString());
  });
});

req.write(JSON.stringify({model: '', prompt: '', max_tokens: 0, temperature: '', stream: false}));
req.end();
const request = require('request');

const options = {
  method: 'POST',
  url: '{{baseUrl}}/models/v2/openai/v1/completions',
  headers: {authorization: '', 'content-type': 'application/json'},
  body: {model: '', prompt: '', max_tokens: 0, temperature: '', stream: false},
  json: true
};

request(options, function (error, response, body) {
  if (error) throw new Error(error);

  console.log(body);
});
const unirest = require('unirest');

const req = unirest('POST', '{{baseUrl}}/models/v2/openai/v1/completions');

req.headers({
  authorization: '',
  'content-type': 'application/json'
});

req.type('json');
req.send({
  model: '',
  prompt: '',
  max_tokens: 0,
  temperature: '',
  stream: false
});

req.end(function (res) {
  if (res.error) throw new Error(res.error);

  console.log(res.body);
});
const axios = require('axios').default;

const options = {
  method: 'POST',
  url: '{{baseUrl}}/models/v2/openai/v1/completions',
  headers: {authorization: '', 'content-type': 'application/json'},
  data: {model: '', prompt: '', max_tokens: 0, temperature: '', stream: false}
};

try {
  const { data } = await axios.request(options);
  console.log(data);
} catch (error) {
  console.error(error);
}
const fetch = require('node-fetch');

const url = '{{baseUrl}}/models/v2/openai/v1/completions';
const options = {
  method: 'POST',
  headers: {authorization: '', 'content-type': 'application/json'},
  body: '{"model":"","prompt":"","max_tokens":0,"temperature":"","stream":false}'
};

try {
  const response = await fetch(url, options);
  const data = await response.json();
  console.log(data);
} catch (error) {
  console.error(error);
}
// The original `#import` line lost its target (angle brackets stripped as
// markup); Foundation provides NSURLSession/NSJSONSerialization used below.
#import <Foundation/Foundation.h>

// Request headers; fill in the authorization value before running.
NSDictionary *headers = @{ @"authorization": @"",
                           @"content-type": @"application/json" };
// JSON payload matching the documented request body.
NSDictionary *parameters = @{ @"model": @"",
                              @"prompt": @"",
                              @"max_tokens": @0,
                              @"temperature": @"",
                              @"stream": @NO };

NSData *postData = [NSJSONSerialization dataWithJSONObject:parameters options:0 error:nil];

NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:[NSURL URLWithString:@"{{baseUrl}}/models/v2/openai/v1/completions"]
                                                       cachePolicy:NSURLRequestUseProtocolCachePolicy
                                                   timeoutInterval:10.0];
[request setHTTPMethod:@"POST"];
[request setAllHTTPHeaderFields:headers];
[request setHTTPBody:postData];

NSURLSession *session = [NSURLSession sharedSession];
NSURLSessionDataTask *dataTask = [session dataTaskWithRequest:request
                                            completionHandler:^(NSData *data, NSURLResponse *response, NSError *error) {
                                                if (error) {
                                                    NSLog(@"%@", error);
                                                } else {
                                                    NSHTTPURLResponse *httpResponse = (NSHTTPURLResponse *) response;
                                                    NSLog(@"%@", httpResponse);
                                                }
                                            }];
[dataTask resume];
open Cohttp_lwt_unix
open Cohttp
open Lwt

(* Target endpoint for the completions request. *)
let uri = Uri.of_string "{{baseUrl}}/models/v2/openai/v1/completions" in
(* Auth + content-type headers; fill in the authorization value before use. *)
let headers = Header.add_list (Header.init ()) [
  ("authorization", "");
  ("content-type", "application/json");
] in
(* Raw JSON payload sent as the request body. *)
let body = Cohttp_lwt_body.of_string "{\n  \"model\": \"\",\n  \"prompt\": \"\",\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}" in

(* Issue the POST and bind on the (response, body stream) pair. *)
Client.call ~headers ~body `POST uri
>>= fun (res, body_stream) ->
  (* Do stuff with the result *)
 "{{baseUrl}}/models/v2/openai/v1/completions",
  CURLOPT_RETURNTRANSFER => true,
  CURLOPT_ENCODING => "",
  CURLOPT_MAXREDIRS => 10,
  CURLOPT_TIMEOUT => 30,
  CURLOPT_HTTP_VERSION => CURL_HTTP_VERSION_1_1,
  CURLOPT_CUSTOMREQUEST => "POST",
  CURLOPT_POSTFIELDS => json_encode([
    'model' => '',
    'prompt' => '',
    'max_tokens' => 0,
    'temperature' => '',
    'stream' => null
  ]),
  CURLOPT_HTTPHEADER => [
    "authorization: ",
    "content-type: application/json"
  ],
]);

$response = curl_exec($curl);
$err = curl_error($curl);

curl_close($curl);

if ($err) {
  echo "cURL Error #:" . $err;
} else {
  echo $response;
}
<?php

// Guzzle example; the client-construction lines were lost to markup
// stripping and are reconstructed here.
$client = new \GuzzleHttp\Client();

$response = $client->request('POST', '{{baseUrl}}/models/v2/openai/v1/completions', [
  'body' => '{
  "model": "",
  "prompt": "",
  "max_tokens": 0,
  "temperature": "",
  "stream": false
}',
  'headers' => [
    'authorization' => '',
    'content-type' => 'application/json',
  ],
]);

echo $response->getBody();
<?php

// pecl_http v1 (HttpRequest) example; the constructor line was lost to
// markup stripping and is reconstructed here.
$request = new HttpRequest();
$request->setUrl('{{baseUrl}}/models/v2/openai/v1/completions');
$request->setMethod(HTTP_METH_POST);

$request->setHeaders([
  'authorization' => '',
  'content-type' => 'application/json'
]);

$request->setContentType('application/json');
// `stream` was `null` in the original snippet; the documented payload sends
// `"stream": false`, so match that here.
$request->setBody(json_encode([
  'model' => '',
  'prompt' => '',
  'max_tokens' => 0,
  'temperature' => '',
  'stream' => false
]));

try {
  $response = $request->send();

  echo $response->getBody();
} catch (HttpException $ex) {
  echo $ex;
}
<?php

// pecl_http v2 (http\Client) example; the object-construction lines were
// lost to markup stripping and are reconstructed here.
$client = new http\Client;
$request = new http\Client\Request;

$body = new http\Message\Body;
// `stream` was `null` in the original snippet; the documented payload sends
// `"stream": false`, so match that here.
$body->append(json_encode([
  'model' => '',
  'prompt' => '',
  'max_tokens' => 0,
  'temperature' => '',
  'stream' => false
]));

$request->setRequestUrl('{{baseUrl}}/models/v2/openai/v1/completions');
$request->setRequestMethod('POST');
$request->setBody($body);

$request->setHeaders([
  'authorization' => '',
  'content-type' => 'application/json'
]);

$client->enqueue($request)->send();
$response = $client->getResponse();

echo $response->getBody();
# Build the request headers as a hashtable literal instead of repeated Add()
# calls; fill in the authorization value before running.
$headers = @{
    'authorization' = ''
    'content-type'  = 'application/json'
}
# Raw JSON payload for the completions endpoint.
$body = '{
  "model": "",
  "prompt": "",
  "max_tokens": 0,
  "temperature": "",
  "stream": false
}'
$response = Invoke-WebRequest -Uri '{{baseUrl}}/models/v2/openai/v1/completions' -Method POST -Headers $headers -ContentType 'application/json' -Body $body
# Same request via Invoke-RestMethod, which parses the JSON response into a
# PowerShell object. Fill in the authorization value before running.
$headers=@{}
$headers.Add("authorization", "")
$headers.Add("content-type", "application/json")
# The -Body here is a raw JSON string matching the documented payload.
$response = Invoke-RestMethod -Uri '{{baseUrl}}/models/v2/openai/v1/completions' -Method POST -Headers $headers -ContentType 'application/json' -Body '{
  "model": "",
  "prompt": "",
  "max_tokens": 0,
  "temperature": "",
  "stream": false
}'
import http.client

# Open a TLS connection to the API host (placeholder hostname).
connection = http.client.HTTPSConnection("example.com")

# Raw JSON payload; fill in model/prompt before use.
payload = "{\n  \"model\": \"\",\n  \"prompt\": \"\",\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}"

request_headers = {
    'authorization': "",
    'content-type': "application/json",
}

connection.request(
    "POST",
    "/baseUrl/models/v2/openai/v1/completions",
    payload,
    request_headers,
)

response = connection.getresponse()
print(response.read().decode("utf-8"))
import requests

# Completions endpoint; `requests` serializes the dict passed via `json=`
# and sets up the POST body for us.
url = "{{baseUrl}}/models/v2/openai/v1/completions"

response = requests.post(
    url,
    json={
        "model": "",
        "prompt": "",
        "max_tokens": 0,
        "temperature": "",
        "stream": False,
    },
    headers={
        "authorization": "",
        "content-type": "application/json",
    },
)

print(response.json())
library(httr)

# Completions endpoint.
url <- "{{baseUrl}}/models/v2/openai/v1/completions"

# Raw JSON payload; fill in model/prompt before use.
payload <- "{\n  \"model\": \"\",\n  \"prompt\": \"\",\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}"

encode <- "json"

# POST with an (empty) authorization header and a JSON content type.
response <- VERB("POST", url, body = payload, add_headers('authorization' = ''), content_type("application/json"), encode = encode)

# Return the response body as text.
content(response, "text")
require 'uri'
require 'net/http'

url = URI("{{baseUrl}}/models/v2/openai/v1/completions")

http = Net::HTTP.new(url.host, url.port)
# NOTE(review): TLS is forced on here; if {{baseUrl}} expands to an http://
# URL this will fail — confirm against the deployed scheme.
http.use_ssl = true

# Build the POST with auth + JSON headers and a raw JSON body.
request = Net::HTTP::Post.new(url)
request["authorization"] = ''
request["content-type"] = 'application/json'
request.body = "{\n  \"model\": \"\",\n  \"prompt\": \"\",\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}"

response = http.request(request)
puts response.read_body
require 'faraday'

# Faraday connection pointed at the API host with a JSON content type.
connection = Faraday.new(
  url: 'https://example.com',
  headers: {'Content-Type' => 'application/json'}
)

# POST the raw JSON payload; fill in the authorization header before use.
response = connection.post('/baseUrl/models/v2/openai/v1/completions') do |req|
  req.headers['authorization'] = ''
  req.body = "{\n  \"model\": \"\",\n  \"prompt\": \"\",\n  \"max_tokens\": 0,\n  \"temperature\": \"\",\n  \"stream\": false\n}"
end

puts response.status
puts response.body
use serde_json::json;
use reqwest;

#[tokio::main]
pub async fn main() {
    let url = "{{baseUrl}}/models/v2/openai/v1/completions";

    let payload = json!({
        "model": "",
        "prompt": "",
        "max_tokens": 0,
        "temperature": "",
        "stream": false
    });

    let mut headers = reqwest::header::HeaderMap::new();
    headers.insert("authorization", "".parse().unwrap());
    headers.insert("content-type", "application/json".parse().unwrap());

    let client = reqwest::Client::new();
    let response = client.post(url)
        .headers(headers)
        .json(&payload)
        .send()
        .await;

    let results = response.unwrap()
        .json::()
        .await
        .unwrap();

    dbg!(results);
}
# POST a completion request with curl; fill in the `authorization` header
# value before running.
curl --request POST \
  --url {{baseUrl}}/models/v2/openai/v1/completions \
  --header 'authorization: ' \
  --header 'content-type: application/json' \
  --data '{
  "model": "",
  "prompt": "",
  "max_tokens": 0,
  "temperature": "",
  "stream": false
}'
# Same request with HTTPie: pipe the JSON payload on stdin and pass headers
# as key:value arguments. Fill in the authorization value before running.
echo '{
  "model": "",
  "prompt": "",
  "max_tokens": 0,
  "temperature": "",
  "stream": false
}' |  \
  http POST {{baseUrl}}/models/v2/openai/v1/completions \
  authorization:'' \
  content-type:application/json
# Same request with wget. The original payload used `\n` escapes inside
# single quotes; the shell passes those two characters through literally,
# so the body contained raw backslash-n sequences between JSON tokens and
# was not valid JSON. Send the payload on one line instead.
wget --quiet \
  --method POST \
  --header 'authorization: ' \
  --header 'content-type: application/json' \
  --body-data '{"model": "", "prompt": "", "max_tokens": 0, "temperature": "", "stream": false}' \
  --output-document \
  - {{baseUrl}}/models/v2/openai/v1/completions
import Foundation

// Request headers; fill in the authorization value before running.
let headers = [
  "authorization": "",
  "content-type": "application/json"
]
// JSON payload matching the documented request body.
let parameters = [
  "model": "",
  "prompt": "",
  "max_tokens": 0,
  "temperature": "",
  "stream": false
] as [String : Any]

// JSONSerialization.data(withJSONObject:options:) is a throwing call; the
// original snippet omitted `try`, which does not compile in Swift 3+.
let postData = try! JSONSerialization.data(withJSONObject: parameters, options: [])

// Use the native URLRequest value type rather than NSMutableURLRequest.
var request = URLRequest(url: URL(string: "{{baseUrl}}/models/v2/openai/v1/completions")!,
                         cachePolicy: .useProtocolCachePolicy,
                         timeoutInterval: 10.0)
request.httpMethod = "POST"
request.allHTTPHeaderFields = headers
request.httpBody = postData

let session = URLSession.shared
let dataTask = session.dataTask(with: request) { data, response, error in
  if let error = error {
    print(error)
  } else if let httpResponse = response as? HTTPURLResponse {
    print(httpResponse)
  }
}

dataTask.resume()