ChatGPT -- Evaluating the ChatGPT API

Let's start with a simple bash script

Sample JSON request to the ChatGPT API

{
  "model": "gpt-3.5-turbo",
  "messages": [
    {
      "role": "user",
      "content": "What is the OpenAI mission?"
    }
  ]
}

The bash script sends the above payload

#!/bin/bash
set -x #echo on

DATA='{ "model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "What is the OpenAI mission?"}]}'

echo "$DATA" | jq

curl --location --insecure --request POST $OPENAI_URL \
--header "Authorization: Bearer $OPENAI_KEY" \
--header 'Content-Type: application/json' \
--data-raw "$DATA" \
| jq

Sample JSON response from the ChatGPT API

{
  "id": "chatcmpl-6rg9nqm5oyUqQrz5fuaXTR5jqZmDQ",
  "object": "chat.completion",
  "created": 1678251455,
  "model": "gpt-3.5-turbo-0301",
  "usage": {
    "prompt_tokens": 14,
    "completion_tokens": 100,
    "total_tokens": 114
  },
  "choices": [
    {
      "message": {
        "role": "assistant",
        "content": "\n\nAs an AI language model, I can tell you that the OpenAI mission is to develop and promote safe and beneficial artificial intelligence (AI) that benefits humanity. The organization was founded by a group of leading AI experts, including Elon Musk and Sam Altman, and is committed to advancing the understanding and ethical use of AI technology. In addition to conducting research and developing new applications for AI, OpenAI also works to educate the public about the potential benefits and risks of this rapidly evolving field."
      },
      "finish_reason": null,
      "index": 0
    }
  ]
}

Let's issue a simple API call to query ChatGPT using Nim

Nim source code to interact with the OpenAI API via HTTPS

Load the OpenAI API key from the environment and set up the API endpoint URL

Install the jsony JSON parsing library and the cligen command-line library using Nimble, the Nim package manager

nimble install jsony
nimble install cligen
Downloading https://github.com/treeform/jsony using git
Verifying dependencies for jsony@1.1.5
Installing jsony@1.1.5
Success: jsony installed successfully.
Downloading https://github.com/c-blake/cligen.git using git
Verifying dependencies for cligen@1.5.41
Installing cligen@1.5.41
Success: cligen installed successfully.
# Standard Library Imports
import std / [asyncdispatch, json, logging, httpclient, macros, os, strformat, strutils, tables]
# Third party library Imports
import jsony

let
  loggingFormat = "[$datetime] - $appname - $levelname :::: "
  consoleLog = newConsoleLogger(fmtStr=loggingFormat)
  errorLog = newFileLogger("errors.log", fmtStr=loggingFormat, levelThreshold=lvlError)
  rollingLog = newRollingFileLogger("app-rolling.log", fmtStr=loggingFormat)

addHandler(consoleLog)
addHandler(errorLog)
addHandler(rollingLog)

let
  openAiKey = getEnv("OPENAI_KEY")
  openAiUrl = getEnv("OPENAI_URL")

info fmt"OpenAI Key was found = {not openAiKey.isEmptyOrWhitespace}"
info fmt"OpenAI URL = {openAiUrl}"


[2023-03-11T11:17:29] - nim_src_TDmGqb - INFO :::: OpenAI Key was found = true
[2023-03-11T11:17:29] - nim_src_TDmGqb - INFO :::: OpenAI URL = https://api.openai.com/v1/chat/completions

Nim types to deserialize the ChatGPT response

type
  GptMessage = object
    role: string
    content: string
  GptChoice = object
    message: GptMessage
    finish_reason: string
    index: int
  GptUsage = object
    prompt_tokens: int
    completion_tokens: int
    total_tokens: int
  GptResponse = object
    id: string
    `object`: string
    created: BiggestInt
    model: string
    usage: GptUsage
    choices: seq[GptChoice]
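
To sanity-check these types, here is a minimal sketch (not part of the original program; the trimmed-down response body is made up) that parses a response with jsony's fromJson, assuming the types and imports above are in scope:

import jsony

let sampleBody = """{
  "id": "chatcmpl-test",
  "object": "chat.completion",
  "created": 1678251455,
  "model": "gpt-3.5-turbo-0301",
  "usage": {"prompt_tokens": 14, "completion_tokens": 5, "total_tokens": 19},
  "choices": [
    {"message": {"role": "assistant", "content": "Hello"}, "finish_reason": "stop", "index": 0}
  ]
}"""

# jsony maps JSON keys to object fields by name, including the backtick-quoted `object` field
let parsed = sampleBody.fromJson(GptResponse)
assert parsed.model == "gpt-3.5-turbo-0301"
assert parsed.choices[0].message.content == "Hello"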

Nim configuration file to enable SSL

d:ssl
d:release

Nim procedures to execute the API calls and parse the JSON results

proc onProgressChanged(total, progress, speed: BiggestInt) {.async.} =
  debug fmt"Downloaded {progress} of {total}"
  debug fmt"Current rate: {speed div 1000} kb/s"

proc asyncPost(
  url: string, messages: seq[Table[string, string]],
  model = "gpt-3.5-turbo", key = openAiKey
): Future[string] {.async.} =
  debug fmt"Messages: {messages}"

  let
    payload = %*{
      "model": model,
      "messages": messages,
    }
    headers = newHttpHeaders({
        "Content-Type": "application/json",
        "Authorization": fmt"Bearer {key}",
    })
    client = newAsyncHttpClient()

  client.onProgressChanged = onProgressChanged
  let response = await client.request(
    url, httpMethod = HttpPost,
    headers = headers,
    body = $payload
  )

  let
    respStatus = response.status
    respBody = await response.body
    respBodyJson = respBody.parseJson.pretty

  debug fmt"Response Status: {respStatus}"
  debug fmt"Response Body: {respBodyJson}"

  result = respBodyJson

# asyncQA is exported and public with the * suffix
proc asyncQA*(messages: seq[Table[string, string]], url: string = openAiUrl): Future[string] {.async.} =
  ## Asynchronous Question and Answer with ChatGPT
  let
    answerJson = await url.asyncPost(messages)
    answer = answerJson.fromJson(GptResponse)
    answerText = answer.choices[0].message.content

  result = answerText

  debug fmt"Answer: {answerText}"

proc asUsr*(query: string): seq[Table[string, string]] =
  result = @[{"role": "user", "content": query}.toTable]

proc asAsstnt*(response: string): seq[Table[string, string]] =
  result = @[{"role": "assistant", "content": response}.toTable]

Now query ChatGPT via the OpenAI API with the following prompts:

when isMainModule:
  let
    messages: seq[Table[string, string]] = @[]
    missionQueryFuture = "What is the OpenAI mission?".asUsr.asyncQA
    missionQueryFutureFr = "Quelle est la mission de OpenAI?".asUsr.asyncQA
    capabilitiesQueryFuture = "What can you do that's Cool?".asUsr.asyncQA
    capabilitiesQueryFutureFr = "Qu'est-ce que tu peux faire qui est vraiment Cool?".asUsr.asyncQA

  waitFor:
    missionQueryFuture and
    capabilitiesQueryFuture and
    missionQueryFutureFr and
    capabilitiesQueryFutureFr

  debug fmt"Mission Query Completed = {missionQueryFuture.finished}"
  debug fmt"Capabilities Query Completed = {capabilitiesQueryFuture.finished}"
  debug fmt"Mission Query FR Completed = {missionQueryFutureFr.finished}"
  debug fmt"Capabilities Query FR Completed = {capabilitiesQueryFutureFr.finished}"

  let
    mission = missionQueryFuture.read
    missionFr = missionQueryFutureFr.read
    capabilities = capabilitiesQueryFuture.read
    capabilitiesFr = capabilitiesQueryFutureFr.read

  info fmt"Mission: {mission}"
  info fmt"Mission FR: {missionFr}"
  info fmt"Capabilities: {capabilities}"
  info fmt"Capabilities FR: {capabilitiesFr}"

Execute the Nim code to interact with the OpenAI API for ChatGPT

#!/bin/bash
set -x #echo on

nim cpp -r chatgpt/chatgpt.nim
rm chatgpt

Let's write a simple app that reuses the module above

Let's issue more queries to ChatGPT using Nim

import std / [asyncDispatch, logging, strformat]
import chatgpt / [chatgpt]

when isMainModule:
  echo "Further Testing..."

  let
    whatNextFuture = "What's next?".asUsr.asyncQA
    whatNextFutureFr = "Qu'est-ce qui vient après?".asUsr.asyncQA

  waitFor:
    whatNextFuture and
    whatNextFutureFr

  let
    whatNext = whatNextFuture.read
    whatNextFr = whatNextFutureFr.read

  info fmt"What's next: {whatNext}"
  info fmt"Ce qui vient après: {whatNextFr}"

Testing ChatGPT social...

#!/bin/bash
set -x #echo on

nim cpp -r chatgpt_social.nim
rm chatgpt_social

Nim CLI App for prompting and querying the OpenAI API for ChatGPT

Let's build a ChatGPT CLI app in Nim. The prompts file (prompts.json) seeds the conversation with prior exchanges:

[
    { "role": "user", "content": "What is the OpenAI mission?" },
    { "role": "assistant", "content": "The OpenAI mission is to create and promote safe AI (artificial intelligence) that benefits humanity as a whole. They aim to develop and advance artificial intelligence in a way that is safe, transparent, and aligned with human values. OpenAI wants to ensure that the development of AI technology benefits society as a whole, rather than contributing to social inequalities or other negative outcomes. They also believe in promoting collaboration and sharing of resources and knowledge in AI research and development to accelerate progress and avoid duplication of efforts." },
    { "role": "user", "content": "Tell me some more..." },
    { "role": "assistant", "content": "OpenAI was founded in 2015 by a group of prominent individuals from the tech industry, including Elon Musk, Sam Altman, Greg Brockman, and others. Their initial goal was to create AI technology that was beneficial for humanity while simultaneously addressing the potential risks and negative consequences of AI development. One of the main ways that OpenAI aims to achieve its mission is through research and development of advanced AI systems that can be trained to perform complex tasks and solve real-world problems. They also advocate for ethical and socially responsible approaches to AI development and deployment. Additionally, OpenAI has created several tools and resources that are available to the public, including language models like GPT-3 and machine learning frameworks like PyTorch. They also host conferences and workshops to foster collaboration and sharing of knowledge among AI researchers and developers. Overall, OpenAI's mission is to create and promote AI for the greater good, with a focus on transparency, fairness, and safety." },
]

The default query file (user_query.txt) holds the follow-up question:

Tell me some more...

import std / [asyncDispatch, json, logging, os, sequtils, strformat, strutils, tables]
import chatgpt / [chatgpt]

proc processArgs(query = "user_query.txt", role = "user", prompts = "prompts.json") =
  let progName = getAppFileName().extractFilename
  if query != "":
    info fmt"Role: {role} -- Query: {query}"

    let
      prompts = prompts.readFile.parseJson.to(seq[Table[string, string]])
      queryMsg = query.readFile.asUsr
      messages = concat(prompts, queryMsg)
      response = waitFor messages.asyncQA

    debug fmt"Prompts: {prompts}"

    info fmt"Response: {response}"
  else:
    error fmt"ERROR:::: {progName} needs a query"

when isMainModule:
  import cligen
  echo "ChatGPT CLI..."
  dispatch(processArgs)
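
cligen's dispatch macro derives the command-line interface from the proc signature: each named parameter becomes a command-line option with its default value, and the doc comment becomes the help text. A minimal standalone sketch of that idea (the greet proc is a made-up example, not part of the CLI app):

import cligen

proc greet(name = "world", excited = false) =
  ## Print a greeting.
  if excited:
    echo "Hello, " & name & "!"
  else:
    echo "Hello, " & name

when isMainModule:
  dispatch(greet)  # usage: ./greet --name=Ada --excited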

API calls are essential to communicating with remote services in software engineering, and JSON payloads are the preferred information-exchange format. Here is another example prompts file, this time instructing ChatGPT to act as an English-to-French translator:

[
    { "role": "user", "content": "You will be my translator from English to French today, and as my translator you will translate everything I say to English keeping in mind that I may use some Technical terms pertain to programming, software engineering, datascience, and devops" },
    { "role": "assistant", "content": "Yes, I am ready to assist you and translate your English content into the most appropriate French equivalent in a way that accounts for the proper terms to use with regards to programming, software engineering, datascience, and devops." },
]
#!/bin/bash
set -x #echo on

nim cpp -r chatgpt_cli.nim "$@"
rm chatgpt_cli

Join us on Discord

The Ubuntu TechHive on Discord

COME HANGOUT!