Let’s Build a Chatbot

Using Shiny, OpenAI, and RStudio to Build a Chatbot

James Wade

API Calls

An Example from OpenAI Docs


curl https://api.openai.com/v1/chat/completions \
-H "Content-Type: application/json" \
-H "Authorization: Bearer $OPENAI_API_KEY" \
-d '{
"model": "gpt-3.5-turbo",
"messages": [{"role": "system", "content": "You are a helpful assistant."}, {"role": "user", "content": "Hello!"}]
}'

Constructing Messages for OpenAI


The message body:

{
  "model": "gpt-3.5-turbo",
  "messages": [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"}
  ]
}
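
To see how this body maps onto R objects, a nested list with the same shape serializes to the JSON above. This check is not in the deck; req_body_json(), used below, performs a similar serialization, so it is a quick way to confirm the structure before sending anything.

library(jsonlite)

# the same body as nested R lists
body <- list(
  model = "gpt-3.5-turbo",
  messages = list(
    list(role = "system", content = "You are a helpful assistant."),
    list(role = "user", content = "Hello!")
  )
)

# auto_unbox = TRUE keeps length-one vectors as JSON scalars, not arrays
toJSON(body, auto_unbox = TRUE, pretty = TRUE)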

Send requests with {httr2}

library(httr2)
library(purrr)

# construct the message body
user_message <- list(list(role = "user", content = "Hello!"))
body <- list(model = "gpt-3.5-turbo", messages = user_message)

# send the request
resp <-
  request("https://api.openai.com/v1") |>
  req_url_path_append("chat/completions") |> 
  req_auth_bearer_token(token = Sys.getenv("OPENAI_API_KEY")) |> 
  req_body_json(body) |> 
  req_perform()

# process the response
resp |>
  resp_body_json(simplifyVector = TRUE) |> 
  pluck("choices", "message", "content")
[1] "Hello! How can I assist you today?"

Examining the Response

resp |> 
  resp_body_json(simplifyVector = TRUE)
$id
[1] "chatcmpl-8tIGZLfmm3u3NuDC16ElF5CrWBHuF"

$object
[1] "chat.completion"

$created
[1] 1708188947

$model
[1] "gpt-3.5-turbo-0125"

$choices
  index message.role                    message.content logprobs finish_reason
1     0    assistant Hello! How can I assist you today?       NA          stop

$usage
$usage$prompt_tokens
[1] 9

$usage$completion_tokens
[1] 9

$usage$total_tokens
[1] 18


$system_fingerprint
[1] "fp_69829325d0"

Wrapping it in a function

library(httr2)
library(purrr)

chat <- function(message, api_key = Sys.getenv("OPENAI_API_KEY")) {
  user_message <- list(list(role = "user", content = message))
  body <- list(model = "gpt-3.5-turbo",
               messages = user_message)
  resp <-
    request("https://api.openai.com/v1") |> 
    req_url_path_append("chat/completions") |> 
    req_auth_bearer_token(token = api_key) |> 
    req_body_json(body) |> 
    req_perform()
  
  resp |> 
    resp_body_json(simplifyVector = TRUE) |> 
    pluck("choices", "message", "content")
}
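
One optional hardening step, not shown in the deck: rate limits (HTTP 429) and transient server errors surface as R errors from req_perform(). Adding req_retry() to the pipeline inside chat() lets httr2 retry those automatically; the snippet below is a sketch of how that step could look.

# inside chat(): retry transient failures (e.g. 429/503) before erroring
resp <-
  request("https://api.openai.com/v1") |>
  req_url_path_append("chat/completions") |>
  req_auth_bearer_token(token = api_key) |>
  req_body_json(body) |>
  req_retry(max_tries = 3) |>
  req_perform()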

Let’s Try it Out

Trying out chat()


chat("What is your favorite color?")
[1] "I'm a language model AI from OpenAI, so I don't have personal preferences or feelings like humans do. But I can certainly appreciate all the beautiful colors in the world! What's your favorite color?"


chat("Show me a simple ggplot2 example. Only code with comments. Be brief.")
[1] "```\n# Load ggplot2 package\nlibrary(ggplot2)\n\n# Create a simple scatter plot\nggplot(mtcars, aes(x = mpg, y = wt)) + \n  geom_point()\n```"

A Prettier Response

answer <- chat("Make a ggplot2 in an RMarkdown document and briefly tell me
               what you made.")
answer |> cat()
```{r}
library(ggplot2)

# Create a scatter plot of car weights and miles per gallon
ggplot(mpg, aes(x = wt, y = hwy)) +
  geom_point() +
  labs(title = "Car Weight vs. Miles Per Gallon",
       x = "Weight (in 1000 lbs)",
       y = "Highway MPG")
```

In this ggplot2 visualization, I created a scatter plot comparing car weights (in 1000 lbs) on the x-axis to highway miles per gallon (MPG) on the y-axis using the `mpg` dataset that comes with ggplot2. Each point represents a different car, showing the relationship between weight and fuel efficiency. The plot title and axis labels provide context for the viewer.

An Even Prettier Response

(The same chunk and explanation as above, this time rendered in the slide as an executed R Markdown chunk rather than shown as raw Markdown.)

Helper Functions

chat()

chat <- function(user_message,
                 history = NULL,
                 system_prompt = c("general", "code"),
                 api_key = Sys.getenv("OPENAI_API_KEY")) {
  system   <- get_system_prompt(system_prompt)
  prompt   <- prepare_prompt(user_message, system, history)
  base_url <- "https://api.openai.com/v1"
  body     <- list(model = "gpt-3.5-turbo",
                   messages = prompt)

  # send the request (same pipeline as before)
  resp <-
    request(base_url) |>
    req_url_path_append("chat/completions") |>
    req_auth_bearer_token(token = api_key) |>
    req_body_json(body) |>
    req_perform()

  # process the response
  resp |>
    resp_body_json(simplifyVector = TRUE) |>
    pluck("choices", "message", "content")
}

Helper Functions

get_system_prompt()

get_system_prompt <- function(system = c("general", "code")) {
  system <- match.arg(system)
  instructions <-
    switch(system,
           "general" = "You are a helpful assistant.",
           "code"    = "<code_assistant_prompt>")
  list(list(role = "system", content = instructions))
}


prepare_prompt()

prepare_prompt <- function(user_message, system_prompt, history) {
  user_prompt <-  list(list(role = "user", content = user_message))
  c(system_prompt, history, user_prompt) |> compact()
}
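
The history argument is what makes this a conversation rather than a series of one-off calls: prepare_prompt() splices earlier role/content pairs between the system prompt and the new user message. A sketch (not in the deck) of a two-turn exchange using that message format:

# first turn
q1 <- "Suggest a name for a talk about building chatbots in R."
a1 <- chat(q1, system_prompt = "general")

# carry the first exchange forward as history
history <- list(
  list(role = "user", content = q1),
  list(role = "assistant", content = a1)
)

# the second turn sees the earlier context
chat("Make it shorter and punchier.", history = history,
     system_prompt = "general")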

Shiny Build

30:00
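
The deck allots the next 30 minutes to building the app live, so no code appears here. As a rough companion, here is a minimal sketch (not from the deck) of a chat UI that reuses the chat() helper above; the input IDs and layout are illustrative only.

library(shiny)

ui <- fluidPage(
  titlePanel("Chatbot"),
  textAreaInput("user_message", "Message", width = "100%"),
  actionButton("send", "Send"),
  uiOutput("conversation")
)

server <- function(input, output, session) {
  # running list of role/content messages, same format as prepare_prompt()
  history <- reactiveVal(NULL)

  observeEvent(input$send, {
    req(input$user_message)
    answer <- chat(input$user_message, history = history())
    # append the new user and assistant turns to the running history
    history(c(history(),
              list(list(role = "user", content = input$user_message)),
              list(list(role = "assistant", content = answer))))
  })

  output$conversation <- renderUI({
    msgs <- history()
    if (is.null(msgs)) return(NULL)
    tagList(lapply(msgs, \(m) p(strong(m$role), ": ", m$content)))
  })
}

shinyApp(ui, server)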

Deployment