Using Shiny, OpenAI, and RStudio to Build a Chatbot
The message body:
{httr2}
{httr2}
{httr2}
library(httr2)
library(purrr)

# Build the chat payload: a single user turn in the
# list(role, content) shape the OpenAI API expects
user_message <- list(list(role = "user", content = "Hello!"))
body <- list(
  model = "gpt-3.5-turbo",
  messages = user_message
)

# POST the payload to the chat completions endpoint,
# authenticating with the OPENAI_API_KEY environment variable
resp <- request("https://api.openai.com/v1") |>
  req_url_path_append("chat/completions") |>
  req_auth_bearer_token(token = Sys.getenv("OPENAI_API_KEY")) |>
  req_body_json(body) |>
  req_perform()
{httr2}
library(httr2)
library(purrr)

# Assemble the request payload: one user message plus the model id
user_message <- list(list(role = "user", content = "Hello!"))
body <- list(
  model = "gpt-3.5-turbo",
  messages = user_message
)

# Send the chat completion request
resp <- request("https://api.openai.com/v1") |>
  req_url_path_append("chat/completions") |>
  req_auth_bearer_token(token = Sys.getenv("OPENAI_API_KEY")) |>
  req_body_json(body) |>
  req_perform()

# Parse the JSON body and drill down to the assistant's reply text
resp_body_json(resp, simplifyVector = TRUE) |>
  pluck("choices", "message", "content")
[1] "Hello! How can I assist you today?"
$id
[1] "chatcmpl-8tIGZLfmm3u3NuDC16ElF5CrWBHuF"
$object
[1] "chat.completion"
$created
[1] 1708188947
$model
[1] "gpt-3.5-turbo-0125"
$choices
index message.role message.content logprobs finish_reason
1 0 assistant Hello! How can I assist you today? NA stop
$usage
$usage$prompt_tokens
[1] 9
$usage$completion_tokens
[1] 9
$usage$total_tokens
[1] 18
$system_fingerprint
[1] "fp_69829325d0"
library(httr2)
library(purrr)
#' Send a single message to the OpenAI chat completions API.
#'
#' @param message A length-one character string: the user's message.
#' @param api_key OpenAI API key; defaults to the OPENAI_API_KEY
#'   environment variable.
#' @param model Model identifier; defaults to "gpt-3.5-turbo". New
#'   parameter with a default, so existing callers are unaffected.
#' @return The assistant's reply as a character string.
chat <- function(message,
                 api_key = Sys.getenv("OPENAI_API_KEY"),
                 model = "gpt-3.5-turbo") {
  # Fail fast with a clear error instead of failing lazily inside
  # the request pipeline (e.g. when chat() is called with no message)
  stopifnot(is.character(message), length(message) == 1)

  user_message <- list(list(role = "user", content = message))
  body <- list(model = model,
               messages = user_message)

  resp <- request("https://api.openai.com/v1") |>
    req_url_path_append("chat/completions") |>
    req_auth_bearer_token(token = api_key) |>
    req_body_json(body) |>
    req_perform()

  # Extract only the assistant's message text from the parsed JSON
  resp |>
    resp_body_json(simplifyVector = TRUE) |>
    pluck("choices", "message", "content")
}
chat("What's your favorite color?")
[1] "I'm a language model AI from OpenAI, so I don't have personal preferences or feelings like humans do. But I can certainly appreciate all the beautiful colors in the world! What's your favorite color?"
# Ask the model for a ggplot2 example; the string literal legally spans
# two lines (R keeps the embedded newline inside the string).
answer <- chat("Make a ggplot2 in an RMarkdown document and briefly tell me
what you made.")
# cat() prints the raw text so the embedded newlines render, rather than
# the escaped single-string display that print() would give.
answer |> cat()
```{r}
library(ggplot2)
# Create a scatter plot of car weights and miles per gallon
ggplot(mpg, aes(x = wt, y = hwy)) +
geom_point() +
labs(title = "Car Weight vs. Miles Per Gallon",
x = "Weight (in 1000 lbs)",
y = "Highway MPG")
```
In this ggplot2 visualization, I created a scatter plot comparing car weights (in 1000 lbs) on the x-axis to highway miles per gallon (MPG) on the y-axis using the `mpg` dataset that comes with ggplot2. Each point represents a different car, showing the relationship between weight and fuel efficiency. The plot title and axis labels provide context for the viewer.
library(ggplot2)
# Create a scatter plot of car weights and miles per gallon
ggplot(mpg, aes(x = wt, y = hwy)) +
geom_point() +
labs(title = "Car Weight vs. Miles Per Gallon",
x = "Weight (in 1000 lbs)",
y = "Highway MPG")
In this ggplot2 visualization, I created a scatter plot comparing car weights (in 1000 lbs) on the x-axis to highway miles per gallon (MPG) on the y-axis using the mpg
dataset that comes with ggplot2. Each point represents a different car, showing the relationship between weight and fuel efficiency. The plot title and axis labels provide context for the viewer.
chat()
# Second iteration of chat(): adds conversation history and a selectable
# system prompt on top of the single-message version.
#
# Arguments:
#   user_message  - the new message from the user
#   history       - prior turns to include, or NULL for a fresh conversation
#   system_prompt - which canned system prompt to use ("general" or "code")
#   api_key       - OpenAI API key, read from the environment by default
chat <- function(user_message,
history = NULL,
system_prompt = c("general", "code"),
api_key = Sys.getenv("OPENAI_API_KEY")) {
# Resolve the system prompt text from its short name
system <- get_system_prompt(system_prompt)
# NOTE(review): `system` is computed above but the raw `system_prompt` is
# what gets passed to prepare_prompt() - confirm prepare_prompt() resolves
# it itself, otherwise `system` is unused here.
prompt <- prepare_prompt(user_message, system_prompt, history)
base_url <- "https://api.openai.com/v1"
# Request payload: model id plus the fully assembled message list
body <- list(model = "gpt-3.5-turbo",
messages = prompt)
# Placeholders from the article: the httr2 request pipeline and response
# processing shown earlier are meant to go here.
# <httr2_request_pipeline>
# <process_response>
}
get_system_prompt()
#' Build the system message for a chat request.
#'
#' @param system Which canned system prompt to use: "general" (default)
#'   or "code".
#' @return A one-element list holding the system message in the
#'   list(role, content) shape the OpenAI API expects.
get_system_prompt <- function(system = c("general", "code")) {
  # match.arg() collapses the default vector to its first element and
  # errors informatively on values outside the allowed set. Without it,
  # calling get_system_prompt() with the default fails, because switch()
  # requires a length-1 EXPR and the default is a length-2 vector.
  system <- match.arg(system)
  instructions <-
    switch(system,
           "general" = "You are a helpful assistant.",
           "code" = "<code_assistant_prompt>")
  list(list(role = "system", content = instructions))
}
prepare_prompt()
30:00