# shellcheck shell=dash
# Issue a request against the OpenAI chat completions API
# ref: https://platform.openai.com/docs/api-reference/chat/create

# Options: --message --model --maxtoken --proxy --apikey

# Send a streaming chat-completion request to the OpenAI API.
# Options: --message JSON  messages array (raw JSON, required)
#          --model NAME    model id (default: stored config or first default model)
#          --maxtoken N    max_tokens for the completion (default 1000)
#          --apikey KEY    overrides the stored api key
#          --proxy URL     proxy to route the request through
# Returns: 64 on bad/missing option values; otherwise curl's exit status.
___x_cmd_openai_request(){
    ___x_cmd_openai_has_apikey || return $?
    local message=;  local apikey=;   local proxy=;    local maxtoken=;
    # Load stored defaults for apikey/model/maxtoken; a missing config is not fatal
    ___x_cmd_openai_cur apikey:= model:= maxtoken:= 2>/dev/null
    local model="${model:-"$___X_CMD_OPENAI_DEFAULT_FIRST_MODEL"}"
    while [ $# -gt 0 ]; do
        # NOTE: validate that a value is present BEFORE consuming "$2",
        # otherwise the guard fires only after an empty assignment.
        case "$1" in
            --message)      [ $# -ge 2 ] || N=openai M="Please provide message value"   log:ret:64; message="$2";   shift 2 ;;
            --apikey)       [ $# -ge 2 ] || N=openai M="Please provide apikey value"    log:ret:64; apikey="$2";    shift 2 ;;
            --proxy)        [ $# -ge 2 ] || N=openai M="Please provide proxy value"     log:ret:64; proxy="$2";     shift 2 ;;
            --maxtoken)     [ $# -ge 2 ] || N=openai M="Please provide maxtoken value"  log:ret:64; maxtoken="$2";  shift 2 ;;
            --model)        [ $# -ge 2 ] || N=openai M="Please provide model value"     log:ret:64; model="$2";     shift 2 ;;
            *)              break ;;
        esac
    done

    # Without a messages payload the JSON below would be malformed ("messages": ,)
    [ -n "$message" ] || N=openai M="Please provide message value" log:ret:64

    # default maxtoken 1000
    local data='{
        "model": '"\"$model\""',
        "messages": '"$message"',
        "temperature": 0.7,
        "max_tokens": '"${maxtoken:-1000}"',
        "stream": true
    }'
    openai:debug --data "$data"

    # TODO: Need to json quote the content
    # LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8
    # NOTE: the previous continuation line contained a stray '\ ' which passed a
    # literal single-space argument to curl; removed.
    ___x_cmd proxy runifset "$proxy" \
    ___x_cmd curl \
        -sS https://api.openai.com/v1/chat/completions  \
        -H "Content-Type: application/json"             \
        -H "Authorization: Bearer $apikey"              \
        -d "$data" # | tee result.json
}
