futurewalker/app/Helpers/FirstParty/OpenAI/OpenAI.php

<?php

namespace App\Helpers\FirstParty\OpenAI;

use Exception;
use Illuminate\Support\Facades\Http;
use Illuminate\Support\Facades\Log;

class OpenAI
{
    public static function getRssPostMeta($user_prompt, $model_max_tokens = 1536, $timeout = 60)
    {
        $openai_config = 'openai-gpt-4-turbo';
        $system_prompt = "Based on the given article, populate the following in valid JSON format\n{\n\"title\":\"(Title based on article)\",\n\"keywords\":[\"(Important keywords in 1-2 words per keyword)\"],\n\"category\":\"(Updates|Opinions|Features|New Launches|How Tos|Reviews|AI Prompts)\",\n\"summary\":\"(Summarise the article in 60-90 words to help readers understand what it is about)\",\n\"entities\":[\"(List of companies and brands that are the main entities, 1-2 words per entity)\"],\n\"society_impact\":\"(Explain in 30-50 words how this article's content can impact society from a technological aspect)\",\n\"society_impact_level\":\"(low|medium|high)\"\n}";

        return self::getChatCompletion($user_prompt, $system_prompt, $openai_config, $model_max_tokens, $timeout);
    }
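
    /**
     * Extract structured metadata for a full article; same JSON shape as
     * getRssPostMeta() but with a longer summary and no "AI Prompts" category.
     */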
    public static function getArticleMeta($user_prompt, $model_max_tokens = 1536, $timeout = 60)
    {
        $openai_config = 'openai-gpt-4-turbo';
        $system_prompt = "Based on the given article, populate the following in valid JSON format\n{\n\"title\":\"(Title based on article)\",\n\"keywords\":[\"(Important keywords in 1-2 words per keyword)\"],\n\"category\":\"(Updates|Opinions|Features|New Launches|How Tos|Reviews)\",\n\"summary\":\"(Summarise the article in 80-100 words to help readers understand what it is about)\",\n\"entities\":[\"(List of companies and brands that are the main entities, 1-2 words per entity)\"],\n\"society_impact\":\"(Explain how this article's content can impact society from an AI and/or technology aspect)\",\n\"society_impact_level\":\"(low|medium|high)\"\n}";

        return self::getChatCompletion($user_prompt, $system_prompt, $openai_config, $model_max_tokens, $timeout);
    }
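
    /**
     * Ask the model to write a 600-800 word Markdown news article with a single
     * top-level heading; the reply is returned as plain text rather than JSON.
     */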
    public static function writeArticle($user_prompt, $model_max_tokens = 1536, $timeout = 180)
    {
        $openai_config = 'openai-gpt-3-5-turbo-1106';
        $system_prompt = "Write a news article in US grade 9 English, approximately 600-800 words, formatted in Markdown. \n\nIMPORTANT RULES\n- Do not add photos, publish date, or author\n- Only have 1 heading, which is the article title\n- Write in the following article structure:\n# Main article title\n\nParagraph 1\n\nParagraph 2\n\nParagraph 3, etc.\n\nConclusion";

        return self::getChatCompletion($user_prompt, $system_prompt, $openai_config, $model_max_tokens, $timeout, 'text');
    }
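
    /**
     * For a draft title, return identified and related keywords, proposed search
     * queries, a recommended writing tone, and proposed article headings as JSON.
     */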
    public static function titleSuggestions($user_prompt, $model_max_tokens = 512, $timeout = 60)
    {
        $openai_config = 'openai-gpt-3-5-turbo-1106';
        $system_prompt = "1. Identify meaningful and potential keywords in this blog post title, and estimate other keywords related to the title.\n\n2. Using the identified keywords, propose search queries I can use to find relevant articles online.\n\n3. Recommend a writing tone that will entice readers.\n\n4. Using the identified keywords, propose article headings with key facts to highlight for this article, without reviews.\n\n\nReturn all content in JSON: {\n\"identified_keywords\":[],\n\"related_keywords\":[],\n\"proposed_search_queries\":[],\n\"writing_tone\":[],\n\"article_headings\":[]\n}";

        return self::getChatCompletion($user_prompt, $system_prompt, $openai_config, $model_max_tokens, $timeout);
    }
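
    /**
     * From a list of candidate titles, ask the model to pick 10-15 diverse,
     * non-overlapping articles and return only their IDs as JSON.
     */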
    public static function topTitlePicksById($user_prompt, $model_max_tokens = 256, $timeout = 60)
    {
        $openai_config = 'openai-gpt-4-turbo';
        $system_prompt = 'Pick 10-15 unique articles that are focused on different product launches, ensuring each is interesting, informative, and casts a positive light on the technology and AI industry. Avoid selecting multiple articles that center around the same product or feature. Ensure that titles selected do not share primary keywords—such as the name of a product or specific technology feature—and strictly return a list of IDs only, without title, strictly in this JSON format: {"ids":[]}. Titles should represent a diverse range of topics and products within the technology and AI space without repetition.';

        // Long-running selection: the caller-supplied timeout is overridden with 800 seconds.
        return self::getChatCompletion($user_prompt, $system_prompt, $openai_config, $model_max_tokens, 800);
    }
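
    /**
     * Shared wrapper around the chat completion call: resolves the model and
     * per-token pricing from config, decodes JSON replies, computes the dollar
     * cost, and falls back to a default failure object on any exception.
     */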
    private static function getChatCompletion($user_prompt, $system_prompt, $openai_config, $model_max_tokens, $timeout, $response_format = 'json_object')
    {
        $model = config("platform.ai.{$openai_config}.model");
        $input_cost_per_thousand_tokens = config("platform.ai.{$openai_config}.input_cost_per_thousand_tokens");
        $output_cost_per_thousand_tokens = config("platform.ai.{$openai_config}.output_cost_per_thousand_tokens");
        $output_token = $model_max_tokens;

        try {
            $obj = self::chatCompletionApi($system_prompt, $user_prompt, $model, $output_token, $response_format, $timeout);

            $input_cost = self::getCostUsage($obj->usage_detailed->prompt_tokens, $input_cost_per_thousand_tokens);
            $output_cost = self::getCostUsage($obj->usage_detailed->completion_tokens, $output_cost_per_thousand_tokens);

            $output = $obj->reply;
            if ($response_format === 'json_object') {
                $output = json_decode(self::jsonFixer($obj->reply), false, 512, JSON_THROW_ON_ERROR);
            }

            return (object) [
                'prompts' => (object) [
                    'system_prompt' => $system_prompt,
                    'user_prompt' => $user_prompt,
                ],
                'cost' => $input_cost + $output_cost,
                'output' => $output,
                'token_usage' => $obj->usage,
                'token_usage_detailed' => $obj->usage_detailed,
            ];
        } catch (Exception $e) {
            return self::getDefaultFailedResponse($system_prompt, $user_prompt, $e);
        }
    }
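
    /**
     * Zero-cost response object returned when a completion fails, carrying the
     * original prompts and, if available, the exception message.
     */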
    private static function getDefaultFailedResponse($system_prompt, $user_prompt, $exception = null)
    {
        $exception_message = null;
        if (! is_null($exception)) {
            $exception_message = $exception->getMessage();
        }

        return (object) [
            'exception' => $exception_message,
            'prompts' => (object) [
                'system_prompt' => $system_prompt,
                'user_prompt' => $user_prompt,
            ],
            'cost' => 0,
            'output' => null,
            'token_usage' => 0,
            'token_usage_detailed' => (object) [
                'completion_tokens' => 0,
                'prompt_tokens' => 0,
                'total_tokens' => 0,
            ],
        ];
    }
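
    /**
     * Convert a token count into a dollar cost given a per-thousand-token rate.
     */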
    private static function getCostUsage($token_usage, $cost_per_thousand_tokens)
    {
        return ($token_usage / 1000) * $cost_per_thousand_tokens;
    }
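
    /**
     * Best-effort cleanup of the model's JSON reply before decoding; currently
     * only strips raw newlines.
     */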
    private static function jsonFixer($json_string)
    {
        $json_string = str_replace("\n", '', $json_string);

        // try {
        //     return (new JsonFixer)->fix($json_string);
        // } catch (Exception $e) {
        // }

        return $json_string;
    }
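
    /**
     * Low-level call to the OpenAI chat completions endpoint. Returns an object
     * with the total token usage, the detailed usage block, and the reply text;
     * throws on an API error or transport failure.
     */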
    public static function chatCompletionApi($system_prompt, $user_prompt, $model, $max_token = 2500, $response_format = 'text', $timeout = 800)
    {
        $arr = [
            'model' => $model,
            'max_tokens' => $max_token,
            'messages' => [
                ['role' => 'system', 'content' => $system_prompt],
                ['role' => 'user', 'content' => $user_prompt],
            ],
        ];

        if ($response_format === 'json_object') {
            $arr['response_format'] = (object) [
                'type' => 'json_object',
            ];
        }

        try {
            $response = Http::timeout($timeout)
                ->withToken(config('platform.ai.openai.api_key'))
                ->post('https://api.openai.com/v1/chat/completions', $arr);

            $json_response = json_decode($response->body());

            if (isset($json_response->error)) {
                Log::error(serialize($json_response));
                throw new Exception(serialize($json_response->error));
            }

            return (object) [
                'usage' => $json_response?->usage?->total_tokens,
                'usage_detailed' => $json_response?->usage,
                'reply' => $json_response?->choices[0]?->message?->content,
            ];
        } catch (Exception $e) {
            // inspector()->reportException($e);
            throw $e;
        }
    }
}
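
// Illustrative usage (a sketch, not part of the class; assumes the "platform.ai.*"
// config entries referenced above are defined and an OpenAI API key is configured;
// $articleText is a hypothetical variable holding the article body):
//
//     $result = \App\Helpers\FirstParty\OpenAI\OpenAI::getArticleMeta($articleText);
//
//     if (! is_null($result->output)) {
//         $title = $result->output->title; // field from the model's JSON reply
//         $cost  = $result->cost;          // USD cost of the call
//     }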