Sync
@@ -8,85 +8,152 @@
class OpenAI
{
    public static function writeProductArticle($excerpt, $photos, $categories)
    public static function getSiteSummary($parent_categories, $user_prompt, $model_max_tokens = 1536, $timeout = 60)
    {
        //$excerpt = substr($excerpt, 0, 1500);
        $openai_config = 'openai-gpt-3-5-turbo-1106';

        $category_str = implode('|', $categories);
        $category_list = implode('|', $parent_categories->pluck('name')->toArray());

        $system_prompt = '
You are tasked with writing a product introduction & review article using the provided excerpt. Write as if you are reviewing the product by a third party, avoiding pronouns. Emphasize the product\'s performance, features, and notable aspects. Do not mention marketplace-related information. Return the output as a minified JSON in this format:
{"category": "($category_str)","title": "(Start with product name, 60-70 characters)","excerpt": "(150-160 characters, do not start with a verb)","cliffhanger": "(70-80 characters, enticing sentence)","body": "(Markdown, 700-900 words)"}
$system_prompt = "Based on the website content containing an AI tool, return a valid JSON containing:\n{\n\"is_ai_tool\":(true|false),\n\"ai_tool_name\":\"(AI Tool Name)\",\n\"is_app_web_both\":\"(app|web|both)\",\n\"tagline\":\"(One line tagline in 6-8 words)\",\n\"summary\": \"(Summary of AI tool in 2-3 parapgraphs, 140-180 words using grade 8 US english)\",\n\"pricing_type\": \"(Free|Free Trial|Freemium|Subscription|Usage Based)\",\n\"main_category\": \"(AI Training|Art|Audio|Avatars|Business|Chatbots|Coaching|Content Generation|Data|Dating|Design|Dev|Education|Emailing|Finance|Gaming|GPTs|Health|Legal|Marketing|Music|Networking|Personal Assistance|Planning|Podcasting|Productivity|Project Management|Prompting|Reporting|Research|Sales|Security|SEO|Shopping|Simulation|Social|Speech|Support|Task|Testing|Training|Translation|UI\/UX|Video|Workflow|Writing)\",\n\"keywords\":[\"(Identify relevant keywords for this AI Tool, 1-2 words each, at least)\"],\n\"qna\":[{\"q\":\"Typical FAQ that readers want to know, up to 5 questions\",\"a\":\"Answer of the question\"}]\n}";
|
||||
|
||||
Mandatory Requirements:
- Language: US grade 8-9 English
- Use these sections when applicable:
-- ### Introduction
-- ### Overview
-- ### Specifications (use valid Markdown table. Two columns: Features and Specifications. Use `| --- | --- |` as a separator.)
-- ### Price (in given currency)
-- ### (Choose one: Should I Buy?, Conclusion, Final Thoughts, Our Verdict)
- Only use facts from the provided excerpt
- Don\'t use titles inside the markdown body
- Use \'###\' for all article sections
- Pick the closest provided category
- Do not use newline in the JSON structure
';
        return self::getChatCompletion($user_prompt, $system_prompt, $openai_config, $model_max_tokens, $timeout);
    }

$user_prompt = "EXCERPT\n------------\n{$excerpt}\n";
|
||||
private static function getChatCompletion($user_prompt, $system_prompt, $openai_config, $model_max_tokens, $timeout, $response_format = 'json_object')
|
||||
{
|
||||
$model = config("platform.ai.{$openai_config}.model");
|
||||
$input_cost_per_thousand_tokens = config("platform.ai.{$openai_config}.input_cost_per_thousand_tokens");
|
||||
$output_cost_per_thousand_tokens = config("platform.ai.{$openai_config}.output_cost_per_thousand_tokens");
|
||||
|
||||
if (count($photos) > 0) {
|
||||
$system_prompt .= '- Include 3 markdown images with the article title as caption in every section, excluding Introduction.\n';
|
||||
$user_prompt .= "\n\MARKDOWN IMAGES\n------------\n";
|
||||
foreach ($photos as $photo) {
|
||||
$user_prompt .= "{$photo}\n";
|
||||
}
|
||||
}
|
||||
$output_token = 1280;
|
||||
|
||||
        $output = (self::chatCompletion($system_prompt, $user_prompt, 'gpt-3.5-turbo', 1500));
        try {

            // dump($user_prompt);
            // dd($output);
            $obj = self::chatCompletionApi($system_prompt, $user_prompt, $model, $output_token, $response_format, $timeout);

            if (! is_null($output)) {
                try {
                    return json_decode($output, false, 512, JSON_THROW_ON_ERROR);
                } catch (Exception $e) {
                    Log::error($output);
                    inspector()->reportException($e);
            $input_cost = self::getCostUsage($obj->usage_detailed->prompt_tokens, $input_cost_per_thousand_tokens);
            $output_cost = self::getCostUsage($obj->usage_detailed->completion_tokens, $output_cost_per_thousand_tokens);

                    return null;
            $output = $obj->reply;

            if ($response_format == 'json_object') {
                $output = json_decode(self::jsonFixer($obj->reply), false, 512, JSON_THROW_ON_ERROR);
            }

            return (object) [
                'prompts' => (object) [
                    'system_prompt' => $system_prompt,
                    'user_prompt' => $user_prompt,
                ],
                'cost' => $input_cost + $output_cost,
                'output' => $output,
                'token_usage' => $obj->usage,
                'token_usage_detailed' => $obj->usage_detailed,
            ];
        } catch (Exception $e) {
            return self::getDefaultFailedResponse($system_prompt, $user_prompt, $e);
        }

        return null;
        return self::getDefaultFailedResponse($system_prompt, $user_prompt);

    }

    public static function chatCompletion($system_prompt, $user_prompt, $model, $max_token = 2500)
    private static function getDefaultFailedResponse($system_prompt, $user_prompt, $exception = null)
    {
        $exception_message = null;

        if (! is_null($exception)) {
            $exception_message = $exception->getMessage();
        }

        return (object) [
            'exception' => $exception_message,
            'prompts' => (object) [
                'system_prompt' => $system_prompt,
                'user_prompt' => $user_prompt,
            ],
            'cost' => 0,
            'output' => null,
            'token_usage' => 0,
            'token_usage_detailed' => (object) [
                'completion_tokens' => 0,
                'prompt_tokens' => 0,
                'total_tokens' => 0,
            ],
        ];
    }

    private static function getCostUsage($token_usage, $cost_per_thousand_tokens)
    {
        $calc = $token_usage / 1000;

        return $calc * $cost_per_thousand_tokens;
    }

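    // Illustrative arithmetic only (token counts and rates below are assumed, not from this commit):
    // with 1,120 prompt tokens at $0.0010 per thousand and 640 completion tokens at $0.0020 per thousand,
    // getCostUsage() gives
    //   input:  1120 / 1000 * 0.0010 = 0.00112
    //   output:  640 / 1000 * 0.0020 = 0.00128
    // so getChatCompletion() would report 'cost' => 0.0024.
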
    private static function jsonFixer($json_string)
    {
        $json_string = str_replace("\n", '', $json_string);

        // try {
        // return (new JsonFixer)->fix($json_string);
        // }
        // catch(Exception $e) {

        // }
        return $json_string;

    }

    public static function chatCompletionApi($system_prompt, $user_prompt, $model, $max_token = 2500, $response_format = 'text', $timeout = 800)
    {

        if ($response_format == 'json_object') {
            $arr = [
                'model' => $model,
                'max_tokens' => $max_token,
                'response_format' => (object) [
                    'type' => 'json_object',
                ],
                'messages' => [
                    ['role' => 'system', 'content' => $system_prompt],
                    ['role' => 'user', 'content' => $user_prompt],
                ],
            ];
        } else {
            $arr = [
                'model' => $model,
                'max_tokens' => $max_token,
                'messages' => [
                    ['role' => 'system', 'content' => $system_prompt],
                    ['role' => 'user', 'content' => $user_prompt],
                ],
            ];
        }

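        // With $response_format = 'json_object', $arr above serializes to a request body
        // along these lines (model name and token limit are illustrative):
        //   {"model":"gpt-3.5-turbo-1106","max_tokens":1280,
        //    "response_format":{"type":"json_object"},
        //    "messages":[{"role":"system","content":"..."},{"role":"user","content":"..."}]}
        // The 'response_format' key is what asks the Chat Completions API to return valid JSON.
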
        try {
            $response = Http::timeout(800)->withToken(config('platform.ai.openai.api_key'))
                ->post('https://api.openai.com/v1/chat/completions', [
                    'model' => $model,
                    'max_tokens' => $max_token,
                    'messages' => [
                        ['role' => 'system', 'content' => $system_prompt],
                        ['role' => 'user', 'content' => $user_prompt],
                    ],
                ]);
            $response = Http::timeout($timeout)->withToken(config('platform.ai.openai.api_key'))
                ->post('https://api.openai.com/v1/chat/completions', $arr);

            //dd($response->body());
            $json_response = json_decode($response->body());

            $json_response = json_decode($response->body(), false, 512, JSON_THROW_ON_ERROR);
            //dump($json_response);

            $reply = $json_response?->choices[0]?->message?->content;
            if (isset($json_response->error)) {
                Log::error(serialize($json_response));
                throw new Exception(serialize($json_response->error));
            }

            return $reply;
            $obj = (object) [
                'usage' => $json_response?->usage?->total_tokens,
                'usage_detailed' => $json_response?->usage,
                'reply' => $json_response?->choices[0]?->message?->content,

            ];

            return $obj;
        } catch (Exception $e) {
            Log::error($response->body());
            inspector()->reportException($e);
            ////dd($response->body());
            //inspector()->reportException($e);
            throw ($e);
        }

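A minimal caller sketch for the new getSiteSummary() entry point, not part of this commit: it assumes the platform.ai.openai-gpt-3-5-turbo-1106 and platform.ai.openai.api_key config entries referenced above exist, that $parent_categories is a collection whose items expose a name attribute for pluck('name'), and that $site_text is a placeholder for the scraped page content passed in the user prompt.

// Illustrative usage only; prompt framing and variable names are assumptions.
$user_prompt = "WEBSITE CONTENT\n------------\n{$site_text}\n";
$result = OpenAI::getSiteSummary($parent_categories, $user_prompt);

if (is_null($result->output)) {
    // Failure path (see getDefaultFailedResponse): cost is 0 and token usage is zeroed.
    Log::warning('Site summary failed: '.($result->exception ?? 'no exception recorded'));
} else {
    // Success path: output is the decoded JSON object requested by the system prompt.
    $tagline = $result->output->tagline;
    $cost = $result->cost; // computed via getCostUsage() from the configured per-thousand-token rates
}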