Add (initial): futurewalker code

2023-11-20 00:15:18 +08:00
parent f8602cb456
commit 9ce3e5c82a
166 changed files with 15941 additions and 1072 deletions


@@ -5,118 +5,184 @@
use Exception;
use Illuminate\Support\Facades\Http;
use Illuminate\Support\Facades\Log;
use Illuminate\Support\Str;
class OpenAI
{
public static function writeArticle($title, $description, $article_type, $min, $max)
public static function getArticleMeta($user_prompt, $model_max_tokens = 1536, $timeout = 60)
{
$system_prompt = "
Using the general article structure, please create a Markdown-format article on the given topic. The article should prioritize accuracy and provide genuine value to readers. Avoid making assumptions or adding unverified facts. Ensure the article is between {$min}-{$max} words. Write to an 8th & 9th grade US English standard.\n\n
Structure:\n\n
Title\n
Provide a headline that captures the essence of the article's focus and is tailored to the reader's needs.\n\n
Introduction\n
Offer a brief overview or background of the topic, ensuring it's engaging and invites readers to continue reading.\n\n
Main Body\n\n
Subsection\n
Introduce foundational information about the topic, ensuring content is accurate and based on known facts. Avoid generic or speculative statements.\n\n
Subsection (if applicable)\n
If helpful, use Markdown to create tables to convey comparison of data. Ensure data is accurate and relevant to the reader.\n\n
Subsection\n
Dive deep into primary elements or facets of the topic, ensuring content is factual and offers value.\n\n
Subsection\n
Discuss real-world applications or significance, highlighting practical implications or actionable insights for the reader.\n\n
Subsection (optional)\n
Provide context or relate the topic to relevant past events or trends, making it relatable and more comprehensive.\n\n
Subsection (if applicable)\n
Predict outcomes, trends, or ramifications, but ensure predictions are rooted in known information or logical reasoning.\n\n
Subsection\n
Summarise key points or lessons, ensuring they resonate with the initial objectives of the article and provide clear takeaways.\n\n
Conclusion\n
Revisit main points and offer final thoughts or recommendations that are actionable and beneficial to the reader.\n\n
FAQs (optional)\n
Address anticipated questions or misconceptions about the topic. Prioritize questions that readers are most likely to have and provide clear, concise answers based on factual information.\n
Q: Question\n\n
A: Answer\n
";
$user_prompt = "Title: {$title}\nDescription: {$description}\nArticleType: {$article_type}";
$openai_config = 'openai-gpt-4-turbo';
return self::chatCompletion($system_prompt, $user_prompt, 'gpt-3.5-turbo', 1200);
$system_prompt = "Based on given article, populate the following in valid JSON format\n{\n\"title\":\"(Title based on article)\",\n\"keywords\":[\"(Important keywords in 1-2 words per keyword)\"],\n\"category\":\"(Updates|Opinions|Features|New Launches|How Tos|Reviews)\",\n\"summary\":\"(Summarise article in 80-100 words to help readers understand what article is about)\",\n\"entities\":[(List of companies, brands that are considered as main entites in 1-2 words. per entity)],\n\"society_impact\":[\"(Explain how this article content's can impact society on AI and\/or technology aspect )\"],\n\"society_impact_level:\"(low|medium|high)\"\n}";
return self::getChatCompletion($user_prompt, $system_prompt, $openai_config, $model_max_tokens, $timeout);
}
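// Hypothetical call site (sketch only, not part of this commit): feed the generated Markdown back in
// to obtain structured metadata. ->output follows the JSON schema declared in the system prompt above
// and is null when the request failed.
// $meta = OpenAI::getArticleMeta($articleMarkdown);
// if (! is_null($meta->output)) {
//     $category = $meta->output->category; // e.g. "Reviews"
//     $cost = $meta->cost; // estimated USD spend for this call
// }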
public static function createNewArticleTitle($current_title, $supporting_data)
public static function writeArticle($user_prompt, $model_max_tokens = 1536, $timeout = 180)
{
$system_prompt = "Based on provided article title, identify the main keyword in 1-2 words. Once identified, use the main keyword only to generate an easy-to-read unique, helpful article title.\n\n
Requirements:\n
2 descriptive photos keywords to represent article title when put together side-by-side\n
No realtime information required\n
No guides and how tos\n
No punctuation in titles especially colons :\n
90-130 characters\n\n
return in following json format {\"main_keyword\":\"(Main Keyword)\",\"title\":\"(Title in 90-130 letters)\",\"short_title\":\"(Short Title in 30-40 letters)\",\"article_type\":\"(How-tos|Guides|Interview|Review|Commentary|Feature|News|Editorial|Report|Research|Case-study|Overview|Tutorial|Update|Spotlight|Insights)\",\"description\":\"(Summarize in max 120 letters, add cliffhanger is possible to attract readers)\",\"photo_keywords\":[\"photo keyword 1\",\"photo keyword 2\"]}";
$openai_config = 'openai-gpt-3-5-turbo-1106';
$supporting_data = Str::substr($supporting_data, 0, 2100);
$system_prompt = "Write a news article in US grade 9 English, approximately 600-800 words, formatted in Markdown. \n\nIMPORTANT RULES\n- Do not add photos, publish date, or author\n- Only have 1 heading, which is the article title\n- Write in the following article structure:\n# Main article title\n\nParagraph 1\n\nParagraph 2\n\nParagraph 3, etc.\n\nConclusion";
$user_prompt = "Article Title: {$current_title}\n Article Description: {$supporting_data}\n";
$reply = self::chatCompletion($system_prompt, $user_prompt, 'gpt-3.5-turbo', 900);
try {
return json_decode($reply, false);
} catch (Exception $e) {
return null;
}
return self::getChatCompletion($user_prompt, $system_prompt, $openai_config, $model_max_tokens, $timeout, 'text');
}
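// Hypothetical usage (sketch): this call requests the 'text' response format, so ->output is the raw
// Markdown article string rather than a decoded JSON object (null when the request failed).
// $draft = OpenAI::writeArticle("Article Title: ...\nKey facts: ...");
// $markdown = $draft->output;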
public static function suggestArticleTitles($current_title, $supporting_data, $suggestion_counts)
public static function titleSuggestions($user_prompt, $model_max_tokens = 512, $timeout = 60)
{
$system_prompt = "Based on provided article title, identify the main keyword in 1-2 words. Once identified, use the main keyword only to generate {$suggestion_counts} easy-to-read unique, helpful title articles.\n\n
Requirements:\n
2 descriptive photos keywords to represent article title when put together side-by-side\n
No realtime information required\n
No guides and how tos\n
No punctuation in titles especially colons :\n
90-130 characters\n\n
return in following json format {\"main_keyword\":\"(Main Keyword)\",\"suggestions\":[{\"title\":\"(Title in 90-130 letters)\",\"short_title\":\"(Short Title in 30-40 letters)\",\"article_type\":\"(How-tos|Guides|Interview|Review|Commentary|Feature|News|Editorial|Report|Research|Case-study|Overview|Tutorial|Update|Spotlight|Insights)\",\"description\":\"(SEO description based on main keyword)\",\"photo_keywords\":[\"photo keyword 1\",\"photo keyword 2\"]}]}";
$openai_config = 'openai-gpt-3-5-turbo-1106';
$user_prompt = "Article Title: {$current_title}\n Article Description: {$supporting_data}\n";
$system_prompt = "1. identify meaningful & potential keywords in this blog post article title. also estimate other related keywords to the title.\n\n2. using identify keywords, propose search queries i can use to find relevant articles online\n\n3. recommend writing tone that will entice readers.\n\n4. using identified keywords, propose article headings with key facts to highlight for this article, without reviews\n\n\nreturn all content in json: {\n\"identified_keywords\":[],\n\"related_keywords\":[],\n\"proposed_search_queries\":[],\n\"writing_tone\":[],\n\"article_headings\":[],\n}";
$reply = self::chatCompletion($system_prompt, $user_prompt, 'gpt-3.5-turbo');
return self::getChatCompletion($user_prompt, $system_prompt, $openai_config, $model_max_tokens, $timeout);
}
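// Sketch of the expected result: ->output decodes to the JSON object requested in the system prompt,
// e.g. $res->output->identified_keywords, $res->output->proposed_search_queries, $res->output->article_headings.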
public static function topTitlePicksById($user_prompt, $model_max_tokens = 256, $timeout = 60)
{
$openai_config = 'openai-gpt-4-turbo';
$system_prompt = 'Pick 10-15 unique articles that are focused on different product launches, ensuring each is interesting, informative, and casts a positive light on the technology and AI industry. Avoid selecting multiple articles that center around the same product or feature. Ensure that titles selected do not share primary keywords—such as the name of a product or specific technology feature—and strictly return a list of IDs only, without title, strictly in this JSON format: {"ids":[]}. Titles should represent a diverse range of topics and products within the technology and AI space without repetition.';
// Overrides the 60-second $timeout default with an 800-second timeout for this call.
return self::getChatCompletion($user_prompt, $system_prompt, $openai_config, $model_max_tokens, 800);
}
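// Sketch of the expected result: per the prompt's JSON contract, ->output should decode to
// something like (object) ['ids' => [3, 17, 42]] (illustrative IDs only).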
private static function getChatCompletion($user_prompt, $system_prompt, $openai_config, $model_max_tokens, $timeout, $response_format = 'json_object')
{
$model = config("platform.ai.{$openai_config}.model");
$input_cost_per_thousand_tokens = config("platform.ai.{$openai_config}.input_cost_per_thousand_tokens");
$output_cost_per_thousand_tokens = config("platform.ai.{$openai_config}.output_cost_per_thousand_tokens");
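// Assumed shape of the config entries read above (illustrative values, not taken from this commit):
// 'openai-gpt-4-turbo' => [
//     'model' => 'gpt-4-1106-preview',
//     'input_cost_per_thousand_tokens' => 0.01,
//     'output_cost_per_thousand_tokens' => 0.03,
// ],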
$output_token = $model_max_tokens; // completion-token budget requested by the caller
try {
return json_decode($reply, false);
$obj = self::chatCompletionApi($system_prompt, $user_prompt, $model, $output_token, $response_format, $timeout);
$input_cost = self::getCostUsage($obj->usage_detailed->prompt_tokens, $input_cost_per_thousand_tokens);
$output_cost = self::getCostUsage($obj->usage_detailed->completion_tokens, $output_cost_per_thousand_tokens);
$output = $obj->reply;
if ($response_format == 'json_object') {
$output = json_decode(self::jsonFixer($obj->reply), false, 512, JSON_THROW_ON_ERROR);
}
return (object) [
'prompts' => (object) [
'system_prompt' => $system_prompt,
'user_prompt' => $user_prompt,
],
'cost' => $input_cost + $output_cost,
'output' => $output,
'token_usage' => $obj->usage,
'token_usage_detailed' => $obj->usage_detailed,
];
} catch (Exception $e) {
return null;
return self::getDefaultFailedResponse($system_prompt, $user_prompt, $e);
}
return self::getDefaultFailedResponse($system_prompt, $user_prompt);
}
public static function chatCompletion($system_prompt, $user_prompt, $model, $max_token = 2500)
private static function getDefaultFailedResponse($system_prompt, $user_prompt, $exception = null)
{
$exception_message = null;
if (! is_null($exception)) {
$exception_message = $exception->getMessage();
}
return (object) [
'exception' => $exception_message,
'prompts' => (object) [
'system_prompt' => $system_prompt,
'user_prompt' => $user_prompt,
],
'cost' => 0,
'output' => null,
'token_usage' => 0,
'token_usage_detailed' => (object) [
'completion_tokens' => 0,
'prompt_tokens' => 0,
'total_tokens' => 0,
],
];
}
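// Callers can detect a failed call by checking for a null ->output (and ->exception when present);
// cost and token counters are zeroed so downstream aggregation keeps working.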
private static function getCostUsage($token_usage, $cost_per_thousand_tokens)
{
$calc = $token_usage / 1000;
return $calc * $cost_per_thousand_tokens;
}
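// Worked example (illustrative pricing): 1,234 prompt tokens at an assumed $0.01 per 1K tokens
// costs (1234 / 1000) * 0.01 = $0.01234.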
private static function jsonFixer($json_string)
{
$json_string = str_replace("\n", '', $json_string);
// try {
// return (new JsonFixer)->fix($json_string);
// }
// catch(Exception $e) {
// }
return $json_string;
}
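// Note: this currently only strips newline characters from the model's reply before json_decode();
// the commented-out JsonFixer block above appears intended to repair malformed JSON if re-enabled.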
public static function chatCompletionApi($system_prompt, $user_prompt, $model, $max_token = 2500, $response_format = 'text', $timeout = 800)
{
$arr = [
'model' => $model,
'max_tokens' => $max_token,
'messages' => [
['role' => 'system', 'content' => $system_prompt],
['role' => 'user', 'content' => $user_prompt],
],
];
if ($response_format == 'json_object') {
$arr['response_format'] = (object) [
'type' => 'json_object',
];
}
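// For reference, the request body sent below looks roughly like this in JSON mode (values illustrative):
// {
//   "model": "gpt-4-1106-preview",
//   "max_tokens": 1536,
//   "response_format": {"type": "json_object"},
//   "messages": [
//     {"role": "system", "content": "..."},
//     {"role": "user", "content": "..."}
//   ]
// }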
try {
$response = Http::timeout(800)->withToken(config('platform.ai.openai.api_key'))
->post('https://api.openai.com/v1/chat/completions', [
'model' => $model,
'max_tokens' => $max_token,
'messages' => [
['role' => 'system', 'content' => $system_prompt],
['role' => 'user', 'content' => $user_prompt],
],
]);
$response = Http::timeout($timeout)->withToken(config('platform.ai.openai.api_key'))
->post('https://api.openai.com/v1/chat/completions', $arr);
$json_response = json_decode($response->body());
$reply = $json_response?->choices[0]?->message?->content;
//dump($json_response);
return $reply;
if (isset($json_response->error)) {
Log::error(serialize($json_response));
throw new Exception(serialize($json_response->error));
}
$obj = (object) [
'usage' => $json_response?->usage?->total_tokens,
'usage_detailed' => $json_response?->usage,
'reply' => $json_response?->choices[0]?->message?->content,
];
return $obj;
} catch (Exception $e) {
Log::error($response->body());
inspector()->reportException($e);
////dd($response->body());
//inspector()->reportException($e);
throw ($e);
}
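// Hypothetical usage (sketch only; assumes platform.ai.openai.api_key is configured):
// $res = OpenAI::chatCompletionApi('You are a helpful editor.', 'Say hello.', 'gpt-3.5-turbo', 64);
// $res->reply gives the assistant message content; $res->usage the total tokens;
// $res->usage_detailed the prompt/completion/total token breakdown.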