Add (article): ai gen, front views
This commit is contained in:
243
app/Jobs/Tasks/GenerateArticleFeaturedImageTask.php
Normal file
243
app/Jobs/Tasks/GenerateArticleFeaturedImageTask.php
Normal file
@@ -0,0 +1,243 @@
|
||||
<?php

namespace App\Jobs\Tasks;

use App\Helpers\FirstParty\OSSUploader\OSSUploader;
use App\Helpers\ThirdParty\DFS\SettingSerpLiveAdvanced;
use App\Models\NewsSerpResult;
use DFSClientV3\DFSClient;
use Exception;
use Illuminate\Support\Facades\Http;
use Image;
use JsonException;

/**
 * Builds a featured image for a post: pulls a Google Images SERP for the
 * post's main keyword, composites the top results side-by-side onto a
 * 720x405 canvas, overlays the article type badge and the short title,
 * then uploads both the full-size JPEG and a tiny LQIP placeholder to
 * object storage ('r2') and publishes the post.
 */
class GenerateArticleFeaturedImageTask
{
    /** Maximum number of candidate image URLs kept from the SERP response. */
    private const MAX_IMAGE_URLS = 20;

    /** How many source images are placed side-by-side on the canvas. */
    private const IMAGES_PER_CANVAS = 2;

    /**
     * @param  mixed  $post  Post-like model exposing main_keyword, short_title,
     *                       type and slug; featured_image and status are
     *                       written back on success.
     * @return mixed The saved $post on success, null on failure.
     *
     * @throws Exception 'Data failed' when the SERP response is invalid, or
     *                   'Uploading failed' when archiving the raw SERP JSON fails.
     */
    public static function handle($post)
    {
        $keyword = $post->main_keyword;
        $title = $post->short_title;
        $article_type = $post->type;
        $country_iso = 'US';
        $country_name = get_country_name_by_iso($country_iso);

        $images = [];

        // NOTE(review): $client is never referenced below; presumably the
        // constructor registers the DFS credentials that
        // SettingSerpLiveAdvanced relies on — confirm before removing.
        $client = new DFSClient(
            config('dataforseo.login'),
            config('dataforseo.password'),
            config('dataforseo.timeout'),
            config('dataforseo.api_version'),
            config('dataforseo.url'),
        );

        // Live Google Images SERP for the keyword/location.
        $serp_model = new SettingSerpLiveAdvanced();

        $serp_model->setSe('google');
        $serp_model->setSeType('images');
        $serp_model->setKeyword($keyword);
        $serp_model->setLocationName($country_name);
        $serp_model->setDepth(100);
        $serp_model->setLanguageCode('en');
        $serp_res = $serp_model->getAsJson();

        try {
            $serp_obj = json_decode($serp_res, false, 512, JSON_THROW_ON_ERROR);
        } catch (JsonException $e) {
            // Malformed provider payload: surface it through the same failure
            // path callers already handle for a bad status code. (The original
            // left JSON_THROW_ON_ERROR uncaught after commenting out its
            // try/catch.)
            throw new Exception('Data failed', 1, $e);
        }

        if ($serp_obj?->status_code == 20000) {
            $json_file_name = config('platform.dataset.news.images_serp.file_prefix').str_slug($keyword).'-'.epoch_now_timestamp().'.json';

            // Archive the raw SERP payload before consuming it.
            $upload_status = OSSUploader::uploadJson(
                config('platform.dataset.news.images_serp.driver'),
                config('platform.dataset.news.images_serp.path'),
                $json_file_name,
                $serp_obj);

            if ($upload_status) {
                $news_serp_result = new NewsSerpResult();
                $news_serp_result->serp_provider = 'dfs';
                $news_serp_result->serp_se = 'google';
                $news_serp_result->serp_se_type = 'images';
                $news_serp_result->serp_keyword = $keyword;
                $news_serp_result->serp_country_iso = strtoupper($country_iso);
                $news_serp_result->serp_cost = $serp_obj?->cost;
                $news_serp_result->result_count = $serp_obj?->tasks[0]?->result[0]?->items_count;
                $news_serp_result->filename = $json_file_name;
                $news_serp_result->status = 'initial';

                if ($news_serp_result->save()) {
                    $serp_items = $serp_obj?->tasks[0]?->result[0]?->items;

                    // Collect up to MAX_IMAGE_URLS candidate source images.
                    // (Fixes the original `count($images) > 20`, which kept 21.)
                    foreach ($serp_items as $item) {
                        if ($item->type == 'images_search') {
                            $images[] = $item->source_url;

                            if (count($images) >= self::MAX_IMAGE_URLS) {
                                break;
                            }
                        }
                    }
                }
            } else {
                throw new Exception('Uploading failed', 1);
            }
        } else {
            throw new Exception('Data failed', 1);
        }

        if (count($images) === 0) {
            // Guard: with no images the per-slot width below would divide by
            // zero (the original crashed with a DivisionByZeroError here),
            // and the canvas would be blank anyway.
            return null;
        }

        $numImagesInCanvas = min(self::IMAGES_PER_CANVAS, count($images));

        $canvasWidth = 720;
        $canvasHeight = 405;

        $canvas = Image::canvas($canvasWidth, $canvasHeight);

        // Each placed image gets an equal horizontal slot.
        $imageWidth = $canvasWidth / $numImagesInCanvas;

        $xOffset = 0; // Horizontal offset of the next slot.
        $placed = 0;  // Images successfully downloaded and inserted.

        foreach ($images as $url) {
            if ($placed >= $numImagesInCanvas) {
                // Canvas is full. The remaining URLs exist only as download
                // fallbacks; the original kept fetching them and inserted
                // them past the right edge of the canvas.
                break;
            }

            try {
                $imageResponse = Http::timeout(300)->withHeaders([
                    'User-Agent' => 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36',
                ])->get($url);

                // Scale to canvas height, then pad/crop to the slot width.
                $image = Image::make($imageResponse->body())
                    ->resize(null, $canvasHeight, function ($constraint) {
                        $constraint->aspectRatio();
                    })
                    ->resizeCanvas($imageWidth, $canvasHeight, 'center', false, [255, 255, 255, 0]);

                $canvas->insert($image, 'top-left', $xOffset, 0);
                $xOffset += $imageWidth;
                $placed++;
            } catch (Exception $e) {
                // Unreachable or broken source image: try the next candidate.
                continue;
            }
        }

        // --- Text overlays -------------------------------------------------

        $fontSize = 28;            // Title font size (px).
        $articleTypeFontSize = 24; // Article-type badge font size (px).
        $padding = 15;

        $fontPath = resource_path('fonts/Inter/Inter-Black.ttf');

        // Greedy word-wrap: pack words into lines no wider than 90% of the
        // canvas, measured with the actual font.
        $words = explode(' ', $title);
        $lines = [''];
        $currentLineIndex = 0;

        foreach ($words as $word) {
            $potentialLine = $lines[$currentLineIndex] ? $lines[$currentLineIndex].' '.$word : $word;

            $box = imagettfbbox($fontSize, 0, $fontPath, $potentialLine);
            $textWidth = abs($box[2] - $box[0]);

            if ($textWidth < $canvasWidth * 0.9) {
                $lines[$currentLineIndex] = $potentialLine;
            } else {
                $currentLineIndex++;
                $lines[$currentLineIndex] = $word;
            }
        }

        // Measure the article-type badge text.
        $articleTypeBox = imagettfbbox($articleTypeFontSize, 0, $fontPath, $article_type);
        $articleTypeWidth = abs($articleTypeBox[2] - $articleTypeBox[0]);
        $articleTypeHeight = abs($articleTypeBox[7] - $articleTypeBox[1]);

        // The badge sits just above the title band at the canvas bottom.
        $articleOverlayStartY = $canvasHeight - ($fontSize * count($lines)) - ($articleTypeHeight + 4 * $padding);

        // Badge background (semi-opaque white).
        $overlayWidth = $articleTypeWidth + 2 * $padding;
        $canvas->rectangle(20, $articleOverlayStartY, 10 + $overlayWidth, $articleOverlayStartY + $articleTypeHeight + 2 * $padding, function ($draw) {
            $draw->background([255, 255, 255, 0.8]);
        });

        // Badge text, centered inside its background.
        $textStartX = 20 + ($overlayWidth - $articleTypeWidth) / 2;
        $canvas->text(strtoupper($article_type), $textStartX, $articleOverlayStartY + ($articleTypeHeight + 2 * $padding) / 2, function ($font) use ($articleTypeFontSize, $fontPath) {
            $font->file($fontPath);
            $font->size($articleTypeFontSize);
            $font->color('#0000FF');
            $font->align('left');
            $font->valign('middle'); // Vertically centered within the badge.
        });

        // Title band background (semi-opaque blue) down to the canvas bottom.
        $titleOverlayStartY = $articleOverlayStartY + $articleTypeHeight + 2 * $padding;

        $canvas->rectangle(0, $titleOverlayStartY, $canvasWidth, $canvasHeight, function ($draw) {
            $draw->background([0, 0, 255, 0.5]);
        });

        // Draw each wrapped title line, horizontally centered.
        $yPosition = $titleOverlayStartY + $padding;
        foreach ($lines as $line) {
            $canvas->text($line, $canvasWidth / 2, $yPosition, function ($font) use ($fontSize, $fontPath) {
                $font->file($fontPath);
                $font->size($fontSize);
                $font->color('#FFFFFF');
                $font->align('center');
                $font->valign('top');
            });
            $yPosition += $fontSize + $padding;
        }

        // --- Upload --------------------------------------------------------

        $filename = $post->slug.'-'.epoch_now_timestamp().'.jpg';

        $ok = OSSUploader::uploadFile('r2', 'post_images/', $filename, (string) $canvas->stream('jpeg'));

        // LQIP: a 10x10, heavily compressed placeholder of the same image.
        $lqipImage = clone $canvas;

        $lqipImage->fit(10, 10, function ($constraint) {
            $constraint->aspectRatio();
        });

        $lqipImage->encode('jpg', 5);

        $lqip_filename = $post->slug.'-'.epoch_now_timestamp().'_lqip.jpg';

        $lqip_ok = OSSUploader::uploadFile('r2', 'post_images/', $lqip_filename, (string) $lqipImage->stream('jpeg'));

        if ($ok && $lqip_ok) {
            $post->featured_image = 'post_images/'.$filename;
            $post->status = 'publish';
            $post->save();

            return $post;
        }

        return null;
    }
}
|
||||
82
app/Jobs/Tasks/GenerateArticleTask.php
Normal file
82
app/Jobs/Tasks/GenerateArticleTask.php
Normal file
@@ -0,0 +1,82 @@
|
||||
<?php

namespace App\Jobs\Tasks;

use App\Helpers\FirstParty\OpenAI\OpenAI;
use App\Models\Author;
use App\Models\Post;
use App\Models\PostCategory;
use App\Models\SerpUrl;
use Exception;

/**
 * Turns a crawled SERP URL into a draft article: asks OpenAI for title
 * suggestions, picks one at random, has OpenAI write the article body, and
 * persists it as a draft Post linked to the SERP URL's category.
 *
 * The SerpUrl's process_status records the outcome:
 *   1  success
 *  -1  picking a suggestion failed
 *  -2  no title suggestions returned
 *  -3  no suggestion selected
 *  -4  article body empty
 *  -5  category link failed to save
 *  -6  post failed to save
 */
class GenerateArticleTask
{
    /**
     * @param  SerpUrl  $serp_url  Source SERP result (title + description feed the prompts).
     * @return int The process_status that was persisted on $serp_url.
     */
    public static function handle(SerpUrl $serp_url)
    {
        $ai_titles = OpenAI::suggestArticleTitles($serp_url->title, $serp_url->description, 1);

        if (is_null($ai_titles)) {
            return self::saveAndReturnSerpProcessStatus($serp_url, -2);
        }

        $suggestion = null;

        try {
            // Guard before array_rand(): on PHP 8 passing null or an empty
            // array raises a TypeError/ValueError (an Error, not an
            // Exception), which the catch below would never see — the
            // original crashed instead of recording a failure status.
            $suggestions = $ai_titles?->suggestions;

            if (is_array($suggestions) && $suggestions !== []) {
                $suggestion = $suggestions[array_rand($suggestions)];
            }
        } catch (Exception $e) {
            return self::saveAndReturnSerpProcessStatus($serp_url, -1);
        }

        if (is_null($suggestion)) {
            return self::saveAndReturnSerpProcessStatus($serp_url, -3);
        }

        // 500–800 words target length.
        $markdown = OpenAI::writeArticle($suggestion->title, $suggestion->description, $suggestion->article_type, 500, 800);

        if (is_empty($markdown)) {
            return self::saveAndReturnSerpProcessStatus($serp_url, -4);
        }

        $post = new Post;
        $post->title = $suggestion->title;
        $post->type = $suggestion->article_type;
        $post->short_title = $ai_titles->short_title;
        $post->main_keyword = $ai_titles->main_keyword;
        $post->keywords = $suggestion->photo_keywords;
        $post->slug = str_slug($suggestion->title);
        $post->excerpt = $suggestion->description;
        // NOTE(review): hard-coded author id 1; Author::find(1) returns null
        // if that row is missing and ->id would then fatal — confirm the
        // seed author always exists, or make the author configurable.
        $post->author_id = Author::find(1)->id;
        $post->featured = false;
        $post->featured_image = null; // Filled in later by GenerateArticleFeaturedImageTask.
        $post->body = $markdown;
        $post->status = 'draft';

        if ($post->save()) {
            $post_category = new PostCategory;
            $post_category->post_id = $post->id;
            $post_category->category_id = $serp_url->category->id;

            if ($post_category->save()) {
                return self::saveAndReturnSerpProcessStatus($serp_url, 1);
            } else {
                return self::saveAndReturnSerpProcessStatus($serp_url, -5);
            }
        }

        return self::saveAndReturnSerpProcessStatus($serp_url, -6);
    }

    /**
     * Persist the outcome code on the SerpUrl and echo it back to the caller.
     *
     * @param  SerpUrl  $serp_url
     * @param  int  $process_status  One of the codes documented on the class.
     * @return int
     */
    private static function saveAndReturnSerpProcessStatus($serp_url, $process_status)
    {
        $serp_url->process_status = $process_status;
        $serp_url->save();

        return $serp_url->process_status;
    }
}
|
||||
79
app/Jobs/Tasks/GetNewsSerpTask.php
Normal file
79
app/Jobs/Tasks/GetNewsSerpTask.php
Normal file
@@ -0,0 +1,79 @@
|
||||
<?php

namespace App\Jobs\Tasks;

use App\Helpers\FirstParty\OSSUploader\OSSUploader;
use App\Helpers\ThirdParty\DFS\SettingSerpLiveAdvanced;
use App\Models\Category;
use App\Models\NewsSerpResult;
use DFSClientV3\DFSClient;
use Exception;

/**
 * Fetches a live Google News SERP for a category, archives the raw JSON to
 * object storage, and records a NewsSerpResult row describing the fetch.
 */
class GetNewsSerpTask
{
    /**
     * @param  Category  $category  Category whose name is used as the keyword;
     *                              serp_at is stamped on success.
     * @param  string  $country_iso  Two-letter country code (e.g. 'US').
     * @return NewsSerpResult|null The recorded result, or null on any failure.
     */
    public static function handle(Category $category, $country_iso)
    {
        $country_name = get_country_name_by_iso($country_iso);

        $keyword = strtolower("{$category->name}");

        // NOTE(review): $client is never referenced below; presumably the
        // constructor registers the DFS credentials that
        // SettingSerpLiveAdvanced relies on — confirm before removing.
        $client = new DFSClient(
            config('dataforseo.login'),
            config('dataforseo.password'),
            config('dataforseo.timeout'),
            config('dataforseo.api_version'),
            config('dataforseo.url'),
        );

        // Live Google News SERP for the keyword/location.
        $serp_model = new SettingSerpLiveAdvanced();

        $serp_model->setSe('google');
        $serp_model->setSeType('news');
        $serp_model->setKeyword($keyword);
        $serp_model->setLocationName($country_name);
        $serp_model->setDepth(100);
        $serp_model->setLanguageCode('en');
        $serp_res = $serp_model->getAsJson();

        try {
            $serp_obj = json_decode($serp_res, false, 512, JSON_THROW_ON_ERROR);

            if ($serp_obj?->status_code == 20000) {
                $json_file_name = config('platform.dataset.news.news_serp.file_prefix').str_slug($category->name).'-'.epoch_now_timestamp().'.json';

                // Archive the raw SERP payload before recording it.
                $upload_status = OSSUploader::uploadJson(
                    config('platform.dataset.news.news_serp.driver'),
                    config('platform.dataset.news.news_serp.path'),
                    $json_file_name,
                    $serp_obj);

                if ($upload_status) {
                    $news_serp_result = new NewsSerpResult;
                    $news_serp_result->category_id = $category->id;
                    $news_serp_result->category_name = $category->name;
                    $news_serp_result->serp_provider = 'dfs';
                    $news_serp_result->serp_se = 'google';
                    $news_serp_result->serp_se_type = 'news';
                    $news_serp_result->serp_keyword = $keyword;
                    $news_serp_result->serp_country_iso = strtoupper($country_iso);
                    $news_serp_result->serp_cost = $serp_obj?->cost;
                    $news_serp_result->result_count = $serp_obj?->tasks[0]?->result[0]?->items_count;
                    $news_serp_result->filename = $json_file_name;
                    $news_serp_result->status = 'initial';

                    // Stamp the category on success so schedulers can skip
                    // recently-fetched categories.
                    if ($news_serp_result->save()) {
                        $category->serp_at = now();
                        $category->save();
                    }

                    return $news_serp_result;
                } else {
                    // Caught below and reported as a null result.
                    throw new Exception('Uploading failed', 1);
                }
            }
        } catch (Exception $e) {
            // Covers malformed JSON (JSON_THROW_ON_ERROR) and upload failures.
            return null;
        }

        // Non-success status_code: explicit failure. (The original fell off
        // the end of the try block and returned null only implicitly.)
        return null;
    }
}
|
||||
146
app/Jobs/Tasks/ParseNewsSerpDomainsTask.php
Normal file
146
app/Jobs/Tasks/ParseNewsSerpDomainsTask.php
Normal file
@@ -0,0 +1,146 @@
|
||||
<?php

namespace App\Jobs\Tasks;

use App\Helpers\FirstParty\OSSUploader\OSSUploader;
use App\Models\Category;
use App\Models\NewsSerpResult;
use App\Models\SerpUrl;
use Carbon\Carbon;
use Exception;

/**
 * Reads an archived news SERP JSON from object storage, filters its items
 * down to usable candidates, and upserts a SerpUrl row for each one.
 */
class ParseNewsSerpDomainsTask
{
    /**
     * @param  NewsSerpResult  $news_serp_result  Points at the archived SERP JSON.
     * @param  int  $serp_counts  Maximum number of items to accept.
     * @return bool True if at least one SerpUrl row was saved.
     */
    public static function handle(NewsSerpResult $news_serp_result, $serp_counts = 1)
    {
        $serp_results = null;

        $success = false;

        try {
            $serp_results = OSSUploader::readJson(
                config('platform.dataset.news.news_serp.driver'),
                config('platform.dataset.news.news_serp.path'),
                $news_serp_result->filename)?->tasks[0]?->result[0]?->items;
        } catch (Exception $e) {
            // Missing/corrupt archive: treat as "nothing to parse".
            $serp_results = null;
        }

        if (! is_null($serp_results)) {
            $valid_serps = [];

            // Pass 1: filter items down to at most $serp_counts candidates.
            // (Dropped the original's unused `$news_date = Carbon::parse(...)`
            // local — it did nothing but could throw on a bad timestamp.)
            foreach ($serp_results as $serp_item) {
                if (is_empty($serp_item->url)) {
                    continue;
                }

                $existing = SerpUrl::where('url', $serp_item->url)->first();

                if (! is_null($existing) && $existing->status == 'blocked') {
                    continue;
                }

                // Titles containing ':' are skipped — presumably to avoid
                // compound/branded headlines; confirm the intent.
                if (str_contains($serp_item->title, ':')) {
                    continue;
                }

                $valid_serps[] = $serp_item;

                if (count($valid_serps) >= $serp_counts) {
                    break;
                }
            }

            // Pass 2: upsert a SerpUrl row per accepted item.
            foreach ($valid_serps as $serp_item) {
                // BUG FIX: look the row up per item. The original tested the
                // stale $serp_url left over from the filtering loop above, so
                // every accepted item updated the same (possibly wrong) record.
                $serp_url = SerpUrl::where('url', $serp_item->url)->first();

                if (is_null($serp_url)) {
                    $serp_url = new SerpUrl;
                    $serp_url->category_id = $news_serp_result->category_id;
                    $serp_url->category_name = $news_serp_result->category_name;
                    $serp_url->news_serp_result_id = $news_serp_result->id;
                }

                $serp_url->source = 'serp';
                // NOTE(review): rows are stored with the normalized URL but
                // looked up with the raw SERP URL, so a normalized row may not
                // be found on the next run — consider normalizing before lookup.
                $serp_url->url = self::normalizeUrl($serp_item->url);
                $serp_url->country_iso = $news_serp_result->serp_country_iso;

                if (! is_empty($serp_item->title)) {
                    $serp_url->title = $serp_item->title;
                }

                if (! is_empty($serp_item->snippet)) {
                    $serp_url->description = $serp_item->snippet;
                }

                // Only refresh serp_at when something actually changed.
                if ($serp_url->isDirty()) {
                    $serp_url->serp_at = $news_serp_result->category->serp_at;
                }

                if ($serp_url->save()) {
                    $success = true;
                }
            }
        }

        return $success;
    }

    /**
     * Canonicalize a URL: force https, strip the trailing slash, and drop
     * query string and fragment. Returns the input unchanged if it cannot
     * be parsed (e.g. no host).
     *
     * @param  string  $url
     * @return string
     */
    private static function normalizeUrl($url)
    {
        try {
            $parsedUrl = parse_url($url);

            // Force https to avoid http/https duplicate-content rows.
            $parsedUrl['scheme'] = 'https';

            if (! isset($parsedUrl['host'])) {
                throw new \Exception('Host not found in URL');
            }

            // Strip a trailing slash from the path, if present.
            if (isset($parsedUrl['path']) && substr($parsedUrl['path'], -1) === '/') {
                $parsedUrl['path'] = rtrim($parsedUrl['path'], '/');
            }

            // Drop query parameters entirely.
            unset($parsedUrl['query']);

            $normalizedUrl = sprintf(
                '%s://%s%s',
                $parsedUrl['scheme'],
                $parsedUrl['host'],
                $parsedUrl['path'] ?? ''
            );

            // Drop any fragment.
            $normalizedUrl = preg_replace('/#.*$/', '', $normalizedUrl);

            return $normalizedUrl;
        } catch (\Exception $e) {
            // Unparseable input: fall back to the original URL.
            return $url;
        }
    }
}
|
||||
Reference in New Issue
Block a user