
search page speed optimization, yggverse/cache library integration begin

main · ghost committed 1 year ago · commit 30520f6047
Changed files:
  1. composer.json (3 changes)
  2. src/config/app.php.example (2 changes)
  3. src/public/search.php (50 changes)

composer.json (3 changes)

@@ -4,7 +4,8 @@
     "type": "project",
     "require": {
         "php": ">=8.1",
-        "yggverse/parser": ">=0.1.0"
+        "yggverse/parser": ">=0.1.0",
+        "yggverse/cache": ">=0.3.0"
     },
     "license": "MIT",
     "autoload": {

src/config/app.php.example (2 changes)

@@ -77,6 +77,8 @@ define('SPHINX_PORT', 9306);
 // Memcached
 define('MEMCACHED_HOST', 'localhost');
 define('MEMCACHED_PORT', 11211);
+define('MEMCACHED_NAMESPACE', 'YGGo');
+define('MEMCACHED_TIMEOUT', 300);
 // Snaps
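
The two new constants feed the cache wrapper that search.php sets up below: MEMCACHED_NAMESPACE
presumably prefixes the cache keys so several instances can share one memcached server, and
MEMCACHED_TIMEOUT looks like a relative lifetime in seconds that the code converts into the absolute
Unix-timestamp form of a memcached expiration. A small sketch of the assumed semantics (not part of
the diff):

    <?php
    // Assumption: a relative lifetime of 300 seconds (5 minutes).
    define('MEMCACHED_TIMEOUT', 300);

    // search.php passes MEMCACHED_TIMEOUT + time() to the cache; memcached treats expiration
    // values larger than 30 days as absolute Unix timestamps, so this effectively means
    // "expire about five minutes after the cache object was created".
    $expire = MEMCACHED_TIMEOUT + time();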

src/public/search.php (50 changes)

@@ -33,6 +33,9 @@ try {
 }
 // Connect memcached
+// @TODO
+// legacy, upgrade to yggverse/cache instead
+// https://github.com/YGGverse/cache-php
 try {
   $memcached = new Memcached();
@@ -45,6 +48,18 @@ try {
   exit;
 }
+// Connect Yggverse\Cache\Memory
+try {
+  $memory = new Yggverse\Cache\Memory(MEMCACHED_HOST, MEMCACHED_PORT, MEMCACHED_NAMESPACE, MEMCACHED_TIMEOUT + time());
+} catch(Exception $e) {
+  var_dump($e);
+  exit;
+}
 // Filter request data
 $t = !empty($_GET['t']) ? Filter::url($_GET['t']) : 'text';
 $m = !empty($_GET['m']) ? Filter::url($_GET['m']) : 'default';
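
The new Yggverse\Cache\Memory connection is added alongside the legacy Memcached handle for now (the
@TODO above marks the old block for replacement). Judging from the call site, the constructor takes
host, port, key namespace and a default absolute expiration; a commented sketch under that assumption
(argument meaning inferred from this diff, not from the library documentation):

    <?php
    $memory = new Yggverse\Cache\Memory(
        MEMCACHED_HOST,             // memcached server host, e.g. 'localhost'
        MEMCACHED_PORT,             // memcached server port, e.g. 11211
        MEMCACHED_NAMESPACE,        // key prefix, e.g. 'YGGo'
        MEMCACHED_TIMEOUT + time()  // default expiration as an absolute Unix timestamp
    );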
@@ -52,16 +67,19 @@ $q = !empty($_GET['q']) ? Filter::url($_GET['q']) : '';
 $p = !empty($_GET['p']) ? (int) $_GET['p'] : 1;
 // Search request
-$resultsTotal = $sphinx->searchHostPagesTotal(Filter::searchQuery($q, $m), $t);
-$results = $sphinx->searchHostPages(Filter::searchQuery($q, $m), $t, $p * WEBSITE_PAGINATION_SEARCH_PAGE_RESULTS_LIMIT - WEBSITE_PAGINATION_SEARCH_PAGE_RESULTS_LIMIT, WEBSITE_PAGINATION_SEARCH_PAGE_RESULTS_LIMIT, $resultsTotal);
-// Mime list
-$hostPagesMime = $sphinx->searchHostPagesMime(Filter::searchQuery($q, $m));
+if (empty($q)) {
+  $resultsTotal = 0;
+  $results = [];
+  $hostPagesMime = [];
+} else {
+  $resultsTotal = $sphinx->searchHostPagesTotal(Filter::searchQuery($q, $m), $t);
+  $results = $sphinx->searchHostPages(Filter::searchQuery($q, $m), $t, $p * WEBSITE_PAGINATION_SEARCH_PAGE_RESULTS_LIMIT - WEBSITE_PAGINATION_SEARCH_PAGE_RESULTS_LIMIT, WEBSITE_PAGINATION_SEARCH_PAGE_RESULTS_LIMIT, $resultsTotal);
+  $hostPagesMime = $sphinx->searchHostPagesMime(Filter::searchQuery($q, $m));
+}
 // Define page basics
 $totalPages = $sphinx->getHostPagesTotal();
 $placeholder = Filter::plural($totalPages, [sprintf(_('Over %s page or enter the new one...'), $totalPages),
                                             sprintf(_('Over %s pages or enter the new one...'), $totalPages),
                                             sprintf(_('Over %s pages or enter the new one...'), $totalPages),
@@ -107,7 +125,11 @@ if (Yggverse\Parser\Url::is($q)) {
 }
 // Count pages in the crawl queue
-if ($queueTotal = $db->getHostPageCrawlQueueTotal(time() - CRAWL_HOST_PAGE_QUEUE_SECONDS_OFFSET)) {
+$timeThisHour = strtotime(sprintf('%s-%s-%s %s:00', date('Y'), date('n'), date('d'), date('H')));
+if ($queueTotal = $memory->getByMethodCallback(
+  $db, 'getHostPageCrawlQueueTotal', [$timeThisHour - CRAWL_HOST_PAGE_QUEUE_SECONDS_OFFSET], $timeThisHour + 3600
+)) {
   $alertMessages[] = sprintf(_('* Please wait for all pages crawl to complete (%s in queue).'), $queueTotal);
 }
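
This hunk is where the speed-up comes from. The old code ran the crawl-queue count against the
database on every request, and using time() in the argument would have made each call's input change
every second. Rounding the reference time down to the current hour ($timeThisHour) keeps the argument
list constant for a whole hour, and getByMethodCallback appears to return a cached value when one
exists, otherwise invoking $db->getHostPageCrawlQueueTotal(...) and storing the result until
$timeThisHour + 3600, i.e. the top of the next hour. A rough equivalent sketched with the plain
Memcached extension, reusing the $memcached and $db handles already defined in search.php (the real
key layout inside yggverse/cache may differ; this only illustrates the idea):

    <?php
    // Reference time truncated to the current hour, mirroring the diff above.
    $timeThisHour = strtotime(sprintf('%s-%s-%s %s:00', date('Y'), date('n'), date('d'), date('H')));

    // Cache key derived from the method name and its argument (hypothetical layout).
    $key = 'YGGo.getHostPageCrawlQueueTotal.' . ($timeThisHour - CRAWL_HOST_PAGE_QUEUE_SECONDS_OFFSET);

    if (false === ($queueTotal = $memcached->get($key))) {
        // Cache miss: query the database once, then keep the result until the next hour begins.
        $queueTotal = $db->getHostPageCrawlQueueTotal($timeThisHour - CRAWL_HOST_PAGE_QUEUE_SECONDS_OFFSET);
        $memcached->set($key, $queueTotal, $timeThisHour + 3600);
    }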
@@ -340,9 +362,17 @@ if ($queueTotal = $db->getHostPageCrawlQueueTotal(time() - CRAWL_HOST_PAGE_QUEUE
 <?php } else { ?>
   <div style="text-align:center">
     <span><?php echo sprintf(_('Total found: %s'), $resultsTotal) ?></span>
-    <?php if ($q && $queueTotal = $db->getHostPageCrawlQueueTotal(time() - CRAWL_HOST_PAGE_QUEUE_SECONDS_OFFSET)) { ?>
-      <span><?php echo sprintf(_('* Please wait for all pages crawl to complete (%s in queue).'), $queueTotal) ?></span>
-    <?php } ?>
+    <span>
+      <?php
+        // Count pages in the crawl queue
+        if ($q && $queueTotal = $memory->getByMethodCallback(
+          $db, 'getHostPageCrawlQueueTotal', [$timeThisHour - CRAWL_HOST_PAGE_QUEUE_SECONDS_OFFSET], $timeThisHour + 3600
+        )) {
+          echo sprintf(_('* Please wait for all pages crawl to complete (%s in queue).'), $queueTotal);
+        }
+      ?>
+    </span>
   </div>
 <?php } ?>
 </main>
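
Follow-up note: this template block issues the same getByMethodCallback lookup as the alert check
earlier in the script, with an identical argument list. Assuming the cache key is derived from the
method name and its arguments, the two call sites share one cache entry, so within any given hour the
underlying crawl-queue query should run at most once, no matter how many searches are served.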
