
add disk quota validation

ghost, 2 years ago · branch main · commit 8dbb4a06af
3 changed files, 17 additions:
  1. config/app.php.txt   (+6)
  2. crontab/crawler.php  (+7)
  3. public/search.php    (+4)

config/app.php.txt (+6)

@@ -40,6 +40,12 @@ define('SPHINX_PORT', 9306);
// Crawler settings
/*
* Stop crawler on disk quota reached (Mb)
*
*/
define('CRAWL_STOP_DISK_QUOTA_MB_LEFT', 500);
/*
* Pages (URI) processing limit in the crawler.php queue
*
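
The new constant is a floor on remaining free disk space, not a cap on space consumed: once less than 500 MB is left on the filesystem, the crawler is supposed to stop adding data. A minimal standalone sketch of how the threshold is meant to be applied (not part of this commit; the root path '/' and the decimal-megabyte conversion mirror the hunks below):

<?php

// Sketch only: compare the configured floor against current free space.
// disk_free_space() reports bytes; dividing by 1000000 gives decimal
// megabytes, matching the "Mb" unit named in the config comment.
define('CRAWL_STOP_DISK_QUOTA_MB_LEFT', 500);

$freeMb = disk_free_space('/') / 1000000;

if (CRAWL_STOP_DISK_QUOTA_MB_LEFT > $freeMb) {
    // Less than the configured amount of free space remains: stop here.
    echo 'Disk quota reached.' . PHP_EOL;
    exit;
}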

crontab/crawler.php (+7)

@@ -9,6 +9,13 @@ if (false === sem_acquire($semaphore, true)) {
exit;
}
// Check disk quota
if (CRAWL_STOP_DISK_QUOTA_MB_LEFT > disk_free_space('/') / 1000000) {
echo 'Disk quota reached.' . PHP_EOL;
exit;
}
// Load system dependencies
require_once('../config/app.php');
require_once('../library/curl.php');
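
The same free-space test recurs, with the comparison inverted, in public/search.php below. As shown, the check sits above the require_once of config/app.php, so it relies on CRAWL_STOP_DISK_QUOTA_MB_LEFT already being defined at that point. Purely as an illustration, both call sites could share one helper; diskQuotaReached() is a hypothetical name, not something this commit introduces:

<?php

// Hypothetical helper, not part of this commit: a single place for the
// free-space test used by crontab/crawler.php and public/search.php.
// Fallback definition keeps the sketch self-contained.
defined('CRAWL_STOP_DISK_QUOTA_MB_LEFT') || define('CRAWL_STOP_DISK_QUOTA_MB_LEFT', 500);

function diskQuotaReached(string $path = '/'): bool
{
    // true when less free space remains than the configured floor (in MB)
    return disk_free_space($path) / 1000000 < CRAWL_STOP_DISK_QUOTA_MB_LEFT;
}

// crawler.php-style usage: abort the cron run early.
if (diskQuotaReached()) {
    echo 'Disk quota reached.' . PHP_EOL;
    exit;
}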

public/search.php (+4)

@@ -49,6 +49,9 @@ if (filter_var($q, FILTER_VALIDATE_URL) && preg_match(CRAWL_URL_REGEXP, $q)) {
// Register new host
} else {
// Disk quota not reached
if (CRAWL_STOP_DISK_QUOTA_MB_LEFT < disk_free_space('/') / 1000000) {
// Get robots.txt if exists
$curl = new Curl($hostURL->string . '/robots.txt');
@@ -74,6 +77,7 @@ if (filter_var($q, FILTER_VALIDATE_URL) && preg_match(CRAWL_URL_REGEXP, $q)) {
$hostRobots,
$hostRobotsPostfix);
}
}
// Parse page URI
$hostPageURI = Parser::uri($q);
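
Judging from the hunk, search.php inverts the comparison: the robots.txt fetch and new-host registration run only while free space stays above the limit, so a full disk skips registering the host rather than aborting the whole request, and execution falls through to the Parser::uri($q) call at the end of the hunk. With the hypothetical helper sketched above, the same guard would read "if (false === diskQuotaReached())".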
