Mirror of https://github.com/YGGverse/YGGo.git (synced 2025-01-24 13:34:25 +00:00)
add disk quota validation

commit 8dbb4a06af · parent 7bee0ebb4d
@@ -40,6 +40,12 @@ define('SPHINX_PORT', 9306);
 
 // Crawler settings
 
+/*
+ * Stop crawler on disk quota reached (Mb)
+ *
+ */
+define('CRAWL_STOP_DISK_QUOTA_MB_LEFT', 500);
+
 /*
  * Pages (URI) processing limit in the crawler.php queue
  *
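The new threshold is expressed in decimal megabytes: the checks below divide the output of disk_free_space() by 1,000,000, so the default of 500 means the crawler stops once fewer than 500,000,000 bytes remain. A minimal sketch of that conversion, assuming the same decimal convention (the mbLeft helper name is illustrative, not part of the codebase):

<?php

// Decimal-megabyte conversion matching the quota checks below:
// 1 Mb (as the config comment spells it) = 1,000,000 bytes.
// Hypothetical helper; error handling omitted, see the fuller sketch below.
function mbLeft(string $path): float
{
  return disk_free_space($path) / 1000000;
}

echo mbLeft('/') . ' Mb free' . PHP_EOL;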
@@ -9,6 +9,13 @@ if (false === sem_acquire($semaphore, true)) {
   exit;
 }
 
+// Check disk quota
+if (CRAWL_STOP_DISK_QUOTA_MB_LEFT > disk_free_space('/') / 1000000) {
+
+  echo 'Disk quota reached.' . PHP_EOL;
+  exit;
+}
+
 // Load system dependencies
 require_once('../config/app.php');
 require_once('../library/curl.php');
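disk_free_space('/') returns the free bytes on the filesystem containing /, or false on failure, so this guard ends the run once less than CRAWL_STOP_DISK_QUOTA_MB_LEFT Mb remain. A defensive variant of the same check, sketched on the assumption that a failed probe should also stop the crawler (diskQuotaReached is a hypothetical helper, not in the commit):

<?php

// Illustrative sketch, not part of the commit: the same quota test,
// but treating a failed disk_free_space() probe (false) as "quota
// reached" so the crawler fails closed instead of writing blindly.
function diskQuotaReached(string $path, int $minFreeMb): bool
{
  $freeBytes = disk_free_space($path);

  if (false === $freeBytes) {
    return true; // probe failed: assume no space left, to be safe
  }

  return $freeBytes / 1000000 < $minFreeMb;
}

if (diskQuotaReached('/', 500)) { // 500 = CRAWL_STOP_DISK_QUOTA_MB_LEFT
  echo 'Disk quota reached.' . PHP_EOL;
  exit;
}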
@@ -49,30 +49,34 @@ if (filter_var($q, FILTER_VALIDATE_URL) && preg_match(CRAWL_URL_REGEXP, $q)) {
   // Register new host
   } else {
 
-    // Get robots.txt if exists
-    $curl = new Curl($hostURL->string . '/robots.txt');
+    // Disk quota not reached
+    if (CRAWL_STOP_DISK_QUOTA_MB_LEFT < disk_free_space('/') / 1000000) {
 
-    if (200 == $curl->getCode() && false !== stripos($curl->getContent(), 'user-agent:')) {
-      $hostRobots = $curl->getContent();
-    } else {
-      $hostRobots = null;
+      // Get robots.txt if exists
+      $curl = new Curl($hostURL->string . '/robots.txt');
+
+      if (200 == $curl->getCode() && false !== stripos($curl->getContent(), 'user-agent:')) {
+        $hostRobots = $curl->getContent();
+      } else {
+        $hostRobots = null;
+      }
+
+      $hostRobotsPostfix = CRAWL_ROBOTS_POSTFIX_RULES;
+
+      $hostStatus = CRAWL_HOST_DEFAULT_STATUS;
+      $hostPageLimit = CRAWL_HOST_DEFAULT_PAGES_LIMIT;
+      $hostId = $db->addHost($hostURL->scheme,
+                             $hostURL->name,
+                             $hostURL->port,
+                             crc32($hostURL->string),
+                             time(),
+                             null,
+                             $hostPageLimit,
+                             (string) CRAWL_HOST_DEFAULT_META_ONLY,
+                             (string) $hostStatus,
+                             $hostRobots,
+                             $hostRobotsPostfix);
     }
-
-    $hostRobotsPostfix = CRAWL_ROBOTS_POSTFIX_RULES;
-
-    $hostStatus = CRAWL_HOST_DEFAULT_STATUS;
-    $hostPageLimit = CRAWL_HOST_DEFAULT_PAGES_LIMIT;
-    $hostId = $db->addHost($hostURL->scheme,
-                           $hostURL->name,
-                           $hostURL->port,
-                           crc32($hostURL->string),
-                           time(),
-                           null,
-                           $hostPageLimit,
-                           (string) CRAWL_HOST_DEFAULT_META_ONLY,
-                           (string) $hostStatus,
-                           $hostRobots,
-                           $hostRobotsPostfix);
   }
 
   // Parse page URI
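Here the comparison is inverted (< instead of >): new hosts are only registered while free space stays above the threshold, so the same quota expression now appears at two call sites. One way to keep them from drifting apart, purely as a sketch and not what the commit does, is to route both through a single helper:

<?php

// Sketch only: gate host registration through the shared helper from
// the previous sketch (diskQuotaReached() is hypothetical; the commit
// inlines the comparison at both call sites instead).
if (!diskQuotaReached('/', CRAWL_STOP_DISK_QUOTA_MB_LEFT)) {

  // Get robots.txt if exists
  $curl = new Curl($hostURL->string . '/robots.txt');

  // ... register the host exactly as in the hunk above
}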