YGGo/config/app.php.txt

<?php
// Debug
ini_set('display_errors', '1');
ini_set('display_startup_errors', '1');
error_reporting(E_ALL);
// Website
define('WEBSITE_DOMAIN', (isset($_SERVER['HTTP_HOST']) ? 'http://' . $_SERVER['HTTP_HOST'] : ''));
define('WEBSITE_PAGINATION_SEARCH_RESULTS_LIMIT', 100);
define('WEBSITE_IDENTICON_IMAGE_CACHE', true);
// Database
define('DB_HOST', 'localhost');
define('DB_PORT', 3306);
define('DB_NAME', '');
define('DB_USERNAME', '');
define('DB_PASSWORD', '');
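/*
 * Example (illustrative sketch only, not part of YGGo itself): the DB_* constants
 * above could be consumed by a PDO connection like the one below; the application's
 * actual database layer may differ.
 *
 * $db = new PDO(
 *   sprintf('mysql:host=%s;port=%d;dbname=%s;charset=utf8mb4', DB_HOST, DB_PORT, DB_NAME),
 *   DB_USERNAME,
 *   DB_PASSWORD,
 *   [PDO::ATTR_ERRMODE => PDO::ERRMODE_EXCEPTION]
 * );
 */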
// Sphinx
define('SPHINX_HOST', '127.0.0.1');
define('SPHINX_PORT', 9306);
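/*
 * Example (illustrative sketch): Sphinx speaks the MySQL wire protocol on its
 * SphinxQL port (9306 by default), so a connection could look like the snippet
 * below; the index name is a placeholder and how YGGo actually queries Sphinx
 * may differ.
 *
 * $sphinx = new mysqli(SPHINX_HOST, '', '', '', SPHINX_PORT);
 * $result = $sphinx->query("SELECT * FROM index_name WHERE MATCH('yggdrasil') LIMIT 10");
 */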
// Crawler settings
define('CRAWL_PAGE_LIMIT', 10);
define('CRAWL_PAGE_SECONDS_OFFSET', 3600);
define('CRAWL_URL_REGEXP', '/^.*$/ui'); // match any URL; IPv6-only hosts: '/^http:\/\/\[[\w:]+\].*$/ui'
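/*
 * Example (illustrative only): a discovered link could be checked against this
 * pattern before being queued, e.g.
 *
 * if (preg_match(CRAWL_URL_REGEXP, $url)) {
 *   // queue $url for crawling ($url is a hypothetical variable here)
 * }
 */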
/*
 * Default pages limit per newly added host
 *
 * The crawler stops indexing a host once this limit is reached, to prevent disk overuse
 *
 * A custom limit for a specific host can be set in the DB `host`.`crawlPageLimit` field
 *
 */
define('CRAWL_HOST_DEFAULT_PAGES_LIMIT', 1000);
/*
 * Default auto-crawl status for a newly added host
 *
 * true  - the crawler starts indexing pages automatically, limited by CRAWL_HOST_DEFAULT_PAGES_LIMIT
 * false - the host requires manual approval by a moderator in the DB `host`.`status` field
 *
 * When disabled, the host is also excluded from the search results
 *
 */
define('CRAWL_HOST_DEFAULT_STATUS', true);
/*
 * true  - index meta tags only, to prevent disk overuse
 * false - save meta tags plus the overall plain-text page content
 *
 * A custom rule for a specific host can be set in the DB `host`.`crawlPageMetaOnly` field
 *
 * This option can affect search results relevance
 *
 */
define('CRAWL_HOST_DEFAULT_META_ONLY', false);
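/*
 * Example (illustrative sketch): the three per-host overrides mentioned above live
 * in the `host` table, so a manual override for a single host could look like the
 * query below; the WHERE condition and the `name` column are assumptions, the
 * actual schema may differ.
 *
 * UPDATE `host`
 *    SET `crawlPageLimit`    = 5000,
 *        `status`            = 1,
 *        `crawlPageMetaOnly` = 0
 *  WHERE `name` = 'example host';
 */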
/*
 * Default robots.txt rules, applied when the remote file does not exist
 *
 * The crawler may overwrite these rules
 *
 * Presets
 * yggdrasil: /database/yggdrasil/host.robots.md
 *
 */
define('CRAWL_ROBOTS_DEFAULT_RULES', null); // string|null
/*
 * Permanent rules appended to the remote robots.txt if it exists, otherwise to CRAWL_ROBOTS_DEFAULT_RULES
 *
 * The crawler does not overwrite these rules
 *
 * Presets
 * yggdrasil: /database/yggdrasil/host.robotsPostfix.md
 *
 */
define('CRAWL_ROBOTS_POSTFIX_RULES', null); // string|null
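/*
 * Example (illustrative only): instead of null, a plain robots.txt-style string could
 * be supplied, or loaded from one of the presets referenced above, e.g.
 *
 * define('CRAWL_ROBOTS_DEFAULT_RULES', "User-agent: *\nDisallow: /private/\n");
 * define('CRAWL_ROBOTS_POSTFIX_RULES', file_get_contents(__DIR__ . '/../database/yggdrasil/host.robotsPostfix.md'));
 *
 * The relative path in the second line assumes this config sits one level below the
 * project root; adjust as needed.
 */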
// Cleaner settings
define('CLEAN_HOST_LIMIT', 20);
define('CLEAN_HOST_SECONDS_OFFSET', 3600);