<?php

/*
 * YGGo! - Distributed & Open Source Web Search Engine
 *
 * MIT License
 *
 * Copyright (c) 2023 YGGverse
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 *
 * Default configuration file example
 * Production name: app.php
 *
 * Project home page
 * https://github.com/YGGverse/YGGo
 *
 * Get support
 * https://github.com/YGGverse/YGGo/issues
 *
 */

// Debug
ini_set('display_errors', '1');
ini_set('display_startup_errors', '1');
error_reporting(E_ALL);

// Website

/*
 * Project domain, without trailing slash
 *
 */
define('WEBSITE_DOMAIN', '');
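
/*
 * Hypothetical example value (a sketch only, not a real deployment), using a
 * bracketed Yggdrasil-style IPv6 address as the host:
 *
 *   define('WEBSITE_DOMAIN', 'http://[201:23b4:991a:634d:8359:4521:5576:15b7]');
 */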

/*
 * Search results shown per page before the read more link
 *
 */
define('WEBSITE_PAGINATION_SEARCH_PAGE_RESULTS_LIMIT', 100);

/*
 * Save identicons to the static webp cache (placed in storage/cache)
 * to prevent CPU overload,
 *
 * or false - to generate them on every request
 *
 */
define('WEBSITE_IDENTICON_IMAGE_CACHE', true);

// Database
define('DB_HOST', '127.0.0.1');
define('DB_PORT', 3306);
define('DB_NAME', '');
define('DB_USERNAME', '');
define('DB_PASSWORD', '');

// Sphinx
define('SPHINX_HOST', '127.0.0.1');
define('SPHINX_PORT', 9306);

// Memcached
define('MEMCACHED_HOST', '127.0.0.1');
define('MEMCACHED_PORT', 11211);

// Snaps

/*
 * Storage nodes configuration
 *
 * Supports an optional single 'localhost' node and multiple 'ftp' servers
 *
 * Comment out a node to disable that connection
 *
 * Make the array empty to disable snaps, or set quota.mime = false
 * or quota.size = 0 to disable a specific instance
 *
 */
define('SNAP_STORAGE', json_encode((object)
  [
    'localhost' => [
      [
        'directory' => __DIR__ . '/../storage/snap/hp/',
        'quota'     => [
          'mime'    => false,
          'size'    => 10000000024,
          'request' => [
            'download' => [
              'size'    => 10000024,
              'seconds' => 60*60
            ]
          ]
        ]
      ],
      // ...
    ],
    'ftp' => [
      [
        'port'      => 21,
        'host'      => '',
        'username'  => '',
        'password'  => '',
        'directory' => '/snap',
        'timeout'   => 30,
        'passive'   => true,
        'quota'     => [
          'mime'    => 'text/html,application/xhtml+xml,text/plain,image/webp,image/png,image/gif,image/jpeg,image/ico',
          'size'    => 10000000024,
          'request' => [
            'download' => [
              'size'    => 10000024,
              'seconds' => 60*60
            ]
          ]
        ]
      ],
      // ...
    ]
  ]
));
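
/*
 * A minimal sketch (not part of the configuration itself) of how consumer
 * code might read these nodes back; SNAP_STORAGE is stored as a JSON string,
 * so it can be decoded into an associative array:
 *
 *   $snapStorage = json_decode(SNAP_STORAGE, true);
 *
 *   foreach ($snapStorage['localhost'] ?? [] as $node) {
 *     // $node['directory'], $node['quota']['size'], ...
 *   }
 *
 *   foreach ($snapStorage['ftp'] ?? [] as $node) {
 *     // $node['host'], $node['port'], $node['quota']['mime'], ...
 *   }
 */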

// Proxy settings

/*
 * Search proxy User Agent name
 *
 * Shared with other hosts through CURL requests by the search proxy
 *
 */
define('PROXY_CURLOPT_USERAGENT', 'YGGo Search Proxy ( https://github.com/YGGverse/YGGo )');

// Crawl settings

/*
 * Save crawler debug info to the `logCrawler` table
 *
 */
define('CRAWL_LOG_ENABLED', true);

/*
 * Auto-clean `logCrawler` items older than this seconds offset
 *
 */
define('CRAWL_LOG_SECONDS_OFFSET', 60*60*24*30);

/*
 * Crawler / Bot User Agent name
 *
 * Shared with other hosts through CURL requests by the crawler
 *
 */
define('CRAWL_CURLOPT_USERAGENT', 'YGGo Search Crawler / Bot ( https://github.com/YGGverse/YGGo )');

/*
 * Skip the curl download when this response data size is reached
 *
 * See also: CURLOPT_TIMEOUT (library/curl.php)
 *
 */
define('CRAWL_CURLOPT_PROGRESSFUNCTION_DOWNLOAD_SIZE_LIMIT', 10485760);
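// Note: 10485760 bytes = 10 MiB (10 * 1024 * 1024)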

/*
 * Stop the crawler when less than this amount of disk space (MB) is left
 *
 */
define('CRAWL_STOP_DISK_QUOTA_MB_LEFT', 500);

/*
 * Pages (URI) processing limit in the crawler.php queue
 *
 * This option relates to the CRAWL_PAGE_SECONDS_OFFSET value
 * and the crontab task frequency (https://github.com/YGGverse/YGGo#crontab)
 *
 * Usually up to 20 pages per minute,
 * to prevent overloading websites with GET crawling requests
 *
 * Set 0 to disable
 *
 */
define('CRAWL_PAGE_LIMIT', 20);

/*
 * Manifest (URI) processing limit in the crawler.php queue
 *
 * Used to collect the distributed data index
 * that matches CRAWL_URL_REGEXP & CRAWL_MANIFEST_API_VERSION
 *
 * This option relates to the CRAWL_MANIFEST_SECONDS_OFFSET value
 * and the crontab task frequency (https://github.com/YGGverse/YGGo#crontab)
 *
 * Usually up to 20 pages per minute,
 * to prevent overloading websites with GET crawling requests
 *
 * Set 0 to disable
 *
 */
define('CRAWL_MANIFEST_LIMIT', 10);

/*
 * Renew the page index by the timing offset provided
 *
 * This option works with the CRAWL_PAGE_LIMIT step queue
 *
 * Pay attention that the CRAWL_PAGE_LIMIT + CRAWL_PAGE_SECONDS_OFFSET pair
 * must have enough capacity to crawl all pages collected in the DB index,
 *
 * or the crawler can get stuck in the queue
 *
 */
define('CRAWL_PAGE_SECONDS_OFFSET', 60*60*24*30*12);
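
/*
 * A rough capacity check (a sketch only, assuming the crontab task runs
 * once per minute, per the "pages per minute" note above):
 *
 *   60*60*24*30*12 seconds = 31 104 000 s (~360 days)
 *   runs per offset        = 31 104 000 / 60 = 518 400
 *   pages per offset       = 518 400 * CRAWL_PAGE_LIMIT (20) = 10 368 000
 *
 * i.e. with these defaults the queue can renew roughly 10M pages per cycle;
 * a larger DB index would need a higher CRAWL_PAGE_LIMIT or a longer offset.
 */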

/*
 * Renew the home page index by the timing offset provided
 *
 * Used to scan for new pages with higher priority
 *
 * This option works with the CRAWL_PAGE_SECONDS_OFFSET and CRAWL_PAGE_LIMIT step queue
 *
 * Pay attention that the CRAWL_PAGE_LIMIT + CRAWL_PAGE_SECONDS_OFFSET pair
 * must have enough capacity to crawl all pages collected in the DB index,
 *
 * or the crawler can get stuck in the queue
 *
 */
define('CRAWL_PAGE_HOME_SECONDS_OFFSET', 60*60*24*7);

/*
 * Index pages matching these MIME types
 *
 * comma separated
 *
 */
define('CRAWL_PAGE_MIME_INDEX', 'text/html,application/xhtml+xml,text/plain,image/webp,image/png,image/gif,image/jpeg,image/ico,image/svg+xml,video/mp4,video/ogg,video/webm,audio/mpeg,audio/ogg,audio/wav,audio/mp4,audio/aac,audio/aacp,audio/webm,audio/x-caf,audio/x-mpegurl,audio/flac');
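
/*
 * A minimal sketch (illustration only, not the project's actual helper) of
 * how a Content-Type header could be checked against this list:
 *
 *   $mime    = strtolower(trim(explode(';', $contentType)[0]));
 *   $indexed = in_array($mime, explode(',', CRAWL_PAGE_MIME_INDEX));
 */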

/*
 * Renew the manifests index by the timing offset provided
 *
 * This option works with the CRAWL_MANIFEST_LIMIT step queue
 *
 * Pay attention that the CRAWL_MANIFEST_LIMIT + CRAWL_MANIFEST_SECONDS_OFFSET pair
 * must have enough capacity to crawl all manifests collected in the DB index,
 *
 * or the crawler can get stuck in the queue
 *
 */
define('CRAWL_MANIFEST_SECONDS_OFFSET', 60*60*24*30);

/*
 * Only URL addresses matching this rule will be auto-crawled
 *
 */
define('CRAWL_URL_REGEXP', '/^http:\/\/\[[\w:]+\].*$/ui');
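
/*
 * A sketch of what the default rule accepts (the example addresses below are
 * hypothetical): plain-HTTP URLs whose host is a bracketed IPv6 literal, as
 * used in the Yggdrasil network:
 *
 *   preg_match(CRAWL_URL_REGEXP, 'http://[200:1:2:3:4:5:6:7]/index.html'); // 1
 *   preg_match(CRAWL_URL_REGEXP, 'https://example.com/');                  // 0
 */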

/*
 * Pages limit per new host by default
 *
 * The crawler stops indexing when this limit is reached, to prevent disk overuse
 *
 * A custom rule for a specific host can be provided in the DB `host`.`crawlPageLimit` field
 *
 */
define('CRAWL_HOST_DEFAULT_PAGES_LIMIT', 100000);

/*
 * Default auto-crawl status for a newly added host
 *
 * true - the crawler automatically starts the pages indexer, limited by CRAWL_HOST_DEFAULT_PAGES_LIMIT
 * false - requires manual validation by the moderator in the DB `host`.`status` field
 *
 * This option also disables the host in the search results
 *
 */
define('CRAWL_HOST_DEFAULT_STATUS', true);

/*
 * Index meta tags only,
 * or false to save meta tags + base64-encoded page content in the `hostPage`.`data` field
 *
 * A custom rule for a specific host can be provided in the DB `host`.`crawlMetaOnly` field
 *
 * Warning!
 * Disabling this option requires huge disk storage;
 * it's an experimental feature, oriented to index operations
 *
 */
define('CRAWL_HOST_DEFAULT_META_ONLY', true);

/*
 * Default not suitable/safe for work (NSFW) status for a new host
 *
 * Can be filtered in the search results
 *
 * A custom rule for a specific host can be provided in the DB `host`.`nsfw` field
 *
 */
define('CRAWL_HOST_DEFAULT_NSFW', false);

/*
 * Collect the sitemap index when available
 *
 * At this moment, works only with the CRAWL_ROBOTS_SECONDS_OFFSET/CRAWL_ROBOTS_LIMIT options enabled
 *
 * When a sitemap path is not provided in robots.txt, the crawler scans the default /sitemap.xml
 *
 * true|false
 *
 */
define('CRAWL_SITEMAPS', true);

/*
 * Renew the robots.txt index by the timing offset provided
 *
 */
define('CRAWL_ROBOTS_SECONDS_OFFSET', 60*60*24*7);

/*
 * Hosts robots.txt processing limit in the crawler.php queue
 *
 * Set 0 to disable
 *
 */
define('CRAWL_ROBOTS_LIMIT', 1);

/*
 * Default robots.txt rules, applied when the remote file does not exist
 * The crawler is able to overwrite these rules
 *
 * Presets
 * yggdrasil: /database/yggdrasil/host.robots.md
 *
 */
define('CRAWL_ROBOTS_DEFAULT_RULES', null); // string|null

/*
 * Permanent rules appended to robots.txt if it exists, else to CRAWL_ROBOTS_DEFAULT_RULES
 * The crawler does not overwrite these rules
 *
 * Presets
 * yggdrasil: /database/yggdrasil/host.robotsPostfix.md
 *
 */
define('CRAWL_ROBOTS_POSTFIX_RULES', null); // string|null

/*
 * Generates the hostPageDom index based on the hostPage.data field
 *
 * Can be useful for building semantic index queries (config/sphinx.conf.txt)
 *
 * At this moment the feature is available in the CLI only (cli/yggo.php)
 *
 */
define('CRAWL_HOST_PAGE_DOM_SELECTORS', 'h1;h2;h3;h4;h5;h6');
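
/*
 * A minimal sketch (illustration only) of how this semicolon-separated
 * selector list could be split for processing:
 *
 *   foreach (explode(';', CRAWL_HOST_PAGE_DOM_SELECTORS) as $selector) {
 *     // query the page DOM for $selector, e.g. the h1..h6 headings
 *   }
 */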

/*
 * Strip HTML tags in the CRAWL_HOST_PAGE_DOM_SELECTORS content
 *
 */
define('CRAWL_HOST_PAGE_DOM_STRIP_TAGS', true);

/*
 * Look for third-party manifests to collect the distributed index
 *
 * The API address provided in the yggo meta tag
 * will be stored in the `manifest` DB table
 *
 * Collects URLs that match the CRAWL_URL_REGEXP condition
 *
 */
define('CRAWL_MANIFEST', true);

/*
 * Manifest API version compatibility
 *
 */
define('CRAWL_MANIFEST_API_VERSION', 0.10);

/*
 * Default auto-crawl status for a newly added manifest
 *
 * true - the crawler automatically starts the manifest indexer
 * false - requires manual validation by the moderator in the DB `manifest`.`status` field
 *
 * This option applies only when CRAWL_MANIFEST is enabled
 *
 */
define('CRAWL_MANIFEST_DEFAULT_STATUS', true);

// Cleaner settings

/*
 * Save cleaner debug info to the `logCleaner` table
 *
 */
define('CLEAN_LOG_ENABLED', true);

/*
 * Auto-clean `logCleaner` items older than this seconds offset
 *
 */
define('CLEAN_LOG_SECONDS_OFFSET', 60*60*24*30);

/*
 * Hosts limit per crontab execution step (https://github.com/YGGverse/YGGo#crontab)
 *
 * This option works with CLEAN_HOST_SECONDS_OFFSET
 *
 * The value depends on the CPU resources available
 *
 */
define('CLEAN_HOST_LIMIT', 20);

/*
 * Apply cleaning rules to pages older than the value provided
 *
 * This option works with the CLEAN_HOST_LIMIT step queue
 *
 * Pay attention that the CLEAN_HOST_LIMIT + CLEAN_HOST_SECONDS_OFFSET pair
 * must have enough capacity to process all pages in the DB index,
 *
 * or the cleaner can get stuck in the queue
 *
 */
define('CLEAN_HOST_SECONDS_OFFSET', 60*60*24*30);

/*
 * Remove the page ban after the following time
 *
 * This option is used by the crawler and the search page
 * to prevent extra HTTP requests to unavailable or non-conforming resources
 *
 */
define('CLEAN_PAGE_BAN_SECONDS_OFFSET', 60*60*24*30);

/*
 * Remove page description history after the following time
 *
 */
define('CLEAN_PAGE_DESCRIPTION_OFFSET', 60*60*24*30*12*10);

/*
 * Remove page DOM history after the following time
 *
 */
define('CLEAN_PAGE_DOM_OFFSET', 60*60*24*30*12*10);

/*
 * Database tables optimization
 *
 * Reorganizes the physical storage of table data and associated index data,
 * to reduce storage space and improve I/O efficiency when accessing the tables.
 * Read more: https://www.forknerds.com/reduce-the-size-of-mysql/#Shrink_and_Optimize_MySQL
 *
 * When enabled - requires enough RAM
 *
 */
define('CLEAN_DB_TABLES_OPTIMIZATION', false);
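
/*
 * For reference, MySQL table optimization of this kind is typically performed
 * with a statement like the one below (a sketch; the actual table list is
 * defined by the cleaner implementation):
 *
 *   OPTIMIZE TABLE `host`, `hostPage`;
 */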

// API settings

/*
 * JSON API features
 *
 * When false - all the API action settings below will be ignored
 *
 */
define('API_ENABLED', true);

/*
 * Search API
 *
 * When false - API_SEARCH_PAGINATION_RESULTS_LIMIT will be ignored
 *
 */
define('API_SEARCH_ENABLED', true);

/*
 * Search results per page
 *
 */
define('API_SEARCH_PAGINATION_RESULTS_LIMIT', 20);

/*
 * Hosts distribution API
 *
 * When false - API_HOSTS_FIELDS will be ignored
 *
 */
define('API_HOSTS_ENABLED', true);

/*
 * Database `host` fields, comma separated, or * to share all the fields
 *
 */
define('API_HOSTS_FIELDS',
  '`host`.`scheme`,
   `host`.`name`,
   `host`.`port`,
   `host`.`crawlPageLimit`,
   `host`.`robots`,
   `host`.`robotsPostfix`,
   `host`.`nsfw`,
   `host`.`timeAdded`,
   `host`.`timeUpdated`,
   (SELECT COUNT(*) FROM `hostPage` WHERE `hostPage`.`hostId` = `host`.`hostId`) AS `hostPagesTotal`');
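
/*
 * A sketch (illustration only, not the project's actual query builder) of how
 * this field list is meant to be embedded into a SELECT statement:
 *
 *   $query = sprintf('SELECT %s FROM `host`', API_HOSTS_FIELDS);
 */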

/*
 * Manifest API
 *
 * Application meta sharing between remote YGGo nodes
 *
 * When true - makes this node public for distributed index sharing
 *
 */
define('API_MANIFEST_ENABLED', true);