Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

github.com/matomo-org/matomo.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorEre Maijala <ere.maijala@helsinki.fi>2021-11-25 05:08:46 +0300
committerGitHub <noreply@github.com>2021-11-25 05:08:46 +0300
commite43d2d5d5e8e060acc853c6435b206a541f20c34 (patch)
tree74b20a952f7aeb6b8eacbaf0e28a32f89c31a46f /plugins/CoreConsole
parentd028bd2024d4d32cc732c835f45fa0dc69c9fa87 (diff)
Add option to specify the max number of websites to process by archiver. (#18326)
* Add an option to specify the maximum number of websites to process by the archiver. This makes it possible to limit the process lifetime to avoid excessive memory usage (possibly due to PHP bugs such as 79519).
* Add an option to limit the number of archives to process.
* Clean up a whitespace change.
* Make --max-archives-to-process a hard limit.
Diffstat (limited to 'plugins/CoreConsole')
-rw-r--r--plugins/CoreConsole/Commands/CoreArchiver.php6
1 file changed, 6 insertions(+), 0 deletions(-)
diff --git a/plugins/CoreConsole/Commands/CoreArchiver.php b/plugins/CoreConsole/Commands/CoreArchiver.php
index 9f4f3cd877..25cef83c8f 100644
--- a/plugins/CoreConsole/Commands/CoreArchiver.php
+++ b/plugins/CoreConsole/Commands/CoreArchiver.php
@@ -46,6 +46,8 @@ class CoreArchiver extends ConsoleCommand
$archiver->concurrentRequestsPerWebsite = $input->getOption('concurrent-requests-per-website');
$archiver->maxConcurrentArchivers = $input->getOption('concurrent-archivers');
$archiver->shouldArchiveAllSites = $input->getOption('force-all-websites');
+ $archiver->maxSitesToProcess = $input->getOption('max-websites-to-process');
+ $archiver->maxArchivesToProcess = $input->getOption('max-archives-to-process');
$archiver->setUrlToPiwik($url);
$archiveFilter = new CronArchive\ArchiveFilter();
@@ -111,6 +113,10 @@ class CoreArchiver extends ConsoleCommand
"When processing a website and its segments, number of requests to process in parallel", CronArchive::MAX_CONCURRENT_API_REQUESTS);
$command->addOption('concurrent-archivers', null, InputOption::VALUE_OPTIONAL,
"The number of max archivers to run in parallel. Depending on how you start the archiver as a cronjob, you may need to double the amount of archivers allowed if the same process appears twice in the `ps ex` output.", false);
+ $command->addOption('max-websites-to-process', null, InputOption::VALUE_REQUIRED,
+ "Maximum number of websites to process during a single execution of the archiver. Can be used to limit the process lifetime e.g. to avoid increasing memory usage.");
+ $command->addOption('max-archives-to-process', null, InputOption::VALUE_REQUIRED,
+ "Maximum number of archives to process during a single execution of the archiver. Can be used to limit the process lifetime e.g. to avoid increasing memory usage.");
$command->addOption('disable-scheduled-tasks', null, InputOption::VALUE_NONE,
"Skips executing Scheduled tasks (sending scheduled reports, db optimization, etc.).");
$command->addOption('accept-invalid-ssl-certificate', null, InputOption::VALUE_NONE,