diff options
author | Ere Maijala <ere.maijala@helsinki.fi> | 2021-11-25 05:08:46 +0300 |
---|---|---|
committer | GitHub <noreply@github.com> | 2021-11-25 05:08:46 +0300 |
commit | e43d2d5d5e8e060acc853c6435b206a541f20c34 (patch) | |
tree | 74b20a952f7aeb6b8eacbaf0e28a32f89c31a46f /tests/PHPUnit/Integration | |
parent | d028bd2024d4d32cc732c835f45fa0dc69c9fa87 (diff) |
Add option to specify the max number of websites to process by archiver. (#18326)
* Add option to specify the max number of websites to process by archiver.
This makes it possible to limit the process lifetime to avoid excessive memory usage (possibly due to PHP bugs such as 79519).
* Add an option to limit the number of archives to process.
* Clean up a whitespace change.
* Make --max-archives-to-process a hard limit.
Diffstat (limited to 'tests/PHPUnit/Integration')
-rw-r--r-- | tests/PHPUnit/Integration/CronArchive/QueueConsumerTest.php | 84 |
1 file changed, 84 insertions, 0 deletions
diff --git a/tests/PHPUnit/Integration/CronArchive/QueueConsumerTest.php b/tests/PHPUnit/Integration/CronArchive/QueueConsumerTest.php
index a2e6a488a2..ac88ba9b8e 100644
--- a/tests/PHPUnit/Integration/CronArchive/QueueConsumerTest.php
+++ b/tests/PHPUnit/Integration/CronArchive/QueueConsumerTest.php
@@ -715,6 +715,90 @@ class QueueConsumerTest extends IntegrationTestCase
         $this->assertEquals($uniqueInvalidationDescs, $invalidationDescs, "Found duplicate archives being processed.");
     }
 
+    public function test_max_websites_to_process()
+    {
+        Fixture::createWebsite('2021-11-16');
+        Fixture::createWebsite('2021-11-16');
+        Fixture::createWebsite('2021-11-16');
+
+        // force archiving so we don't skip those without visits
+        Piwik::addAction('Archiving.getIdSitesToArchiveWhenNoVisits', function (&$idSites) {
+            $idSites[] = 1;
+            $idSites[] = 2;
+            $idSites[] = 3;
+        });
+
+        $cronArchive = new MockCronArchive();
+        $cronArchive->init();
+
+        $archiveFilter = $this->makeTestArchiveFilter();
+
+        $queueConsumer = new QueueConsumer(
+            StaticContainer::get(LoggerInterface::class),
+            new FixedSiteIds([1, 2, 3]),
+            3,
+            24,
+            new Model(),
+            new SegmentArchiving('beginning_of_time'),
+            $cronArchive,
+            new RequestParser(true),
+            $archiveFilter
+        );
+        $this->assertNull($queueConsumer->setMaxSitesToProcess());
+        $this->assertEquals(1, $queueConsumer->setMaxSitesToProcess(1));
+
+        $invalidations = [
+            ['idarchive' => 1, 'name' => 'done', 'idsite' => 1, 'date1' => '2021-11-16', 'date2' => '2021-11-16', 'period' => 1, 'report' => null],
+            ['idarchive' => 2, 'name' => 'done', 'idsite' => 2, 'date1' => '2021-11-16', 'date2' => '2021-11-16', 'period' => 2, 'report' => null],
+            ['idarchive' => 3, 'name' => 'done', 'idsite' => 3, 'date1' => '2021-11-16', 'date2' => '2021-11-16', 'period' => 3, 'report' => null],
+        ];
+
+        $this->insertInvalidations($invalidations);
+
+        Config::getInstance()->General['enabled_periods_API'] = 'day,week,range';
+
+        $iteratedInvalidations = [];
+        while (true) {
+            $next = $queueConsumer->getNextArchivesToProcess();
+            if ($next === null) {
+                break;
+            }
+            if (empty($next)) {
+                continue;
+            }
+
+            foreach ($next as &$item) {
+                $this->simulateJobStart($item['idinvalidation']);
+
+                unset($item['periodObj']);
+                unset($item['idinvalidation']);
+                unset($item['ts_invalidated']);
+            }
+
+            $iteratedInvalidations[] = $next;
+        }
+
+        $expectedInvalidationsFound = [
+            [
+                [
+                    'idarchive' => '1',
+                    'name' => 'done',
+                    'idsite' => '1',
+                    'date1' => '2021-11-16',
+                    'date2' => '2021-11-16',
+                    'period' => '1',
+                    'ts_started' => null,
+                    'status' => '0',
+                    'report' => null,
+                    'plugin' => null,
+                    'segment' => '',
+                ],
+            ]
+        ];
+
+        $this->assertEquals($expectedInvalidationsFound, $iteratedInvalidations, "Invalidations inserted:\n" . var_export($invalidations, true));
+    }
+
     private function makeTestArchiveFilter($restrictToDateRange = null, $restrictToPeriods = null, $segmentsToForce = null, $disableSegmentsArchiving = false, $skipSegmentsToday = false)
     {