From 8ca2b3180448293d42cdbfe4ad024ccc0f7b5206 Mon Sep 17 00:00:00 2001 From: Roeland Jago Douma Date: Tue, 16 Jul 2019 19:10:09 +0200 Subject: [PATCH] Do not keep searching for recent If userA has a lot of recent files but only shares 1 file with userB (that has no files at all), we could keep searching until we run out of recent files for userA. Now assume the inactive userB has 20 incoming shares like that from different users. getRecent then basically keeps consuming huge amounts of resources and with each iteration the load on the DB increases (because of the offset). This makes sure we do not get more than 3 times the limit we search for or more than 5 queries. This means we might miss some recent entries but we should fix that separately. This is just to make sure the load on the DB stays sane. Signed-off-by: Roeland Jago Douma --- lib/private/Files/Node/Folder.php | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/lib/private/Files/Node/Folder.php b/lib/private/Files/Node/Folder.php index 1e9088a7c19a0..b0569a24aa140 100644 --- a/lib/private/Files/Node/Folder.php +++ b/lib/private/Files/Node/Folder.php @@ -383,6 +383,8 @@ public function getRecent($limit, $offset = 0) { // Search in batches of 500 entries $searchLimit = 500; $results = []; + $searchResultCount = 0; + $count = 0; do { $searchResult = $this->recentSearch($searchLimit, $offset, $storageIds, $folderMimetype); @@ -391,6 +393,8 @@ public function getRecent($limit, $offset = 0) { break; } + $searchResultCount += count($searchResult); + $parseResult = $this->recentParse($searchResult, $mountMap, $mimetypeLoader); foreach ($parseResult as $result) { @@ -398,7 +402,8 @@ public function getRecent($limit, $offset = 0) { } $offset += $searchLimit; - } while (count($results) < $limit); + $count++; + } while (count($results) < $limit && ($searchResultCount < (3 * $limit) || $count < 5)); return array_slice($results, 0, $limit); }