diff --git a/app/src/main/java/org/schabi/newpipe/local/feed/service/FeedLoadManager.kt b/app/src/main/java/org/schabi/newpipe/local/feed/service/FeedLoadManager.kt
index 901ceadf762..9b0f177d568 100644
--- a/app/src/main/java/org/schabi/newpipe/local/feed/service/FeedLoadManager.kt
+++ b/app/src/main/java/org/schabi/newpipe/local/feed/service/FeedLoadManager.kt
@@ -17,6 +17,7 @@ import org.schabi.newpipe.database.subscription.NotificationMode
 import org.schabi.newpipe.database.subscription.SubscriptionEntity
 import org.schabi.newpipe.extractor.Info
 import org.schabi.newpipe.extractor.NewPipe
+import org.schabi.newpipe.extractor.ServiceList
 import org.schabi.newpipe.extractor.feed.FeedInfo
 import org.schabi.newpipe.extractor.stream.StreamInfoItem
 import org.schabi.newpipe.ktx.getStringSafe
@@ -90,6 +91,10 @@ class FeedLoadManager(private val context: Context) {
             else -> feedDatabaseManager.outdatedSubscriptionsForGroup(groupId, outdatedThreshold)
         }
 
+        // like `currentProgress`, but counts the number of YouTube extractions that have begun, so
+        // they can be properly throttled every once in a while (see doOnNext below)
+        val youtubeExtractionCount = AtomicInteger()
+
         return outdatedSubscriptions
             .take(1)
             .doOnNext {
@@ -105,6 +110,15 @@ class FeedLoadManager(private val context: Context) {
             .observeOn(Schedulers.io())
             .flatMap { Flowable.fromIterable(it) }
             .takeWhile { !cancelSignal.get() }
+            .doOnNext { subscriptionEntity ->
+                // throttle YouTube extractions once every BATCH_SIZE to avoid being rate limited
+                if (subscriptionEntity.serviceId == ServiceList.YouTube.serviceId) {
+                    val previousCount = youtubeExtractionCount.getAndIncrement()
+                    if (previousCount != 0 && previousCount % BATCH_SIZE == 0) {
+                        Thread.sleep(DELAY_BETWEEN_BATCHES_MILLIS.random())
+                    }
+                }
+            }
             .parallel(PARALLEL_EXTRACTIONS, PARALLEL_EXTRACTIONS * 2)
             .runOn(Schedulers.io(), PARALLEL_EXTRACTIONS * 2)
             .filter { !cancelSignal.get() }
@@ -328,7 +342,19 @@ class FeedLoadManager(private val context: Context) {
         /**
          * How many extractions will be running in parallel.
         */
-        private const val PARALLEL_EXTRACTIONS = 6
+        private const val PARALLEL_EXTRACTIONS = 3
+
+        /**
+         * How many YouTube extractions to perform before waiting [DELAY_BETWEEN_BATCHES_MILLIS]
+         * to avoid being rate limited
+         */
+        private const val BATCH_SIZE = 50
+
+        /**
+         * Wait a random delay in this range once every [BATCH_SIZE] YouTube extractions to avoid
+         * being rate limited
+         */
+        private val DELAY_BETWEEN_BATCHES_MILLIS = (6000L..12000L)
 
         /**
          * Number of items to buffer to mass-insert in the database.
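
For reference, the throttling added in the doOnNext operator above boils down to the following standalone sketch, assuming it is pulled out of the Rx pipeline into a plain function; the names extractionCount and throttleEveryBatch are illustrative and not part of this change:

import java.util.concurrent.atomic.AtomicInteger

// Illustrative constants; in the diff they live in FeedLoadManager's companion object.
private const val BATCH_SIZE = 50
private val DELAY_BETWEEN_BATCHES_MILLIS = 6000L..12000L

// Counter shared across extractions, analogous to `youtubeExtractionCount` in the diff.
private val extractionCount = AtomicInteger()

// Sleep for a random 6-12 s delay once every BATCH_SIZE invocations; the very first
// invocation (previousCount == 0) is never throttled.
fun throttleEveryBatch() {
    val previousCount = extractionCount.getAndIncrement()
    if (previousCount != 0 && previousCount % BATCH_SIZE == 0) {
        Thread.sleep(DELAY_BETWEEN_BATCHES_MILLIS.random())
    }
}

In the diff this check runs before .parallel()/.runOn(), so the sleep happens on the upstream thread and pauses the emission of further subscriptions into the parallel extraction rails rather than blocking all of them individually.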