Implement rudimentary rate limiting when importing new items

Salt 2025-01-24 03:35:29 -06:00
parent d78b0cfd94
commit a4e3deda97
2 changed files with 8 additions and 0 deletions

View File

@@ -1,6 +1,7 @@
#! /usr/bin/env python3
import datetime
import odyseescraper
import time
import uuid
from django.core.management.base import BaseCommand, CommandError
from django.db import models
@@ -35,6 +36,8 @@ class Command(BaseCommand):
except Exception as e:
self.stdout.write(self.style.WARNING(f'Failed to update {str(channel)}: {e}'))
failures.append({"channel": str(channel),"item":releases[release]["title"],"error":e})
# Sleep for one second here so we don't slam Odysee with a flood of requests
time.sleep(1)
if failures:
self.stdout.write(self.style.ERROR('Errors occurred while importing data:'))
for error in failures:

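The one-second pause in the hunk above is there to pace the import loop itself, not just the failure path. A minimal sketch of that pattern, with hypothetical names (paced_import, do_import) standing in for the management command's actual loop and objects:

import time

def paced_import(items, do_import):
    # Hypothetical stand-in for the management command's loop; items and
    # do_import are illustrative names, not identifiers from the commit.
    failures = []
    for item in items:
        try:
            do_import(item)
        except Exception as e:
            failures.append({"item": str(item), "error": e})
        # The pause runs on every iteration, success or failure, so
        # consecutive requests to Odysee are at least one second apart.
        time.sleep(1)
    return failures
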
View File

@@ -1,6 +1,7 @@
#! /usr/bin/env python3
import json
import requests
import time
odysee_url = r'https://odysee.com'
odysee_api_url = r'https://api.na-backend.odysee.com/api/v1/proxy'
@@ -59,6 +60,10 @@ def odysee_get_releases(handle):
}
if i == lastpage:
break
else:
# If we're not on the last page, sleep for a second to go easy on Odysee
# This isn't a proper rate limiter, but it's something.
time.sleep(1)
except requests.RequestException as e:
print(f'RequestException occurred while getting releases for {handle}: {e}')
return None
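As the comment in the second hunk concedes, a bare time.sleep(1) is not a proper rate limiter: it always pays the full second, even when fetching and processing the page already took longer than the target interval. A minimal sketch of the kind of limiter it alludes to, sleeping only for whatever remains of the interval since the last request; the class name and usage below are assumptions, not part of this commit:

import time

class MinIntervalLimiter:
    # Hypothetical helper, not part of the commit: enforces a minimum
    # spacing between calls, sleeping only for the time still remaining.
    def __init__(self, min_interval=1.0):
        self.min_interval = min_interval
        self._last_call = None

    def wait(self):
        now = time.monotonic()
        if self._last_call is not None:
            remaining = self.min_interval - (now - self._last_call)
            if remaining > 0:
                time.sleep(remaining)
        self._last_call = time.monotonic()

Usage would mirror the existing calls: create one limiter per scrape, then call limiter.wait() immediately before each request instead of the unconditional time.sleep(1).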