crawler: add ability to limit cache size

2020-09-30 23:59:55 +02:00
parent 2fc7cd391c
commit eed949736a
3 changed files with 63 additions and 2 deletions


@@ -284,6 +284,12 @@ def cgi_handle_request():
def cgi_start_server():
    crawler.default_cache.autotrim()

    print('Serving http://localhost:%s/' % PORT)
    httpd = wsgiref.simple_server.make_server('', PORT, application)
    httpd.serve_forever()


if 'gunicorn' in os.getenv('SERVER_SOFTWARE', ''):
    crawler.default_cache.autotrim()
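
The diff only wires crawler.default_cache.autotrim() into the two entry points (the built-in wsgiref server and the module-level gunicorn case); the trimming logic itself lives in the crawler module and is not shown here. As a rough idea of what a size-limited cache with such an autotrim() hook can look like, here is a minimal, self-contained sketch. The LimitedCache class, its maxsize and delay parameters, and the least-recently-used eviction policy are illustrative assumptions, not the project's actual implementation.

import threading
import time


class LimitedCache:
    # Illustrative in-memory cache that keeps at most `maxsize` entries
    # (hypothetical example, not the crawler module's real cache class).

    def __init__(self, maxsize=1000):
        self.maxsize = maxsize
        self._data = {}  # key -> (last_access_time, value)

    def __setitem__(self, key, value):
        self._data[key] = (time.time(), value)

    def __getitem__(self, key):
        timestamp, value = self._data[key]
        self._data[key] = (time.time(), value)  # refresh on access
        return value

    def trim(self):
        # Evict the least recently used entries until the size limit holds.
        excess = len(self._data) - self.maxsize
        if excess > 0:
            oldest = sorted(self._data, key=lambda k: self._data[k][0])
            for key in oldest[:excess]:
                del self._data[key]

    def autotrim(self, delay=600):
        # Trim now, then re-schedule on a daemon timer so the server
        # process keeps the cache bounded without blocking requests.
        self.trim()
        timer = threading.Timer(delay, self.autotrim, args=(delay,))
        timer.daemon = True
        timer.start()

Because the timer re-schedules itself, a single autotrim() call at start-up, which is what the diff adds for both serving modes, is enough to keep the cache bounded for the lifetime of the process.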