requests_cache: port to new cache cleanup function
I find the API still confusing, but it's better than before.
This commit is contained in:
parent
6ccea00bba
commit
38e6bc6e47
16
autobuild.py
16
autobuild.py
@ -29,7 +29,7 @@ import shutil
|
|||||||
import json
|
import json
|
||||||
import io
|
import io
|
||||||
from functools import lru_cache
|
from functools import lru_cache
|
||||||
from datetime import datetime, timezone
|
from datetime import datetime, timezone, timedelta
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
from hashlib import sha256
|
from hashlib import sha256
|
||||||
from typing import Generator, Union, AnyStr, List, Any, Dict, Tuple, Set, Optional, Sequence, \
|
from typing import Generator, Union, AnyStr, List, Any, Dict, Tuple, Set, Optional, Sequence, \
|
||||||
@ -1669,28 +1669,22 @@ def install_requests_cache() -> None:
|
|||||||
|
|
||||||
# Monkey patch globally, so pygithub uses it as well.
|
# Monkey patch globally, so pygithub uses it as well.
|
||||||
# Only do re-validation with etag/date etc and ignore the cache-control headers that
|
# Only do re-validation with etag/date etc and ignore the cache-control headers that
|
||||||
# github sends by default with 60 seconds. This is only possible with requests_cache 0.10+
|
# github sends by default with 60 seconds.
|
||||||
cache_dir = os.path.join(SCRIPT_DIR, '.autobuild_cache')
|
cache_dir = os.path.join(SCRIPT_DIR, '.autobuild_cache')
|
||||||
os.makedirs(cache_dir, exist_ok=True)
|
os.makedirs(cache_dir, exist_ok=True)
|
||||||
requests_cache.install_cache(
|
requests_cache.install_cache(
|
||||||
always_revalidate=True,
|
always_revalidate=True,
|
||||||
cache_control=False,
|
cache_control=False,
|
||||||
expire_after=0,
|
expire_after=requests_cache.EXPIRE_IMMEDIATELY,
|
||||||
backend=SQLiteCache(os.path.join(cache_dir, 'http_cache.sqlite')))
|
backend=SQLiteCache(os.path.join(cache_dir, 'http_cache.sqlite')))
|
||||||
|
|
||||||
# Call this once, so it gets cached from the main thread and can be used in a thread pool
|
# Call this once, so it gets cached from the main thread and can be used in a thread pool
|
||||||
get_requests_session(nocache=True)
|
get_requests_session(nocache=True)
|
||||||
|
|
||||||
# TODO: Use https://github.com/requests-cache/requests-cache/pull/624
|
# Delete old cache entries, so this doesn't grow indefinitely
|
||||||
# once it is released
|
|
||||||
|
|
||||||
# How to limit the cache size is an open question, at least to me:
|
|
||||||
# https://github.com/reclosedev/requests-cache/issues/620
|
|
||||||
# so do it the simple/stupid way
|
|
||||||
cache = requests_cache.get_cache()
|
cache = requests_cache.get_cache()
|
||||||
assert cache is not None
|
assert cache is not None
|
||||||
if cache.response_count() > 200:
|
cache.delete(older_than=timedelta(days=7))
|
||||||
cache.clear()
|
|
||||||
|
|
||||||
|
|
||||||
def requests_cache_disabled() -> Any:
|
def requests_cache_disabled() -> Any:
|
||||||
|
|||||||
Loading…
x
Reference in New Issue
Block a user