Check if the cache needs to be updated.
Also add debug messages.
parent
2e6781ee55
commit
ab6388d302
|
@ -4,8 +4,8 @@ Prints the URL of a random follower from your list of Mastodon followers you are
|
|||
|
||||
By default, it doesn't suggest a user that has already been suggested previously. To ignore previous suggestions, you can use the `--ignore-repeats` (or `-i`) parameter.
|
||||
|
||||
Currently, it only downloads your list of followers and followings once and it uses that local copy with any subsequent executions.
|
||||
To force an update of the cache, you can use the `--update-cache` (or `-u`) option.
|
||||
It downloads your list of followers and followings and caches it for reuse on every subsequent execution until it finds you have a new follower or following, and in that case it redownloads the full list of followers and followings.
|
||||
If you want to skip this check and always use the cached copy, you can use the `--force-cache` (or `-c`) option or `--update-cache` (or `-u`) option if you want to force an update of the cache.
|
||||
|
||||
## Configuration
|
||||
|
||||
|
|
111
mastoroulette.py
111
mastoroulette.py
|
@ -29,6 +29,10 @@ cache_file = os.path.join(
|
|||
"{}@{}.json".format(USER, urlparse(INSTANCE).netloc)
|
||||
)
|
||||
|
||||
# Set URLs
|
||||
followers_url = f'{INSTANCE}/api/v1/accounts/{USER}/followers'
|
||||
followings_url = f'{INSTANCE}/api/v1/accounts/{USER}/following'
|
||||
|
||||
|
||||
# Function to retrieve API results when they are paginated
|
||||
def get_paged_results(url):
|
||||
|
@ -57,40 +61,98 @@ def get_paged_results(url):
|
|||
return results
|
||||
|
||||
|
||||
def debug(msg):
|
||||
if not args.debug:
|
||||
return
|
||||
print(msg, file=stderr)
|
||||
|
||||
|
||||
# Build arguments list and parse current arguments
|
||||
parser = ArgumentParser(
|
||||
description="Suggests Mastodon users to follow among the followers you don't follow.",
|
||||
epilog="https://git.chuso.net/chuso/mastoroulette")
|
||||
parser.add_argument("-u", "--update-cache",
|
||||
action="store_true", help="Update the cache")
|
||||
cache_options = parser.add_mutually_exclusive_group()
|
||||
cache_options.add_argument("-u", "--update-cache",
|
||||
action="store_true",
|
||||
help="Update the cache")
|
||||
cache_options.add_argument("-c", "--force-cache",
|
||||
action="store_true",
|
||||
help="Force using the cache without checking for updates")
|
||||
parser.add_argument("-i", "--ignore-repeats",
|
||||
action="store_true",
|
||||
help="Allows suggesting users that were already suggested in the past")
|
||||
parser.add_argument("-d", "--debug",
|
||||
action="store_true",
|
||||
help="Show debug messages")
|
||||
args = parser.parse_args()
|
||||
|
||||
# A forced update was not requested
|
||||
if not args.update_cache:
|
||||
try:
|
||||
# Try to load data from the cache file
|
||||
with open(cache_file, "r") as f:
|
||||
cache = json.load(f)
|
||||
followers = cache["followers"]
|
||||
followings = cache["followings"]
|
||||
suggested = cache["suggested"]
|
||||
print("Using cache last updated on {}".format(datetime.fromtimestamp(os.path.getmtime(cache_file))))
|
||||
except Exception as e:
|
||||
# If opening the cache file failed, we reset args.update_cache to True
|
||||
# as if an update was requested, because we do need to run an update
|
||||
# when the cache file is not available
|
||||
print("Ignoring cache, failed to open: {}".format(str(e)), file=stderr)
|
||||
args.update_cache = True
|
||||
suggested = []
|
||||
# Initialize cache data
|
||||
followers = followings = suggested = []
|
||||
|
||||
# An update was requested (either manually or because the cache file is unavailable)
|
||||
try:
|
||||
debug("Trying to open cache file")
|
||||
# Try to load data from the cache file
|
||||
with open(cache_file, "r") as f:
|
||||
cache = json.load(f)
|
||||
# We always need to load the cached suggested accounts as long as
|
||||
# the cache file was successfully opened, regardless of whether
|
||||
# a cache update was requested
|
||||
suggested = cache["suggested"]
|
||||
debug(f"Cache file {cache_file} opened")
|
||||
except Exception as e:
|
||||
# If opening the cache file failed, we reset args.update_cache to True
|
||||
# as if an update was requested, because we do need to run an update
|
||||
# when the cache file is not available
|
||||
print("Ignoring cache, failed to open: {}".format(str(e)), file=stderr)
|
||||
args.update_cache = True
|
||||
|
||||
# A forced update was not requested
|
||||
if args.update_cache:
|
||||
debug("A cache update was requested, ignoring cache file")
|
||||
else:
|
||||
debug("Loading cache data")
|
||||
updated = cache["updated"]
|
||||
followers = cache["followers"]
|
||||
followings = cache["followings"]
|
||||
if args.force_cache:
|
||||
debug("Forced use of the cache was requested. Not checking for cache freshness")
|
||||
else:
|
||||
debug("Checking cache freshness")
|
||||
# If force using the cache is not chosen, we need to check
|
||||
# if the cache is up to date
|
||||
r1 = get(f"{followers_url}?limit=1", headers={"Authorization": BEARER})
|
||||
r2 = get(f"{followings_url}?limit=1", headers={"Authorization": BEARER})
|
||||
|
||||
if r1.status_code != 200 or r2.status_code != 200:
|
||||
# Some error happened checking the server data,
|
||||
# force an update
|
||||
print(f"Cannot check cache: {r1.status_code}, {r2.status_code}", file=stderr)
|
||||
args.update_cache = True
|
||||
else:
|
||||
f1 = r1.json()[0]
|
||||
f2 = r2.json()[0]
|
||||
debug(f"Checking if follower {f1['acct']} and following {f2['acct']} are known")
|
||||
if not (
|
||||
[f for f in followers if f["uri"] == f1["uri"]] and
|
||||
[f for f in followings if f["uri"] == f2["uri"]]
|
||||
):
|
||||
debug("Cached data was found to be not current")
|
||||
# Force a cache update as one follower or following is
|
||||
# not found in the cache and force_cache is not chosen
|
||||
args.update_cache = True
|
||||
|
||||
# An update was requested (either manually or
|
||||
# because the cache file is unavailable or outdated)
|
||||
if args.update_cache:
|
||||
debug("Updating cache")
|
||||
# Retrieve lists of followers and followings
|
||||
followers = get_paged_results(f'{INSTANCE}/api/v1/accounts/{USER}/followers')
|
||||
followings = get_paged_results(f'{INSTANCE}/api/v1/accounts/{USER}/following')
|
||||
followers = get_paged_results(followers_url)
|
||||
followings = get_paged_results(followings_url)
|
||||
updated = datetime.now().timestamp()
|
||||
else:
|
||||
print("Using cache last updated on {}".format(
|
||||
datetime.fromtimestamp(float(updated))
|
||||
))
|
||||
|
||||
# Shuffle followers list
|
||||
shuffle(followers)
|
||||
|
@ -100,11 +162,13 @@ for f in followers:
|
|||
# If the follower is also in the following list, we continue to the next one
|
||||
# We use the `uri` field to compare users because that field is assumed to be unique
|
||||
if [u for u in followings if u["uri"] == f["uri"]]:
|
||||
debug(f"Skipping {f['acct']} as it is already followed")
|
||||
continue
|
||||
# For users that were already suggested, we check the id which is assumed
|
||||
# to be immutable (but instance-specific) as opposed to uri that can change
|
||||
# if the user migrated to a differnt instance
|
||||
if not args.ignore_repeats and f["id"] not in suggested:
|
||||
if not args.ignore_repeats and f["id"] in suggested:
|
||||
debug(f"Skipping {f['acct']} as it was already suggested")
|
||||
continue
|
||||
|
||||
print(f["url"])
|
||||
|
@ -116,6 +180,7 @@ for f in followers:
|
|||
os.makedirs(os.path.dirname(cache_file), 0o700, True)
|
||||
with open(cache_file, "w") as f:
|
||||
json.dump({
|
||||
"updated": updated,
|
||||
"followers": followers,
|
||||
"followings": followings,
|
||||
"suggested": suggested
|
||||
|
|
Loading…
Reference in New Issue