Save followers and followings to a cache file and remember users that were suggested.

Also fix a bug that always suggested the first user in the list of followers.
main
Jesus P Rey (Chuso) 2023-08-30 13:35:43 +02:00
parent 7158cf3ac5
commit 3fcfdb95d5
No known key found for this signature in database
GPG Key ID: 0E663E0721D67C24
2 changed files with 58 additions and 7 deletions

View File

@ -25,8 +25,6 @@ For example, to get the numeric id for the account @chuso@mastodon.social: https
Planned features that may be implemented, or may not, depending on when I lose interest in this:
* Remember which users were already suggested so they are not repeated.
* Cache followers and followings so we don't need to download them from the server every time.
* Try to guess when the cache needs to be updated.
* Optionally return more than one user.
* Proper configuration support.

View File

@ -2,10 +2,16 @@
# SPDX-License-Identifier: Artistic-2.0
from argparse import ArgumentParser
from appdirs import user_cache_dir
from datetime import datetime
import json
import os
from random import shuffle
from requests import get
from requests.utils import parse_header_links
from sys import exit
from sys import stderr, exit
from urllib.parse import urlparse
# Configuration (the required user id is the numeric one)
INSTANCE = ''
@ -17,6 +23,12 @@ BEARER = ""
# USER = '206178'
# BEARER = "aV8bTnC2xR3eG7sH6mF1kP0lY9jI4wDqZoXnS2cRGwq"
# Location of the on-disk cache: one JSON file per (user id, instance host)
# pair, stored under the platform-appropriate user cache directory.
instance_host = urlparse(INSTANCE).netloc
cache_file = os.path.join(
    user_cache_dir("mastoroulette"),
    f"{USER}@{instance_host}.json",
)
# Function to retrieve API results when they are paginated
def get_paged_results(url):
@ -45,9 +57,37 @@ def get_paged_results(url):
return results
# Retrieve lists of followers and followings
followers = get_paged_results(f'{INSTANCE}/api/v1/accounts/{USER}/followers')
followings = get_paged_results(f'{INSTANCE}/api/v1/accounts/{USER}/following')
# Define the command-line interface and parse the invocation arguments
arg_parser = ArgumentParser(
    description="Suggests Mastodon users to follow among the followers you don't follow.",
    epilog="https://git.chuso.net/chuso/mastoroulette",
)
arg_parser.add_argument(
    "-u", "--update-cache",
    action="store_true",
    help="Update the cache",
)
args = arg_parser.parse_args()
# Ids of accounts already suggested in previous runs, so no suggestion is
# ever repeated. Initialized unconditionally: in the original line order the
# empty-list assignment lived in the except branch, so a forced update via
# -u skipped the cache-reading branch entirely and left `suggested`
# undefined, crashing later at suggested.append(...).
suggested = []

# A forced update was not requested: try to serve everything from the cache
if not args.update_cache:
    try:
        # Try to load data from the cache file
        with open(cache_file, "r") as f:
            cache = json.load(f)
        followers = cache["followers"]
        followings = cache["followings"]
        suggested = cache["suggested"]
        print("Using cache last updated on {}".format(datetime.fromtimestamp(os.path.getmtime(cache_file))))
    except Exception as e:
        # If opening the cache file failed (missing file, bad JSON, missing
        # keys), fall back to a live update as if -u had been requested.
        # Deliberately broad: any cache problem means we must re-download.
        print("Ignoring cache, failed to open: {}".format(str(e)), file=stderr)
        args.update_cache = True

# An update was requested (either manually or because the cache file is unavailable)
if args.update_cache:
    # Retrieve lists of followers and followings from the instance API
    followers = get_paged_results(f'{INSTANCE}/api/v1/accounts/{USER}/followers')
    followings = get_paged_results(f'{INSTANCE}/api/v1/accounts/{USER}/following')

# Shuffle the followers so the suggestion is random instead of always the
# same first account in server order
shuffle(followers)
# Walk the shuffled followers and print the first one we do not already
# follow and have not suggested before, then persist everything to the cache.
for f in followers:
    # Skip accounts that were already suggested in a previous run.
    # NOTE(review): this check is implied by the commit intent ("remember
    # users that were suggested so they are not repeated") and by the diff's
    # elided added lines — confirm against the full file.
    if f["id"] in suggested:
        continue
    # Skip followers we already follow (matched by account URI)
    if [u for u in followings if u["uri"] == f["uri"]]:
        continue
    print(f["url"])
    # Add the new suggested account to the list of users that were
    # already suggested and write all data to the cache
    suggested.append(f["id"])
    os.makedirs(os.path.dirname(cache_file), 0o700, exist_ok=True)
    # Use a distinct name for the file handle: the original
    # `with open(...) as f` shadowed the loop variable `f`
    with open(cache_file, "w") as cache_fh:
        json.dump({
            "followers": followers,
            "followings": followings,
            "suggested": suggested
        }, cache_fh)
    break