Save this in a file such as sort_websites_latency.py and run it with `python sort_websites_latency.py`. It requires the third-party `requests` library (`pip install requests`).

import time
import requests

# Seconds to sleep between measurement rounds (e.g. set to 10 * 60 for 10 minutes).
TIME_BETWEEN_REQUESTS = 5
# Total measurement duration in seconds (e.g. set to 8 * 60 * 60 for 8 hours).
TIME_TOTAL = 60

def get_latency(url, timeout=10):
    """Return the wall-clock seconds taken by one GET request to *url*.

    Args:
        url: The URL to request.
        timeout: Per-request timeout in seconds, passed to ``requests.get``.
            Without a timeout a single stalled server would block the whole
            measurement run indefinitely.

    Returns:
        Elapsed time of the request in seconds (redirect time included).

    Raises:
        requests.RequestException: On connection errors or timeout.
    """
    # perf_counter is monotonic, so the interval cannot be skewed by
    # system clock adjustments the way time.time() can.
    start = time.perf_counter()
    requests.get(url, timeout=timeout)
    return time.perf_counter() - start

def measure_latencies(urls, duration):
    """Sample every URL repeatedly for *duration* seconds.

    Pauses TIME_BETWEEN_REQUESTS seconds between rounds and returns a
    dict mapping each URL to its list of latency samples.
    """
    samples = {}
    deadline = time.time() + duration
    while time.time() < deadline:
        samples = measure_latencies_for_urls(urls, samples)
        time.sleep(TIME_BETWEEN_REQUESTS)
    return samples

def measure_latencies_for_urls(urls, latencies):
    """Take one latency sample per URL, accumulating into *latencies*."""
    for target in urls:
        latencies = add_latency_to_url(target, get_latency(target), latencies)
    return latencies

def add_latency_to_url(url, latency, latencies):
    """Append *latency* to the sample list for *url*, creating the list on first use.

    Mutates and returns the same *latencies* dict.
    """
    latencies.setdefault(url, []).append(latency)
    return latencies

def average_latencies(latencies):
    """Return (url, mean latency) pairs, one per URL in *latencies*."""
    return [
        (url, sum(samples) / len(samples))
        for url, samples in latencies.items()
    ]

def sort_latencies(averages):
    """Return the (url, avg) pairs ordered fastest-first by average latency."""
    ranked = list(averages)
    ranked.sort(key=lambda pair: pair[1])
    return ranked

def get_latency_report(urls, duration):
    """Measure *urls* for *duration* seconds and rank them fastest-first.

    Returns a list of (url, average latency) pairs sorted ascending.
    """
    samples = measure_latencies(urls, duration)
    return sort_latencies(average_latencies(samples))

# Example usage: the Lemmy instances to benchmark against each other.
urls = [
    'https://discuss.tchncs.de',
    'https://vlemmy.net',
    'https://lemmy.fmhy.ml',
    'https://sopuli.xyz',
    'https://lemmy.world',
    'https://sh.itjust.works',
    'https://beehaw.org',
    'https://feddit.de',
    'https://lemmygrad.ml',
    'https://lemmy.one',
    'https://lemmy.ca',
    'https://feddit.it',
    'https://lemmy.sdf.org',
    'https://bakchodi.org',
    'https://lemm.ee',
    'https://feddit.dk',
    'https://pawb.social',
    'https://burggit.moe',
    'https://lemmy.burger.rodeo',
    'https://lemmy.nz',
    'https://feddit.nl',
    'https://szmer.info',
    'https://infosec.pub',
    'https://slrpnk.net',
    'https://programming.dev',
    'https://feddit.uk',
    'https://aussie.zone',
    'https://mander.xyz',
    'https://exploding-heads.com',
    'https://reddthat.com',
    'https://lemmynsfw.com',
    'https://sub.wetshaving.social',
    'https://latte.isnot.coffee',
    'https://lemmy.pt',
    'https://monero.house',
    'https://partizle.com',
    'https://dormi.zone',
    'https://yiffit.net',
    'https://waveform.social',
    'https://lemmy.click',
    'https://lemmy.eus',
    'https://lemmy.film',
    'https://iusearchlinux.fyi',
    'https://dataterm.digital',
    'https://pathofexile-discuss.com',
    'https://lemmyrs.org',
    'https://lemmy.studio',
    'https://lemmy.perthchat.org',
    'https://lemmy.podycust.co.uk',
    'https://possumpat.io',
    'https://compuverse.uk',
    'https://lemmy.zip',
    'https://lemmy.villa-straylight.social',
    'https://lemmy.spacestation14.com',
    'https://terefere.eu',
]

# Run the measurement and print the URLs from fastest to slowest average.
report = get_latency_report(urls, TIME_TOTAL)
for site, seconds in report:
    print(f'{site}: {seconds:.2f} seconds')
  • sparky@lemmy.pt
    link
    fedilink
    English
    arrow-up
    1
    ·
    1 year ago

    Is doing this actually necessary? In practice, it seems like latency is not going to significantly affect your usage of Lemmy. 50ms versus 300ms matters in gaming but not so much in a web browser. It feels like downtime and error rates are the actual data you want, but on the other hand, too many people hammering Lemmies to get this information would have the reverse effect of pushing some of them over load.