sunstroke @ b87970722405d9db5367d071d420a4d548fa90b3

main.py

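"""Queue today's newspaper downloads from Overpost into Pyload.

For every newspaper returned by Overpost, one link is picked according to
HOST_PREFERENCE and added to Pyload as a package; if Pyload cannot be
reached, the links are printed so they can be added manually.
NEWSPAPER_PREFIX is read from the environment (a local .env file is loaded
via python-dotenv).
"""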
from urllib.error import URLError
from datetime import datetime
from os import getenv
from dotenv import load_dotenv
load_dotenv()  # pull variables from a local .env file into the environment
from Overpost import get_newspaper
from MyPyload import Pyload

NEWSPAPER_PREFIX = getenv("NEWSPAPER_PREFIX") or ""
HOST_PREFERENCE = ['katfile.com', 'rapidgator.net', 'www.easybytez.com']  # most preferred first

def scroll_list(array, buffer=1000):
    # Cycle over `array`, wrapping back to the start after the last item.
    # The index is reset whenever it reaches len(array), so the `buffer`
    # bound only kicks in for arrays longer than `buffer`.
    array_len = len(array)
    i = 0
    while i < buffer:
        if i >= array_len:
            i = 0
        yield array[i]
        i += 1

def get_host(link):
    # Host part of a URL: "https://example.com/file" -> "example.com".
    return link.split("/")[2]

def filter_links(links, hosts):
    # Return the first link hosted on the next preferred host, moving on to the
    # following host in the rotation when there is no match.  One full pass over
    # HOST_PREFERENCE is enough, since `hosts` cycles over it.
    for _ in range(len(HOST_PREFERENCE)):
        host = next(hosts)
        for link in links:
            if get_host(link) == host:
                return link
    return None  # no link on any preferred host

def get_sorted_links(dictionary):
    hosts = scroll_list(HOST_PREFERENCE)
    # One link per newspaper; newspapers with no link on a preferred host are skipped.
    chosen = (filter_links(links, hosts) for links in dictionary.values())
    return [link for link in chosen if link is not None]

def download_link(connection, name, link):
    # Add a one-link package to Pyload; returns the new package id.
    return connection.addPackage(name=name, links=[link])

def handle_links(name, links):
    # Try to queue every link in Pyload; if the connection is refused, print
    # the links so they can be added by hand.
    try:
        con = Pyload()
        return [download_link(con, name, link) for link in links]
    except URLError:
        print("\nConnection to Pyload refused.")

    print(len(links), "links to add manually:\n")
    for link in links:
        print(link)
    print()
    return []

def main():
    newspapers = get_newspaper(NEWSPAPER_PREFIX, 0)  # 0 -> today
    name = f"{NEWSPAPER_PREFIX} - {datetime.today().strftime('%Y-%m-%d')}"
    links = get_sorted_links(newspapers)
    pids = handle_links(name, links)
    print(len(pids), "links added to Pyload.")
    print("Press ENTER to exit.")
    input()

if __name__ == "__main__":
    exit(main())
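
For reference, a rough sketch of how the link selection behaves when the module is imported in a REPL; the URLs below are made up:

>>> hosts = scroll_list(HOST_PREFERENCE)
>>> filter_links(["https://rapidgator.net/file/abc", "https://katfile.com/xyz"], hosts)
'https://katfile.com/xyz'
>>> # the shared generator keeps rotating, so the next newspaper is tried
>>> # starting from rapidgator.net
>>> filter_links(["https://www.easybytez.com/f/1", "https://rapidgator.net/file/def"], hosts)
'https://rapidgator.net/file/def'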