This repository has been archived by the owner on Dec 4, 2022. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 1
/
timing.py
49 lines (41 loc) · 1.54 KB
/
timing.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
__author__ = 'Almenon'
import praw
# http://praw.readthedocs.org
from time import time,sleep,localtime
import bot
from requests.exceptions import ConnectionError
from requests.exceptions import ReadTimeout
from json import load
import logging
# Day-of-month at startup; the main loop compares against this to reset
# per-day post limits (bot.num_posts) when the date rolls over.
last_day = localtime().tm_mday

# Restore the epoch timestamp of the last completed scan, previously written
# by save_scan_time(). json.load parses the bare float written via str().
# NOTE(review): assumes info/time.txt exists and holds a number — no fallback
# for a first run with a missing/empty file; confirm deployment seeds it.
with open("info/time.txt") as file:
    last_scan = load(file)
def save_scan_time():
    """Record the current epoch time as the last successful scan.

    Updates the module-global ``last_scan`` and persists it to
    ``info/time.txt`` so the next run resumes from this point.
    """
    global last_scan
    last_scan = time()
    with open("info/time.txt", 'w') as out:
        out.write(str(last_scan))
    # another approach would be to save newest submission id
    # r.get_subreddit("name",place_holder=id) to get content newer than id
# Authenticate with reddit using a descriptive user-agent string, as the
# reddit API guidelines require. (bot.login semantics are defined elsewhere.)
bot.login("Python:episodeInfo:v2.0 (by /u/Almenon)")

# Main polling loop: scan for new content since last_scan, persist progress,
# handle transient reddit/network failures, and sleep between iterations.
while True:
    try:
        # scan posts / mentions / messages / replies
        bot.scan(last_scan)
        save_scan_time()
        # reset post limits each new day
        if localtime().tm_mday != last_day:
            # bot.num_posts appears to be a per-key counter dict — zero every
            # entry at the first iteration after midnight (local time).
            for key in bot.num_posts: bot.num_posts[key] = 0
            last_day = localtime().tm_mday
    except praw.errors.PRAWException as e:
        # Reddit-API-level error: log it, still record the scan time so the
        # same window is not rescanned, then back off before retrying.
        logging.exception(e)
        logging.info("see http://praw.readthedocs.org/en/stable/pages/exceptions.html")
        save_scan_time()
        # NOTE: the unconditional sleep(180) at the bottom of the loop also
        # runs after this handler, so the total back-off is ~6 minutes.
        sleep(180)
    except (ConnectionError, ReadTimeout) as e:
        # Network-level failure (reddit down or no connectivity): log and
        # wait much longer before the next attempt.
        logging.exception(e)
        logging.info("there was an error connecting to reddit. Check if it's down or if there is no internet connection")
        save_scan_time()
        sleep(1200) # sleep for 20 min
    logging.info("sleeping for 3 minutes")
    sleep(180) # limit is 2 seconds. 30 is polite time.