# deviras/community-threads/main.py
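"""Sync r/developersIndia's curated Reddit collection to the subreddit wiki.

The script keeps a JSON snapshot of the collection in a GitHub gist, compares
the gist's last-updated timestamp against the live collection, and, when they
differ, refreshes both the gist and the "community-threads" wiki page.
"""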
import json
import os
from collections import defaultdict
from datetime import datetime

import praw
import requests
client_id = os.environ["REDDIT_CLIENT_ID"]
client_secret = os.environ["REDDIT_CLIENT_SECRET"]
reddit_pass = os.environ["REDDIT_PASSWORD"]
username = os.environ["REDDIT_USERNAME"]
token = os.environ["GIST_TOKEN"]
gist_id = os.environ["GIST_ID"]

sub = "developersIndia"

def get_gist_content(gist_id):
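    """Fetch the content of the first file in the gist that backs this script."""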
    headers = {
        "Authorization": f"token {token}",
        "Accept": "application/vnd.github.v3+json",
    }
    response = requests.get(f"https://api.github.com/gists/{gist_id}", headers=headers)
    gist = response.json()
    # A gist can hold several files; this script only ever stores one.
    filename = list(gist["files"].keys())[0]
    return gist["files"][filename]["content"]

def update_gist(gist_id, filename, content, description=""):
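    """Overwrite a file in the gist via the GitHub REST API."""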
    headers = {
        "Authorization": f"token {token}",
        "Accept": "application/vnd.github.v3+json",
    }
    data = {"description": description, "files": {filename: {"content": content}}}
    response = requests.patch(
        f"https://api.github.com/gists/{gist_id}", headers=headers, json=data
    )
    return response.json()

def get_collection(reddit):
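    """Fetch the curated "community threads" collection from the subreddit."""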
    collection = reddit.subreddit(sub).collections(
        permalink="https://reddit.com/r/developersIndia/collection/958aef35-f9cb-414d-ab33-08bc639e47de"
    )
    return collection

def update_wiki(reddit, wikipage_name, posts):
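    """Render `posts` as year-grouped markdown and write it to the subreddit wiki."""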
    # Group posts by year
    posts_by_year = defaultdict(list)
    for post in posts:
        year = datetime.strptime(post["created_at"], "%Y-%m-%dT%H:%M:%S").year
        posts_by_year[year].append(post)

    # Sort posts within each year, newest first
    for year in posts_by_year:
        posts_by_year[year] = sorted(
            posts_by_year[year], key=lambda k: k["created_at"], reverse=True
        )

    # Calculate total posts and years
    total_posts = sum(len(posts) for posts in posts_by_year.values())
    total_years = len(posts_by_year)

    wiki_header = "# A collection of must read discussions started by community members"

    content = wiki_header + "\n\n"
    content += f"A handpicked collection of **{total_posts}** interesting posts, discussions & high-quality threads gathered over {total_years} years.\n\n"

    for year in sorted(posts_by_year.keys(), reverse=True):
        content += f"## {year}\n\n"
        # Add the posts for this year
        for post in posts_by_year[year]:
            formatted_date = datetime.strptime(
                post["created_at"], "%Y-%m-%dT%H:%M:%S"
            ).strftime("%d %b, %Y")
            content += f"- `{formatted_date}` [**{post['title']}**]({post['url']})\n\n"

    # Given a wiki page name, replace the page with the new markdown
    wikipage = reddit.subreddit(sub).wiki[wikipage_name]
    wikipage.edit(content=content)
    print("Wiki updated successfully!")

def main():
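    """Refresh the gist database and wiki page if the collection has changed."""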
    reddit = praw.Reddit(
        client_id=client_id,
        client_secret=client_secret,
        username=username,
        password=reddit_pass,
        user_agent=f"Automod reader by u/{username}",
    )
    collection = get_collection(reddit)

    saved_collection_posts = json.loads(get_gist_content(gist_id))
    saved_collection_ids = [post["id"] for post in saved_collection_posts["posts"]]

    print(f"Database was last updated on {saved_collection_posts['collection_last_updated']}")
    print(f"Collection was last updated on {datetime.utcfromtimestamp(collection.last_update_utc).isoformat()}")

    if (
        saved_collection_posts["collection_last_updated"]
        != datetime.utcfromtimestamp(collection.last_update_utc).isoformat()
    ):
        print("Collection was updated, getting new posts data...")

        # Diff the two ID sets to find posts added to or removed from the collection
        db_posts = set(saved_collection_ids)
        collection_posts = {submission.id for submission in collection}
        new_posts = list(collection_posts - db_posts)
        deleted_posts = list(db_posts - collection_posts)
        print(f"Found {len(new_posts)} new posts!")
        print(f"Found {len(deleted_posts)} deleted posts!")

        posts = []
        # Carry over saved posts, skipping any that were removed from the collection
        for saved_post in saved_collection_posts["posts"]:
            if saved_post["id"] in deleted_posts:
                continue
            post = {
                "title": saved_post["title"],
                "url": saved_post["url"],
                "id": saved_post["id"],
                "num_comments": saved_post["num_comments"],
                "created_at": saved_post["created_at"],
                "flair_text": saved_post["flair_text"],
            }
            posts.append(post)

        # Fetch full data for the newly added posts
        for submission_id in new_posts:
            submission = reddit.submission(submission_id)
            post = {
                "title": submission.title,
                "url": submission.url,
                "id": submission.id,
                "num_comments": submission.num_comments,
                "created_at": datetime.utcfromtimestamp(
                    submission.created_utc
                ).isoformat(),
                "flair_text": submission.link_flair_text,
            }
            posts.append(post)

        # Sort the posts by created_at
        posts = sorted(posts, key=lambda k: k["created_at"])

        collection_json = {
            "collection_last_updated": datetime.utcfromtimestamp(
                collection.last_update_utc
            ).isoformat(),
            "posts": posts,
        }

        update_gist(gist_id, "collection.json", json.dumps(collection_json, indent=4))
        print("Internal database updated successfully!")
        update_wiki(reddit, "community-threads", posts)
    else:
        print("Wiki is up to date!")

if __name__ == "__main__":
    main()
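
# To run locally (assumed invocation; the script only requires the environment
# variables read at the top of this file):
#   export REDDIT_CLIENT_ID=... REDDIT_CLIENT_SECRET=... REDDIT_USERNAME=... \
#          REDDIT_PASSWORD=... GIST_TOKEN=... GIST_ID=...
#   python main.py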