This commit is contained in:
retoor 2024-11-26 09:22:24 +01:00
parent a6697fab58
commit a1b3ad2a92
4 changed files with 46 additions and 36 deletions

View File

@@ -23,9 +23,7 @@ def dump():
     print("\n".join(statistics_text))
     all_content = ""
     for user in list(db.get_users()):
-        text = (
-            db.get_all_rants_of_user(user)
-        )
+        text = db.get_all_rants_of_user(user)
         total_text = ""
         if text:
             total_text += text
@@ -42,9 +40,7 @@ def dump():
             f.write(" said ")
             f.write(text)
             f.write("```")
-        text = (
-            db.get_all_posts_of_user(user)
-        )
+        text = db.get_all_posts_of_user(user)
         if text:
             total_text += text
             print(
@@ -76,4 +72,10 @@ def dump():
     printr(f"export/view-{view['name']}.json")
     with pathlib.Path(f"export/view-{view['name']}.json").open("w+") as f:
         with db.Db() as _db:
-            json.dump([dict(record) for record in _db.query("SELECT * FROM {}".format(view['name']))],f)
+            json.dump(
+                [
+                    dict(record)
+                    for record in _db.query("SELECT * FROM {}".format(view["name"]))
+                ],
+                f,
+            )
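
Side note on the reformatted export call above: each row returned by _db.query(...) is wrapped in dict(record) because database row objects are generally not JSON-serializable. Below is a minimal, self-contained sketch of the same per-view export pattern, using plain sqlite3 and a hypothetical stats.db path instead of the project's db.Db() wrapper.

    # Sketch only: plain sqlite3 stand-in for the project's db.Db() wrapper;
    # "stats.db" and export_view() are hypothetical names.
    import json
    import pathlib
    import sqlite3

    def export_view(view_name, db_path="stats.db"):
        con = sqlite3.connect(db_path)
        con.row_factory = sqlite3.Row  # rows become dict-like
        try:
            rows = con.execute("SELECT * FROM {}".format(view_name)).fetchall()
            target = pathlib.Path("export/view-{}.json".format(view_name))
            target.parent.mkdir(exist_ok=True)
            with target.open("w+") as f:
                # dict(row) is required: sqlite3.Row itself is not JSON-serializable.
                json.dump([dict(row) for row in rows], f)
        finally:
            con.close()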

View File

@@ -206,8 +206,12 @@ def get_db():
         "CREATE VIEW posts_of_user AS SELECT user_username as username, GROUP_CONCAT(body) as text FROM comments"
     )
     db.query("DROP VIEW IF EXISTS contributions_extended_ranked")
-    db.query("CREATE VIEW contributions_extended_ranked AS SELECT ROW_NUMBER() OVER (ORDER BY upvotes_per_post_on_average DESC) as rank_by_appreciation_based_on_upvotes_per_message, * FROM contributions_extended ORDER BY upvotes_per_post_on_average DESC")
-    db.query("CREATE VIEW IF NOT EXISTS views AS SELECT sql, name FROM sqlite_schema WHERE type='view' AND name != 'views';")
+    db.query(
+        "CREATE VIEW contributions_extended_ranked AS SELECT ROW_NUMBER() OVER (ORDER BY upvotes_per_post_on_average DESC) as rank_by_appreciation_based_on_upvotes_per_message, * FROM contributions_extended ORDER BY upvotes_per_post_on_average DESC"
+    )
+    db.query(
+        "CREATE VIEW IF NOT EXISTS views AS SELECT sql, name FROM sqlite_schema WHERE type='view' AND name != 'views';"
+    )
     return db
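
The two views created above do different jobs: contributions_extended_ranked uses the ROW_NUMBER() window function (available since SQLite 3.25) to attach a rank to every row of contributions_extended, while the views view reads sqlite_schema (SQLite 3.33+; older builds use the alias sqlite_master) so the exporter can enumerate all other views. A hedged sketch against a throwaway in-memory database, with an invented contributions_extended table just to make it runnable:

    # Sketch: same view definitions on an in-memory database.
    # The contributions_extended table and its sample rows are invented here.
    import sqlite3

    con = sqlite3.connect(":memory:")
    con.executescript(
        """
        CREATE TABLE contributions_extended (username TEXT, upvotes_per_post_on_average REAL);
        INSERT INTO contributions_extended VALUES ('alice', 7.5), ('bob', 3.2);
        CREATE VIEW contributions_extended_ranked AS
            SELECT ROW_NUMBER() OVER (ORDER BY upvotes_per_post_on_average DESC)
                       AS rank_by_appreciation_based_on_upvotes_per_message,
                   *
            FROM contributions_extended
            ORDER BY upvotes_per_post_on_average DESC;
        -- A view that lists all other views, read from the schema table itself.
        CREATE VIEW views AS
            SELECT sql, name FROM sqlite_schema WHERE type='view' AND name != 'views';
        """
    )
    print(con.execute("SELECT name FROM views").fetchall())
    print(con.execute("SELECT * FROM contributions_extended_ranked").fetchall())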
@@ -246,21 +250,29 @@ def get_views():
     with Db() as db:
         return list(db.query("SELECT * FROM views;"))


 def get_contributions():
     with Db() as db:
-        contributions = db.query(
-            "SELECT * FROM contributions_extended_ranked"
-        )
+        contributions = db.query("SELECT * FROM contributions_extended_ranked")
         return list(contributions)


 def get_upvote_average():
-    return avg(contribution["upvotes_per_post_on_average"] for contribution in get_contributions())
+    return avg(
+        contribution["upvotes_per_post_on_average"]
+        for contribution in get_contributions()
+    )


 def get_users():
     with Db() as db:
-        return [user["username"] for user in db.query("SELECT DISTINCT username FROM contributions ORDER BY username")]
+        return [
+            user["username"]
+            for user in db.query(
+                "SELECT DISTINCT username FROM contributions ORDER BY username"
+            )
+        ]


 def get_user_count():
     return len(get_users())
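
get_upvote_average now feeds a generator expression straight into avg. The avg helper itself is defined elsewhere in the module and is not part of this diff; assuming it is a plain arithmetic mean, an equivalent standalone sketch would be:

    # Assumption: the module's avg() behaves like an arithmetic mean.
    # statistics.mean accepts any iterable, including a generator expression.
    from statistics import mean

    def upvote_average(contributions):
        # contributions: iterable of dict-like rows from contributions_extended_ranked
        return mean(c["upvotes_per_post_on_average"] for c in contributions)

    print(upvote_average([
        {"upvotes_per_post_on_average": 3.0},
        {"upvotes_per_post_on_average": 5.0},
    ]))  # -> 4.0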

View File

@@ -3,10 +3,9 @@ import asyncio
 from drstats.duration import Duration


 def new_plot():
     import matplotlib.pyplot as plt
     plt.clf()
     plt.cla()
     plt.close(0)
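
new_plot() clears pyplot's implicit global state before every chart, so data plotted for one statistic cannot leak into the next saved image. A rough standalone sketch of that reset-then-save pattern; the Agg backend call and the output file name are assumptions, not part of the project:

    # Sketch of the reset-before-plot pattern used by new_plot().
    import matplotlib

    matplotlib.use("Agg")  # assumption: headless backend, suitable for savefig-only use
    import matplotlib.pyplot as plt

    def fresh_plot():
        plt.clf()     # clear the current figure
        plt.cla()     # clear the current axes
        plt.close(0)  # close figure 0 if it is still open
        return plt

    p = fresh_plot()
    p.plot([1, 2, 3], [2, 4, 8])
    p.savefig("example_plot.png")  # hypothetical output path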
@@ -16,13 +15,14 @@ def new_plot():
 def get_date_range():
     with Db() as db:
-        record = list(db.query(
-            "SELECT min(date(created)) as start_date, max(date(created)) as end_date FROM rants"
-        ))[0]
+        record = list(
+            db.query(
+                "SELECT min(date(created)) as start_date, max(date(created)) as end_date FROM rants"
+            )
+        )[0]
         return record["start_date"], record["end_date"]


 def get_date_range_str():
     start_date, end_date = get_date_range()
     return f"from {start_date} to {end_date}"
@@ -52,7 +52,6 @@ async def rant_stats_per_day():
         plt.savefig(f"export/rants_per_day_{get_date_range_str()}.png")


 async def comment_stats_per_day():
     with Duration("Comment stats per day"):
@@ -78,7 +77,6 @@ async def comment_stats_per_day():
         plt.savefig(f"export/comments_per_day_{get_date_range_str()}.png")


 async def rant_stats_per_weekday():
     with Duration("Rant stats per weekday"):
         plt = new_plot()
@@ -99,7 +97,6 @@ async def rant_stats_per_weekday():
         plt.savefig(f"export/rants_per_weekday_{get_date_range_str()}.png")


 async def comment_stats_per_weekday():
     with Duration("Comment stats per weekday"):
         plt = new_plot()
@@ -120,7 +117,6 @@ async def comment_stats_per_weekday():
         plt.savefig(f"export/comments_per_weekday_{get_date_range_str()}.png")


 async def rant_stats_per_hour():
     with Duration("Rant stats per hour"):
         plt = new_plot()
@@ -141,7 +137,6 @@ async def rant_stats_per_hour():
         plt.savefig(f"export/rants_per_hour_{get_date_range_str()}.png")


 async def comment_stats_per_hour():
     with Duration("Comment stats per hour"):
         plt = new_plot()
@@ -162,7 +157,6 @@ async def comment_stats_per_hour():
         plt.savefig(f"export/comments_per_hour_{get_date_range_str()}.png")


 async def score_most_ignored_last_7_days():
     with Duration("Score most ignored last 7 days"):
         plt = new_plot()
@@ -187,7 +181,6 @@ async def score_most_ignored_last_7_days():
         )


 async def score_last_7_days():
     with Duration("Upvotes (score) last 7 days"):
         plt = new_plot()
@@ -208,7 +201,6 @@ async def score_last_7_days():
         plt.savefig(f"export/score_last_7_days_{get_date_range_str()}.png")


 async def user_score_per_day(username):
     with Duration("User {} score per day".format(username)):
         plt = new_plot()
@@ -260,7 +252,6 @@ async def user_score_per_day(username):
         plt.savefig(f"export/score_user_{username}_{get_date_range_str()}.png")


 def stats():
     with Duration("Complete process"):
         asyncio.run(rant_stats_per_day())
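
Every plotting coroutine above wraps its body in Duration(...), imported from drstats.duration. That class is not shown in this commit; purely as a sketch, a minimal stand-in with the same usage shape could look like this:

    # Sketch only: a timing context manager shaped like drstats.duration.Duration.
    # The project's real implementation is not part of this diff.
    import time

    class Duration:
        def __init__(self, label):
            self.label = label

        def __enter__(self):
            self.start = time.perf_counter()
            return self

        def __exit__(self, exc_type, exc, tb):
            elapsed = time.perf_counter() - self.start
            print("{} took {:.2f}s".format(self.label, elapsed))
            return False  # never swallow exceptions

    with Duration("Complete process"):
        time.sleep(0.1)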

View File

@@ -55,6 +55,7 @@ async def _sync_rants(start_from, page_size,count):
     print(f"Upserted {count} rant(s).")
     return count


 async def sync_rants():
     count = 0
     start_from = 0
@@ -63,11 +64,15 @@ async def sync_rants():
     while True:
         try:
-            count += await asyncio.wait_for(_sync_rants(start_from, page_size,count),5)
+            count += await asyncio.wait_for(
+                _sync_rants(start_from, page_size, count), 5
+            )
             start_from += page_size
         except Exception as ex:
             print(ex)
-            print("If exception described above is an timeout related error, it's due ratelimiting and considered OK.")
+            print(
+                "If exception described above is an timeout related error, it's due ratelimiting and considered OK."
+            )
             break
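
The reformatted loop above is the heart of the sync: each page fetch is bounded by asyncio.wait_for with a 5-second timeout, and a timeout is treated as rate limiting and ends the run. A self-contained sketch of that paging-with-timeout pattern, with fetch_page() standing in for the project's _sync_rants():

    # Sketch: fetch_page() and the stop condition are hypothetical; only the
    # wait_for-with-timeout paging structure mirrors sync_rants().
    import asyncio

    async def fetch_page(start_from, page_size):
        await asyncio.sleep(0.1)  # pretend network call to the API
        return page_size          # number of records "upserted"

    async def sync_all(page_size=20):
        count = 0
        start_from = 0
        while True:
            try:
                count += await asyncio.wait_for(fetch_page(start_from, page_size), 5)
                start_from += page_size
            except asyncio.TimeoutError:
                # The sync script treats this as rate limiting and stops gracefully.
                break
            if start_from >= 3 * page_size:  # demo stop condition, not in the original
                break
        return count

    print(asyncio.run(sync_all()))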