Python script to quickly delete all Reddit posts

Reddit Python

Here’s a script that has been in my library for a while; I don’t know why I haven’t shared it before now. All credit to nearengine 🙂

This does a simple delete of your entire Reddit post history — no double delete like some other options.

redditPurge is a simple script I wrote while learning Python. It destroys your entire Reddit history, letting you start with a clean slate or delete your account with less of a trace.

usage: ./redditPurge.py username password

license: Do whatever you want with it! If you find it useful please send a tweet (@nearengine) or link to this page.

#!/usr/bin/env python

import time, sys, json
import requests

things_list = list()
karma = 0;

#
# Get username & password from arguments
#

if len(sys.argv) == 3:
username = sys.argv[1]
password = sys.argv[2]
else:
print 'usage: '+sys.argv[0]+' username password'
sys.exit()

#
# Do login
#

head = {'User-Agent': 'redditPurge 0.1'}
data = {'user': username, 'passwd': password, 'api_type': 'json'}
client = requests.session()
r = client.post('https://ssl.reddit.com/api/login', data=data, headers=head)

# Check if login was successful & store modhash, otherwise exit
try:
modhash = r.json()['json']['data']['modhash']
except:
if 'json' in r.json().keys():
if 'errors' in r.json()['json'].keys():
print ('[ ERROR ] ' + str(r.status_code) + ': ' +
r.json()['json']['errors'][0][0] + ' ' +
r.json()['json']['errors'][0][1])
else:
print '[ ERROR ] getting modhash'
else:
print '[ ERROR ] getting modhash'
sys.exit()

#
# Get first 100 things
#

print '[ OK ] please wait while your things are fetched...'
rUrl = 'http://www.reddit.com/user/'+username+'/overview.json?limit=100'
r = client.get(rUrl, headers=head)

# Make sure things were found
if len(r.json()['data']['children']) > 0:
# Fetch each thing's ID
for thing in xrange(0, len(r.json()['data']['children'])):
# and save it to the list
things_list.append(r.json()['data']['children'][thing]['data']['name'])
karma += r.json()['data']['children'][thing]['data']['ups']
karma -= r.json()['data']['children'][thing]['data']['downs']

# If not, display the best error we can
else:
if 'json' in r.json().keys():
if 'errors' in r.json()['json'].keys():
print ('[ ERROR ] ' + str(r.status_code) + ': ' +
r.json()['json']['errors'][0][0] + ' ' +
r.json()['json']['errors'][0][1])
else:
print '[ ERROR ] fetching things'
else:
print '[ ERROR ] fetching things'
sys.exit()

#
# If there are more things, fetch them until we run out
#

if r.json()['data']['after'] != None:
while True:
r = client.get(rUrl+'&after='+r.json()['data']['after'], headers=head)

# Make sure things were found
if len(r.json()['data']['children']) > 0:
# Fetch each thing's ID
for thing in xrange(0, len(r.json()['data']['children'])):
# Store every thing ID in the list
things_list.append(
r.json()['data']['children'][thing]['data']['name'])
karma += r.json()['data']['children'][thing]['data']['ups']
karma -= r.json()['data']['children'][thing]['data']['downs']

# If not, display the best error we can
else:
if 'json' in r.json().keys():
if 'errors' in r.json()['json'].keys():
print ('[ ERROR ] ' + str(r.status_code) + ': ' +
r.json()['json']['errors'][0][0] + ' ' +
r.json()['json']['errors'][0][1])
else:
print '[ ERROR ] fetching things'
else:
print '[ ERROR ] fetching things'
sys.exit()

# We're out of things, so stop fetching more
if r.json()['data']['after'] == None:
break

# Otherwise there are more things, continue fetching them
else:
# Reddit's API rate limit is 2s
time.sleep(2)

#
# Now delete all the things!
#

print ('[ OK ] done fetching. you\'re sacrificing ' + str(karma) +
' karma today! here we go:')

count = 1
count_max = len(things_list)

for thing_id in things_list:
# Try deleting the thing
data = {'id': thing_id, 'uh': modhash}
r = client.post('http://www.reddit.com/api/del', data=data, headers=head)
print ('[ ' + str(r.status_code) + ' ] ' + thing_id +
' (' + str(count) + '/' + str(count_max) + ')')
count += 1

# Reddit's API rate limit is 2s
time.sleep(2)

Leave a Reply