import json, codecs, re, os, pypandoc, striprtf, sqlite3, random
import heapq, shutil, datetime, urllib.parse
import itertools, time, markdown, csv, os.path, webbrowser, threading
import hashlib, funcy, platform, sys, socket

from striprtf.striprtf import rtf_to_text
from functools import wraps
from queue import Queue
from flask import Flask, request, send_from_directory, Response, render_template
from flask_socketio import SocketIO, emit
from werkzeug.routing import PathConverter
from orgpython import to_html
from importlib import reload
from pathlib import Path

from stat import S_ISREG, ST_CTIME, ST_MODE

this_machine = socket.gethostname()
print(this_machine)


# pypandoc striprtf sqlite3 heapq markdown webbrowser funcy platform socket functools queue flask flask_socketio werkzeug orgpython importlib pathlib stat

# orgpython webbrowser functools socket platform sqlite3 heapq funcy striprtf flask_socketio queue stat importlib


# https://github.com/SethMMorton/natsort
# Simple yet flexible natural sorting in Python


from servertoys import *
import servertoys

#q = Queue()

def dict_factory(cursor, row):
    """sqlite3 row factory: return each row as a dict keyed by column name."""
    d = {}
    for idx, col in enumerate(cursor.description):
        d[col[0]] = row[idx]
    return d

def server_save(key, value):
    """Append a key=value pair to the flat-file server data log."""
    with codecs.open('cache/server_data.txt', 'a', 'utf-8') as f:
        f.write("%s=%s\n" % (str(key), str(value)))

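# Hypothetical counterpart (not in the original file): a sketch of how the flat
# key=value log written by server_save() could be read back, assuming the
# "last write wins" semantics implied by the append-only format.
def server_load():
    data = {}
    try:
        with codecs.open('cache/server_data.txt', 'r', 'utf-8') as f:
            for line in f:
                if '=' in line:
                    key, _, value = line.rstrip('\n').partition('=')
                    data[key] = value
    except FileNotFoundError:
        pass
    return data
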
def flask_thread(q=0):
    print(" my __name__ is...", end=" ")
    print(__name__)

    #print("Starting ... flask_thread ...")
    #app = Flask(__name__, static_url_path='/cache',
    #            static_folder='cache',)

    app = Flask(__name__, static_url_path='')
    app.config['SECRET_KEY'] = 'secret!abc#xyz91239456'
    app.jinja_env.auto_reload = True

    print(app)

    #socketio = SocketIO(app)

    app.config['TEMPLATES_AUTO_RELOAD'] = True

    #def before_request():
    #    app.jinja_env.cache = {}

    @app.route('/all')
    def list_all_pics():
        all = servertoys.BASE  # "/home/phowell/hd2/deep1_homedir/Documents/ooo/"

        #paths = sorted(Path(dirpath).iterdir(), key=os.path.getmtime)

        allp = os.listdir(all)
        # allp = [base(x) for x in allp]
        #print(allp)
        return "\n".join(["<li>%s</li>" % a('/img/' + x, x) for x in filter(isPic, allp)])

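    # isPic()/isVid() (like a(), tag(), tagc(), plain_sw(), BASE, MOVBASE, HOST,
    # PORT, etc.) come from servertoys via the star import above. A minimal
    # sketch of what the two filters are assumed to look like, based only on
    # how they are used in this file:
    #
    #     PIC_EXTS = ('.jpg', '.jpeg', '.png', '.gif', '.webp')
    #     VID_EXTS = ('.mp4', '.webm', '.mkv', '.mov')
    #
    #     def isPic(name):
    #         return str(name).lower().endswith(PIC_EXTS)
    #
    #     def isVid(name):
    #         return str(name).lower().endswith(VID_EXTS)
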
    @app.route('/main')
    def main_app():
        all = servertoys.BASE  # "/home/phowell/hd2/deep1_homedir/Documents/ooo/"
        allp = os.listdir(all)
        allp = ['/img/' + x for x in filter(isPic, allp)]
        allp.sort()

        allj = json.dumps(allp, indent=2)
        return render_template('index.html', all_pics_json=allj, index="100")

    @app.route('/main/<index>')
    def main_app_indexed(index):
        all = servertoys.BASE  # "/home/phowell/hd2/deep1_homedir/Documents/ooo/"
        allp = os.listdir(all)
        allp = ['/img/' + x for x in filter(isPic, allp)]
        allp.sort()

        allj = json.dumps(allp, indent=2)
        return render_template('index.html', all_pics_json=allj, index=str(index))

    @app.route('/manage/<index>')
    def manager_app_indexed(index):
        all = servertoys.BASE  # "/home/phowell/hd2/deep1_homedir/Documents/ooo/"
        allp = os.listdir(all)
        allp = ['/img/' + x for x in filter(isPic, allp)]
        allp.sort()

        allj = json.dumps(allp, indent=2)
        return render_template('manage.html', all_pics_json=allj, index=str(index))

    @app.route('/meme/<index>')
    def manager_meme_indexed2(index):
        all = servertoys.MEMEBASE  # "/home/phowell/hd2/peter_home/images/Ok Pictures and Memes/"
        allp = os.listdir(all)
        #allp = ['/mimg/'+x for x in filter(isPic, allp) ]
        allp = ['/mimg/' + x for x in allp]
        allp.sort()

        allj = json.dumps(allp, indent=2)
        return render_template('manage2.html', which=2, all_pics_json=allj, index=str(index))

    @app.route('/m/<index>')
    def manager_app_indexed2(index):
        all = servertoys.BASE  # "/home/phowell/hd2/deep1_homedir/Documents/ooo/"
        allp = os.listdir(all)
        allp = ['/img/' + x for x in filter(isPic, allp)]
        allp.sort()

        allj = json.dumps(allp, indent=2)
        return render_template('manage2.html', which=1, all_pics_json=allj, index=str(index))

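    # The five gallery routes above all repeat the same list / filter / sort /
    # JSON-encode pattern. A possible helper (not part of the original code)
    # that would collapse them, kept here only as a sketch:
    #
    #     def gallery_json(folder, url_prefix, pics_only=True):
    #         names = os.listdir(folder)
    #         if pics_only:
    #             names = filter(isPic, names)
    #         return json.dumps(sorted(url_prefix + x for x in names), indent=2)
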
    @app.route('/importer/<index>')
    def manager_importer(index):
        chanpath = servertoys.CHANBASE  # "/home/phowell/hd2/peter_home_offload/Documents/scripts/chan/"
        if index == 'list':
            return "<br />".join(["<a href='%s'>%s</a>" % (x, x) for x in sorted(os.listdir(chanpath))])

        #return "<br />".join( [ "<a href='%s'>%s</a>" % ("/chan/"+index+"/"+x,x) for x in os.listdir(chanpath + index)] )

        all = chanpath + index
        allp = os.listdir(all)
        allp = ['/chan/' + index + '/' + x for x in allp]  # filter(isPic, allp) ]
        allp.sort()

        allj = json.dumps(allp, indent=2)
        return render_template('importer.html', all_pics_json=allj, index=0)

    @app.route('/alltags')
    def alltags():
        return servertoys.get_all_tags()

    @app.route('/alltags/<which>')
    def alltags_which(which):
        return servertoys.get_all_tags(which)

    @app.route('/add_pic_tag/<ttt>/<ppp>')
    def apt(ttt, ppp):
        return servertoys.add_pic_tag(ttt, ppp)

    @app.route('/prefix/<index>')
    def prefix_app_indexed(index):
        all = servertoys.BASE  # "/home/phowell/hd2/deep1_homedir/Documents/ooo/"

        allp = os.listdir(all)
        allp = ['/img/' + x for x in filter(isPic, allp) if plain_sw(x, index)]
        #funcy.filter(lambda x: plain(x).startswith('/img/'+index), allp)
        allp.sort()
        allj = json.dumps(allp, indent=2)
        return render_template('index.html', all_pics_json=allj, index="0")

    @app.route('/add_pic/<path>')
    def b(path):
        return add_pic(path)

    @app.route('/pic')
    def pic_root():  # was `a`; renamed so it no longer shadows the a() link helper used elsewhere
        return get_all_pics(1)

    @app.route('/pic/<index>')
    def c(index):
        allp = get_all_pics()
        #allp = ['/img/'+x for x in filter(isPic, allp) ]
        #allp.sort()

        #allj = json.dumps(allp,indent=2)
        allp = [ii for ii in map(lambda x: "/img/" + x, funcy.pluck('path', allp))]
        allj = json.dumps(allp)
        return render_template('manage.html', all_pics_json=allj, index=str(index))

    @app.route('/i')
    def db_img():
        global con, cur
        con = sqlite3.connect(LIBBASE + 'ooo.db')
        con.row_factory = dict_factory
        cur = con.cursor()

        cur.execute("SELECT * FROM pics p JOIN pictag pt ON pt.pic=p.id JOIN tags t ON t.id=pt.tag")
        result = cur.fetchall()
        return "<pre>%s</pre>" % json.dumps(result, indent=2)

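    # The schema is not created in this file; judging from the JOIN above it is
    # assumed to look roughly like this (pics <-> tags via a pictag link table):
    #
    #     CREATE TABLE pics   (id INTEGER PRIMARY KEY, path TEXT);
    #     CREATE TABLE tags   (id INTEGER PRIMARY KEY, name TEXT);
    #     CREATE TABLE pictag (pic INTEGER REFERENCES pics(id),
    #                          tag INTEGER REFERENCES tags(id));
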
    ###
    ###
    ### VIDEO
    ###
    ###
    ###

    @app.route('/videos')
    def vs_app():
        global MOVBASE

        # get all entries in the directory
        # Get their stats
        # leave only regular files, insert creation date

        #os.path.join(MOVBASE, file_name) for file_name in os.listdir( MOVBASE ))
        #print( list(entries)[1:10] )
        #print( list(entries)[1:10] )

        #entries = ((os.stat(path), path) for path in entries)
        #entries = ((stat[ST_CTIME], path) for stat, path in entries if S_ISREG(stat[ST_MODE]))
        #print( list(allp)[1:10] )
        #allp = os.listdir(MOVBASE)
        #allp.sort()
        #print( list(allp)[1:10] )

        allp = sorted(Path(MOVBASE).iterdir(), key=os.path.getmtime)
        allp = filter(isVid, allp)
        allp = ['/vid/' + urllib.parse.quote(x.name) for x in allp]

        allj = json.dumps(allp)  # ,indent=2)
        return render_template('allvids.html', all_pics_json=allj, index="100")

    @app.route('/video')
    def v_app():
        global MOVBASE
        #allp = os.listdir(MOVBASE)
        #allp = ['/vid/'+urllib.parse.quote(x) for x in filter(isVid, allp) ]
        #allp.sort()

        allp = sorted(Path(MOVBASE).iterdir(), key=os.path.getmtime)
        allp = filter(isVid, allp)
        allp = ['/vid/' + urllib.parse.quote(x.name) for x in allp]

        allj = json.dumps(allp, indent=2)
        return render_template('vindex.html', all_pics_json=allj, index="100")

    @app.route('/video/<index>')
    def v_app_indexed(index):
        global MOVBASE
        allp = filter(isVid, sorted(Path(MOVBASE).iterdir(), key=os.path.getmtime))
        allj = json.dumps(['/vid/' + urllib.parse.quote(x.name) for x in allp], indent=2)
        return render_template('vindex.html', all_pics_json=allj, index=str(index))

    #
    # SORTING FILES
    #

    @app.route('/sort')
    def sorter():
        return plain_sort()

    @app.route('/sort/<index>')
    def sort_index(index):
        return plain_sort(index=index)  # was index='', which ignored the URL segment

    #
    # SAVING STUFF
    #

    @app.route('/save', methods=['POST'])
    def save_post():
        now = datetime.datetime.now().strftime('%Y%m%dT%H%M')
        path = request.form['path']
        txt = request.form['content']

        # `server.writing_path` is assumed to come from the servertoys star import.
        o3 = codecs.open(server.writing_path + path, 'r', 'utf-8')
        orig_text = o3.read()
        o3.close()

        bu_filename = server.writing_path + 'older_copies/' + path + '_' + now + '.md'
        o2 = codecs.open(bu_filename, 'w', 'utf-8')
        o2.write(orig_text)
        o2.close()
        print('wrote backup to %s.' % bu_filename)

        o1 = codecs.open(server.writing_path + path, 'w', 'utf-8')
        o1.write(txt)
        o1.close()
        return "<h1>Successfully Saved</h1><br>" + a('back to writing folder', '/x/writing/index') + \
               " " + a('back to home', '/')

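    # Example of driving the /save endpoint from a client, assuming the editor
    # page posts form-encoded `path` and `content` fields (sketch only):
    #
    #     import requests
    #     requests.post('http://localhost:%s/save' % PORT,
    #                   data={'path': 'notes.md', 'content': '# hello'})
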
    #
    # SERVER maintenance type stuff
    @app.route('/rl')
    def restart():
        reload(servertoys)
        #reload(localcache)
        return "Server code reloaded"

    @app.route("/x/<func>/<arg>/<arrg>")
    def dispatch3(func, arg, arrg):
        print("2 args")
        return "" + server_dispatch(func, arg, arrg)

    @app.route("/x/<func>/<arg>")
    def dispatch2(func, arg):
        print("1 arg")
        return "" + server_dispatch(func, arg)

    @app.route("/x/<func>")
    def dispatch(func):
        print("0 arg")
        return server_dispatch(func)

    @app.route("/api/<func>/<arg>/<arrg>")
    def dispatch3j(func, arg, arrg):
        print("json, 2 args")
        return Response(server_dispatch(func, arg, arrg), mimetype='text/json')

    @app.route("/api/<func>/<arg>")
    def dispatch2j(func, arg):
        print("json, 1 arg")
        return Response(server_dispatch(func, arg), mimetype='text/json')

    @app.route("/api/<func>")
    def dispatch1j(func):
        print("json, 0 args")
        return Response(server_dispatch(func), mimetype='text/json')

    @app.route("/")
    def home():
        return "<h1>Homepage</h1>"

    #
    # STATIC ROUTES
    #

    """@app.route('/lib/<path:path>')
    def send_jslib(path):
        return send_from_directory('gui/lib', path)"""

    @app.route('/data/<path:path>')
    def send_cachedata(path):
        #myfile = os.path.join('cache', path).replace('\\','/')
        print(path)
        #return app.send_static_file(myfile)
        return send_from_directory('cache', path)

    #@app.route('/hello/')
    #@app.route('/hello/<name>')

    @app.route("/save/<key>/<val>")
    def s(key, val):
        server_save(key, val)
        return tag('h1', 'Saved.') + "<br />" + tag('p', 'Saved: %s = %s' % (str(key), str(val)))

    @app.route("/sample")
    def do_sample():
        return sample()

    @app.route("/crazy")
    def hello():
        r = '<link rel="stylesheet" href="static/bootstrap.min.css">'
        r += tag('style', 'textarea { white-space:nowrap; }')
        r += tag('body',
                 tagc('div', 'container-fluid',
                      tagc('div', 'row',
                           tagc('div', 'col-md-6', tag('pre', walk_file())) +
                           tagc('div', 'col-md-6', 'Column 2' + a('Shut Down', '/sd')))))  # shutdown route is /sd below

        return r

    @app.route("/sd")
    def sd():
        print('SIGINT or CTRL-C detected. Exiting gracefully')
        # Note: the 'werkzeug.server.shutdown' hook only exists on older Werkzeug
        # releases (it was removed in Werkzeug 2.1), so on newer versions this raises.
        func = request.environ.get('werkzeug.server.shutdown')
        if func is None:
            raise RuntimeError('Not running with the Werkzeug Server')
        func()
        return "Server has shut down."

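    # A fallback for newer Werkzeug, where the shutdown hook above is gone:
    # signal our own process instead (sketch only, not part of the original):
    #
    #     import os, signal
    #     os.kill(os.getpid(), signal.SIGINT)
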
    #@socketio.on('my event', namespace='/test')
    #def test_message(message):
    #    print('received an event: "my event" from page. message is: %s' % message)
    #    emit('my response', {'data': 'got it! it is MYEVENT'})

    # Main images folder

    @app.route('/img/<path:path>')
    def send_img(path):
        return send_from_directory(BASE, path)

    @app.route('/mimg/<path:path>')
    def send_mimg(path):
        return send_from_directory(MEMEBASE, path)

    @app.route('/chan/<path:path>')
    def send_chan_img(path):
        chanpath = servertoys.CHANBASE  # "/home/phowell/hd2/peter_home_offload/Documents/scripts/chan/"
        return send_from_directory(chanpath, path)

    @app.route('/vid/<path:path>')
    def send_vid(path):
        return send_from_directory(MOVBASE, urllib.parse.unquote(path))

    # Static folder

    @app.route('/lib/<path:path>')
    def send_lib(path):
        return send_from_directory(LIBBASE, path)


    app.run(host=HOST, port=PORT)

    #socketio.run(app, host= '0.0.0.0', port=9998)


#################################################################################################################
#################################################################################################################
######
###### server infrastructure
######


def server_dispatch_json(function_name, arg='', arg2=''):
    print("Looking for function: %s. arg:%s. arg2:%s." % (function_name, arg, arg2))
    try:
        result = "" + globals()[function_name](arg, arg2)
        print("doing 2 args")
        return result
    except Exception as e:
        print("Error with that: %s" % str(e))
        try:
            result = "" + globals()[function_name](arg)
            print("doing 1 arg")
            return result
        except Exception as f:
            print("Error with that: %s" % str(f))
            try:
                result = globals()[function_name]()
                print("doing 0 arg")
                return result
            except Exception as gg:
                print("Error with that: %s" % str(gg))
                return json.dumps({'result': 'failed: exception', 'e1': str(e), 'e2': str(f), 'e3': str(gg)}, indent=2)


def server_dispatch(function_name, arg='', arg2=''):
    print("Looking for function: %s. arg:%s. arg2:%s." % (function_name, arg, arg2))
    try:
        result = "" + globals()[function_name](arg, arg2)
        print("doing 2 args")
        return result
    except Exception as e:
        print("Error with that: %s" % str(e))
        try:
            result = "" + globals()[function_name](arg)
            print("doing 1 arg")
            return result
        except Exception as f:
            print("Error with that: %s" % str(f))
            try:
                result = globals()[function_name]()
                print("doing 0 arg")
                return result
            except Exception as gg:
                print("Error with that: %s" % str(gg))
                return json.dumps({'result': 'failed: exception', 'e1': str(e), 'e2': str(f), 'e3': str(gg)}, indent=2)


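# How the /x/ and /api/ routes use server_dispatch(): the URL names a
# module-level function (normally one exported by servertoys) plus up to two
# string arguments, tried with 2, then 1, then 0 arguments. For example,
# assuming servertoys defines plain_sort and get_all_tags:
#
#     GET /x/plain_sort        ->  server_dispatch('plain_sort')
#     GET /api/get_all_tags/1  ->  server_dispatch('get_all_tags', '1') served as text/json

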
#################################################################################################################
#################################################################################################################
######
###### server startup
######

def serve():
    flask_thread()
    #x = threading.Thread(target=flask_thread, args=(q,))
    #x.start()
    #webbrowser.open_new_tab("http://localhost:%s" % str(PORT))
    print("Started?")


if __name__ == '__main__':
    print("Serving on %s" % str(PORT))
    #serve()
    flask_thread()
    print("...serve() function finished?...")
else:
    pass
    #print("Doing nothing.")


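# serve() currently runs flask_thread() in the calling thread; the commented-out
# lines show the intended background variant. A sketch of that mode, in case it
# is wanted again (assumes the default daemon-thread behaviour is acceptable):
#
#     def serve_background():
#         t = threading.Thread(target=flask_thread, daemon=True)
#         t.start()
#         webbrowser.open_new_tab("http://localhost:%s" % str(PORT))
#         return t

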
#################################################################################################################
#################################################################################################################
######
###### MORE MORE MORE
######


## Extract frames from video
#
# ffmpeg -i DJI_0024.MP4 -vf fps=10/1 output%04d.jpg

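# A small wrapper for the ffmpeg command above (sketch only; assumes ffmpeg is
# on PATH and writes numbered frames into the current directory):
#
#     import subprocess
#
#     def extract_frames(video_path, fps=10):
#         subprocess.run(['ffmpeg', '-i', video_path,
#                         '-vf', 'fps=%d/1' % fps,
#                         'output%04d.jpg'], check=True)
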
# list files
#  - all
#  - by tag
#  - by rating
#  - untagged / unrated
#  - query

# load / save tag, rate data for a file

# api for web / events
#  - rating
#  - tag add / remove

# events for new images, scrapes, imports

# a queue of unprocessed files

# approve import, move file & save metadata

# report on original, crop, resizes,

# add to queue for creating crops, resizes, removals

# add to queue for scrape from insta

# add to queue for scrape from chan

# add to queue for scrape from reddit

# toc of folders that have been scraped, ready for import check.

# list of urls that have been scraped. Don't double-download.

"""
|
|
|
|
|
|
use: D:\peter_home\Documents\scripts\chan\get.py
|
|
|
|
use: D:\peter_home\Documents\scripts\imageworld\cache.py
|
|
|
|
|
|
https://reddit.com/r/18_19/top?t=year
|
|
https://reddit.com/r/2busty2hide/top?t=year
|
|
https://reddit.com/r/aa_cups/top?t=year
|
|
https://reddit.com/r/adorableporn/top?t=year
|
|
https://reddit.com/r/asiancuties/top?t=year
|
|
https://reddit.com/r/asiansgonewild/top?t=year
|
|
https://reddit.com/r/asianhotties/top?t=year
|
|
https://reddit.com/r/barelylegal/top?t=year
|
|
https://reddit.com/r/barelylegalteens/top?t=year
|
|
https://reddit.com/r/biggerthanyouthought/top?t=year
|
|
https://reddit.com/r/bodyperfection/top?t=year
|
|
https://reddit.com/r/boobies/top?t=year
|
|
https://reddit.com/r/nsfwbraids/top?t=year
|
|
https://reddit.com/r/burstingout/top?t=year
|
|
https://reddit.com/r/bustypetite/top?t=year
|
|
https://reddit.com/r/chubby/top?t=year
|
|
https://reddit.com/r/cleavage/top?t=year
|
|
https://reddit.com/r/curls/top?t=year
|
|
https://reddit.com/r/curvy/top?t=year
|
|
https://reddit.com/r/cutelittlebutts/top?t=year
|
|
https://reddit.com/r/darkangels/top?t=year
|
|
https://reddit.com/r/downblouse/top?t=year
|
|
https://reddit.com/r/dirtysmall/top?t=year
|
|
https://reddit.com/r/ebony/top?t=year
|
|
https://reddit.com/r/fauxbait/top?t=year
|
|
https://reddit.com/r/fuckdoll/top?t=year
|
|
https://reddit.com/r/funsized/top?t=year
|
|
https://reddit.com/r/funwithfriends/top?t=year
|
|
https://reddit.com/r/ginger/top?t=year
|
|
https://reddit.com/r/girlsinschooluniforms/top?t=year
|
|
https://reddit.com/r/GirlsInWhiteTanks/top?t=year
|
|
https://reddit.com/r/girlskissing/top?t=year
|
|
https://reddit.com/r/gonewild18/top?t=year
|
|
https://reddit.com/r/gwnerdy/top?t=year
|
|
https://reddit.com/r/happyembarassedgirls/top?t=year
|
|
https://reddit.com/r/juicyasians/top?t=year
|
|
https://reddit.com/r/just18/top?t=year
|
|
https://reddit.com/r/latinas/top?t=year
|
|
https://reddit.com/r/legalteens/top?t=year
|
|
https://reddit.com/r/o_faces/top?t=year
|
|
https://reddit.com/r/petite/top?t=year
|
|
https://reddit.com/r/petitegonewild/top?t=year
|
|
https://reddit.com/r/pokies/top?t=year
|
|
https://reddit.com/r/prettygirls/top?t=year
|
|
https://reddit.com/r/realasians/top?t=year
|
|
https://reddit.com/r/realgirls/top?t=year
|
|
https://reddit.com/r/redheads/top?t=year
|
|
https://reddit.com/r/repressedgonewild/top?t=year
|
|
https://reddit.com/r/schoolgirlskirts/top?t=year
|
|
https://reddit.com/r/seethru/top?t=year
|
|
https://reddit.com/r/sexyfrex/top?t=year
|
|
https://reddit.com/r/sexygirls/top?t=year
|
|
https://reddit.com/r/shorthairchicks/top?t=year
|
|
https://reddit.com/r/slimthick/top?t=year
|
|
https://reddit.com/r/smallboobs/top?t=year
|
|
https://reddit.com/r/smallcutie/top?t=year
|
|
https://reddit.com/r/stacked/top?t=year
|
|
https://reddit.com/r/tanktops/top?t=year
|
|
https://reddit.com/r/teenbeauties/top?t=year
|
|
https://reddit.com/r/theratio/top?t=year
|
|
https://reddit.com/r/theunderboob/top?t=year
|
|
https://reddit.com/r/thick/top?t=year
|
|
https://reddit.com/r/thicker/top?t=year
|
|
https://reddit.com/r/tinytits/top?t=year
|
|
https://reddit.com/r/twingirls/top?t=year
|
|
https://reddit.com/r/toocuteforporn/top?t=year
|
|
https://reddit.com/r/WhiteTopAndJeans/top?t=year
|
|
https://reddit.com/r/womenofcolor/top?t=year
|
|
https://reddit.com/r/xsmallgirls/top?t=year
|
|
|
|
|
|
toocuteforporn
|
|
|
|
|
|
18_19
|
|
2busty2hide
|
|
aa_cups
|
|
adorableporn
|
|
asiancuties
|
|
asiansgonewild
|
|
asianhotties
|
|
barelylegal
|
|
barelylegalteens
|
|
biggerthanyouthought
|
|
bodyperfection
|
|
boobies
|
|
braids
|
|
burstingout
|
|
bustypetite
|
|
chubby
|
|
cleavage
|
|
curls
|
|
curvy
|
|
cutelittlebutts
|
|
darkangels
|
|
downblouse
|
|
dirtysmall
|
|
ebony
|
|
fauxbait
|
|
fuckdoll
|
|
funsized
|
|
funwithfriends
|
|
ginger
|
|
girlsinschooluniforms
|
|
GirlsInWhiteTanks
|
|
girlskissing
|
|
gonewild18
|
|
gwnerdy
|
|
happyembarassedgirls
|
|
juicyasians
|
|
just18
|
|
latinas
|
|
legalteens
|
|
o_faces
|
|
petite
|
|
petitegonewild
|
|
pokies
|
|
prettygirls
|
|
realasians
|
|
realgirls
|
|
redheads
|
|
repressedgonewild
|
|
seethru
|
|
sexyfrex
|
|
sexygirls
|
|
shorthairchicks
|
|
slimthick
|
|
smallboobs
|
|
smallcutie
|
|
stacked
|
|
tanktops
|
|
teenbeauties
|
|
theratio
|
|
theunderboob
|
|
thick
|
|
thicker
|
|
tinytits
|
|
twingirls
|
|
toocuteforporn
|
|
WhiteTopAndJeans
|
|
womenofcolor
|
|
xsmallgirls
|
|
|
|
|
|
|
|
|
|
|
|
|
|
chan queue
|
|
|
|
younggiant https://8kun.top/s/res/534.html
|
|
acup https://8kun.top/s/res/3386.html
|
|
penninsula https://8kun.top/s/res/35323.html
|
|
caramel https://8kun.top/s/res/9384.html
|
|
chub2 https://8kun.top/s/res/2290.html
|
|
amateur3 https://8kun.top/s/res/27209.html
|
|
vball https://8kun.top/s/res/19691.html
|
|
ariel https://8kun.top/s/res/2739.html
|
|
fb2 https://8kun.top/s/res/27347.html
|
|
models2 https://8kun.top/s/res/5853.html
|
|
south https://8kun.top/s/res/17718.html
|
|
milena https://8kun.top/s/res/16383.html
|
|
pigtails https://8kun.top/s/res/186.html
|
|
slut https://8kun.top/s/res/28135.html
|
|
ibtc https://8kun.top/s/res/15527.html
|
|
creep https://boards.4chan.org/b/thread/859972934
|
|
girlb https://boards.4chan.org/b/thread/859975536
|
|
celeb https://boards.4chan.org/b/thread/860000898
|
|
fb3 https://boards.4chan.org/b/thread/860003419
|
|
|
|
|
|
|
|
"""
|