server code
This commit is contained in:
parent
1c5e88e59f
commit
73bde40ff8
|
|
@ -0,0 +1,784 @@
|
|||
|
||||
|
||||
|
||||
import json, codecs, re, os, pypandoc, striprtf, sqlite3, random
|
||||
import heapq, shutil, datetime, urllib
|
||||
import itertools, time, markdown, csv, os.path, webbrowser, threading
|
||||
import hashlib, funcy, platform, sys, socket
|
||||
from striprtf.striprtf import rtf_to_text
|
||||
from functools import wraps
|
||||
from queue import Queue
|
||||
from flask import Flask, request, send_from_directory, Response, render_template
|
||||
from flask_socketio import SocketIO, emit
|
||||
from werkzeug.routing import PathConverter
|
||||
from orgpython import to_html
|
||||
from importlib import reload
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
from stat import S_ISREG, ST_CTIME, ST_MODE
|
||||
|
||||
this_machine = socket.gethostname()
|
||||
print(this_machine)
|
||||
|
||||
|
||||
# pypandoc striprtf sqlite3 heapq markdown webbrowser funcy platform socket functools queue flask flask_socketio werkzeug orgpython importlib pathlib stat
|
||||
|
||||
# orgpython webbrowser functools socket platform sqlite3 heapq funcy striprtf flask_socketio queue stat importlib
|
||||
|
||||
|
||||
|
||||
# https://github.com/SethMMorton/natsort
|
||||
# Simple yet flexible natural sorting in Python
|
||||
|
||||
|
||||
from servertoys import *
|
||||
import servertoys
|
||||
|
||||
#q = Queue()
|
||||
|
||||
|
||||
|
||||
|
||||
def dict_factory(cursor, row):
|
||||
d = {}
|
||||
for idx, col in enumerate(cursor.description):
|
||||
d[col[0]] = row[idx]
|
||||
return d
|
||||
|
||||
|
||||
|
||||
def server_save(key,value):
|
||||
codecs.open('cache/server_data.txt','a').write( "%s=%s\n" % (str(key),str(value)))
|
||||
|
||||
def flask_thread(q=0):
|
||||
print(" my __name__ is...", end=" ")
|
||||
print(__name__)
|
||||
|
||||
|
||||
#print("Starting ... flask_thread ...")
|
||||
#app = Flask(__name__, static_url_path='/cache',
|
||||
# static_folder='cache',)
|
||||
|
||||
|
||||
app = Flask(__name__, static_url_path='')
|
||||
app.config['SECRET_KEY'] = 'secret!abc#xyz91239456'
|
||||
app.jinja_env.auto_reload = True
|
||||
|
||||
print(app)
|
||||
|
||||
#socketio = SocketIO(app)
|
||||
|
||||
app.config['TEMPLATES_AUTO_RELOAD'] = True
|
||||
|
||||
#def before_request():
|
||||
# app.jinja_env.cache = {}
|
||||
|
||||
@app.route('/all')
|
||||
def list_all_pics():
|
||||
all = servertoys.BASE # "/home/phowell/hd2/deep1_homedir/Documents/ooo/"
|
||||
|
||||
#paths = sorted(Path(dirpath).iterdir(), key=os.path.getmtime)
|
||||
|
||||
|
||||
allp = os.listdir(all)
|
||||
# allp = [base(x) for x in allp]
|
||||
#print(allp)
|
||||
return "\n".join([ "<li>%s</li>" % a('/img/'+x, x) for x in filter(isPic, allp) ])
|
||||
|
||||
@app.route('/main')
|
||||
def main_app():
|
||||
all = servertoys.BASE # "/home/phowell/hd2/deep1_homedir/Documents/ooo/"
|
||||
allp = os.listdir(all)
|
||||
allp = ['/img/'+x for x in filter(isPic, allp) ]
|
||||
allp.sort()
|
||||
|
||||
allj = json.dumps(allp,indent=2)
|
||||
return render_template('index.html', all_pics_json=allj, index="100")
|
||||
|
||||
@app.route('/main/<index>')
|
||||
def main_app_indexed(index):
|
||||
all = servertoys.BASE # "/home/phowell/hd2/deep1_homedir/Documents/ooo/"
|
||||
allp = os.listdir(all)
|
||||
allp = ['/img/'+x for x in filter(isPic, allp) ]
|
||||
allp.sort()
|
||||
|
||||
allj = json.dumps(allp,indent=2)
|
||||
return render_template('index.html', all_pics_json=allj, index=str(index))
|
||||
|
||||
|
||||
@app.route('/manage/<index>')
|
||||
def manager_app_indexed(index):
|
||||
all = servertoys.BASE # "/home/phowell/hd2/deep1_homedir/Documents/ooo/"
|
||||
allp = os.listdir(all)
|
||||
allp = ['/img/'+x for x in filter(isPic, allp) ]
|
||||
allp.sort()
|
||||
|
||||
allj = json.dumps(allp,indent=2)
|
||||
return render_template('manage.html', all_pics_json=allj, index=str(index))
|
||||
|
||||
|
||||
@app.route('/meme/<index>')
|
||||
def manager_meme_indexed2(index):
|
||||
all = servertoys.MEMEBASE # "/home/phowell/hd2/peter_home/images/Ok Pictures and Memes/"
|
||||
allp = os.listdir(all)
|
||||
#allp = ['/mimg/'+x for x in filter(isPic, allp) ]
|
||||
allp = ['/mimg/'+x for x in allp ]
|
||||
allp.sort()
|
||||
|
||||
allj = json.dumps(allp,indent=2)
|
||||
return render_template('manage2.html', which=2, all_pics_json=allj, index=str(index))
|
||||
|
||||
|
||||
@app.route('/m/<index>')
|
||||
def manager_app_indexed2(index):
|
||||
all = servertoys.BASE # "/home/phowell/hd2/deep1_homedir/Documents/ooo/"
|
||||
allp = os.listdir(all)
|
||||
allp = ['/img/'+x for x in filter(isPic, allp) ]
|
||||
allp.sort()
|
||||
|
||||
allj = json.dumps(allp,indent=2)
|
||||
return render_template('manage2.html', which=1, all_pics_json=allj, index=str(index))
|
||||
|
||||
|
||||
@app.route('/importer/<index>')
|
||||
def manager_importer(index):
|
||||
chanpath = servertoys.CHANBASE # "/home/phowell/hd2/peter_home_offload/Documents/scripts/chan/"
|
||||
if index=='list':
|
||||
return "<br />".join( [ "<a href='%s'>%s</a>" % (x,x) for x in sorted(os.listdir(chanpath))] )
|
||||
|
||||
#return "<br />".join( [ "<a href='%s'>%s</a>" % ("/chan/"+index+"/"+x,x) for x in os.listdir(chanpath + index)] )
|
||||
|
||||
all = chanpath + index
|
||||
allp = os.listdir(all)
|
||||
allp = ['/chan/'+index+'/'+ x for x in allp] # filter(isPic, allp) ]
|
||||
allp.sort()
|
||||
|
||||
allj = json.dumps(allp,indent=2)
|
||||
return render_template('importer.html', all_pics_json=allj, index=0)
|
||||
|
||||
|
||||
@app.route('/alltags')
|
||||
def alltags():
|
||||
return servertoys.get_all_tags()
|
||||
|
||||
@app.route('/alltags/<which>')
|
||||
def alltags_which(which):
|
||||
return servertoys.get_all_tags(which)
|
||||
|
||||
@app.route('/add_pic_tag/<ttt>/<ppp>')
|
||||
def apt(ttt,ppp):
|
||||
return servertoys.add_pic_tag(ttt,ppp)
|
||||
|
||||
|
||||
|
||||
|
||||
@app.route('/prefix/<index>')
|
||||
def prefix_app_indexed(index):
|
||||
all = servertoys.BASE # "/home/phowell/hd2/deep1_homedir/Documents/ooo/"
|
||||
|
||||
allp = os.listdir(all)
|
||||
allp = ['/img/'+x for x in filter(isPic, allp) if plain_sw(x,index) ]
|
||||
#funcy.filter(lambda x: plain(x).startswith('/img/'+index), allp)
|
||||
allp.sort()
|
||||
allj = json.dumps(allp,indent=2)
|
||||
return render_template('index.html', all_pics_json=allj, index="0")
|
||||
|
||||
|
||||
@app.route('/add_pic/<path>')
|
||||
def b(path):
|
||||
return add_pic(path)
|
||||
|
||||
@app.route('/pic')
|
||||
def a():
|
||||
return get_all_pics(1)
|
||||
|
||||
@app.route('/pic/<index>')
|
||||
def c(index):
|
||||
allp = get_all_pics()
|
||||
#allp = ['/img/'+x for x in filter(isPic, allp) ]
|
||||
#allp.sort()
|
||||
|
||||
#allj = json.dumps(allp,indent=2)
|
||||
allp = [ii for ii in map( lambda x: "/img/" + x, funcy.pluck( 'path', allp ))]
|
||||
allj = json.dumps( allp )
|
||||
return render_template('manage.html', all_pics_json=allj, index=str(index))
|
||||
|
||||
@app.route('/i')
|
||||
def db_img():
|
||||
global con, cur
|
||||
con = sqlite3.connect(LIBBASE + 'ooo.db')
|
||||
con.row_factory = dict_factory
|
||||
cur = con.cursor()
|
||||
|
||||
cur.execute("SELECT * FROM pics p JOIN pictag pt ON pt.pic=p.id JOIN tags t ON t.id=pt.tag")
|
||||
result = cur.fetchall()
|
||||
return "<pre>%s</pre>" % json.dumps( result,indent=2 )
|
||||
|
||||
###
|
||||
###
|
||||
### VIDEO
|
||||
###
|
||||
###
|
||||
###
|
||||
|
||||
|
||||
@app.route('/videos')
|
||||
def vs_app():
|
||||
global MOVBASE
|
||||
|
||||
|
||||
# get all entries in the directory
|
||||
# Get their stats
|
||||
# leave only regular files, insert creation date
|
||||
|
||||
|
||||
#os.path.join(MOVBASE, file_name) for file_name in os.listdir( MOVBASE ))
|
||||
#print( list(entries)[1:10] )
|
||||
#print( list(entries)[1:10] )
|
||||
|
||||
#entries = ((os.stat(path), path) for path in entries)
|
||||
#entries = ((stat[ST_CTIME], path) for stat, path in entries if S_ISREG(stat[ST_MODE]))
|
||||
#print( list(allp)[1:10] )
|
||||
#allp = os.listdir(MOVBASE)
|
||||
#allp.sort()
|
||||
#print( list(allp)[1:10] )
|
||||
|
||||
|
||||
|
||||
allp = sorted(Path(MOVBASE).iterdir(), key=os.path.getmtime)
|
||||
allp = filter(isVid, allp)
|
||||
allp = [ '/vid/'+urllib.parse.quote(x.name ) for x in allp ]
|
||||
|
||||
allj = json.dumps(allp) # ,indent=2)
|
||||
return render_template('allvids.html', all_pics_json=allj, index="100")
|
||||
|
||||
@app.route('/video')
|
||||
def v_app():
|
||||
global MOVBASE
|
||||
#allp = os.listdir(MOVBASE)
|
||||
#allp = ['/vid/'+urllib.parse.quote(x) for x in filter(isVid, allp) ]
|
||||
#allp.sort()
|
||||
|
||||
allp = sorted(Path(MOVBASE).iterdir(), key=os.path.getmtime)
|
||||
allp = filter(isVid, allp)
|
||||
allp = [ '/vid/'+urllib.parse.quote(x.name ) for x in allp ]
|
||||
|
||||
|
||||
allj = json.dumps(allp,indent=2)
|
||||
return render_template('vindex.html', all_pics_json=allj, index="100")
|
||||
|
||||
@app.route('/video/<index>')
|
||||
def v_app_indexed(index):
|
||||
global MOVBASE
|
||||
allp = filter(isVid,sorted(Path(MOVBASE).iterdir(), key=os.path.getmtime))
|
||||
allj = json.dumps([ '/vid/'+urllib.parse.quote(x.name ) for x in allp ],indent=2)
|
||||
return render_template('vindex.html', all_pics_json=allj, index=str(index))
|
||||
|
||||
|
||||
#
|
||||
# SORTING FILES
|
||||
#
|
||||
|
||||
@app.route('/sort')
|
||||
def sorter():
|
||||
return plain_sort()
|
||||
|
||||
|
||||
@app.route('/sort/<index>')
|
||||
def sort_index(index):
|
||||
return plain_sort(index='')
|
||||
|
||||
|
||||
|
||||
|
||||
#
|
||||
# SAVING STUFF
|
||||
#
|
||||
|
||||
@app.route('/save', methods=['POST'])
|
||||
def save_post():
|
||||
now = datetime.now().strftime('%Y%m%dT%H%M')
|
||||
path = request.form['path']
|
||||
txt = request.form['content']
|
||||
|
||||
o3 = codecs.open(server.writing_path + path, 'r', 'utf-8')
|
||||
orig_text = o3.read()
|
||||
o3.close()
|
||||
|
||||
bu_filename = server.writing_path + 'older_copies/' + path + '_' + now + '.md'
|
||||
o2 = codecs.open( bu_filename, 'w', 'utf-8' )
|
||||
o2.write(orig_text)
|
||||
o2.close()
|
||||
print('wrote backup to %s.' % bu_filename)
|
||||
|
||||
o1 = codecs.open(server.writing_path+path, 'w', 'utf-8')
|
||||
o1.write(txt)
|
||||
o1.close()
|
||||
return "<h1>Successfully Saved</h1><br>" + a('back to writing folder','/x/writing/index') + \
|
||||
" " + a('back to home','/')
|
||||
|
||||
|
||||
#
|
||||
# SERVER maintenance type stuff
|
||||
@app.route('/rl')
|
||||
def restart():
|
||||
reload(servertoys)
|
||||
#reload(localcache)
|
||||
return "Server code reloaded"
|
||||
|
||||
@app.route("/x/<func>/<arg>/<arrg>")
|
||||
def dispatch3(func,arg,arrg):
|
||||
print("2 args")
|
||||
return "" + server_dispatch(func, arg, arrg)
|
||||
|
||||
@app.route("/x/<func>/<arg>")
|
||||
def dispatch2(func,arg):
|
||||
print("1 arg")
|
||||
return "" + server_dispatch(func, arg)
|
||||
|
||||
@app.route("/x/<func>")
|
||||
def dispatch(func):
|
||||
print("0 arg")
|
||||
return server_dispatch(func)
|
||||
|
||||
@app.route("/api/<func>/<arg>/<arrg>")
|
||||
def dispatch3j(func,arg,arrg):
|
||||
print("json, 3 args")
|
||||
return Response(server_dispatch(func, arg, arrg), mimetype='text/json')
|
||||
|
||||
@app.route("/api/<func>/<arg>")
|
||||
def dispatch2j(func,arg):
|
||||
print("json, 1 arg")
|
||||
return Response(server_dispatch(func, arg), mimetype='text/json')
|
||||
|
||||
@app.route("/api/<func>")
|
||||
def dispatch1j(func):
|
||||
print("json, 0 arg")
|
||||
return Response(server_dispatch(func), mimetype='text/json')
|
||||
|
||||
@app.route("/")
|
||||
def home():
|
||||
return "<h1>Homepage</h1>"
|
||||
|
||||
#
|
||||
# STATIC ROUTES
|
||||
#
|
||||
|
||||
"""@app.route('/lib/<path:path>')
|
||||
def send_jslib(path):
|
||||
return send_from_directory('gui/lib', path)"""
|
||||
|
||||
@app.route('/data/<path:path>')
|
||||
def send_cachedata(path):
|
||||
#myfile = os.path.join('cache', path).replace('\\','/')
|
||||
print(path)
|
||||
#return app.send_static_file(myfile)
|
||||
return send_from_directory('cache', path)
|
||||
|
||||
#@app.route('/hello/')
|
||||
#@app.route('/hello/<name>')
|
||||
|
||||
|
||||
@app.route("/save/<key>/<val>")
|
||||
def s(key,val):
|
||||
server_save(key,val)
|
||||
return tag('h1','Saved.') + "<br />" + tag('p', 'Saved: %s = %s' % (str(key),str(val)))
|
||||
|
||||
@app.route("/sample")
|
||||
def do_sample():
|
||||
return sample()
|
||||
|
||||
|
||||
@app.route("/crazy")
|
||||
def hello():
|
||||
r = '<link rel="stylesheet" href="static/bootstrap.min.css">'
|
||||
r += tag('style', 'textarea { white-space:nowrap; }')
|
||||
r += tag('body', \
|
||||
tagc('div','container-fluid', \
|
||||
tagc('div','row', \
|
||||
tagc( 'div', 'col-md-6', tag('pre', walk_file() ) ) + \
|
||||
tagc( 'div', 'col-md-6', 'Column 2' + a('Shut Down','/shutdown' ) ) ) ) )
|
||||
|
||||
|
||||
|
||||
return r
|
||||
|
||||
@app.route("/sd")
|
||||
def sd():
|
||||
print('SIGINT or CTRL-C detected. Exiting gracefully')
|
||||
func = request.environ.get('werkzeug.server.shutdown')
|
||||
if func is None:
|
||||
raise RuntimeError('Not running with the Werkzeug Server')
|
||||
func()
|
||||
return "Server has shut down."
|
||||
|
||||
|
||||
#@socketio.on('my event', namespace='/test')
|
||||
#def test_message(message):
|
||||
# print('received and event: "my event" from page. message is: %s' % message)
|
||||
# emit('my response', {'data': 'got it! it is MYEVENT'})
|
||||
|
||||
# Main images folder
|
||||
|
||||
@app.route('/img/<path:path>')
|
||||
def send_img(path):
|
||||
return send_from_directory(BASE, path)
|
||||
|
||||
@app.route('/mimg/<path:path>')
|
||||
def send_mimg(path):
|
||||
return send_from_directory(MEMEBASE, path)
|
||||
|
||||
@app.route('/chan/<path:path>')
|
||||
def send_chan_img(path):
|
||||
chanpath = servertoys.CHANBASE # "/home/phowell/hd2/peter_home_offload/Documents/scripts/chan/"
|
||||
return send_from_directory(chanpath, path)
|
||||
|
||||
@app.route('/vid/<path:path>')
|
||||
def send_vid(path):
|
||||
return send_from_directory(MOVBASE, urllib.parse.unquote(path))
|
||||
|
||||
# Static folder
|
||||
|
||||
@app.route('/lib/<path:path>')
|
||||
def send_lib(path):
|
||||
return send_from_directory(LIBBASE, path)
|
||||
|
||||
|
||||
app.run(host=HOST, port=PORT)
|
||||
|
||||
|
||||
#socketio.run(app, host= '0.0.0.0', port=9998)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
#################################################################################################################
|
||||
#################################################################################################################
|
||||
######
|
||||
###### server infrastructure
|
||||
######
|
||||
|
||||
|
||||
|
||||
|
||||
def server_dispatch_json(function_name,arg='', arg2=''):
|
||||
print("Looking for function: %s. arg:%s. arg2:%s." % (function_name, arg, arg2))
|
||||
try:
|
||||
result = "" + globals()[function_name](arg, arg2)
|
||||
print("doing 2 args")
|
||||
return result
|
||||
except Exception as e:
|
||||
print("Error with that: %s" % str(e))
|
||||
try:
|
||||
result = "" + globals()[function_name](arg) #
|
||||
print("doing 1 arg")
|
||||
return result
|
||||
except Exception as f:
|
||||
print("Error with that: %s" % str(f))
|
||||
try:
|
||||
result = globals()[function_name]()
|
||||
print("doing 0 arg")
|
||||
return result
|
||||
except Exception as gg:
|
||||
print("Error with that: %s" % str(gg))
|
||||
return json.dumps({'result':'failed: exception', 'e1':str(e), 'e2':str(f), 'e3':str(gg)}, indent=2)
|
||||
|
||||
|
||||
def server_dispatch(function_name,arg='', arg2=''):
|
||||
print("Looking for function: %s. arg:%s. arg2:%s." % (function_name, arg, arg2))
|
||||
try:
|
||||
result = "" + globals()[function_name](arg, arg2)
|
||||
print("doing 2 args")
|
||||
return result
|
||||
except Exception as e:
|
||||
print("Error with that: %s" % str(e))
|
||||
try:
|
||||
result = "" + globals()[function_name](arg) #
|
||||
print("doing 1 arg")
|
||||
return result
|
||||
except Exception as f:
|
||||
print("Error with that: %s" % str(f))
|
||||
try:
|
||||
result = globals()[function_name]()
|
||||
print("doing 0 arg")
|
||||
return result
|
||||
except Exception as gg:
|
||||
print("Error with that: %s" % str(gg))
|
||||
return json.dumps({'result':'failed: exception', 'e1':str(e), 'e2':str(f), 'e3':str(gg)}, indent=2)
|
||||
|
||||
|
||||
#################################################################################################################
|
||||
#################################################################################################################
|
||||
######
|
||||
###### server startup
|
||||
######
|
||||
|
||||
def serve():
|
||||
flask_thread()
|
||||
#x = threading.Thread(target=flask_thread, args=(q,))
|
||||
#x.start()
|
||||
#webbrowser.open_new_tab("http://localhost:%s" % str(PORT))
|
||||
print("Started?")
|
||||
|
||||
if __name__ == '__main__':
|
||||
print("Serving on %s" % str(PORT))
|
||||
#serve()
|
||||
flask_thread()
|
||||
print("...serve() function finished?...")
|
||||
else:
|
||||
pass
|
||||
#print("Doing nothing.")
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
#################################################################################################################
|
||||
#################################################################################################################
|
||||
######
|
||||
###### MORE MORE MORE
|
||||
######
|
||||
|
||||
|
||||
## Extract frames from video
|
||||
#
|
||||
# ffmpeg -i DJI_0024.MP4 -vf fps=10/1 output%04d.jpg
|
||||
|
||||
|
||||
|
||||
# list files
|
||||
# - all
|
||||
# - by tag
|
||||
# - by rating
|
||||
# - untagged / unrated
|
||||
# - query
|
||||
|
||||
|
||||
# load / save tag, rate data for a file
|
||||
|
||||
|
||||
|
||||
# api for web / events
|
||||
# - rating
|
||||
# - tag add / remove
|
||||
|
||||
|
||||
|
||||
# events for new images, scrapes, imports
|
||||
|
||||
# a queue of unprocessed files
|
||||
|
||||
# approve import, move file & save metadata
|
||||
|
||||
# report on original, crop, resizes,
|
||||
|
||||
# add to queue for creating crops, resizes, removals
|
||||
|
||||
|
||||
# add to queue for scrape from insta
|
||||
|
||||
# add to queue for scrape from chan
|
||||
|
||||
# add to queue for scrape from reddit
|
||||
|
||||
# toc of folders that have been scraped, ready for import check.
|
||||
|
||||
# list of urls that have been scraped. Don't double-download.
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
"""
|
||||
|
||||
|
||||
use: D:\peter_home\Documents\scripts\chan\get.py
|
||||
|
||||
use: D:\peter_home\Documents\scripts\imageworld\cache.py
|
||||
|
||||
|
||||
https://reddit.com/r/18_19/top?t=year
|
||||
https://reddit.com/r/2busty2hide/top?t=year
|
||||
https://reddit.com/r/aa_cups/top?t=year
|
||||
https://reddit.com/r/adorableporn/top?t=year
|
||||
https://reddit.com/r/asiancuties/top?t=year
|
||||
https://reddit.com/r/asiansgonewild/top?t=year
|
||||
https://reddit.com/r/asianhotties/top?t=year
|
||||
https://reddit.com/r/barelylegal/top?t=year
|
||||
https://reddit.com/r/barelylegalteens/top?t=year
|
||||
https://reddit.com/r/biggerthanyouthought/top?t=year
|
||||
https://reddit.com/r/bodyperfection/top?t=year
|
||||
https://reddit.com/r/boobies/top?t=year
|
||||
https://reddit.com/r/nsfwbraids/top?t=year
|
||||
https://reddit.com/r/burstingout/top?t=year
|
||||
https://reddit.com/r/bustypetite/top?t=year
|
||||
https://reddit.com/r/chubby/top?t=year
|
||||
https://reddit.com/r/cleavage/top?t=year
|
||||
https://reddit.com/r/curls/top?t=year
|
||||
https://reddit.com/r/curvy/top?t=year
|
||||
https://reddit.com/r/cutelittlebutts/top?t=year
|
||||
https://reddit.com/r/darkangels/top?t=year
|
||||
https://reddit.com/r/downblouse/top?t=year
|
||||
https://reddit.com/r/dirtysmall/top?t=year
|
||||
https://reddit.com/r/ebony/top?t=year
|
||||
https://reddit.com/r/fauxbait/top?t=year
|
||||
https://reddit.com/r/fuckdoll/top?t=year
|
||||
https://reddit.com/r/funsized/top?t=year
|
||||
https://reddit.com/r/funwithfriends/top?t=year
|
||||
https://reddit.com/r/ginger/top?t=year
|
||||
https://reddit.com/r/girlsinschooluniforms/top?t=year
|
||||
https://reddit.com/r/GirlsInWhiteTanks/top?t=year
|
||||
https://reddit.com/r/girlskissing/top?t=year
|
||||
https://reddit.com/r/gonewild18/top?t=year
|
||||
https://reddit.com/r/gwnerdy/top?t=year
|
||||
https://reddit.com/r/happyembarassedgirls/top?t=year
|
||||
https://reddit.com/r/juicyasians/top?t=year
|
||||
https://reddit.com/r/just18/top?t=year
|
||||
https://reddit.com/r/latinas/top?t=year
|
||||
https://reddit.com/r/legalteens/top?t=year
|
||||
https://reddit.com/r/o_faces/top?t=year
|
||||
https://reddit.com/r/petite/top?t=year
|
||||
https://reddit.com/r/petitegonewild/top?t=year
|
||||
https://reddit.com/r/pokies/top?t=year
|
||||
https://reddit.com/r/prettygirls/top?t=year
|
||||
https://reddit.com/r/realasians/top?t=year
|
||||
https://reddit.com/r/realgirls/top?t=year
|
||||
https://reddit.com/r/redheads/top?t=year
|
||||
https://reddit.com/r/repressedgonewild/top?t=year
|
||||
https://reddit.com/r/schoolgirlskirts/top?t=year
|
||||
https://reddit.com/r/seethru/top?t=year
|
||||
https://reddit.com/r/sexyfrex/top?t=year
|
||||
https://reddit.com/r/sexygirls/top?t=year
|
||||
https://reddit.com/r/shorthairchicks/top?t=year
|
||||
https://reddit.com/r/slimthick/top?t=year
|
||||
https://reddit.com/r/smallboobs/top?t=year
|
||||
https://reddit.com/r/smallcutie/top?t=year
|
||||
https://reddit.com/r/stacked/top?t=year
|
||||
https://reddit.com/r/tanktops/top?t=year
|
||||
https://reddit.com/r/teenbeauties/top?t=year
|
||||
https://reddit.com/r/theratio/top?t=year
|
||||
https://reddit.com/r/theunderboob/top?t=year
|
||||
https://reddit.com/r/thick/top?t=year
|
||||
https://reddit.com/r/thicker/top?t=year
|
||||
https://reddit.com/r/tinytits/top?t=year
|
||||
https://reddit.com/r/twingirls/top?t=year
|
||||
https://reddit.com/r/toocuteforporn/top?t=year
|
||||
https://reddit.com/r/WhiteTopAndJeans/top?t=year
|
||||
https://reddit.com/r/womenofcolor/top?t=year
|
||||
https://reddit.com/r/xsmallgirls/top?t=year
|
||||
|
||||
|
||||
toocuteforporn
|
||||
|
||||
|
||||
18_19
|
||||
2busty2hide
|
||||
aa_cups
|
||||
adorableporn
|
||||
asiancuties
|
||||
asiansgonewild
|
||||
asianhotties
|
||||
barelylegal
|
||||
barelylegalteens
|
||||
biggerthanyouthought
|
||||
bodyperfection
|
||||
boobies
|
||||
braids
|
||||
burstingout
|
||||
bustypetite
|
||||
chubby
|
||||
cleavage
|
||||
curls
|
||||
curvy
|
||||
cutelittlebutts
|
||||
darkangels
|
||||
downblouse
|
||||
dirtysmall
|
||||
ebony
|
||||
fauxbait
|
||||
fuckdoll
|
||||
funsized
|
||||
funwithfriends
|
||||
ginger
|
||||
girlsinschooluniforms
|
||||
GirlsInWhiteTanks
|
||||
girlskissing
|
||||
gonewild18
|
||||
gwnerdy
|
||||
happyembarassedgirls
|
||||
juicyasians
|
||||
just18
|
||||
latinas
|
||||
legalteens
|
||||
o_faces
|
||||
petite
|
||||
petitegonewild
|
||||
pokies
|
||||
prettygirls
|
||||
realasians
|
||||
realgirls
|
||||
redheads
|
||||
repressedgonewild
|
||||
seethru
|
||||
sexyfrex
|
||||
sexygirls
|
||||
shorthairchicks
|
||||
slimthick
|
||||
smallboobs
|
||||
smallcutie
|
||||
stacked
|
||||
tanktops
|
||||
teenbeauties
|
||||
theratio
|
||||
theunderboob
|
||||
thick
|
||||
thicker
|
||||
tinytits
|
||||
twingirls
|
||||
toocuteforporn
|
||||
WhiteTopAndJeans
|
||||
womenofcolor
|
||||
xsmallgirls
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
chan queue
|
||||
|
||||
younggiant https://8kun.top/s/res/534.html
|
||||
acup https://8kun.top/s/res/3386.html
|
||||
penninsula https://8kun.top/s/res/35323.html
|
||||
caramel https://8kun.top/s/res/9384.html
|
||||
chub2 https://8kun.top/s/res/2290.html
|
||||
amateur3 https://8kun.top/s/res/27209.html
|
||||
vball https://8kun.top/s/res/19691.html
|
||||
ariel https://8kun.top/s/res/2739.html
|
||||
fb2 https://8kun.top/s/res/27347.html
|
||||
models2 https://8kun.top/s/res/5853.html
|
||||
south https://8kun.top/s/res/17718.html
|
||||
milena https://8kun.top/s/res/16383.html
|
||||
pigtails https://8kun.top/s/res/186.html
|
||||
slut https://8kun.top/s/res/28135.html
|
||||
ibtc https://8kun.top/s/res/15527.html
|
||||
creep https://boards.4chan.org/b/thread/859972934
|
||||
girlb https://boards.4chan.org/b/thread/859975536
|
||||
celeb https://boards.4chan.org/b/thread/860000898
|
||||
fb3 https://boards.4chan.org/b/thread/860003419
|
||||
|
||||
|
||||
|
||||
"""
|
||||
|
|
@ -0,0 +1,459 @@
|
|||
|
||||
import os,json, funcy, re, sqlite3, operator, sys, socket
|
||||
from flask import render_template
|
||||
from collections import defaultdict
|
||||
|
||||
q = ''
|
||||
|
||||
DEEP_IP = '192.168.1.6'
|
||||
|
||||
this_machine = socket.gethostname()
|
||||
|
||||
if this_machine=='ROGDESKTOP':
|
||||
HOST = "192.168.1.7"
|
||||
PORT = "9999"
|
||||
BASE = "\\\\%s\\hd2\\deep1_homedir\\Documents\\ooo\\" % DEEP_IP
|
||||
LIBBASE = "\\\\%s\\hd2\\peter_home\\Documents\\scripts\\ooopics\\lib\\" % DEEP_IP
|
||||
MEMEBASE = "\\\\s\\hd2\\peter_home\\images\\Ok Pictures and Memes\\" % DEEP_IP
|
||||
CHANBASE = "\\\\%s\\hd2\\peter_home_offload\\Documents\\scripts\\chan\\" % DEEP_IP
|
||||
|
||||
MOVBASE = "\\\\%s\\hd2\\nogood\\media\\" % DEEP_IP
|
||||
DOCBASE = "\\\\%s\\hd2\\bit_complete" % DEEP_IP
|
||||
|
||||
else:
|
||||
|
||||
HOST = DEEP_IP
|
||||
PORT = "9999"
|
||||
BASE = "/media/hd2/deep1_homedir/Documents/ooo/"
|
||||
LIBBASE = "/media/hd2/peter_home/Documents/scripts/ooopics/lib/"
|
||||
MEMEBASE = "/media/hd2/peter_home/images/Ok Pictures and Memes/"
|
||||
CHANBASE = "/media/hd2/peter_home_offload/Documents/scripts/chan/"
|
||||
|
||||
MOVBASE = "/media/hd2/nogood/media/"
|
||||
DOCBASE = "/media/hd2/bit_complete"
|
||||
|
||||
|
||||
con = ''
|
||||
cur = ''
|
||||
|
||||
|
||||
def dict_factory(cursor, row):
|
||||
d = {}
|
||||
for idx, col in enumerate(cursor.description):
|
||||
d[col[0]] = row[idx]
|
||||
return d
|
||||
|
||||
|
||||
##########
|
||||
########## Determining the contents of a folder
|
||||
##########
|
||||
|
||||
movietype = "mkv,mp4,wmv,avi,webm,mpg,mpeg".split(",")
|
||||
booktype = "epub,pdf,odt,docx,html,rtf,mobi,djvu,azw,azw3,chm".split(",")
|
||||
musictype = "flac,mp3,wma,m3u".split(",")
|
||||
imagetype = "jpg,jpeg,png,gif,webp".split(",")
|
||||
archivetype = "zip,rar,iso,tar,gz".split(",")
|
||||
|
||||
types = {'movie':movietype, 'book':booktype, 'music':musictype, 'image':imagetype, 'archive':archivetype}
|
||||
#to_keep = "flac,epub,pdf,mkv,mp4,wmv,mp3,wma,avi,webm,m3u,zip,odt,jpeg,jpg,png,html,rtf,txt,mobi,djvu,azw,docx,azw3,".split(",")
|
||||
|
||||
def cleantext(x):
|
||||
return x.encode('utf8').decode(sys.stdout.encoding)
|
||||
|
||||
def overview_folder(f):
|
||||
count_files = 0
|
||||
count_dirs = 0
|
||||
size = 0
|
||||
for root, dirs, files in os.walk(f):
|
||||
count_files += len(files)
|
||||
count_dirs += len(dirs)
|
||||
size += sum( [ os.path.getsize(os.path.join(root,x)) for x in files ] )
|
||||
return (count_files,count_dirs,size)
|
||||
|
||||
|
||||
def ending(x): return x.split('.')[-1].lower()
|
||||
|
||||
def depth(x): return len(x.split('/')) - len(DOCBASE.split('/'))
|
||||
|
||||
def greater_type(x):
|
||||
for label,coll in types.items():
|
||||
if x in coll: return label
|
||||
return "unknown"
|
||||
|
||||
def count_types(filelist):
|
||||
endings = map(ending, filelist)
|
||||
gts = list(map(greater_type, endings))
|
||||
howmany = defaultdict(int)
|
||||
for G in gts: howmany[G] += 1
|
||||
sorted_d = sorted(howmany.items(), key=operator.itemgetter(1))
|
||||
return sorted_d
|
||||
|
||||
def most_common(filelist):
|
||||
sortedtypes = count_types(filelist)
|
||||
if len(sortedtypes)==0: return ''
|
||||
y = sortedtypes[-1]
|
||||
if not y[0]=='unknown': return y[0]
|
||||
if len(sortedtypes)>1:
|
||||
return sortedtypes[-2][0]
|
||||
return ''
|
||||
|
||||
#### GOAL: picture manager. Show folder, and
|
||||
####
|
||||
#### - mark for hide or delete
|
||||
#### - rate
|
||||
#### - accomodate major sortings, like personal/fam, gav, meme, x3, etc
|
||||
####
|
||||
#### - refer to any pic from Z app.
|
||||
####
|
||||
#### - make an infinite / zoomable ideaboard
|
||||
#### + make autolayout
|
||||
#### + make auto slideshows
|
||||
#### + multi-client slideshow for bigger effects (multi-projector)
|
||||
####
|
||||
#### - tag it
|
||||
#### - possibly move all selected or all of a tag if the drives cooperate
|
||||
#### - make hierarchy of tags so i can see or search a group of them
|
||||
#### - and hide some by default
|
||||
#### - framework for ai so i can practice
|
||||
#### + face detection
|
||||
#### + descriptions
|
||||
#### + similar photos
|
||||
#### + train fancier stuff, gans, face swaps, etc
|
||||
####
|
||||
|
||||
# oopics look.py
|
||||
|
||||
# my torrent sort.py
|
||||
|
||||
# zettle app
|
||||
|
||||
# my bookmark
|
||||
# app and
|
||||
# chrome bkmks
|
||||
|
||||
|
||||
|
||||
# Serve the ooopics backend
|
||||
|
||||
def tag(x,y): return "<%s>%s</%s>" % (x,y,x)
|
||||
|
||||
def img(s): return "<img src='img/%s' />" % s
|
||||
|
||||
def tagc(x,c,y): return '<%s class="%s">%s</%s>' % (x,c,y,x)
|
||||
|
||||
def a(href,txt): return '<a href="%s">%s</a>' % (href,txt)
|
||||
|
||||
def isPic(s): return os.path.isfile(BASE+s)
|
||||
|
||||
def isPicPre(s,p):
|
||||
t = s
|
||||
t.lower()
|
||||
if plain(t).startswith(p):
|
||||
return os.path.isfile(BASE+s)
|
||||
return False
|
||||
|
||||
def isVid(s): return s.name.endswith('.mp4') and os.path.isfile(MOVBASE+s.name)
|
||||
|
||||
def base(s): return BASE + s
|
||||
|
||||
def plain(s):
|
||||
s = s.lower()
|
||||
return re.sub(r'[^a-z0-9]','',s)
|
||||
|
||||
def plain_sw(s,pre):
|
||||
s = plain(s)
|
||||
if s[0] == pre[0] and s[1]==pre[1] and s[2]==pre[2]:
|
||||
print("%s startswith %s ?" % (s,pre))
|
||||
if s.startswith(pre):
|
||||
print('returning T')
|
||||
return True
|
||||
#print('returning F')
|
||||
return False
|
||||
|
||||
def first_n(s): # / i m g / _5____
|
||||
n = 4
|
||||
prefixlen = 4
|
||||
s = plain(s)
|
||||
if len(s) > (n+prefixlen): return s[prefixlen:prefixlen+n]
|
||||
return s
|
||||
|
||||
|
||||
def modtime(s):
|
||||
try:
|
||||
return os.path.getmtime(base(s))
|
||||
except:
|
||||
return ''
|
||||
|
||||
|
||||
def main2():
|
||||
all = BASE # "/home/phowell/hd2/deep1_homedir/Documents/ooo/"
|
||||
|
||||
allp = os.listdir(all)
|
||||
allp = ['/img/'+x for x in filter(isPic, allp) ]
|
||||
allp.sort( key=modtime )
|
||||
|
||||
allj = json.dumps(allp,indent=2)
|
||||
return render_template('index.html', all_pics_json=allj, index="100")
|
||||
|
||||
|
||||
def prefix():
|
||||
all = BASE # "/home/phowell/hd2/deep1_homedir/Documents/ooo/"
|
||||
|
||||
allp = os.listdir(all)
|
||||
allp = ['/img/'+x for x in filter(isPic, allp) ]
|
||||
allp.sort()
|
||||
all_grps = [ [g for g in k] for k in funcy.partition_by(first_n, allp)]
|
||||
all_label_grps = [ (first_n(k[0]),len(k),k) for k in all_grps if len(k) > 3 ]
|
||||
print(all_label_grps)
|
||||
#return json.dumps(all_label_grps,indent=2)
|
||||
#all_label_grps.sort( key= lambda x: 0-x[1] )
|
||||
all_label_grps.sort( key= lambda x: x[0] )
|
||||
|
||||
all_keys = [ k[0] for k in all_label_grps ]
|
||||
amounts = [k[1] for k in all_label_grps ]
|
||||
|
||||
allk = json.dumps(all_keys,indent=2)
|
||||
alla = json.dumps(amounts,indent=2)
|
||||
allj = json.dumps(allp,indent=2)
|
||||
return render_template('prefix.html', prefix_json=allk, amounts=alla, all_pics_json=allj, index="1")
|
||||
|
||||
|
||||
# secret Cles0wAwm9o4N_jnPNXOwgH2-DJXKw
|
||||
# name stm21
|
||||
|
||||
def image_getter(thread_url):
    """Download the image linked from a thread page into the tt1/ folder.

    Fetches *thread_url*, takes the first 'a.title' anchor's href as the
    image URL, and streams it to disk unless a file of the same name is
    already present. Sleeps 0.75s after a successful download to be polite.
    Returns None in every case (pure side effects).
    """
    r = requests.get(thread_url)  #, auth=('user', 'pass'))
    soup = bs(r.text, 'html.parser')
    # Original line `a.title[href].attr('href')` referenced undefined
    # names `a` and `href` (NameError); resolve the link via the soup.
    anchor = soup.select_one('a.title')
    if anchor is None:
        print(" - No a.title link found at %s" % thread_url)
        return
    img_url = anchor.attrs['href']
    folder = "tt1/"
    (head, tail) = os.path.split(img_url)

    if os.path.exists(os.path.join(folder, tail)):
        # Original formatted undefined `i`; report the filename instead.
        print(" + Image %s was already downloaded." % tail)
        return

    print(" getting %s" % img_url)
    # Original streamed undefined `imgsrc`; the URL extracted above is
    # what was meant.
    r = requests.get(img_url, stream=True)
    if r.status_code == 200:
        with open(os.path.join(folder, tail), 'wb') as f:
            r.raw.decode_content = True
            shutil.copyfileobj(r.raw, f)
        print(" + Done with image.")
        time.sleep(0.75)
    else:
        print(" - Failed with image.")
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# hd2/peter_home_offload/Documents/scripts/chan/
|
||||
# all the folders
|
||||
# display images in a folder, mark them if [yes] save to database.
|
||||
# [clean] the folder, delete 'unmarked' ones
|
||||
#
|
||||
|
||||
def get_pic_by_id(g):
    """Return, as a JSON string, the pics row with id *g* joined to its
    tags via pictag, or 'null' when no such row exists."""
    con = sqlite3.connect(LIBBASE + 'ooo.db')
    con.row_factory = dict_factory  # rows come back as dicts
    cur = con.cursor()
    try:
        # Parameterized query: the original %-interpolated g straight
        # into the SQL text (injection risk for string-ish ids).
        cur.execute(
            "SELECT * FROM pics p JOIN pictag pt ON pt.pic=p.id "
            "JOIN tags t ON t.id=pt.tag WHERE p.id=?",
            (g,))
        return json.dumps(cur.fetchone())
    finally:
        con.close()  # original leaked the connection
|
||||
|
||||
def get_all_pics(j=0):
    """Return every row of the pics table as a list of dicts, or as a
    JSON string when *j* is truthy."""
    con = sqlite3.connect(LIBBASE + 'ooo.db')
    con.row_factory = dict_factory  # rows come back as dicts
    cur = con.cursor()
    try:
        #cur.execute("SELECT * FROM pics p JOIN pictag pt ON pt.pic=p.id JOIN tags t ON t.id=pt.tag GROUP BY p.path")
        cur.execute("SELECT * FROM pics")
        rows = cur.fetchall()
    finally:
        con.close()  # original leaked the connection
    if j:
        return json.dumps(rows)
    return rows
|
||||
|
||||
def get_all_tags(which=1):
    """Return, as a JSON string, all tags rows whose app column equals
    *which*, ordered by label."""
    con = sqlite3.connect(LIBBASE + 'ooo.db')
    con.row_factory = dict_factory  # rows come back as dicts
    cur = con.cursor()
    try:
        # Parameterized query instead of the original %-interpolation
        # (injection risk, and mis-quoted the table name as 'tags').
        cur.execute("SELECT * FROM tags WHERE app=? ORDER BY label",
                    (which,))
        return json.dumps(cur.fetchall())
    finally:
        con.close()  # original leaked the connection
|
||||
|
||||
|
||||
|
||||
def add_pic(path):
    """Insert *path* into the pics table and return the new row id as a
    JSON string."""
    con = sqlite3.connect(LIBBASE + 'ooo.db')
    con.row_factory = dict_factory
    cur = con.cursor()
    try:
        # Parameterized insert: the original %-interpolated the path into
        # the SQL text, so any path containing a quote broke the query
        # (and invited injection).
        cur.execute("INSERT INTO pics (path) VALUES(?)", (str(path),))
        con.commit()
        return json.dumps(cur.lastrowid)
    finally:
        con.close()  # original leaked the connection
|
||||
|
||||
|
||||
def add_pic_tag(tag_id, pic_id):
    """Link pic *pic_id* to tag *tag_id* in the pictag join table and
    return a JSON status object carrying the new row id."""
    con = sqlite3.connect(LIBBASE + 'ooo.db')
    cur = con.cursor()
    try:
        # TODO select and dont duplicate
        # Parameterized insert instead of the original %-interpolation.
        cur.execute("INSERT INTO pictag (pic,tag) VALUES(?,?)",
                    (str(pic_id), str(tag_id)))
        con.commit()
        return json.dumps({'status': 'success', 'id': cur.lastrowid})
    finally:
        con.close()  # original leaked the connection
|
||||
|
||||
def add_tag(tag):
    """Insert *tag* as a new label in the tags table and return the new
    row id (an int — unlike the other add_* helpers this is not JSON)."""
    con = sqlite3.connect(LIBBASE + 'ooo.db')
    con.row_factory = dict_factory
    cur = con.cursor()
    try:
        # Parameterized insert: the original %-interpolated the label, so
        # tags containing a quote broke the statement (injection risk).
        cur.execute("INSERT INTO tags (label) VALUES(?)", (tag,))
        con.commit()
        return cur.lastrowid
    finally:
        con.close()  # original leaked the connection
|
||||
|
||||
|
||||
def add_pic_tag_str(pic_id, tag_str):
    """Attach the tag named *tag_str* to pic *pic_id*, creating the tag
    first if no row with that label exists. Returns add_pic_tag's JSON
    status string."""
    con = sqlite3.connect(LIBBASE + 'ooo.db')
    con.row_factory = dict_factory  # rows come back as dicts
    cur = con.cursor()
    try:
        # Parameterized lookup instead of the original %-interpolation.
        cur.execute("SELECT * FROM tags WHERE label=?", (str(tag_str),))
        result = cur.fetchone()
    finally:
        con.close()  # original leaked the connection
    if result:
        tag_id = result['id']
    else:
        tag_id = add_tag(tag_str)
    return add_pic_tag(pic_id, tag_id)
|
||||
|
||||
|
||||
|
||||
|
||||
def start_tt():
    """Scrape the subreddit's weekly-top page and download each linked
    image via image_getter.

    Side effects only: writes temp.html (page snapshot for debugging),
    prints progress, and saves images under tt1/.
    """
    global q

    url = 'https://www.reddit.com/r/TinyTits/top/?t=week'
    r = requests.get(url)  #, auth=('user', 'pass'))
    # Keep a snapshot of the fetched page; `with` closes the handle the
    # original bare open(...).write(...) leaked.
    with open('temp.html', 'w') as fh:
        fh.write(r.text)
    soup = bs(r.text, 'html.parser')
    threads = []
    qqueue = []
    i = 0

    print(soup.get_text())

    for img in soup.select('a.title'):
        # Tag.attrs is a dict — the original called it like a function
        # (`img.attrs('href')`), a guaranteed TypeError.
        link = img.attrs['href']
        print(link)
        image_getter(link)
        i += 1

    # NOTE(review): qqueue is never populated above, so this pool never
    # submits anything — kept to preserve the original control flow.
    print("There are %i images to fetch." % len(qqueue))
    pool = ThreadPoolExecutor(max_workers=5)
    for q in qqueue:
        q["total"] = len(qqueue)
        future = pool.submit(image_getter, q)
|
||||
|
||||
|
||||
|
||||
|
||||
def _human_size(si):
    """Format a byte count as 'N b' / 'N K' / 'N M' / 'N G' (integer-truncated)."""
    if si < 1024:
        return "%s b" % si
    if si < 1024 * 1024:
        return "%s K" % int(si / 1024.0)
    if si < 1024 * 1024 * 1024:
        return "%s M" % int(si / (1024 * 1024.0))
    return "%s G" % int(si / (1024 * 1024 * 1024.0))


def plain_sort(index=''):
    """Walk DOCBASE and render an indented HTML (pure.css grid) listing of
    every folder — with its depth, subdir/file counts, total size, and the
    most common file type — followed by its files sorted by extension.

    *index* is accepted for route compatibility but unused.
    """
    root_len = len(DOCBASE)

    output = "<link href='lib/pics.css' rel='stylesheet' />\n"
    output += tag('h2', 'Downloaded Files')
    print(' --starting dir listing')

    # Grid templates: (indent columns, content columns, content).
    folder_line = '<div class="pure-g"><div class="pure-u-%i-24"></div><div class="pure-u-%i-24"><b>%s</b></div></div>\n'
    file_line = '<div class="pure-g"><div class="pure-u-%i-24"></div><div class="pure-u-%i-24">%s</div></div>\n'

    for (root, dirs, files) in os.walk(DOCBASE, topdown=True):
        dirs.sort()
        files.sort()

        print(' --%s' % root, end=", ")

        myroot = root[root_len:]
        # BUG FIX: the original split on "\/" — a non-escape that is the
        # literal two-character string backslash+slash, which never occurs
        # in a POSIX path — so path_last was always the whole relative
        # path. Split on '/' so it really is the last component.
        parts = myroot.split("/")
        path_len = ""  # " " * len(parts[:-1])  — indentation prefix, currently disabled
        path_last = parts[-1]
        (fi, di, si) = overview_folder(root)  # num files, num dirs, size

        si = _human_size(si)

        default_cmd = most_common(files)
        if not default_cmd:
            default_cmd = "NO"

        this_line = "%s\n\t%s \tDepth: %i \t Subdirs: %i \t # Files: %i \t # Total size: %s \n" % \
            (path_len + cleantext(path_last), default_cmd, depth(root), len(dirs), fi, si)

        # Indent one grid column per directory level, capped at 22 so the
        # content column (24 - depth) never collapses.
        mydepth = depth(root) + 1
        if mydepth > 22:
            mydepth = 22

        output += folder_line % (mydepth, 24 - mydepth, this_line)

        #moveit(files, root, default_cmd)
        try:
            # Files grouped by extension first, then by name.
            for F in sorted(files,
                            key=lambda x: os.path.splitext(x)[1] + os.path.split(x)[1]):
                output += file_line % (mydepth, 24 - mydepth, cleantext(F))
        except Exception as e:
            # Best-effort listing: log and keep walking if one folder's
            # entries misbehave (e.g. undecodable names).
            print("**** Some sort of error with %s" % files, end=" ")
            print(e)

    output += tag("p", "All done with files.")
    return output
|
||||
|
||||
|
||||
if __name__ == '__main__':

    # Script-only imports: the scraping helpers defined above (start_tt,
    # image_getter) look these names up as module globals at call time,
    # so importing them here makes them available only when the file is
    # run directly.
    from queue import Queue
    from bs4 import BeautifulSoup as bs
    from concurrent.futures import ThreadPoolExecutor
    import requests, os, json, shutil, time
    import requests
    import requests.auth
    #import praw

    # Work queue; start_tt declares `global q` and rebinds it.
    q = Queue()

    def start_tt2():
        """Obtain a reddit OAuth token via password grant and print the
        authenticated /api/v1/me response.

        NOTE(review): credentials and a bearer token are hard-coded below —
        they should live in config/env, not in source control.
        """
        client_auth = requests.auth.HTTPBasicAuth('DDsmF856ZAookA', 'Cles0wAwm9o4N_jnPNXOwgH2-DJXKw')
        post_data = {"grant_type": "password", "username": "ssttmm2323", "password": "ssttmm2323"}
        headers = {"User-Agent": "Totally Chrome"}
        response = requests.post("https://www.reddit.com/api/v1/access_token", auth=client_auth, data=post_data, headers=headers)
        print(response.json())

        # Exercise a (hard-coded, presumably stale) bearer token.
        headers = {"Authorization": "bearer fhTdafZI-0ClEzzYORfBSCR7x3M", "User-Agent": "Totally Chrome"}
        response = requests.get("https://oauth.reddit.com/api/v1/me", headers=headers)
        print(response.json())

    def start_tt3():
        """Construct a praw.Reddit client with hard-coded credentials.

        NOTE(review): `import praw` is commented out above, so calling this
        raises NameError; the invocation below is disabled accordingly.
        """
        reddit = praw.Reddit( client_id="DDsmF856ZAookA",
                              client_secret="Cles0wAwm9o4N_jnPNXOwgH2-DJXKw",
                              password="ssttmm2323",
                              user_agent="Totally Chrome",
                              username="ssttmm2323",)

    # start_tt3()
|
||||
Loading…
Reference in New Issue