Add Metrics
parent 8701f54c5e
commit 5956dada75
@@ -11,6 +11,7 @@ legacy/*
 static/dmca/*
 auth_data.json
 files.db
+stats.db

 # Byte-compiled / optimized / DLL files
 __pycache__/
QuadFile/db.py
@@ -1,25 +1,33 @@
 import sqlite3
 import time
+from datetime import datetime, timedelta

 # TODO: (Hopefully) add support for DB types other than SQLite

 def connect(target):
     return sqlite3.connect(target)

-def add_file(filename, deletekey):
+def add_file(filename, deletekey, filesize):
     db = connect('files.db')
-    db.execute('INSERT INTO files (file, time, accessed, deletekey) VALUES (?, ?, ?, ?)',
-               [filename, time.time(), time.time(), deletekey])
+    db.execute('INSERT INTO files (file, time, accessed, deletekey, filesize) VALUES (?, ?, ?, ?, ?)',
+               [filename, time.time(), time.time(), deletekey, filesize])
     db.commit()
     db.close()

 def update_file(filename):
     db = connect('files.db')
-    db.execute('UPDATE files SET accessed = ? WHERE file = ?',
+    db.execute("UPDATE files SET accessed = ?, views = views + 1 WHERE file = ?",
                [time.time(), filename])
     db.commit()
     db.close()

+def add_deletiontime(deletiontime, filename):
+    db = connect('files.db')
+    db.execute("UPDATE files SET deletiontime = ? WHERE file = ?",
+               [deletiontime, filename])
+    db.commit()
+    db.close()
+
 def add_b2(filename, file_id):
     db = connect('files.db')
     db.execute('UPDATE files SET b2 = ? WHERE file = ?',
@@ -56,4 +64,199 @@ def check_value(column, value):
     if rv:
         return False
-    else:
-        return True
+    return True
+
+# Metrics for Prometheus/InfluxDB/Grafana
+
+## QR Code Scanning
+def add_qrscan(ip):
+    db = connect('stats.db')
+    db.execute('INSERT INTO qrscan (time, ip) VALUES (?, ?)',
+               [time.time(), ip])
+    db.commit()
+    db.close()
+
+def total_qrscan():
+    db = connect('stats.db')
+    cur = db.execute('SELECT COUNT(*) FROM qrscan').fetchone()[0]
+    db.close()
+    return 'qrcode_total_scans ' + str(cur) + '\n\n'
+
+## Files
+### Total Files Uploaded now
+def total_files_live():
+    db = connect('files.db')
+    cur = db.execute('SELECT COUNT(*) FROM files').fetchone()[0]
+    db.close()
+    return 'total_files_live ' + str(cur) + '\n'
+
+def total_files():
+    db = connect('stats.db')
+    cur = db.execute("SELECT SUM(uploads) FROM total_filetype").fetchone()[0]
+    db.close()
+    # Just return 0 if empty (instead of "None")
+    if not cur:
+        return 'total_files ' + '0' + '\n\n'
+    return 'total_files ' + str(cur) + '\n\n'
+
+### File Size per filetype (to be added for total, then use live file count for total)
+#### Return a List of all filetypes
+def list_filetypes():
+    db = connect('files.db')
+    cur = db.execute('SELECT file FROM files')
+    rv = cur.fetchall()
+    # Convert Tuple to List (lowercase filetypes)
+    filenames = [r[0].lower() for r in rv]
+    # Fancy python to set the text to "None" if it has no ., and get everything after the . if it does
+    filenames = ['None' if '.' not in s else s.rsplit('.', 1)[1] for s in filenames]
+    noduplicates = list(set(filenames))
+    db.close()
+    return noduplicates
+
+#### Return live filesize for given filetype
+def list_sizes(filetype):
+    db = connect('files.db')
+    # Need to do two different queries, one for exts and one without
+    if filetype == "None":
+        cur = db.execute("SELECT SUM(filesize) FROM files WHERE file NOT LIKE ?", ('%.%',))
+    else:
+        cur = db.execute("SELECT SUM(filesize) FROM files WHERE file LIKE ?", (f'%{filetype}%',))
+    rv = cur.fetchone()
+    db.close()
+    return rv
+
+#### Return a List of total filetypes
+def list_totalfiletypes():
+    db = connect('stats.db')
+    cur = db.execute('SELECT filetype FROM total_filetype')
+    rv = cur.fetchall()
+    filenames = [r[0].lower() for r in rv]
+    noduplicates = list(set(filenames))
+    db.close()
+    return noduplicates
+
+#### Return total filesize for given filetype
+def list_totalsizes(filetype):
+    db = connect('stats.db')
+    cur = db.execute("SELECT total_size FROM total_filetype WHERE filetype LIKE ?", (f'%{filetype}%',))
+    rv = cur.fetchall()
+    db.close()
+    return rv
+
+#### Return number of live files for given filetype
+def live_typecount(filetype):
+    db = connect('files.db')
+    if filetype == "None":
+        cur = db.execute("SELECT COUNT(file) FROM files WHERE file NOT LIKE ?", ('%.%',))
+    else:
+        cur = db.execute("SELECT COUNT(file) FROM files WHERE file LIKE ?", (f'%{filetype}%',))
+    rv = cur.fetchall()
+    db.close()
+    return rv
+
+### Total Files uploaded by Filetype
+def addstats_file(filetype, filesize):
+    db = connect('stats.db')
+    try:
+        db.execute("INSERT INTO total_filetype (filetype, uploads, total_size) VALUES (?, 1, ?)", [filetype, filesize])
+    except sqlite3.IntegrityError:
+        db.execute("UPDATE total_filetype SET uploads = uploads + 1, total_size = total_size + ? WHERE filetype = ?", [filesize, filetype])
+    db.commit()
+    db.close()
+
+def total_filetype():
+    db = connect('stats.db')
+    cur = db.execute('SELECT * FROM total_filetype')
+    rv = cur.fetchall()
+    db.close()
+    return rv
+
+### Return number of live files for given filetype
+def list_typecount(filetype):
+    db = connect('files.db')
+    cur = db.execute("SELECT COUNT(file) FROM files WHERE file LIKE ?", (f'%{filetype}%',))
+    rv = cur.fetchall()
+    db.close()
+    return rv
+
+### Total File Views
+def add_fileview(filetype):
+    db = connect('stats.db')
+    db.execute("UPDATE total_filetype SET views = views + 1 WHERE filetype = ?", [filetype])
+    db.commit()
+    db.close()
+
+### File Views per filetype
+def total_fileview(filetype):
+    db = connect('stats.db')
+    cur = db.execute("SELECT views FROM total_filetype WHERE filetype LIKE ?", (f'%{filetype}%',))
+    rv = cur.fetchall()
+    db.close()
+    return rv
+
+### File Deletion
+### File Deletion time max (file with the longest deletion time)
+
+def longest_deletiontime():
+    db = connect('files.db')
+    cur = db.execute("SELECT deletiontime FROM files ORDER BY deletiontime DESC")
+    rv = cur.fetchone()
+    db.close()
+    # Just return 0 if the table is empty
+    if not rv or not rv[0]:
+        return 0
+    return rv[0]
+
+def average_deletiontime():
+    db = connect('files.db')
+    cur = db.execute("SELECT deletiontime FROM files WHERE deletiontime IS NOT NULL")
+    # Fetch Tuple List of all times
+    rv = cur.fetchall()
+    db.close()
+    # Convert Tuple to List
+    dates = [r[0] for r in rv]
+    # Just return 0 if the list is empty
+    if not dates:
+        return 0
+    # Use List Comprehension to convert list to list with datetimes
+    dates = [datetime.strptime(s, '%Y-%m-%d %H:%M:%S.%f') for s in dates]
+    # Likely not the fastest, but take the difference from reference, and sum, avg the difference
+    # then add the avg to the reference date https://stackoverflow.com/questions/19681703/average-time-for-datetime-list
+    reference_date = datetime(1900, 1, 1)
+    return reference_date + sum([date - reference_date for date in dates], timedelta()) / len(dates)
+
+### File Deletion time per filetype (avg file deletion time on filetype)
+
+### File Deletion Cleaner total
+
+def add_cleanerdeletion():
+    db = connect('stats.db')
+    db.execute("UPDATE stats SET value = value + 1 WHERE variable = 'timedeletions'")
+    db.commit()
+    db.close()
+
+### File Deletion URL Total
+
+def add_urldeletion():
+    db = connect('stats.db')
+    db.execute("UPDATE stats SET value = value + 1 WHERE variable = 'urldeletions'")
+    db.commit()
+    db.close()
+
+## Views / Generic stats
+
+def add_stat(stat):
+    db = connect('stats.db')
+    db.execute("UPDATE stats SET value = value + 1 WHERE variable = ?", [stat])
+    db.commit()
+    db.close()
+
+def read_stat(stat):
+    db = connect('stats.db')
+    cur = db.execute("SELECT value FROM stats WHERE variable = ?", [stat])
+    rv = cur.fetchone()[0]
+    db.close()
+    return rv
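The helpers above open stats.db and files.db relative to the working directory and return either raw rows or ready-made 'name value' exposition lines. A hypothetical smoke test (not part of the commit) for the upload counters, assuming statsschema.sql from later in this commit is present and the QuadFile package is importable:

```python
# Hypothetical smoke test for the stats helpers above (not part of the commit).
import os
import sqlite3

from QuadFile import db

# Build a throwaway stats.db from the schema shipped with this commit
if not os.path.exists('stats.db'):
    conn = sqlite3.connect('stats.db')
    with open('statsschema.sql') as f:
        conn.executescript(f.read())
    conn.close()

# Record two uploads of the same type and read the totals back
db.addstats_file('png', 12345)
db.addstats_file('png', 54321)
print(db.total_files())     # -> 'total_files 2\n\n'
print(db.total_filetype())  # -> [('png', 2, 0, 66666)]
```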
@@ -83,7 +83,7 @@ Ensure the correct key is listed in known_hosts (ssh in with forwarding yourself,

 Make sure to replace user with whatever user you want to run QuadFile as (I recommend a separate user)

-```
+```unit
 [Unit]
 Description=QuadFile + Gunicorn Server
 After=network.target
@@ -107,7 +107,7 @@ WantedBy = multi-user.target
 To have dynamic domain support, you must pass a few things through on top of 'just proxy_pass it to gunicorn'.
 This example includes a permanent redirect to https.

-```
+```nginx
 # Global redirect for http --> https
 server {
     listen 80;
@@ -148,5 +148,4 @@ server {
         autoindex on;
     }
 }
 ```
-
@@ -16,6 +16,9 @@ config["PORT"] = 8282
 ## and again, seriously, leave this as theme unless you've copied and changed it.
 config["THEME_FOLDER"] = "quadfile-theme"

+# Enable Stats/metrics endpoint
+config["METRICS"] = True
+
 # Will output more logging data from QuadFile's logger
 config["DEBUG"] = False
@@ -16,6 +16,9 @@ config["PORT"] = 8282
 ## and again, seriously, leave this as theme unless you've copied and changed it.
 config["THEME_FOLDER"] = "theme"

+# Enable Stats/metrics endpoint
+config["METRICS"] = True
+
 # Will output more logging data from QuadFile's logger
 config["DEBUG"] = False
@@ -2,4 +2,4 @@ flask==0.12.2
 python-magic==0.4.15
 gunicorn==20.0.4
 secrets==1.0.2
-
+prometheus-flask-exporter
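prometheus-flask-exporter is pinned here but never imported in run.py, which builds its exposition text by hand instead. If the exporter were wired in, a minimal sketch using the library's standard entry point could look like this (the info metric name is made up for illustration):

```python
# Sketch only - this commit does not actually use prometheus-flask-exporter.
from flask import Flask
from prometheus_flask_exporter import PrometheusMetrics

app = Flask(__name__)
metrics = PrometheusMetrics(app)  # exposes default per-request metrics on /metrics

# Optional static info metric (hypothetical name/labels)
metrics.info('quadfile_app_info', 'QuadFile application info', version='unknown')
```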
run.py
@@ -1,27 +1,30 @@
 #!/usr/bin/env python3
-from flask import Flask, request, redirect, url_for, send_from_directory, abort, render_template, make_response
-from werkzeug.utils import secure_filename
-from werkzeug.middleware.proxy_fix import ProxyFix
-from threading import Thread, Timer
-import logging
-import os, datetime, time, sys
-import random
+from datetime import datetime, timedelta
+import json
+import logging
+import os
+import random
+import re
+# import magic
+import secrets
+import sys
+import time
+from random import randint
+from threading import Thread, Timer

+from flask import (Flask, abort, make_response, redirect, render_template,
+                   request, send_from_directory, url_for)
+from werkzeug.middleware.proxy_fix import ProxyFix
+from werkzeug.utils import secure_filename

 # Import our configuration
 from conf import config

 # Import QuadFile stuff
-from QuadFile import db
+from QuadFile import application, db
 from QuadFile.output import print_log, time_to_string
-from QuadFile import application


 app = Flask(__name__, template_folder=config['THEME_FOLDER'] + "/templates", static_folder=config['THEME_FOLDER'] + "/static")
-app.config['EXPLAIN_TEMPLATE_LOADING'] = True
+app.config['EXPLAIN_TEMPLATE_LOADING'] = False
 app.wsgi_app = ProxyFix(app.wsgi_app)

 # TODO: Try to turn these into functions or something I dunno
@@ -38,6 +41,15 @@ if not os.path.exists('files.db'):
         quit()
     else:
         print_log('Notice', 'Database created')
+if config["METRICS"]:
+    if not os.path.exists('stats.db'):
+        print_log('Warning', 'Stats Database not found, attempting to create')
+        os.system('sqlite3 stats.db < statsschema.sql')
+        if not os.path.exists('stats.db'):
+            print_log('Warning', 'Could not create stats database. Is sqlite3 available?')
+            quit()
+        else:
+            print_log('Notice', 'Database created')
 if config["EXTENDED_DEBUG"] == False:
     log = logging.getLogger('werkzeug')
     log.setLevel(logging.ERROR)
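Shelling out to the sqlite3 binary fails silently when the CLI is missing, hence the "Is sqlite3 available?" fallback warning. A hedged alternative sketch that creates stats.db with Python's built-in sqlite3 module instead of the external binary:

```python
# Sketch of a CLI-free way to create stats.db (assumes statsschema.sql exists).
import sqlite3

def create_stats_db(schema_path='statsschema.sql', db_path='stats.db'):
    with open(schema_path) as f:
        schema = f.read()
    conn = sqlite3.connect(db_path)
    conn.executescript(schema)  # runs the CREATE TABLE / INSERT statements
    conn.close()
```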
@@ -55,6 +67,8 @@ def cleaner_thread():

 def delete_file(file):
     print_log('Thread', 'Removing old file "' + file + '"')
+    if config["METRICS"]:
+        db.add_stat("timedeletions")
     try:
         os.remove(os.path.join(config["UPLOAD_FOLDER"], file))
     except Exception:
@@ -69,12 +83,12 @@ def delete_old():
     maxd = config["MAXDAYS"]

     files = [f for f in os.listdir(config['UPLOAD_FOLDER'])]
-    # at some point I should make
+    # TODO: Calculate deletion times at upload, and add to DB (for better stats, and easier handling) #10
     if config["USE_0x0_DELETION"]:
         for f in files:
             stat = os.stat(os.path.join(config['UPLOAD_FOLDER'], f))
             systime = time.time()
-            age = datetime.timedelta(seconds = systime - stat.st_mtime).days
+            age = timedelta(seconds = systime - stat.st_mtime).days
             maxage = mind + (-maxd + mind) * (stat.st_size / maxs - 1) ** 3
             if age >= maxage:
                 delete_file(f)
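The maxage expression is the 0x0-style retention curve: a file at MAX_FILESIZE keeps only MINDAYS, an empty file keeps MAXDAYS, and sizes in between follow a cubic falloff. A worked example with assumed (hypothetical) config values:

```python
# Hypothetical config values, only for illustrating the curve.
maxs = 256 * 1024 * 1024  # MAX_FILESIZE: 256 MiB
mind = 30                 # MINDAYS
maxd = 365                # MAXDAYS

def lifetime_days(filesize):
    # Same expression as maxage above
    return mind + (-maxd + mind) * (filesize / maxs - 1) ** 3

print(lifetime_days(0))          # 365.0  - an empty file keeps the maximum age
print(lifetime_days(maxs // 2))  # 71.875 - a half-size file
print(lifetime_days(maxs))       # 30.0   - a file at the size limit
```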
@@ -101,6 +115,87 @@ def allowed_file(filename):
     else:
         return '.' in filename and filename.rsplit('.', 1)[1] in config["ALLOWED_EXTENSIONS"]

+def generateMetrics():
+    # Total QR Scans
+    metrics = "# QR Code Scanning\n"
+    metrics = metrics + db.total_qrscan()
+
+    # File Stats
+    metrics = metrics + "# File Stats\n"
+
+    # Total Living Files
+    metrics = metrics + db.total_files_live()
+    # Total Files Ever Uploaded
+    metrics = metrics + str(db.total_files())
+
+    # On Disk File Sizes by Type
+    filenames = db.list_filetypes()
+    for extension in filenames:
+        for size in db.list_sizes(extension):
+            if not size: # Return 0 instead of NoneType
+                filesize = 0
+                metrics = metrics + "filesize_live_" + extension + " " + str(filesize) + "\n"
+            else:
+                metrics = metrics + "filesize_live_" + extension + " " + str(size) + "\n"
+    metrics = metrics + "\n"
+
+    # Total File Sizes by Type
+    filenames = db.list_totalfiletypes()
+    for extension in filenames:
+        for size in db.list_totalsizes(extension):
+            metrics = metrics + "filesize_total_" + extension + " " + str(size[0]) + "\n"
+    metrics = metrics + "\n"
+
+    # On Disk Files by Type
+    filenames = db.list_filetypes()
+    for extension in filenames:
+        for size in db.live_typecount(extension):
+            metrics = metrics + "filetype_live_" + extension + " " + str(size[0]) + "\n"
+    metrics = metrics + "\n"
+
+    # Total Files by Type
+    total_filetypes = db.total_filetype()
+    for filetypes in total_filetypes:
+        metrics = metrics + "filetype_total_" + filetypes[0] + " " + str(filetypes[1]) + "\n"
+    metrics = metrics + "\n"
+
+    # Total Views per Type
+    filenames = db.list_totalfiletypes()
+    for extension in filenames:
+        for size in db.total_fileview(extension):
+            metrics = metrics + "views_total_" + extension + " " + str(size[0]) + "\n"
+    metrics = metrics + "\n"
+
+    # File Deletion
+    metrics = metrics + "# Longest File Deletion Expiry\n"
+    metrics = metrics + "total_deletions_url " + str(db.read_stat('urldeletions')) + "\n"
+    metrics = metrics + "total_deletions_time " + str(db.read_stat('timedeletions')) + "\n"
+    metrics = metrics + "longest_expiry " + str(db.longest_deletiontime()) + "\n"
+
+    metrics = metrics + "# Average File Deletion Expiry\n"
+    metrics = metrics + "average_expiry " + str(db.average_deletiontime()) + "\n"
+    metrics = metrics + "\n"
+
+    # Page Errors
+    metrics = metrics + "# Total Page Errors\n"
+    metrics = metrics + "total_403 " + str(db.read_stat(403)) + "\n"
+    metrics = metrics + "total_404 " + str(db.read_stat(404)) + "\n"
+    metrics = metrics + "total_413 " + str(db.read_stat(413)) + "\n"
+    metrics = metrics + "total_418 " + str(db.read_stat(418)) + "\n"
+    metrics = metrics + "total_500 " + str(db.read_stat(500)) + "\n"
+    metrics = metrics + "total_invalid_deletion_key " + str(db.read_stat('invaliddeletionkey')) + "\n\n"
+
+    # Page Views
+    metrics = metrics + "# Total Page Views\n"
+    metrics = metrics + "views_about " + str(db.read_stat('aboutviews')) + "\n"
+    metrics = metrics + "views_faq " + str(db.read_stat('faqviews')) + "\n"
+    metrics = metrics + "views_services " + str(db.read_stat('servicesviews')) + "\n"
+    metrics = metrics + "views_welcome " + str(db.read_stat('welcomeviews')) + "\n"
+    metrics = metrics + "views_dmca " + str(db.read_stat('dmcaviews')) + "\n"
+    metrics = metrics + "views_czb " + str(db.read_stat('czb')) + "\n\n"
+
+    return metrics
+
 @app.route('/', methods=['GET', 'POST'])
 def upload_file():
     if request.method == 'POST':
@@ -113,13 +208,31 @@ def upload_file():
         # Only continue if a file that's allowed gets submitted.
         if file and allowed_file(file.filename):
             filename = secure_filename(file.filename)
-            if filename.find(".")!=-1: #check if filename has a .(to check if it should split ext)
+            if '.' in filename: #check if filename has a .(to check if it should split ext)
                 filename = secrets.token_urlsafe(3) + '.' + filename.rsplit('.',1)[1]
             else:
                 filename = secrets.token_urlsafe(3)
+
+            # Do Metrics Stuffs
+            if config["METRICS"]:
+                # Calculate File Size
+                file = request.files['file']
+                file.seek(0, 2)
+                filesize = file.tell()
+                file.seek(0, 0) # seek back to start of file so it actually saves
+
+                # Strip nasty filetypes for sql injection, then add it for totals
+
+                if '.' not in filename:
+                    db.addstats_file("None", filesize)
+                else:
+                    filetype = filename.rsplit('.',1)[1]
+                    db.addstats_file(filetype, filesize)
+            else:
+                filesize = 0
+
             deletekey = secrets.token_urlsafe(10)
-            thread1 = Thread(target = db.add_file, args = (filename, deletekey))
+            thread1 = Thread(target = db.add_file, args = (filename, deletekey, filesize))
             thread1.start()
             print_log('Thread', 'Adding file to DB', print_debug)
             file.save(os.path.join(config['UPLOAD_FOLDER'], filename))
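The size probe above seeks to the end of the upload stream, reads the offset, then rewinds so file.save() still writes the whole upload. The same trick as a standalone helper, spelled with the named os constants:

```python
import io
import os

def stream_size(fileobj):
    """Return the size of a seekable stream in bytes and rewind it."""
    fileobj.seek(0, os.SEEK_END)  # jump to the end of the stream
    size = fileobj.tell()         # current offset == size in bytes
    fileobj.seek(0, os.SEEK_SET)  # rewind so a later save()/read() gets everything
    return size

print(stream_size(io.BytesIO(b'hello')))  # 5
```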
@@ -129,8 +242,29 @@ def upload_file():
             data["url"] = request.host_url + filename
             if config["GEN_DELETEKEY"]:
                 data["deletionurl"] = request.host_url + "delete/" + deletekey
-            print_log('Main', 'New file processed "' + filename + '"')
+
+            # Add deletion time to the database - not used yet, but once the main server has
+            # all the files in the db with it, it can be used to delete files properly.
+            if config["USE_0x0_DELETION"]:
+                maxs = config["MAX_FILESIZE"]
+                mind = config["MINDAYS"]
+                maxd = config["MAXDAYS"]
+
+                # Get filesize
+                stat = os.stat(os.path.join(config['UPLOAD_FOLDER'], filename))
+                # Calculate how long the file should be alive for
+                lifetime = mind + (-maxd + mind) * (stat.st_size / maxs - 1) ** 3
+                # Add max age to current time to get the proper time it should delete
+                deletiontime = datetime.now() + timedelta(days=lifetime)
+                db.add_deletiontime(deletiontime, filename)
+            else:
+                deletiontime = time.time() + config["TIME"]
+                deletiontime = datetime.fromtimestamp(deletiontime)
+                print(deletiontime)
+                db.add_deletiontime(deletiontime, filename)
+
+            print_log('Main', 'New file processed "' + filename + '"')

             try:
                 if request.form["source"] == "web":
                     print_log('Web', 'Returned link page for "' + filename + '"', print_debug)
@@ -150,26 +284,40 @@ def upload_file():
 # Def all the static pages
 @app.route('/about')
 def about():
+    if config["METRICS"]:
+        db.add_stat("aboutviews")
     return render_template('about.html', page=config["SITE_DATA"])
 @app.route('/faq')
 def faq():
+    if config["METRICS"]:
+        db.add_stat("faqviews")
     return render_template('faq.html', page=config["SITE_DATA"])
 @app.route('/services')
 def services():
+    if config["METRICS"]:
+        db.add_stat("servicesviews")
     return render_template('services.html', page=config["SITE_DATA"])
 @app.route('/welcome')
 def welcome():
+    if config["METRICS"]:
+        db.add_stat("welcomeviews")
     video = random.choice(os.listdir(config['THEME_FOLDER'] + "/static/welcome/"))
     return render_template('welcome.html', page=config["SITE_DATA"], video=video)
 @app.route('/dmca')
 def dmca():
+    if config["METRICS"]:
+        db.add_stat("dmcaviews")
     video = random.choice(os.listdir(config['THEME_FOLDER'] + "/static/dmca/"))
     return render_template('dmca.html', page=config["SITE_DATA"], video=video)
 @app.route('/czb')
 def czb():
+    if config["METRICS"]:
+        db.add_stat("czb")
     return render_template('czb.html', page=config["SITE_DATA"])
 @app.route('/qr')
 def qr():
+    if config["METRICS"]:
+        db.add_qrscan(request.environ.get('HTTP_X_FORWARDED_FOR', request.remote_addr))
     video = random.choice(os.listdir(config['THEME_FOLDER'] + "/static/welcome/"))
     return render_template('qr.html', page=config["SITE_DATA"], video=video)
@@ -188,22 +336,30 @@ def robotsTxt():
 # Custom 404
 @app.errorhandler(404)
 def page_not_found(e):
-    return error_page(error="We couldn't find that. Are you sure you know what you're looking for?", code=404), 404
+    db.add_stat("404")
+    return error_page(error="We couldn't find that. Are you sure you know what you're looking for?", code=404), 404
 @app.errorhandler(500)
 def internal_error(e):
-    return error_page(error="Unknown error, try your upload again, or contact the admin.", code=500), 500
+    db.add_stat("500")
+    return error_page(error="Unknown error, try your upload again, or contact the admin.", code=500), 500
 @app.errorhandler(403)
 def no_permission(e):
-    return error_page(error="Permission denied, no snooping.", code=403), 403
+    db.add_stat("403")
+    return error_page(error="Permission denied, no snooping.", code=403), 403


 @app.route('/<filename>', methods=['GET'])
 def get_file(filename):
     print_log('Web', 'Hit "' + filename + '"')
-    try:
-        db.update_file(filename)
-    except Exception:
-        print_log('Warning', 'Unable to update access time. Is the file in the database?')
+    # try:
+    db.update_file(filename)
+    if config["METRICS"]:
+        if '.' not in filename:
+            db.add_fileview("None")
+        else:
+            db.add_fileview(filename.rsplit('.', 1)[1])
+    # except Exception:
+    #     print_log('Warning', 'Unable to update access time (and view) Is the file in the database?')
     if config["X-ACCEL-REDIRECT"]:
         r = make_response()
         r.headers['Content-Type'] = ''
@@ -226,9 +382,13 @@ def delete_file_key_api():
     df = db.get_file_from_key(deletekey)
     if not df:
         print_log('Web', 'Someone used an invalid deletion key', print_debug)
+        if config["METRICS"]:
+            db.add_stat("invaliddeletionkey")
         return "Error: Invalid deletion key!\n"
     for file in df:
         print_log('Thread', 'Deleting ' + file["file"] + ' with deletion key')
+        if config["METRICS"]:
+            db.add_stat("urldeletions")
         try:
             os.remove(os.path.join(config["UPLOAD_FOLDER"], file["file"]))
             db.delete_entry(file["file"])
@@ -245,9 +405,13 @@ def delete_file_key(deletekey):
     df = db.get_file_from_key(deletekey)
     if not df:
         print_log('Web', 'Someone used an invalid deletion key')
+        if config["METRICS"]:
+            db.add_stat("invaliddeletionkey")
         return error_page(error="Invalid deletion key, double check your key!", code=403), 403
     for file in df:
         print_log('Thread', 'Deleting ' + file["file"] + ' with deletion key')
+        if config["METRICS"]:
+            db.add_stat("urldeletions")
         try:
             os.remove(os.path.join(config["UPLOAD_FOLDER"], file["file"]))
             db.delete_entry(file["file"])
@@ -275,7 +439,16 @@ def nginx_error(error):
     else:
         return error_page(error="We literally have no idea what just happened, try again or contact the admin", code="Unknown")


+# Metrics for Grafana
+## This should be denied on the proxy, or disabled in config
+## Try not to calculate anything on this, use the functions to update a table in
+@app.route('/metrics/')
+def metrics():
+    if config["METRICS"]:
+        response = make_response(generateMetrics(), 200)
+        response.mimetype = "text/plain"
+        return response
+    return error_page(error="Metrics are disabled on this server :(", code=404), 404
 if config["DELETE_FILES"]:
     cleaner_thread()
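The endpoint serves the bare 'name value' lines as text/plain; the Prometheus text exposition format accepts this (HELP/TYPE metadata is optional). A quick manual check of the new route, assuming the server is running locally on the example port 8282 from the config above:

```python
# Quick manual check of the new endpoint (assumes QuadFile is running locally).
from urllib.request import urlopen

with urlopen('http://127.0.0.1:8282/metrics/') as resp:
    print(resp.read().decode())  # plain 'name value' lines built by generateMetrics()
```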
@@ -5,5 +5,8 @@ create table files (
     b2 text,
     time int,
     accessed int,
-    deletekey text
+    deletiontime text,
+    deletekey text,
+    filesize int,
+    views int DEFAULT 0
 );
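The files table gains deletiontime, filesize and views columns here, so a files.db created from the previous schema will not have them and the new INSERT in db.add_file would fail. A hedged one-off migration sketch for an existing database:

```python
# One-off migration sketch for an existing files.db (not part of the commit).
import sqlite3

conn = sqlite3.connect('files.db')
for stmt in (
    "ALTER TABLE files ADD COLUMN deletiontime text",
    "ALTER TABLE files ADD COLUMN filesize int",
    "ALTER TABLE files ADD COLUMN views int DEFAULT 0",
):
    try:
        conn.execute(stmt)
    except sqlite3.OperationalError:
        pass  # column already exists
conn.commit()
conn.close()
```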
@@ -0,0 +1,37 @@
+-- noinspection SqlNoDataSourceInspectionForFile
+drop table if exists qrscan;
+create table qrscan (
+    time int,
+    ip text
+);
+
+drop table if exists stats;
+create table stats (
+    variable text UNIQUE primary key,
+    value int
+);
+
+drop table if exists total_filetype;
+create table total_filetype (
+    filetype text UNIQUE primary key,
+    uploads int DEFAULT 0,
+    views int DEFAULT 0,
+    total_size int
+);
+
+-- Prepopulate with needed stats
+INSERT INTO stats VALUES ('urldeletions', 0);
+INSERT INTO stats VALUES ('timedeletions', 0);
+INSERT INTO stats VALUES ('aboutviews', 0);
+INSERT INTO stats VALUES ('faqviews', 0);
+INSERT INTO stats VALUES ('servicesviews', 0);
+INSERT INTO stats VALUES ('welcomeviews', 0);
+INSERT INTO stats VALUES ('dmcaviews', 0);
+INSERT INTO stats VALUES ('czb', 0);
+INSERT INTO stats VALUES ('invaliddeletionkey', 0);
+
+INSERT INTO stats VALUES ('403', 0);
+INSERT INTO stats VALUES ('404', 0);
+INSERT INTO stats VALUES ('413', 0);
+INSERT INTO stats VALUES ('418', 0);
+INSERT INTO stats VALUES ('500', 0);