import os
import sys
import tempfile
import zipfile
import mimetypes
import gzip

from flask import Response, request, render_template, make_response

from .. import strings


class ShareModeWeb(object):
    """
    All of the web logic for share mode
    """
    def __init__(self, common, web):
        self.common = common
        self.common.log('ShareModeWeb', '__init__')

        self.web = web

        # Information about the file to be shared
        self.file_info = []
        self.is_zipped = False
        self.download_filename = None
        self.download_filesize = None
        self.gzip_filename = None
        self.gzip_filesize = None
        self.zip_writer = None

        # Counter used to hand each download request a unique id
        self.download_count = 0

        # If "Stop After First Download" is checked (stay_open == False), only allow
        # one download at a time.
        self.download_in_progress = False

        # If the client closes the OnionShare window while a download is in progress,
        # it should immediately stop serving the file. The client_cancel global is
        # used to tell the download function that the client is canceling the download.
        self.client_cancel = False

        self.define_routes()

    def define_routes(self):
        """
        The web app routes for sharing files
        """
        @self.web.app.route("/<slug_candidate>")
        def index(slug_candidate):
            self.web.check_slug_candidate(slug_candidate)
            return index_logic()

        @self.web.app.route("/")
        def index_public():
            if not self.common.settings.get('public_mode'):
                return self.web.error404()
            return index_logic()

        def index_logic(slug_candidate=''):
            """
            Render the template for the onionshare landing page.
            """
            self.web.add_request(self.web.REQUEST_LOAD, request.path)

            # Deny new downloads if "Stop After First Download" is checked and there is
            # currently a download
            deny_download = not self.web.stay_open and self.download_in_progress
            if deny_download:
                r = make_response(render_template('denied.html'))
                return self.web.add_security_headers(r)

            # If download is allowed to continue, serve download page.
            # Advertise the gzipped size when we will actually serve the
            # gzipped file for this browser.
            if self.should_use_gzip():
                filesize = self.gzip_filesize
            else:
                filesize = self.download_filesize

            # Build the template context once; only pass `slug` when one is
            # set so the template sees exactly the same variables as before
            # (previously this was two nearly-identical render calls).
            template_kwargs = {
                'file_info': self.file_info,
                'filename': os.path.basename(self.download_filename),
                'filesize': filesize,
                'filesize_human': self.common.human_readable_filesize(self.download_filesize),
                'is_zipped': self.is_zipped
            }
            if self.web.slug:
                template_kwargs['slug'] = self.web.slug
            r = make_response(render_template('send.html', **template_kwargs))
            return self.web.add_security_headers(r)

        @self.web.app.route("/<slug_candidate>/download")
        def download(slug_candidate):
            self.web.check_slug_candidate(slug_candidate)
            return download_logic()

        @self.web.app.route("/download")
        def download_public():
            if not self.common.settings.get('public_mode'):
                return self.web.error404()
            return download_logic()

        def download_logic(slug_candidate=''):
            """
            Download the zip file.
            """
            # Deny new downloads if "Stop After First Download" is checked and there is
            # currently a download
            deny_download = not self.web.stay_open and self.download_in_progress
            if deny_download:
                r = make_response(render_template('denied.html'))
                return self.web.add_security_headers(r)

            # Each download has a unique id
            download_id = self.download_count
            self.download_count += 1

            # Prepare some variables to use inside generate() function below
            # which is outside of the request context
            shutdown_func = request.environ.get('werkzeug.server.shutdown')
            path = request.path

            # If this is a zipped file, then serve as-is. If it's not zipped, then,
            # if the http client supports gzip compression, gzip the file first
            # and serve that
            use_gzip = self.should_use_gzip()
            if use_gzip:
                file_to_download = self.gzip_filename
                filesize = self.gzip_filesize
            else:
                file_to_download = self.download_filename
                filesize = self.download_filesize

            # Tell GUI the download started
            self.web.add_request(self.web.REQUEST_STARTED, path, {
                'id': download_id,
                'use_gzip': use_gzip
            })

            basename = os.path.basename(self.download_filename)

            def generate():
                # The user hasn't canceled the download
                self.client_cancel = False

                # Starting a new download
                if not self.web.stay_open:
                    self.download_in_progress = True

                chunk_size = 102400  # 100kb

                fp = open(file_to_download, 'rb')
                self.web.done = False
                canceled = False
                while not self.web.done:
                    # The user has canceled the download, so stop serving the file
                    if self.client_cancel:
                        self.web.add_request(self.web.REQUEST_CANCELED, path, {
                            'id': download_id
                        })
                        break

                    chunk = fp.read(chunk_size)
                    if chunk == b'':
                        self.web.done = True
                    else:
                        try:
                            yield chunk

                            # tell GUI the progress
                            downloaded_bytes = fp.tell()
                            percent = (1.0 * downloaded_bytes / filesize) * 100

                            # only output to stdout if running onionshare in CLI mode, or if using Linux (#203, #304)
                            if not self.web.is_gui or self.common.platform == 'Linux' or self.common.platform == 'BSD':
                                sys.stdout.write(
                                    "\r{0:s}, {1:.2f}% ".format(self.common.human_readable_filesize(downloaded_bytes), percent))
                                sys.stdout.flush()

                            self.web.add_request(self.web.REQUEST_PROGRESS, path, {
                                'id': download_id,
                                'bytes': downloaded_bytes
                            })
                            self.web.done = False
                        except:
                            # NOTE: this must stay a bare except -- when the
                            # client disconnects, the `yield` raises
                            # GeneratorExit, which `except Exception:` would
                            # NOT catch.
                            # looks like the download was canceled
                            self.web.done = True
                            canceled = True

                            # tell the GUI the download has canceled
                            self.web.add_request(self.web.REQUEST_CANCELED, path, {
                                'id': download_id
                            })

                fp.close()

                if self.common.platform != 'Darwin':
                    sys.stdout.write("\n")

                # Download is finished
                if not self.web.stay_open:
                    self.download_in_progress = False

                # Close the server, if necessary
                if not self.web.stay_open and not canceled:
                    print(strings._("closing_automatically"))
                    self.web.running = False
                    try:
                        if shutdown_func is None:
                            raise RuntimeError('Not running with the Werkzeug Server')
                        shutdown_func()
                    except Exception:
                        # best-effort shutdown; nothing more we can do here
                        pass

            r = Response(generate())
            if use_gzip:
                r.headers.set('Content-Encoding', 'gzip')
            r.headers.set('Content-Length', filesize)
            r.headers.set('Content-Disposition', 'attachment', filename=basename)
            r = self.web.add_security_headers(r)
            # guess content type
            (content_type, _) = mimetypes.guess_type(basename, strict=False)
            if content_type is not None:
                r.headers.set('Content-Type', content_type)
            return r

    def set_file_info(self, filenames, processed_size_callback=None):
        """
        Using the list of filenames being shared, fill in details that the web
        page will need to display. This includes zipping up the file in order to
        get the zip file's name and size.

        Returns False if compression was canceled early, True otherwise.
        """
        self.common.log("ShareModeWeb", "set_file_info")
        self.web.cancel_compression = False

        self.cleanup_filenames = []

        # build file info list
        self.file_info = {'files': [], 'dirs': []}
        for filename in filenames:
            info = {
                'filename': filename,
                'basename': os.path.basename(filename.rstrip('/'))
            }
            if os.path.isfile(filename):
                info['size'] = os.path.getsize(filename)
                info['size_human'] = self.common.human_readable_filesize(info['size'])
                self.file_info['files'].append(info)
            if os.path.isdir(filename):
                info['size'] = self.common.dir_size(filename)
                info['size_human'] = self.common.human_readable_filesize(info['size'])
                self.file_info['dirs'].append(info)
        self.file_info['files'] = sorted(self.file_info['files'], key=lambda k: k['basename'])
        self.file_info['dirs'] = sorted(self.file_info['dirs'], key=lambda k: k['basename'])

        # Check if there's only 1 file and no folders
        if len(self.file_info['files']) == 1 and len(self.file_info['dirs']) == 0:
            self.download_filename = self.file_info['files'][0]['filename']
            self.download_filesize = self.file_info['files'][0]['size']

            # Compress the file with gzip now, so we don't have to do it on each request.
            # mkstemp() returns (fd, path); close the fd so it doesn't leak.
            # (Previously 'wb+' was passed as mkstemp's *suffix* argument by
            # mistake, which also left the descriptor open.)
            fd, self.gzip_filename = tempfile.mkstemp(suffix='.gz')
            os.close(fd)
            self._gzip_compress(self.download_filename, self.gzip_filename, 6, processed_size_callback)
            self.gzip_filesize = os.path.getsize(self.gzip_filename)

            # Make sure the gzip file gets cleaned up when onionshare stops
            self.cleanup_filenames.append(self.gzip_filename)

            self.is_zipped = False

        else:
            # Zip up the files and folders
            self.zip_writer = ZipWriter(self.common, processed_size_callback=processed_size_callback)
            self.download_filename = self.zip_writer.zip_filename
            for info in self.file_info['files']:
                self.zip_writer.add_file(info['filename'])
                # Canceling early?
                if self.web.cancel_compression:
                    self.zip_writer.close()
                    return False

            for info in self.file_info['dirs']:
                if not self.zip_writer.add_dir(info['filename']):
                    return False

            self.zip_writer.close()
            self.download_filesize = os.path.getsize(self.download_filename)

            # Make sure the zip file gets cleaned up when onionshare stops
            self.cleanup_filenames.append(self.zip_writer.zip_filename)

            self.is_zipped = True

        return True

    def should_use_gzip(self):
        """
        Should we use gzip for this browser?
        """
        return (not self.is_zipped) and ('gzip' in request.headers.get('Accept-Encoding', '').lower())

    def _gzip_compress(self, input_filename, output_filename, level, processed_size_callback=None):
        """
        Compress a file with gzip, without loading the whole thing into memory
        Thanks: https://stackoverflow.com/questions/27035296/python-how-to-gzip-a-large-text-file-without-memoryerror
        """
        bytes_processed = 0
        blocksize = 1 << 16  # 64kB
        # Context managers guarantee both files are closed even if the
        # compression raises part-way through (the old code leaked the
        # gzip handle on error).
        with open(input_filename, 'rb') as input_file, \
                gzip.open(output_filename, 'wb', level) as output_file:
            while True:
                if processed_size_callback is not None:
                    processed_size_callback(bytes_processed)

                block = input_file.read(blocksize)
                if len(block) == 0:
                    break
                output_file.write(block)
                # Count the bytes actually read: the final block is usually
                # shorter than blocksize (the old `+= blocksize` over-reported
                # progress past the real file size).
                bytes_processed += len(block)


class ZipWriter(object):
    """
    Collects files and directories and compresses them into one zip archive.
    When no zip_filename is supplied, a default onionshare-style filename
    inside a fresh temporary directory is generated instead.
    """
    def __init__(self, common, zip_filename=None, processed_size_callback=None):
        self.common = common
        self.cancel_compression = False

        # Fall back to a randomly-named archive in a new temp dir
        if not zip_filename:
            zip_filename = '{0:s}/onionshare_{1:s}.zip'.format(tempfile.mkdtemp(), self.common.random_string(4, 6))
        self.zip_filename = zip_filename

        self.z = zipfile.ZipFile(self.zip_filename, 'w', allowZip64=True)

        # Normalize the progress callback to a no-op when absent, then
        # report the starting size of zero.
        self.processed_size_callback = processed_size_callback
        if self.processed_size_callback is None:
            self.processed_size_callback = lambda _: None
        self._size = 0
        self.processed_size_callback(self._size)

    def add_file(self, filename):
        """
        Add a file to the zip archive.
        """
        arcname = os.path.basename(filename)
        self.z.write(filename, arcname, zipfile.ZIP_DEFLATED)
        # Track cumulative *uncompressed* bytes and report progress
        self._size = self._size + os.path.getsize(filename)
        self.processed_size_callback(self._size)

    def add_dir(self, filename):
        """
        Add a directory, and all of its children, to the zip archive.
        Returns False if compression was canceled part-way through,
        True otherwise.
        """
        # Strip everything up to (and including) the directory's parent,
        # so archive members keep the shared directory's own name as prefix.
        strip_len = len(os.path.dirname(filename.rstrip('/')) + '/')
        for dirpath, dirnames, filenames in os.walk(filename):
            for f in filenames:
                # Canceling early?
                if self.cancel_compression:
                    return False

                full_filename = os.path.join(dirpath, f)
                if os.path.islink(full_filename):
                    # Skip symlinks rather than following them
                    continue
                self.z.write(full_filename, full_filename[strip_len:], zipfile.ZIP_DEFLATED)
                self._size = self._size + os.path.getsize(full_filename)
                self.processed_size_callback(self._size)

        return True

    def close(self):
        """
        Close the zip archive.
        """
        self.z.close()